|
73 | 73 | except ImportError: |
74 | 74 | import json |
75 | 75 |
|
| 76 | +testyaml = False |
| 77 | +try: |
| 78 | + import oyaml as yaml |
| 79 | + testyaml = True |
| 80 | +except ImportError: |
| 81 | + try: |
| 82 | + import yaml |
| 83 | + testyaml = True |
| 84 | + except ImportError: |
| 85 | + testyaml = False |
| 86 | + |
76 | 87 | try: |
77 | 88 | import configparser |
78 | 89 | except ImportError: |
@@ -2612,7 +2623,7 @@ def _load_all_members_spooled(self): |
2612 | 2623 | scanned_leading = 0 # for tolerant header scan |
2613 | 2624 |
|
2614 | 2625 | while True: |
2615 | | - data = self.file.read(1 << 20) # 1 MiB blocks |
 | 2626 | +            data = self.file.read(__filebuff_size__)  # read in __filebuff_size__-byte blocks |
2616 | 2627 | if not data: |
2617 | 2628 | if d is not None: |
2618 | 2629 | self._spool.write(d.flush()) |
@@ -2770,7 +2781,7 @@ def write(self, data): |
2770 | 2781 |
|
2771 | 2782 | # Buffer and compress in chunks to limit memory |
2772 | 2783 | self._write_buf += data |
2773 | | - if len(self._write_buf) >= (1 << 20): # 1 MiB threshold |
 | 2784 | +        if len(self._write_buf) >= (__filebuff_size__):  # flush threshold (__filebuff_size__ bytes) |
2774 | 2785 | chunk = self._compressor.compress(bytes(self._write_buf)) |
2775 | 2786 | if chunk: |
2776 | 2787 | self.file.write(chunk) |
@@ -3075,7 +3086,7 @@ def _load_all_members_spooled(self): |
3075 | 3086 |
|
3076 | 3087 | self._spool = tempfile.SpooledTemporaryFile(max_size=self.spool_threshold) |
3077 | 3088 |
|
3078 | | - CHUNK = 1 << 20 |
| 3089 | + CHUNK = __filebuff_size__ |
3079 | 3090 | pending = b"" |
3080 | 3091 | d = None |
3081 | 3092 | absolute_offset = 0 |
@@ -3238,7 +3249,7 @@ def write(self, data): |
3238 | 3249 |
|
3239 | 3250 | # Stage and compress in chunks |
3240 | 3251 | self._write_buf += data |
3241 | | - if len(self._write_buf) >= (1 << 20): # 1 MiB threshold |
 | 3252 | +        if len(self._write_buf) >= (__filebuff_size__):  # flush threshold (__filebuff_size__ bytes) |
3242 | 3253 | out = self._compressor.compress(bytes(self._write_buf)) |
3243 | 3254 | if out: |
3244 | 3255 | self.file.write(out) |
@@ -3691,7 +3702,7 @@ def GetFileChecksum(inbytes, checksumtype="md5", encodedata=True, formatspecs=__ |
3691 | 3702 | if CheckSumSupport(algo_key, hashlib_guaranteed): |
3692 | 3703 | h = hashlib.new(algo_key) |
3693 | 3704 | while True: |
3694 | | - chunk = inbytes.read(1 << 20) |
| 3705 | + chunk = inbytes.read(__filebuff_size__) |
3695 | 3706 | if not chunk: |
3696 | 3707 | break |
3697 | 3708 | if not isinstance(chunk, (bytes, bytearray, memoryview)): |
@@ -4143,6 +4154,28 @@ def ReadFileHeaderDataWithContent(fp, listonly=False, uncompress=True, skipcheck |
4143 | 4154 | fprejsoncontent = "" |
4144 | 4155 | fjsonrawcontent = fprejsoncontent |
4145 | 4156 | fjsoncontent = {} |
| 4157 | + elif(testyaml and fjsontype == "yaml"): |
| 4158 | + fjsoncontent = {} |
| 4159 | + fprejsoncontent = fp.read(fjsonsize).decode("UTF-8") |
| 4160 | + if (fjsonsize > 0): |
| 4161 | + try: |
| 4162 | + # try base64 → utf-8 → YAML |
| 4163 | + fjsonrawcontent = base64.b64decode(fprejsoncontent.encode("UTF-8")).decode("UTF-8") |
| 4164 | + fjsoncontent = yaml.safe_load(fjsonrawcontent) or {} |
| 4165 | + except (binascii.Error, UnicodeDecodeError, yaml.YAMLError): |
| 4166 | + try: |
| 4167 | + # fall back to treating the bytes as plain text YAML |
| 4168 | + fjsonrawcontent = fprejsoncontent |
| 4169 | + fjsoncontent = yaml.safe_load(fjsonrawcontent) or {} |
| 4170 | + except (UnicodeDecodeError, yaml.YAMLError): |
| 4171 | + # final fallback: empty |
| 4172 | + fprejsoncontent = "" |
| 4173 | + fjsonrawcontent = fprejsoncontent |
| 4174 | + fjsoncontent = {} |
| 4175 | + else: |
| 4176 | + fprejsoncontent = "" |
| 4177 | + fjsonrawcontent = fprejsoncontent |
| 4178 | + fjsoncontent = {} |
4146 | 4179 | elif(fjsontype=="list"): |
4147 | 4180 | fprejsoncontent = fp.read(fjsonsize).decode("UTF-8") |
4148 | 4181 | flisttmp = MkTempFile() |
@@ -4316,6 +4349,28 @@ def ReadFileHeaderDataWithContentToArray(fp, listonly=False, contentasfile=True, |
4316 | 4349 | fprejsoncontent = "" |
4317 | 4350 | fjsonrawcontent = fprejsoncontent |
4318 | 4351 | fjsoncontent = {} |
| 4352 | + elif(testyaml and fjsontype == "yaml"): |
| 4353 | + fjsoncontent = {} |
| 4354 | + fprejsoncontent = fp.read(fjsonsize).decode("UTF-8") |
| 4355 | + if (fjsonsize > 0): |
| 4356 | + try: |
| 4357 | + # try base64 → utf-8 → YAML |
| 4358 | + fjsonrawcontent = base64.b64decode(fprejsoncontent.encode("UTF-8")).decode("UTF-8") |
| 4359 | + fjsoncontent = yaml.safe_load(fjsonrawcontent) or {} |
| 4360 | + except (binascii.Error, UnicodeDecodeError, yaml.YAMLError): |
| 4361 | + try: |
| 4362 | + # fall back to treating the bytes as plain text YAML |
| 4363 | + fjsonrawcontent = fprejsoncontent |
| 4364 | + fjsoncontent = yaml.safe_load(fjsonrawcontent) or {} |
| 4365 | + except (UnicodeDecodeError, yaml.YAMLError): |
| 4366 | + # final fallback: empty |
| 4367 | + fprejsoncontent = "" |
| 4368 | + fjsonrawcontent = fprejsoncontent |
| 4369 | + fjsoncontent = {} |
| 4370 | + else: |
| 4371 | + fprejsoncontent = "" |
| 4372 | + fjsonrawcontent = fprejsoncontent |
| 4373 | + fjsoncontent = {} |
4319 | 4374 | elif(fjsontype=="list"): |
4320 | 4375 | fprejsoncontent = fp.read(fjsonsize).decode("UTF-8") |
4321 | 4376 | flisttmp = MkTempFile() |
@@ -4502,6 +4557,28 @@ def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False, |
4502 | 4557 | fprejsoncontent = "" |
4503 | 4558 | fjsonrawcontent = fprejsoncontent |
4504 | 4559 | fjsoncontent = {} |
| 4560 | + elif(testyaml and fjsontype == "yaml"): |
| 4561 | + fjsoncontent = {} |
| 4562 | + fprejsoncontent = fp.read(fjsonsize).decode("UTF-8") |
| 4563 | + if (fjsonsize > 0): |
| 4564 | + try: |
| 4565 | + # try base64 → utf-8 → YAML |
| 4566 | + fjsonrawcontent = base64.b64decode(fprejsoncontent.encode("UTF-8")).decode("UTF-8") |
| 4567 | + fjsoncontent = yaml.safe_load(fjsonrawcontent) or {} |
| 4568 | + except (binascii.Error, UnicodeDecodeError, yaml.YAMLError): |
| 4569 | + try: |
| 4570 | + # fall back to treating the bytes as plain text YAML |
| 4571 | + fjsonrawcontent = fprejsoncontent |
| 4572 | + fjsoncontent = yaml.safe_load(fjsonrawcontent) or {} |
| 4573 | + except (UnicodeDecodeError, yaml.YAMLError): |
| 4574 | + # final fallback: empty |
| 4575 | + fprejsoncontent = "" |
| 4576 | + fjsonrawcontent = fprejsoncontent |
| 4577 | + fjsoncontent = {} |
| 4578 | + else: |
| 4579 | + fprejsoncontent = "" |
| 4580 | + fjsonrawcontent = fprejsoncontent |
| 4581 | + fjsoncontent = {} |
4505 | 4582 | elif(fjsontype=="list"): |
4506 | 4583 | fprejsoncontent = fp.read(fjsonsize).decode("UTF-8") |
4507 | 4584 | flisttmp = MkTempFile() |
@@ -8758,7 +8835,7 @@ def ensure_filelike(infile, mode="rb", use_mmap=False, **adapter_kw): |
8758 | 8835 |
|
8759 | 8836 | # ========= copy helpers ========= |
8760 | 8837 |
|
8761 | | -def fast_copy(infp, outfp, bufsize=1 << 20): |
| 8838 | +def fast_copy(infp, outfp, bufsize=__filebuff_size__): |
8762 | 8839 | """ |
8763 | 8840 | Efficient copy from any readable file-like to any writable file-like. |
8764 | 8841 | Uses readinto() when available to avoid extra allocations. |
@@ -8802,7 +8879,7 @@ def copy_file_to_mmap_dest(src_path, outfp, chunk_size=__spoolfile_size__): |
8802 | 8879 | shutil.copyfileobj(fp, outfp, length=chunk_size) |
8803 | 8880 |
|
8804 | 8881 |
|
8805 | | -def copy_opaque(src, dst, bufsize=1 << 20, grow_step=64 << 20): |
| 8882 | +def copy_opaque(src, dst, bufsize=__filebuff_size__, grow_step=64 << 20): |
8806 | 8883 | """ |
8807 | 8884 | Copy opaque bytes from 'src' (any readable file-like) to 'dst' |
8808 | 8885 | (your mmap-backed FileLikeAdapter or any writable file-like). |
|
0 commit comments