diff --git a/docling_core/cli/view.py b/docling_core/cli/view.py
index 6b521804..315c6505 100644
--- a/docling_core/cli/view.py
+++ b/docling_core/cli/view.py
@@ -69,8 +69,7 @@ def view(
         image_mode=ImageRefMode.EMBEDDED,
         split_page_view=split_view,
     )
-    with open(target_path, "w", encoding="utf-8") as f:
-        f.write(html_output)
+    target_path.write_text(html_output, encoding="utf-8")

     webbrowser.open(url=f"file://{target_path.absolute().resolve()}")

diff --git a/docling_core/types/doc/document.py b/docling_core/types/doc/document.py
index a2ffab45..e3670aba 100644
--- a/docling_core/types/doc/document.py
+++ b/docling_core/types/doc/document.py
@@ -6,13 +6,12 @@
 import json
 import logging
 import mimetypes
-import os
 import re
 import sys
 import typing
 import warnings
 from enum import Enum
-from io import BytesIO
+from io import BytesIO, StringIO
 from pathlib import Path
 from typing import (
     Any,
@@ -65,7 +64,11 @@
     PictureClassificationLabel,
 )
 from docling_core.types.doc.tokens import DocumentToken, TableToken
-from docling_core.types.doc.utils import parse_otsl_table_content, relative_path
+from docling_core.types.doc.utils import (
+    is_remote_path,
+    parse_otsl_table_content,
+    relative_path,
+)

 _logger = logging.getLogger(__name__)

@@ -4762,38 +4765,48 @@ def _with_pictures_refs(
         img_count = 0
         image_dir.mkdir(parents=True, exist_ok=True)

-        if image_dir.is_dir():
-            for item, level in result.iterate_items(page_no=page_no, with_groups=False):
-                if isinstance(item, PictureItem):
-                    img = item.get_image(doc=self)
-                    if img is not None:
-
-                        hexhash = PictureItem._image_to_hexhash(img)
-
-                        # loc_path = image_dir / f"image_{img_count:06}.png"
-                        if hexhash is not None:
-                            loc_path = image_dir / f"image_{img_count:06}_{hexhash}.png"
-
-                            img.save(loc_path)
-                            if reference_path is not None:
-                                obj_path = relative_path(
-                                    reference_path.resolve(),
-                                    loc_path.resolve(),
-                                )
-                            else:
-                                obj_path = loc_path
+        # Note: Skip is_dir() check for remote paths since S3/cloud storage
+        # doesn't have real directories - mkdir() is a no-op for remote paths
+        for item, level in result.iterate_items(page_no=page_no, with_groups=False):
+            if isinstance(item, PictureItem):
+                img = item.get_image(doc=self)
+                if img is not None:
+
+                    hexhash = PictureItem._image_to_hexhash(img)
+
+                    # loc_path = image_dir / f"image_{img_count:06}.png"
+                    if hexhash is not None:
+                        loc_path = image_dir / f"image_{img_count:06}_{hexhash}.png"
+
+                        # Use BytesIO + write_bytes for UPath compatibility
+                        buf = BytesIO()
+                        img.save(buf, format="PNG")
+                        loc_path.write_bytes(buf.getvalue())
+
+                        # For remote paths, use absolute URI string; for local, compute relative
+                        if is_remote_path(loc_path) or is_remote_path(reference_path):
+                            # Convert to string URI for remote paths (Pydantic can't serialize UPath)
+                            obj_path = str(loc_path)
+                        elif reference_path is not None:
+                            obj_path = relative_path(
+                                reference_path.resolve(),
+                                loc_path.resolve(),
+                            )
+                        else:
+                            obj_path = loc_path

-                            if item.image is None:
-                                scale = img.size[0] / item.prov[0].bbox.width
-                                item.image = ImageRef.from_pil(
-                                    image=img, dpi=round(72 * scale)
-                                )
-                            item.image.uri = Path(obj_path)
+                        if item.image is None:
+                            scale = img.size[0] / item.prov[0].bbox.width
+                            item.image = ImageRef.from_pil(
+                                image=img, dpi=round(72 * scale)
+                            )
+                        # For remote paths, store as string URI; for local, store as Path
+                        item.image.uri = obj_path

-                            # if item.image._pil is not None:
-                            #     item.image._pil.close()
+                        # if item.image._pil is not None:
+                        #     item.image._pil.close()

-                            img_count += 1
+                        img_count += 1

         return result
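
As background for the hunk above: a PIL image cannot be written straight to an fsspec-backed location, so the patch encodes it into an in-memory buffer and hands the bytes to the path object. The snippet below is a standalone sketch, not part of the patch; it assumes universal-pathlib and Pillow are installed and uses the memory:// filesystem so it runs without any cloud credentials.

    from io import BytesIO

    from PIL import Image
    from upath import UPath  # assumption: universal-pathlib is installed

    image_dir = UPath("memory://artifacts")
    image_dir.mkdir(parents=True, exist_ok=True)  # effectively a no-op on object stores

    img = Image.new("RGB", (16, 16), color="white")
    loc_path = image_dir / "image_000000_deadbeef.png"  # hypothetical file name and hash

    buf = BytesIO()                       # encode the PNG in memory first
    img.save(buf, format="PNG")
    loc_path.write_bytes(buf.getvalue())  # works for local Path and remote UPath alike

    assert loc_path.exists()
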
@@ -4859,7 +4872,7 @@ def save_as_json(
         artifacts_dir, reference_path = self._get_output_paths(filename, artifacts_dir)

         if image_mode == ImageRefMode.REFERENCED:
-            os.makedirs(artifacts_dir, exist_ok=True)
+            artifacts_dir.mkdir(parents=True, exist_ok=True)

         new_doc = self._make_copy_with_refmode(
             artifacts_dir, image_mode, page_no=None, reference_path=reference_path
@@ -4868,8 +4881,7 @@ def save_as_json(
         out = new_doc.export_to_dict(
             coord_precision=coord_precision, confid_precision=confid_precision
         )
-        with open(filename, "w", encoding="utf-8") as fw:
-            json.dump(out, fw, indent=indent)
+        filename.write_text(json.dumps(out, indent=indent), encoding="utf-8")

     @classmethod
     def load_from_json(cls, filename: Union[str, Path]) -> "DoclingDocument":
@@ -4884,8 +4896,7 @@ def load_from_json(cls, filename: Union[str, Path]) -> "DoclingDocument":
         """
         if isinstance(filename, str):
             filename = Path(filename)
-        with open(filename, "r", encoding="utf-8") as f:
-            return cls.model_validate_json(f.read())
+        return cls.model_validate_json(filename.read_text(encoding="utf-8"))

     def save_as_yaml(
         self,
@@ -4902,7 +4913,7 @@ def save_as_yaml(
         artifacts_dir, reference_path = self._get_output_paths(filename, artifacts_dir)

         if image_mode == ImageRefMode.REFERENCED:
-            os.makedirs(artifacts_dir, exist_ok=True)
+            artifacts_dir.mkdir(parents=True, exist_ok=True)

         new_doc = self._make_copy_with_refmode(
             artifacts_dir, image_mode, page_no=None, reference_path=reference_path
@@ -4911,8 +4922,9 @@ def save_as_yaml(
         out = new_doc.export_to_dict(
             coord_precision=coord_precision, confid_precision=confid_precision
         )
-        with open(filename, "w", encoding="utf-8") as fw:
-            yaml.dump(out, fw, default_flow_style=default_flow_style)
+        stream = StringIO()
+        yaml.dump(out, stream, default_flow_style=default_flow_style)
+        filename.write_text(stream.getvalue(), encoding="utf-8")

     @classmethod
     def load_from_yaml(cls, filename: Union[str, Path]) -> "DoclingDocument":
@@ -4926,8 +4938,7 @@ def load_from_yaml(cls, filename: Union[str, Path]) -> "DoclingDocument":
         """
         if isinstance(filename, str):
             filename = Path(filename)
-        with open(filename, encoding="utf-8") as f:
-            data = yaml.load(f, Loader=yaml.SafeLoader)
+        data = yaml.load(filename.read_text(encoding="utf-8"), Loader=yaml.SafeLoader)
         return DoclingDocument.model_validate(data)

     def export_to_dict(
@@ -4979,7 +4990,7 @@ def save_as_markdown(
         artifacts_dir, reference_path = self._get_output_paths(filename, artifacts_dir)

         if image_mode == ImageRefMode.REFERENCED:
-            os.makedirs(artifacts_dir, exist_ok=True)
+            artifacts_dir.mkdir(parents=True, exist_ok=True)

         new_doc = self._make_copy_with_refmode(
             artifacts_dir, image_mode, page_no, reference_path=reference_path
@@ -5005,8 +5016,7 @@ def save_as_markdown(
             mark_meta=mark_meta,
         )

-        with open(filename, "w", encoding="utf-8") as fw:
-            fw.write(md_out)
+        filename.write_text(md_out, encoding="utf-8")

     def export_to_markdown(  # noqa: C901
         self,
@@ -5185,7 +5195,7 @@ def save_as_html(
         artifacts_dir, reference_path = self._get_output_paths(filename, artifacts_dir)

         if image_mode == ImageRefMode.REFERENCED:
-            os.makedirs(artifacts_dir, exist_ok=True)
+            artifacts_dir.mkdir(parents=True, exist_ok=True)

         new_doc = self._make_copy_with_refmode(
             artifacts_dir, image_mode, page_no, reference_path=reference_path
@@ -5205,8 +5215,7 @@ def save_as_html(
             include_annotations=include_annotations,
         )

-        with open(filename, "w", encoding="utf-8") as fw:
-            fw.write(html_out)
+        filename.write_text(html_out, encoding="utf-8")

     def _get_output_paths(
         self, filename: Union[str, Path], artifacts_dir: Optional[Path] = None
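
A note on the save_as_yaml change above: yaml.dump wants a writable stream, while a UPath-style target only guarantees write_text/write_bytes, hence the intermediate StringIO. A standalone sketch of that pattern, with illustrative values and assuming universal-pathlib is installed:

    from io import StringIO

    import yaml
    from upath import UPath  # assumption: universal-pathlib is installed

    out = {"name": "example", "items": [1, 2, 3]}  # stand-in for export_to_dict()

    stream = StringIO()
    yaml.dump(out, stream, default_flow_style=False)

    target = UPath("memory://out") / "doc.yaml"
    target.parent.mkdir(parents=True, exist_ok=True)
    target.write_text(stream.getvalue(), encoding="utf-8")

    assert yaml.safe_load(target.read_text(encoding="utf-8")) == out
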
@@ -5850,8 +5859,7 @@ def save_as_doctags(
             minified=minified,
         )

-        with open(filename, "w", encoding="utf-8") as fw:
-            fw.write(out)
+        filename.write_text(out, encoding="utf-8")

     @deprecated("Use export_to_doctags() instead.")
     def export_to_document_tokens(self, *args, **kwargs):
diff --git a/docling_core/types/doc/page.py b/docling_core/types/doc/page.py
index 7fc67ddf..fee6f89d 100644
--- a/docling_core/types/doc/page.py
+++ b/docling_core/types/doc/page.py
@@ -601,8 +601,7 @@ def save_as_json(
         if isinstance(filename, str):
             filename = Path(filename)
         out = self.export_to_dict()
-        with open(filename, "w", encoding="utf-8") as fw:
-            json.dump(out, fw, indent=indent)
+        filename.write_text(json.dumps(out, indent=indent), encoding="utf-8")

     @classmethod
     def load_from_json(cls, filename: Union[str, Path]) -> "SegmentedPdfPage":
@@ -616,8 +615,7 @@ def load_from_json(cls, filename: Union[str, Path]) -> "SegmentedPdfPage":
         """
         if isinstance(filename, str):
             filename = Path(filename)
-        with open(filename, "r", encoding="utf-8") as f:
-            return cls.model_validate_json(f.read())
+        return cls.model_validate_json(filename.read_text(encoding="utf-8"))

     def crop_text(
         self, cell_unit: TextCellUnit, bbox: BoundingBox, eps: float = 1.0
@@ -1218,8 +1216,7 @@ def save_as_json(self, filename: Union[str, Path], indent: int = 2):
         if isinstance(filename, str):
             filename = Path(filename)
         out = self.export_to_dict()
-        with open(filename, "w", encoding="utf-8") as fw:
-            json.dump(out, fw, indent=indent)
+        filename.write_text(json.dumps(out, indent=indent), encoding="utf-8")

     @classmethod
     def load_from_json(cls, filename: Union[str, Path]) -> "PdfTableOfContents":
@@ -1233,8 +1230,7 @@ def load_from_json(cls, filename: Union[str, Path]) -> "PdfTableOfContents":
         """
         if isinstance(filename, str):
             filename = Path(filename)
-        with open(filename, "r", encoding="utf-8") as f:
-            return cls.model_validate_json(f.read())
+        return cls.model_validate_json(filename.read_text(encoding="utf-8"))


 class ParsedPdfDocument(BaseModel):
@@ -1280,8 +1276,7 @@ def save_as_json(self, filename: Union[str, Path], indent: int = 2):
         if isinstance(filename, str):
             filename = Path(filename)
         out = self.export_to_dict()
-        with open(filename, "w", encoding="utf-8") as fw:
-            json.dump(out, fw, indent=indent)
+        filename.write_text(json.dumps(out, indent=indent), encoding="utf-8")

     @classmethod
     def load_from_json(cls, filename: Union[str, Path]) -> "ParsedPdfDocument":
@@ -1295,5 +1290,4 @@ def load_from_json(cls, filename: Union[str, Path]) -> "ParsedPdfDocument":
         """
         if isinstance(filename, str):
             filename = Path(filename)
-        with open(filename, "r", encoding="utf-8") as f:
-            return cls.model_validate_json(f.read())
+        return cls.model_validate_json(filename.read_text(encoding="utf-8"))
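
One caveat that applies to several of the save/load helpers above: when the caller passes a plain string, it is still coerced through pathlib.Path, and Path collapses the double slash in a URI scheme. Remote destinations therefore need to be passed as already-constructed path objects rather than as URI strings. A short illustration, assuming universal-pathlib is installed:

    from pathlib import Path

    from upath import UPath  # assumption: universal-pathlib is installed

    # A remote URI passed as a plain string loses its scheme separator.
    print(Path("memory://bucket/doc.json"))   # on POSIX: memory:/bucket/doc.json
    # An already-constructed UPath keeps the protocol intact.
    print(UPath("memory://bucket/doc.json"))  # memory://bucket/doc.json
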
diff --git a/docling_core/types/doc/utils.py b/docling_core/types/doc/utils.py
index c4e517a5..7904ecea 100644
--- a/docling_core/types/doc/utils.py
+++ b/docling_core/types/doc/utils.py
@@ -5,7 +5,7 @@
 import re
 import unicodedata
 from pathlib import Path
-from typing import TYPE_CHECKING, List, Optional, Tuple
+from typing import TYPE_CHECKING, Any, List, Optional, Tuple, Union

 from docling_core.types.doc.tokens import _LOC_PREFIX, DocumentToken, TableToken

@@ -13,7 +13,25 @@
     from docling_core.types.doc.document import TableCell, TableData


-def relative_path(src: Path, target: Path) -> Path:
+def is_remote_path(p: Any) -> bool:
+    """Check if a path is a remote/cloud path (e.g., S3, GCS, Azure).
+
+    Works with UPath objects from universal-pathlib. Local paths return False.
+
+    Args:
+        p: A path object (Path, UPath, or similar)
+
+    Returns:
+        bool: True if the path is a remote/cloud path, False otherwise.
+    """
+    # UPath objects have a 'protocol' attribute
+    protocol = getattr(p, "protocol", None)
+    if protocol is not None and protocol not in ("file", ""):
+        return True
+    return False
+
+
+def relative_path(src: Union[str, Path], target: Union[str, Path]) -> Path:
     """Compute the relative path from `src` to `target`.

     Args:
@@ -25,9 +43,19 @@ def relative_path(src: Union[str, Path], target: Union[str, Path]) -> Path:

     Raises:
         ValueError: If either `src` or `target` is not an absolute path.
+
+    Note:
+        For remote paths (UPath with non-file protocols), this function cannot
+        compute relative paths. Use is_remote_path() to check before calling.
     """
-    src = Path(src).resolve()
-    target = Path(target).resolve()
+    # Convert to Path only if string, otherwise keep original type
+    if isinstance(src, str):
+        src = Path(src)
+    if isinstance(target, str):
+        target = Path(target)
+
+    src = src.resolve()
+    target = target.resolve()

     # Ensure both paths are absolute
     if not src.is_absolute():
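
To make the intended split between the two helpers concrete, here is a small sketch (not part of the patch, assumes universal-pathlib is installed) that mirrors how _with_pictures_refs uses them:

    from pathlib import Path

    from upath import UPath  # assumption: universal-pathlib is installed

    from docling_core.types.doc.utils import is_remote_path, relative_path

    local_ref = Path("/data/out").resolve()
    local_img = local_ref / "artifacts" / "img.png"
    remote_img = UPath("memory://bucket/artifacts/img.png")

    # Local case: a relative reference can be computed.
    assert relative_path(local_ref, local_img) == Path("artifacts/img.png")

    # Remote case: the protocol is neither "" nor "file", so keep the absolute URI.
    assert is_remote_path(remote_img) is True
    obj_path = str(remote_img)
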
""" + folder_path = Path(folder) for item in MODELS: json_schema = generate_json_schema(item) - with open( - os.path.join(folder, f"{item}.json"), mode="w", encoding="utf8" - ) as json_file: - json.dump(json_schema, json_file, ensure_ascii=False, indent=2) + output_file = folder_path / f"{item}.json" + output_file.write_text( + json.dumps(json_schema, ensure_ascii=False, indent=2), encoding="utf-8" + ) def main() -> None: diff --git a/docling_core/utils/validate.py b/docling_core/utils/validate.py index 51f6baf9..14507df3 100644 --- a/docling_core/utils/validate.py +++ b/docling_core/utils/validate.py @@ -32,10 +32,11 @@ def parse_arguments(): def run(): """Run the validation of a file containing a Document.""" + from pathlib import Path + file_format, input_file = parse_arguments() - with open(input_file, "r", encoding="utf-8") as fd: - file_ = json.load(fd) + file_ = json.loads(Path(input_file).read_text(encoding="utf-8")) result = (False, "Empty result") diff --git a/pyproject.toml b/pyproject.toml index 2fd1b86e..2caf79e1 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -111,6 +111,7 @@ dev = [ "pytest~=8.3", "pytest-cov>=6.1.1", "python-semantic-release~=7.32", + "universal-pathlib>=0.3.7", ] [tool.uv] diff --git a/test/test_upath_support.py b/test/test_upath_support.py new file mode 100644 index 00000000..8b9684fe --- /dev/null +++ b/test/test_upath_support.py @@ -0,0 +1,175 @@ +"""Tests for UPath/fsspec support for cloud storage compatibility.""" + +import uuid +from pathlib import Path + +import pytest + +from docling_core.types.doc import DoclingDocument, ImageRefMode +from docling_core.types.doc.utils import is_remote_path, relative_path + +# Check if universal-pathlib is available +try: + from upath import UPath + + HAS_UPATH = True +except ImportError: + HAS_UPATH = False + + +@pytest.fixture +def memory_base(): + """Provide a unique memory:// base path for each test.""" + return UPath(f"memory://test-{uuid.uuid4()}") + + +# ============================================================================= +# Tests for is_remote_path() +# ============================================================================= + + +def test_is_remote_path_local_paths_return_false(): + assert is_remote_path(Path("/local/path")) is False + assert is_remote_path(Path(".")) is False + assert is_remote_path(None) is False + assert is_remote_path("/some/path") is False + assert is_remote_path(object()) is False + + +def test_is_remote_path_file_protocol_returns_false(): + class MockFilePath: + protocol = "file" + + class MockEmptyProtocol: + protocol = "" + + assert is_remote_path(MockFilePath()) is False + assert is_remote_path(MockEmptyProtocol()) is False + + +@pytest.mark.parametrize("protocol", ["s3", "gcs", "az", "http", "https"]) +def test_is_remote_path_cloud_protocols_return_true(protocol): + class MockCloudPath: + pass + + MockCloudPath.protocol = protocol + assert is_remote_path(MockCloudPath()) is True + + +@pytest.mark.skipif(not HAS_UPATH, reason="universal-pathlib not installed") +def test_is_remote_path_real_upath(): + assert is_remote_path(UPath("/tmp/test")) is False + assert is_remote_path(UPath("memory://test/path")) is True + + +# ============================================================================= +# Tests for relative_path() +# ============================================================================= + + +def test_relative_path_basic(): + assert relative_path(Path("/a/b"), Path("/a/b/c/d.txt")) == Path("c/d.txt") + assert relative_path("/a/b", "/a/b/c.txt") == 
Path("c.txt") + + +def test_relative_path_navigation(): + # Sibling directory + assert relative_path(Path("/a/b/c"), Path("/a/b/d/e.txt")) == Path("../d/e.txt") + # Parent directory + assert relative_path(Path("/a/b/c/d"), Path("/a/b/e.txt")) == Path("../../e.txt") + + +@pytest.mark.skipif(not HAS_UPATH, reason="universal-pathlib not installed") +def test_relative_path_with_upath(): + src = UPath("/home/user/docs") + target = UPath("/home/user/docs/images/img.png") + assert relative_path(src, target) == Path("images/img.png") + + +# ============================================================================= +# UPath integration tests (memory filesystem) +# ============================================================================= + + +@pytest.mark.skipif(not HAS_UPATH, reason="universal-pathlib not installed") +def test_upath_json_roundtrip(sample_doc, memory_base): + path = memory_base / "doc.json" + sample_doc.save_as_json(path) + assert path.exists() + + loaded = DoclingDocument.load_from_json(path) + assert sample_doc.export_to_dict() == loaded.export_to_dict() + + +@pytest.mark.skipif(not HAS_UPATH, reason="universal-pathlib not installed") +def test_upath_yaml_roundtrip(sample_doc, memory_base): + path = memory_base / "doc.yaml" + sample_doc.save_as_yaml(path) + assert path.exists() + + loaded = DoclingDocument.load_from_yaml(path) + assert sample_doc.export_to_dict() == loaded.export_to_dict() + + +@pytest.mark.skipif(not HAS_UPATH, reason="universal-pathlib not installed") +def test_upath_markdown(sample_doc, memory_base): + path = memory_base / "doc.md" + sample_doc.save_as_markdown(path) + assert path.exists() + assert len(path.read_text()) > 0 + + +@pytest.mark.skipif(not HAS_UPATH, reason="universal-pathlib not installed") +def test_upath_html(sample_doc, memory_base): + path = memory_base / "doc.html" + sample_doc.save_as_html(path) + assert path.exists() + assert "= '3.12'", @@ -71,8 +71,7 @@ name = "black" version = "24.10.0" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "click", version = "8.1.8", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, - { name = "click", version = "8.3.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "click" }, { name = "mypy-extensions" }, { name = "packaging" }, { name = "pathspec" }, @@ -327,41 +326,20 @@ wheels = [ name = "click" version = "8.1.8" source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version < '3.10'", -] dependencies = [ - { name = "colorama", marker = "python_full_version < '3.10' and sys_platform == 'win32'" }, + { name = "colorama", marker = "sys_platform == 'win32'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/b9/2e/0090cbf739cee7d23781ad4b89a9894a41538e4fcf4c31dcdd705b78eb8b/click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a", size = 226593, upload-time = "2024-12-21T18:38:44.339Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/7e/d4/7ebdbd03970677812aac39c869717059dbb71a4cfc033ca6e5221787892c/click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2", size = 98188, upload-time = "2024-12-21T18:38:41.666Z" }, ] -[[package]] -name = "click" -version = "8.3.0" -source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version >= '3.12'", - "python_full_version == '3.11.*'", - 
"python_full_version == '3.10.*'", -] -dependencies = [ - { name = "colorama", marker = "python_full_version >= '3.10' and sys_platform == 'win32'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/46/61/de6cd827efad202d7057d93e0fed9294b96952e188f7384832791c7b2254/click-8.3.0.tar.gz", hash = "sha256:e7b8232224eba16f4ebe410c25ced9f7875cb5f3263ffc93cc3e8da705e229c4", size = 276943, upload-time = "2025-09-18T17:32:23.696Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/db/d3/9dcc0f5797f070ec8edf30fbadfb200e71d9db6b84d211e3b2085a7589a0/click-8.3.0-py3-none-any.whl", hash = "sha256:9b9f285302c6e3064f4330c05f05b81945b2a39544279343e6e7c5f27a9baddc", size = 107295, upload-time = "2025-09-18T17:32:22.42Z" }, -] - [[package]] name = "click-log" version = "0.4.0" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "click", version = "8.1.8", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, - { name = "click", version = "8.3.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "click" }, ] sdist = { url = "https://files.pythonhosted.org/packages/32/32/228be4f971e4bd556c33d52a22682bfe318ffe57a1ddb7a546f347a90260/click-log-0.4.0.tar.gz", hash = "sha256:3970f8570ac54491237bcdb3d8ab5e3eef6c057df29f8c3d1151a51a9c23b975", size = 9985, upload-time = "2022-03-13T11:10:15.262Z" } wheels = [ @@ -784,6 +762,7 @@ dev = [ { name = "pytest-cov" }, { name = "python-semantic-release" }, { name = "types-setuptools" }, + { name = "universal-pathlib" }, ] [package.metadata] @@ -836,15 +815,16 @@ dev = [ { name = "pytest-cov", specifier = ">=6.1.1" }, { name = "python-semantic-release", specifier = "~=7.32" }, { name = "types-setuptools", specifier = "~=70.3" }, + { name = "universal-pathlib", specifier = ">=0.3.7" }, ] [[package]] name = "docutils" -version = "0.22.2" +version = "0.21.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/4a/c0/89fe6215b443b919cb98a5002e107cb5026854ed1ccb6b5833e0768419d1/docutils-0.22.2.tar.gz", hash = "sha256:9fdb771707c8784c8f2728b67cb2c691305933d68137ef95a75db5f4dfbc213d", size = 2289092, upload-time = "2025-09-20T17:55:47.994Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ae/ed/aefcc8cd0ba62a0560c3c18c33925362d46c6075480bfa4df87b28e169a9/docutils-0.21.2.tar.gz", hash = "sha256:3a6b18732edf182daa3cd12775bbb338cf5691468f91eeeb109deff6ebfa986f", size = 2204444, upload-time = "2024-04-23T18:57:18.24Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/66/dd/f95350e853a4468ec37478414fc04ae2d61dad7a947b3015c3dcc51a09b9/docutils-0.22.2-py3-none-any.whl", hash = "sha256:b0e98d679283fc3bb0ead8a5da7f501baa632654e7056e9c5846842213d674d8", size = 632667, upload-time = "2025-09-20T17:55:43.052Z" }, + { url = "https://files.pythonhosted.org/packages/8f/d7/9322c609343d929e75e7e5e6255e614fcc67572cfd083959cdef3b7aad79/docutils-0.21.2-py3-none-any.whl", hash = "sha256:dafca5b9e384f0e419294eb4d2ff9fa826435bf15f15b7bd45723e8ad76811b2", size = 587408, upload-time = "2024-04-23T18:57:14.835Z" }, ] [[package]] @@ -1932,6 +1912,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/16/32/f8e3c85d1d5250232a5d3477a2a28cc291968ff175caeadaf3cc19ce0e4a/parso-0.8.5-py2.py3-none-any.whl", hash = "sha256:646204b5ee239c396d040b90f9e272e9a8017c630092bf59980beb62fd033887", size = 106668, upload-time = "2025-08-23T15:15:25.663Z" }, ] +[[package]] +name = "pathlib-abc" 
+version = "0.5.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d6/cb/448649d7f25d228bf0be3a04590ab7afa77f15e056f8fa976ed05ec9a78f/pathlib_abc-0.5.2.tar.gz", hash = "sha256:fcd56f147234645e2c59c7ae22808b34c364bb231f685ddd9f96885aed78a94c", size = 33342, upload-time = "2025-10-10T18:37:20.524Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b1/29/c028a0731e202035f0e2e0bfbf1a3e46ad6c628cbb17f6f1cc9eea5d9ff1/pathlib_abc-0.5.2-py3-none-any.whl", hash = "sha256:4c9d94cf1b23af417ce7c0417b43333b06a106c01000b286c99de230d95eefbb", size = 19070, upload-time = "2025-10-10T18:37:19.437Z" }, +] + [[package]] name = "pathspec" version = "0.12.1" @@ -2544,8 +2533,7 @@ name = "python-semantic-release" version = "7.34.6" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "click", version = "8.1.8", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, - { name = "click", version = "8.3.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "click" }, { name = "click-log" }, { name = "dotty-dict" }, { name = "gitpython" }, @@ -3782,8 +3770,7 @@ name = "typer" version = "0.19.2" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "click", version = "8.1.8", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, - { name = "click", version = "8.3.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "click" }, { name = "rich" }, { name = "shellingham" }, { name = "typing-extensions" }, @@ -3841,6 +3828,19 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/5c/23/c7abc0ca0a1526a0774eca151daeb8de62ec457e77262b66b359c3c7679e/tzdata-2025.2-py2.py3-none-any.whl", hash = "sha256:1a403fada01ff9221ca8044d701868fa132215d84beb92242d9acd2147f667a8", size = 347839, upload-time = "2025-03-23T13:54:41.845Z" }, ] +[[package]] +name = "universal-pathlib" +version = "0.3.7" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "fsspec" }, + { name = "pathlib-abc" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d5/96/b58b00ce27cbc37fd3c79944438dd8630d2c39f9467c9e73e1a4a3eec1ef/universal_pathlib-0.3.7.tar.gz", hash = "sha256:36331056fa59a7d7cd3b61b4045f3a3418f446f23ec1a01d281c4510814b4b05", size = 253466, upload-time = "2025-12-03T00:06:43.859Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/79/77/53c2d3a0413bc55b4c91067a0c41e55451be9b0784d204e4e46ce5abf668/universal_pathlib-0.3.7-py3-none-any.whl", hash = "sha256:fb95117b20b5981f86ef9d887fddbf9c61d3596634ba42cccea444931d87c201", size = 80738, upload-time = "2025-12-03T00:06:41.997Z" }, +] + [[package]] name = "urllib3" version = "2.5.0"