From 55a7e2238e400919703f4a2bf8a9086d5f472635 Mon Sep 17 00:00:00 2001 From: Stephan Druskat Date: Thu, 10 Jul 2025 14:27:40 +0200 Subject: [PATCH 001/247] Add test module for testing types modules --- test/hermes_test/model/types/__init__.py | 0 test/hermes_test/model/types/test_pyld_util.py | 0 2 files changed, 0 insertions(+), 0 deletions(-) create mode 100644 test/hermes_test/model/types/__init__.py create mode 100644 test/hermes_test/model/types/test_pyld_util.py diff --git a/test/hermes_test/model/types/__init__.py b/test/hermes_test/model/types/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/test/hermes_test/model/types/test_pyld_util.py b/test/hermes_test/model/types/test_pyld_util.py new file mode 100644 index 00000000..e69de29b From e789904984c867940840d15cd9a9088a4da543b0 Mon Sep 17 00:00:00 2001 From: Stephan Druskat Date: Fri, 11 Jul 2025 13:13:42 +0200 Subject: [PATCH 002/247] Add test class for ld_context --- test/hermes_test/model/types/__init__.py | 0 test/hermes_test/model/types/test_ld_context.py | 0 2 files changed, 0 insertions(+), 0 deletions(-) create mode 100644 test/hermes_test/model/types/__init__.py create mode 100644 test/hermes_test/model/types/test_ld_context.py diff --git a/test/hermes_test/model/types/__init__.py b/test/hermes_test/model/types/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/test/hermes_test/model/types/test_ld_context.py b/test/hermes_test/model/types/test_ld_context.py new file mode 100644 index 00000000..e69de29b From 4e45698096610cfa68e7813627f6ef3d99caec95 Mon Sep 17 00:00:00 2001 From: "Kernchen, Sophie" Date: Fri, 11 Jul 2025 14:11:59 +0200 Subject: [PATCH 003/247] First test for ld_container --- test/hermes_test/model/types/__init__.py | 0 .../model/types/test_ld_container.py | 19 +++++++++++++++++++ 2 files changed, 19 insertions(+) create mode 100644 test/hermes_test/model/types/__init__.py create mode 100644 test/hermes_test/model/types/test_ld_container.py diff --git a/test/hermes_test/model/types/__init__.py b/test/hermes_test/model/types/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/test/hermes_test/model/types/test_ld_container.py b/test/hermes_test/model/types/test_ld_container.py new file mode 100644 index 00000000..0ddd541a --- /dev/null +++ b/test/hermes_test/model/types/test_ld_container.py @@ -0,0 +1,19 @@ +from hermes.model.types.ld_container import ld_container + +'''we expect user of this class to give the right input data types + +example extendeed json ld: + { + "http://schema.org/name": [{"@value": "bacon"}], + "eggs": [{"@id": "spam"}], + "green": [{"@id": "png"}] + } +''' + + +def test_container_basic(): + cont = ld_container({"spam": [{"@value": "bacon"}]}) + + assert cont.key is None + assert cont.context == [] + assert cont._data == {"spam": [{"@value": "bacon"}]} From f7ef65d958b9a6ca96c581a67f8deac0150f8798 Mon Sep 17 00:00:00 2001 From: "Kernchen, Sophie" Date: Mon, 14 Jul 2025 13:06:50 +0200 Subject: [PATCH 004/247] Add license information --- test/hermes_test/model/types/__init__.py | 5 +++++ test/hermes_test/model/types/test_ld_container.py | 6 ++++++ 2 files changed, 11 insertions(+) diff --git a/test/hermes_test/model/types/__init__.py b/test/hermes_test/model/types/__init__.py index e69de29b..9a1d6097 100644 --- a/test/hermes_test/model/types/__init__.py +++ b/test/hermes_test/model/types/__init__.py @@ -0,0 +1,5 @@ +# SPDX-FileCopyrightText: 2025 German Aerospace Center (DLR) +# +# SPDX-License-Identifier: Apache-2.0 + +# SPDX-FileContributor: Sophie Kernchen \ No newline at end of file diff --git a/test/hermes_test/model/types/test_ld_container.py b/test/hermes_test/model/types/test_ld_container.py index 0ddd541a..27069a1f 100644 --- a/test/hermes_test/model/types/test_ld_container.py +++ b/test/hermes_test/model/types/test_ld_container.py @@ -1,3 +1,9 @@ +# SPDX-FileCopyrightText: 2025 German Aerospace Center (DLR) +# +# SPDX-License-Identifier: Apache-2.0 + +# SPDX-FileContributor: Sophie Kernchen + from hermes.model.types.ld_container import ld_container '''we expect user of this class to give the right input data types From ccec2ec71b49fba0f97f97ddd4fe44ff84a24179 Mon Sep 17 00:00:00 2001 From: "Kernchen, Sophie" Date: Thu, 24 Jul 2025 10:20:14 +0200 Subject: [PATCH 005/247] Test ld_container add_context --- .../model/types/test_ld_container.py | 42 ++++++++++++++++++- 1 file changed, 41 insertions(+), 1 deletion(-) diff --git a/test/hermes_test/model/types/test_ld_container.py b/test/hermes_test/model/types/test_ld_container.py index 27069a1f..68ce9cd9 100644 --- a/test/hermes_test/model/types/test_ld_container.py +++ b/test/hermes_test/model/types/test_ld_container.py @@ -1,14 +1,16 @@ # SPDX-FileCopyrightText: 2025 German Aerospace Center (DLR) # # SPDX-License-Identifier: Apache-2.0 +import re # SPDX-FileContributor: Sophie Kernchen +import pytest, requests from hermes.model.types.ld_container import ld_container '''we expect user of this class to give the right input data types -example extendeed json ld: +example extended json ld: { "http://schema.org/name": [{"@value": "bacon"}], "eggs": [{"@id": "spam"}], @@ -23,3 +25,41 @@ def test_container_basic(): assert cont.key is None assert cont.context == [] assert cont._data == {"spam": [{"@value": "bacon"}]} + + +def test_container_ld_value(): + cont = ld_container({"spam": [{"@value": "bacon"}]}) + + assert cont.ld_value == {"spam": [{"@value": "bacon"}]} + + +def test_container_add_context(httpserver): + content = {"@context": {"type": "@type", "id": "@id", "schema": "http://schema.org/", "ham": "https://fake.site/", + "Organization": {"@id": "schema:Organization"}}} + + url = httpserver.url_for("/url") + + httpserver.expect_request("/url").respond_with_json(content) + cont = ld_container({"spam": [{"@value": "bacon"}]}) + cont.add_context([url]) + + assert cont.context == [url] + assert cont.full_context == [url] + + +def test_container_parent(httpserver): + content = {"@context": {"type": "@type", "id": "@id", "schema": "http://schema.org/", "ham": "https://fake.site/", + "Organization": {"@id": "schema:Organization"}}} + + url = httpserver.url_for("/url") + + httpserver.expect_request("/url").respond_with_json(content) + + cont_parent = ld_container({"ham": [{"@value": "eggs"}]}) + cont = ld_container({"spam": [{"@value": "bacon"}]}, parent=cont_parent) + assert cont.full_context == [] + + cont_parent.add_context([url]) + + assert cont.parent == cont_parent + assert cont.full_context == [url] From 04a727d0bee5b8627897285aa31d56608871b176 Mon Sep 17 00:00:00 2001 From: "Kernchen, Sophie" Date: Thu, 24 Jul 2025 11:25:58 +0200 Subject: [PATCH 006/247] Refactor tests to class --- .../model/types/test_ld_container.py | 62 ++++++++++--------- 1 file changed, 34 insertions(+), 28 deletions(-) diff --git a/test/hermes_test/model/types/test_ld_container.py b/test/hermes_test/model/types/test_ld_container.py index 68ce9cd9..264e2930 100644 --- a/test/hermes_test/model/types/test_ld_container.py +++ b/test/hermes_test/model/types/test_ld_container.py @@ -19,47 +19,53 @@ ''' -def test_container_basic(): - cont = ld_container({"spam": [{"@value": "bacon"}]}) +class TestLdContainer: + @classmethod + @pytest.fixture(autouse=True) + def setup_class(cls, httpserver): + content = { + "@context": {"type": "@type", "id": "@id", "schema": "http://schema.org/", "ham": "https://fake.site/", + "Organization": {"@id": "schema:Organization"}}} - assert cont.key is None - assert cont.context == [] - assert cont._data == {"spam": [{"@value": "bacon"}]} + cls.url = httpserver.url_for("/url") + httpserver.expect_request("/url").respond_with_json(content) -def test_container_ld_value(): - cont = ld_container({"spam": [{"@value": "bacon"}]}) + def test_container_basic(self): + cont = ld_container({"spam": [{"@value": "bacon"}]}) - assert cont.ld_value == {"spam": [{"@value": "bacon"}]} + assert cont.key is None + assert cont.context == [] + assert cont._data == {"spam": [{"@value": "bacon"}]} + def test_container_ld_value(self): + cont = ld_container({"spam": [{"@value": "bacon"}]}) -def test_container_add_context(httpserver): - content = {"@context": {"type": "@type", "id": "@id", "schema": "http://schema.org/", "ham": "https://fake.site/", - "Organization": {"@id": "schema:Organization"}}} + assert cont.ld_value == {"spam": [{"@value": "bacon"}]} - url = httpserver.url_for("/url") + def test_container_add_context(self): - httpserver.expect_request("/url").respond_with_json(content) - cont = ld_container({"spam": [{"@value": "bacon"}]}) - cont.add_context([url]) + cont = ld_container({"spam": [{"@value": "bacon"}]}) + cont.add_context([self.url]) - assert cont.context == [url] - assert cont.full_context == [url] + assert cont.context == [self.url] + assert cont.full_context == [self.url] + def test_container_parent(self): -def test_container_parent(httpserver): - content = {"@context": {"type": "@type", "id": "@id", "schema": "http://schema.org/", "ham": "https://fake.site/", - "Organization": {"@id": "schema:Organization"}}} - url = httpserver.url_for("/url") + cont_parent = ld_container({"ham": [{"@value": "eggs"}]}) + cont = ld_container({"spam": [{"@value": "bacon"}]}, parent=cont_parent) + assert cont.full_context == [] - httpserver.expect_request("/url").respond_with_json(content) + cont_parent.add_context([self.url]) - cont_parent = ld_container({"ham": [{"@value": "eggs"}]}) - cont = ld_container({"spam": [{"@value": "bacon"}]}, parent=cont_parent) - assert cont.full_context == [] + assert cont.parent == cont_parent + assert cont.full_context == [self.url] - cont_parent.add_context([url]) + def test_container_full_context(self): + cont_grand_parent = ld_container({"ham": [{"@value": "eggs"}]}, context=[self.url]) + cont_parent = ld_container({"ham": [{"@value": "eggs"}]},parent=cont_grand_parent) + cont = ld_container({"spam": [{"@value": "bacon"}]}, parent=cont_parent) - assert cont.parent == cont_parent - assert cont.full_context == [url] + assert cont.full_context == [self.url] \ No newline at end of file From 6cb305bd5b97e9eaff1a490ae36ced06004f1690 Mon Sep 17 00:00:00 2001 From: "Kernchen, Sophie" Date: Thu, 24 Jul 2025 13:35:07 +0200 Subject: [PATCH 007/247] Test representation methods --- src/hermes/model/types/ld_container.py | 11 ++++++- .../model/types/test_ld_container.py | 31 ++++++++++++++----- 2 files changed, 34 insertions(+), 8 deletions(-) diff --git a/src/hermes/model/types/ld_container.py b/src/hermes/model/types/ld_container.py index fd84e033..0f0048bf 100644 --- a/src/hermes/model/types/ld_container.py +++ b/src/hermes/model/types/ld_container.py @@ -70,6 +70,15 @@ def full_context(self): @property def path(self): """ Create a path representation for this item. """ + ''' + FIXME: #381 Decision if the path should look like this. Other option: + + if self.parent: + return self.parent.path + [self.key if self.index is None else self.index] + else: + return ["(self.key if self.index is None else self.index)"] braces to show you are here. + + ''' if self.parent: return self.parent.path + [self.key if self.index is None else self.index] else: @@ -130,7 +139,7 @@ def _to_expanded_json(self, key, value): return ld_value def __repr__(self): - return f'{type(self).__name__}({self._data[0]})' + return f'{type(self).__name__}({self._data[0]})' #FIXME: #397 KeyError, maybe removing the index def __str__(self): return str(self.to_python()) diff --git a/test/hermes_test/model/types/test_ld_container.py b/test/hermes_test/model/types/test_ld_container.py index 264e2930..768fc808 100644 --- a/test/hermes_test/model/types/test_ld_container.py +++ b/test/hermes_test/model/types/test_ld_container.py @@ -37,6 +37,7 @@ def test_container_basic(self): assert cont.key is None assert cont.context == [] assert cont._data == {"spam": [{"@value": "bacon"}]} + assert cont.path == ["$"] def test_container_ld_value(self): cont = ld_container({"spam": [{"@value": "bacon"}]}) @@ -52,8 +53,6 @@ def test_container_add_context(self): assert cont.full_context == [self.url] def test_container_parent(self): - - cont_parent = ld_container({"ham": [{"@value": "eggs"}]}) cont = ld_container({"spam": [{"@value": "bacon"}]}, parent=cont_parent) assert cont.full_context == [] @@ -63,9 +62,27 @@ def test_container_parent(self): assert cont.parent == cont_parent assert cont.full_context == [self.url] - def test_container_full_context(self): - cont_grand_parent = ld_container({"ham": [{"@value": "eggs"}]}, context=[self.url]) - cont_parent = ld_container({"ham": [{"@value": "eggs"}]},parent=cont_grand_parent) - cont = ld_container({"spam": [{"@value": "bacon"}]}, parent=cont_parent) + def test_container_full_context_and_path(self, httpserver): + httpserver.expect_request("/url2").respond_with_json({"spam": "eggs"}) + httpserver.expect_request("/url3").respond_with_json({"ham": "bacon"}) + httpserver.expect_request("/url4").respond_with_json({"@context": {"id": "@id"}}) + + cont_grand_parent = ld_container({"ham": [{"@value": "eggs"}]}, context=[self.url], index=1) + cont_parent = ld_container({"ham": [{"@value": "eggs"}]}, context=[httpserver.url_for("/url2"), + httpserver.url_for("/url4")], + parent=cont_grand_parent) + cont = ld_container({"spam": [{"@value": "bacon"}]}, context=[httpserver.url_for("/url3")], parent=cont_parent, + index=3) + assert cont_parent.full_context == [self.url, httpserver.url_for("/url2"), httpserver.url_for("/url4")] + assert cont.full_context == [self.url, httpserver.url_for("/url2"), httpserver.url_for("/url4"), + httpserver.url_for("/url3")] + assert cont_grand_parent.path == ["$"] # FIXME: #381 Decision if the path should look like this + assert cont_parent.path == ["$", None] + assert cont.path == ["$", None, 3] + + def test_container_str_and_repr(self): + cont = ld_container({"spam": [{"@value": "bacon"}]}, index=3) + assert repr(cont) == ld_container({"spam": [{"@value": "bacon"}]}) #FIXME: #397 KeyError + with pytest.raises(NotImplementedError): + str(cont) - assert cont.full_context == [self.url] \ No newline at end of file From de70bed3ecfdc25fdbc0ac687299fda88284e5fe Mon Sep 17 00:00:00 2001 From: Stephan Druskat Date: Fri, 25 Jul 2025 18:47:59 +0200 Subject: [PATCH 008/247] Test getting items from context --- .../model/types/test_ld_context.py | 29 +++++++++++++++++++ 1 file changed, 29 insertions(+) diff --git a/test/hermes_test/model/types/test_ld_context.py b/test/hermes_test/model/types/test_ld_context.py index e69de29b..f9321ad3 100644 --- a/test/hermes_test/model/types/test_ld_context.py +++ b/test/hermes_test/model/types/test_ld_context.py @@ -0,0 +1,29 @@ +import pytest + +from hermes.model.types.ld_context import ContextPrefix, ALL_CONTEXTS, CODEMETA_CONTEXT, PROV_CONTEXT + +@pytest.fixture +def ctx(): + return ContextPrefix(ALL_CONTEXTS) + +def test_get_default_item(ctx): + item = ctx["maintainer"] + assert item == "https://codemeta.github.io/terms/maintainer" + +def test_get_prefixed_items(ctx): + item = ctx["schema:Organization"] + assert item == "http://schema.org/Organization" + item = ctx["hermes:semanticVersion"] + assert item == "https://schema.software-metadata.pub/hermes-content/1.0/semanticVersion" # TODO: Change on #393 fix + + +@pytest.mark.parametrize("non_str,error_type", [(0, TypeError), (None, TypeError), ([], ValueError), ({"foo"}, ValueError)]) +def test_get_non_str_item_fail(ctx, non_str, error_type): + with pytest.raises(error_type): + print(ctx[non_str]) + +@pytest.mark.parametrize("item", ["", "foo", [0, "foo"], (0, "foo"), {"foo": "bar", "baz": "foo"}]) +def test_get_item_fail(ctx, item): + with pytest.raises(Exception) as e: + item_ = ctx[item] + print(item, "->", item_) From 1b1df215c7068a2c6508d8d20962fbb5685688fd Mon Sep 17 00:00:00 2001 From: Stephan Druskat Date: Mon, 28 Jul 2025 07:47:42 +0200 Subject: [PATCH 009/247] Add tests for invalid terms and input types --- .../model/types/test_ld_context.py | 55 +++++++++++++++---- 1 file changed, 45 insertions(+), 10 deletions(-) diff --git a/test/hermes_test/model/types/test_ld_context.py b/test/hermes_test/model/types/test_ld_context.py index f9321ad3..1409f2b8 100644 --- a/test/hermes_test/model/types/test_ld_context.py +++ b/test/hermes_test/model/types/test_ld_context.py @@ -1,29 +1,64 @@ import pytest -from hermes.model.types.ld_context import ContextPrefix, ALL_CONTEXTS, CODEMETA_CONTEXT, PROV_CONTEXT +from hermes.model.types.ld_context import ( + ContextPrefix, + ALL_CONTEXTS, + CODEMETA_CONTEXT, + PROV_CONTEXT, +) + @pytest.fixture def ctx(): return ContextPrefix(ALL_CONTEXTS) -def test_get_default_item(ctx): + +def test_codemeta_prefix(ctx): + """Default vocabulary in context has the correct base IRI.""" + assert ctx.prefix[None] == "https://codemeta.github.io/terms/" + + +def test_get_codemeta_item(ctx): + """Context returns fully expanded terms for default vocabulary in the context.""" item = ctx["maintainer"] assert item == "https://codemeta.github.io/terms/maintainer" + def test_get_prefixed_items(ctx): + """Context returns fully expanded terms for prefixed vocabularies in the context.""" item = ctx["schema:Organization"] assert item == "http://schema.org/Organization" item = ctx["hermes:semanticVersion"] - assert item == "https://schema.software-metadata.pub/hermes-content/1.0/semanticVersion" # TODO: Change on #393 fix + assert ( + item + == "https://schema.software-metadata.pub/hermes-content/1.0/semanticVersion" + ) # TODO: Change on #393 fix -@pytest.mark.parametrize("non_str,error_type", [(0, TypeError), (None, TypeError), ([], ValueError), ({"foo"}, ValueError)]) +@pytest.mark.parametrize( + "non_str,error_type", + [(0, TypeError), (None, TypeError), ([], ValueError), ({"foo"}, ValueError)], +) def test_get_non_str_item_fail(ctx, non_str, error_type): + """Context raises on unacceptable input.""" with pytest.raises(error_type): - print(ctx[non_str]) + ctx[non_str] + -@pytest.mark.parametrize("item", ["", "foo", [0, "foo"], (0, "foo"), {"foo": "bar", "baz": "foo"}]) -def test_get_item_fail(ctx, item): - with pytest.raises(Exception) as e: - item_ = ctx[item] - print(item, "->", item_) +@pytest.mark.parametrize( + "item", + [ + "", + "fooBar", + [0, "foo"], + (0, "foo"), + {"foo": "bar", "baz": "foo"}, + "schema:fooBar", + "hermes:fooBar", + "codemeta:maintainer" # Prefixed CodeMeta doesn't exist in context + ], +) +def test_get_item_validate_fail(ctx, item): + """Context raises on terms that don't exist in the context.""" + with pytest.raises(Exception): # FIXME: Replace with custom error, e.g., hermes.model.errors.InvalidTermException + ctx[item] From 55ad4757c46d674b2c97be56aff450c9db98c44e Mon Sep 17 00:00:00 2001 From: Stephan Druskat Date: Mon, 28 Jul 2025 07:51:07 +0200 Subject: [PATCH 010/247] Add documentation and some FIXMEs --- src/hermes/model/types/ld_context.py | 24 +++++++++++++++++++++++- 1 file changed, 23 insertions(+), 1 deletion(-) diff --git a/src/hermes/model/types/ld_context.py b/src/hermes/model/types/ld_context.py index 4974911c..cfbb9beb 100644 --- a/src/hermes/model/types/ld_context.py +++ b/src/hermes/model/types/ld_context.py @@ -3,6 +3,7 @@ # SPDX-License-Identifier: Apache-2.0 # SPDX-FileContributor: Michael Meinel +# SPDX-FileContributor: Stephan Druskat CODEMETA_PREFIX = "https://doi.org/10.5063/schema/codemeta-2.0" @@ -27,7 +28,20 @@ class ContextPrefix: + """ + FIXME: Rename to `LDContext`, `HermesLDContext` or similar, + FIXME: as this class represents JSON-LD contexts. + Represents the context of the hermes JSON-LD data model and provides two views on the model: + + - as a two-item list, of which the first item is the default, unprefixed vocabulary IRI, and the second is a dict + mapping prefixes to vocabulary IRIs; + - as a dict mapping prefixes to vocabulary IRIs, where the default vocabulary has a prefix of None. + """ def __init__(self, context): + """ + @param context: A two-item list, where the first item is the default vocabulary's IRI string, and the second + is a dict mapping vocabulary prefixes to their respective IRI string. + """ self.context = context self.prefix = {} @@ -41,7 +55,15 @@ def __init__(self, context): if isinstance(base_url, str) }) - def __getitem__(self, item): + + def __getitem__(self, item): # FIXME Rename parameter to 'key' + """ + FIXME: Document in class, not here + FIXME: Add type hints for params and return + + Gets the fully qualified IRI for a term from a vocabulary inside the initialized context. + The vocabulary must have been added to the context at initialization. + """ if not isinstance(item, str): prefix, name = item elif ':' in item: From 39f2315bd03028aad2eba84d940cd5dfdbd54bf7 Mon Sep 17 00:00:00 2001 From: Stephan Druskat Date: Mon, 28 Jul 2025 07:53:38 +0200 Subject: [PATCH 011/247] Add copyright and license header --- test/hermes_test/model/types/test_ld_context.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/test/hermes_test/model/types/test_ld_context.py b/test/hermes_test/model/types/test_ld_context.py index 1409f2b8..80beb620 100644 --- a/test/hermes_test/model/types/test_ld_context.py +++ b/test/hermes_test/model/types/test_ld_context.py @@ -1,3 +1,8 @@ +# SPDX-FileCopyrightText: 2025 German Aerospace Center (DLR) +# SPDX-FileContributor: Stephan Druskat +# +# SPDX-License-Identifier: Apache-2.0 + import pytest from hermes.model.types.ld_context import ( From 3c6ca777fd9a57886e076575734c399ff0b3304c Mon Sep 17 00:00:00 2001 From: Stephan Druskat Date: Mon, 28 Jul 2025 07:55:48 +0200 Subject: [PATCH 012/247] Add missing licenses --- LICENSES/CC-BY-SA-3.0.txt | 359 ++++++++++++++++++++++++++++++++++++++ LICENSES/W3C-20150513.txt | 17 ++ 2 files changed, 376 insertions(+) create mode 100644 LICENSES/CC-BY-SA-3.0.txt create mode 100644 LICENSES/W3C-20150513.txt diff --git a/LICENSES/CC-BY-SA-3.0.txt b/LICENSES/CC-BY-SA-3.0.txt new file mode 100644 index 00000000..604209a8 --- /dev/null +++ b/LICENSES/CC-BY-SA-3.0.txt @@ -0,0 +1,359 @@ +Creative Commons Legal Code + +Attribution-ShareAlike 3.0 Unported + + CREATIVE COMMONS CORPORATION IS NOT A LAW FIRM AND DOES NOT PROVIDE + LEGAL SERVICES. DISTRIBUTION OF THIS LICENSE DOES NOT CREATE AN + ATTORNEY-CLIENT RELATIONSHIP. CREATIVE COMMONS PROVIDES THIS + INFORMATION ON AN "AS-IS" BASIS. CREATIVE COMMONS MAKES NO WARRANTIES + REGARDING THE INFORMATION PROVIDED, AND DISCLAIMS LIABILITY FOR + DAMAGES RESULTING FROM ITS USE. + +License + +THE WORK (AS DEFINED BELOW) IS PROVIDED UNDER THE TERMS OF THIS CREATIVE +COMMONS PUBLIC LICENSE ("CCPL" OR "LICENSE"). THE WORK IS PROTECTED BY +COPYRIGHT AND/OR OTHER APPLICABLE LAW. ANY USE OF THE WORK OTHER THAN AS +AUTHORIZED UNDER THIS LICENSE OR COPYRIGHT LAW IS PROHIBITED. + +BY EXERCISING ANY RIGHTS TO THE WORK PROVIDED HERE, YOU ACCEPT AND AGREE +TO BE BOUND BY THE TERMS OF THIS LICENSE. TO THE EXTENT THIS LICENSE MAY +BE CONSIDERED TO BE A CONTRACT, THE LICENSOR GRANTS YOU THE RIGHTS +CONTAINED HERE IN CONSIDERATION OF YOUR ACCEPTANCE OF SUCH TERMS AND +CONDITIONS. + +1. Definitions + + a. "Adaptation" means a work based upon the Work, or upon the Work and + other pre-existing works, such as a translation, adaptation, + derivative work, arrangement of music or other alterations of a + literary or artistic work, or phonogram or performance and includes + cinematographic adaptations or any other form in which the Work may be + recast, transformed, or adapted including in any form recognizably + derived from the original, except that a work that constitutes a + Collection will not be considered an Adaptation for the purpose of + this License. For the avoidance of doubt, where the Work is a musical + work, performance or phonogram, the synchronization of the Work in + timed-relation with a moving image ("synching") will be considered an + Adaptation for the purpose of this License. + b. "Collection" means a collection of literary or artistic works, such as + encyclopedias and anthologies, or performances, phonograms or + broadcasts, or other works or subject matter other than works listed + in Section 1(f) below, which, by reason of the selection and + arrangement of their contents, constitute intellectual creations, in + which the Work is included in its entirety in unmodified form along + with one or more other contributions, each constituting separate and + independent works in themselves, which together are assembled into a + collective whole. A work that constitutes a Collection will not be + considered an Adaptation (as defined below) for the purposes of this + License. + c. "Creative Commons Compatible License" means a license that is listed + at https://creativecommons.org/compatiblelicenses that has been + approved by Creative Commons as being essentially equivalent to this + License, including, at a minimum, because that license: (i) contains + terms that have the same purpose, meaning and effect as the License + Elements of this License; and, (ii) explicitly permits the relicensing + of adaptations of works made available under that license under this + License or a Creative Commons jurisdiction license with the same + License Elements as this License. + d. "Distribute" means to make available to the public the original and + copies of the Work or Adaptation, as appropriate, through sale or + other transfer of ownership. + e. "License Elements" means the following high-level license attributes + as selected by Licensor and indicated in the title of this License: + Attribution, ShareAlike. + f. "Licensor" means the individual, individuals, entity or entities that + offer(s) the Work under the terms of this License. + g. "Original Author" means, in the case of a literary or artistic work, + the individual, individuals, entity or entities who created the Work + or if no individual or entity can be identified, the publisher; and in + addition (i) in the case of a performance the actors, singers, + musicians, dancers, and other persons who act, sing, deliver, declaim, + play in, interpret or otherwise perform literary or artistic works or + expressions of folklore; (ii) in the case of a phonogram the producer + being the person or legal entity who first fixes the sounds of a + performance or other sounds; and, (iii) in the case of broadcasts, the + organization that transmits the broadcast. + h. "Work" means the literary and/or artistic work offered under the terms + of this License including without limitation any production in the + literary, scientific and artistic domain, whatever may be the mode or + form of its expression including digital form, such as a book, + pamphlet and other writing; a lecture, address, sermon or other work + of the same nature; a dramatic or dramatico-musical work; a + choreographic work or entertainment in dumb show; a musical + composition with or without words; a cinematographic work to which are + assimilated works expressed by a process analogous to cinematography; + a work of drawing, painting, architecture, sculpture, engraving or + lithography; a photographic work to which are assimilated works + expressed by a process analogous to photography; a work of applied + art; an illustration, map, plan, sketch or three-dimensional work + relative to geography, topography, architecture or science; a + performance; a broadcast; a phonogram; a compilation of data to the + extent it is protected as a copyrightable work; or a work performed by + a variety or circus performer to the extent it is not otherwise + considered a literary or artistic work. + i. "You" means an individual or entity exercising rights under this + License who has not previously violated the terms of this License with + respect to the Work, or who has received express permission from the + Licensor to exercise rights under this License despite a previous + violation. + j. "Publicly Perform" means to perform public recitations of the Work and + to communicate to the public those public recitations, by any means or + process, including by wire or wireless means or public digital + performances; to make available to the public Works in such a way that + members of the public may access these Works from a place and at a + place individually chosen by them; to perform the Work to the public + by any means or process and the communication to the public of the + performances of the Work, including by public digital performance; to + broadcast and rebroadcast the Work by any means including signs, + sounds or images. + k. "Reproduce" means to make copies of the Work by any means including + without limitation by sound or visual recordings and the right of + fixation and reproducing fixations of the Work, including storage of a + protected performance or phonogram in digital form or other electronic + medium. + +2. Fair Dealing Rights. Nothing in this License is intended to reduce, +limit, or restrict any uses free from copyright or rights arising from +limitations or exceptions that are provided for in connection with the +copyright protection under copyright law or other applicable laws. + +3. License Grant. Subject to the terms and conditions of this License, +Licensor hereby grants You a worldwide, royalty-free, non-exclusive, +perpetual (for the duration of the applicable copyright) license to +exercise the rights in the Work as stated below: + + a. to Reproduce the Work, to incorporate the Work into one or more + Collections, and to Reproduce the Work as incorporated in the + Collections; + b. to create and Reproduce Adaptations provided that any such Adaptation, + including any translation in any medium, takes reasonable steps to + clearly label, demarcate or otherwise identify that changes were made + to the original Work. For example, a translation could be marked "The + original work was translated from English to Spanish," or a + modification could indicate "The original work has been modified."; + c. to Distribute and Publicly Perform the Work including as incorporated + in Collections; and, + d. to Distribute and Publicly Perform Adaptations. + e. For the avoidance of doubt: + + i. Non-waivable Compulsory License Schemes. In those jurisdictions in + which the right to collect royalties through any statutory or + compulsory licensing scheme cannot be waived, the Licensor + reserves the exclusive right to collect such royalties for any + exercise by You of the rights granted under this License; + ii. Waivable Compulsory License Schemes. In those jurisdictions in + which the right to collect royalties through any statutory or + compulsory licensing scheme can be waived, the Licensor waives the + exclusive right to collect such royalties for any exercise by You + of the rights granted under this License; and, + iii. Voluntary License Schemes. The Licensor waives the right to + collect royalties, whether individually or, in the event that the + Licensor is a member of a collecting society that administers + voluntary licensing schemes, via that society, from any exercise + by You of the rights granted under this License. + +The above rights may be exercised in all media and formats whether now +known or hereafter devised. The above rights include the right to make +such modifications as are technically necessary to exercise the rights in +other media and formats. Subject to Section 8(f), all rights not expressly +granted by Licensor are hereby reserved. + +4. Restrictions. The license granted in Section 3 above is expressly made +subject to and limited by the following restrictions: + + a. You may Distribute or Publicly Perform the Work only under the terms + of this License. You must include a copy of, or the Uniform Resource + Identifier (URI) for, this License with every copy of the Work You + Distribute or Publicly Perform. You may not offer or impose any terms + on the Work that restrict the terms of this License or the ability of + the recipient of the Work to exercise the rights granted to that + recipient under the terms of the License. You may not sublicense the + Work. You must keep intact all notices that refer to this License and + to the disclaimer of warranties with every copy of the Work You + Distribute or Publicly Perform. When You Distribute or Publicly + Perform the Work, You may not impose any effective technological + measures on the Work that restrict the ability of a recipient of the + Work from You to exercise the rights granted to that recipient under + the terms of the License. This Section 4(a) applies to the Work as + incorporated in a Collection, but this does not require the Collection + apart from the Work itself to be made subject to the terms of this + License. If You create a Collection, upon notice from any Licensor You + must, to the extent practicable, remove from the Collection any credit + as required by Section 4(c), as requested. If You create an + Adaptation, upon notice from any Licensor You must, to the extent + practicable, remove from the Adaptation any credit as required by + Section 4(c), as requested. + b. You may Distribute or Publicly Perform an Adaptation only under the + terms of: (i) this License; (ii) a later version of this License with + the same License Elements as this License; (iii) a Creative Commons + jurisdiction license (either this or a later license version) that + contains the same License Elements as this License (e.g., + Attribution-ShareAlike 3.0 US)); (iv) a Creative Commons Compatible + License. If you license the Adaptation under one of the licenses + mentioned in (iv), you must comply with the terms of that license. If + you license the Adaptation under the terms of any of the licenses + mentioned in (i), (ii) or (iii) (the "Applicable License"), you must + comply with the terms of the Applicable License generally and the + following provisions: (I) You must include a copy of, or the URI for, + the Applicable License with every copy of each Adaptation You + Distribute or Publicly Perform; (II) You may not offer or impose any + terms on the Adaptation that restrict the terms of the Applicable + License or the ability of the recipient of the Adaptation to exercise + the rights granted to that recipient under the terms of the Applicable + License; (III) You must keep intact all notices that refer to the + Applicable License and to the disclaimer of warranties with every copy + of the Work as included in the Adaptation You Distribute or Publicly + Perform; (IV) when You Distribute or Publicly Perform the Adaptation, + You may not impose any effective technological measures on the + Adaptation that restrict the ability of a recipient of the Adaptation + from You to exercise the rights granted to that recipient under the + terms of the Applicable License. This Section 4(b) applies to the + Adaptation as incorporated in a Collection, but this does not require + the Collection apart from the Adaptation itself to be made subject to + the terms of the Applicable License. + c. If You Distribute, or Publicly Perform the Work or any Adaptations or + Collections, You must, unless a request has been made pursuant to + Section 4(a), keep intact all copyright notices for the Work and + provide, reasonable to the medium or means You are utilizing: (i) the + name of the Original Author (or pseudonym, if applicable) if supplied, + and/or if the Original Author and/or Licensor designate another party + or parties (e.g., a sponsor institute, publishing entity, journal) for + attribution ("Attribution Parties") in Licensor's copyright notice, + terms of service or by other reasonable means, the name of such party + or parties; (ii) the title of the Work if supplied; (iii) to the + extent reasonably practicable, the URI, if any, that Licensor + specifies to be associated with the Work, unless such URI does not + refer to the copyright notice or licensing information for the Work; + and (iv) , consistent with Ssection 3(b), in the case of an + Adaptation, a credit identifying the use of the Work in the Adaptation + (e.g., "French translation of the Work by Original Author," or + "Screenplay based on original Work by Original Author"). The credit + required by this Section 4(c) may be implemented in any reasonable + manner; provided, however, that in the case of a Adaptation or + Collection, at a minimum such credit will appear, if a credit for all + contributing authors of the Adaptation or Collection appears, then as + part of these credits and in a manner at least as prominent as the + credits for the other contributing authors. For the avoidance of + doubt, You may only use the credit required by this Section for the + purpose of attribution in the manner set out above and, by exercising + Your rights under this License, You may not implicitly or explicitly + assert or imply any connection with, sponsorship or endorsement by the + Original Author, Licensor and/or Attribution Parties, as appropriate, + of You or Your use of the Work, without the separate, express prior + written permission of the Original Author, Licensor and/or Attribution + Parties. + d. Except as otherwise agreed in writing by the Licensor or as may be + otherwise permitted by applicable law, if You Reproduce, Distribute or + Publicly Perform the Work either by itself or as part of any + Adaptations or Collections, You must not distort, mutilate, modify or + take other derogatory action in relation to the Work which would be + prejudicial to the Original Author's honor or reputation. Licensor + agrees that in those jurisdictions (e.g. Japan), in which any exercise + of the right granted in Section 3(b) of this License (the right to + make Adaptations) would be deemed to be a distortion, mutilation, + modification or other derogatory action prejudicial to the Original + Author's honor and reputation, the Licensor will waive or not assert, + as appropriate, this Section, to the fullest extent permitted by the + applicable national law, to enable You to reasonably exercise Your + right under Section 3(b) of this License (right to make Adaptations) + but not otherwise. + +5. Representations, Warranties and Disclaimer + +UNLESS OTHERWISE MUTUALLY AGREED TO BY THE PARTIES IN WRITING, LICENSOR +OFFERS THE WORK AS-IS AND MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY +KIND CONCERNING THE WORK, EXPRESS, IMPLIED, STATUTORY OR OTHERWISE, +INCLUDING, WITHOUT LIMITATION, WARRANTIES OF TITLE, MERCHANTIBILITY, +FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF +LATENT OR OTHER DEFECTS, ACCURACY, OR THE PRESENCE OF ABSENCE OF ERRORS, +WHETHER OR NOT DISCOVERABLE. SOME JURISDICTIONS DO NOT ALLOW THE EXCLUSION +OF IMPLIED WARRANTIES, SO SUCH EXCLUSION MAY NOT APPLY TO YOU. + +6. Limitation on Liability. EXCEPT TO THE EXTENT REQUIRED BY APPLICABLE +LAW, IN NO EVENT WILL LICENSOR BE LIABLE TO YOU ON ANY LEGAL THEORY FOR +ANY SPECIAL, INCIDENTAL, CONSEQUENTIAL, PUNITIVE OR EXEMPLARY DAMAGES +ARISING OUT OF THIS LICENSE OR THE USE OF THE WORK, EVEN IF LICENSOR HAS +BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. + +7. Termination + + a. This License and the rights granted hereunder will terminate + automatically upon any breach by You of the terms of this License. + Individuals or entities who have received Adaptations or Collections + from You under this License, however, will not have their licenses + terminated provided such individuals or entities remain in full + compliance with those licenses. Sections 1, 2, 5, 6, 7, and 8 will + survive any termination of this License. + b. Subject to the above terms and conditions, the license granted here is + perpetual (for the duration of the applicable copyright in the Work). + Notwithstanding the above, Licensor reserves the right to release the + Work under different license terms or to stop distributing the Work at + any time; provided, however that any such election will not serve to + withdraw this License (or any other license that has been, or is + required to be, granted under the terms of this License), and this + License will continue in full force and effect unless terminated as + stated above. + +8. Miscellaneous + + a. Each time You Distribute or Publicly Perform the Work or a Collection, + the Licensor offers to the recipient a license to the Work on the same + terms and conditions as the license granted to You under this License. + b. Each time You Distribute or Publicly Perform an Adaptation, Licensor + offers to the recipient a license to the original Work on the same + terms and conditions as the license granted to You under this License. + c. If any provision of this License is invalid or unenforceable under + applicable law, it shall not affect the validity or enforceability of + the remainder of the terms of this License, and without further action + by the parties to this agreement, such provision shall be reformed to + the minimum extent necessary to make such provision valid and + enforceable. + d. No term or provision of this License shall be deemed waived and no + breach consented to unless such waiver or consent shall be in writing + and signed by the party to be charged with such waiver or consent. + e. This License constitutes the entire agreement between the parties with + respect to the Work licensed here. There are no understandings, + agreements or representations with respect to the Work not specified + here. Licensor shall not be bound by any additional provisions that + may appear in any communication from You. This License may not be + modified without the mutual written agreement of the Licensor and You. + f. The rights granted under, and the subject matter referenced, in this + License were drafted utilizing the terminology of the Berne Convention + for the Protection of Literary and Artistic Works (as amended on + September 28, 1979), the Rome Convention of 1961, the WIPO Copyright + Treaty of 1996, the WIPO Performances and Phonograms Treaty of 1996 + and the Universal Copyright Convention (as revised on July 24, 1971). + These rights and subject matter take effect in the relevant + jurisdiction in which the License terms are sought to be enforced + according to the corresponding provisions of the implementation of + those treaty provisions in the applicable national law. If the + standard suite of rights granted under applicable copyright law + includes additional rights not granted under this License, such + additional rights are deemed to be included in the License; this + License is not intended to restrict the license of any rights under + applicable law. + + +Creative Commons Notice + + Creative Commons is not a party to this License, and makes no warranty + whatsoever in connection with the Work. Creative Commons will not be + liable to You or any party on any legal theory for any damages + whatsoever, including without limitation any general, special, + incidental or consequential damages arising in connection to this + license. Notwithstanding the foregoing two (2) sentences, if Creative + Commons has expressly identified itself as the Licensor hereunder, it + shall have all rights and obligations of Licensor. + + Except for the limited purpose of indicating to the public that the + Work is licensed under the CCPL, Creative Commons does not authorize + the use by either party of the trademark "Creative Commons" or any + related trademark or logo of Creative Commons without the prior + written consent of Creative Commons. Any permitted use will be in + compliance with Creative Commons' then-current trademark usage + guidelines, as may be published on its website or otherwise made + available upon request from time to time. For the avoidance of doubt, + this trademark restriction does not form part of the License. + + Creative Commons may be contacted at https://creativecommons.org/. diff --git a/LICENSES/W3C-20150513.txt b/LICENSES/W3C-20150513.txt new file mode 100644 index 00000000..abe1af9a --- /dev/null +++ b/LICENSES/W3C-20150513.txt @@ -0,0 +1,17 @@ +This work is being provided by the copyright holders under the following license. + +License +By obtaining and/or copying this work, you (the licensee) agree that you have read, understood, and will comply with the following terms and conditions. + +Permission to copy, modify, and distribute this work, with or without modification, for any purpose and without fee or royalty is hereby granted, provided that you include the following on ALL copies of the work or portions thereof, including modifications: + + • The full text of this NOTICE in a location viewable to users of the redistributed or derivative work. + • Any pre-existing intellectual property disclaimers, notices, or terms and conditions. If none exist, the W3C Software and Document Short Notice should be included. + • Notice of any changes or modifications, through a copyright statement on the new code or document such as "This software or document includes material copied from or derived from [title and URI of the W3C document]. Copyright (c) [YEAR] W3C® (MIT, ERCIM, Keio, Beihang)." + +Disclaimers +THIS WORK IS PROVIDED "AS IS," AND COPYRIGHT HOLDERS MAKE NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO, WARRANTIES OF MERCHANTABILITY OR FITNESS FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE SOFTWARE OR DOCUMENT WILL NOT INFRINGE ANY THIRD PARTY PATENTS, COPYRIGHTS, TRADEMARKS OR OTHER RIGHTS. + +COPYRIGHT HOLDERS WILL NOT BE LIABLE FOR ANY DIRECT, INDIRECT, SPECIAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF ANY USE OF THE SOFTWARE OR DOCUMENT. + +The name and trademarks of copyright holders may NOT be used in advertising or publicity pertaining to the work without specific, written prior permission. Title to copyright in this work will at all times remain with copyright holders. From b2eb3d348231a461a0183ef2ce9662c6d6d66f83 Mon Sep 17 00:00:00 2001 From: Stephan Druskat Date: Mon, 28 Jul 2025 08:12:10 +0200 Subject: [PATCH 013/247] Parametrize and reformat --- .../model/types/test_ld_context.py | 28 ++++++++++++------- 1 file changed, 18 insertions(+), 10 deletions(-) diff --git a/test/hermes_test/model/types/test_ld_context.py b/test/hermes_test/model/types/test_ld_context.py index 80beb620..acf50932 100644 --- a/test/hermes_test/model/types/test_ld_context.py +++ b/test/hermes_test/model/types/test_ld_context.py @@ -29,15 +29,21 @@ def test_get_codemeta_item(ctx): assert item == "https://codemeta.github.io/terms/maintainer" -def test_get_prefixed_items(ctx): +@pytest.mark.parametrize( + "compacted,expanded", + [ + ("schema:Organization", "http://schema.org/Organization"), + ( + "hermes:semanticVersion", + "https://schema.software-metadata.pub/hermes-content/1.0/semanticVersion", # TODO: Change on #393 fix + ), + ], +) +def test_get_prefixed_items(ctx, compacted, expanded): """Context returns fully expanded terms for prefixed vocabularies in the context.""" - item = ctx["schema:Organization"] - assert item == "http://schema.org/Organization" - item = ctx["hermes:semanticVersion"] - assert ( - item - == "https://schema.software-metadata.pub/hermes-content/1.0/semanticVersion" - ) # TODO: Change on #393 fix + item = ctx[compacted] + assert item == expanded + @pytest.mark.parametrize( @@ -60,10 +66,12 @@ def test_get_non_str_item_fail(ctx, non_str, error_type): {"foo": "bar", "baz": "foo"}, "schema:fooBar", "hermes:fooBar", - "codemeta:maintainer" # Prefixed CodeMeta doesn't exist in context + "codemeta:maintainer", # Prefixed CodeMeta doesn't exist in context ], ) def test_get_item_validate_fail(ctx, item): """Context raises on terms that don't exist in the context.""" - with pytest.raises(Exception): # FIXME: Replace with custom error, e.g., hermes.model.errors.InvalidTermException + with pytest.raises( + Exception + ): # FIXME: Replace with custom error, e.g., hermes.model.errors.InvalidTermException ctx[item] From b38d8927deb4c6dcd71abc998a669d81319bdfab Mon Sep 17 00:00:00 2001 From: Stephan Druskat Date: Mon, 28 Jul 2025 08:21:51 +0200 Subject: [PATCH 014/247] Test valid non-str inputs --- test/hermes_test/model/types/test_ld_context.py | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/test/hermes_test/model/types/test_ld_context.py b/test/hermes_test/model/types/test_ld_context.py index acf50932..1de51b6b 100644 --- a/test/hermes_test/model/types/test_ld_context.py +++ b/test/hermes_test/model/types/test_ld_context.py @@ -45,6 +45,19 @@ def test_get_prefixed_items(ctx, compacted, expanded): assert item == expanded +@pytest.mark.parametrize( + "compacted,expanded", + [ + ([None, "maintainer"], "https://codemeta.github.io/terms/maintainer"), + (["schema", "Organization"], "http://schema.org/Organization"), + ((None, "maintainer"), "https://codemeta.github.io/terms/maintainer"), + (("schema", "Organization"), "http://schema.org/Organization"), + ], +) +def test_get_valid_non_str_items(ctx, compacted, expanded): + """Context returns fully expanded terms for valid non-string inputs.""" + assert ctx[compacted] == expanded + @pytest.mark.parametrize( "non_str,error_type", From 7a230b90f5d6ac1dc41df4c0c6d6ede0ea5890c2 Mon Sep 17 00:00:00 2001 From: Stephan Druskat Date: Mon, 28 Jul 2025 08:22:21 +0200 Subject: [PATCH 015/247] Assert that dict inputs with >1 valid terms fail --- test/hermes_test/model/types/test_ld_context.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/test/hermes_test/model/types/test_ld_context.py b/test/hermes_test/model/types/test_ld_context.py index 1de51b6b..a84bc3c0 100644 --- a/test/hermes_test/model/types/test_ld_context.py +++ b/test/hermes_test/model/types/test_ld_context.py @@ -80,6 +80,8 @@ def test_get_non_str_item_fail(ctx, non_str, error_type): "schema:fooBar", "hermes:fooBar", "codemeta:maintainer", # Prefixed CodeMeta doesn't exist in context + # Even a dict with valid terms should fail, as it is unclear what to expect + {None: "maintainer", "schema": "Organization"}, ], ) def test_get_item_validate_fail(ctx, item): From d1f44685df288186115d8662e5cb77c2b8142548 Mon Sep 17 00:00:00 2001 From: Stephan Druskat Date: Mon, 28 Jul 2025 08:27:15 +0200 Subject: [PATCH 016/247] Remove unused imports --- test/hermes_test/model/types/test_ld_context.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/test/hermes_test/model/types/test_ld_context.py b/test/hermes_test/model/types/test_ld_context.py index a84bc3c0..e36d91db 100644 --- a/test/hermes_test/model/types/test_ld_context.py +++ b/test/hermes_test/model/types/test_ld_context.py @@ -8,8 +8,6 @@ from hermes.model.types.ld_context import ( ContextPrefix, ALL_CONTEXTS, - CODEMETA_CONTEXT, - PROV_CONTEXT, ) From a863fbaa0646e3016e6d849b8c616edaf0f4e7ac Mon Sep 17 00:00:00 2001 From: Stephan Druskat Date: Mon, 28 Jul 2025 11:54:23 +0200 Subject: [PATCH 017/247] Improve documentation --- src/hermes/model/types/ld_context.py | 17 +++++++++++++---- 1 file changed, 13 insertions(+), 4 deletions(-) diff --git a/src/hermes/model/types/ld_context.py b/src/hermes/model/types/ld_context.py index cfbb9beb..4bdcbd5e 100644 --- a/src/hermes/model/types/ld_context.py +++ b/src/hermes/model/types/ld_context.py @@ -41,6 +41,9 @@ def __init__(self, context): """ @param context: A two-item list, where the first item is the default vocabulary's IRI string, and the second is a dict mapping vocabulary prefixes to their respective IRI string. + + # FIXME: Rename context and prefix to context_lst (or similar) and context respectively, + # FIXME: as currently, prefix represents the actual context more precisely than the throwaway value of context. """ self.context = context self.prefix = {} @@ -56,16 +59,22 @@ def __init__(self, context): }) - def __getitem__(self, item): # FIXME Rename parameter to 'key' + def __getitem__(self, item): """ FIXME: Document in class, not here FIXME: Add type hints for params and return Gets the fully qualified IRI for a term from a vocabulary inside the initialized context. The vocabulary must have been added to the context at initialization. + + @param item: A term from a vocabulary in the context; terms from the default vocabulary are passed with a prefix + of None, or as an unprefixed string, terms from non-default vocabularies are prefixed with the defined prefix + for the vocabulary. The term can either be passed in as string if prefix is None, or "prefix:term", or + as a two-element list ["prefix": "term"] or tuple ("prefix", "term") + @return: The fully qualified IRI for the passed term """ - if not isinstance(item, str): - prefix, name = item + if not isinstance(item, str): # FIXME: Rename to compressed_term + prefix, name = item # FIXME: "name" should be "term", "prefix" should be "base_iri" elif ':' in item: prefix, name = item.split(':', 1) if name.startswith('://'): @@ -74,7 +83,7 @@ def __getitem__(self, item): # FIXME Rename parameter to 'key' prefix, name = None, item if prefix in self.prefix: - item = self.prefix[prefix] + name + item = self.prefix[prefix] + name # FIXME: Rename "item" to "iri" return item From 83dd16aecd610d5c234f376b66b31ba8ee62692c Mon Sep 17 00:00:00 2001 From: Stephan Druskat Date: Mon, 28 Jul 2025 11:55:22 +0200 Subject: [PATCH 018/247] Test instantiation --- test/hermes_test/model/types/test_ld_context.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/test/hermes_test/model/types/test_ld_context.py b/test/hermes_test/model/types/test_ld_context.py index e36d91db..312d6f6e 100644 --- a/test/hermes_test/model/types/test_ld_context.py +++ b/test/hermes_test/model/types/test_ld_context.py @@ -16,6 +16,11 @@ def ctx(): return ContextPrefix(ALL_CONTEXTS) +def test_ctx(): + ctx = ContextPrefix(["u1", {"2": "u2"}]) + assert ctx.prefix[None] == "u1" + assert ctx.prefix["2"] == "u2" + def test_codemeta_prefix(ctx): """Default vocabulary in context has the correct base IRI.""" assert ctx.prefix[None] == "https://codemeta.github.io/terms/" From c3beb4ae87a68dea74ef0a4834704f639179b4b8 Mon Sep 17 00:00:00 2001 From: Stephan Druskat Date: Mon, 28 Jul 2025 12:03:10 +0200 Subject: [PATCH 019/247] Improve documentation, rename local variables --- src/hermes/model/types/ld_context.py | 25 +++++++++++-------------- 1 file changed, 11 insertions(+), 14 deletions(-) diff --git a/src/hermes/model/types/ld_context.py b/src/hermes/model/types/ld_context.py index 4bdcbd5e..dde2d3d4 100644 --- a/src/hermes/model/types/ld_context.py +++ b/src/hermes/model/types/ld_context.py @@ -59,33 +59,30 @@ def __init__(self, context): }) - def __getitem__(self, item): + def __getitem__(self, compressed_term: str | tuple | list) -> str: """ - FIXME: Document in class, not here - FIXME: Add type hints for params and return - Gets the fully qualified IRI for a term from a vocabulary inside the initialized context. The vocabulary must have been added to the context at initialization. - @param item: A term from a vocabulary in the context; terms from the default vocabulary are passed with a prefix + @param compressed_term: A term from a vocabulary in the context; terms from the default vocabulary are passed with a prefix of None, or as an unprefixed string, terms from non-default vocabularies are prefixed with the defined prefix for the vocabulary. The term can either be passed in as string if prefix is None, or "prefix:term", or as a two-element list ["prefix": "term"] or tuple ("prefix", "term") @return: The fully qualified IRI for the passed term """ - if not isinstance(item, str): # FIXME: Rename to compressed_term - prefix, name = item # FIXME: "name" should be "term", "prefix" should be "base_iri" - elif ':' in item: - prefix, name = item.split(':', 1) - if name.startswith('://'): - prefix, name = True, item + if not isinstance(compressed_term, str): + prefix, term = compressed_term + elif ':' in compressed_term: + prefix, term = compressed_term.split(':', 1) + if term.startswith('://'): + prefix, term = True, compressed_term else: - prefix, name = None, item + prefix, term = None, compressed_term if prefix in self.prefix: - item = self.prefix[prefix] + name # FIXME: Rename "item" to "iri" + iri = self.prefix[prefix] + term - return item + return iri iri_map = ContextPrefix(ALL_CONTEXTS) From 1ce799d1e9816bd9b4a962297cfb494cf99b309f Mon Sep 17 00:00:00 2001 From: Stephan Druskat Date: Mon, 28 Jul 2025 12:11:58 +0200 Subject: [PATCH 020/247] Test protocol string __getitem__ parameters --- test/hermes_test/model/types/test_ld_context.py | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/test/hermes_test/model/types/test_ld_context.py b/test/hermes_test/model/types/test_ld_context.py index 312d6f6e..61b41c7d 100644 --- a/test/hermes_test/model/types/test_ld_context.py +++ b/test/hermes_test/model/types/test_ld_context.py @@ -21,6 +21,7 @@ def test_ctx(): assert ctx.prefix[None] == "u1" assert ctx.prefix["2"] == "u2" + def test_codemeta_prefix(ctx): """Default vocabulary in context has the correct base IRI.""" assert ctx.prefix[None] == "https://codemeta.github.io/terms/" @@ -48,6 +49,18 @@ def test_get_prefixed_items(ctx, compacted, expanded): assert item == expanded +def test_get_protocol_items_pass(ctx): + item = ctx["https://schema.org/Organisation"] + assert item == "https://schema.org/Organisation" + + +def test_get_protocol_items_fail(ctx): + with pytest.raises(Exception) as e: + ctx["https://foo.bar/baz"] + print(str(e.value)) + assert "cannot access local variable" not in str(e.value) + + @pytest.mark.parametrize( "compacted,expanded", [ From 50b88ed2f6e19552d8fede3c4fa97af9584215c6 Mon Sep 17 00:00:00 2001 From: Stephan Druskat Date: Mon, 28 Jul 2025 12:13:27 +0200 Subject: [PATCH 021/247] Add FIXME re custom errors --- test/hermes_test/model/types/test_ld_context.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/test/hermes_test/model/types/test_ld_context.py b/test/hermes_test/model/types/test_ld_context.py index 61b41c7d..03fd1cf3 100644 --- a/test/hermes_test/model/types/test_ld_context.py +++ b/test/hermes_test/model/types/test_ld_context.py @@ -57,8 +57,7 @@ def test_get_protocol_items_pass(ctx): def test_get_protocol_items_fail(ctx): with pytest.raises(Exception) as e: ctx["https://foo.bar/baz"] - print(str(e.value)) - assert "cannot access local variable" not in str(e.value) + assert "cannot access local variable" not in str(e.value) # FIXME: Replace with custom error @pytest.mark.parametrize( From 2efd117c5a9715447ec3b83b9fba4c1e284de046 Mon Sep 17 00:00:00 2001 From: Stephan Druskat Date: Thu, 31 Jul 2025 09:35:15 +0200 Subject: [PATCH 022/247] Improve documentation for constructor --- src/hermes/model/types/ld_context.py | 39 +++++++++++++++------------- 1 file changed, 21 insertions(+), 18 deletions(-) diff --git a/src/hermes/model/types/ld_context.py b/src/hermes/model/types/ld_context.py index dde2d3d4..e417c44d 100644 --- a/src/hermes/model/types/ld_context.py +++ b/src/hermes/model/types/ld_context.py @@ -1,6 +1,7 @@ # SPDX-FileCopyrightText: 2025 German Aerospace Center (DLR) # # SPDX-License-Identifier: Apache-2.0 +import typing # SPDX-FileContributor: Michael Meinel # SPDX-FileContributor: Stephan Druskat @@ -33,29 +34,31 @@ class ContextPrefix: FIXME: as this class represents JSON-LD contexts. Represents the context of the hermes JSON-LD data model and provides two views on the model: - - as a two-item list, of which the first item is the default, unprefixed vocabulary IRI, and the second is a dict - mapping prefixes to vocabulary IRIs; + - as a list of linked data vocabularies, where items can be vocabulary base IRI strings and/or dictionaries mapping + arbitrary strings used to prefix terms from a specific vocabulary to their respective vocabulary IRI strings.; - as a dict mapping prefixes to vocabulary IRIs, where the default vocabulary has a prefix of None. """ - def __init__(self, context): + def __init__(self, vocabularies: list[str | dict]): """ - @param context: A two-item list, where the first item is the default vocabulary's IRI string, and the second - is a dict mapping vocabulary prefixes to their respective IRI string. + @param vocabularies: A list of linked data vocabularies. Items can be vocabulary base IRI strings and/or dictionaries + mapping arbitrary strings used to prefix terms from a specific vocabulary to their respective vocabulary IRI + strings. - # FIXME: Rename context and prefix to context_lst (or similar) and context respectively, - # FIXME: as currently, prefix represents the actual context more precisely than the throwaway value of context. + If the list contains more than one string item, the last one will be used as the default vocabulary. If a prefix + string is used more than once across all dictionaries in the list, the last item with this key will be included + in the context. """ - self.context = context - self.prefix = {} + self.vocabularies = vocabularies + self.context = {} - for ctx in self.context: - if isinstance(ctx, str): - ctx = {None: ctx} + for vocab in self.vocabularies: + if isinstance(vocab, str): + vocab = {None: vocab} - self.prefix.update({ - prefix: base_url - for prefix, base_url in ctx.items() - if isinstance(base_url, str) + self.context.update({ + prefix: base_iri + for prefix, base_iri in vocab.items() + if isinstance(base_iri, str) }) @@ -79,8 +82,8 @@ def __getitem__(self, compressed_term: str | tuple | list) -> str: else: prefix, term = None, compressed_term - if prefix in self.prefix: - iri = self.prefix[prefix] + term + if prefix in self.context: + iri = self.context[prefix] + term return iri From 14373f88df55c8e5d2855ba19625f9667571751f Mon Sep 17 00:00:00 2001 From: "Kernchen, Sophie" Date: Thu, 31 Jul 2025 10:04:58 +0200 Subject: [PATCH 023/247] Correct container parameters --- src/hermes/model/types/ld_container.py | 2 +- .../model/types/test_ld_container.py | 44 ++++++++++--------- 2 files changed, 24 insertions(+), 22 deletions(-) diff --git a/src/hermes/model/types/ld_container.py b/src/hermes/model/types/ld_container.py index 0f0048bf..710fc6a0 100644 --- a/src/hermes/model/types/ld_container.py +++ b/src/hermes/model/types/ld_container.py @@ -139,7 +139,7 @@ def _to_expanded_json(self, key, value): return ld_value def __repr__(self): - return f'{type(self).__name__}({self._data[0]})' #FIXME: #397 KeyError, maybe removing the index + return f'{type(self).__name__}({self._data[0]})' def __str__(self): return str(self.to_python()) diff --git a/test/hermes_test/model/types/test_ld_container.py b/test/hermes_test/model/types/test_ld_container.py index 768fc808..f39768a9 100644 --- a/test/hermes_test/model/types/test_ld_container.py +++ b/test/hermes_test/model/types/test_ld_container.py @@ -5,17 +5,17 @@ # SPDX-FileContributor: Sophie Kernchen -import pytest, requests +import pytest from hermes.model.types.ld_container import ld_container '''we expect user of this class to give the right input data types example extended json ld: - { + [{ "http://schema.org/name": [{"@value": "bacon"}], "eggs": [{"@id": "spam"}], "green": [{"@id": "png"}] - } + }] ''' @@ -32,29 +32,30 @@ def setup_class(cls, httpserver): httpserver.expect_request("/url").respond_with_json(content) def test_container_basic(self): - cont = ld_container({"spam": [{"@value": "bacon"}]}) + cont = ld_container([{"spam": [{"@value": "bacon"}]}]) assert cont.key is None assert cont.context == [] - assert cont._data == {"spam": [{"@value": "bacon"}]} + assert cont._data == [{"spam": [{"@value": "bacon"}]}] assert cont.path == ["$"] def test_container_ld_value(self): - cont = ld_container({"spam": [{"@value": "bacon"}]}) + cont = ld_container([{"spam": [{"@value": "bacon"}]}]) - assert cont.ld_value == {"spam": [{"@value": "bacon"}]} + assert cont.ld_value == [{"spam": [{"@value": "bacon"}]}] def test_container_add_context(self): - cont = ld_container({"spam": [{"@value": "bacon"}]}) + cont = ld_container([{"spam": [{"@value": "bacon"}]}]) cont.add_context([self.url]) assert cont.context == [self.url] assert cont.full_context == [self.url] def test_container_parent(self): - cont_parent = ld_container({"ham": [{"@value": "eggs"}]}) - cont = ld_container({"spam": [{"@value": "bacon"}]}, parent=cont_parent) + cont_data = [{"spam": [{"@value": "bacon"}]}] + cont_parent = ld_container([{"ham": cont_data}]) + cont = ld_container(cont_data, parent=cont_parent, key="ham") assert cont.full_context == [] cont_parent.add_context([self.url]) @@ -67,22 +68,23 @@ def test_container_full_context_and_path(self, httpserver): httpserver.expect_request("/url3").respond_with_json({"ham": "bacon"}) httpserver.expect_request("/url4").respond_with_json({"@context": {"id": "@id"}}) - cont_grand_parent = ld_container({"ham": [{"@value": "eggs"}]}, context=[self.url], index=1) - cont_parent = ld_container({"ham": [{"@value": "eggs"}]}, context=[httpserver.url_for("/url2"), + cont_data = [{"spam": [{"@value": "bacon"}]}] + cont_parent_data = [cont_data] + cont_grand_parent = ld_container([{"ham": cont_parent_data}], context=[self.url]) + cont_parent = ld_container(cont_parent_data, context=[httpserver.url_for("/url2"), httpserver.url_for("/url4")], - parent=cont_grand_parent) - cont = ld_container({"spam": [{"@value": "bacon"}]}, context=[httpserver.url_for("/url3")], parent=cont_parent, - index=3) + parent=cont_grand_parent, key="ham") + cont = ld_container(cont_data, context=[httpserver.url_for("/url3")], parent=cont_parent, + index=0) assert cont_parent.full_context == [self.url, httpserver.url_for("/url2"), httpserver.url_for("/url4")] assert cont.full_context == [self.url, httpserver.url_for("/url2"), httpserver.url_for("/url4"), httpserver.url_for("/url3")] - assert cont_grand_parent.path == ["$"] # FIXME: #381 Decision if the path should look like this - assert cont_parent.path == ["$", None] - assert cont.path == ["$", None, 3] + assert cont_grand_parent.path == ["$"] + assert cont_parent.path == ["$", "ham"] + assert cont.path == ["$", "ham", 0] def test_container_str_and_repr(self): - cont = ld_container({"spam": [{"@value": "bacon"}]}, index=3) - assert repr(cont) == ld_container({"spam": [{"@value": "bacon"}]}) #FIXME: #397 KeyError + cont = ld_container([{"spam": [{"@value": "bacon"}]}]) + assert repr(cont) == "ld_container({'spam': [{'@value': 'bacon'}]})" with pytest.raises(NotImplementedError): str(cont) - From d23c39b7e73cd44dbf0519e652314aaa6d8083fe Mon Sep 17 00:00:00 2001 From: Stephan Druskat Date: Thu, 31 Jul 2025 10:53:09 +0200 Subject: [PATCH 024/247] Improve documentation for getting items from context --- src/hermes/model/types/ld_context.py | 21 ++++++++++++++++----- 1 file changed, 16 insertions(+), 5 deletions(-) diff --git a/src/hermes/model/types/ld_context.py b/src/hermes/model/types/ld_context.py index e417c44d..e9055a63 100644 --- a/src/hermes/model/types/ld_context.py +++ b/src/hermes/model/types/ld_context.py @@ -62,15 +62,26 @@ def __init__(self, vocabularies: list[str | dict]): }) - def __getitem__(self, compressed_term: str | tuple | list) -> str: + def __getitem__(self, compressed_term: str | tuple) -> str: """ Gets the fully qualified IRI for a term from a vocabulary inside the initialized context. The vocabulary must have been added to the context at initialization. - @param compressed_term: A term from a vocabulary in the context; terms from the default vocabulary are passed with a prefix - of None, or as an unprefixed string, terms from non-default vocabularies are prefixed with the defined prefix - for the vocabulary. The term can either be passed in as string if prefix is None, or "prefix:term", or - as a two-element list ["prefix": "term"] or tuple ("prefix", "term") + Example uses: + + context = (["iri_default", {"prefix1": "iri1"}]) + # access qualified term via str + term = context["term_in_default_vocabulary"] + term = context["prefix1:term"] + # access qualified term via tuple + term = context["prefix1", "term"] + term = context[None, "term_in_default_vocabulary"] + + @param compressed_term: A term from a vocabulary in the context; terms from the default vocabulary are passed + with a prefix of None, or as an unprefixed string, terms from non-default vocabularies are prefixed with the + defined prefix for the vocabulary. The term can either be passed in as string if prefix is None, or + ":", or as a tuple. + @return: The fully qualified IRI for the passed term """ if not isinstance(compressed_term, str): From 5e8e693fe0a2c5e00131670291f015c45b60408c Mon Sep 17 00:00:00 2001 From: "Kernchen, Sophie" Date: Thu, 31 Jul 2025 11:38:40 +0200 Subject: [PATCH 025/247] Fix method name and test convertion methods --- src/hermes/model/types/ld_container.py | 2 +- src/hermes/model/types/pyld_util.py | 2 +- .../model/types/test_ld_container.py | 18 +++++++++++++++++- 3 files changed, 19 insertions(+), 3 deletions(-) diff --git a/src/hermes/model/types/ld_container.py b/src/hermes/model/types/ld_container.py index 710fc6a0..981227aa 100644 --- a/src/hermes/model/types/ld_container.py +++ b/src/hermes/model/types/ld_container.py @@ -47,7 +47,7 @@ def __init__(self, data, *, parent=None, key=None, index=None, context=None): else: self.active_ctx = parent.active_ctx else: - self.active_ctx = self.ld_proc.inital_ctx( + self.active_ctx = self.ld_proc.initial_ctx( self.full_context, {"documentLoader": bundled_loader} ) diff --git a/src/hermes/model/types/pyld_util.py b/src/hermes/model/types/pyld_util.py index f652cce8..1d3f9bff 100644 --- a/src/hermes/model/types/pyld_util.py +++ b/src/hermes/model/types/pyld_util.py @@ -106,7 +106,7 @@ def expand_iri(self, active_ctx: t.Any, short_iri: str) -> str: def compact_iri(self, active_ctx: t.Any, long_iri: str) -> str: return self._compact_iri(active_ctx, long_iri, vocab=True) - def inital_ctx(self, local_ctx, options=None): + def initial_ctx(self, local_ctx, options=None): return self.process_context(self._INITIAL_CONTEXT, local_ctx, options or {}) @classmethod diff --git a/test/hermes_test/model/types/test_ld_container.py b/test/hermes_test/model/types/test_ld_container.py index f39768a9..a159907f 100644 --- a/test/hermes_test/model/types/test_ld_container.py +++ b/test/hermes_test/model/types/test_ld_container.py @@ -5,7 +5,7 @@ # SPDX-FileContributor: Sophie Kernchen -import pytest +import pytest, uuid from hermes.model.types.ld_container import ld_container '''we expect user of this class to give the right input data types @@ -38,6 +38,7 @@ def test_container_basic(self): assert cont.context == [] assert cont._data == [{"spam": [{"@value": "bacon"}]}] assert cont.path == ["$"] + assert cont.active_ctx == {'mappings': {}} def test_container_ld_value(self): cont = ld_container([{"spam": [{"@value": "bacon"}]}]) @@ -88,3 +89,18 @@ def test_container_str_and_repr(self): assert repr(cont) == "ld_container({'spam': [{'@value': 'bacon'}]})" with pytest.raises(NotImplementedError): str(cont) + + def test_to_python(self): + cont = ld_container([{"spam": [{"@value": "bacon", "@id": "ham", "@type": ["@id"]}]}]) + assert cont._to_python("spam", [{"@value": "bacon"}]) == 'bacon' + assert cont._to_python("@id", "ham") == "ham" + + cont.active_ctx['_uuid'] = str(uuid.uuid1()) # FIXME: 406 + assert cont._to_python("@type", ["@id"]) == '@id' + + def test_to_expanded(self): + cont = ld_container([{"spam": [{"@value": "bacon", "@id": "ham", "@type": "@id"}]}]) + cont.active_ctx['_uuid'] = str(uuid.uuid1()) # FIXME: 406 + assert cont._to_expanded_json("spam", "bacon") == [{"@value": "bacon"}] + assert cont._to_expanded_json("@id", "ham") == "ham" + assert cont._to_expanded_json("@type", "@id") == ["@id"] From 4847d661dcd58b93c840935c88592a2464c43c70 Mon Sep 17 00:00:00 2001 From: "Kernchen, Sophie" Date: Thu, 31 Jul 2025 11:42:42 +0200 Subject: [PATCH 026/247] Remove docstring from closed issue --- src/hermes/model/types/ld_container.py | 9 --------- test/hermes_test/model/types/test_ld_container.py | 2 +- 2 files changed, 1 insertion(+), 10 deletions(-) diff --git a/src/hermes/model/types/ld_container.py b/src/hermes/model/types/ld_container.py index 981227aa..9d8b10c2 100644 --- a/src/hermes/model/types/ld_container.py +++ b/src/hermes/model/types/ld_container.py @@ -70,15 +70,6 @@ def full_context(self): @property def path(self): """ Create a path representation for this item. """ - ''' - FIXME: #381 Decision if the path should look like this. Other option: - - if self.parent: - return self.parent.path + [self.key if self.index is None else self.index] - else: - return ["(self.key if self.index is None else self.index)"] braces to show you are here. - - ''' if self.parent: return self.parent.path + [self.key if self.index is None else self.index] else: diff --git a/test/hermes_test/model/types/test_ld_container.py b/test/hermes_test/model/types/test_ld_container.py index a159907f..00c93db5 100644 --- a/test/hermes_test/model/types/test_ld_container.py +++ b/test/hermes_test/model/types/test_ld_container.py @@ -73,7 +73,7 @@ def test_container_full_context_and_path(self, httpserver): cont_parent_data = [cont_data] cont_grand_parent = ld_container([{"ham": cont_parent_data}], context=[self.url]) cont_parent = ld_container(cont_parent_data, context=[httpserver.url_for("/url2"), - httpserver.url_for("/url4")], + httpserver.url_for("/url4")], parent=cont_grand_parent, key="ham") cont = ld_container(cont_data, context=[httpserver.url_for("/url3")], parent=cont_parent, index=0) From 019cd32044300287bc643207500f99cbddf3ac5b Mon Sep 17 00:00:00 2001 From: Stephan Druskat Date: Thu, 31 Jul 2025 13:24:18 +0200 Subject: [PATCH 027/247] Appease flake8 --- src/hermes/model/types/ld_context.py | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/src/hermes/model/types/ld_context.py b/src/hermes/model/types/ld_context.py index e9055a63..9bb8209a 100644 --- a/src/hermes/model/types/ld_context.py +++ b/src/hermes/model/types/ld_context.py @@ -1,7 +1,6 @@ # SPDX-FileCopyrightText: 2025 German Aerospace Center (DLR) # # SPDX-License-Identifier: Apache-2.0 -import typing # SPDX-FileContributor: Michael Meinel # SPDX-FileContributor: Stephan Druskat @@ -40,9 +39,9 @@ class ContextPrefix: """ def __init__(self, vocabularies: list[str | dict]): """ - @param vocabularies: A list of linked data vocabularies. Items can be vocabulary base IRI strings and/or dictionaries - mapping arbitrary strings used to prefix terms from a specific vocabulary to their respective vocabulary IRI - strings. + @param vocabularies: A list of linked data vocabularies. Items can be vocabulary base IRI strings and/or + dictionaries mapping arbitrary strings used to prefix terms from a specific vocabulary to their respective + vocabulary IRI strings. If the list contains more than one string item, the last one will be used as the default vocabulary. If a prefix string is used more than once across all dictionaries in the list, the last item with this key will be included @@ -61,7 +60,6 @@ def __init__(self, vocabularies: list[str | dict]): if isinstance(base_iri, str) }) - def __getitem__(self, compressed_term: str | tuple) -> str: """ Gets the fully qualified IRI for a term from a vocabulary inside the initialized context. From e2995b338b0efbf577fdabcb0f0b8e77bf32cb06 Mon Sep 17 00:00:00 2001 From: Michael Meinel Date: Thu, 31 Jul 2025 21:34:05 +0200 Subject: [PATCH 028/247] Add dependency for tests. --- pyproject.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/pyproject.toml b/pyproject.toml index bf19ccca..8223b75e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -62,6 +62,7 @@ pytest-cov = "^3.0.0" taskipy = "^1.10.3" flake8 = "^5.0.4" requests-mock = "^1.10.0" +pytest-httpserver = "^1.1.3" # Packages for developers for creating documentation [tool.poetry.group.docs] From 8da44cb6a4fc8703d349fc6ff2489bbf49af3e56 Mon Sep 17 00:00:00 2001 From: Michael Meinel Date: Thu, 31 Jul 2025 21:43:06 +0200 Subject: [PATCH 029/247] Adapt test of _To_expanded_json. See MR #398 for more information. --- test/hermes_test/model/types/test_ld_container.py | 15 ++++++++++++--- 1 file changed, 12 insertions(+), 3 deletions(-) diff --git a/test/hermes_test/model/types/test_ld_container.py b/test/hermes_test/model/types/test_ld_container.py index 00c93db5..73877bff 100644 --- a/test/hermes_test/model/types/test_ld_container.py +++ b/test/hermes_test/model/types/test_ld_container.py @@ -99,8 +99,17 @@ def test_to_python(self): assert cont._to_python("@type", ["@id"]) == '@id' def test_to_expanded(self): - cont = ld_container([{"spam": [{"@value": "bacon", "@id": "ham", "@type": "@id"}]}]) - cont.active_ctx['_uuid'] = str(uuid.uuid1()) # FIXME: 406 + # Define a mock vocabulary to work with + mock_context = { + "ham": {"@id": "http://ham.eggs/ham", "@type": "@id"}, + "spam": {"@id": "http://ham.eggs/spam"}, + "Eggs": {"@id": "http://ham.eggs/Eggs"}, + } + + # Create container with mock context + cont = ld_container([{}], context=[mock_context]) + + # Try simple cases of expansion assert cont._to_expanded_json("spam", "bacon") == [{"@value": "bacon"}] - assert cont._to_expanded_json("@id", "ham") == "ham" + assert cont._to_expanded_json("@id", "ham") == "http://ham.eggs/ham" assert cont._to_expanded_json("@type", "@id") == ["@id"] From f0148a585fcc9f66ee54644819cfdbcccda4ab42 Mon Sep 17 00:00:00 2001 From: Michael Meinel Date: Fri, 1 Aug 2025 20:57:02 +0200 Subject: [PATCH 030/247] Add test helpers that provide JSON-LD data. Basically, this is a small but somewhat representative vocabulary that can be used. In addition, there are two representations of the same document (compact, expanded). --- test/hermes_test/model/types/conftest.py | 64 ++++++++++++++++++++++++ 1 file changed, 64 insertions(+) create mode 100644 test/hermes_test/model/types/conftest.py diff --git a/test/hermes_test/model/types/conftest.py b/test/hermes_test/model/types/conftest.py new file mode 100644 index 00000000..aa61252b --- /dev/null +++ b/test/hermes_test/model/types/conftest.py @@ -0,0 +1,64 @@ +# SPDX-FileCopyrightText: 2025 German Aerospace Center (DLR) +# +# SPDX-License-Identifier: Apache-2.0 + +from datetime import datetime + +import pytest + + +class MockDocument: + @classmethod + def vocabulary(cls, base_url="http://spam.eggs/"): + return { + "spam": {"@id": f"{base_url}spam"}, + "ham": {"@id": f"{base_url}ham", "@type": "@id"}, + "eggs": {"@id": f"{base_url}eggs", "@container": "@list"}, + "use_until": {"@id": f"{base_url}use_until", "@type": "http://schema.org/DateTime"}, + + "Egg": {"@id": f"{base_url}Egg"}, + } + + @classmethod + def compact(cls, base_url="http://spam.eggs/"): + return { + "@context": cls.vocabulary(base_url), + + "spam": "bacon", + "ham": f"{base_url}identifier", + "eggs": [ + {"@type": "Egg", "use_until": datetime(2024, 4, 20, 16, 20).isoformat()}, + {"@type": "Egg", "use_until": datetime(2026, 12, 31, 23, 59, 59).isoformat()}, + ] + } + + @classmethod + def expanded(cls, base_url="http://spam.eggs/"): + return [{ + f"{base_url}spam": [{"@value": "bacon"}], + f"{base_url}ham": [{"@id": f"{base_url}identifier"}], + f"{base_url}eggs": [{"@list": [ + { + "@type": [f"{base_url}Egg"], + f"{base_url}use_until": [ + {"@type": "http://schema.org/DateTime", "@value": "2024-04-20T16:20:00"} + ], + }, + { + "@type": [f"{base_url}Egg"], + f"{base_url}use_until": [ + {"@type": "http://schema.org/DateTime", "@value": "2026-12-31T23:59:59"} + ], + } + ]}] + }] + + +@pytest.fixture +def mock_context(): + return MockDocument.vocabulary() + + +@pytest.fixture +def mock_document(): + return MockDocument From 2dfed2dfc5ccce3648707c5725cec37cb68ecc6e Mon Sep 17 00:00:00 2001 From: Michael Meinel Date: Fri, 1 Aug 2025 21:10:59 +0200 Subject: [PATCH 031/247] Add documentation to helpers. --- test/hermes_test/model/types/conftest.py | 32 +++++++++++++++++++++--- 1 file changed, 28 insertions(+), 4 deletions(-) diff --git a/test/hermes_test/model/types/conftest.py b/test/hermes_test/model/types/conftest.py index aa61252b..3a5eb106 100644 --- a/test/hermes_test/model/types/conftest.py +++ b/test/hermes_test/model/types/conftest.py @@ -8,8 +8,18 @@ class MockDocument: + """ + Helper that provides valid JSON-LD data. + """ + @classmethod - def vocabulary(cls, base_url="http://spam.eggs/"): + def vocabulary(cls, base_url: str = "http://spam.eggs/") -> dict: + """ + Retrieve the vocabulary used for the document. + + :param base_url: Optional base URL to use for IRIs in the vocabulary. + :returns: A JSON-LD vocabulary usable in as document context. + """ return { "spam": {"@id": f"{base_url}spam"}, "ham": {"@id": f"{base_url}ham", "@type": "@id"}, @@ -20,9 +30,17 @@ def vocabulary(cls, base_url="http://spam.eggs/"): } @classmethod - def compact(cls, base_url="http://spam.eggs/"): + def compact(cls, base_url: str = "http://spam.eggs/", embed_vocabulary: bool = True) -> dict: + """ + Get compact representation of the example document. + + :param base_url: Optional base URL used to generate the context. + :param embed_vocabulary: Optional switch to indicate whether the vocabulary should be embedded in the context + or only refrenced by the base url. + :returns: The rendered compact document. + """ return { - "@context": cls.vocabulary(base_url), + "@context": [cls.vocabulary(base_url) if embed_vocabulary else base_url], "spam": "bacon", "ham": f"{base_url}identifier", @@ -33,7 +51,13 @@ def compact(cls, base_url="http://spam.eggs/"): } @classmethod - def expanded(cls, base_url="http://spam.eggs/"): + def expanded(cls, base_url: str = "http://spam.eggs/") -> list[dict]: + """ + Get expanded representation of the example document. + + :param base_url: Optional base URL to use for IRIs. + :returns: The rendered expanded document. + """ return [{ f"{base_url}spam": [{"@value": "bacon"}], f"{base_url}ham": [{"@id": f"{base_url}identifier"}], From a527fb0a201497e0e8dbdd238973fb7250016322 Mon Sep 17 00:00:00 2001 From: Michael Meinel Date: Fri, 1 Aug 2025 21:11:37 +0200 Subject: [PATCH 032/247] Add test for helpers. --- test/hermes_test/model/types/test_pyld_util.py | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) create mode 100644 test/hermes_test/model/types/test_pyld_util.py diff --git a/test/hermes_test/model/types/test_pyld_util.py b/test/hermes_test/model/types/test_pyld_util.py new file mode 100644 index 00000000..b84afe35 --- /dev/null +++ b/test/hermes_test/model/types/test_pyld_util.py @@ -0,0 +1,18 @@ +import pytest + +from hermes.model.types import pyld_util + + +@pytest.fixture +def ld_proc(): + return pyld_util.JsonLdProcessor() + + +def test_mock_document_compact(ld_proc, mock_document): + compact_document = ld_proc.compact(mock_document.expanded(), [mock_document.vocabulary], {}) + assert compact_document == mock_document.compact() + + +def test_mock_document_compact(ld_proc, mock_document): + expanded_document = ld_proc.expand(mock_document.compact(), {}) + assert expanded_document == mock_document.expanded() From 2fbe1e055763d7ce4a7b0613e7d38f87ce24a178 Mon Sep 17 00:00:00 2001 From: Michael Meinel Date: Fri, 1 Aug 2025 21:12:16 +0200 Subject: [PATCH 033/247] Using mock_document for setup. --- test/hermes_test/model/types/test_ld_container.py | 11 +++-------- 1 file changed, 3 insertions(+), 8 deletions(-) diff --git a/test/hermes_test/model/types/test_ld_container.py b/test/hermes_test/model/types/test_ld_container.py index 73877bff..e398a337 100644 --- a/test/hermes_test/model/types/test_ld_container.py +++ b/test/hermes_test/model/types/test_ld_container.py @@ -22,14 +22,9 @@ class TestLdContainer: @classmethod @pytest.fixture(autouse=True) - def setup_class(cls, httpserver): - content = { - "@context": {"type": "@type", "id": "@id", "schema": "http://schema.org/", "ham": "https://fake.site/", - "Organization": {"@id": "schema:Organization"}}} - - cls.url = httpserver.url_for("/url") - - httpserver.expect_request("/url").respond_with_json(content) + def setup_class(cls, httpserver, mock_document): + cls.url = httpserver.url_for("/") + httpserver.expect_request("/").respond_with_json({"@context": mock_document.vocabulary(cls.url)}) def test_container_basic(self): cont = ld_container([{"spam": [{"@value": "bacon"}]}]) From e362b15bf358893673d092657911c93e0271436d Mon Sep 17 00:00:00 2001 From: Michael Meinel Date: Fri, 1 Aug 2025 21:13:50 +0200 Subject: [PATCH 034/247] Remove test for pyld internals. cont.active_ctx is very special with regards to the implementation of a dependency. This is ugly and tests will be added to test_pyld_utils. --- test/hermes_test/model/types/test_ld_container.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/test/hermes_test/model/types/test_ld_container.py b/test/hermes_test/model/types/test_ld_container.py index e398a337..c3e08168 100644 --- a/test/hermes_test/model/types/test_ld_container.py +++ b/test/hermes_test/model/types/test_ld_container.py @@ -33,7 +33,6 @@ def test_container_basic(self): assert cont.context == [] assert cont._data == [{"spam": [{"@value": "bacon"}]}] assert cont.path == ["$"] - assert cont.active_ctx == {'mappings': {}} def test_container_ld_value(self): cont = ld_container([{"spam": [{"@value": "bacon"}]}]) @@ -41,7 +40,6 @@ def test_container_ld_value(self): assert cont.ld_value == [{"spam": [{"@value": "bacon"}]}] def test_container_add_context(self): - cont = ld_container([{"spam": [{"@value": "bacon"}]}]) cont.add_context([self.url]) From 8b2e59f054c41c467a658f86032348eb4e31a709 Mon Sep 17 00:00:00 2001 From: Michael Meinel Date: Fri, 1 Aug 2025 21:15:09 +0200 Subject: [PATCH 035/247] Switch test to use mock_context from helpers. --- .../model/types/test_ld_container.py | 23 ++++++++----------- 1 file changed, 9 insertions(+), 14 deletions(-) diff --git a/test/hermes_test/model/types/test_ld_container.py b/test/hermes_test/model/types/test_ld_container.py index c3e08168..429c0f50 100644 --- a/test/hermes_test/model/types/test_ld_container.py +++ b/test/hermes_test/model/types/test_ld_container.py @@ -1,11 +1,10 @@ # SPDX-FileCopyrightText: 2025 German Aerospace Center (DLR) # # SPDX-License-Identifier: Apache-2.0 -import re # SPDX-FileContributor: Sophie Kernchen -import pytest, uuid +import pytest from hermes.model.types.ld_container import ld_container '''we expect user of this class to give the right input data types @@ -83,21 +82,17 @@ def test_container_str_and_repr(self): with pytest.raises(NotImplementedError): str(cont) - def test_to_python(self): - cont = ld_container([{"spam": [{"@value": "bacon", "@id": "ham", "@type": ["@id"]}]}]) - assert cont._to_python("spam", [{"@value": "bacon"}]) == 'bacon' - assert cont._to_python("@id", "ham") == "ham" + def test_to_python(self, mock_context): + # Create container with mock context + cont = ld_container([{}], context=[mock_context]) - cont.active_ctx['_uuid'] = str(uuid.uuid1()) # FIXME: 406 + # Try simple cases of conversion + assert cont._to_python("@id", "ham") == "ham" assert cont._to_python("@type", ["@id"]) == '@id' - def test_to_expanded(self): - # Define a mock vocabulary to work with - mock_context = { - "ham": {"@id": "http://ham.eggs/ham", "@type": "@id"}, - "spam": {"@id": "http://ham.eggs/spam"}, - "Eggs": {"@id": "http://ham.eggs/Eggs"}, - } + def test_to_python_list(self, mock_context): + cont = ld_container([{}], context=[mock_context]) + list_data = [{"@list": [{"@id": "spam"}, {"@id": "eggs"}]}] # Create container with mock context cont = ld_container([{}], context=[mock_context]) From 4b8c3b754d4d4f2ebb94d5d1c85b320ee2f07881 Mon Sep 17 00:00:00 2001 From: Michael Meinel Date: Fri, 1 Aug 2025 21:16:24 +0200 Subject: [PATCH 036/247] Added more tests. The conversion tests (`_to_python` and `_to_expanded_json`) are pretty long now... --- .../model/types/test_ld_container.py | 29 +++++++++++++++++-- 1 file changed, 27 insertions(+), 2 deletions(-) diff --git a/test/hermes_test/model/types/test_ld_container.py b/test/hermes_test/model/types/test_ld_container.py index 429c0f50..df730bb2 100644 --- a/test/hermes_test/model/types/test_ld_container.py +++ b/test/hermes_test/model/types/test_ld_container.py @@ -89,15 +89,40 @@ def test_to_python(self, mock_context): # Try simple cases of conversion assert cont._to_python("@id", "ham") == "ham" assert cont._to_python("@type", ["@id"]) == '@id' + assert cont._to_python("@type", ["@id", "http://spam.eggs/Egg"]) == ["@id", "Egg"] + + # Try type conversions + assert cont._to_python("http://spam.eggs/ham", [{"@id": "spam"}]) == 'spam' + + assert cont._to_python("http://soam.eggs/spam", [{"@value": "bacon"}]) == 'bacon' + assert cont._to_python("http://spam.eggs/spam", [{"@value": True}]) == True + assert cont._to_python("http://spam.eggs/spam", [{"@value": 123}]) == 123 + + assert cont._to_python("http://spam.eggs/eggs", [{ + "@value": "2022-02-22T00:00:00", "@type": "https://schema.org/DateTime" + }]) == "2022-02-22T00:00:00" def test_to_python_list(self, mock_context): cont = ld_container([{}], context=[mock_context]) list_data = [{"@list": [{"@id": "spam"}, {"@id": "eggs"}]}] + assert cont._to_python("ham", list_data).to_python() == ["spam", "eggs"] + + def test_to_expanded(self, mock_context): # Create container with mock context cont = ld_container([{}], context=[mock_context]) # Try simple cases of expansion + assert cont._to_expanded_json("@id", "ham") == "ham" + assert cont._to_expanded_json("@type", "Egg") == ["http://spam.eggs/Egg"] + + # Type conversions + assert cont._to_expanded_json("ham", "spam") == [{"@id": "spam"}] + assert cont._to_expanded_json("spam", "bacon") == [{"@value": "bacon"}] - assert cont._to_expanded_json("@id", "ham") == "http://ham.eggs/ham" - assert cont._to_expanded_json("@type", "@id") == ["@id"] + assert cont._to_expanded_json("spam", 123) == [{"@value": 123}] + assert cont._to_expanded_json("spam", True) == [{"@value": True}] + + assert cont._to_expanded_json("eggs", datetime(2022, 2,22)) == [ + {"@value": "2022-02-22T00:00:00", "@type": "http://schema.org/DateTime"} + ] From 3bceb496878cf9a79b18f11444b39d3b3ce9ca08 Mon Sep 17 00:00:00 2001 From: Michael Meinel Date: Fri, 1 Aug 2025 22:04:46 +0200 Subject: [PATCH 037/247] Restructured tests for ld_container (type conversions). --- .../model/types/test_ld_container.py | 53 ++++++++++++++----- 1 file changed, 39 insertions(+), 14 deletions(-) diff --git a/test/hermes_test/model/types/test_ld_container.py b/test/hermes_test/model/types/test_ld_container.py index df730bb2..873a7d65 100644 --- a/test/hermes_test/model/types/test_ld_container.py +++ b/test/hermes_test/model/types/test_ld_container.py @@ -3,8 +3,12 @@ # SPDX-License-Identifier: Apache-2.0 # SPDX-FileContributor: Sophie Kernchen +# SPDX-FileContributor: Michael Meinel + +from datetime import datetime import pytest + from hermes.model.types.ld_container import ld_container '''we expect user of this class to give the right input data types @@ -82,47 +86,68 @@ def test_container_str_and_repr(self): with pytest.raises(NotImplementedError): str(cont) - def test_to_python(self, mock_context): - # Create container with mock context + def test_to_python_id(self, mock_context): cont = ld_container([{}], context=[mock_context]) + assert cont._to_python("@id", "http://spam.eggs/ham") == "http://spam.eggs/ham" - # Try simple cases of conversion - assert cont._to_python("@id", "ham") == "ham" + def test_to_python_id_with_prefix(self, mock_context): + cont = ld_container([{}], context=[mock_context, {"prefix": self.url}]) + assert cont._to_python("@id", f"{self.url}identifier") == "prefix:identifier" + + def test_to_python_type(self, mock_context): + cont = ld_container([{}], context=[mock_context]) assert cont._to_python("@type", ["@id"]) == '@id' assert cont._to_python("@type", ["@id", "http://spam.eggs/Egg"]) == ["@id", "Egg"] - # Try type conversions - assert cont._to_python("http://spam.eggs/ham", [{"@id": "spam"}]) == 'spam' + def test_to_python_id_value(self, mock_context): + cont = ld_container([{}], context=[mock_context]) + assert cont._to_python("http://spam.eggs/ham", [{"@id": "http://spam.eggs/spam"}]) == "http://spam.eggs/spam" + assert cont._to_python("http://spam.eggs/ham", [{"@id": "http://spam.eggs/identifier"}]) == "http://spam.eggs/identifier" + def test_to_python_basic_value(self, mock_context): + cont = ld_container([{}], context=[mock_context]) assert cont._to_python("http://soam.eggs/spam", [{"@value": "bacon"}]) == 'bacon' assert cont._to_python("http://spam.eggs/spam", [{"@value": True}]) == True assert cont._to_python("http://spam.eggs/spam", [{"@value": 123}]) == 123 + def test_to_python_datetime_value(self, mock_context): + cont = ld_container([{}], context=[mock_context]) assert cont._to_python("http://spam.eggs/eggs", [{ "@value": "2022-02-22T00:00:00", "@type": "https://schema.org/DateTime" }]) == "2022-02-22T00:00:00" - def test_to_python_list(self, mock_context): + def test_to_expanded_id(self, mock_context): cont = ld_container([{}], context=[mock_context]) - list_data = [{"@list": [{"@id": "spam"}, {"@id": "eggs"}]}] + assert cont._to_expanded_json("@id", f"{self.url}identifier") == f"{self.url}identifier" - assert cont._to_python("ham", list_data).to_python() == ["spam", "eggs"] + # Regression test: "ham" is vocabulary and must not be expanded. + assert cont._to_expanded_json("@id", "ham") == "ham" - def test_to_expanded(self, mock_context): - # Create container with mock context - cont = ld_container([{}], context=[mock_context]) + def test_to_expanded_id_with_prefix(self, mock_context): + cont = ld_container([{}], context=[mock_context, {"prefix": self.url}]) + assert cont._to_expanded_json("@id", "prefix:identifier") == f"{self.url}identifier" - # Try simple cases of expansion + # Regression test: "ham" should still not be expaned, but "prefix:ham" should be. assert cont._to_expanded_json("@id", "ham") == "ham" + assert cont._to_expanded_json("@id", "prefix:ham") == f"{self.url}ham" + + def test_to_expanded_type(self, mock_context): + cont = ld_container([{}], context=[mock_context]) assert cont._to_expanded_json("@type", "Egg") == ["http://spam.eggs/Egg"] + assert cont._to_expanded_json("@type", ["Egg", "@id"]) == ["http://spam.eggs/Egg", "@id"] - # Type conversions + def test_to_expanded_id_value(self, mock_context): + cont = ld_container([{}], context=[mock_context]) assert cont._to_expanded_json("ham", "spam") == [{"@id": "spam"}] + def test_to_expanded_basic_value(self, mock_context): + cont = ld_container([{}], context=[mock_context]) assert cont._to_expanded_json("spam", "bacon") == [{"@value": "bacon"}] assert cont._to_expanded_json("spam", 123) == [{"@value": 123}] assert cont._to_expanded_json("spam", True) == [{"@value": True}] + def test_to_expanded_datetime_value(self, mock_context): + cont = ld_container([{}], context=[mock_context]) assert cont._to_expanded_json("eggs", datetime(2022, 2,22)) == [ {"@value": "2022-02-22T00:00:00", "@type": "http://schema.org/DateTime"} ] From ef2eb70f879fd4b49c730b3f2d48cd3b5788d07a Mon Sep 17 00:00:00 2001 From: Michael Meinel Date: Fri, 1 Aug 2025 22:08:49 +0200 Subject: [PATCH 038/247] Update poetry lock to get CI running. --- poetry.lock | 39 ++++++++++++++++++++++++++++++++++++--- 1 file changed, 36 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index f7104e16..bb7d9074 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 2.1.3 and should not be changed by hand. +# This file is automatically @generated by Poetry 2.1.2 and should not be changed by hand. [[package]] name = "accessible-pygments" @@ -906,7 +906,7 @@ version = "3.0.2" description = "Safely add untrusted strings to HTML/XML markup." optional = false python-versions = ">=3.9" -groups = ["docs"] +groups = ["dev", "docs"] files = [ {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8"}, {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158"}, @@ -1553,6 +1553,21 @@ pytest = ">=4.6" [package.extras] testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"] +[[package]] +name = "pytest-httpserver" +version = "1.1.3" +description = "pytest-httpserver is a httpserver for pytest" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "pytest_httpserver-1.1.3-py3-none-any.whl", hash = "sha256:5f84757810233e19e2bb5287f3826a71c97a3740abe3a363af9155c0f82fdbb9"}, + {file = "pytest_httpserver-1.1.3.tar.gz", hash = "sha256:af819d6b533f84b4680b9416a5b3f67f1df3701f1da54924afd4d6e4ba5917ec"}, +] + +[package.dependencies] +Werkzeug = ">=2.0.0" + [[package]] name = "python-dateutil" version = "2.9.0.post0" @@ -2418,6 +2433,24 @@ h2 = ["h2 (>=4,<5)"] socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] zstd = ["zstandard (>=0.18.0)"] +[[package]] +name = "werkzeug" +version = "3.1.3" +description = "The comprehensive WSGI web application library." +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "werkzeug-3.1.3-py3-none-any.whl", hash = "sha256:54b78bf3716d19a65be4fceccc0d1d7b89e608834989dfae50ea87564639213e"}, + {file = "werkzeug-3.1.3.tar.gz", hash = "sha256:60723ce945c19328679790e3282cc758aa4a6040e4bb330f53d30fa546d44746"}, +] + +[package.dependencies] +MarkupSafe = ">=2.1.1" + +[package.extras] +watchdog = ["watchdog (>=2.3)"] + [[package]] name = "wheel" version = "0.45.1" @@ -2525,4 +2558,4 @@ files = [ [metadata] lock-version = "2.1" python-versions = "^3.10" -content-hash = "b6eb72a05b4bb10207b3618310c1fc9709e4a2cbd051caf6d9892f2eea299c16" +content-hash = "51553ed03b09cf6bb243a9dcb581826e74fbf8e4f27d582dbdd948671386b2c9" From af21aced986ff87d9334c3d3c37ac4fbe1f98159 Mon Sep 17 00:00:00 2001 From: Michael Meinel Date: Fri, 1 Aug 2025 22:13:42 +0200 Subject: [PATCH 039/247] Fix linting errors. --- test/hermes_test/model/types/__init__.py | 2 +- test/hermes_test/model/types/test_ld_container.py | 10 ++++++---- test/hermes_test/model/types/test_pyld_util.py | 2 +- 3 files changed, 8 insertions(+), 6 deletions(-) diff --git a/test/hermes_test/model/types/__init__.py b/test/hermes_test/model/types/__init__.py index 9a1d6097..ef943d95 100644 --- a/test/hermes_test/model/types/__init__.py +++ b/test/hermes_test/model/types/__init__.py @@ -2,4 +2,4 @@ # # SPDX-License-Identifier: Apache-2.0 -# SPDX-FileContributor: Sophie Kernchen \ No newline at end of file +# SPDX-FileContributor: Sophie Kernchen diff --git a/test/hermes_test/model/types/test_ld_container.py b/test/hermes_test/model/types/test_ld_container.py index 873a7d65..9da5b461 100644 --- a/test/hermes_test/model/types/test_ld_container.py +++ b/test/hermes_test/model/types/test_ld_container.py @@ -101,13 +101,15 @@ def test_to_python_type(self, mock_context): def test_to_python_id_value(self, mock_context): cont = ld_container([{}], context=[mock_context]) - assert cont._to_python("http://spam.eggs/ham", [{"@id": "http://spam.eggs/spam"}]) == "http://spam.eggs/spam" - assert cont._to_python("http://spam.eggs/ham", [{"@id": "http://spam.eggs/identifier"}]) == "http://spam.eggs/identifier" + assert cont._to_python("http://spam.eggs/ham", + [{"@id": "http://spam.eggs/spam"}]) == "http://spam.eggs/spam" + assert cont._to_python("http://spam.eggs/ham", + [{"@id": "http://spam.eggs/identifier"}]) == "http://spam.eggs/identifier" def test_to_python_basic_value(self, mock_context): cont = ld_container([{}], context=[mock_context]) assert cont._to_python("http://soam.eggs/spam", [{"@value": "bacon"}]) == 'bacon' - assert cont._to_python("http://spam.eggs/spam", [{"@value": True}]) == True + assert cont._to_python("http://spam.eggs/spam", [{"@value": True}]) is True assert cont._to_python("http://spam.eggs/spam", [{"@value": 123}]) == 123 def test_to_python_datetime_value(self, mock_context): @@ -148,6 +150,6 @@ def test_to_expanded_basic_value(self, mock_context): def test_to_expanded_datetime_value(self, mock_context): cont = ld_container([{}], context=[mock_context]) - assert cont._to_expanded_json("eggs", datetime(2022, 2,22)) == [ + assert cont._to_expanded_json("eggs", datetime(2022, 2, 22)) == [ {"@value": "2022-02-22T00:00:00", "@type": "http://schema.org/DateTime"} ] diff --git a/test/hermes_test/model/types/test_pyld_util.py b/test/hermes_test/model/types/test_pyld_util.py index b84afe35..00f008c4 100644 --- a/test/hermes_test/model/types/test_pyld_util.py +++ b/test/hermes_test/model/types/test_pyld_util.py @@ -13,6 +13,6 @@ def test_mock_document_compact(ld_proc, mock_document): assert compact_document == mock_document.compact() -def test_mock_document_compact(ld_proc, mock_document): +def test_mock_document_expanded(ld_proc, mock_document): expanded_document = ld_proc.expand(mock_document.compact(), {}) assert expanded_document == mock_document.expanded() From 8b4f9a33777de45931dcece39edf5406c86864f4 Mon Sep 17 00:00:00 2001 From: Michael Meinel Date: Fri, 1 Aug 2025 22:28:03 +0200 Subject: [PATCH 040/247] Delete tests for old data model. --- test/hermes_test/model/test_base_context.py | 38 ----- .../model/test_codemeta_context.py | 71 ---------- .../hermes_test/model/test_harvest_context.py | 134 ------------------ 3 files changed, 243 deletions(-) delete mode 100644 test/hermes_test/model/test_base_context.py delete mode 100644 test/hermes_test/model/test_codemeta_context.py delete mode 100644 test/hermes_test/model/test_harvest_context.py diff --git a/test/hermes_test/model/test_base_context.py b/test/hermes_test/model/test_base_context.py deleted file mode 100644 index bdf016b7..00000000 --- a/test/hermes_test/model/test_base_context.py +++ /dev/null @@ -1,38 +0,0 @@ -# SPDX-FileCopyrightText: 2022 German Aerospace Center (DLR) -# -# SPDX-License-Identifier: Apache-2.0 - -# SPDX-FileContributor: Michael Meinel - -from pathlib import Path - -from hermes.model.context import HermesContext - - -def test_context_hermes_dir_default(): - ctx = HermesContext() - assert ctx.hermes_dir == Path('.') / '.hermes' - - -def test_context_hermes_dir_custom(): - ctx = HermesContext('spam') - assert ctx.hermes_dir == Path('spam') / '.hermes' - - -def test_context_get_cache_default(): - ctx = HermesContext() - assert ctx.get_cache('spam', 'eggs') == Path('.') / '.hermes' / 'spam' / 'eggs.json' - - -def test_context_get_cache_cached(): - ctx = HermesContext() - ctx._caches[('spam', 'eggs')] = Path('spam_and_eggs') - assert ctx.get_cache('spam', 'eggs') == Path('spam_and_eggs') - - -def test_context_get_cache_create(tmpdir): - ctx = HermesContext(tmpdir) - subdir = Path(tmpdir) / '.hermes' / 'spam' - - assert ctx.get_cache('spam', 'eggs', create=True) == subdir / 'eggs.json' - assert subdir.exists() diff --git a/test/hermes_test/model/test_codemeta_context.py b/test/hermes_test/model/test_codemeta_context.py deleted file mode 100644 index b9a5ec3e..00000000 --- a/test/hermes_test/model/test_codemeta_context.py +++ /dev/null @@ -1,71 +0,0 @@ -# SPDX-FileCopyrightText: 2023 German Aerospace Center (DLR) -# -# SPDX-License-Identifier: Apache-2.0 - -# SPDX-FileContributor: Stephan Druskat - -import pytest -from unittest.mock import Mock - -from hermes.model.context import CodeMetaContext, HermesHarvestContext - - -@pytest.fixture -def mock_ep(): - ep = Mock() - ep.name = 'mock_name' - return ep - - -@pytest.fixture -def _context(): - return 'foo', 'bar' - - -@pytest.fixture -def _codemeta_context(): - return CodeMetaContext() - - -@pytest.fixture -def _data(_codemeta_context): - return { - '@context': [ - 'https://doi.org/10.5063/schema/codemeta-2.0', - {'hermes': 'https://software-metadata.pub/ns/hermes/'}], - '@type': 'SoftwareSourceCode' - } - - -@pytest.fixture -def _data_with_contexts(_codemeta_context): - return { - '@type': 'SoftwareSourceCode', - '@context': [ - 'https://doi.org/10.5063/schema/codemeta-2.0', - {'foo': 'bar', - 'hermes': 'https://software-metadata.pub/ns/hermes/'} - ] - } - - -def test_merge_contexts_from(mock_ep, _context, _codemeta_context): - assert _codemeta_context.contexts == {_codemeta_context.hermes_lod_context} - other = HermesHarvestContext(None, mock_ep) - other.contexts.add(_context) - _codemeta_context.merge_contexts_from(other) - assert _codemeta_context.contexts == {_context, _codemeta_context.hermes_lod_context} - - -def test_prepare_codemeta(_codemeta_context, _context, _data): - assert not _codemeta_context.keys() - _codemeta_context.prepare_codemeta() - assert _codemeta_context.get_data() == _data - - -def test_prepare_codemeta_with_contexts(_codemeta_context, _context, _data_with_contexts): - assert not _codemeta_context.keys() - assert _codemeta_context.contexts == {_codemeta_context.hermes_lod_context} - _codemeta_context.add_context(_context) - _codemeta_context.prepare_codemeta() - assert _codemeta_context.get_data() == _data_with_contexts diff --git a/test/hermes_test/model/test_harvest_context.py b/test/hermes_test/model/test_harvest_context.py deleted file mode 100644 index afb1ff70..00000000 --- a/test/hermes_test/model/test_harvest_context.py +++ /dev/null @@ -1,134 +0,0 @@ -# SPDX-FileCopyrightText: 2022 German Aerospace Center (DLR) -# -# SPDX-License-Identifier: Apache-2.0 - -# SPDX-FileContributor: Michael Meinel - -import pytest - -from hermes.model.context import HermesContext, HermesHarvestContext - - -@pytest.fixture -def harvest_ctx(request: pytest.FixtureRequest): - ctx = HermesContext() - return HermesHarvestContext(ctx, request.function.__name__) - - -def test_context_default(harvest_ctx): - harvest_ctx.update('spam', 'eggs', test=True) - - assert harvest_ctx._data['spam'] == [ - ['eggs', {'test': True, - 'timestamp': HermesContext.default_timestamp, - 'harvester': 'test_context_default'}] - ] - - -def test_context_update_append(harvest_ctx): - harvest_ctx.update('spam', 'noodles', index=0) - harvest_ctx.update('spam', 'eggs', index=1) - - assert harvest_ctx._data['spam'] == [ - ['noodles', {'index': 0, - 'timestamp': HermesContext.default_timestamp, - 'harvester': 'test_context_update_append'}], - ['eggs', {'index': 1, - 'timestamp': HermesContext.default_timestamp, - 'harvester': 'test_context_update_append'}] - ] - - -def test_context_update_replace(harvest_ctx): - harvest_ctx.update('spam', 'noodles', test=True) - harvest_ctx.update('spam', 'eggs', test=True) - - assert harvest_ctx._data['spam'] == [ - ['eggs', {'test': True, - 'timestamp': HermesContext.default_timestamp, - 'harvester': 'test_context_update_replace'}] - ] - - -def test_context_bulk_flat(harvest_ctx): - harvest_ctx.update_from({ - 'ans': 42, - 'spam': 'eggs' - }, test=True) - - assert harvest_ctx._data['ans'] == [ - [42, {'test': True, - 'timestamp': HermesContext.default_timestamp, - 'harvester': 'test_context_bulk_flat'}] - ] - assert harvest_ctx._data['spam'] == [ - ['eggs', {'test': True, - 'timestamp': HermesContext.default_timestamp, - 'harvester': 'test_context_bulk_flat'}] - ] - - -def test_context_bulk_complex(harvest_ctx): - harvest_ctx.update_from({ - 'ans': 42, - 'author': [ - {'name': 'Monty Python', 'email': 'eggs@spam.io'}, - {'name': 'Herr Mes'}, - ] - }, test=True) - - assert harvest_ctx._data['ans'] == [ - [42, {'test': True, - 'timestamp': HermesContext.default_timestamp, - 'harvester': 'test_context_bulk_complex'}] - ] - assert harvest_ctx._data['author[0].name'] == [ - ['Monty Python', {'test': True, - 'timestamp': HermesContext.default_timestamp, - 'harvester': 'test_context_bulk_complex'}] - ] - assert harvest_ctx._data['author[0].email'] == [ - ['eggs@spam.io', {'test': True, - 'timestamp': HermesContext.default_timestamp, - 'harvester': 'test_context_bulk_complex'}] - ] - assert harvest_ctx._data['author[1].name'] == [ - ['Herr Mes', {'test': True, - 'timestamp': HermesContext.default_timestamp, - 'harvester': 'test_context_bulk_complex'}] - ] - - -def test_context_bulk_replace(harvest_ctx): - harvest_ctx.update('author[0].name', 'Monty Python', test=True) - harvest_ctx.update_from({'author': [{'name': 'Herr Mes', 'email': 'eggs@spam.io'}]}, test=True) - - assert harvest_ctx._data['author[0].name'] == [ - ['Herr Mes', {'test': True, - 'timestamp': HermesContext.default_timestamp, - 'harvester': 'test_context_bulk_replace'}] - ] - assert harvest_ctx._data['author[0].email'] == [ - ['eggs@spam.io', {'test': True, - 'timestamp': HermesContext.default_timestamp, - 'harvester': 'test_context_bulk_replace'}] - ] - - -def test_context_bulk_append(harvest_ctx): - harvest_ctx.update('author[0].name', 'Monty Python', index=0) - harvest_ctx.update_from({'author': [{'name': 'Herr Mes', 'email': 'eggs@spam.io'}]}, index=1) - - assert harvest_ctx._data['author[0].name'] == [ - ['Monty Python', {'index': 0, - 'timestamp': HermesContext.default_timestamp, - 'harvester': 'test_context_bulk_append'}], - ['Herr Mes', {'index': 1, - 'timestamp': HermesContext.default_timestamp, - 'harvester': 'test_context_bulk_append'}] - ] - assert harvest_ctx._data['author[0].email'] == [ - ['eggs@spam.io', {'index': 1, - 'timestamp': HermesContext.default_timestamp, - 'harvester': 'test_context_bulk_append'}] - ] From d825217f8bd65d02ca218b14caa13d189aae6ee8 Mon Sep 17 00:00:00 2001 From: Michael Meinel Date: Fri, 1 Aug 2025 22:28:41 +0200 Subject: [PATCH 041/247] Disable tests that fail due to missing old data model. --- test/hermes_test/commands/deposit/test_invenio.py | 2 ++ test/hermes_test/commands/init/test_init.py | 3 +++ test/hermes_test/test_cli.py | 2 ++ test/hermes_test/test_main.py | 4 ++++ test/hermes_test/test_marketplace.py | 4 ++++ 5 files changed, 15 insertions(+) diff --git a/test/hermes_test/commands/deposit/test_invenio.py b/test/hermes_test/commands/deposit/test_invenio.py index 38c64d1d..01190d53 100644 --- a/test/hermes_test/commands/deposit/test_invenio.py +++ b/test/hermes_test/commands/deposit/test_invenio.py @@ -10,6 +10,8 @@ import click import pytest +pytest.skip("FIXME: Re-enable test after data model refactoring is done.", allow_module_level=True) + from hermes.commands.deposit import invenio from hermes.error import MisconfigurationError diff --git a/test/hermes_test/commands/init/test_init.py b/test/hermes_test/commands/init/test_init.py index c32138f3..4fe03c76 100644 --- a/test/hermes_test/commands/init/test_init.py +++ b/test/hermes_test/commands/init/test_init.py @@ -4,6 +4,9 @@ import json import pytest + +pytest.skip("FIXME: Re-enable test after data model refactoring is done.", allow_module_level=True) + from hermes.commands.init.base import string_in_file, download_file_from_url from unittest.mock import patch, MagicMock import hermes.commands.init.util.oauth_process as oauth_process diff --git a/test/hermes_test/test_cli.py b/test/hermes_test/test_cli.py index 85b40e5e..ac831792 100644 --- a/test/hermes_test/test_cli.py +++ b/test/hermes_test/test_cli.py @@ -6,6 +6,8 @@ import pytest +pytest.skip("FIXME: Re-enable test after data model refactoring is done.", allow_module_level=True) + from hermes.commands import cli diff --git a/test/hermes_test/test_main.py b/test/hermes_test/test_main.py index 64d6abc0..19dbf365 100644 --- a/test/hermes_test/test_main.py +++ b/test/hermes_test/test_main.py @@ -4,6 +4,10 @@ # SPDX-FileContributor: Michael Meinel +import pytest + +pytest.skip("FIXME: Re-enable test after data model refactoring is done.", allow_module_level=True) + import subprocess import sys diff --git a/test/hermes_test/test_marketplace.py b/test/hermes_test/test_marketplace.py index 9fe31045..50dc1350 100644 --- a/test/hermes_test/test_marketplace.py +++ b/test/hermes_test/test_marketplace.py @@ -2,6 +2,10 @@ # SPDX-License-Identifier: Apache-2.0 # SPDX-FileContributor: David Pape +import pytest + +pytest.skip("FIXME: Re-enable test after data model refactoring is done.", allow_module_level=True) + import requests_mock from hermes.commands.marketplace import ( From d9761ab6df61bfa15d76f024eee5e90c30241f17 Mon Sep 17 00:00:00 2001 From: Michael Meinel Date: Fri, 1 Aug 2025 22:29:09 +0200 Subject: [PATCH 042/247] Fix test for mock document. --- test/hermes_test/model/types/test_pyld_util.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/hermes_test/model/types/test_pyld_util.py b/test/hermes_test/model/types/test_pyld_util.py index 00f008c4..fa4e539d 100644 --- a/test/hermes_test/model/types/test_pyld_util.py +++ b/test/hermes_test/model/types/test_pyld_util.py @@ -9,7 +9,7 @@ def ld_proc(): def test_mock_document_compact(ld_proc, mock_document): - compact_document = ld_proc.compact(mock_document.expanded(), [mock_document.vocabulary], {}) + compact_document = ld_proc.compact(mock_document.expanded(), [mock_document.vocabulary()], {}) assert compact_document == mock_document.compact() From 9113c7bbd869c104b47e6c01280c893076544355 Mon Sep 17 00:00:00 2001 From: Michael Meinel Date: Fri, 1 Aug 2025 22:29:25 +0200 Subject: [PATCH 043/247] Fix mock document. --- test/hermes_test/model/types/conftest.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/hermes_test/model/types/conftest.py b/test/hermes_test/model/types/conftest.py index 3a5eb106..8a1c7c2e 100644 --- a/test/hermes_test/model/types/conftest.py +++ b/test/hermes_test/model/types/conftest.py @@ -40,7 +40,7 @@ def compact(cls, base_url: str = "http://spam.eggs/", embed_vocabulary: bool = T :returns: The rendered compact document. """ return { - "@context": [cls.vocabulary(base_url) if embed_vocabulary else base_url], + "@context": cls.vocabulary(base_url) if embed_vocabulary else base_url, "spam": "bacon", "ham": f"{base_url}identifier", From df5b0efa73c1f6a439db1f058463e93558997683 Mon Sep 17 00:00:00 2001 From: Michael Meinel Date: Fri, 1 Aug 2025 22:37:36 +0200 Subject: [PATCH 044/247] Silence flake8 on inactive tests. --- test/hermes_test/commands/deposit/test_invenio.py | 1 + test/hermes_test/commands/init/test_init.py | 1 + test/hermes_test/test_cli.py | 1 + test/hermes_test/test_main.py | 1 + test/hermes_test/test_marketplace.py | 1 + 5 files changed, 5 insertions(+) diff --git a/test/hermes_test/commands/deposit/test_invenio.py b/test/hermes_test/commands/deposit/test_invenio.py index 01190d53..969d3496 100644 --- a/test/hermes_test/commands/deposit/test_invenio.py +++ b/test/hermes_test/commands/deposit/test_invenio.py @@ -12,6 +12,7 @@ pytest.skip("FIXME: Re-enable test after data model refactoring is done.", allow_module_level=True) +# noqa: E402 from hermes.commands.deposit import invenio from hermes.error import MisconfigurationError diff --git a/test/hermes_test/commands/init/test_init.py b/test/hermes_test/commands/init/test_init.py index 4fe03c76..791b0baf 100644 --- a/test/hermes_test/commands/init/test_init.py +++ b/test/hermes_test/commands/init/test_init.py @@ -7,6 +7,7 @@ pytest.skip("FIXME: Re-enable test after data model refactoring is done.", allow_module_level=True) +# noqa: E402 from hermes.commands.init.base import string_in_file, download_file_from_url from unittest.mock import patch, MagicMock import hermes.commands.init.util.oauth_process as oauth_process diff --git a/test/hermes_test/test_cli.py b/test/hermes_test/test_cli.py index ac831792..dbc366e8 100644 --- a/test/hermes_test/test_cli.py +++ b/test/hermes_test/test_cli.py @@ -8,6 +8,7 @@ pytest.skip("FIXME: Re-enable test after data model refactoring is done.", allow_module_level=True) +# noqa: E402 from hermes.commands import cli diff --git a/test/hermes_test/test_main.py b/test/hermes_test/test_main.py index 19dbf365..3314307c 100644 --- a/test/hermes_test/test_main.py +++ b/test/hermes_test/test_main.py @@ -8,6 +8,7 @@ pytest.skip("FIXME: Re-enable test after data model refactoring is done.", allow_module_level=True) +# noqa: E402 import subprocess import sys diff --git a/test/hermes_test/test_marketplace.py b/test/hermes_test/test_marketplace.py index 50dc1350..f76284cc 100644 --- a/test/hermes_test/test_marketplace.py +++ b/test/hermes_test/test_marketplace.py @@ -6,6 +6,7 @@ pytest.skip("FIXME: Re-enable test after data model refactoring is done.", allow_module_level=True) +# noqa: E402 import requests_mock from hermes.commands.marketplace import ( From 4fe4ffb5ccf4cbba58947de5efed970a2b81d0e6 Mon Sep 17 00:00:00 2001 From: Michael Meinel Date: Fri, 1 Aug 2025 22:39:52 +0200 Subject: [PATCH 045/247] Silence flake8 on inactive tests more agressively. --- test/hermes_test/commands/deposit/test_invenio.py | 3 ++- test/hermes_test/commands/init/test_init.py | 3 ++- test/hermes_test/test_cli.py | 3 ++- test/hermes_test/test_main.py | 3 ++- test/hermes_test/test_marketplace.py | 3 ++- 5 files changed, 10 insertions(+), 5 deletions(-) diff --git a/test/hermes_test/commands/deposit/test_invenio.py b/test/hermes_test/commands/deposit/test_invenio.py index 969d3496..0ade0b82 100644 --- a/test/hermes_test/commands/deposit/test_invenio.py +++ b/test/hermes_test/commands/deposit/test_invenio.py @@ -5,6 +5,8 @@ # SPDX-FileContributor: Michael Meinel # SPDX-FileContributor: David Pape +# flake8: noqa + from unittest import mock import click @@ -12,7 +14,6 @@ pytest.skip("FIXME: Re-enable test after data model refactoring is done.", allow_module_level=True) -# noqa: E402 from hermes.commands.deposit import invenio from hermes.error import MisconfigurationError diff --git a/test/hermes_test/commands/init/test_init.py b/test/hermes_test/commands/init/test_init.py index 791b0baf..98653dda 100644 --- a/test/hermes_test/commands/init/test_init.py +++ b/test/hermes_test/commands/init/test_init.py @@ -2,12 +2,13 @@ # SPDX-License-Identifier: Apache-2.0 # SPDX-FileContributor: Nitai Heeb +# flake8: noqa + import json import pytest pytest.skip("FIXME: Re-enable test after data model refactoring is done.", allow_module_level=True) -# noqa: E402 from hermes.commands.init.base import string_in_file, download_file_from_url from unittest.mock import patch, MagicMock import hermes.commands.init.util.oauth_process as oauth_process diff --git a/test/hermes_test/test_cli.py b/test/hermes_test/test_cli.py index dbc366e8..26d8c7ef 100644 --- a/test/hermes_test/test_cli.py +++ b/test/hermes_test/test_cli.py @@ -4,11 +4,12 @@ # SPDX-FileContributor: Michael Meinel +# flake8: noqa + import pytest pytest.skip("FIXME: Re-enable test after data model refactoring is done.", allow_module_level=True) -# noqa: E402 from hermes.commands import cli diff --git a/test/hermes_test/test_main.py b/test/hermes_test/test_main.py index 3314307c..74023020 100644 --- a/test/hermes_test/test_main.py +++ b/test/hermes_test/test_main.py @@ -4,11 +4,12 @@ # SPDX-FileContributor: Michael Meinel +# flake8: noqa + import pytest pytest.skip("FIXME: Re-enable test after data model refactoring is done.", allow_module_level=True) -# noqa: E402 import subprocess import sys diff --git a/test/hermes_test/test_marketplace.py b/test/hermes_test/test_marketplace.py index f76284cc..ec76f240 100644 --- a/test/hermes_test/test_marketplace.py +++ b/test/hermes_test/test_marketplace.py @@ -2,11 +2,12 @@ # SPDX-License-Identifier: Apache-2.0 # SPDX-FileContributor: David Pape +# flake8: noqa + import pytest pytest.skip("FIXME: Re-enable test after data model refactoring is done.", allow_module_level=True) -# noqa: E402 import requests_mock from hermes.commands.marketplace import ( From 797ded37fa6b90337c4c80a071e8b5c1d449a2e3 Mon Sep 17 00:00:00 2001 From: Michael Meinel Date: Fri, 1 Aug 2025 22:50:45 +0200 Subject: [PATCH 046/247] Fix handling of identifier in compaction and expansion. --- src/hermes/model/types/ld_container.py | 4 ++-- src/hermes/model/types/pyld_util.py | 8 ++++---- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/src/hermes/model/types/ld_container.py b/src/hermes/model/types/ld_container.py index fd84e033..226ae871 100644 --- a/src/hermes/model/types/ld_container.py +++ b/src/hermes/model/types/ld_container.py @@ -82,7 +82,7 @@ def ld_value(self): def _to_python(self, full_iri, ld_value): if full_iri == "@id": - value = ld_value + value = self.ld_proc.compact_iri(self.active_ctx, ld_value, vocab=False) elif full_iri == "@type": value = [ self.ld_proc.compact_iri(self.active_ctx, ld_type) @@ -100,7 +100,7 @@ def _to_python(self, full_iri, ld_value): def _to_expanded_json(self, key, value): if key == "@id": - ld_value = self.ld_proc.expand_iri(self.active_ctx, value) + ld_value = self.ld_proc.expand_iri(self.active_ctx, value, vocab=False) elif key == "@type": if not isinstance(value, list): value = [value] diff --git a/src/hermes/model/types/pyld_util.py b/src/hermes/model/types/pyld_util.py index f652cce8..2c349c30 100644 --- a/src/hermes/model/types/pyld_util.py +++ b/src/hermes/model/types/pyld_util.py @@ -100,11 +100,11 @@ class JsonLdProcessor(jsonld.JsonLdProcessor): 'mappings': {} }) - def expand_iri(self, active_ctx: t.Any, short_iri: str) -> str: - return self._expand_iri(active_ctx, short_iri, vocab=True) + def expand_iri(self, active_ctx: t.Any, short_iri: str, vocab: bool = True) -> str: + return self._expand_iri(active_ctx, short_iri, vocab=vocab) - def compact_iri(self, active_ctx: t.Any, long_iri: str) -> str: - return self._compact_iri(active_ctx, long_iri, vocab=True) + def compact_iri(self, active_ctx: t.Any, long_iri: str, vocab: bool = True) -> str: + return self._compact_iri(active_ctx, long_iri, vocab=vocab) def inital_ctx(self, local_ctx, options=None): return self.process_context(self._INITIAL_CONTEXT, local_ctx, options or {}) From 026b09f068fc927220f161ff8b72704463a865fe Mon Sep 17 00:00:00 2001 From: Michael Meinel Date: Fri, 1 Aug 2025 22:28:41 +0200 Subject: [PATCH 047/247] Disable tests that fail due to missing old data model. --- test/hermes_test/commands/deposit/test_invenio.py | 2 ++ test/hermes_test/commands/init/test_init.py | 3 +++ test/hermes_test/test_cli.py | 2 ++ test/hermes_test/test_main.py | 4 ++++ test/hermes_test/test_marketplace.py | 4 ++++ 5 files changed, 15 insertions(+) diff --git a/test/hermes_test/commands/deposit/test_invenio.py b/test/hermes_test/commands/deposit/test_invenio.py index 38c64d1d..01190d53 100644 --- a/test/hermes_test/commands/deposit/test_invenio.py +++ b/test/hermes_test/commands/deposit/test_invenio.py @@ -10,6 +10,8 @@ import click import pytest +pytest.skip("FIXME: Re-enable test after data model refactoring is done.", allow_module_level=True) + from hermes.commands.deposit import invenio from hermes.error import MisconfigurationError diff --git a/test/hermes_test/commands/init/test_init.py b/test/hermes_test/commands/init/test_init.py index c32138f3..4fe03c76 100644 --- a/test/hermes_test/commands/init/test_init.py +++ b/test/hermes_test/commands/init/test_init.py @@ -4,6 +4,9 @@ import json import pytest + +pytest.skip("FIXME: Re-enable test after data model refactoring is done.", allow_module_level=True) + from hermes.commands.init.base import string_in_file, download_file_from_url from unittest.mock import patch, MagicMock import hermes.commands.init.util.oauth_process as oauth_process diff --git a/test/hermes_test/test_cli.py b/test/hermes_test/test_cli.py index 85b40e5e..ac831792 100644 --- a/test/hermes_test/test_cli.py +++ b/test/hermes_test/test_cli.py @@ -6,6 +6,8 @@ import pytest +pytest.skip("FIXME: Re-enable test after data model refactoring is done.", allow_module_level=True) + from hermes.commands import cli diff --git a/test/hermes_test/test_main.py b/test/hermes_test/test_main.py index 64d6abc0..19dbf365 100644 --- a/test/hermes_test/test_main.py +++ b/test/hermes_test/test_main.py @@ -4,6 +4,10 @@ # SPDX-FileContributor: Michael Meinel +import pytest + +pytest.skip("FIXME: Re-enable test after data model refactoring is done.", allow_module_level=True) + import subprocess import sys diff --git a/test/hermes_test/test_marketplace.py b/test/hermes_test/test_marketplace.py index 9fe31045..50dc1350 100644 --- a/test/hermes_test/test_marketplace.py +++ b/test/hermes_test/test_marketplace.py @@ -2,6 +2,10 @@ # SPDX-License-Identifier: Apache-2.0 # SPDX-FileContributor: David Pape +import pytest + +pytest.skip("FIXME: Re-enable test after data model refactoring is done.", allow_module_level=True) + import requests_mock from hermes.commands.marketplace import ( From b347dc22f4347eb5b90bdeee1e435128d5a03b32 Mon Sep 17 00:00:00 2001 From: Michael Meinel Date: Fri, 1 Aug 2025 22:37:36 +0200 Subject: [PATCH 048/247] Silence flake8 on inactive tests. --- test/hermes_test/commands/deposit/test_invenio.py | 1 + test/hermes_test/commands/init/test_init.py | 1 + test/hermes_test/test_cli.py | 1 + test/hermes_test/test_main.py | 1 + test/hermes_test/test_marketplace.py | 1 + 5 files changed, 5 insertions(+) diff --git a/test/hermes_test/commands/deposit/test_invenio.py b/test/hermes_test/commands/deposit/test_invenio.py index 01190d53..969d3496 100644 --- a/test/hermes_test/commands/deposit/test_invenio.py +++ b/test/hermes_test/commands/deposit/test_invenio.py @@ -12,6 +12,7 @@ pytest.skip("FIXME: Re-enable test after data model refactoring is done.", allow_module_level=True) +# noqa: E402 from hermes.commands.deposit import invenio from hermes.error import MisconfigurationError diff --git a/test/hermes_test/commands/init/test_init.py b/test/hermes_test/commands/init/test_init.py index 4fe03c76..791b0baf 100644 --- a/test/hermes_test/commands/init/test_init.py +++ b/test/hermes_test/commands/init/test_init.py @@ -7,6 +7,7 @@ pytest.skip("FIXME: Re-enable test after data model refactoring is done.", allow_module_level=True) +# noqa: E402 from hermes.commands.init.base import string_in_file, download_file_from_url from unittest.mock import patch, MagicMock import hermes.commands.init.util.oauth_process as oauth_process diff --git a/test/hermes_test/test_cli.py b/test/hermes_test/test_cli.py index ac831792..dbc366e8 100644 --- a/test/hermes_test/test_cli.py +++ b/test/hermes_test/test_cli.py @@ -8,6 +8,7 @@ pytest.skip("FIXME: Re-enable test after data model refactoring is done.", allow_module_level=True) +# noqa: E402 from hermes.commands import cli diff --git a/test/hermes_test/test_main.py b/test/hermes_test/test_main.py index 19dbf365..3314307c 100644 --- a/test/hermes_test/test_main.py +++ b/test/hermes_test/test_main.py @@ -8,6 +8,7 @@ pytest.skip("FIXME: Re-enable test after data model refactoring is done.", allow_module_level=True) +# noqa: E402 import subprocess import sys diff --git a/test/hermes_test/test_marketplace.py b/test/hermes_test/test_marketplace.py index 50dc1350..f76284cc 100644 --- a/test/hermes_test/test_marketplace.py +++ b/test/hermes_test/test_marketplace.py @@ -6,6 +6,7 @@ pytest.skip("FIXME: Re-enable test after data model refactoring is done.", allow_module_level=True) +# noqa: E402 import requests_mock from hermes.commands.marketplace import ( From 7a2b0d63a28fa18d0b4e219eb4a3878b559ce1ce Mon Sep 17 00:00:00 2001 From: Michael Meinel Date: Fri, 1 Aug 2025 22:39:52 +0200 Subject: [PATCH 049/247] Silence flake8 on inactive tests more agressively. --- test/hermes_test/commands/deposit/test_invenio.py | 3 ++- test/hermes_test/commands/init/test_init.py | 3 ++- test/hermes_test/test_cli.py | 3 ++- test/hermes_test/test_main.py | 3 ++- test/hermes_test/test_marketplace.py | 3 ++- 5 files changed, 10 insertions(+), 5 deletions(-) diff --git a/test/hermes_test/commands/deposit/test_invenio.py b/test/hermes_test/commands/deposit/test_invenio.py index 969d3496..0ade0b82 100644 --- a/test/hermes_test/commands/deposit/test_invenio.py +++ b/test/hermes_test/commands/deposit/test_invenio.py @@ -5,6 +5,8 @@ # SPDX-FileContributor: Michael Meinel # SPDX-FileContributor: David Pape +# flake8: noqa + from unittest import mock import click @@ -12,7 +14,6 @@ pytest.skip("FIXME: Re-enable test after data model refactoring is done.", allow_module_level=True) -# noqa: E402 from hermes.commands.deposit import invenio from hermes.error import MisconfigurationError diff --git a/test/hermes_test/commands/init/test_init.py b/test/hermes_test/commands/init/test_init.py index 791b0baf..98653dda 100644 --- a/test/hermes_test/commands/init/test_init.py +++ b/test/hermes_test/commands/init/test_init.py @@ -2,12 +2,13 @@ # SPDX-License-Identifier: Apache-2.0 # SPDX-FileContributor: Nitai Heeb +# flake8: noqa + import json import pytest pytest.skip("FIXME: Re-enable test after data model refactoring is done.", allow_module_level=True) -# noqa: E402 from hermes.commands.init.base import string_in_file, download_file_from_url from unittest.mock import patch, MagicMock import hermes.commands.init.util.oauth_process as oauth_process diff --git a/test/hermes_test/test_cli.py b/test/hermes_test/test_cli.py index dbc366e8..26d8c7ef 100644 --- a/test/hermes_test/test_cli.py +++ b/test/hermes_test/test_cli.py @@ -4,11 +4,12 @@ # SPDX-FileContributor: Michael Meinel +# flake8: noqa + import pytest pytest.skip("FIXME: Re-enable test after data model refactoring is done.", allow_module_level=True) -# noqa: E402 from hermes.commands import cli diff --git a/test/hermes_test/test_main.py b/test/hermes_test/test_main.py index 3314307c..74023020 100644 --- a/test/hermes_test/test_main.py +++ b/test/hermes_test/test_main.py @@ -4,11 +4,12 @@ # SPDX-FileContributor: Michael Meinel +# flake8: noqa + import pytest pytest.skip("FIXME: Re-enable test after data model refactoring is done.", allow_module_level=True) -# noqa: E402 import subprocess import sys diff --git a/test/hermes_test/test_marketplace.py b/test/hermes_test/test_marketplace.py index f76284cc..ec76f240 100644 --- a/test/hermes_test/test_marketplace.py +++ b/test/hermes_test/test_marketplace.py @@ -2,11 +2,12 @@ # SPDX-License-Identifier: Apache-2.0 # SPDX-FileContributor: David Pape +# flake8: noqa + import pytest pytest.skip("FIXME: Re-enable test after data model refactoring is done.", allow_module_level=True) -# noqa: E402 import requests_mock from hermes.commands.marketplace import ( From d577aaba8c0b5bb91f83481f23f868d2c990a7bd Mon Sep 17 00:00:00 2001 From: Michael Meinel Date: Fri, 1 Aug 2025 23:08:34 +0200 Subject: [PATCH 050/247] Revert "Silence flake8 on inactive tests more agressively." This reverts commit 4fe4ffb5ccf4cbba58947de5efed970a2b81d0e6. --- test/hermes_test/commands/deposit/test_invenio.py | 3 +-- test/hermes_test/commands/init/test_init.py | 3 +-- test/hermes_test/test_cli.py | 3 +-- test/hermes_test/test_main.py | 3 +-- test/hermes_test/test_marketplace.py | 3 +-- 5 files changed, 5 insertions(+), 10 deletions(-) diff --git a/test/hermes_test/commands/deposit/test_invenio.py b/test/hermes_test/commands/deposit/test_invenio.py index 0ade0b82..969d3496 100644 --- a/test/hermes_test/commands/deposit/test_invenio.py +++ b/test/hermes_test/commands/deposit/test_invenio.py @@ -5,8 +5,6 @@ # SPDX-FileContributor: Michael Meinel # SPDX-FileContributor: David Pape -# flake8: noqa - from unittest import mock import click @@ -14,6 +12,7 @@ pytest.skip("FIXME: Re-enable test after data model refactoring is done.", allow_module_level=True) +# noqa: E402 from hermes.commands.deposit import invenio from hermes.error import MisconfigurationError diff --git a/test/hermes_test/commands/init/test_init.py b/test/hermes_test/commands/init/test_init.py index 98653dda..791b0baf 100644 --- a/test/hermes_test/commands/init/test_init.py +++ b/test/hermes_test/commands/init/test_init.py @@ -2,13 +2,12 @@ # SPDX-License-Identifier: Apache-2.0 # SPDX-FileContributor: Nitai Heeb -# flake8: noqa - import json import pytest pytest.skip("FIXME: Re-enable test after data model refactoring is done.", allow_module_level=True) +# noqa: E402 from hermes.commands.init.base import string_in_file, download_file_from_url from unittest.mock import patch, MagicMock import hermes.commands.init.util.oauth_process as oauth_process diff --git a/test/hermes_test/test_cli.py b/test/hermes_test/test_cli.py index 26d8c7ef..dbc366e8 100644 --- a/test/hermes_test/test_cli.py +++ b/test/hermes_test/test_cli.py @@ -4,12 +4,11 @@ # SPDX-FileContributor: Michael Meinel -# flake8: noqa - import pytest pytest.skip("FIXME: Re-enable test after data model refactoring is done.", allow_module_level=True) +# noqa: E402 from hermes.commands import cli diff --git a/test/hermes_test/test_main.py b/test/hermes_test/test_main.py index 74023020..3314307c 100644 --- a/test/hermes_test/test_main.py +++ b/test/hermes_test/test_main.py @@ -4,12 +4,11 @@ # SPDX-FileContributor: Michael Meinel -# flake8: noqa - import pytest pytest.skip("FIXME: Re-enable test after data model refactoring is done.", allow_module_level=True) +# noqa: E402 import subprocess import sys diff --git a/test/hermes_test/test_marketplace.py b/test/hermes_test/test_marketplace.py index ec76f240..f76284cc 100644 --- a/test/hermes_test/test_marketplace.py +++ b/test/hermes_test/test_marketplace.py @@ -2,12 +2,11 @@ # SPDX-License-Identifier: Apache-2.0 # SPDX-FileContributor: David Pape -# flake8: noqa - import pytest pytest.skip("FIXME: Re-enable test after data model refactoring is done.", allow_module_level=True) +# noqa: E402 import requests_mock from hermes.commands.marketplace import ( From b432ccfe2f6be5c6d10e8ab69ab43b254733499e Mon Sep 17 00:00:00 2001 From: Michael Meinel Date: Fri, 1 Aug 2025 23:08:34 +0200 Subject: [PATCH 051/247] Revert "Silence flake8 on inactive tests." This reverts commit df5b0efa73c1f6a439db1f058463e93558997683. --- test/hermes_test/commands/deposit/test_invenio.py | 1 - test/hermes_test/commands/init/test_init.py | 1 - test/hermes_test/test_cli.py | 1 - test/hermes_test/test_main.py | 1 - test/hermes_test/test_marketplace.py | 1 - 5 files changed, 5 deletions(-) diff --git a/test/hermes_test/commands/deposit/test_invenio.py b/test/hermes_test/commands/deposit/test_invenio.py index 969d3496..01190d53 100644 --- a/test/hermes_test/commands/deposit/test_invenio.py +++ b/test/hermes_test/commands/deposit/test_invenio.py @@ -12,7 +12,6 @@ pytest.skip("FIXME: Re-enable test after data model refactoring is done.", allow_module_level=True) -# noqa: E402 from hermes.commands.deposit import invenio from hermes.error import MisconfigurationError diff --git a/test/hermes_test/commands/init/test_init.py b/test/hermes_test/commands/init/test_init.py index 791b0baf..4fe03c76 100644 --- a/test/hermes_test/commands/init/test_init.py +++ b/test/hermes_test/commands/init/test_init.py @@ -7,7 +7,6 @@ pytest.skip("FIXME: Re-enable test after data model refactoring is done.", allow_module_level=True) -# noqa: E402 from hermes.commands.init.base import string_in_file, download_file_from_url from unittest.mock import patch, MagicMock import hermes.commands.init.util.oauth_process as oauth_process diff --git a/test/hermes_test/test_cli.py b/test/hermes_test/test_cli.py index dbc366e8..ac831792 100644 --- a/test/hermes_test/test_cli.py +++ b/test/hermes_test/test_cli.py @@ -8,7 +8,6 @@ pytest.skip("FIXME: Re-enable test after data model refactoring is done.", allow_module_level=True) -# noqa: E402 from hermes.commands import cli diff --git a/test/hermes_test/test_main.py b/test/hermes_test/test_main.py index 3314307c..19dbf365 100644 --- a/test/hermes_test/test_main.py +++ b/test/hermes_test/test_main.py @@ -8,7 +8,6 @@ pytest.skip("FIXME: Re-enable test after data model refactoring is done.", allow_module_level=True) -# noqa: E402 import subprocess import sys diff --git a/test/hermes_test/test_marketplace.py b/test/hermes_test/test_marketplace.py index f76284cc..50dc1350 100644 --- a/test/hermes_test/test_marketplace.py +++ b/test/hermes_test/test_marketplace.py @@ -6,7 +6,6 @@ pytest.skip("FIXME: Re-enable test after data model refactoring is done.", allow_module_level=True) -# noqa: E402 import requests_mock from hermes.commands.marketplace import ( From 6a101290416e562a36fd5be371459d18d45bfe25 Mon Sep 17 00:00:00 2001 From: Michael Meinel Date: Fri, 1 Aug 2025 23:08:35 +0200 Subject: [PATCH 052/247] Revert "Disable tests that fail due to missing old data model." This reverts commit d825217f8bd65d02ca218b14caa13d189aae6ee8. --- test/hermes_test/commands/deposit/test_invenio.py | 2 -- test/hermes_test/commands/init/test_init.py | 3 --- test/hermes_test/test_cli.py | 2 -- test/hermes_test/test_main.py | 4 ---- test/hermes_test/test_marketplace.py | 4 ---- 5 files changed, 15 deletions(-) diff --git a/test/hermes_test/commands/deposit/test_invenio.py b/test/hermes_test/commands/deposit/test_invenio.py index 01190d53..38c64d1d 100644 --- a/test/hermes_test/commands/deposit/test_invenio.py +++ b/test/hermes_test/commands/deposit/test_invenio.py @@ -10,8 +10,6 @@ import click import pytest -pytest.skip("FIXME: Re-enable test after data model refactoring is done.", allow_module_level=True) - from hermes.commands.deposit import invenio from hermes.error import MisconfigurationError diff --git a/test/hermes_test/commands/init/test_init.py b/test/hermes_test/commands/init/test_init.py index 4fe03c76..c32138f3 100644 --- a/test/hermes_test/commands/init/test_init.py +++ b/test/hermes_test/commands/init/test_init.py @@ -4,9 +4,6 @@ import json import pytest - -pytest.skip("FIXME: Re-enable test after data model refactoring is done.", allow_module_level=True) - from hermes.commands.init.base import string_in_file, download_file_from_url from unittest.mock import patch, MagicMock import hermes.commands.init.util.oauth_process as oauth_process diff --git a/test/hermes_test/test_cli.py b/test/hermes_test/test_cli.py index ac831792..85b40e5e 100644 --- a/test/hermes_test/test_cli.py +++ b/test/hermes_test/test_cli.py @@ -6,8 +6,6 @@ import pytest -pytest.skip("FIXME: Re-enable test after data model refactoring is done.", allow_module_level=True) - from hermes.commands import cli diff --git a/test/hermes_test/test_main.py b/test/hermes_test/test_main.py index 19dbf365..64d6abc0 100644 --- a/test/hermes_test/test_main.py +++ b/test/hermes_test/test_main.py @@ -4,10 +4,6 @@ # SPDX-FileContributor: Michael Meinel -import pytest - -pytest.skip("FIXME: Re-enable test after data model refactoring is done.", allow_module_level=True) - import subprocess import sys diff --git a/test/hermes_test/test_marketplace.py b/test/hermes_test/test_marketplace.py index 50dc1350..9fe31045 100644 --- a/test/hermes_test/test_marketplace.py +++ b/test/hermes_test/test_marketplace.py @@ -2,10 +2,6 @@ # SPDX-License-Identifier: Apache-2.0 # SPDX-FileContributor: David Pape -import pytest - -pytest.skip("FIXME: Re-enable test after data model refactoring is done.", allow_module_level=True) - import requests_mock from hermes.commands.marketplace import ( From 82b45e578b817ec26e00aac27424b33149111077 Mon Sep 17 00:00:00 2001 From: Michael Meinel Date: Fri, 1 Aug 2025 22:28:03 +0200 Subject: [PATCH 053/247] Delete tests for old data model. --- .../model/test_codemeta_context.py | 71 ---------- .../hermes_test/model/test_harvest_context.py | 134 ------------------ 2 files changed, 205 deletions(-) delete mode 100644 test/hermes_test/model/test_codemeta_context.py delete mode 100644 test/hermes_test/model/test_harvest_context.py diff --git a/test/hermes_test/model/test_codemeta_context.py b/test/hermes_test/model/test_codemeta_context.py deleted file mode 100644 index b9a5ec3e..00000000 --- a/test/hermes_test/model/test_codemeta_context.py +++ /dev/null @@ -1,71 +0,0 @@ -# SPDX-FileCopyrightText: 2023 German Aerospace Center (DLR) -# -# SPDX-License-Identifier: Apache-2.0 - -# SPDX-FileContributor: Stephan Druskat - -import pytest -from unittest.mock import Mock - -from hermes.model.context import CodeMetaContext, HermesHarvestContext - - -@pytest.fixture -def mock_ep(): - ep = Mock() - ep.name = 'mock_name' - return ep - - -@pytest.fixture -def _context(): - return 'foo', 'bar' - - -@pytest.fixture -def _codemeta_context(): - return CodeMetaContext() - - -@pytest.fixture -def _data(_codemeta_context): - return { - '@context': [ - 'https://doi.org/10.5063/schema/codemeta-2.0', - {'hermes': 'https://software-metadata.pub/ns/hermes/'}], - '@type': 'SoftwareSourceCode' - } - - -@pytest.fixture -def _data_with_contexts(_codemeta_context): - return { - '@type': 'SoftwareSourceCode', - '@context': [ - 'https://doi.org/10.5063/schema/codemeta-2.0', - {'foo': 'bar', - 'hermes': 'https://software-metadata.pub/ns/hermes/'} - ] - } - - -def test_merge_contexts_from(mock_ep, _context, _codemeta_context): - assert _codemeta_context.contexts == {_codemeta_context.hermes_lod_context} - other = HermesHarvestContext(None, mock_ep) - other.contexts.add(_context) - _codemeta_context.merge_contexts_from(other) - assert _codemeta_context.contexts == {_context, _codemeta_context.hermes_lod_context} - - -def test_prepare_codemeta(_codemeta_context, _context, _data): - assert not _codemeta_context.keys() - _codemeta_context.prepare_codemeta() - assert _codemeta_context.get_data() == _data - - -def test_prepare_codemeta_with_contexts(_codemeta_context, _context, _data_with_contexts): - assert not _codemeta_context.keys() - assert _codemeta_context.contexts == {_codemeta_context.hermes_lod_context} - _codemeta_context.add_context(_context) - _codemeta_context.prepare_codemeta() - assert _codemeta_context.get_data() == _data_with_contexts diff --git a/test/hermes_test/model/test_harvest_context.py b/test/hermes_test/model/test_harvest_context.py deleted file mode 100644 index afb1ff70..00000000 --- a/test/hermes_test/model/test_harvest_context.py +++ /dev/null @@ -1,134 +0,0 @@ -# SPDX-FileCopyrightText: 2022 German Aerospace Center (DLR) -# -# SPDX-License-Identifier: Apache-2.0 - -# SPDX-FileContributor: Michael Meinel - -import pytest - -from hermes.model.context import HermesContext, HermesHarvestContext - - -@pytest.fixture -def harvest_ctx(request: pytest.FixtureRequest): - ctx = HermesContext() - return HermesHarvestContext(ctx, request.function.__name__) - - -def test_context_default(harvest_ctx): - harvest_ctx.update('spam', 'eggs', test=True) - - assert harvest_ctx._data['spam'] == [ - ['eggs', {'test': True, - 'timestamp': HermesContext.default_timestamp, - 'harvester': 'test_context_default'}] - ] - - -def test_context_update_append(harvest_ctx): - harvest_ctx.update('spam', 'noodles', index=0) - harvest_ctx.update('spam', 'eggs', index=1) - - assert harvest_ctx._data['spam'] == [ - ['noodles', {'index': 0, - 'timestamp': HermesContext.default_timestamp, - 'harvester': 'test_context_update_append'}], - ['eggs', {'index': 1, - 'timestamp': HermesContext.default_timestamp, - 'harvester': 'test_context_update_append'}] - ] - - -def test_context_update_replace(harvest_ctx): - harvest_ctx.update('spam', 'noodles', test=True) - harvest_ctx.update('spam', 'eggs', test=True) - - assert harvest_ctx._data['spam'] == [ - ['eggs', {'test': True, - 'timestamp': HermesContext.default_timestamp, - 'harvester': 'test_context_update_replace'}] - ] - - -def test_context_bulk_flat(harvest_ctx): - harvest_ctx.update_from({ - 'ans': 42, - 'spam': 'eggs' - }, test=True) - - assert harvest_ctx._data['ans'] == [ - [42, {'test': True, - 'timestamp': HermesContext.default_timestamp, - 'harvester': 'test_context_bulk_flat'}] - ] - assert harvest_ctx._data['spam'] == [ - ['eggs', {'test': True, - 'timestamp': HermesContext.default_timestamp, - 'harvester': 'test_context_bulk_flat'}] - ] - - -def test_context_bulk_complex(harvest_ctx): - harvest_ctx.update_from({ - 'ans': 42, - 'author': [ - {'name': 'Monty Python', 'email': 'eggs@spam.io'}, - {'name': 'Herr Mes'}, - ] - }, test=True) - - assert harvest_ctx._data['ans'] == [ - [42, {'test': True, - 'timestamp': HermesContext.default_timestamp, - 'harvester': 'test_context_bulk_complex'}] - ] - assert harvest_ctx._data['author[0].name'] == [ - ['Monty Python', {'test': True, - 'timestamp': HermesContext.default_timestamp, - 'harvester': 'test_context_bulk_complex'}] - ] - assert harvest_ctx._data['author[0].email'] == [ - ['eggs@spam.io', {'test': True, - 'timestamp': HermesContext.default_timestamp, - 'harvester': 'test_context_bulk_complex'}] - ] - assert harvest_ctx._data['author[1].name'] == [ - ['Herr Mes', {'test': True, - 'timestamp': HermesContext.default_timestamp, - 'harvester': 'test_context_bulk_complex'}] - ] - - -def test_context_bulk_replace(harvest_ctx): - harvest_ctx.update('author[0].name', 'Monty Python', test=True) - harvest_ctx.update_from({'author': [{'name': 'Herr Mes', 'email': 'eggs@spam.io'}]}, test=True) - - assert harvest_ctx._data['author[0].name'] == [ - ['Herr Mes', {'test': True, - 'timestamp': HermesContext.default_timestamp, - 'harvester': 'test_context_bulk_replace'}] - ] - assert harvest_ctx._data['author[0].email'] == [ - ['eggs@spam.io', {'test': True, - 'timestamp': HermesContext.default_timestamp, - 'harvester': 'test_context_bulk_replace'}] - ] - - -def test_context_bulk_append(harvest_ctx): - harvest_ctx.update('author[0].name', 'Monty Python', index=0) - harvest_ctx.update_from({'author': [{'name': 'Herr Mes', 'email': 'eggs@spam.io'}]}, index=1) - - assert harvest_ctx._data['author[0].name'] == [ - ['Monty Python', {'index': 0, - 'timestamp': HermesContext.default_timestamp, - 'harvester': 'test_context_bulk_append'}], - ['Herr Mes', {'index': 1, - 'timestamp': HermesContext.default_timestamp, - 'harvester': 'test_context_bulk_append'}] - ] - assert harvest_ctx._data['author[0].email'] == [ - ['eggs@spam.io', {'index': 1, - 'timestamp': HermesContext.default_timestamp, - 'harvester': 'test_context_bulk_append'}] - ] From de09fdb3ebee065d2cb7fc1657fdd333088a3ba3 Mon Sep 17 00:00:00 2001 From: Michael Meinel Date: Fri, 1 Aug 2025 23:15:57 +0200 Subject: [PATCH 054/247] Revert "Delete tests for old data model." This reverts commit 8b4f9a33777de45931dcece39edf5406c86864f4. --- test/hermes_test/model/test_base_context.py | 38 +++++ .../model/test_codemeta_context.py | 71 ++++++++++ .../hermes_test/model/test_harvest_context.py | 134 ++++++++++++++++++ 3 files changed, 243 insertions(+) create mode 100644 test/hermes_test/model/test_base_context.py create mode 100644 test/hermes_test/model/test_codemeta_context.py create mode 100644 test/hermes_test/model/test_harvest_context.py diff --git a/test/hermes_test/model/test_base_context.py b/test/hermes_test/model/test_base_context.py new file mode 100644 index 00000000..bdf016b7 --- /dev/null +++ b/test/hermes_test/model/test_base_context.py @@ -0,0 +1,38 @@ +# SPDX-FileCopyrightText: 2022 German Aerospace Center (DLR) +# +# SPDX-License-Identifier: Apache-2.0 + +# SPDX-FileContributor: Michael Meinel + +from pathlib import Path + +from hermes.model.context import HermesContext + + +def test_context_hermes_dir_default(): + ctx = HermesContext() + assert ctx.hermes_dir == Path('.') / '.hermes' + + +def test_context_hermes_dir_custom(): + ctx = HermesContext('spam') + assert ctx.hermes_dir == Path('spam') / '.hermes' + + +def test_context_get_cache_default(): + ctx = HermesContext() + assert ctx.get_cache('spam', 'eggs') == Path('.') / '.hermes' / 'spam' / 'eggs.json' + + +def test_context_get_cache_cached(): + ctx = HermesContext() + ctx._caches[('spam', 'eggs')] = Path('spam_and_eggs') + assert ctx.get_cache('spam', 'eggs') == Path('spam_and_eggs') + + +def test_context_get_cache_create(tmpdir): + ctx = HermesContext(tmpdir) + subdir = Path(tmpdir) / '.hermes' / 'spam' + + assert ctx.get_cache('spam', 'eggs', create=True) == subdir / 'eggs.json' + assert subdir.exists() diff --git a/test/hermes_test/model/test_codemeta_context.py b/test/hermes_test/model/test_codemeta_context.py new file mode 100644 index 00000000..b9a5ec3e --- /dev/null +++ b/test/hermes_test/model/test_codemeta_context.py @@ -0,0 +1,71 @@ +# SPDX-FileCopyrightText: 2023 German Aerospace Center (DLR) +# +# SPDX-License-Identifier: Apache-2.0 + +# SPDX-FileContributor: Stephan Druskat + +import pytest +from unittest.mock import Mock + +from hermes.model.context import CodeMetaContext, HermesHarvestContext + + +@pytest.fixture +def mock_ep(): + ep = Mock() + ep.name = 'mock_name' + return ep + + +@pytest.fixture +def _context(): + return 'foo', 'bar' + + +@pytest.fixture +def _codemeta_context(): + return CodeMetaContext() + + +@pytest.fixture +def _data(_codemeta_context): + return { + '@context': [ + 'https://doi.org/10.5063/schema/codemeta-2.0', + {'hermes': 'https://software-metadata.pub/ns/hermes/'}], + '@type': 'SoftwareSourceCode' + } + + +@pytest.fixture +def _data_with_contexts(_codemeta_context): + return { + '@type': 'SoftwareSourceCode', + '@context': [ + 'https://doi.org/10.5063/schema/codemeta-2.0', + {'foo': 'bar', + 'hermes': 'https://software-metadata.pub/ns/hermes/'} + ] + } + + +def test_merge_contexts_from(mock_ep, _context, _codemeta_context): + assert _codemeta_context.contexts == {_codemeta_context.hermes_lod_context} + other = HermesHarvestContext(None, mock_ep) + other.contexts.add(_context) + _codemeta_context.merge_contexts_from(other) + assert _codemeta_context.contexts == {_context, _codemeta_context.hermes_lod_context} + + +def test_prepare_codemeta(_codemeta_context, _context, _data): + assert not _codemeta_context.keys() + _codemeta_context.prepare_codemeta() + assert _codemeta_context.get_data() == _data + + +def test_prepare_codemeta_with_contexts(_codemeta_context, _context, _data_with_contexts): + assert not _codemeta_context.keys() + assert _codemeta_context.contexts == {_codemeta_context.hermes_lod_context} + _codemeta_context.add_context(_context) + _codemeta_context.prepare_codemeta() + assert _codemeta_context.get_data() == _data_with_contexts diff --git a/test/hermes_test/model/test_harvest_context.py b/test/hermes_test/model/test_harvest_context.py new file mode 100644 index 00000000..afb1ff70 --- /dev/null +++ b/test/hermes_test/model/test_harvest_context.py @@ -0,0 +1,134 @@ +# SPDX-FileCopyrightText: 2022 German Aerospace Center (DLR) +# +# SPDX-License-Identifier: Apache-2.0 + +# SPDX-FileContributor: Michael Meinel + +import pytest + +from hermes.model.context import HermesContext, HermesHarvestContext + + +@pytest.fixture +def harvest_ctx(request: pytest.FixtureRequest): + ctx = HermesContext() + return HermesHarvestContext(ctx, request.function.__name__) + + +def test_context_default(harvest_ctx): + harvest_ctx.update('spam', 'eggs', test=True) + + assert harvest_ctx._data['spam'] == [ + ['eggs', {'test': True, + 'timestamp': HermesContext.default_timestamp, + 'harvester': 'test_context_default'}] + ] + + +def test_context_update_append(harvest_ctx): + harvest_ctx.update('spam', 'noodles', index=0) + harvest_ctx.update('spam', 'eggs', index=1) + + assert harvest_ctx._data['spam'] == [ + ['noodles', {'index': 0, + 'timestamp': HermesContext.default_timestamp, + 'harvester': 'test_context_update_append'}], + ['eggs', {'index': 1, + 'timestamp': HermesContext.default_timestamp, + 'harvester': 'test_context_update_append'}] + ] + + +def test_context_update_replace(harvest_ctx): + harvest_ctx.update('spam', 'noodles', test=True) + harvest_ctx.update('spam', 'eggs', test=True) + + assert harvest_ctx._data['spam'] == [ + ['eggs', {'test': True, + 'timestamp': HermesContext.default_timestamp, + 'harvester': 'test_context_update_replace'}] + ] + + +def test_context_bulk_flat(harvest_ctx): + harvest_ctx.update_from({ + 'ans': 42, + 'spam': 'eggs' + }, test=True) + + assert harvest_ctx._data['ans'] == [ + [42, {'test': True, + 'timestamp': HermesContext.default_timestamp, + 'harvester': 'test_context_bulk_flat'}] + ] + assert harvest_ctx._data['spam'] == [ + ['eggs', {'test': True, + 'timestamp': HermesContext.default_timestamp, + 'harvester': 'test_context_bulk_flat'}] + ] + + +def test_context_bulk_complex(harvest_ctx): + harvest_ctx.update_from({ + 'ans': 42, + 'author': [ + {'name': 'Monty Python', 'email': 'eggs@spam.io'}, + {'name': 'Herr Mes'}, + ] + }, test=True) + + assert harvest_ctx._data['ans'] == [ + [42, {'test': True, + 'timestamp': HermesContext.default_timestamp, + 'harvester': 'test_context_bulk_complex'}] + ] + assert harvest_ctx._data['author[0].name'] == [ + ['Monty Python', {'test': True, + 'timestamp': HermesContext.default_timestamp, + 'harvester': 'test_context_bulk_complex'}] + ] + assert harvest_ctx._data['author[0].email'] == [ + ['eggs@spam.io', {'test': True, + 'timestamp': HermesContext.default_timestamp, + 'harvester': 'test_context_bulk_complex'}] + ] + assert harvest_ctx._data['author[1].name'] == [ + ['Herr Mes', {'test': True, + 'timestamp': HermesContext.default_timestamp, + 'harvester': 'test_context_bulk_complex'}] + ] + + +def test_context_bulk_replace(harvest_ctx): + harvest_ctx.update('author[0].name', 'Monty Python', test=True) + harvest_ctx.update_from({'author': [{'name': 'Herr Mes', 'email': 'eggs@spam.io'}]}, test=True) + + assert harvest_ctx._data['author[0].name'] == [ + ['Herr Mes', {'test': True, + 'timestamp': HermesContext.default_timestamp, + 'harvester': 'test_context_bulk_replace'}] + ] + assert harvest_ctx._data['author[0].email'] == [ + ['eggs@spam.io', {'test': True, + 'timestamp': HermesContext.default_timestamp, + 'harvester': 'test_context_bulk_replace'}] + ] + + +def test_context_bulk_append(harvest_ctx): + harvest_ctx.update('author[0].name', 'Monty Python', index=0) + harvest_ctx.update_from({'author': [{'name': 'Herr Mes', 'email': 'eggs@spam.io'}]}, index=1) + + assert harvest_ctx._data['author[0].name'] == [ + ['Monty Python', {'index': 0, + 'timestamp': HermesContext.default_timestamp, + 'harvester': 'test_context_bulk_append'}], + ['Herr Mes', {'index': 1, + 'timestamp': HermesContext.default_timestamp, + 'harvester': 'test_context_bulk_append'}] + ] + assert harvest_ctx._data['author[0].email'] == [ + ['eggs@spam.io', {'index': 1, + 'timestamp': HermesContext.default_timestamp, + 'harvester': 'test_context_bulk_append'}] + ] From 6bcdbe9fb4edae9ed8c1613dc8880fdf9fd8a2ae Mon Sep 17 00:00:00 2001 From: Stephan Druskat Date: Tue, 5 Aug 2025 09:39:57 +0200 Subject: [PATCH 055/247] Show skipped tests when running --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index bf19ccca..83704b5f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -115,7 +115,7 @@ norecursedirs = "docs/*" testpaths = [ "test" ] -addopts = "--cov=hermes --cov-report term" +addopts = "-r s --cov=hermes --cov-report term" [build-system] requires = ["poetry-core>=1.2.0"] From 68aec7f8bacc2f6de4c23d6737dad259c03f89bb Mon Sep 17 00:00:00 2001 From: Michael Meinel Date: Tue, 5 Aug 2025 22:35:44 +0200 Subject: [PATCH 056/247] Update poetry.lock for added test dependencies. --- poetry.lock | 39 ++++++++++++++++++++++++++++++++++++--- 1 file changed, 36 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index 47ce16c1..932f4b45 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 2.1.3 and should not be changed by hand. +# This file is automatically @generated by Poetry 2.1.2 and should not be changed by hand. [[package]] name = "accessible-pygments" @@ -914,7 +914,7 @@ version = "3.0.2" description = "Safely add untrusted strings to HTML/XML markup." optional = false python-versions = ">=3.9" -groups = ["docs"] +groups = ["dev", "docs"] files = [ {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8"}, {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158"}, @@ -1543,6 +1543,21 @@ pytest = ">=4.6" [package.extras] testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"] +[[package]] +name = "pytest-httpserver" +version = "1.1.3" +description = "pytest-httpserver is a httpserver for pytest" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "pytest_httpserver-1.1.3-py3-none-any.whl", hash = "sha256:5f84757810233e19e2bb5287f3826a71c97a3740abe3a363af9155c0f82fdbb9"}, + {file = "pytest_httpserver-1.1.3.tar.gz", hash = "sha256:af819d6b533f84b4680b9416a5b3f67f1df3701f1da54924afd4d6e4ba5917ec"}, +] + +[package.dependencies] +Werkzeug = ">=2.0.0" + [[package]] name = "python-dateutil" version = "2.9.0.post0" @@ -2370,6 +2385,24 @@ h2 = ["h2 (>=4,<5)"] socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] zstd = ["zstandard (>=0.18.0)"] +[[package]] +name = "werkzeug" +version = "3.1.3" +description = "The comprehensive WSGI web application library." +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "werkzeug-3.1.3-py3-none-any.whl", hash = "sha256:54b78bf3716d19a65be4fceccc0d1d7b89e608834989dfae50ea87564639213e"}, + {file = "werkzeug-3.1.3.tar.gz", hash = "sha256:60723ce945c19328679790e3282cc758aa4a6040e4bb330f53d30fa546d44746"}, +] + +[package.dependencies] +MarkupSafe = ">=2.1.1" + +[package.extras] +watchdog = ["watchdog (>=2.3)"] + [[package]] name = "wheel" version = "0.45.1" @@ -2477,4 +2510,4 @@ files = [ [metadata] lock-version = "2.1" python-versions = ">=3.10, <4.0.0" -content-hash = "58304fd33d6ec1ce3400b43ecffb16b3f48a5621e513c3e8057f9e3e050835e8" +content-hash = "53ac01822d5f15562af78d86245f1d7302873452f979cfd90d5c8bebed3d29a9" From ea081055c0b784df7ed2fe896bc966a510a2517b Mon Sep 17 00:00:00 2001 From: Michael Meinel Date: Tue, 5 Aug 2025 22:41:37 +0200 Subject: [PATCH 057/247] Add missing dependencies again. Update poetry.lock for added test dependencies. --- poetry.lock | 73 ++++++++++++++++++++++++++++++++++++++++++++++++-- pyproject.toml | 2 ++ 2 files changed, 73 insertions(+), 2 deletions(-) diff --git a/poetry.lock b/poetry.lock index 932f4b45..d09e8e97 100644 --- a/poetry.lock +++ b/poetry.lock @@ -700,6 +700,19 @@ files = [ {file = "iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7"}, ] +[[package]] +name = "isodate" +version = "0.7.2" +description = "An ISO 8601 date/time/duration parser and formatter" +optional = false +python-versions = ">=3.7" +groups = ["main"] +markers = "python_version == \"3.10\"" +files = [ + {file = "isodate-0.7.2-py3-none-any.whl", hash = "sha256:28009937d8031054830160fce6d409ed342816b543597cece116d966c6d99e15"}, + {file = "isodate-0.7.2.tar.gz", hash = "sha256:4cd1aa0f43ca76f4a6c6c0292a85f40b35ec2e43e315b59f06e6d32171a953e6"}, +] + [[package]] name = "jinja2" version = "3.1.6" @@ -1139,6 +1152,24 @@ files = [ dev = ["abi3audit", "black", "check-manifest", "coverage", "packaging", "pylint", "pyperf", "pypinfo", "pytest-cov", "requests", "rstcheck", "ruff", "sphinx", "sphinx_rtd_theme", "toml-sort", "twine", "virtualenv", "vulture", "wheel"] test = ["pytest", "pytest-xdist", "setuptools"] +[[package]] +name = "pyaml" +version = "25.7.0" +description = "PyYAML-based module to produce a bit more pretty and readable YAML-serialized data" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "pyaml-25.7.0-py3-none-any.whl", hash = "sha256:ce5d7867cc2b455efdb9b0448324ff7b9f74d99f64650f12ca570102db6b985f"}, + {file = "pyaml-25.7.0.tar.gz", hash = "sha256:e113a64ec16881bf2b092e2beb84b7dcf1bd98096ad17f5f14e8fb782a75d99b"}, +] + +[package.dependencies] +PyYAML = "*" + +[package.extras] +anchors = ["unidecode"] + [[package]] name = "pycodestyle" version = "2.9.1" @@ -1609,7 +1640,7 @@ version = "6.0.2" description = "YAML parser and emitter for Python" optional = false python-versions = ">=3.8" -groups = ["docs"] +groups = ["main", "docs"] files = [ {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, @@ -1666,6 +1697,29 @@ files = [ {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, ] +[[package]] +name = "rdflib" +version = "7.1.4" +description = "RDFLib is a Python library for working with RDF, a simple yet powerful language for representing information." +optional = false +python-versions = "<4.0.0,>=3.8.1" +groups = ["main"] +files = [ + {file = "rdflib-7.1.4-py3-none-any.whl", hash = "sha256:72f4adb1990fa5241abd22ddaf36d7cafa5d91d9ff2ba13f3086d339b213d997"}, + {file = "rdflib-7.1.4.tar.gz", hash = "sha256:fed46e24f26a788e2ab8e445f7077f00edcf95abb73bcef4b86cefa8b62dd174"}, +] + +[package.dependencies] +isodate = {version = ">=0.7.2,<1.0.0", markers = "python_version < \"3.11\""} +pyparsing = ">=2.1.0,<4" + +[package.extras] +berkeleydb = ["berkeleydb (>=18.1.0,<19.0.0)"] +html = ["html5rdf (>=1.2,<2)"] +lxml = ["lxml (>=4.3,<6.0)"] +networkx = ["networkx (>=2,<4)"] +orjson = ["orjson (>=3.9.14,<4)"] + [[package]] name = "requests" version = "2.32.4" @@ -1821,6 +1875,21 @@ files = [ {file = "ruamel.yaml.clib-0.2.12.tar.gz", hash = "sha256:6c8fbb13ec503f99a91901ab46e0b07ae7941cd527393187039aec586fdfd36f"}, ] +[[package]] +name = "schemaorg" +version = "0.1.1" +description = "Python functions for applied use of schema.org" +optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "schemaorg-0.1.1.tar.gz", hash = "sha256:567f1735df666221c893d2c206dd70f9cddcc983c8cdc39f3a7b7726884d2c51"}, +] + +[package.dependencies] +lxml = ">=4.1.1" +pyaml = ">=17.12.1" + [[package]] name = "setuptools" version = "80.9.0" @@ -2510,4 +2579,4 @@ files = [ [metadata] lock-version = "2.1" python-versions = ">=3.10, <4.0.0" -content-hash = "53ac01822d5f15562af78d86245f1d7302873452f979cfd90d5c8bebed3d29a9" +content-hash = "a8cbf610eb2e4405175914d22c8383c796ccb443724d04d2f3e58fcb08205e66" diff --git a/pyproject.toml b/pyproject.toml index a8fa06e9..d28f60fd 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -39,6 +39,8 @@ dependencies = [ "pydantic-settings>=2.1.0, <3.0.0", "requests-oauthlib>=2.0.0, <3.0.0", "pynacl>=1.5.0, <2.0.0", + "rdflib (>=7.1.4,<8.0.0)", + "schemaorg (>=0.1.1,<0.2.0)", ] requires-python = ">=3.10, <4.0.0" From 409b43a7c33f5ea5c9aee8e5d6f3b49e674ea792 Mon Sep 17 00:00:00 2001 From: Stephan Druskat Date: Fri, 1 Aug 2025 19:47:15 +0200 Subject: [PATCH 058/247] Add test file for `ld_dict` --- test/hermes_test/model/types/test_ld_dict.py | 4 ++++ 1 file changed, 4 insertions(+) create mode 100644 test/hermes_test/model/types/test_ld_dict.py diff --git a/test/hermes_test/model/types/test_ld_dict.py b/test/hermes_test/model/types/test_ld_dict.py new file mode 100644 index 00000000..b4ddda65 --- /dev/null +++ b/test/hermes_test/model/types/test_ld_dict.py @@ -0,0 +1,4 @@ +# SPDX-FileCopyrightText: 2025 German Aerospace Center (DLR) +# SPDX-FileContributor: Stephan Druskat +# +# SPDX-License-Identifier: Apache-2.0 From 79ffbc6955a6d38ea10290e9e4d9da56f5624806 Mon Sep 17 00:00:00 2001 From: Stephan Druskat Date: Thu, 7 Aug 2025 09:45:32 +0200 Subject: [PATCH 059/247] Fix typos in docstring --- src/hermes/model/types/ld_container.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/hermes/model/types/ld_container.py b/src/hermes/model/types/ld_container.py index fd84e033..b37a8019 100644 --- a/src/hermes/model/types/ld_container.py +++ b/src/hermes/model/types/ld_container.py @@ -11,9 +11,9 @@ class ld_container: """ Base class for Linked Data containers. - A linked data container impelements a view on the expanded form of an JSON-LD document. - It allows to easily interacts them by hinding all the nesting and automatically mapping - between different forms. + A linked data container implements a view on the expanded form of an JSON-LD document. + It allows to easily interact with such documents by hiding all the nesting and + automatically mapping between different forms. """ ld_proc = JsonLdProcessor() From bb7741ba96071716f2805d94cdd57f18eb0d67bf Mon Sep 17 00:00:00 2001 From: "Kernchen, Sophie" Date: Thu, 7 Aug 2025 09:48:40 +0200 Subject: [PATCH 060/247] Add missing license header --- test/hermes_test/model/types/test_pyld_util.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/test/hermes_test/model/types/test_pyld_util.py b/test/hermes_test/model/types/test_pyld_util.py index fa4e539d..1a1c6f4d 100644 --- a/test/hermes_test/model/types/test_pyld_util.py +++ b/test/hermes_test/model/types/test_pyld_util.py @@ -1,3 +1,9 @@ +# SPDX-FileCopyrightText: 2025 German Aerospace Center (DLR) +# +# SPDX-License-Identifier: Apache-2.0 + +# SPDX-FileContributor: Michael Meinel + import pytest from hermes.model.types import pyld_util From 78af0b5469a7acbc4fd366085ed3f0899548e588 Mon Sep 17 00:00:00 2001 From: Stephan Druskat Date: Thu, 7 Aug 2025 10:37:34 +0200 Subject: [PATCH 061/247] Use correct field name in tests --- test/hermes_test/model/types/test_ld_context.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/test/hermes_test/model/types/test_ld_context.py b/test/hermes_test/model/types/test_ld_context.py index 03fd1cf3..4964c062 100644 --- a/test/hermes_test/model/types/test_ld_context.py +++ b/test/hermes_test/model/types/test_ld_context.py @@ -18,13 +18,13 @@ def ctx(): def test_ctx(): ctx = ContextPrefix(["u1", {"2": "u2"}]) - assert ctx.prefix[None] == "u1" - assert ctx.prefix["2"] == "u2" + assert ctx.context[None] == "u1" + assert ctx.context["2"] == "u2" def test_codemeta_prefix(ctx): """Default vocabulary in context has the correct base IRI.""" - assert ctx.prefix[None] == "https://codemeta.github.io/terms/" + assert ctx.context[None] == "https://codemeta.github.io/terms/" def test_get_codemeta_item(ctx): From c1a04485928181482ae2a49a8c8cf1ada0eb9940 Mon Sep 17 00:00:00 2001 From: "Kernchen, Sophie" Date: Thu, 7 Aug 2025 10:45:47 +0200 Subject: [PATCH 062/247] Fix KeyError and first Tests for ld_list --- src/hermes/model/types/ld_list.py | 2 +- test/hermes_test/model/types/test_ld_list.py | 22 ++++++++++++++++++++ 2 files changed, 23 insertions(+), 1 deletion(-) create mode 100644 test/hermes_test/model/types/test_ld_list.py diff --git a/src/hermes/model/types/ld_list.py b/src/hermes/model/types/ld_list.py index 62a7e5f3..c2c68e4d 100644 --- a/src/hermes/model/types/ld_list.py +++ b/src/hermes/model/types/ld_list.py @@ -27,7 +27,7 @@ def __init__(self, data, *, parent=None, key=None, index=None, context=None): self.container = container break else: - raise ValueError(f"Unexpected dict: {self.data}") + raise ValueError(f"Unexpected dict: {data}") def __getitem__(self, index): if isinstance(index, slice): diff --git a/test/hermes_test/model/types/test_ld_list.py b/test/hermes_test/model/types/test_ld_list.py new file mode 100644 index 00000000..00f054c9 --- /dev/null +++ b/test/hermes_test/model/types/test_ld_list.py @@ -0,0 +1,22 @@ +# SPDX-FileCopyrightText: 2025 German Aerospace Center (DLR) +# +# SPDX-License-Identifier: Apache-2.0 + +# SPDX-FileContributor: Sophie Kernchen + +import pytest + +from hermes.model.types.ld_list import ld_list + + +def test_undefined_list(): + with pytest.raises(ValueError): + li = ld_list([{"spam": [{"@value": "bacon"}]}]) + +@pytest.mark.dev +def test_list_basics(): + li = ld_list([{"@list": [0], "spam": [{"@value": "bacon"}]}]) + assert li._data == [{"@list": [0], "spam": [{"@value": "bacon"}]}] + assert li.container == '@list' + assert li.item_list == [0] + From 4cb2af04ca5eb0caab54d22558444c6883b55b55 Mon Sep 17 00:00:00 2001 From: Stephan Druskat Date: Thu, 7 Aug 2025 11:11:36 +0200 Subject: [PATCH 063/247] Mark test as expectedly failing due to #419 --- test/hermes_test/model/types/test_ld_context.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/test/hermes_test/model/types/test_ld_context.py b/test/hermes_test/model/types/test_ld_context.py index 4964c062..4481769e 100644 --- a/test/hermes_test/model/types/test_ld_context.py +++ b/test/hermes_test/model/types/test_ld_context.py @@ -22,6 +22,8 @@ def test_ctx(): assert ctx.context["2"] == "u2" +@pytest.mark.xfail(raises=AssertionError, reason="Currently, the wrong CodeMeta IRI is used in the implementation: " + "https://github.com/softwarepub/hermes/issues/419") def test_codemeta_prefix(ctx): """Default vocabulary in context has the correct base IRI.""" assert ctx.context[None] == "https://codemeta.github.io/terms/" From 32c41f100e3006155434e04142c619971c10baa5 Mon Sep 17 00:00:00 2001 From: Stephan Druskat Date: Thu, 7 Aug 2025 11:24:39 +0200 Subject: [PATCH 064/247] Make passing xfail tests fail test suite --- pyproject.toml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/pyproject.toml b/pyproject.toml index d28f60fd..49b64ee7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -111,7 +111,9 @@ flake8 = "poetry run flake8 ./test/ ./src/ --count --select=E9,F63,F7,F82 --stat [tool.pytest.ini_options] +minversion = "6.0" norecursedirs = "docs/*" +xfail_strict = true testpaths = [ "test" ] From 9e7ecf587917a80c1af544cdd9f410d8e1c14089 Mon Sep 17 00:00:00 2001 From: Stephan Druskat Date: Thu, 7 Aug 2025 11:27:09 +0200 Subject: [PATCH 065/247] Mark another test as expectedly failing due to #419 --- test/hermes_test/model/types/test_ld_context.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/test/hermes_test/model/types/test_ld_context.py b/test/hermes_test/model/types/test_ld_context.py index 4481769e..d4620f49 100644 --- a/test/hermes_test/model/types/test_ld_context.py +++ b/test/hermes_test/model/types/test_ld_context.py @@ -29,6 +29,9 @@ def test_codemeta_prefix(ctx): assert ctx.context[None] == "https://codemeta.github.io/terms/" +@pytest.mark.xfail(raises=AssertionError, reason="Currently, the wrong CodeMeta IRI is used in the implementation," + "so expanding terms doesn't work correctly, see " + "https://github.com/softwarepub/hermes/issues/419") def test_get_codemeta_item(ctx): """Context returns fully expanded terms for default vocabulary in the context.""" item = ctx["maintainer"] From d4e23665b51fb40053306ec1634fb03b6aefc941 Mon Sep 17 00:00:00 2001 From: Stephan Druskat Date: Thu, 7 Aug 2025 12:48:48 +0200 Subject: [PATCH 066/247] Parametrize xfailing test getting CodeMeta items, re-format --- .../model/types/test_ld_context.py | 20 ++++++++++++------- 1 file changed, 13 insertions(+), 7 deletions(-) diff --git a/test/hermes_test/model/types/test_ld_context.py b/test/hermes_test/model/types/test_ld_context.py index d4620f49..400923c2 100644 --- a/test/hermes_test/model/types/test_ld_context.py +++ b/test/hermes_test/model/types/test_ld_context.py @@ -22,19 +22,25 @@ def test_ctx(): assert ctx.context["2"] == "u2" -@pytest.mark.xfail(raises=AssertionError, reason="Currently, the wrong CodeMeta IRI is used in the implementation: " - "https://github.com/softwarepub/hermes/issues/419") +@pytest.mark.xfail( + raises=AssertionError, + reason="Currently, the wrong CodeMeta IRI is used in the implementation: " + "https://github.com/softwarepub/hermes/issues/419", +) def test_codemeta_prefix(ctx): """Default vocabulary in context has the correct base IRI.""" assert ctx.context[None] == "https://codemeta.github.io/terms/" -@pytest.mark.xfail(raises=AssertionError, reason="Currently, the wrong CodeMeta IRI is used in the implementation," - "so expanding terms doesn't work correctly, see " - "https://github.com/softwarepub/hermes/issues/419") -def test_get_codemeta_item(ctx): +@pytest.mark.xfail( + raises=AssertionError, + reason="Currently, the wrong CodeMeta IRI is used in the implementation, so expanding terms doesn't work correctly, " + "see https://github.com/softwarepub/hermes/issues/419", +) +@pytest.mark.parametrize("compacted", ["maintainer", (None, "maintainer")]) +def test_get_item_from_default_vocabulary_pass(ctx, compacted): """Context returns fully expanded terms for default vocabulary in the context.""" - item = ctx["maintainer"] + item = ctx[compacted] assert item == "https://codemeta.github.io/terms/maintainer" From 1f4501c73e3734e4e3c25ad878e3578a2fe30fee Mon Sep 17 00:00:00 2001 From: Stephan Druskat Date: Thu, 7 Aug 2025 12:51:01 +0200 Subject: [PATCH 067/247] Test correct inputs for getting expanded terms from prefixed vocabularies --- test/hermes_test/model/types/test_ld_context.py | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/test/hermes_test/model/types/test_ld_context.py b/test/hermes_test/model/types/test_ld_context.py index 400923c2..e328b539 100644 --- a/test/hermes_test/model/types/test_ld_context.py +++ b/test/hermes_test/model/types/test_ld_context.py @@ -52,10 +52,18 @@ def test_get_item_from_default_vocabulary_pass(ctx, compacted): "hermes:semanticVersion", "https://schema.software-metadata.pub/hermes-content/1.0/semanticVersion", # TODO: Change on #393 fix ), + (("schema", "Organization"), "http://schema.org/Organization"), + ( + ("hermes", "semanticVersion"), + "https://schema.software-metadata.pub/hermes-content/1.0/semanticVersion", + ), # TODO: Change on #393 fix ], ) -def test_get_prefixed_items(ctx, compacted, expanded): - """Context returns fully expanded terms for prefixed vocabularies in the context.""" +def test_get_item_from_prefixed_vocabulary_pass(ctx, compacted, expanded): + """ + Context returns fully expanded terms for prefixed vocabularies in the context, + for all accepted parameter formats. + """ item = ctx[compacted] assert item == expanded From 5512f7ece2f01f0447c0133a11a6a80e6b378936 Mon Sep 17 00:00:00 2001 From: Stephan Druskat Date: Thu, 7 Aug 2025 12:52:48 +0200 Subject: [PATCH 068/247] Test raises when prefix doesn't exist for compacted input --- .../model/types/test_ld_context.py | 21 ++++++++++++++++--- 1 file changed, 18 insertions(+), 3 deletions(-) diff --git a/test/hermes_test/model/types/test_ld_context.py b/test/hermes_test/model/types/test_ld_context.py index e328b539..a0e2dd5e 100644 --- a/test/hermes_test/model/types/test_ld_context.py +++ b/test/hermes_test/model/types/test_ld_context.py @@ -68,9 +68,24 @@ def test_get_item_from_prefixed_vocabulary_pass(ctx, compacted, expanded): assert item == expanded -def test_get_protocol_items_pass(ctx): - item = ctx["https://schema.org/Organisation"] - assert item == "https://schema.org/Organisation" +@pytest.mark.parametrize( + "not_exist", + [ + "https://foo.bar/baz", + "foobar:baz", + ("foobar", "baz"), + ], +) +def test_get_item_from_prefixed_vocabulary_raises_on_prefix_not_exist(ctx, not_exist): + """ + Tests that an exception is raised when trying to get compacted items for which there is no + prefixed vocabulary in the context, and that the raised exception is not raised due to side effects. + """ + with pytest.raises(Exception) as e: # FIXME: Replace with custom error + ctx[not_exist] + assert "cannot access local variable" not in str( + e.value + ) def test_get_protocol_items_fail(ctx): From 3262691e2132bd8b695c39c2c16eb9354c002c42 Mon Sep 17 00:00:00 2001 From: Stephan Druskat Date: Thu, 7 Aug 2025 12:53:13 +0200 Subject: [PATCH 069/247] Test raises when term doesn't exist for compacted input --- .../model/types/test_ld_context.py | 25 ++++++++++++++++--- 1 file changed, 21 insertions(+), 4 deletions(-) diff --git a/test/hermes_test/model/types/test_ld_context.py b/test/hermes_test/model/types/test_ld_context.py index a0e2dd5e..02595d74 100644 --- a/test/hermes_test/model/types/test_ld_context.py +++ b/test/hermes_test/model/types/test_ld_context.py @@ -88,10 +88,27 @@ def test_get_item_from_prefixed_vocabulary_raises_on_prefix_not_exist(ctx, not_e ) -def test_get_protocol_items_fail(ctx): - with pytest.raises(Exception) as e: - ctx["https://foo.bar/baz"] - assert "cannot access local variable" not in str(e.value) # FIXME: Replace with custom error +@pytest.mark.parametrize( + "not_exist", + [ + "baz", + "hermes:baz", + "schema:baz", + (None, "baz"), + ("hermes", "baz"), + ("schema", "baz"), + ], +) +def test_get_item_item_from_prefixed_vocabulary_raises_on_term_not_exist(ctx, not_exist): + """ + Tests that an exception is raised when trying to get compacted items for which the vocabulary exists, + but doesn't contain the requested term, and that the raised exception is not raised due to side effects. + """ + with pytest.raises(Exception) as e: # FIXME: Replace with custom error + ctx[not_exist] + assert "cannot access local variable" not in str( + e.value + ) @pytest.mark.parametrize( From 241f914af1e1932db54cda6ba7dff8c3d1837d27 Mon Sep 17 00:00:00 2001 From: Stephan Druskat Date: Thu, 7 Aug 2025 12:59:13 +0200 Subject: [PATCH 070/247] Remove duplicate tests, and not-to-be-tested parameters - Remove duplicate tests of __get_item__ on tuples - This includes test parameters that we decided not to test, as the API doesn't see them as valid (list inputs), although they're supported at runtime. - Related: https://github.com/softwarepub/hermes/pull/403#discussion_r2245059253 --- test/hermes_test/model/types/test_ld_context.py | 14 -------------- 1 file changed, 14 deletions(-) diff --git a/test/hermes_test/model/types/test_ld_context.py b/test/hermes_test/model/types/test_ld_context.py index 02595d74..e44cace5 100644 --- a/test/hermes_test/model/types/test_ld_context.py +++ b/test/hermes_test/model/types/test_ld_context.py @@ -111,20 +111,6 @@ def test_get_item_item_from_prefixed_vocabulary_raises_on_term_not_exist(ctx, no ) -@pytest.mark.parametrize( - "compacted,expanded", - [ - ([None, "maintainer"], "https://codemeta.github.io/terms/maintainer"), - (["schema", "Organization"], "http://schema.org/Organization"), - ((None, "maintainer"), "https://codemeta.github.io/terms/maintainer"), - (("schema", "Organization"), "http://schema.org/Organization"), - ], -) -def test_get_valid_non_str_items(ctx, compacted, expanded): - """Context returns fully expanded terms for valid non-string inputs.""" - assert ctx[compacted] == expanded - - @pytest.mark.parametrize( "non_str,error_type", [(0, TypeError), (None, TypeError), ([], ValueError), ({"foo"}, ValueError)], From ebe92008dd5b20f51c29ba82c3acee1986a6d619 Mon Sep 17 00:00:00 2001 From: Stephan Druskat Date: Thu, 7 Aug 2025 13:19:06 +0200 Subject: [PATCH 071/247] Improve reporting unexpected exceptions and add tests for expanded inputs --- .../model/types/test_ld_context.py | 31 ++++++++++++++----- 1 file changed, 23 insertions(+), 8 deletions(-) diff --git a/test/hermes_test/model/types/test_ld_context.py b/test/hermes_test/model/types/test_ld_context.py index e44cace5..33137b7c 100644 --- a/test/hermes_test/model/types/test_ld_context.py +++ b/test/hermes_test/model/types/test_ld_context.py @@ -71,7 +71,6 @@ def test_get_item_from_prefixed_vocabulary_pass(ctx, compacted, expanded): @pytest.mark.parametrize( "not_exist", [ - "https://foo.bar/baz", "foobar:baz", ("foobar", "baz"), ], @@ -83,9 +82,8 @@ def test_get_item_from_prefixed_vocabulary_raises_on_prefix_not_exist(ctx, not_e """ with pytest.raises(Exception) as e: # FIXME: Replace with custom error ctx[not_exist] - assert "cannot access local variable" not in str( - e.value - ) + if any(s in str(e.value) for s in ["cannot access local variable", "referenced before assignment"]): + pytest.fail("Unexpected exception raised not due to the expected cause.") @pytest.mark.parametrize( @@ -99,16 +97,33 @@ def test_get_item_from_prefixed_vocabulary_raises_on_prefix_not_exist(ctx, not_e ("schema", "baz"), ], ) -def test_get_item_item_from_prefixed_vocabulary_raises_on_term_not_exist(ctx, not_exist): +def test_get_item_from_prefixed_vocabulary_raises_on_term_not_exist(ctx, not_exist): """ Tests that an exception is raised when trying to get compacted items for which the vocabulary exists, but doesn't contain the requested term, and that the raised exception is not raised due to side effects. """ with pytest.raises(Exception) as e: # FIXME: Replace with custom error ctx[not_exist] - assert "cannot access local variable" not in str( - e.value - ) + if any(s in str(e.value) for s in ["cannot access local variable", "referenced before assignment"]): + pytest.fail("Unexpected exception raised not due to the expected cause.") + +@pytest.mark.parametrize("expanded", ["https://schema.org/Organisation", "https://schema.software-metadata.pub/hermes-content/1.0/semanticVersion"]) +def test_get_item_from_expanded_pass(ctx, expanded): + """ + Tests that getting items via their fully expanded terms works as expected. + """ + assert ctx[expanded] == expanded + + +def test_get_item_from_expanded_fail(ctx): + """ + Tests that context raises on unsupported expanded term input. + """ + with pytest.raises(Exception) as e: + ctx["https://foo.bar/baz"] + if any(s in str(e.value) for s in ["cannot access local variable", "referenced before assignment"]): + pytest.fail("Unexpected exception raised not due to the expected cause.") + @pytest.mark.parametrize( From de0f24a9f157bd54d77d7a420865910e154fc60d Mon Sep 17 00:00:00 2001 From: Stephan Druskat Date: Thu, 7 Aug 2025 13:29:26 +0200 Subject: [PATCH 072/247] Add test for unimplemented and undecided functionality --- .../model/types/test_ld_context.py | 37 +++++++++++++++---- 1 file changed, 30 insertions(+), 7 deletions(-) diff --git a/test/hermes_test/model/types/test_ld_context.py b/test/hermes_test/model/types/test_ld_context.py index 33137b7c..b4c9eee7 100644 --- a/test/hermes_test/model/types/test_ld_context.py +++ b/test/hermes_test/model/types/test_ld_context.py @@ -35,7 +35,7 @@ def test_codemeta_prefix(ctx): @pytest.mark.xfail( raises=AssertionError, reason="Currently, the wrong CodeMeta IRI is used in the implementation, so expanding terms doesn't work correctly, " - "see https://github.com/softwarepub/hermes/issues/419", + "see https://github.com/softwarepub/hermes/issues/419", ) @pytest.mark.parametrize("compacted", ["maintainer", (None, "maintainer")]) def test_get_item_from_default_vocabulary_pass(ctx, compacted): @@ -82,7 +82,10 @@ def test_get_item_from_prefixed_vocabulary_raises_on_prefix_not_exist(ctx, not_e """ with pytest.raises(Exception) as e: # FIXME: Replace with custom error ctx[not_exist] - if any(s in str(e.value) for s in ["cannot access local variable", "referenced before assignment"]): + if any( + s in str(e.value) + for s in ["cannot access local variable", "referenced before assignment"] + ): pytest.fail("Unexpected exception raised not due to the expected cause.") @@ -104,15 +107,33 @@ def test_get_item_from_prefixed_vocabulary_raises_on_term_not_exist(ctx, not_exi """ with pytest.raises(Exception) as e: # FIXME: Replace with custom error ctx[not_exist] - if any(s in str(e.value) for s in ["cannot access local variable", "referenced before assignment"]): + if any( + s in str(e.value) + for s in ["cannot access local variable", "referenced before assignment"] + ): pytest.fail("Unexpected exception raised not due to the expected cause.") -@pytest.mark.parametrize("expanded", ["https://schema.org/Organisation", "https://schema.software-metadata.pub/hermes-content/1.0/semanticVersion"]) + +@pytest.mark.parametrize( + "expanded", + [ + "https://codemeta.github.io/terms/maintainer", + "https://schema.org/Organisation", + "https://schema.software-metadata.pub/hermes-content/1.0/semanticVersion", + ], +) +@pytest.mark.xfail( + raises=NotImplementedError, + reason="Passing back expanded terms on their input if they are valid in the context " + "is not yet implemented (or decided).", +) def test_get_item_from_expanded_pass(ctx, expanded): """ Tests that getting items via their fully expanded terms works as expected. """ - assert ctx[expanded] == expanded + with pytest.raises(Exception) as e: + assert ctx[expanded] == expanded + raise NotImplementedError def test_get_item_from_expanded_fail(ctx): @@ -121,11 +142,13 @@ def test_get_item_from_expanded_fail(ctx): """ with pytest.raises(Exception) as e: ctx["https://foo.bar/baz"] - if any(s in str(e.value) for s in ["cannot access local variable", "referenced before assignment"]): + if any( + s in str(e.value) + for s in ["cannot access local variable", "referenced before assignment"] + ): pytest.fail("Unexpected exception raised not due to the expected cause.") - @pytest.mark.parametrize( "non_str,error_type", [(0, TypeError), (None, TypeError), ([], ValueError), ({"foo"}, ValueError)], From 3793e7ca4999652c3bf5675a5660bb283289655e Mon Sep 17 00:00:00 2001 From: Stephan Druskat Date: Thu, 7 Aug 2025 13:32:30 +0200 Subject: [PATCH 073/247] Report unexpectedly raised exception --- test/hermes_test/model/types/test_ld_context.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/test/hermes_test/model/types/test_ld_context.py b/test/hermes_test/model/types/test_ld_context.py index b4c9eee7..9213e257 100644 --- a/test/hermes_test/model/types/test_ld_context.py +++ b/test/hermes_test/model/types/test_ld_context.py @@ -86,7 +86,7 @@ def test_get_item_from_prefixed_vocabulary_raises_on_prefix_not_exist(ctx, not_e s in str(e.value) for s in ["cannot access local variable", "referenced before assignment"] ): - pytest.fail("Unexpected exception raised not due to the expected cause.") + pytest.fail(f"Unexpected exception raised not due to the expected cause: {e.value}.") @pytest.mark.parametrize( @@ -111,7 +111,7 @@ def test_get_item_from_prefixed_vocabulary_raises_on_term_not_exist(ctx, not_exi s in str(e.value) for s in ["cannot access local variable", "referenced before assignment"] ): - pytest.fail("Unexpected exception raised not due to the expected cause.") + pytest.fail(f"Unexpected exception raised not due to the expected cause: {e.value}.") @pytest.mark.parametrize( @@ -146,7 +146,7 @@ def test_get_item_from_expanded_fail(ctx): s in str(e.value) for s in ["cannot access local variable", "referenced before assignment"] ): - pytest.fail("Unexpected exception raised not due to the expected cause.") + pytest.fail(f"Unexpected exception raised not due to the expected cause: {e.value}.") @pytest.mark.parametrize( From 586201e06a6411888e5834b67492acda2d764ca4 Mon Sep 17 00:00:00 2001 From: Stephan Druskat Date: Thu, 7 Aug 2025 15:14:48 +0200 Subject: [PATCH 074/247] Rename module to match hermes.error --- src/hermes/commands/harvest/base.py | 4 ++-- src/hermes/model/{errors.py => error.py} | 0 2 files changed, 2 insertions(+), 2 deletions(-) rename src/hermes/model/{errors.py => error.py} (100%) diff --git a/src/hermes/commands/harvest/base.py b/src/hermes/commands/harvest/base.py index 460345ea..59fad8f1 100644 --- a/src/hermes/commands/harvest/base.py +++ b/src/hermes/commands/harvest/base.py @@ -12,7 +12,7 @@ from hermes.commands.base import HermesCommand, HermesPlugin from hermes.model.context import HermesContext, HermesHarvestContext -from hermes.model.errors import HermesValidationError, MergeError +from hermes.model.error import HermesValidationError, HermesMergeError class HermesHarvestPlugin(HermesPlugin): @@ -55,7 +55,7 @@ def __call__(self, args: argparse.Namespace) -> None: timestamp=datetime.now().isoformat(), **tags) for _key, ((_value, _tag), *_trace) in harvest_ctx._data.items(): if any(v != _value and t == _tag for v, t in _trace): - raise MergeError(_key, None, _value) + raise HermesMergeError(_key, None, _value) except KeyError as e: self.log.error("Plugin '%s' not found.", plugin_name) diff --git a/src/hermes/model/errors.py b/src/hermes/model/error.py similarity index 100% rename from src/hermes/model/errors.py rename to src/hermes/model/error.py From e99d930023428255c4bb55cb97a062133f6c6716 Mon Sep 17 00:00:00 2001 From: Stephan Druskat Date: Thu, 7 Aug 2025 15:16:25 +0200 Subject: [PATCH 075/247] Define context error, fix bad naming --- src/hermes/model/error.py | 23 ++++++++++++++++++++--- 1 file changed, 20 insertions(+), 3 deletions(-) diff --git a/src/hermes/model/error.py b/src/hermes/model/error.py index f84d74bb..1da95943 100644 --- a/src/hermes/model/error.py +++ b/src/hermes/model/error.py @@ -3,6 +3,7 @@ # SPDX-License-Identifier: Apache-2.0 # SPDX-FileContributor: Michael Meinel +# SPDX-FileContributor: Stephan Druskat import typing as t @@ -20,15 +21,31 @@ class HermesValidationError(Exception): except ValueError as e: raise HermesValidationError(src_file) from e """ + pass + + +class HermesContextError(Exception): + """ + This exception should be thrown when interacting with the model context. + # TODO Change class name and docstring if we decide to call it differently + # TODO in https://github.com/softwarepub/hermes/issues/392. + + To be able to track and fix the error, you should use this in conjunction with the original exception if applicable: + .. code:: python + try: + context[term] + except ValueError as e: + raise HermesContextError(term) from e + """ pass -class MergeError(Exception): +class HermesMergeError(Exception): """ This exception should be raised when there is an error during a merge / set operation. """ - def __init__(self, path: t.List[str | int], old_Value: t.Any, new_value: t.Any, **kwargs): + def __init__(self, path: t.List[str | int], old_value: t.Any, new_value: t.Any, **kwargs): """ Create a new merge incident. @@ -38,7 +55,7 @@ def __init__(self, path: t.List[str | int], old_Value: t.Any, new_value: t.Any, :param kwargs: Tag data for the new value. """ self.path = path - self.old_value = old_Value + self.old_value = old_value self.new_value = new_value self.tag = kwargs super().__init__(f'Error merging {self.path} (ambiguous values "{self.old_value}" and "{self.new_value}")') From 0260ab5a894c97fae2eb1dd8f5b4aa5a92cd662e Mon Sep 17 00:00:00 2001 From: Stephan Druskat Date: Thu, 7 Aug 2025 15:20:35 +0200 Subject: [PATCH 076/247] Rename error (add prefix) --- src/hermes/commands/deposit/invenio.py | 2 +- src/hermes/error.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/hermes/commands/deposit/invenio.py b/src/hermes/commands/deposit/invenio.py index e88066c6..69fb87a0 100644 --- a/src/hermes/commands/deposit/invenio.py +++ b/src/hermes/commands/deposit/invenio.py @@ -612,7 +612,7 @@ def _get_community_identifiers(self): This function gets the communities to be used for the deposition on an Invenio-based site from the config and checks their validity against the site's API. If one of the - identifiers can not be found on the site, a :class:`MisconfigurationError` is + identifiers can not be found on the site, a :class:`HermesMisconfigurationError` is raised. """ diff --git a/src/hermes/error.py b/src/hermes/error.py index 1669ed39..e56c2499 100644 --- a/src/hermes/error.py +++ b/src/hermes/error.py @@ -4,5 +4,5 @@ # SPDX-FileContributor: David Pape -class MisconfigurationError(Exception): +class HermesMisconfigurationError(Exception): pass From 6c4a523bf43806b3ea38e610d965c6f6d499443e Mon Sep 17 00:00:00 2001 From: Stephan Druskat Date: Thu, 7 Aug 2025 16:00:30 +0200 Subject: [PATCH 077/247] Raise new error when getting term whose prafix is not in context --- src/hermes/model/types/ld_context.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/src/hermes/model/types/ld_context.py b/src/hermes/model/types/ld_context.py index 9bb8209a..64aac256 100644 --- a/src/hermes/model/types/ld_context.py +++ b/src/hermes/model/types/ld_context.py @@ -5,6 +5,7 @@ # SPDX-FileContributor: Michael Meinel # SPDX-FileContributor: Stephan Druskat +from hermes.model.error import HermesContextError CODEMETA_PREFIX = "https://doi.org/10.5063/schema/codemeta-2.0" CODEMETA_CONTEXT = [CODEMETA_PREFIX] @@ -91,10 +92,12 @@ def __getitem__(self, compressed_term: str | tuple) -> str: else: prefix, term = None, compressed_term - if prefix in self.context: - iri = self.context[prefix] + term + try: + base_iri = self.context[prefix] + except KeyError as ke: + raise HermesContextError(prefix) from ke - return iri + return base_iri + term iri_map = ContextPrefix(ALL_CONTEXTS) From 124231175e4f045eca36647d8082166524aac214 Mon Sep 17 00:00:00 2001 From: Stephan Druskat Date: Thu, 7 Aug 2025 16:03:26 +0200 Subject: [PATCH 078/247] Test that non-existent prefix raises error on getting item --- .../model/types/test_ld_context.py | 31 +++++++++++-------- 1 file changed, 18 insertions(+), 13 deletions(-) diff --git a/test/hermes_test/model/types/test_ld_context.py b/test/hermes_test/model/types/test_ld_context.py index 9213e257..ec62b808 100644 --- a/test/hermes_test/model/types/test_ld_context.py +++ b/test/hermes_test/model/types/test_ld_context.py @@ -10,6 +10,8 @@ ALL_CONTEXTS, ) +from hermes.model.error import HermesContextError + @pytest.fixture def ctx(): @@ -69,24 +71,25 @@ def test_get_item_from_prefixed_vocabulary_pass(ctx, compacted, expanded): @pytest.mark.parametrize( - "not_exist", + "prefix,not_exist", [ - "foobar:baz", - ("foobar", "baz"), + ("foobar", item) + for item in [ + "foobar:baz", + ("foobar", "baz"), + ] ], ) -def test_get_item_from_prefixed_vocabulary_raises_on_prefix_not_exist(ctx, not_exist): +def test_get_item_from_prefixed_vocabulary_raises_on_prefix_not_exist( + ctx, prefix, not_exist +): """ Tests that an exception is raised when trying to get compacted items for which there is no - prefixed vocabulary in the context, and that the raised exception is not raised due to side effects. + prefixed vocabulary in the context. """ - with pytest.raises(Exception) as e: # FIXME: Replace with custom error - ctx[not_exist] - if any( - s in str(e.value) - for s in ["cannot access local variable", "referenced before assignment"] - ): - pytest.fail(f"Unexpected exception raised not due to the expected cause: {e.value}.") + with pytest.raises(HermesContextError) as hce: + _ = ctx[not_exist] + assert str(hce.value) == prefix @pytest.mark.parametrize( @@ -146,7 +149,9 @@ def test_get_item_from_expanded_fail(ctx): s in str(e.value) for s in ["cannot access local variable", "referenced before assignment"] ): - pytest.fail(f"Unexpected exception raised not due to the expected cause: {e.value}.") + pytest.fail( + f"Unexpected exception raised not due to the expected cause: {e.value}." + ) @pytest.mark.parametrize( From 9621e77878c10324784e112322d61a6e569a915e Mon Sep 17 00:00:00 2001 From: Stephan Druskat Date: Thu, 7 Aug 2025 16:13:08 +0200 Subject: [PATCH 079/247] Add xfailing test for returning only existing terms from given vocabulary --- .../model/types/test_ld_context.py | 41 +++++++++++-------- 1 file changed, 24 insertions(+), 17 deletions(-) diff --git a/test/hermes_test/model/types/test_ld_context.py b/test/hermes_test/model/types/test_ld_context.py index ec62b808..44194f43 100644 --- a/test/hermes_test/model/types/test_ld_context.py +++ b/test/hermes_test/model/types/test_ld_context.py @@ -93,28 +93,35 @@ def test_get_item_from_prefixed_vocabulary_raises_on_prefix_not_exist( @pytest.mark.parametrize( - "not_exist", + "term,not_exist", [ - "baz", - "hermes:baz", - "schema:baz", - (None, "baz"), - ("hermes", "baz"), - ("schema", "baz"), + ("baz", item) + for item in [ + "baz", + "hermes:baz", + "schema:baz", + (None, "baz"), + ("hermes", "baz"), + ("schema", "baz"), + ] ], ) -def test_get_item_from_prefixed_vocabulary_raises_on_term_not_exist(ctx, not_exist): +@pytest.mark.xfail( + raises=NotImplementedError, + reason="Not yet implemented/decided: Check if terms exist in given vocabulary.", +) +def test_get_item_from_prefixed_vocabulary_raises_on_term_not_exist( + ctx, term, not_exist +): """ Tests that an exception is raised when trying to get compacted items for which the vocabulary exists, - but doesn't contain the requested term, and that the raised exception is not raised due to side effects. + but doesn't contain the requested term. """ - with pytest.raises(Exception) as e: # FIXME: Replace with custom error - ctx[not_exist] - if any( - s in str(e.value) - for s in ["cannot access local variable", "referenced before assignment"] - ): - pytest.fail(f"Unexpected exception raised not due to the expected cause: {e.value}.") + with pytest.raises(HermesContextError) as hce: + _ = ctx[not_exist] + with pytest.raises(Exception): + assert str(hce.value) == term + raise NotImplementedError @pytest.mark.parametrize( @@ -128,7 +135,7 @@ def test_get_item_from_prefixed_vocabulary_raises_on_term_not_exist(ctx, not_exi @pytest.mark.xfail( raises=NotImplementedError, reason="Passing back expanded terms on their input if they are valid in the context " - "is not yet implemented (or decided).", + "is not yet implemented (or decided).", ) def test_get_item_from_expanded_pass(ctx, expanded): """ From 9a980b4783924af4e086980dd5bec6a4a498e5b2 Mon Sep 17 00:00:00 2001 From: Stephan Druskat Date: Thu, 7 Aug 2025 16:27:12 +0200 Subject: [PATCH 080/247] Test raising context errors where implemented --- .../model/types/test_ld_context.py | 38 ++++++++++--------- 1 file changed, 21 insertions(+), 17 deletions(-) diff --git a/test/hermes_test/model/types/test_ld_context.py b/test/hermes_test/model/types/test_ld_context.py index 44194f43..a6f61b13 100644 --- a/test/hermes_test/model/types/test_ld_context.py +++ b/test/hermes_test/model/types/test_ld_context.py @@ -150,15 +150,8 @@ def test_get_item_from_expanded_fail(ctx): """ Tests that context raises on unsupported expanded term input. """ - with pytest.raises(Exception) as e: + with pytest.raises(HermesContextError) as e: ctx["https://foo.bar/baz"] - if any( - s in str(e.value) - for s in ["cannot access local variable", "referenced before assignment"] - ): - pytest.fail( - f"Unexpected exception raised not due to the expected cause: {e.value}." - ) @pytest.mark.parametrize( @@ -175,20 +168,31 @@ def test_get_non_str_item_fail(ctx, non_str, error_type): "item", [ "", - "fooBar", + pytest.param( + "fooBar", + marks=pytest.mark.xfail( + reason="Not yet implemented/decided: Check if terms exist in given vocabulary." + ), + ), [0, "foo"], (0, "foo"), {"foo": "bar", "baz": "foo"}, - "schema:fooBar", - "hermes:fooBar", + pytest.param( + "schema:fooBar", + marks=pytest.mark.xfail( + reason="Not yet implemented/decided: Check if terms exist in given vocabulary." + ), + ), + pytest.param( + "hermes:fooBar", + marks=pytest.mark.xfail( + reason="Not yet implemented/decided: Check if terms exist in given vocabulary." + ), + ), "codemeta:maintainer", # Prefixed CodeMeta doesn't exist in context - # Even a dict with valid terms should fail, as it is unclear what to expect - {None: "maintainer", "schema": "Organization"}, ], ) def test_get_item_validate_fail(ctx, item): - """Context raises on terms that don't exist in the context.""" - with pytest.raises( - Exception - ): # FIXME: Replace with custom error, e.g., hermes.model.errors.InvalidTermException + """Context raises on theoretically valid compressed terms that don't exist in the context.""" + with pytest.raises(HermesContextError): ctx[item] From b808152b2d720ab6f7c0a0ab50ff83ee00c8a349 Mon Sep 17 00:00:00 2001 From: Stephan Druskat Date: Thu, 7 Aug 2025 16:34:38 +0200 Subject: [PATCH 081/247] Raise context error on empty string, reformat --- src/hermes/model/types/ld_context.py | 45 ++++++++++++++++++---------- 1 file changed, 30 insertions(+), 15 deletions(-) diff --git a/src/hermes/model/types/ld_context.py b/src/hermes/model/types/ld_context.py index 64aac256..3d60bb41 100644 --- a/src/hermes/model/types/ld_context.py +++ b/src/hermes/model/types/ld_context.py @@ -16,16 +16,26 @@ PROV_PREFIX = "http://www.w3.org/ns/prov#" PROV_CONTEXT = [{"prov": PROV_PREFIX}] -HERMES_RT_PREFIX = 'https://schema.software-metadata.pub/hermes-runtime/1.0/' -HERMES_RT_CONTEXT = [{'hermes-rt': HERMES_RT_PREFIX}] -HERMES_CONTENT_CONTEXT = [{'hermes': 'https://schema.software-metadata.pub/hermes-content/1.0/'}] +HERMES_RT_PREFIX = "https://schema.software-metadata.pub/hermes-runtime/1.0/" +HERMES_RT_CONTEXT = [{"hermes-rt": HERMES_RT_PREFIX}] +HERMES_CONTENT_CONTEXT = [ + {"hermes": "https://schema.software-metadata.pub/hermes-content/1.0/"} +] HERMES_CONTEXT = [{**HERMES_RT_CONTEXT[0], **HERMES_CONTENT_CONTEXT[0]}] -HERMES_BASE_CONTEXT = [*CODEMETA_CONTEXT, {**SCHEMA_ORG_CONTEXT[0], **HERMES_CONTENT_CONTEXT[0]}] -HERMES_PROV_CONTEXT = [{**SCHEMA_ORG_CONTEXT[0], **HERMES_RT_CONTEXT[0], **PROV_CONTEXT[0]}] +HERMES_BASE_CONTEXT = [ + *CODEMETA_CONTEXT, + {**SCHEMA_ORG_CONTEXT[0], **HERMES_CONTENT_CONTEXT[0]}, +] +HERMES_PROV_CONTEXT = [ + {**SCHEMA_ORG_CONTEXT[0], **HERMES_RT_CONTEXT[0], **PROV_CONTEXT[0]} +] -ALL_CONTEXTS = [*CODEMETA_CONTEXT, {**SCHEMA_ORG_CONTEXT[0], **PROV_CONTEXT[0], **HERMES_CONTEXT[0]}] +ALL_CONTEXTS = [ + *CODEMETA_CONTEXT, + {**SCHEMA_ORG_CONTEXT[0], **PROV_CONTEXT[0], **HERMES_CONTEXT[0]}, +] class ContextPrefix: @@ -38,6 +48,7 @@ class ContextPrefix: arbitrary strings used to prefix terms from a specific vocabulary to their respective vocabulary IRI strings.; - as a dict mapping prefixes to vocabulary IRIs, where the default vocabulary has a prefix of None. """ + def __init__(self, vocabularies: list[str | dict]): """ @param vocabularies: A list of linked data vocabularies. Items can be vocabulary base IRI strings and/or @@ -55,11 +66,13 @@ def __init__(self, vocabularies: list[str | dict]): if isinstance(vocab, str): vocab = {None: vocab} - self.context.update({ - prefix: base_iri - for prefix, base_iri in vocab.items() - if isinstance(base_iri, str) - }) + self.context.update( + { + prefix: base_iri + for prefix, base_iri in vocab.items() + if isinstance(base_iri, str) + } + ) def __getitem__(self, compressed_term: str | tuple) -> str: """ @@ -85,12 +98,14 @@ def __getitem__(self, compressed_term: str | tuple) -> str: """ if not isinstance(compressed_term, str): prefix, term = compressed_term - elif ':' in compressed_term: - prefix, term = compressed_term.split(':', 1) - if term.startswith('://'): + elif ":" in compressed_term: + prefix, term = compressed_term.split(":", 1) + if term.startswith("://"): prefix, term = True, compressed_term - else: + elif compressed_term != "": prefix, term = None, compressed_term + else: + raise HermesContextError(compressed_term) try: base_iri = self.context[prefix] From 0d61163ef944d58aa08174af429b55bf82dfd82f Mon Sep 17 00:00:00 2001 From: Stephan Druskat Date: Thu, 7 Aug 2025 16:41:32 +0200 Subject: [PATCH 082/247] Pacify flake8 --- test/hermes_test/model/types/test_ld_context.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/test/hermes_test/model/types/test_ld_context.py b/test/hermes_test/model/types/test_ld_context.py index a6f61b13..2c155b6f 100644 --- a/test/hermes_test/model/types/test_ld_context.py +++ b/test/hermes_test/model/types/test_ld_context.py @@ -36,8 +36,8 @@ def test_codemeta_prefix(ctx): @pytest.mark.xfail( raises=AssertionError, - reason="Currently, the wrong CodeMeta IRI is used in the implementation, so expanding terms doesn't work correctly, " - "see https://github.com/softwarepub/hermes/issues/419", + reason="Currently, the wrong CodeMeta IRI is used in the implementation, so expanding terms doesn't work correctly," + " see https://github.com/softwarepub/hermes/issues/419", ) @pytest.mark.parametrize("compacted", ["maintainer", (None, "maintainer")]) def test_get_item_from_default_vocabulary_pass(ctx, compacted): @@ -141,7 +141,7 @@ def test_get_item_from_expanded_pass(ctx, expanded): """ Tests that getting items via their fully expanded terms works as expected. """ - with pytest.raises(Exception) as e: + with pytest.raises(Exception): assert ctx[expanded] == expanded raise NotImplementedError @@ -150,7 +150,7 @@ def test_get_item_from_expanded_fail(ctx): """ Tests that context raises on unsupported expanded term input. """ - with pytest.raises(HermesContextError) as e: + with pytest.raises(HermesContextError): ctx["https://foo.bar/baz"] From e9ca17599c64d56f33af45ff2ffe37674b327676 Mon Sep 17 00:00:00 2001 From: Stephan Druskat Date: Thu, 7 Aug 2025 16:44:38 +0200 Subject: [PATCH 083/247] Satisfy REUSE --- test/hermes_test/model/types/test_pyld_util.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/test/hermes_test/model/types/test_pyld_util.py b/test/hermes_test/model/types/test_pyld_util.py index fa4e539d..3117ad04 100644 --- a/test/hermes_test/model/types/test_pyld_util.py +++ b/test/hermes_test/model/types/test_pyld_util.py @@ -1,3 +1,8 @@ +# SPDX-FileCopyrightText: 2025 German Aerospace Center (DLR) +# SPDX-FileContributor: Michael Meinel +# +# SPDX-License-Identifier: Apache-2.0 + import pytest from hermes.model.types import pyld_util From 9109d5235ad9604cb46af91f0bdb903554b9b951 Mon Sep 17 00:00:00 2001 From: notactuallyfinn Date: Wed, 3 Sep 2025 14:07:54 +0200 Subject: [PATCH 084/247] wrote first testsfor ld_dict --- test/hermes_test/model/types/test_ld_dict.py | 138 +++++++++++++++++++ 1 file changed, 138 insertions(+) diff --git a/test/hermes_test/model/types/test_ld_dict.py b/test/hermes_test/model/types/test_ld_dict.py index b4ddda65..22286fe1 100644 --- a/test/hermes_test/model/types/test_ld_dict.py +++ b/test/hermes_test/model/types/test_ld_dict.py @@ -1,4 +1,142 @@ # SPDX-FileCopyrightText: 2025 German Aerospace Center (DLR) # SPDX-FileContributor: Stephan Druskat +# SPDX-FileContributor: Michael Fritzsche # # SPDX-License-Identifier: Apache-2.0 + +import pytest + +from hermes.model.types.ld_dict import ld_dict + + +def test_dict_basics(): + di = ld_dict([{"foo": "bar", "foobar": "barfoo"}]) + assert di.data_dict == {"foo": "bar", "foobar": "barfoo"} + assert di.context == [] + assert di.parent is None and di.key is None and di.index is None + + +def test_malformed_input(): + with pytest.raises(Exception): + ld_dict([]) + with pytest.raises(Exception): + ld_dict([{"foo": "bar"}, {"bar": "foo"}]) + + +def test_build_in_get(): + di = ld_dict([{"name": [{"@value": "Manu Sporny"}], + "homepage": [{"@id": "http://manu.sporny.org/"}], + "foo": [{"foobar": "bar", "barfoo": "foo"}]}]) + assert di["name"] == "Manu Sporny" + assert di["homepage"] == "http://manu.sporny.org/" + assert di["foo"].data_dict == ld_dict([{"foobar": "bar", "barfoo": "foo"}]).data_dict + with pytest.raises(KeyError): + di["bar"] + di = ld_dict([{"http://xmlns.com/foaf/0.1/name": [{"@value": "Manu Sporny"}]}], + context={"xmlns": "http://xmlns.com/foaf/0.1/"}) + assert di["xmlns:name"] == "Manu Sporny" + + +def test_build_in_set(): + di = ld_dict([{}], context={"xmlns": "http://xmlns.com/foaf/0.1/"}) + di["http://xmlns.com/foaf/0.1/name"] = "Manu Sporny" + assert di.data_dict == {"http://xmlns.com/foaf/0.1/name": [{"@value": "Manu Sporny"}]} + di = ld_dict([{}], context={"xmlns": "http://xmlns.com/foaf/0.1/"}) + di["xmlns:name"] = "Manu Sporny" + di["xmlns:homepage"] = {"@id": "http://manu.sporny.org/"} + assert di.data_dict == {"http://xmlns.com/foaf/0.1/name": [{"@value": "Manu Sporny"}], + "http://xmlns.com/foaf/0.1/homepage": [{"@id": "http://manu.sporny.org/"}]} + + +def test_build_in_delete(): + di = ld_dict([{"http://xmlns.com/foaf/0.1/name": [{"@value": "Manu Sporny"}], + "http://xmlns.com/foaf/0.1/homepage": [{"@id": "http://manu.sporny.org/"}]}], + context={"xmlns": "http://xmlns.com/foaf/0.1/"}) + del di["http://xmlns.com/foaf/0.1/name"] + del di["xmlns:homepage"] + assert di.data_dict == {} + + +def test_build_in_contains(): + di = ld_dict([{"http://xmlns.com/foaf/0.1/name": [{"@value": "Manu Sporny"}], + "http://xmlns.com/foaf/0.1/homepage": [{"@id": "http://manu.sporny.org/"}]}], + context={"xmlns": "http://xmlns.com/foaf/0.1/"}) + assert "http://xmlns.com/foaf/0.1/name" in di + assert "xmlns:homepage" in di + + +def test_get(): + di = ld_dict([{"name": [{"@value": "Manu Sporny"}], + "homepage": [{"@id": "http://manu.sporny.org/"}], + "foo": [{"foobar": "bar", "barfoo": "foo"}]}]) + assert di.get("name") == "Manu Sporny" + assert di.get("bar", None) is None + with pytest.raises(KeyError): + di.get("bar") + + +def test_update(): + di = ld_dict([{"http://xmlns.com/foaf/0.1/name": [{"@value": "Manu Sporny"}], + "http://xmlns.com/foaf/0.1/homepage": [{"@id": "http://manu.sporny.org/"}]}], + context={"xmlns": "http://xmlns.com/foaf/0.1/"}) + di.update({}) + assert di.data_dict == {"http://xmlns.com/foaf/0.1/name": [{"@value": "Manu Sporny"}], + "http://xmlns.com/foaf/0.1/homepage": [{"@id": "http://manu.sporny.org/"}]} + di.update({"http://xmlns.com/foaf/0.1/name": "foo", "xmlns:homepage": {"@id": "bar"}, + "xmlns:foo": {"xmlns:foobar": "bar", "http://xmlns.com/foaf/0.1/barfoo": {"@id": "foo"}}}) + assert di.data_dict == {"http://xmlns.com/foaf/0.1/name": [{"@value": "foo"}], + "http://xmlns.com/foaf/0.1/homepage": [{"@id": "bar"}], + "http://xmlns.com/foaf/0.1/foo": [{"http://xmlns.com/foaf/0.1/foobar": [{"@value": "bar"}], + "http://xmlns.com/foaf/0.1/barfoo": [{"@id": "foo"}]}]} + with pytest.raises(AttributeError): + di.update(["", ""]) + + +def test_keys(): + di = ld_dict([{}], context={"xmlns": "http://xmlns.com/foaf/0.1/"}) + di.update({"http://xmlns.com/foaf/0.1/name": "Manu Sporny", "xmlns:homepage": {"@id": "http://manu.sporny.org/"}}) + assert {*di.keys()} == {"http://xmlns.com/foaf/0.1/name", "http://xmlns.com/foaf/0.1/homepage"} + + +def test_compact_keys(): + di = ld_dict([{}], context={"xmlns": "http://xmlns.com/foaf/0.1/"}) + di.update({"http://xmlns.com/foaf/0.1/name": "Manu Sporny", "xmlns:homepage": {"@id": "http://manu.sporny.org/"}}) + assert {*di.compact_keys()} == {"xmlns:name", "xmlns:homepage"} + di = ld_dict([{}], context={"homepage": "http://xmlns.com/foaf/0.1/homepage"}) + di.update({"http://xmlns.com/foaf/0.1/name": "Manu Sporny", + "http://xmlns.com/foaf/0.1/homepage": {"@id": "http://manu.sporny.org/"}}) + assert {*di.compact_keys()} == {"http://xmlns.com/foaf/0.1/name", "homepage"} + di = ld_dict([{}], context={"xmls": "http://xmlns.com/foaf/0.1/", "homepage": "http://xmlns.com/foaf/0.1/homepage"}) + di.update({"http://xmlns.com/foaf/0.1/name": "Manu Sporny", + "http://xmlns.com/foaf/0.1/homepage": {"@id": "http://manu.sporny.org/"}}) + assert {*di.compact_keys()} == {"xmls:name", "homepage"} + + +def test_items(): + di = ld_dict([{}], context={"xmlns": "http://xmlns.com/foaf/0.1/"}) + inner_di = ld_dict([{}], parent=di) + inner_di.update({"xmlns:foobar": "bar", "http://xmlns.com/foaf/0.1/barfoo": {"@id": "foo"}}) + di.update({"http://xmlns.com/foaf/0.1/name": "foo", "xmlns:homepage": {"@id": "bar"}, "xmlns:foo": inner_di}) + assert [*di.items()] == [ + ("http://xmlns.com/foaf/0.1/name", "foo"), ("http://xmlns.com/foaf/0.1/homepage", "bar"), + ("http://xmlns.com/foaf/0.1/foo", {"http://xmlns.com/foaf/0.1/foobar": inner_di["xmlns:foobar"], + "http://xmlns.com/foaf/0.1/barfoo": inner_di["xmlns:barfoo"]}) + ] + + +def test_ref(): + di = ld_dict([{}], context={"xmlns": "http://xmlns.com/foaf/0.1/"}) + di.update({"@id": "http://xmlns.com/foaf/0.1/homepage", "xmlns:name": "homepage"}) + assert di.ref == {"@id": "http://xmlns.com/foaf/0.1/homepage"} + di = ld_dict([{}], context={"xmlns": "http://xmlns.com/foaf/0.1/"}) + di.update({"http://xmlns.com/foaf/0.1/name": "foo"}) + assert di.ref == di.data_dict # or KeyError depends on interpretation of what should happen in this case + + +def test_to_python(): + di = ld_dict([{}], context={"xmlns": "http://xmlns.com/foaf/0.1/"}) + inner_di = ld_dict([{}], parent=di) + inner_di.update({"xmlns:foobar": "bar", "http://xmlns.com/foaf/0.1/barfoo": {"@id": "foo"}}) + di.update({"http://xmlns.com/foaf/0.1/name": "foo", "xmlns:homepage": {"@id": "bar"}, "xmlns:foo": inner_di}) + assert di.to_python() == {"xmlns:name": "foo", "xmlns:homepage": "bar", + "xmlns:foo": {"xmlns:foobar": "bar", "xmlns:barfoo": "foo"}} From 4fd198feda65b0690834172c00d74b26e7646782 Mon Sep 17 00:00:00 2001 From: notactuallyfinn Date: Thu, 4 Sep 2025 12:31:02 +0200 Subject: [PATCH 085/247] added missing dependency --- poetry.lock | 40 ++++++++++++++++++++++++++++++++++++++-- pyproject.toml | 1 + 2 files changed, 39 insertions(+), 2 deletions(-) diff --git a/poetry.lock b/poetry.lock index 47ce16c1..7e9de848 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 2.1.3 and should not be changed by hand. +# This file is automatically @generated by Poetry 2.1.4 and should not be changed by hand. [[package]] name = "accessible-pygments" @@ -700,6 +700,19 @@ files = [ {file = "iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7"}, ] +[[package]] +name = "isodate" +version = "0.7.2" +description = "An ISO 8601 date/time/duration parser and formatter" +optional = false +python-versions = ">=3.7" +groups = ["main"] +markers = "python_version == \"3.10\"" +files = [ + {file = "isodate-0.7.2-py3-none-any.whl", hash = "sha256:28009937d8031054830160fce6d409ed342816b543597cece116d966c6d99e15"}, + {file = "isodate-0.7.2.tar.gz", hash = "sha256:4cd1aa0f43ca76f4a6c6c0292a85f40b35ec2e43e315b59f06e6d32171a953e6"}, +] + [[package]] name = "jinja2" version = "3.1.6" @@ -1651,6 +1664,29 @@ files = [ {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, ] +[[package]] +name = "rdflib" +version = "7.1.4" +description = "RDFLib is a Python library for working with RDF, a simple yet powerful language for representing information." +optional = false +python-versions = "<4.0.0,>=3.8.1" +groups = ["main"] +files = [ + {file = "rdflib-7.1.4-py3-none-any.whl", hash = "sha256:72f4adb1990fa5241abd22ddaf36d7cafa5d91d9ff2ba13f3086d339b213d997"}, + {file = "rdflib-7.1.4.tar.gz", hash = "sha256:fed46e24f26a788e2ab8e445f7077f00edcf95abb73bcef4b86cefa8b62dd174"}, +] + +[package.dependencies] +isodate = {version = ">=0.7.2,<1.0.0", markers = "python_version < \"3.11\""} +pyparsing = ">=2.1.0,<4" + +[package.extras] +berkeleydb = ["berkeleydb (>=18.1.0,<19.0.0)"] +html = ["html5rdf (>=1.2,<2)"] +lxml = ["lxml (>=4.3,<6.0)"] +networkx = ["networkx (>=2,<4)"] +orjson = ["orjson (>=3.9.14,<4)"] + [[package]] name = "requests" version = "2.32.4" @@ -2477,4 +2513,4 @@ files = [ [metadata] lock-version = "2.1" python-versions = ">=3.10, <4.0.0" -content-hash = "58304fd33d6ec1ce3400b43ecffb16b3f48a5621e513c3e8057f9e3e050835e8" +content-hash = "2e2405b30c3dee4416a6e77828c7cff1197a8be71665770bcbdb308c19ef4358" diff --git a/pyproject.toml b/pyproject.toml index a42543b9..df646b78 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -39,6 +39,7 @@ dependencies = [ "pydantic-settings>=2.1.0, <3.0.0", "requests-oauthlib>=2.0.0, <3.0.0", "pynacl>=1.5.0, <2.0.0", + "rdflib (>=7.1.4,<8.0.0)", ] requires-python = ">=3.10, <4.0.0" From 0c4a500dd10e1360679b8bb15eaf5f839d3453be Mon Sep 17 00:00:00 2001 From: notactuallyfinn Date: Thu, 4 Sep 2025 12:40:04 +0200 Subject: [PATCH 086/247] added more tests --- src/hermes/model/types/ld_dict.py | 2 + test/hermes_test/model/types/test_ld_dict.py | 47 ++++++++++++++++++++ 2 files changed, 49 insertions(+) diff --git a/src/hermes/model/types/ld_dict.py b/src/hermes/model/types/ld_dict.py index d134b99e..67adfc53 100644 --- a/src/hermes/model/types/ld_dict.py +++ b/src/hermes/model/types/ld_dict.py @@ -87,6 +87,8 @@ def from_dict(cls, value, *, parent=None, key=None, context=None, ld_type=None): if parent is None and data_context: ld_data["@context"] = data_context elif parent is not None: + # fails if parent.parent is None + # because then parent.full_context is a dict instead of a list full_context[:0] = parent.full_context ld_value = cls.ld_proc.expand(ld_data, {"expandContext": full_context, "documentLoader": bundled_loader}) diff --git a/test/hermes_test/model/types/test_ld_dict.py b/test/hermes_test/model/types/test_ld_dict.py index 22286fe1..30ee0230 100644 --- a/test/hermes_test/model/types/test_ld_dict.py +++ b/test/hermes_test/model/types/test_ld_dict.py @@ -19,6 +19,7 @@ def test_dict_basics(): def test_malformed_input(): with pytest.raises(Exception): ld_dict([]) + with pytest.raises(Exception): ld_dict([{"foo": "bar"}, {"bar": "foo"}]) @@ -32,6 +33,7 @@ def test_build_in_get(): assert di["foo"].data_dict == ld_dict([{"foobar": "bar", "barfoo": "foo"}]).data_dict with pytest.raises(KeyError): di["bar"] + di = ld_dict([{"http://xmlns.com/foaf/0.1/name": [{"@value": "Manu Sporny"}]}], context={"xmlns": "http://xmlns.com/foaf/0.1/"}) assert di["xmlns:name"] == "Manu Sporny" @@ -41,6 +43,7 @@ def test_build_in_set(): di = ld_dict([{}], context={"xmlns": "http://xmlns.com/foaf/0.1/"}) di["http://xmlns.com/foaf/0.1/name"] = "Manu Sporny" assert di.data_dict == {"http://xmlns.com/foaf/0.1/name": [{"@value": "Manu Sporny"}]} + di = ld_dict([{}], context={"xmlns": "http://xmlns.com/foaf/0.1/"}) di["xmlns:name"] = "Manu Sporny" di["xmlns:homepage"] = {"@id": "http://manu.sporny.org/"} @@ -82,6 +85,7 @@ def test_update(): di.update({}) assert di.data_dict == {"http://xmlns.com/foaf/0.1/name": [{"@value": "Manu Sporny"}], "http://xmlns.com/foaf/0.1/homepage": [{"@id": "http://manu.sporny.org/"}]} + di.update({"http://xmlns.com/foaf/0.1/name": "foo", "xmlns:homepage": {"@id": "bar"}, "xmlns:foo": {"xmlns:foobar": "bar", "http://xmlns.com/foaf/0.1/barfoo": {"@id": "foo"}}}) assert di.data_dict == {"http://xmlns.com/foaf/0.1/name": [{"@value": "foo"}], @@ -102,10 +106,12 @@ def test_compact_keys(): di = ld_dict([{}], context={"xmlns": "http://xmlns.com/foaf/0.1/"}) di.update({"http://xmlns.com/foaf/0.1/name": "Manu Sporny", "xmlns:homepage": {"@id": "http://manu.sporny.org/"}}) assert {*di.compact_keys()} == {"xmlns:name", "xmlns:homepage"} + di = ld_dict([{}], context={"homepage": "http://xmlns.com/foaf/0.1/homepage"}) di.update({"http://xmlns.com/foaf/0.1/name": "Manu Sporny", "http://xmlns.com/foaf/0.1/homepage": {"@id": "http://manu.sporny.org/"}}) assert {*di.compact_keys()} == {"http://xmlns.com/foaf/0.1/name", "homepage"} + di = ld_dict([{}], context={"xmls": "http://xmlns.com/foaf/0.1/", "homepage": "http://xmlns.com/foaf/0.1/homepage"}) di.update({"http://xmlns.com/foaf/0.1/name": "Manu Sporny", "http://xmlns.com/foaf/0.1/homepage": {"@id": "http://manu.sporny.org/"}}) @@ -128,6 +134,7 @@ def test_ref(): di = ld_dict([{}], context={"xmlns": "http://xmlns.com/foaf/0.1/"}) di.update({"@id": "http://xmlns.com/foaf/0.1/homepage", "xmlns:name": "homepage"}) assert di.ref == {"@id": "http://xmlns.com/foaf/0.1/homepage"} + di = ld_dict([{}], context={"xmlns": "http://xmlns.com/foaf/0.1/"}) di.update({"http://xmlns.com/foaf/0.1/name": "foo"}) assert di.ref == di.data_dict # or KeyError depends on interpretation of what should happen in this case @@ -140,3 +147,43 @@ def test_to_python(): di.update({"http://xmlns.com/foaf/0.1/name": "foo", "xmlns:homepage": {"@id": "bar"}, "xmlns:foo": inner_di}) assert di.to_python() == {"xmlns:name": "foo", "xmlns:homepage": "bar", "xmlns:foo": {"xmlns:foobar": "bar", "xmlns:barfoo": "foo"}} + + +def test_from_dict(): + di = ld_dict.from_dict({"@type": "xmlns:hompage", "@id": "foo"}) + assert di.data_dict == {"@type": ["xmlns:hompage"], "@id": "foo"} + assert di.active_ctx == {"mappings": {}} and di.context == di.full_context == [] + assert di.index is di.key is di.parent is None + + di = ld_dict.from_dict({"@type": "xmlns:hompage", "@id": "foo"}, ld_type="xmlns:webpage") + assert di.data_dict == {"@type": ["xmlns:webpage", "xmlns:hompage"], "@id": "foo"} + assert di.active_ctx == {"mappings": {}} and di.context == di.full_context == [] + assert di.index is di.key is di.parent is None + + di = ld_dict.from_dict({"@context": {"schema": "https://schema.org/"}, "@type": "schema:Thing", "@id": "foo"}) + assert di.data_dict == {"@type": ["https://schema.org/Thing"], "@id": "foo"} + assert di.context == di.full_context == {"schema": "https://schema.org/"} + assert di.index is di.key is di.parent is None + + outer_di = di + di = ld_dict.from_dict({"@context": {"schema": "https://schema.org/"}, "@type": "schema:Action", + "schema:name": "foo"}, + parent=outer_di, key="schema:result") + assert di.data_dict == {"@type": ["https://schema.org/Action"], "https://schema.org/name": [{"@value": "foo"}]} + assert di.full_context == [{"schema": "https://schema.org/"}, {"schema": "https://schema.org/"}] + assert di.context == {"schema": "https://schema.org/"} and di.index is None and di.key == "schema:result" + + +def test_is_ld_dict(): + assert not any(ld_dict.is_ld_dict(item) for item in [{}, {"foo": "bar"}, {"@id": "foo"}]) + assert not any(ld_dict.is_ld_dict(item) for item in [[{"@id": "foo"}], [{"@set": "foo"}], [{}, {}], [], [""]]) + assert not ld_dict.is_ld_dict([{}]) + assert all(ld_dict.is_ld_dict([item]) for item in [{"@id": "foo", "foobar": "bar"}, {"foo": "bar"}]) + + +def test_is_json_dict(): + assert not any(ld_dict.is_json_dict(item) for item in [1, "", [], {""}, ld_dict([{}])]) + assert not any(ld_dict.is_json_dict({key: [], "foo": "bar"}) for key in ["@set", "@graph", "@list", "@value"]) + assert not ld_dict.is_json_dict({"@id": "foo"}) + assert ld_dict.is_json_dict({"@id": "foo", "foobar": "bar"}) + assert ld_dict.is_json_dict({"foo": "bar"}) From 75f2b6625fb003dd92b78065fc1c6d9bbe9c5caf Mon Sep 17 00:00:00 2001 From: notactuallyfinn Date: Thu, 4 Sep 2025 13:20:39 +0200 Subject: [PATCH 087/247] added two more test cases --- test/hermes_test/model/types/test_ld_dict.py | 18 ++++++++++++++++-- 1 file changed, 16 insertions(+), 2 deletions(-) diff --git a/test/hermes_test/model/types/test_ld_dict.py b/test/hermes_test/model/types/test_ld_dict.py index 30ee0230..d2de3301 100644 --- a/test/hermes_test/model/types/test_ld_dict.py +++ b/test/hermes_test/model/types/test_ld_dict.py @@ -155,6 +155,12 @@ def test_from_dict(): assert di.active_ctx == {"mappings": {}} and di.context == di.full_context == [] assert di.index is di.key is di.parent is None + di = ld_dict.from_dict({"http://xmlns.com/foaf/0.1/name": [{"@value": "foo"}], + "http://xmlns.com/foaf/0.1/foo": [{"http://xmlns.com/foaf/0.1/barfoo": [{"@id": "foo"}], + "http://xmlns.com/foaf/0.1/fooba": [{"@value": "ba"}]}]}) + assert di.active_ctx == {"mappings": {}} and di.context == di.full_context == [] + assert di.index is di.key is di.parent is None + di = ld_dict.from_dict({"@type": "xmlns:hompage", "@id": "foo"}, ld_type="xmlns:webpage") assert di.data_dict == {"@type": ["xmlns:webpage", "xmlns:hompage"], "@id": "foo"} assert di.active_ctx == {"mappings": {}} and di.context == di.full_context == [] @@ -170,8 +176,16 @@ def test_from_dict(): "schema:name": "foo"}, parent=outer_di, key="schema:result") assert di.data_dict == {"@type": ["https://schema.org/Action"], "https://schema.org/name": [{"@value": "foo"}]} - assert di.full_context == [{"schema": "https://schema.org/"}, {"schema": "https://schema.org/"}] - assert di.context == {"schema": "https://schema.org/"} and di.index is None and di.key == "schema:result" + assert di.full_context == 2 * [{"schema": "https://schema.org/"}] + assert di.context == {"schema": "https://schema.org/"} and di.key == "schema:result" and di.index is None + + outer_di = di + di = ld_dict.from_dict({"@context": {"schema": "https://schema.org/"}, "@type": "schema:Thing", + "schema:name": "foo"}, + parent=outer_di, key="schema:error") + assert di.data_dict == {"@type": ["https://schema.org/Thing"], "https://schema.org/name": [{"@value": "foo"}]} + assert di.full_context == 3 * [{"schema": "https://schema.org/"}] + assert di.context == {"schema": "https://schema.org/"} and di.key == "schema:error" and di.index is None def test_is_ld_dict(): From 0ca9af4162d6e29add564a64e33089a46dfcfe0b Mon Sep 17 00:00:00 2001 From: notactuallyfinn Date: Thu, 4 Sep 2025 13:28:22 +0200 Subject: [PATCH 088/247] fixed small bug --- src/hermes/model/types/ld_dict.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/src/hermes/model/types/ld_dict.py b/src/hermes/model/types/ld_dict.py index 67adfc53..bcea80b5 100644 --- a/src/hermes/model/types/ld_dict.py +++ b/src/hermes/model/types/ld_dict.py @@ -87,9 +87,7 @@ def from_dict(cls, value, *, parent=None, key=None, context=None, ld_type=None): if parent is None and data_context: ld_data["@context"] = data_context elif parent is not None: - # fails if parent.parent is None - # because then parent.full_context is a dict instead of a list - full_context[:0] = parent.full_context + full_context[:0] = [temp] if isinstance(temp := parent.full_context, dict) else temp ld_value = cls.ld_proc.expand(ld_data, {"expandContext": full_context, "documentLoader": bundled_loader}) ld_value = cls(ld_value, parent=parent, key=key, context=data_context) From f3133e8a92d1b2241536ae5136a8e9df818a9554 Mon Sep 17 00:00:00 2001 From: notactuallyfinn Date: Fri, 5 Sep 2025 11:32:56 +0200 Subject: [PATCH 089/247] added tests for ld_list --- test/hermes_test/model/types/test_ld_list.py | 35 +++++++++++++++++++- 1 file changed, 34 insertions(+), 1 deletion(-) diff --git a/test/hermes_test/model/types/test_ld_list.py b/test/hermes_test/model/types/test_ld_list.py index 00f054c9..ba4f3e18 100644 --- a/test/hermes_test/model/types/test_ld_list.py +++ b/test/hermes_test/model/types/test_ld_list.py @@ -3,15 +3,20 @@ # SPDX-License-Identifier: Apache-2.0 # SPDX-FileContributor: Sophie Kernchen +# SPDX-FileContributor: Michael Fritzsche import pytest from hermes.model.types.ld_list import ld_list +from hermes.model.types.ld_dict import ld_dict def test_undefined_list(): with pytest.raises(ValueError): - li = ld_list([{"spam": [{"@value": "bacon"}]}]) + ld_list([{"spam": [{"@value": "bacon"}]}]) + with pytest.raises(ValueError): + ld_list([{"@list": ["a", "b"], "@set": ["foo", "bar"]}]) + @pytest.mark.dev def test_list_basics(): @@ -20,3 +25,31 @@ def test_list_basics(): assert li.container == '@list' assert li.item_list == [0] + +def test_build_in_get(): + li = ld_list([{"@list": [{"@value": "foo"}, {"@value": "bar"}, {"@value": "foobar"}]}]) + assert li[0] == "foo" and li[-1] == "foobar" + assert li[:2] == ["foo", "bar"] and li[1:-1] == ["bar"] + assert li[::2] == ["foo", "foobar"] and li[::-1] == ["foobar", "bar", "foo"] + + li = ld_list([{"@list": [ld_dict([{"@type": "A", "schema:name": "a"}])]}]) + assert isinstance(li[0], ld_dict) and li[0].data_dict == {"@type": "A", "schema:name": "a"} and li[0].index == 0 + + +def test_build_in_set(): + li = ld_list([{"@list": [{"@value": "foo"}, {"@value": "bar"}, {"@value": "foobar"}]}], key="https://schema.org/name", context={"schema": "https://schema.org/"}) + li[0] = "bar" + li[-1] = "barfoo" + assert li.item_list[0] == {"@value": "bar"} and li.item_list[-1] == {"@value": "barfoo"} + li[:2] = ["fo", "ar"] + assert li.item_list == [{"@value": "fo"}, {"@value": "ar"}, {"@value": "barfoo"}] + li[1:-1] = ["br"] + assert li.item_list == [{"@value": "fo"}, {"@value": "br"}, {"@value": "barfoo"}] + li[::2] = ["oo", "fooba"] + assert li.item_list == [{"@value": "oo"}, {"@value": "br"}, {"@value": "fooba"}] + li[::-1] = ["foobar", "bar", "foo"] + assert li.item_list == [{"@value": "foo"}, {"@value": "bar"}, {"@value": "foobar"}] + with pytest.raises(ValueError): + li[::2] = "foo" + with pytest.raises(TypeError): + li[:2] = 1 From 71a5f878ce2a8ea3ef0ce71efd7c8af2a9d0669a Mon Sep 17 00:00:00 2001 From: notactuallyfinn Date: Fri, 5 Sep 2025 11:33:32 +0200 Subject: [PATCH 090/247] fixed bug in __getitem__ --- src/hermes/model/types/ld_list.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/hermes/model/types/ld_list.py b/src/hermes/model/types/ld_list.py index c2c68e4d..83ced857 100644 --- a/src/hermes/model/types/ld_list.py +++ b/src/hermes/model/types/ld_list.py @@ -33,7 +33,7 @@ def __getitem__(self, index): if isinstance(index, slice): return [self[i] for i in [*range(len(self))][index]] - item = self._to_python(self.key, self.item_list[index:index + 1]) + item = self._to_python(self.key, temp if isinstance(temp := self.item_list[index], ld_container) else [temp]) if isinstance(item, ld_container): item.index = index return item From 23eda6cac56053d7e26e76ceaf79a8b346a07baf Mon Sep 17 00:00:00 2001 From: notactuallyfinn Date: Fri, 5 Sep 2025 11:34:24 +0200 Subject: [PATCH 091/247] added support for slices and fixed bug in __setitem__ --- src/hermes/model/types/ld_list.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/src/hermes/model/types/ld_list.py b/src/hermes/model/types/ld_list.py index 83ced857..9da33d8d 100644 --- a/src/hermes/model/types/ld_list.py +++ b/src/hermes/model/types/ld_list.py @@ -39,7 +39,15 @@ def __getitem__(self, index): return item def __setitem__(self, index, value): - self.item_list[index:index + 1] = self._to_expanded_json(self.key, value) + if not isinstance(index, slice): + self.item_list[index] = temp[0] if isinstance(temp := self._to_expanded_json(self.key, value), list) else temp + return + try: + iter(value) + except TypeError as exc: + raise TypeError("must assign iterable to extended slice") + expanded_value = [self._to_expanded_json(self.key, val) for val in value] + self.item_list[index] = [val[0] if isinstance(val, list) else val for val in expanded_value] def __len__(self): return len(self.item_list) From 562f3bfa54220737a6660e336b643b90cbb227a2 Mon Sep 17 00:00:00 2001 From: notactuallyfinn Date: Fri, 5 Sep 2025 11:38:04 +0200 Subject: [PATCH 092/247] linted ld_list.py and test_ld_list.py --- src/hermes/model/types/ld_list.py | 4 ++-- test/hermes_test/model/types/test_ld_list.py | 3 ++- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/src/hermes/model/types/ld_list.py b/src/hermes/model/types/ld_list.py index 9da33d8d..12570067 100644 --- a/src/hermes/model/types/ld_list.py +++ b/src/hermes/model/types/ld_list.py @@ -40,12 +40,12 @@ def __getitem__(self, index): def __setitem__(self, index, value): if not isinstance(index, slice): - self.item_list[index] = temp[0] if isinstance(temp := self._to_expanded_json(self.key, value), list) else temp + self.item_list[index] = val[0] if isinstance(val := self._to_expanded_json(self.key, value), list) else val return try: iter(value) except TypeError as exc: - raise TypeError("must assign iterable to extended slice") + raise TypeError("must assign iterable to extended slice") from exc expanded_value = [self._to_expanded_json(self.key, val) for val in value] self.item_list[index] = [val[0] if isinstance(val, list) else val for val in expanded_value] diff --git a/test/hermes_test/model/types/test_ld_list.py b/test/hermes_test/model/types/test_ld_list.py index ba4f3e18..c00cfbc7 100644 --- a/test/hermes_test/model/types/test_ld_list.py +++ b/test/hermes_test/model/types/test_ld_list.py @@ -37,7 +37,8 @@ def test_build_in_get(): def test_build_in_set(): - li = ld_list([{"@list": [{"@value": "foo"}, {"@value": "bar"}, {"@value": "foobar"}]}], key="https://schema.org/name", context={"schema": "https://schema.org/"}) + li = ld_list([{"@list": [{"@value": "foo"}, {"@value": "bar"}, {"@value": "foobar"}]}], + key="https://schema.org/name", context={"schema": "https://schema.org/"}) li[0] = "bar" li[-1] = "barfoo" assert li.item_list[0] == {"@value": "bar"} and li.item_list[-1] == {"@value": "barfoo"} From 2679a0a44cdd9a3cd593fbeaf4ccbb84cefa2c03 Mon Sep 17 00:00:00 2001 From: notactuallyfinn Date: Fri, 5 Sep 2025 13:53:59 +0200 Subject: [PATCH 093/247] added more tests --- test/hermes_test/model/types/test_ld_list.py | 70 ++++++++++++++++++++ 1 file changed, 70 insertions(+) diff --git a/test/hermes_test/model/types/test_ld_list.py b/test/hermes_test/model/types/test_ld_list.py index c00cfbc7..37c4439d 100644 --- a/test/hermes_test/model/types/test_ld_list.py +++ b/test/hermes_test/model/types/test_ld_list.py @@ -54,3 +54,73 @@ def test_build_in_set(): li[::2] = "foo" with pytest.raises(TypeError): li[:2] = 1 + + +def test_build_in_len(): + assert len(ld_list([{"@list": []}])) == 0 + assert len(ld_list([{"@list": [{"@value": "foo"}, {"@value": "bar"}, {"@value": "foobar"}]}])) == 3 + + +def test_build_in_iter(): + li = ld_list([{"@list": [{"@value": "foo"}, ld_dict([{"@type": "A", "schema:name": "a"}])]}], + key="https://schema.org/name", context={"schema": "https://schema.org/"}) + li = [val for val in li] + assert li[0] == "foo" and li[1].data_dict == {"@type": "A", "schema:name": "a"} and li[1].index == 1 + + +def test_append(): + li = ld_list([{"@list": []}], key="https://schema.org/name", context={"schema": "https://schema.org/"}) + li.append("foo") + assert li[0] == "foo" and li.item_list[0] == {"@value": "foo"} and len(li) == 1 + li.append("bar") + assert li[0:2] == ["foo", "bar"] and li.item_list[1] == {"@value": "bar"} and len(li) == 2 + li.append(ld_dict([{"@type": "A", "schema:name": "a"}])) + assert li[-1].data_dict == {"@type": "A", "schema:name": "a"} and len(li) == 3 + + +def test_extend(): + li = ld_list([{"@list": []}], key="https://schema.org/name", context={"schema": "https://schema.org/"}) + li.extend([]) + assert len(li) == 0 + li.extend(["foo"]) + assert li[0] == "foo" and li.item_list[0] == {"@value": "foo"} and len(li) == 1 + li.extend(["bar"]) + assert li[0:2] == ["foo", "bar"] and li.item_list[1] == {"@value": "bar"} and len(li) == 2 + li.extend([ld_dict([{"@type": "A", "schema:name": "a"}])]) + assert li[-1].data_dict == {"@type": "A", "schema:name": "a"} and len(li) == 3 + + li = ld_list([{"@list": []}], key="https://schema.org/name", context={"schema": "https://schema.org/"}) + li.extend(["foo", "bar", ld_dict([{"@type": "A", "schema:name": "a"}])]) + assert li[0:2] == ["foo", "bar"] and li.item_list[0:2] == [{"@value": "foo"}, {"@value": "bar"}] + assert li[-1].data_dict == {"@type": "A", "schema:name": "a"} and len(li) == 3 + + li = ld_list([{"@list": []}], key="https://schema.org/name", context={"schema": "https://schema.org/"}) + li.append("foo") + li.extend(["bar", ld_dict([{"@type": "A", "schema:name": "a"}])]) + assert li[0:2] == ["foo", "bar"] and li.item_list[0:2] == [{"@value": "foo"}, {"@value": "bar"}] + assert li[-1].data_dict == {"@type": "A", "schema:name": "a"} and len(li) == 3 + + +def test_is_ld_list(): + assert not any(ld_list.is_ld_list(item) for item in [1, "", [], {}, {"@list": []}, [{}], [{"a": "b"}]]) + assert not any(ld_list.is_ld_list(item) for item in [[{"@list": ""}], [{"@list": ["a"]}, {"@list": ["b"]}]]) + assert all(ld_list.is_ld_list([{container_type: []}]) for container_type in ["@list", "@set", "@graph"]) + + +def test_is_container(): + assert not any(ld_list.is_container(item) for item in [1, "", [], {}, {"a": "b"}]) + assert not any(ld_list.is_container(item) for item in [ld_dict([{"a": "b"}]), ld_list([{"@list": ["a"]}])]) + assert not any(ld_list.is_container({"@list": value}) for value in ["", 1, {}]) + assert all(ld_list.is_container({container_type: []}) for container_type in ["@list", "@graph", "@set"]) + + +def test_from_list(): + li = ld_list.from_list([]) + assert li.container == "@list" and li.item_list == li.context == [] and li.parent is li.key is li.index is None + li = ld_list.from_list([], parent=li, key="schema:name", context={"schema":"https://schema.org/"}, container="@set") + assert li.container == "@set" and li.item_list == [] and li.parent is not None and li.key == "schema:name" + assert li.index is None and li.context == {"schema":"https://schema.org/"} + li = ld_list.from_list(["a", {"@value": "b"}], parent=None, key="https://schema.org/name", + context={"schema":"https://schema.org/"}, container="@graph") + assert li.container == "@graph" and li.item_list == [{"@value": "a"}, {"@value": "b"}] and li.parent is None + assert li.key == "https://schema.org/name" and li.index is None and li.context == {"schema":"https://schema.org/"} From 1da06c3256e53bfff007d619530b9485d39288b3 Mon Sep 17 00:00:00 2001 From: notactuallyfinn Date: Fri, 5 Sep 2025 13:56:47 +0200 Subject: [PATCH 094/247] fixed small bug in __iter__ of ld_list --- src/hermes/model/types/ld_list.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/hermes/model/types/ld_list.py b/src/hermes/model/types/ld_list.py index 12570067..11c7ff41 100644 --- a/src/hermes/model/types/ld_list.py +++ b/src/hermes/model/types/ld_list.py @@ -54,7 +54,7 @@ def __len__(self): def __iter__(self): for index, value in enumerate(self.item_list): - item = self._to_python(self.key, [value]) + item = self._to_python(self.key, value if isinstance(value, ld_container) else [value]) if isinstance(item, ld_container): item.index = index yield item From 8975cf9b8e249ebf8559bd48a13a26a71f38e7ae Mon Sep 17 00:00:00 2001 From: notactuallyfinn Date: Fri, 5 Sep 2025 13:57:20 +0200 Subject: [PATCH 095/247] added FIXME and linted test_ld_list.py --- src/hermes/model/types/ld_list.py | 18 ++++++++++++++++++ test/hermes_test/model/types/test_ld_list.py | 9 +++++---- 2 files changed, 23 insertions(+), 4 deletions(-) diff --git a/src/hermes/model/types/ld_list.py b/src/hermes/model/types/ld_list.py index 11c7ff41..1bfd2c5b 100644 --- a/src/hermes/model/types/ld_list.py +++ b/src/hermes/model/types/ld_list.py @@ -68,6 +68,24 @@ def extend(self, value): self.append(item) def to_python(self): + # FIXME: + # why is item not converted to it's python equivalent? + # + # ld_list([{"@list": [{"@value": "a"}]}])[0] + # == "a" + # ld_list([{"@list": [{"@value": "a"}]}]).to_python()[0] + # == {"@value": "a"} why not "a"? + # ld_list([{"@list": [ld_dict([{"@type": "Person", "name": "a"}])]}])[0] + # == ld_dict([{"@type": "Person", "name": "a"}]) + # ld_list([{"@list": [ld_dict([{"@type": "Person", "name": "a"}])]}]).to_python()[0] + # == {"@type": "Person", "name": "a"} + # + # ld_dict([{"name": [{"@value": "a"}]}])["name"] == "a" + # ld_dict([{"name": [{"@value": "a"}]}]).to_python()["name"] == "a" why not {"@value": "a"}? + # ld_dict([{"person": [ld_dict([{"@type": "Person", "name": "a"}])]}])["person"] + # == ld_dict([{"@type": "Person", "name": "a"}]) + # ld_dict([{"person": [ld_dict([{"@type": "Person", "name": "a"}])]}]).to_python()["person"] + # == {"@type": "Person", "name": "a"} return [ item.to_python() if isinstance(item, ld_container) else item for item in self diff --git a/test/hermes_test/model/types/test_ld_list.py b/test/hermes_test/model/types/test_ld_list.py index 37c4439d..95fbb85f 100644 --- a/test/hermes_test/model/types/test_ld_list.py +++ b/test/hermes_test/model/types/test_ld_list.py @@ -117,10 +117,11 @@ def test_is_container(): def test_from_list(): li = ld_list.from_list([]) assert li.container == "@list" and li.item_list == li.context == [] and li.parent is li.key is li.index is None - li = ld_list.from_list([], parent=li, key="schema:name", context={"schema":"https://schema.org/"}, container="@set") + li = ld_list.from_list([], parent=li, key="schema:name", context={"schema": "https://schema.org/"}, + container="@set") assert li.container == "@set" and li.item_list == [] and li.parent is not None and li.key == "schema:name" - assert li.index is None and li.context == {"schema":"https://schema.org/"} + assert li.index is None and li.context == {"schema": "https://schema.org/"} li = ld_list.from_list(["a", {"@value": "b"}], parent=None, key="https://schema.org/name", - context={"schema":"https://schema.org/"}, container="@graph") + context={"schema": "https://schema.org/"}, container="@graph") assert li.container == "@graph" and li.item_list == [{"@value": "a"}, {"@value": "b"}] and li.parent is None - assert li.key == "https://schema.org/name" and li.index is None and li.context == {"schema":"https://schema.org/"} + assert li.key == "https://schema.org/name" and li.index is None and li.context == {"schema": "https://schema.org/"} From f29875b532f86b96c90f0c7efc2cd8b09a2ee1e1 Mon Sep 17 00:00:00 2001 From: notactuallyfinn Date: Tue, 9 Sep 2025 11:20:21 +0200 Subject: [PATCH 096/247] added tests and adjusted some according to what was discussed in a meeting --- test/hermes_test/model/types/test_ld_dict.py | 56 ++++++++++++++++---- 1 file changed, 47 insertions(+), 9 deletions(-) diff --git a/test/hermes_test/model/types/test_ld_dict.py b/test/hermes_test/model/types/test_ld_dict.py index d2de3301..6ac991b5 100644 --- a/test/hermes_test/model/types/test_ld_dict.py +++ b/test/hermes_test/model/types/test_ld_dict.py @@ -7,6 +7,7 @@ import pytest from hermes.model.types.ld_dict import ld_dict +from hermes.model.types.ld_list import ld_list def test_dict_basics(): @@ -17,10 +18,10 @@ def test_dict_basics(): def test_malformed_input(): - with pytest.raises(Exception): + with pytest.raises(ValueError): ld_dict([]) - with pytest.raises(Exception): + with pytest.raises(ValueError): ld_dict([{"foo": "bar"}, {"bar": "foo"}]) @@ -50,6 +51,41 @@ def test_build_in_set(): assert di.data_dict == {"http://xmlns.com/foaf/0.1/name": [{"@value": "Manu Sporny"}], "http://xmlns.com/foaf/0.1/homepage": [{"@id": "http://manu.sporny.org/"}]} + di = ld_dict([{}], context={"xmlns": "http://xmlns.com/foaf/0.1/"}) + di["xmlns:name"] = ["Manu Sporny", "foo"] + assert di.data_dict == {"http://xmlns.com/foaf/0.1/name": [{"@list": [{"@value": "Manu Sporny"}, + {"@value": "foo"}]}]} + + di = ld_dict([{}], context={"schema": "https://schema.org/"}) + di["@type"] = "schema:Thing" + di["schema:result"] = {"@type": "schema:Action", "schema:error": {"@type": "schema:Thing", "schema:name": "foo"}} + assert di.data_dict == { + "@type": ["https://schema.org/Thing"], + "https://schema.org/result": [{ + "@type": ["https://schema.org/Action"], + "https://schema.org/error": [{ + "@type": ["https://schema.org/Thing"], + "https://schema.org/name": [{"@value": "foo"}] + }] + }] + } + + di = ld_dict([{}], context={"schema": "https://schema.org/"}) + di["@type"] = "schema:Thing" + di["schema:result"] = {"@type": "schema:Action", "schema:error": {"@type": "schema:Thing", "schema:name": ["foo", + "bar"]}} + assert di.data_dict == { + "@type": ["https://schema.org/Thing"], + "https://schema.org/result": [{ + "@type": ["https://schema.org/Action"], + "https://schema.org/error": [{ + "@type": ["https://schema.org/Thing"], + "https://schema.org/name": [{"@value": "foo"}, {"@value": "bar"}] + }] + }] + } + assert isinstance(di["schema:result"]["schema:error"]["schema:name"], ld_list) + def test_build_in_delete(): di = ld_dict([{"http://xmlns.com/foaf/0.1/name": [{"@value": "Manu Sporny"}], @@ -66,6 +102,8 @@ def test_build_in_contains(): context={"xmlns": "http://xmlns.com/foaf/0.1/"}) assert "http://xmlns.com/foaf/0.1/name" in di assert "xmlns:homepage" in di + assert "xmlns:foo" not in di + assert "foo" not in di def test_get(): @@ -123,11 +161,11 @@ def test_items(): inner_di = ld_dict([{}], parent=di) inner_di.update({"xmlns:foobar": "bar", "http://xmlns.com/foaf/0.1/barfoo": {"@id": "foo"}}) di.update({"http://xmlns.com/foaf/0.1/name": "foo", "xmlns:homepage": {"@id": "bar"}, "xmlns:foo": inner_di}) - assert [*di.items()] == [ - ("http://xmlns.com/foaf/0.1/name", "foo"), ("http://xmlns.com/foaf/0.1/homepage", "bar"), - ("http://xmlns.com/foaf/0.1/foo", {"http://xmlns.com/foaf/0.1/foobar": inner_di["xmlns:foobar"], - "http://xmlns.com/foaf/0.1/barfoo": inner_di["xmlns:barfoo"]}) - ] + assert [*di.items()][0:2] == [("http://xmlns.com/foaf/0.1/name", "foo"), + ("http://xmlns.com/foaf/0.1/homepage", "bar")] + assert [*di.items()][2][0] == "http://xmlns.com/foaf/0.1/foo" + assert [*di.items()][2][1].data_dict == {"http://xmlns.com/foaf/0.1/foobar": [{"@value": "bar"}], + "http://xmlns.com/foaf/0.1/barfoo": [{"@id": "foo"}]} def test_ref(): @@ -137,7 +175,8 @@ def test_ref(): di = ld_dict([{}], context={"xmlns": "http://xmlns.com/foaf/0.1/"}) di.update({"http://xmlns.com/foaf/0.1/name": "foo"}) - assert di.ref == di.data_dict # or KeyError depends on interpretation of what should happen in this case + with pytest.raises(KeyError): + di.ref def test_to_python(): @@ -191,7 +230,6 @@ def test_from_dict(): def test_is_ld_dict(): assert not any(ld_dict.is_ld_dict(item) for item in [{}, {"foo": "bar"}, {"@id": "foo"}]) assert not any(ld_dict.is_ld_dict(item) for item in [[{"@id": "foo"}], [{"@set": "foo"}], [{}, {}], [], [""]]) - assert not ld_dict.is_ld_dict([{}]) assert all(ld_dict.is_ld_dict([item]) for item in [{"@id": "foo", "foobar": "bar"}, {"foo": "bar"}]) From 76a93b22253a99e7d59e2a460bb416364fbc8b7c Mon Sep 17 00:00:00 2001 From: notactuallyfinn Date: Tue, 9 Sep 2025 11:21:53 +0200 Subject: [PATCH 097/247] added check to __init__ of ld_dict --- src/hermes/model/types/ld_dict.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/hermes/model/types/ld_dict.py b/src/hermes/model/types/ld_dict.py index bcea80b5..baa8a586 100644 --- a/src/hermes/model/types/ld_dict.py +++ b/src/hermes/model/types/ld_dict.py @@ -13,6 +13,8 @@ class ld_dict(ld_container): _NO_DEFAULT = type("NO DEFAULT") def __init__(self, data, *, parent=None, key=None, index=None, context=None): + if not self.is_ld_dict(data): + raise ValueError("The given data does not represent a ld_dict.") super().__init__(data, parent=parent, key=key, index=index, context=context) self.data_dict = data[0] From af19fccd6f7abee67cd897ac22175566a86a9b63 Mon Sep 17 00:00:00 2001 From: notactuallyfinn Date: Tue, 9 Sep 2025 11:28:33 +0200 Subject: [PATCH 098/247] suggested fix for nested conversions --- src/hermes/model/types/__init__.py | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/src/hermes/model/types/__init__.py b/src/hermes/model/types/__init__.py index a10d58a3..9b1260a3 100644 --- a/src/hermes/model/types/__init__.py +++ b/src/hermes/model/types/__init__.py @@ -37,6 +37,25 @@ (ld_container.is_typed_json_value, dict(python=ld_container.typed_ld_to_py)), (ld_container.is_json_value, dict(python=lambda c, **_: c["@value"], expanded_json=lambda c, **_: [c])), (ld_list.is_container, dict(ld_container=lambda c, **kw: ld_list([c], **kw))), + # FIXME: add conversion from list and json dict to expanded_json + # to parse nested dicts and lists when using for example __setitem__(key, value) from ld_dict + # where value is converted to expanded_json bevor adding it to data_dict + # Suggested: + #( + # ld_dict.is_json_dict, + # { + # "ld_container": ld_dict.from_dict, + # "expanded_json": lambda c, **kw: kw["parent"]._to_expanded_json(kw["key"], ld_dict.from_dict(c, **kw)) + # } + #), + # + #( + # lambda c: isinstance(c, list), + # { + # "ld_container": ld_list.from_list, + # "expanded_json": lambda c, **kw: kw["parent"]._to_expanded_json(kw["key"], ld_list.from_list(c, **kw)) + # } + #), (ld_dict.is_json_dict, dict(ld_container=ld_dict.from_dict)), (lambda c: isinstance(c, list), dict(ld_container=ld_list.from_list)), From f4621ab4faef2b560814c67dd1e292cff292d8b9 Mon Sep 17 00:00:00 2001 From: notactuallyfinn Date: Tue, 9 Sep 2025 11:37:55 +0200 Subject: [PATCH 099/247] fixed formatting mistakes --- src/hermes/model/types/__init__.py | 29 +++++++++++++++-------------- 1 file changed, 15 insertions(+), 14 deletions(-) diff --git a/src/hermes/model/types/__init__.py b/src/hermes/model/types/__init__.py index 9b1260a3..6fff24b2 100644 --- a/src/hermes/model/types/__init__.py +++ b/src/hermes/model/types/__init__.py @@ -37,25 +37,26 @@ (ld_container.is_typed_json_value, dict(python=ld_container.typed_ld_to_py)), (ld_container.is_json_value, dict(python=lambda c, **_: c["@value"], expanded_json=lambda c, **_: [c])), (ld_list.is_container, dict(ld_container=lambda c, **kw: ld_list([c], **kw))), + # FIXME: add conversion from list and json dict to expanded_json # to parse nested dicts and lists when using for example __setitem__(key, value) from ld_dict # where value is converted to expanded_json bevor adding it to data_dict # Suggested: - #( - # ld_dict.is_json_dict, - # { - # "ld_container": ld_dict.from_dict, - # "expanded_json": lambda c, **kw: kw["parent"]._to_expanded_json(kw["key"], ld_dict.from_dict(c, **kw)) - # } - #), + # ( + # ld_dict.is_json_dict, + # { + # "ld_container": ld_dict.from_dict, + # "expanded_json": lambda c, **kw: kw["parent"]._to_expanded_json(kw["key"], ld_dict.from_dict(c, **kw)) + # } + # ), # - #( - # lambda c: isinstance(c, list), - # { - # "ld_container": ld_list.from_list, - # "expanded_json": lambda c, **kw: kw["parent"]._to_expanded_json(kw["key"], ld_list.from_list(c, **kw)) - # } - #), + # ( + # lambda c: isinstance(c, list), + # { + # "ld_container": ld_list.from_list, + # "expanded_json": lambda c, **kw: kw["parent"]._to_expanded_json(kw["key"], ld_list.from_list(c, **kw)) + # } + # ), (ld_dict.is_json_dict, dict(ld_container=ld_dict.from_dict)), (lambda c: isinstance(c, list), dict(ld_container=ld_list.from_list)), From da2f0109486a7af44cc5273078af15a8fe704ede Mon Sep 17 00:00:00 2001 From: notactuallyfinn Date: Tue, 9 Sep 2025 14:54:28 +0200 Subject: [PATCH 100/247] added a fixme comment --- src/hermes/model/types/ld_dict.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/hermes/model/types/ld_dict.py b/src/hermes/model/types/ld_dict.py index baa8a586..49461894 100644 --- a/src/hermes/model/types/ld_dict.py +++ b/src/hermes/model/types/ld_dict.py @@ -98,6 +98,9 @@ def from_dict(cls, value, *, parent=None, key=None, context=None, ld_type=None): @classmethod def is_ld_dict(cls, ld_value): + # FIXME: maybe rename to is_expanded_ld_dict + # because this functions tests if ld_value could be an expanded json ld dict + # (is_ld_node returns False for every compacted json ld dictionary) return cls.is_ld_node(ld_value) and cls.is_json_dict(ld_value[0]) @classmethod From d822fb55e02b001bd00e5eec6e79a0ae20ea7e4a Mon Sep 17 00:00:00 2001 From: notactuallyfinn Date: Tue, 9 Sep 2025 14:57:53 +0200 Subject: [PATCH 101/247] added some edge cases to the tests --- test/hermes_test/model/types/test_ld_dict.py | 37 ++++++++++++++------ 1 file changed, 27 insertions(+), 10 deletions(-) diff --git a/test/hermes_test/model/types/test_ld_dict.py b/test/hermes_test/model/types/test_ld_dict.py index 6ac991b5..2fc0d5f3 100644 --- a/test/hermes_test/model/types/test_ld_dict.py +++ b/test/hermes_test/model/types/test_ld_dict.py @@ -56,6 +56,17 @@ def test_build_in_set(): assert di.data_dict == {"http://xmlns.com/foaf/0.1/name": [{"@list": [{"@value": "Manu Sporny"}, {"@value": "foo"}]}]} + di = ld_dict([{}], context={"schema": "https://schema.org/"}) + di["@type"] = "schema:Thing" + di["schema:result"] = {"@type": "schema:Action", "schema:name": "Test"} + assert di.data_dict == { + "@type": ["https://schema.org/Thing"], + "https://schema.org/result": [{ + "@type": ["https://schema.org/Action"], + "https://schema.org/name": [{"@value": "Test"}] + }] + } + di = ld_dict([{}], context={"schema": "https://schema.org/"}) di["@type"] = "schema:Thing" di["schema:result"] = {"@type": "schema:Action", "schema:error": {"@type": "schema:Thing", "schema:name": "foo"}} @@ -100,17 +111,14 @@ def test_build_in_contains(): di = ld_dict([{"http://xmlns.com/foaf/0.1/name": [{"@value": "Manu Sporny"}], "http://xmlns.com/foaf/0.1/homepage": [{"@id": "http://manu.sporny.org/"}]}], context={"xmlns": "http://xmlns.com/foaf/0.1/"}) - assert "http://xmlns.com/foaf/0.1/name" in di - assert "xmlns:homepage" in di - assert "xmlns:foo" not in di - assert "foo" not in di + assert "http://xmlns.com/foaf/0.1/name" in di and "xmlns:homepage" in di + assert "xmlns:foo" not in di and "homepage" not in di and "foo" not in di def test_get(): - di = ld_dict([{"name": [{"@value": "Manu Sporny"}], - "homepage": [{"@id": "http://manu.sporny.org/"}], - "foo": [{"foobar": "bar", "barfoo": "foo"}]}]) - assert di.get("name") == "Manu Sporny" + di = ld_dict([{"https://schema.org/name": [{"@value": "Manu Sporny"}]}], context={"schema": "https://schema.org/"}) + assert di.get("https://schema.org/name") == "Manu Sporny" + assert di.get("schema:name") == "Manu Sporny" assert di.get("bar", None) is None with pytest.raises(KeyError): di.get("bar") @@ -173,8 +181,7 @@ def test_ref(): di.update({"@id": "http://xmlns.com/foaf/0.1/homepage", "xmlns:name": "homepage"}) assert di.ref == {"@id": "http://xmlns.com/foaf/0.1/homepage"} - di = ld_dict([{}], context={"xmlns": "http://xmlns.com/foaf/0.1/"}) - di.update({"http://xmlns.com/foaf/0.1/name": "foo"}) + di = ld_dict([{"http://xmlns.com/foaf/0.1/name": "foo"}], context={"xmlns": "http://xmlns.com/foaf/0.1/"}) with pytest.raises(KeyError): di.ref @@ -226,6 +233,16 @@ def test_from_dict(): assert di.full_context == 3 * [{"schema": "https://schema.org/"}] assert di.context == {"schema": "https://schema.org/"} and di.key == "schema:error" and di.index is None + di = ld_dict.from_dict({"@type": "schema:Thing", "schema:name": "foo"}, parent=outer_di, key="schema:error") + assert di.data_dict == {"@type": ["https://schema.org/Thing"], "https://schema.org/name": [{"@value": "foo"}]} + assert di.full_context == 2 * [{"schema": "https://schema.org/"}] + assert di.context == [] and di.key == "schema:error" and di.index is None + + di = ld_dict.from_dict({"@context": {"schema": "https://schema.org/"}, "@type": "schema:Thing", "xmlns:name": "fo"}, + context={"xmlns": "http://xmlns.com/foaf/0.1/"}) + assert di["http://xmlns.com/foaf/0.1/name"] == di["xmlns:name"] == "fo" + assert di.context == {"schema": "https://schema.org/", "xmlns": "http://xmlns.com/foaf/0.1/"} + def test_is_ld_dict(): assert not any(ld_dict.is_ld_dict(item) for item in [{}, {"foo": "bar"}, {"@id": "foo"}]) From 1feddda4f25d6062339622b027953ff06e6b3b1f Mon Sep 17 00:00:00 2001 From: Stephan Druskat Date: Wed, 10 Sep 2025 00:22:45 +0200 Subject: [PATCH 102/247] Add basic implementation of API class --- src/hermes/model/__init__.py | 2 ++ src/hermes/model/api.py | 10 ++++++++++ 2 files changed, 12 insertions(+) create mode 100644 src/hermes/model/api.py diff --git a/src/hermes/model/__init__.py b/src/hermes/model/__init__.py index faf5a2f5..4a4bca25 100644 --- a/src/hermes/model/__init__.py +++ b/src/hermes/model/__init__.py @@ -1,3 +1,5 @@ # SPDX-FileCopyrightText: 2022 German Aerospace Center (DLR) # # SPDX-License-Identifier: Apache-2.0 + +from hermes.model.api import SoftwareMetadata diff --git a/src/hermes/model/api.py b/src/hermes/model/api.py new file mode 100644 index 00000000..6deba117 --- /dev/null +++ b/src/hermes/model/api.py @@ -0,0 +1,10 @@ +from hermes.model.types import ld_dict + +from hermes.model.types.ld_context import ALL_CONTEXTS + +class SoftwareMetadata(ld_dict): + + def __init__(self, data: dict=None, extra_vocabs: dict[str, str]=None) -> None: + ctx = ALL_CONTEXTS + [{**extra_vocabs}] if extra_vocabs is not None else ALL_CONTEXTS + super().__init__([data or {}], context=ctx) + From 74ba45df3890fd6b1cf38632961e7a6e39d47da3 Mon Sep 17 00:00:00 2001 From: Stephan Druskat Date: Wed, 10 Sep 2025 00:24:00 +0200 Subject: [PATCH 103/247] Test initialization of API class --- test/hermes_test/model/test_api.py | 39 ++++++++++++++++++++++++++++++ 1 file changed, 39 insertions(+) create mode 100644 test/hermes_test/model/test_api.py diff --git a/test/hermes_test/model/test_api.py b/test/hermes_test/model/test_api.py new file mode 100644 index 00000000..6b4b0d06 --- /dev/null +++ b/test/hermes_test/model/test_api.py @@ -0,0 +1,39 @@ +import pytest + +from hermes.model import SoftwareMetadata + +from hermes.model.types.ld_context import ALL_CONTEXTS + +EXTRA_VOCABS = {"foo": "https://bar.net/schema"} + +@pytest.fixture +def default_context(): + return {"@context": ALL_CONTEXTS} + +@pytest.fixture +def default_ld(): + return {"@context": ALL_CONTEXTS, "funding": "foo"} + +@pytest.fixture +def custom_context(): + return {"@context": ALL_CONTEXTS + [EXTRA_VOCABS]} + +@pytest.fixture +def custom_ld(): + return {"@context": ALL_CONTEXTS + [EXTRA_VOCABS], "funding": "foo"} + +@pytest.fixture +def none(): + return None + +@pytest.mark.parametrize("data,codemeta,full_context,expanded", [ + (SoftwareMetadata(), "default_context", "default_context", "none"), # FIXME: Replace none fixtures + (SoftwareMetadata({"funding": "foo"}), "default_ld", "none", "none"), # FIXME: Replace none fixtures + (SoftwareMetadata(extra_vocabs=EXTRA_VOCABS), "custom_context", "custom_context", "none"), # FIXME: Replace none fixtures + (SoftwareMetadata({"funding": "foo"}, extra_vocabs=EXTRA_VOCABS), "custom_ld", "none", "none"), # FIXME: Replace none fixtures +]) +def test_init(data, codemeta, full_context, expanded, request): + assert data.compact() == request.getfixturevalue(codemeta) + assert data.full_context == request.getfixturevalue(full_context)["@context"] + assert data.ld_value == request.getfixturevalue(expanded) + From e709d6fc7158c70caf7fd9828fd49dfca15b2903 Mon Sep 17 00:00:00 2001 From: notactuallyfinn Date: Wed, 10 Sep 2025 11:32:35 +0200 Subject: [PATCH 104/247] added fixme comments and two test cases and removed unused pytest marker --- src/hermes/model/types/ld_list.py | 3 +++ test/hermes_test/model/types/test_ld_list.py | 5 ++++- 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/src/hermes/model/types/ld_list.py b/src/hermes/model/types/ld_list.py index 1bfd2c5b..57a04dd4 100644 --- a/src/hermes/model/types/ld_list.py +++ b/src/hermes/model/types/ld_list.py @@ -15,8 +15,11 @@ class ld_list(ld_container): def __init__(self, data, *, parent=None, key=None, index=None, context=None): """ Create a new ld_list.py container. + # FIXME: there is no parameter container :param container: The container type for this list. """ + # FIXME: A set container does not contain "@set" in the expected data format (expanded json ld) + # Instead it is just a list of dicts and therefor would raise a ValueError here (and fail ld_list.is_ld_list) super().__init__(data, parent=parent, key=key, index=index, context=context) diff --git a/test/hermes_test/model/types/test_ld_list.py b/test/hermes_test/model/types/test_ld_list.py index 95fbb85f..e6ec8f53 100644 --- a/test/hermes_test/model/types/test_ld_list.py +++ b/test/hermes_test/model/types/test_ld_list.py @@ -12,13 +12,16 @@ def test_undefined_list(): + with pytest.raises(ValueError): + ld_list([{}]) with pytest.raises(ValueError): ld_list([{"spam": [{"@value": "bacon"}]}]) with pytest.raises(ValueError): ld_list([{"@list": ["a", "b"], "@set": ["foo", "bar"]}]) + with pytest.raises(ValueError): + ld_list([{"@list": ["a", "b"]}, {"@set": ["foo", "bar"]}]) -@pytest.mark.dev def test_list_basics(): li = ld_list([{"@list": [0], "spam": [{"@value": "bacon"}]}]) assert li._data == [{"@list": [0], "spam": [{"@value": "bacon"}]}] From 88fd4092ac21955dce92bd8165879cd3f83e67c3 Mon Sep 17 00:00:00 2001 From: notactuallyfinn Date: Wed, 10 Sep 2025 11:38:15 +0200 Subject: [PATCH 105/247] adjusted tests and ld_list as ld_list.itemlist should be expanded json and not partially ld_container --- src/hermes/model/types/ld_list.py | 4 ++-- test/hermes_test/model/types/test_ld_list.py | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/src/hermes/model/types/ld_list.py b/src/hermes/model/types/ld_list.py index 57a04dd4..5ff4cc6d 100644 --- a/src/hermes/model/types/ld_list.py +++ b/src/hermes/model/types/ld_list.py @@ -36,7 +36,7 @@ def __getitem__(self, index): if isinstance(index, slice): return [self[i] for i in [*range(len(self))][index]] - item = self._to_python(self.key, temp if isinstance(temp := self.item_list[index], ld_container) else [temp]) + item = self._to_python(self.key, [self.item_list[index]]) if isinstance(item, ld_container): item.index = index return item @@ -57,7 +57,7 @@ def __len__(self): def __iter__(self): for index, value in enumerate(self.item_list): - item = self._to_python(self.key, value if isinstance(value, ld_container) else [value]) + item = self._to_python(self.key, [value]) if isinstance(item, ld_container): item.index = index yield item diff --git a/test/hermes_test/model/types/test_ld_list.py b/test/hermes_test/model/types/test_ld_list.py index e6ec8f53..3bc45c3b 100644 --- a/test/hermes_test/model/types/test_ld_list.py +++ b/test/hermes_test/model/types/test_ld_list.py @@ -35,7 +35,7 @@ def test_build_in_get(): assert li[:2] == ["foo", "bar"] and li[1:-1] == ["bar"] assert li[::2] == ["foo", "foobar"] and li[::-1] == ["foobar", "bar", "foo"] - li = ld_list([{"@list": [ld_dict([{"@type": "A", "schema:name": "a"}])]}]) + li = ld_list([{"@list": [{"@type": "A", "schema:name": "a"}]}]) assert isinstance(li[0], ld_dict) and li[0].data_dict == {"@type": "A", "schema:name": "a"} and li[0].index == 0 @@ -65,7 +65,7 @@ def test_build_in_len(): def test_build_in_iter(): - li = ld_list([{"@list": [{"@value": "foo"}, ld_dict([{"@type": "A", "schema:name": "a"}])]}], + li = ld_list([{"@list": [{"@value": "foo"}, {"@type": "A", "schema:name": "a"}]}], key="https://schema.org/name", context={"schema": "https://schema.org/"}) li = [val for val in li] assert li[0] == "foo" and li[1].data_dict == {"@type": "A", "schema:name": "a"} and li[1].index == 1 From 63de367bf4b1d58120ed31b9f85901c78bdb65d8 Mon Sep 17 00:00:00 2001 From: notactuallyfinn Date: Wed, 10 Sep 2025 13:28:54 +0200 Subject: [PATCH 106/247] added more fixme comments and test cases --- src/hermes/model/types/ld_list.py | 14 ++++++++++++ test/hermes_test/model/types/test_ld_list.py | 24 +++++++++++++++++--- 2 files changed, 35 insertions(+), 3 deletions(-) diff --git a/src/hermes/model/types/ld_list.py b/src/hermes/model/types/ld_list.py index 5ff4cc6d..75356b1d 100644 --- a/src/hermes/model/types/ld_list.py +++ b/src/hermes/model/types/ld_list.py @@ -42,6 +42,18 @@ def __getitem__(self, index): return item def __setitem__(self, index, value): + # FIXME: what should your_ld_list[index] = [{"@type": "foo", "name": "bar"}] mean? + # set your_ld_list[index] to the dict {"@type": "foo", "name": "bar"} given in expanded form or + # set your_ld_list[index] to the list [{"@type": "foo", "name": "bar"}] given in non expanded form or + # set your_ld_list[index] to the set [{"@type": "foo", "name": "bar"}] given in expanded form + # (ld_list.fromlist([{"@type": "foo", "name": "bar"}]) defaults to container type list + # which would have the object as an expanded form whereas the expanded form of a list would be + # ["@list": [{"@type": "foo", "name": "bar"}]] + # This is relevent because nested sets get unnested when being expanded and lists not. + # Moreover a set inside a list gets automaticaly converted to a list when expanded) + + # FIXME: what happens when a ld_list is put inside another also depends on their container types + if not isinstance(index, slice): self.item_list[index] = val[0] if isinstance(val := self._to_expanded_json(self.key, value), list) else val return @@ -96,10 +108,12 @@ def to_python(self): @classmethod def is_ld_list(cls, ld_value): + # FIXME: every python list that contains at least one dict can be considerd a set in expanded json form return cls.is_ld_node(ld_value) and cls.is_container(ld_value[0]) @classmethod def is_container(cls, value): + # FIXME: "@set" will never be inside a dictionary of an expanded json ld object return isinstance(value, dict) and any(ct in value for ct in cls.container_types) @classmethod diff --git a/test/hermes_test/model/types/test_ld_list.py b/test/hermes_test/model/types/test_ld_list.py index 3bc45c3b..b3ce093d 100644 --- a/test/hermes_test/model/types/test_ld_list.py +++ b/test/hermes_test/model/types/test_ld_list.py @@ -35,8 +35,10 @@ def test_build_in_get(): assert li[:2] == ["foo", "bar"] and li[1:-1] == ["bar"] assert li[::2] == ["foo", "foobar"] and li[::-1] == ["foobar", "bar", "foo"] - li = ld_list([{"@list": [{"@type": "A", "schema:name": "a"}]}]) + li = ld_list([{"@list": [{"@type": "A", "schema:name": "a"}, {"@list": [{"@type": "A", "schema:name": "a"}]}]}]) assert isinstance(li[0], ld_dict) and li[0].data_dict == {"@type": "A", "schema:name": "a"} and li[0].index == 0 + assert isinstance(li[1], ld_list) and li[1].item_list == [{"@type": "A", "schema:name": "a"}] and li[1].index == 1 + assert li[1].key == li.key def test_build_in_set(): @@ -57,6 +59,14 @@ def test_build_in_set(): li[::2] = "foo" with pytest.raises(TypeError): li[:2] = 1 + li[0] = ld_dict([{"@type": "schema:Thing", "schema:name": "a"}], parent=li, key=li.key) + assert isinstance(li[0], ld_dict) and li[0].data_dict == {"@type": "schema:Thing", "schema:name": "a"} + li[0] = {"@type": "schema:Thing", "schema:name": "a"} + assert isinstance(li[0], ld_dict) and li[0].data_dict == {"@type": "schema:Thing", "schema:name": "a"} + li[0] = ld_list([{"@set": [{"@type": "schema:Thing", "schema:name": "a"}]}], parent=li, key=li.key) + assert isinstance(li[0], ld_list) and li[0].item_list == [{"@type": "schema:Thing", "schema:name": "a"}] + li[0] = {"@set": [{"@type": "schema:Thing", "schema:name": "a"}]} + assert isinstance(li[0], ld_list) and li[0].item_list == [{"@type": "schema:Thing", "schema:name": "a"}] def test_build_in_len(): @@ -65,10 +75,12 @@ def test_build_in_len(): def test_build_in_iter(): - li = ld_list([{"@list": [{"@value": "foo"}, {"@type": "A", "schema:name": "a"}]}], + li = ld_list([{"@list": [{"@value": "foo"}, {"@type": "A", "schema:name": "a"}, {"@list": [{"@value": "bar"}]}]}], key="https://schema.org/name", context={"schema": "https://schema.org/"}) li = [val for val in li] assert li[0] == "foo" and li[1].data_dict == {"@type": "A", "schema:name": "a"} and li[1].index == 1 + assert isinstance(li[2], ld_list) and li[2].item_list == [{"@value": "bar"}] and li[2].index == 2 + assert li[2].key == "https://schema.org/name" def test_append(): @@ -78,7 +90,13 @@ def test_append(): li.append("bar") assert li[0:2] == ["foo", "bar"] and li.item_list[1] == {"@value": "bar"} and len(li) == 2 li.append(ld_dict([{"@type": "A", "schema:name": "a"}])) - assert li[-1].data_dict == {"@type": "A", "schema:name": "a"} and len(li) == 3 + assert li.item_list[2] == {"@type": "A", "schema:name": "a"} and len(li) == 3 + li.append({"@type": "A", "schema:name": "a"}) + assert li.item_list[2] == li.item_list[3] + li.append(ld_list([{"@list": [{"@type": "A", "schema:name": "a"}]}])) + li.append([{"@type": "A", "schema:name": "a"}]) + li.append(2 * [{"@type": "A", "schema:name": "a"}]) + assert 2 * li.item_list[4] == 2 * li.item_list[5] == li.item_list[6] def test_extend(): From 99571c5fe16797adc8bdc4a90b26b3d3059d2be3 Mon Sep 17 00:00:00 2001 From: notactuallyfinn Date: Thu, 11 Sep 2025 13:30:42 +0200 Subject: [PATCH 107/247] added (all) missing kw args to __init__.py (#364) --- src/hermes/model/types/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/hermes/model/types/__init__.py b/src/hermes/model/types/__init__.py index 6fff24b2..67d2acc0 100644 --- a/src/hermes/model/types/__init__.py +++ b/src/hermes/model/types/__init__.py @@ -33,7 +33,7 @@ (ld_dict.is_ld_dict, dict(ld_container=ld_dict)), # Expand and access JSON data - (ld_container.is_json_id, dict(python=lambda c: c["@id"], expanded_json=lambda c, **_: [c])), + (ld_container.is_json_id, dict(python=lambda c, **_: c["@id"], expanded_json=lambda c, **_: [c])), (ld_container.is_typed_json_value, dict(python=ld_container.typed_ld_to_py)), (ld_container.is_json_value, dict(python=lambda c, **_: c["@value"], expanded_json=lambda c, **_: [c])), (ld_list.is_container, dict(ld_container=lambda c, **kw: ld_list([c], **kw))), From 4dce65088299ee2638f3852c987f2052e386fffd Mon Sep 17 00:00:00 2001 From: notactuallyfinn Date: Fri, 12 Sep 2025 12:15:34 +0200 Subject: [PATCH 108/247] added fixme string --- src/hermes/model/types/ld_container.py | 1 + 1 file changed, 1 insertion(+) diff --git a/src/hermes/model/types/ld_container.py b/src/hermes/model/types/ld_container.py index b37a8019..833d03ed 100644 --- a/src/hermes/model/types/ld_container.py +++ b/src/hermes/model/types/ld_container.py @@ -186,6 +186,7 @@ def is_typed_json_value(cls, ld_value): @classmethod def typed_ld_to_py(cls, data, **kwargs): + # FIXME: dates are not returned as datetime/ date/ time but as string ld_value = data[0]['@value'] return ld_value From 29f0a8a3039da675598a2deface800674926a37a Mon Sep 17 00:00:00 2001 From: notactuallyfinn Date: Fri, 12 Sep 2025 14:49:46 +0200 Subject: [PATCH 109/247] removed false/ unnecessary fixme --- src/hermes/model/types/ld_list.py | 18 ------------------ 1 file changed, 18 deletions(-) diff --git a/src/hermes/model/types/ld_list.py b/src/hermes/model/types/ld_list.py index 75356b1d..5ac4d021 100644 --- a/src/hermes/model/types/ld_list.py +++ b/src/hermes/model/types/ld_list.py @@ -83,24 +83,6 @@ def extend(self, value): self.append(item) def to_python(self): - # FIXME: - # why is item not converted to it's python equivalent? - # - # ld_list([{"@list": [{"@value": "a"}]}])[0] - # == "a" - # ld_list([{"@list": [{"@value": "a"}]}]).to_python()[0] - # == {"@value": "a"} why not "a"? - # ld_list([{"@list": [ld_dict([{"@type": "Person", "name": "a"}])]}])[0] - # == ld_dict([{"@type": "Person", "name": "a"}]) - # ld_list([{"@list": [ld_dict([{"@type": "Person", "name": "a"}])]}]).to_python()[0] - # == {"@type": "Person", "name": "a"} - # - # ld_dict([{"name": [{"@value": "a"}]}])["name"] == "a" - # ld_dict([{"name": [{"@value": "a"}]}]).to_python()["name"] == "a" why not {"@value": "a"}? - # ld_dict([{"person": [ld_dict([{"@type": "Person", "name": "a"}])]}])["person"] - # == ld_dict([{"@type": "Person", "name": "a"}]) - # ld_dict([{"person": [ld_dict([{"@type": "Person", "name": "a"}])]}]).to_python()["person"] - # == {"@type": "Person", "name": "a"} return [ item.to_python() if isinstance(item, ld_container) else item for item in self From 79575b8845123c4aabc8b35b6d6ab511f9dfb81f Mon Sep 17 00:00:00 2001 From: Stephan Druskat Date: Wed, 17 Sep 2025 18:03:51 +0200 Subject: [PATCH 110/247] Test API object initiatlization with and without data --- test/hermes_test/model/test_api.py | 33 +++++++++++------------------- 1 file changed, 12 insertions(+), 21 deletions(-) diff --git a/test/hermes_test/model/test_api.py b/test/hermes_test/model/test_api.py index 6b4b0d06..deb00d74 100644 --- a/test/hermes_test/model/test_api.py +++ b/test/hermes_test/model/test_api.py @@ -10,30 +10,21 @@ def default_context(): return {"@context": ALL_CONTEXTS} -@pytest.fixture -def default_ld(): - return {"@context": ALL_CONTEXTS, "funding": "foo"} - @pytest.fixture def custom_context(): return {"@context": ALL_CONTEXTS + [EXTRA_VOCABS]} -@pytest.fixture -def custom_ld(): - return {"@context": ALL_CONTEXTS + [EXTRA_VOCABS], "funding": "foo"} - -@pytest.fixture -def none(): - return None - -@pytest.mark.parametrize("data,codemeta,full_context,expanded", [ - (SoftwareMetadata(), "default_context", "default_context", "none"), # FIXME: Replace none fixtures - (SoftwareMetadata({"funding": "foo"}), "default_ld", "none", "none"), # FIXME: Replace none fixtures - (SoftwareMetadata(extra_vocabs=EXTRA_VOCABS), "custom_context", "custom_context", "none"), # FIXME: Replace none fixtures - (SoftwareMetadata({"funding": "foo"}, extra_vocabs=EXTRA_VOCABS), "custom_ld", "none", "none"), # FIXME: Replace none fixtures +@pytest.mark.parametrize("metadata,full_context", [ + (SoftwareMetadata(), "default_context"), + (SoftwareMetadata(extra_vocabs=EXTRA_VOCABS), "custom_context"), ]) -def test_init(data, codemeta, full_context, expanded, request): - assert data.compact() == request.getfixturevalue(codemeta) - assert data.full_context == request.getfixturevalue(full_context)["@context"] - assert data.ld_value == request.getfixturevalue(expanded) +def test_init_no_data(metadata, full_context, request): + assert metadata.full_context == request.getfixturevalue(full_context)["@context"] +@pytest.mark.parametrize("metadata,full_context", [ + (SoftwareMetadata({"funding": "foo"}), "default_context"), + (SoftwareMetadata({"funding": "foo"}, extra_vocabs=EXTRA_VOCABS), "custom_context"), +]) +def test_init_with_data(metadata, full_context, request): + assert metadata.full_context == request.getfixturevalue(full_context)["@context"] + assert metadata["funding"] == "foo" From 69f6a245dd1b3c52c4a77fd62a30de8c73ee02a2 Mon Sep 17 00:00:00 2001 From: Stephan Druskat Date: Wed, 17 Sep 2025 18:09:03 +0200 Subject: [PATCH 111/247] Test API object initialization with nested object --- test/hermes_test/model/test_api.py | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/test/hermes_test/model/test_api.py b/test/hermes_test/model/test_api.py index deb00d74..4ccf29ea 100644 --- a/test/hermes_test/model/test_api.py +++ b/test/hermes_test/model/test_api.py @@ -6,6 +6,7 @@ EXTRA_VOCABS = {"foo": "https://bar.net/schema"} + @pytest.fixture def default_context(): return {"@context": ALL_CONTEXTS} @@ -14,6 +15,7 @@ def default_context(): def custom_context(): return {"@context": ALL_CONTEXTS + [EXTRA_VOCABS]} + @pytest.mark.parametrize("metadata,full_context", [ (SoftwareMetadata(), "default_context"), (SoftwareMetadata(extra_vocabs=EXTRA_VOCABS), "custom_context"), @@ -21,6 +23,7 @@ def custom_context(): def test_init_no_data(metadata, full_context, request): assert metadata.full_context == request.getfixturevalue(full_context)["@context"] + @pytest.mark.parametrize("metadata,full_context", [ (SoftwareMetadata({"funding": "foo"}), "default_context"), (SoftwareMetadata({"funding": "foo"}, extra_vocabs=EXTRA_VOCABS), "custom_context"), @@ -28,3 +31,14 @@ def test_init_no_data(metadata, full_context, request): def test_init_with_data(metadata, full_context, request): assert metadata.full_context == request.getfixturevalue(full_context)["@context"] assert metadata["funding"] == "foo" + + +def test_init_full_object(): + my_software = {"foo:softwareName": "MySoftware", "foo:egg": "spam", "foo:ham": "eggs", + "maintainer": {"name": "Some Name", "email": "maintainer@example.com"}, + "author": [{"name": "Foo"}, {"name": "Bar"}]} + data = SoftwareMetadata(my_software, extra_vocabs={"foo": "https://foo.bar"}) + assert data["foo:softwareName"] == "MySoftware" + assert data["maintainer"]["name"] == "Some Name" + for author in data["author"]: + assert author["name"] in ["Foo", "Bar"] From 8e1a38bc942ebccf15e91c41e489560e7790de7f Mon Sep 17 00:00:00 2001 From: Stephan Druskat Date: Wed, 17 Sep 2025 18:11:35 +0200 Subject: [PATCH 112/247] Test appending objects to model via API --- test/hermes_test/model/test_api.py | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) diff --git a/test/hermes_test/model/test_api.py b/test/hermes_test/model/test_api.py index 4ccf29ea..29ed3580 100644 --- a/test/hermes_test/model/test_api.py +++ b/test/hermes_test/model/test_api.py @@ -33,7 +33,7 @@ def test_init_with_data(metadata, full_context, request): assert metadata["funding"] == "foo" -def test_init_full_object(): +def test_init_nested_object(): my_software = {"foo:softwareName": "MySoftware", "foo:egg": "spam", "foo:ham": "eggs", "maintainer": {"name": "Some Name", "email": "maintainer@example.com"}, "author": [{"name": "Foo"}, {"name": "Bar"}]} @@ -42,3 +42,15 @@ def test_init_full_object(): assert data["maintainer"]["name"] == "Some Name" for author in data["author"]: assert author["name"] in ["Foo", "Bar"] + + +def test_append(): + data = SoftwareMetadata(extra_vocabs={"foo": "https://foo.bar"}) + author1 = {"name": "Foo"} + data["author"] = author1 + author2 = {"name": "Bar"} + data["author"].append(author2) + assert len(data["author"]) == 2 + assert data["author"][0]["name"] == "Foo" + assert data["author"][1]["name"] == "Bar" + From b65989e7bb91e1e604f6df61b0bee995b259bb56 Mon Sep 17 00:00:00 2001 From: Stephan Druskat Date: Wed, 17 Sep 2025 18:35:42 +0200 Subject: [PATCH 113/247] Test model building via API object - Adds tests for the expected behaviour as described in https://github.com/softwarepub/hermes/issues/423#issuecomment-3271450262 --- test/hermes_test/model/test_api.py | 19 ++++++++++++++++++- 1 file changed, 18 insertions(+), 1 deletion(-) diff --git a/test/hermes_test/model/test_api.py b/test/hermes_test/model/test_api.py index 29ed3580..7b40bc35 100644 --- a/test/hermes_test/model/test_api.py +++ b/test/hermes_test/model/test_api.py @@ -45,12 +45,29 @@ def test_init_nested_object(): def test_append(): - data = SoftwareMetadata(extra_vocabs={"foo": "https://foo.bar"}) + data = SoftwareMetadata() author1 = {"name": "Foo"} data["author"] = author1 + assert type(data["author"]) is list author2 = {"name": "Bar"} data["author"].append(author2) assert len(data["author"]) == 2 assert data["author"][0]["name"] == "Foo" assert data["author"][1]["name"] == "Bar" + +def test_iterative_assignment(): + # This tests iterative assignments/traversals to edit/appending values + # This requires SoftwareMetadata.__getitem__ to return a plain dict. SoftwareMetadata.__setitem__ can then + # implement the isinstanceof checks that @notactuallyfinn suggested. + data = SoftwareMetadata(extra_vocabs={"foo": "https://foo.bar"}) + data["author"] = {"name": "Foo"} + # Look, a squirrel! + authors = data["author"] + assert type(authors) is list + author1 = authors[0] + author1["email"] = "author@example.com" + authors[0] = author1 + assert len(authors) == 1 + authors.append({"name": "Bar", "email": "author2@example.com"}) + data["author"] = authors From a08eaf2fe75b64051820e2ccd7f662c13b824811 Mon Sep 17 00:00:00 2001 From: "Kernchen, Sophie" Date: Thu, 18 Sep 2025 11:07:20 +0200 Subject: [PATCH 114/247] Correct context parameter to list --- src/hermes/model/types/ld_dict.py | 2 +- test/hermes_test/model/types/test_ld_dict.py | 38 ++++++++++---------- 2 files changed, 20 insertions(+), 20 deletions(-) diff --git a/src/hermes/model/types/ld_dict.py b/src/hermes/model/types/ld_dict.py index 49461894..927007b3 100644 --- a/src/hermes/model/types/ld_dict.py +++ b/src/hermes/model/types/ld_dict.py @@ -89,7 +89,7 @@ def from_dict(cls, value, *, parent=None, key=None, context=None, ld_type=None): if parent is None and data_context: ld_data["@context"] = data_context elif parent is not None: - full_context[:0] = [temp] if isinstance(temp := parent.full_context, dict) else temp + full_context[:0] = parent.full_context ld_value = cls.ld_proc.expand(ld_data, {"expandContext": full_context, "documentLoader": bundled_loader}) ld_value = cls(ld_value, parent=parent, key=key, context=data_context) diff --git a/test/hermes_test/model/types/test_ld_dict.py b/test/hermes_test/model/types/test_ld_dict.py index 2fc0d5f3..15aa5f2d 100644 --- a/test/hermes_test/model/types/test_ld_dict.py +++ b/test/hermes_test/model/types/test_ld_dict.py @@ -36,27 +36,27 @@ def test_build_in_get(): di["bar"] di = ld_dict([{"http://xmlns.com/foaf/0.1/name": [{"@value": "Manu Sporny"}]}], - context={"xmlns": "http://xmlns.com/foaf/0.1/"}) + context=[{"xmlns": "http://xmlns.com/foaf/0.1/"}]) assert di["xmlns:name"] == "Manu Sporny" def test_build_in_set(): - di = ld_dict([{}], context={"xmlns": "http://xmlns.com/foaf/0.1/"}) + di = ld_dict([{}], context=[{"xmlns": "http://xmlns.com/foaf/0.1/"}]) di["http://xmlns.com/foaf/0.1/name"] = "Manu Sporny" assert di.data_dict == {"http://xmlns.com/foaf/0.1/name": [{"@value": "Manu Sporny"}]} - di = ld_dict([{}], context={"xmlns": "http://xmlns.com/foaf/0.1/"}) + di = ld_dict([{}], context=[{"xmlns": "http://xmlns.com/foaf/0.1/"}]) di["xmlns:name"] = "Manu Sporny" di["xmlns:homepage"] = {"@id": "http://manu.sporny.org/"} assert di.data_dict == {"http://xmlns.com/foaf/0.1/name": [{"@value": "Manu Sporny"}], "http://xmlns.com/foaf/0.1/homepage": [{"@id": "http://manu.sporny.org/"}]} - di = ld_dict([{}], context={"xmlns": "http://xmlns.com/foaf/0.1/"}) + di = ld_dict([{}], context=[{"xmlns": "http://xmlns.com/foaf/0.1/"}]) di["xmlns:name"] = ["Manu Sporny", "foo"] assert di.data_dict == {"http://xmlns.com/foaf/0.1/name": [{"@list": [{"@value": "Manu Sporny"}, {"@value": "foo"}]}]} - di = ld_dict([{}], context={"schema": "https://schema.org/"}) + di = ld_dict([{}], context=[{"schema": "https://schema.org/"}]) di["@type"] = "schema:Thing" di["schema:result"] = {"@type": "schema:Action", "schema:name": "Test"} assert di.data_dict == { @@ -67,7 +67,7 @@ def test_build_in_set(): }] } - di = ld_dict([{}], context={"schema": "https://schema.org/"}) + di = ld_dict([{}], context=[{"schema": "https://schema.org/"}]) di["@type"] = "schema:Thing" di["schema:result"] = {"@type": "schema:Action", "schema:error": {"@type": "schema:Thing", "schema:name": "foo"}} assert di.data_dict == { @@ -81,7 +81,7 @@ def test_build_in_set(): }] } - di = ld_dict([{}], context={"schema": "https://schema.org/"}) + di = ld_dict([{}], context=[{"schema": "https://schema.org/"}]) di["@type"] = "schema:Thing" di["schema:result"] = {"@type": "schema:Action", "schema:error": {"@type": "schema:Thing", "schema:name": ["foo", "bar"]}} @@ -101,7 +101,7 @@ def test_build_in_set(): def test_build_in_delete(): di = ld_dict([{"http://xmlns.com/foaf/0.1/name": [{"@value": "Manu Sporny"}], "http://xmlns.com/foaf/0.1/homepage": [{"@id": "http://manu.sporny.org/"}]}], - context={"xmlns": "http://xmlns.com/foaf/0.1/"}) + context=[{"xmlns": "http://xmlns.com/foaf/0.1/"}]) del di["http://xmlns.com/foaf/0.1/name"] del di["xmlns:homepage"] assert di.data_dict == {} @@ -110,13 +110,13 @@ def test_build_in_delete(): def test_build_in_contains(): di = ld_dict([{"http://xmlns.com/foaf/0.1/name": [{"@value": "Manu Sporny"}], "http://xmlns.com/foaf/0.1/homepage": [{"@id": "http://manu.sporny.org/"}]}], - context={"xmlns": "http://xmlns.com/foaf/0.1/"}) + context=[{"xmlns": "http://xmlns.com/foaf/0.1/"}]) assert "http://xmlns.com/foaf/0.1/name" in di and "xmlns:homepage" in di assert "xmlns:foo" not in di and "homepage" not in di and "foo" not in di def test_get(): - di = ld_dict([{"https://schema.org/name": [{"@value": "Manu Sporny"}]}], context={"schema": "https://schema.org/"}) + di = ld_dict([{"https://schema.org/name": [{"@value": "Manu Sporny"}]}], context=[{"schema": "https://schema.org/"}]) assert di.get("https://schema.org/name") == "Manu Sporny" assert di.get("schema:name") == "Manu Sporny" assert di.get("bar", None) is None @@ -127,7 +127,7 @@ def test_get(): def test_update(): di = ld_dict([{"http://xmlns.com/foaf/0.1/name": [{"@value": "Manu Sporny"}], "http://xmlns.com/foaf/0.1/homepage": [{"@id": "http://manu.sporny.org/"}]}], - context={"xmlns": "http://xmlns.com/foaf/0.1/"}) + context=[{"xmlns": "http://xmlns.com/foaf/0.1/"}]) di.update({}) assert di.data_dict == {"http://xmlns.com/foaf/0.1/name": [{"@value": "Manu Sporny"}], "http://xmlns.com/foaf/0.1/homepage": [{"@id": "http://manu.sporny.org/"}]} @@ -143,29 +143,29 @@ def test_update(): def test_keys(): - di = ld_dict([{}], context={"xmlns": "http://xmlns.com/foaf/0.1/"}) + di = ld_dict([{}], context=[{"xmlns": "http://xmlns.com/foaf/0.1/"}]) di.update({"http://xmlns.com/foaf/0.1/name": "Manu Sporny", "xmlns:homepage": {"@id": "http://manu.sporny.org/"}}) assert {*di.keys()} == {"http://xmlns.com/foaf/0.1/name", "http://xmlns.com/foaf/0.1/homepage"} def test_compact_keys(): - di = ld_dict([{}], context={"xmlns": "http://xmlns.com/foaf/0.1/"}) + di = ld_dict([{}], context=[{"xmlns": "http://xmlns.com/foaf/0.1/"}]) di.update({"http://xmlns.com/foaf/0.1/name": "Manu Sporny", "xmlns:homepage": {"@id": "http://manu.sporny.org/"}}) assert {*di.compact_keys()} == {"xmlns:name", "xmlns:homepage"} - di = ld_dict([{}], context={"homepage": "http://xmlns.com/foaf/0.1/homepage"}) + di = ld_dict([{}], context=[{"homepage": "http://xmlns.com/foaf/0.1/homepage"}]) di.update({"http://xmlns.com/foaf/0.1/name": "Manu Sporny", "http://xmlns.com/foaf/0.1/homepage": {"@id": "http://manu.sporny.org/"}}) assert {*di.compact_keys()} == {"http://xmlns.com/foaf/0.1/name", "homepage"} - di = ld_dict([{}], context={"xmls": "http://xmlns.com/foaf/0.1/", "homepage": "http://xmlns.com/foaf/0.1/homepage"}) + di = ld_dict([{}], context=[{"xmls": "http://xmlns.com/foaf/0.1/", "homepage": "http://xmlns.com/foaf/0.1/homepage"}]) di.update({"http://xmlns.com/foaf/0.1/name": "Manu Sporny", "http://xmlns.com/foaf/0.1/homepage": {"@id": "http://manu.sporny.org/"}}) assert {*di.compact_keys()} == {"xmls:name", "homepage"} def test_items(): - di = ld_dict([{}], context={"xmlns": "http://xmlns.com/foaf/0.1/"}) + di = ld_dict([{}], context=[{"xmlns": "http://xmlns.com/foaf/0.1/"}]) inner_di = ld_dict([{}], parent=di) inner_di.update({"xmlns:foobar": "bar", "http://xmlns.com/foaf/0.1/barfoo": {"@id": "foo"}}) di.update({"http://xmlns.com/foaf/0.1/name": "foo", "xmlns:homepage": {"@id": "bar"}, "xmlns:foo": inner_di}) @@ -177,7 +177,7 @@ def test_items(): def test_ref(): - di = ld_dict([{}], context={"xmlns": "http://xmlns.com/foaf/0.1/"}) + di = ld_dict([{}], context=[{"xmlns": "http://xmlns.com/foaf/0.1/"}]) di.update({"@id": "http://xmlns.com/foaf/0.1/homepage", "xmlns:name": "homepage"}) assert di.ref == {"@id": "http://xmlns.com/foaf/0.1/homepage"} @@ -187,7 +187,7 @@ def test_ref(): def test_to_python(): - di = ld_dict([{}], context={"xmlns": "http://xmlns.com/foaf/0.1/"}) + di = ld_dict([{}], context=[{"xmlns": "http://xmlns.com/foaf/0.1/"}]) inner_di = ld_dict([{}], parent=di) inner_di.update({"xmlns:foobar": "bar", "http://xmlns.com/foaf/0.1/barfoo": {"@id": "foo"}}) di.update({"http://xmlns.com/foaf/0.1/name": "foo", "xmlns:homepage": {"@id": "bar"}, "xmlns:foo": inner_di}) @@ -214,7 +214,7 @@ def test_from_dict(): di = ld_dict.from_dict({"@context": {"schema": "https://schema.org/"}, "@type": "schema:Thing", "@id": "foo"}) assert di.data_dict == {"@type": ["https://schema.org/Thing"], "@id": "foo"} - assert di.context == di.full_context == {"schema": "https://schema.org/"} + assert di.context == di.full_context == [{"schema": "https://schema.org/"}] assert di.index is di.key is di.parent is None outer_di = di From c6fab45d01391b20c3ab5e057523fb2102576128 Mon Sep 17 00:00:00 2001 From: "Kernchen, Sophie" Date: Wed, 24 Sep 2025 23:53:45 +0200 Subject: [PATCH 115/247] Fixes test input format and describes all problems --- pyproject.toml | 1 - src/hermes/model/types/ld_container.py | 4 +- src/hermes/model/types/ld_dict.py | 2 +- test/hermes_test/model/types/test_ld_dict.py | 85 ++++++++++++++++---- 4 files changed, 72 insertions(+), 20 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index df646b78..a42543b9 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -39,7 +39,6 @@ dependencies = [ "pydantic-settings>=2.1.0, <3.0.0", "requests-oauthlib>=2.0.0, <3.0.0", "pynacl>=1.5.0, <2.0.0", - "rdflib (>=7.1.4,<8.0.0)", ] requires-python = ">=3.10, <4.0.0" diff --git a/src/hermes/model/types/ld_container.py b/src/hermes/model/types/ld_container.py index 833d03ed..62716fe9 100644 --- a/src/hermes/model/types/ld_container.py +++ b/src/hermes/model/types/ld_container.py @@ -81,6 +81,7 @@ def ld_value(self): return self._data def _to_python(self, full_iri, ld_value): + # FIXME: #434 dates are not returned as datetime/ date/ time but as string if full_iri == "@id": value = ld_value elif full_iri == "@type": @@ -151,6 +152,7 @@ def merge_to_list(cls, *args): return [] head, *tail = args + # FIXME: #435 Context is not merged if isinstance(head, list): return [*head, *cls.merge_to_list(*tail)] else: @@ -186,7 +188,7 @@ def is_typed_json_value(cls, ld_value): @classmethod def typed_ld_to_py(cls, data, **kwargs): - # FIXME: dates are not returned as datetime/ date/ time but as string + # FIXME: #434 dates are not returned as datetime/ date/ time but as string ld_value = data[0]['@value'] return ld_value diff --git a/src/hermes/model/types/ld_dict.py b/src/hermes/model/types/ld_dict.py index 927007b3..8e8cc75f 100644 --- a/src/hermes/model/types/ld_dict.py +++ b/src/hermes/model/types/ld_dict.py @@ -98,7 +98,7 @@ def from_dict(cls, value, *, parent=None, key=None, context=None, ld_type=None): @classmethod def is_ld_dict(cls, ld_value): - # FIXME: maybe rename to is_expanded_ld_dict + # FIXME: #435 maybe rename to is_expanded_ld_dict # because this functions tests if ld_value could be an expanded json ld dict # (is_ld_node returns False for every compacted json ld dictionary) return cls.is_ld_node(ld_value) and cls.is_json_dict(ld_value[0]) diff --git a/test/hermes_test/model/types/test_ld_dict.py b/test/hermes_test/model/types/test_ld_dict.py index 15aa5f2d..b47c26ee 100644 --- a/test/hermes_test/model/types/test_ld_dict.py +++ b/test/hermes_test/model/types/test_ld_dict.py @@ -51,14 +51,21 @@ def test_build_in_set(): assert di.data_dict == {"http://xmlns.com/foaf/0.1/name": [{"@value": "Manu Sporny"}], "http://xmlns.com/foaf/0.1/homepage": [{"@id": "http://manu.sporny.org/"}]} + # FIXME: TypeError for nested ld_container or mind in api di = ld_dict([{}], context=[{"xmlns": "http://xmlns.com/foaf/0.1/"}]) - di["xmlns:name"] = ["Manu Sporny", "foo"] + di2 = ld_list([{"@list": [{"@value": "Manu Sporny"}, {"@value": "foo"}]}], parent=di, + key="http://xmlns.com/foaf/0.1/name") + + di["xmlns:name"] = di2 assert di.data_dict == {"http://xmlns.com/foaf/0.1/name": [{"@list": [{"@value": "Manu Sporny"}, {"@value": "foo"}]}]} - + # FIXME: context is not applied on value side di = ld_dict([{}], context=[{"schema": "https://schema.org/"}]) + di2 = ld_dict([{"@type": ["https://schema.org/Action"], "https://schema.org/name": [{"@value": "Test"}]}], + context=[{"schema": "https://schema.org/"}], parent=di, key="https://schema.org/result") + di["@type"] = "schema:Thing" - di["schema:result"] = {"@type": "schema:Action", "schema:name": "Test"} + di["schema:result"] = di2 assert di.data_dict == { "@type": ["https://schema.org/Thing"], "https://schema.org/result": [{ @@ -66,10 +73,20 @@ def test_build_in_set(): "https://schema.org/name": [{"@value": "Test"}] }] } - + # FIXME: nesting in defintion is very long but with intermediate steps you need to define with None di = ld_dict([{}], context=[{"schema": "https://schema.org/"}]) + + di2 = ld_dict([{"@type": ["https://schema.org/Action"], "https://schema.org/error": None}], + context=[{"schema": "https://schema.org/"}], parent=di, key="https://schema.org/result") + di3 = ld_dict([{ + "@type": ["https://schema.org/Thing"], + "https://schema.org/name": [{"@value": "foo"}] + }], context="https://schema.org/", parent=di2, key="https://schema.org/error") + di2["schema:error"] = di3 + di["@type"] = "schema:Thing" - di["schema:result"] = {"@type": "schema:Action", "schema:error": {"@type": "schema:Thing", "schema:name": "foo"}} + di["schema:result"] = di2 + assert di.data_dict == { "@type": ["https://schema.org/Thing"], "https://schema.org/result": [{ @@ -82,9 +99,18 @@ def test_build_in_set(): } di = ld_dict([{}], context=[{"schema": "https://schema.org/"}]) + + di2 = ld_dict([{"@type": ["https://schema.org/Action"], "https://schema.org/error": None}], + context=[{"schema": "https://schema.org/"}], parent=di, key="https://schema.org/result") + di3 = ld_dict([{ + "@type": ["https://schema.org/Thing"], + "https://schema.org/name": [{"@value": "foo"}, {"@value": "bar"}] + }], context="https://schema.org/", parent=di2, key="https://schema.org/error") + di2["schema:error"] = di3 + di["@type"] = "schema:Thing" - di["schema:result"] = {"@type": "schema:Action", "schema:error": {"@type": "schema:Thing", "schema:name": ["foo", - "bar"]}} + di["schema:result"] = di2 + assert di.data_dict == { "@type": ["https://schema.org/Thing"], "https://schema.org/result": [{ @@ -131,13 +157,28 @@ def test_update(): di.update({}) assert di.data_dict == {"http://xmlns.com/foaf/0.1/name": [{"@value": "Manu Sporny"}], "http://xmlns.com/foaf/0.1/homepage": [{"@id": "http://manu.sporny.org/"}]} + di.update({"http://xmlns.com/foaf/0.1/name": "ham"}) + assert di.data_dict == {"http://xmlns.com/foaf/0.1/name": [{"@value": "ham"}], + "http://xmlns.com/foaf/0.1/homepage": [{"@id": "http://manu.sporny.org/"}]} + + di.update({"xmlns:bacon": "eggs"}) + assert di.data_dict == {"http://xmlns.com/foaf/0.1/name": [{"@value": "ham"}], + "http://xmlns.com/foaf/0.1/homepage": [{"@id": "http://manu.sporny.org/"}], + "http://xmlns.com/foaf/0.1/bacon": [{"@value": "eggs"}]} + di2 = ld_dict([{"http://xmlns.com/foaf/0.1/foobar": [{"@value": "bar"}], + "http://xmlns.com/foaf/0.1/barfoo": [{"@id": "foo"}]}], + context=[{"xmlns": "http://xmlns.com/foaf/0.1/"}], parent=di, key="http://xmlns.com/foaf/0.1/foo") + + # FIXME: #435 Allow nested ld_dicts? see commit before di.update({"http://xmlns.com/foaf/0.1/name": "foo", "xmlns:homepage": {"@id": "bar"}, - "xmlns:foo": {"xmlns:foobar": "bar", "http://xmlns.com/foaf/0.1/barfoo": {"@id": "foo"}}}) + "xmlns:foo": di2}) assert di.data_dict == {"http://xmlns.com/foaf/0.1/name": [{"@value": "foo"}], "http://xmlns.com/foaf/0.1/homepage": [{"@id": "bar"}], + "http://xmlns.com/foaf/0.1/bacon": [{"@value": "eggs"}], "http://xmlns.com/foaf/0.1/foo": [{"http://xmlns.com/foaf/0.1/foobar": [{"@value": "bar"}], "http://xmlns.com/foaf/0.1/barfoo": [{"@id": "foo"}]}]} + with pytest.raises(AttributeError): di.update(["", ""]) @@ -166,7 +207,7 @@ def test_compact_keys(): def test_items(): di = ld_dict([{}], context=[{"xmlns": "http://xmlns.com/foaf/0.1/"}]) - inner_di = ld_dict([{}], parent=di) + inner_di = ld_dict([{}], parent=di, key="http://xmlns.com/foaf/0.1/foo") inner_di.update({"xmlns:foobar": "bar", "http://xmlns.com/foaf/0.1/barfoo": {"@id": "foo"}}) di.update({"http://xmlns.com/foaf/0.1/name": "foo", "xmlns:homepage": {"@id": "bar"}, "xmlns:foo": inner_di}) assert [*di.items()][0:2] == [("http://xmlns.com/foaf/0.1/name", "foo"), @@ -193,6 +234,12 @@ def test_to_python(): di.update({"http://xmlns.com/foaf/0.1/name": "foo", "xmlns:homepage": {"@id": "bar"}, "xmlns:foo": inner_di}) assert di.to_python() == {"xmlns:name": "foo", "xmlns:homepage": "bar", "xmlns:foo": {"xmlns:foobar": "bar", "xmlns:barfoo": "foo"}} + di.update({"http://spam.eggs/eggs": { + "@value": "2022-02-22T00:00:00", "@type": "https://schema.org/DateTime" + }}) + assert di.to_python() == {"xmlns:name": "foo", "xmlns:homepage": "bar", + "xmlns:foo": {"xmlns:foobar": "bar", "xmlns:barfoo": "foo"}, + "http://spam.eggs/eggs": "2022-02-22T00:00:00"} def test_from_dict(): @@ -212,39 +259,43 @@ def test_from_dict(): assert di.active_ctx == {"mappings": {}} and di.context == di.full_context == [] assert di.index is di.key is di.parent is None - di = ld_dict.from_dict({"@context": {"schema": "https://schema.org/"}, "@type": "schema:Thing", "@id": "foo"}) + # FIXME: #435 When you can do short form from dict, then context should also be given in short form (as dict) + di = ld_dict.from_dict({"@context": [{"schema": "https://schema.org/"}], "@type": "schema:Thing", "@id": "foo"}) assert di.data_dict == {"@type": ["https://schema.org/Thing"], "@id": "foo"} assert di.context == di.full_context == [{"schema": "https://schema.org/"}] assert di.index is di.key is di.parent is None outer_di = di - di = ld_dict.from_dict({"@context": {"schema": "https://schema.org/"}, "@type": "schema:Action", + di = ld_dict.from_dict({"@context": [{"schema": "https://schema.org/"}], "@type": "schema:Action", "schema:name": "foo"}, parent=outer_di, key="schema:result") assert di.data_dict == {"@type": ["https://schema.org/Action"], "https://schema.org/name": [{"@value": "foo"}]} + # FIXME: #435 Full Context with the same key_value pair twice? assert di.full_context == 2 * [{"schema": "https://schema.org/"}] - assert di.context == {"schema": "https://schema.org/"} and di.key == "schema:result" and di.index is None + assert di.context == [{"schema": "https://schema.org/"}] and di.key == "schema:result" and di.index is None outer_di = di - di = ld_dict.from_dict({"@context": {"schema": "https://schema.org/"}, "@type": "schema:Thing", + di = ld_dict.from_dict({"@context": [{"schema": "https://schema.org/"}], "@type": "schema:Thing", "schema:name": "foo"}, parent=outer_di, key="schema:error") assert di.data_dict == {"@type": ["https://schema.org/Thing"], "https://schema.org/name": [{"@value": "foo"}]} assert di.full_context == 3 * [{"schema": "https://schema.org/"}] - assert di.context == {"schema": "https://schema.org/"} and di.key == "schema:error" and di.index is None + assert di.context == [{"schema": "https://schema.org/"}] and di.key == "schema:error" and di.index is None di = ld_dict.from_dict({"@type": "schema:Thing", "schema:name": "foo"}, parent=outer_di, key="schema:error") assert di.data_dict == {"@type": ["https://schema.org/Thing"], "https://schema.org/name": [{"@value": "foo"}]} assert di.full_context == 2 * [{"schema": "https://schema.org/"}] assert di.context == [] and di.key == "schema:error" and di.index is None - di = ld_dict.from_dict({"@context": {"schema": "https://schema.org/"}, "@type": "schema:Thing", "xmlns:name": "fo"}, - context={"xmlns": "http://xmlns.com/foaf/0.1/"}) + # FIXME: #435 @context and context can be different and only @context is used here + di = ld_dict.from_dict({"@context": [{"schema": "https://schema.org/", "xmlns": "http://xmlns.com/foaf/0.1/"}], "@type": "schema:Thing", "xmlns:name": "fo"}, + context=[{"schema": "https://schema.org/", "xmlns": "http://xmlns.com/foaf/0.1/"}]) assert di["http://xmlns.com/foaf/0.1/name"] == di["xmlns:name"] == "fo" - assert di.context == {"schema": "https://schema.org/", "xmlns": "http://xmlns.com/foaf/0.1/"} + assert di.context == [{"schema": "https://schema.org/", "xmlns": "http://xmlns.com/foaf/0.1/"}] def test_is_ld_dict(): + # FIXME: #435 maybe rename to is_expanded_ld_dict assert not any(ld_dict.is_ld_dict(item) for item in [{}, {"foo": "bar"}, {"@id": "foo"}]) assert not any(ld_dict.is_ld_dict(item) for item in [[{"@id": "foo"}], [{"@set": "foo"}], [{}, {}], [], [""]]) assert all(ld_dict.is_ld_dict([item]) for item in [{"@id": "foo", "foobar": "bar"}, {"foo": "bar"}]) From fce1519a46aa0acd7c81e88b811ef7aea01c8408 Mon Sep 17 00:00:00 2001 From: Michael Fritzsche Date: Thu, 25 Sep 2025 12:22:27 +0200 Subject: [PATCH 116/247] added another case for testing of get and added the issue number to a few FIXMEs --- test/hermes_test/model/types/test_ld_dict.py | 16 +++++++++++++--- 1 file changed, 13 insertions(+), 3 deletions(-) diff --git a/test/hermes_test/model/types/test_ld_dict.py b/test/hermes_test/model/types/test_ld_dict.py index b47c26ee..8aacd13e 100644 --- a/test/hermes_test/model/types/test_ld_dict.py +++ b/test/hermes_test/model/types/test_ld_dict.py @@ -39,6 +39,16 @@ def test_build_in_get(): context=[{"xmlns": "http://xmlns.com/foaf/0.1/"}]) assert di["xmlns:name"] == "Manu Sporny" + # FIXME: fixing #433 would fix this + # get -> list to python -> create empty list -> to fill dicts -> expand them -> no expansion method for dicts + di = ld_dict([{"http://xmlns.com/foaf/0.1/name": [{"@value": "foo"}], + "http://xmlns.com/foaf/0.1/foo": [{"http://xmlns.com/foaf/0.1/barfoo": [{"@id": "foo"}], + "http://xmlns.com/foaf/0.1/fooba": [{"@value": "ba"}]}, + {"http://xmlns.com/foaf/0.1/barfoo": [{"@id": "foo"}], + "http://xmlns.com/foaf/0.1/fooba": [{"@value": "ba"}]}]}], + context=[{"xmlns": "http://xmlns.com/foaf/0.1/"}]) + assert isinstance(di["http://xmlns.com/foaf/0.1/foo"], ld_list) + def test_build_in_set(): di = ld_dict([{}], context=[{"xmlns": "http://xmlns.com/foaf/0.1/"}]) @@ -51,7 +61,7 @@ def test_build_in_set(): assert di.data_dict == {"http://xmlns.com/foaf/0.1/name": [{"@value": "Manu Sporny"}], "http://xmlns.com/foaf/0.1/homepage": [{"@id": "http://manu.sporny.org/"}]} - # FIXME: TypeError for nested ld_container or mind in api + # FIXME: #435 TypeError for nested ld_container or mind in api di = ld_dict([{}], context=[{"xmlns": "http://xmlns.com/foaf/0.1/"}]) di2 = ld_list([{"@list": [{"@value": "Manu Sporny"}, {"@value": "foo"}]}], parent=di, key="http://xmlns.com/foaf/0.1/name") @@ -59,7 +69,7 @@ def test_build_in_set(): di["xmlns:name"] = di2 assert di.data_dict == {"http://xmlns.com/foaf/0.1/name": [{"@list": [{"@value": "Manu Sporny"}, {"@value": "foo"}]}]} - # FIXME: context is not applied on value side + # FIXME: #435 context is not applied on value side di = ld_dict([{}], context=[{"schema": "https://schema.org/"}]) di2 = ld_dict([{"@type": ["https://schema.org/Action"], "https://schema.org/name": [{"@value": "Test"}]}], context=[{"schema": "https://schema.org/"}], parent=di, key="https://schema.org/result") @@ -73,7 +83,7 @@ def test_build_in_set(): "https://schema.org/name": [{"@value": "Test"}] }] } - # FIXME: nesting in defintion is very long but with intermediate steps you need to define with None + # FIXME: #435 nesting in defintion is very long but with intermediate steps you need to define with None di = ld_dict([{}], context=[{"schema": "https://schema.org/"}]) di2 = ld_dict([{"@type": ["https://schema.org/Action"], "https://schema.org/error": None}], From 59180c75e09096f22e31bf32eafe0afa70a617a2 Mon Sep 17 00:00:00 2001 From: Michael Fritzsche Date: Thu, 25 Sep 2025 14:43:14 +0200 Subject: [PATCH 117/247] added an add method to SoftwareMetadata and improved __init__ of it and rewrote a test --- src/hermes/model/api.py | 18 +++++++++++++++--- test/hermes_test/model/test_api.py | 26 +++++++++++++++++--------- 2 files changed, 32 insertions(+), 12 deletions(-) diff --git a/src/hermes/model/api.py b/src/hermes/model/api.py index 6deba117..eaee6a77 100644 --- a/src/hermes/model/api.py +++ b/src/hermes/model/api.py @@ -1,10 +1,22 @@ -from hermes.model.types import ld_dict +from hermes.model.types import ld_dict, ld_list from hermes.model.types.ld_context import ALL_CONTEXTS + class SoftwareMetadata(ld_dict): - def __init__(self, data: dict=None, extra_vocabs: dict[str, str]=None) -> None: + def __init__(self, data: dict = None, extra_vocabs: dict[str, str] = None) -> None: ctx = ALL_CONTEXTS + [{**extra_vocabs}] if extra_vocabs is not None else ALL_CONTEXTS - super().__init__([data or {}], context=ctx) + super().__init__([ld_dict.from_dict(data, context=ctx).data_dict if data else {}], context=ctx) + def add(self, key, value): + if key not in self: + self[key] = value + return + if isinstance(val := self[key], ld_list): + val.append(value) + else: + temp = ld_list([{"@list": []}], parent=self, key=self.ld_proc.expand_iri(self.active_ctx, key), + context=self.context) + temp.extend([val, value]) + self[key] = temp diff --git a/test/hermes_test/model/test_api.py b/test/hermes_test/model/test_api.py index 7b40bc35..d48efa47 100644 --- a/test/hermes_test/model/test_api.py +++ b/test/hermes_test/model/test_api.py @@ -1,6 +1,7 @@ import pytest from hermes.model import SoftwareMetadata +from hermes.model.types import ld_list, ld_dict from hermes.model.types.ld_context import ALL_CONTEXTS @@ -11,6 +12,7 @@ def default_context(): return {"@context": ALL_CONTEXTS} + @pytest.fixture def custom_context(): return {"@context": ALL_CONTEXTS + [EXTRA_VOCABS]} @@ -44,16 +46,22 @@ def test_init_nested_object(): assert author["name"] in ["Foo", "Bar"] -def test_append(): +def test_add(): + data = SoftwareMetadata() + data.add("foo", "a") + assert data["foo"] == "a" + data.add("foo", "b") + assert type(data["foo"]) is ld_list and data["foo"].item_list == [{"@value": "a"}, {"@value": "b"}] + data.add("foo", "c") + assert data["foo"].item_list == [{"@value": "a"}, {"@value": "b"}, {"@value": "c"}] data = SoftwareMetadata() - author1 = {"name": "Foo"} - data["author"] = author1 - assert type(data["author"]) is list - author2 = {"name": "Bar"} - data["author"].append(author2) - assert len(data["author"]) == 2 - assert data["author"][0]["name"] == "Foo" - assert data["author"][1]["name"] == "Bar" + # FIXME: #433 will fix this + data.add("foo", {"bar": "foo"}) + assert type(data["foo"]) is ld_dict and data["foo"].data_dict == {"bar": "foo"} + data.add("foo", {"bar": "foo"}) + assert type(data["foo"]) is ld_list and data["foo"].item_list == 2 * [{"bar": "foo"}] + data.add("foo", {"bar": "foo"}) + assert data["foo"].item_list == 3 * [{"bar": "foo"}] def test_iterative_assignment(): From daed5d36b48b50c5c8b0b13ee6a13c0941b2f9fc Mon Sep 17 00:00:00 2001 From: Stephan Druskat Date: Fri, 26 Sep 2025 09:48:17 +0200 Subject: [PATCH 118/247] Change existing test to assume returned lists --- test/hermes_test/model/test_api.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/test/hermes_test/model/test_api.py b/test/hermes_test/model/test_api.py index d48efa47..44e541fd 100644 --- a/test/hermes_test/model/test_api.py +++ b/test/hermes_test/model/test_api.py @@ -40,10 +40,10 @@ def test_init_nested_object(): "maintainer": {"name": "Some Name", "email": "maintainer@example.com"}, "author": [{"name": "Foo"}, {"name": "Bar"}]} data = SoftwareMetadata(my_software, extra_vocabs={"foo": "https://foo.bar"}) - assert data["foo:softwareName"] == "MySoftware" - assert data["maintainer"]["name"] == "Some Name" + assert data["foo:softwareName"] == ["MySoftware"] + assert data["maintainer"]["name"] == ["Some Name"] for author in data["author"]: - assert author["name"] in ["Foo", "Bar"] + assert author["name"] in [["Foo"], ["Bar"]] def test_add(): @@ -72,10 +72,10 @@ def test_iterative_assignment(): data["author"] = {"name": "Foo"} # Look, a squirrel! authors = data["author"] - assert type(authors) is list + assert isinstance(authors, list) author1 = authors[0] author1["email"] = "author@example.com" authors[0] = author1 - assert len(authors) == 1 authors.append({"name": "Bar", "email": "author2@example.com"}) data["author"] = authors + assert len(authors) == 2 \ No newline at end of file From 45839153f059d4686188eb0822122e4aff7e5745 Mon Sep 17 00:00:00 2001 From: Stephan Druskat Date: Fri, 26 Sep 2025 10:07:29 +0200 Subject: [PATCH 119/247] Add test for harvesting case --- test/hermes_test/model/test_api.py | 37 +++++++++++++++++++++++++++++- 1 file changed, 36 insertions(+), 1 deletion(-) diff --git a/test/hermes_test/model/test_api.py b/test/hermes_test/model/test_api.py index 44e541fd..817fc0b3 100644 --- a/test/hermes_test/model/test_api.py +++ b/test/hermes_test/model/test_api.py @@ -78,4 +78,39 @@ def test_iterative_assignment(): authors[0] = author1 authors.append({"name": "Bar", "email": "author2@example.com"}) data["author"] = authors - assert len(authors) == 2 \ No newline at end of file + assert len(authors) == 2 + + +### Application scenarios + +## Harvesting + +def test_harvest(): + data = SoftwareMetadata() + data["author"] = {"name": "Foo"} + data["author"].append({"name": "Bar"}) + data["author"][0]["email"] = "foo@bar.net" + data["author"][0]["email"].append("foo@baz.com") + assert len(data["author"]) == 2 + assert len(data["author"][1]["email"]) == 2 + assert len(data["author"][0]["email"]) == 0 + harvest = {"authors": [{"name": "Foo", "affiliations": ["Uni A", "Lab B"], "kw": ["a", "b", "c"]}, {"name": "Bar", "affiliations": ["Uni C"], "email": "bar@c.edu"}, {"name": "Baz", "affiliations": ["Lab E"]}]} + for author in harvest["authors"]: + for exist_author in data["author"]: + if author["name"] == exist_author["name"]: + exist_author["affiliation"] = author["affiliations"] + exist_author["email"].append(author["email"]) + exist_author["schema:knowsAbout"].append(kw for kw in author["kw"]) + assert len(data["author"]) == 3 + foo, bar, baz = data["author"] + assert foo["name"] == "Foo" + assert foo["affiliation"] == ["Uni A", "Lab B"] + assert foo["schema:knowsAbout"] == ["a", "b", "c"] + assert foo["email"] == ["foo@bar.net", "foo@baz.com"] + assert bar["name"] == "Bar" + assert bar["affiliation"] == ["Uni C"] + assert bar["email"] == ["bar@c.edu"] + assert baz["name"] == "Baz" + assert baz["affiliation"] == ["Lab E"] + assert baz["schema:knowsAbout"] is None + assert baz["email"] is None From 6808272435008df01d56b96c97540d1eaad2bf94 Mon Sep 17 00:00:00 2001 From: Stephan Druskat Date: Fri, 26 Sep 2025 10:23:02 +0200 Subject: [PATCH 120/247] Add more comprehensive usage test --- test/hermes_test/model/test_api.py | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/test/hermes_test/model/test_api.py b/test/hermes_test/model/test_api.py index 817fc0b3..7849b884 100644 --- a/test/hermes_test/model/test_api.py +++ b/test/hermes_test/model/test_api.py @@ -81,11 +81,7 @@ def test_iterative_assignment(): assert len(authors) == 2 -### Application scenarios - -## Harvesting - -def test_harvest(): +def test_usage(): data = SoftwareMetadata() data["author"] = {"name": "Foo"} data["author"].append({"name": "Bar"}) @@ -114,3 +110,11 @@ def test_harvest(): assert baz["affiliation"] == ["Lab E"] assert baz["schema:knowsAbout"] is None assert baz["email"] is None + assert data["@type"] == "SoftwareSourceCode" + assert data["@context"] == ALL_CONTEXTS + for author in data["author"]: + assert "name" in author + assert "email" in author + if "schema:knowsAbout" not in author: + author["schema:knowsAbout"] = None + author["schema:pronouns"] = "they/them" \ No newline at end of file From 2f7eadf6d7299bdcbf7a183cfc35692e5345a5f3 Mon Sep 17 00:00:00 2001 From: Stephan Druskat Date: Fri, 26 Sep 2025 10:25:16 +0200 Subject: [PATCH 121/247] Add new license annotation for Python files --- REUSE.toml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/REUSE.toml b/REUSE.toml index b3033158..c81fab97 100644 --- a/REUSE.toml +++ b/REUSE.toml @@ -17,3 +17,9 @@ path = ["REUSE.toml"] precedence = "aggregate" SPDX-FileCopyrightText = "German Aerospace Center (DLR), Helmholtz-Zentrum Dresden-Rossendorf, Forschungszentrum Jülich" SPDX-License-Identifier = "CC0-1.0" + +[[annotations]] +path = ["src/**/*.py", "test/**/*.py"] +precedence = "aggregate" +SPDX-FileCopyrightText = "German Aerospace Center (DLR), Helmholtz-Zentrum Dresden-Rossendorf, Forschungszentrum Jülich" +SPDX-License-Identifier = "Apache-2.0" \ No newline at end of file From 0f324949c8be3f90cd1d5bc5c705b473e35ec055 Mon Sep 17 00:00:00 2001 From: notactuallyfinn Date: Fri, 26 Sep 2025 12:46:05 +0200 Subject: [PATCH 122/247] changed conversions of types to output ld_lists for every item in a dict --- src/hermes/model/types/__init__.py | 62 +++++++++++------------------- src/hermes/model/types/ld_list.py | 4 +- 2 files changed, 24 insertions(+), 42 deletions(-) diff --git a/src/hermes/model/types/__init__.py b/src/hermes/model/types/__init__.py index 6fff24b2..11270d43 100644 --- a/src/hermes/model/types/__init__.py +++ b/src/hermes/model/types/__init__.py @@ -19,57 +19,39 @@ lambda c: isinstance(c, ld_container), { "ld_container": lambda c, **_: c, - "json": lambda c, **_: c.compact(), "expanded_json": lambda c, **_: c.ld_value, - } + }, ), - # Wrap expanded_json to ld_container - (ld_container.is_ld_id, dict(python=lambda c, **_: c[0]['@id'])), - (ld_container.is_typed_ld_value, dict(python=ld_container.typed_ld_to_py)), - (ld_container.is_ld_value, dict(python=lambda c, **_: c[0]['@value'])), + # Wrap item from ld_dict in ld_list (ld_list.is_ld_list, dict(ld_container=ld_list)), - (ld_dict.is_ld_dict, dict(ld_container=ld_dict)), + (lambda c: isinstance(c, list) and all(isinstance(item, dict) for item in c), dict(ld_container=ld_list.from_list)), - # Expand and access JSON data - (ld_container.is_json_id, dict(python=lambda c: c["@id"], expanded_json=lambda c, **_: [c])), + # pythonize items from lists (expanded set is already handled above) + (ld_container.is_json_id, dict(python=lambda c, **_: c["@id"])), (ld_container.is_typed_json_value, dict(python=ld_container.typed_ld_to_py)), - (ld_container.is_json_value, dict(python=lambda c, **_: c["@value"], expanded_json=lambda c, **_: [c])), + (ld_container.is_json_value, dict(python=lambda c, **_: c["@value"])), (ld_list.is_container, dict(ld_container=lambda c, **kw: ld_list([c], **kw))), - - # FIXME: add conversion from list and json dict to expanded_json - # to parse nested dicts and lists when using for example __setitem__(key, value) from ld_dict - # where value is converted to expanded_json bevor adding it to data_dict - # Suggested: - # ( - # ld_dict.is_json_dict, - # { - # "ld_container": ld_dict.from_dict, - # "expanded_json": lambda c, **kw: kw["parent"]._to_expanded_json(kw["key"], ld_dict.from_dict(c, **kw)) - # } - # ), - # - # ( - # lambda c: isinstance(c, list), - # { - # "ld_container": ld_list.from_list, - # "expanded_json": lambda c, **kw: kw["parent"]._to_expanded_json(kw["key"], ld_list.from_list(c, **kw)) - # } - # ), (ld_dict.is_json_dict, dict(ld_container=ld_dict.from_dict)), - (lambda c: isinstance(c, list), dict(ld_container=ld_list.from_list)), - - # Wrap internal data types + # Convert internal data types to expanded_json + (lambda c: ld_container.is_json_id(c) or ld_container.is_json_value(c), dict(expanded_json=lambda c, **_: [c])), + (ld_dict.is_json_dict, dict(expanded_json=lambda c, **kw: ld_dict.from_dict(c, **kw).ld_value)), + (lambda c: isinstance(c, list), dict(expanded_json=lambda c, **kw: ld_list.from_list(c, **kw).ld_value)), (lambda v: isinstance(v, (int, float, str, bool)), dict(expanded_json=lambda v, **_: [{"@value": v}])), - - (lambda v: isinstance(v, datetime), - dict(expanded_json=lambda v, **_: [{"@value": v.isoformat(), "@type": iri_map["schema:DateTime"]}])), - (lambda v: isinstance(v, date), - dict(expanded_json=lambda v, **_: [{"@value": v.isoformat(), "@type": iri_map["schema:Date"]}])), - (lambda v: isinstance(v, time), - dict(expanded_json=lambda v, **_: [{"@value": v.isoformat(), "@type": iri_map["schema:Time"]}])), + ( + lambda v: isinstance(v, datetime), + dict(expanded_json=lambda v, **_: [{"@value": v.isoformat(), "@type": iri_map["schema:DateTime"]}]), + ), + ( + lambda v: isinstance(v, date), + dict(expanded_json=lambda v, **_: [{"@value": v.isoformat(), "@type": iri_map["schema:Date"]}]), + ), + ( + lambda v: isinstance(v, time), + dict(expanded_json=lambda v, **_: [{"@value": v.isoformat(), "@type": iri_map["schema:Time"]}]), + ), ] diff --git a/src/hermes/model/types/ld_list.py b/src/hermes/model/types/ld_list.py index 62a7e5f3..110b95f7 100644 --- a/src/hermes/model/types/ld_list.py +++ b/src/hermes/model/types/ld_list.py @@ -33,7 +33,7 @@ def __getitem__(self, index): if isinstance(index, slice): return [self[i] for i in [*range(len(self))][index]] - item = self._to_python(self.key, self.item_list[index:index + 1]) + item = self._to_python(self.key, self.item_list[index]) if isinstance(item, ld_container): item.index = index return item @@ -46,7 +46,7 @@ def __len__(self): def __iter__(self): for index, value in enumerate(self.item_list): - item = self._to_python(self.key, [value]) + item = self._to_python(self.key, value) if isinstance(item, ld_container): item.index = index yield item From 8298e49f86547b80e5388880f41206849c1b63fb Mon Sep 17 00:00:00 2001 From: notactuallyfinn Date: Fri, 26 Sep 2025 12:46:42 +0200 Subject: [PATCH 123/247] added some tests for the conversions and formated to satisfy flake8 --- test/hermes_test/model/test_api.py | 20 +++++++--- test/hermes_test/model/types/test_ld_dict.py | 39 +++++++++++--------- 2 files changed, 37 insertions(+), 22 deletions(-) diff --git a/test/hermes_test/model/test_api.py b/test/hermes_test/model/test_api.py index 7849b884..ce3fe4e7 100644 --- a/test/hermes_test/model/test_api.py +++ b/test/hermes_test/model/test_api.py @@ -36,9 +36,13 @@ def test_init_with_data(metadata, full_context, request): def test_init_nested_object(): - my_software = {"foo:softwareName": "MySoftware", "foo:egg": "spam", "foo:ham": "eggs", - "maintainer": {"name": "Some Name", "email": "maintainer@example.com"}, - "author": [{"name": "Foo"}, {"name": "Bar"}]} + my_software = { + "foo:softwareName": "MySoftware", + "foo:egg": "spam", + "foo:ham": "eggs", + "maintainer": {"name": "Some Name", "email": "maintainer@example.com"}, + "author": [{"name": "Foo"}, {"name": "Bar"}], + } data = SoftwareMetadata(my_software, extra_vocabs={"foo": "https://foo.bar"}) assert data["foo:softwareName"] == ["MySoftware"] assert data["maintainer"]["name"] == ["Some Name"] @@ -90,7 +94,13 @@ def test_usage(): assert len(data["author"]) == 2 assert len(data["author"][1]["email"]) == 2 assert len(data["author"][0]["email"]) == 0 - harvest = {"authors": [{"name": "Foo", "affiliations": ["Uni A", "Lab B"], "kw": ["a", "b", "c"]}, {"name": "Bar", "affiliations": ["Uni C"], "email": "bar@c.edu"}, {"name": "Baz", "affiliations": ["Lab E"]}]} + harvest = { + "authors": [ + {"name": "Foo", "affiliations": ["Uni A", "Lab B"], "kw": ["a", "b", "c"]}, + {"name": "Bar", "affiliations": ["Uni C"], "email": "bar@c.edu"}, + {"name": "Baz", "affiliations": ["Lab E"]}, + ] + } for author in harvest["authors"]: for exist_author in data["author"]: if author["name"] == exist_author["name"]: @@ -117,4 +127,4 @@ def test_usage(): assert "email" in author if "schema:knowsAbout" not in author: author["schema:knowsAbout"] = None - author["schema:pronouns"] = "they/them" \ No newline at end of file + author["schema:pronouns"] = "they/them" diff --git a/test/hermes_test/model/types/test_ld_dict.py b/test/hermes_test/model/types/test_ld_dict.py index 2fc0d5f3..7e7099bd 100644 --- a/test/hermes_test/model/types/test_ld_dict.py +++ b/test/hermes_test/model/types/test_ld_dict.py @@ -26,18 +26,22 @@ def test_malformed_input(): def test_build_in_get(): - di = ld_dict([{"name": [{"@value": "Manu Sporny"}], - "homepage": [{"@id": "http://manu.sporny.org/"}], - "foo": [{"foobar": "bar", "barfoo": "foo"}]}]) - assert di["name"] == "Manu Sporny" - assert di["homepage"] == "http://manu.sporny.org/" - assert di["foo"].data_dict == ld_dict([{"foobar": "bar", "barfoo": "foo"}]).data_dict + di = ld_dict([{"http://schema.org/name": [{"@value": "Manu Sporny"}], + "http://schema.org/homepage": [{"@id": "http://manu.sporny.org/"}], + "http://schema.org/foo": [{"http://schema.org/foobar": "bar", "http://schema.org/barfoo": "foo"}]}], + context=[{"schema": "http://schema.org/"}]) + assert isinstance(di["schema:name"], ld_list) and di["schema:name"].item_list == [{"@value": "Manu Sporny"}] + assert isinstance(di["schema:homepage"], ld_list) + assert di["schema:homepage"].item_list == [{"@id": "http://manu.sporny.org/"}] + assert isinstance(di["http://schema.org/foo"], ld_list) and isinstance(di["http://schema.org/foo"][0], ld_dict) + assert di["http://schema.org/foo"][0].data_dict == {"http://schema.org/foobar": [{"@value": "bar"}], + "http://schema.org/barfoo": [{"@value": "foo"}]} with pytest.raises(KeyError): di["bar"] di = ld_dict([{"http://xmlns.com/foaf/0.1/name": [{"@value": "Manu Sporny"}]}], context={"xmlns": "http://xmlns.com/foaf/0.1/"}) - assert di["xmlns:name"] == "Manu Sporny" + assert di["xmlns:name"].item_list == [{"@value": "Manu Sporny"}] def test_build_in_set(): @@ -95,7 +99,7 @@ def test_build_in_set(): }] }] } - assert isinstance(di["schema:result"]["schema:error"]["schema:name"], ld_list) + assert isinstance(di["schema:result"][0]["schema:error"][0]["schema:name"], ld_list) def test_build_in_delete(): @@ -117,8 +121,8 @@ def test_build_in_contains(): def test_get(): di = ld_dict([{"https://schema.org/name": [{"@value": "Manu Sporny"}]}], context={"schema": "https://schema.org/"}) - assert di.get("https://schema.org/name") == "Manu Sporny" - assert di.get("schema:name") == "Manu Sporny" + assert di.get("https://schema.org/name").item_list == [{"@value": "Manu Sporny"}] + assert di.get("schema:name").item_list == [{"@value": "Manu Sporny"}] assert di.get("bar", None) is None with pytest.raises(KeyError): di.get("bar") @@ -169,11 +173,12 @@ def test_items(): inner_di = ld_dict([{}], parent=di) inner_di.update({"xmlns:foobar": "bar", "http://xmlns.com/foaf/0.1/barfoo": {"@id": "foo"}}) di.update({"http://xmlns.com/foaf/0.1/name": "foo", "xmlns:homepage": {"@id": "bar"}, "xmlns:foo": inner_di}) - assert [*di.items()][0:2] == [("http://xmlns.com/foaf/0.1/name", "foo"), - ("http://xmlns.com/foaf/0.1/homepage", "bar")] - assert [*di.items()][2][0] == "http://xmlns.com/foaf/0.1/foo" - assert [*di.items()][2][1].data_dict == {"http://xmlns.com/foaf/0.1/foobar": [{"@value": "bar"}], - "http://xmlns.com/foaf/0.1/barfoo": [{"@id": "foo"}]} + items = [*di.items()] + assert (items[0][0], items[1][0]) == ("http://xmlns.com/foaf/0.1/name", "http://xmlns.com/foaf/0.1/homepage") + assert (items[0][1].item_list, items[1][1].item_list) == ([{"@value": "foo"}], [{"@id": "bar"}]) + assert items[2][0] == "http://xmlns.com/foaf/0.1/foo" and isinstance(items[2][1], ld_list) + assert items[2][1][0].data_dict == {"http://xmlns.com/foaf/0.1/foobar": [{"@value": "bar"}], + "http://xmlns.com/foaf/0.1/barfoo": [{"@id": "foo"}]} def test_ref(): @@ -191,8 +196,8 @@ def test_to_python(): inner_di = ld_dict([{}], parent=di) inner_di.update({"xmlns:foobar": "bar", "http://xmlns.com/foaf/0.1/barfoo": {"@id": "foo"}}) di.update({"http://xmlns.com/foaf/0.1/name": "foo", "xmlns:homepage": {"@id": "bar"}, "xmlns:foo": inner_di}) - assert di.to_python() == {"xmlns:name": "foo", "xmlns:homepage": "bar", - "xmlns:foo": {"xmlns:foobar": "bar", "xmlns:barfoo": "foo"}} + assert di.to_python() == {"xmlns:name": ["foo"], "xmlns:homepage": ["bar"], + "xmlns:foo": [{"xmlns:foobar": ["bar"], "xmlns:barfoo": ["foo"]}]} def test_from_dict(): From 3a8bfbed119f5823ac9a69aacf7483137d154325 Mon Sep 17 00:00:00 2001 From: notactuallyfinn Date: Fri, 26 Sep 2025 12:55:27 +0200 Subject: [PATCH 124/247] added three more conversions for container to expanded json --- src/hermes/model/types/__init__.py | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/src/hermes/model/types/__init__.py b/src/hermes/model/types/__init__.py index 11270d43..e533a069 100644 --- a/src/hermes/model/types/__init__.py +++ b/src/hermes/model/types/__init__.py @@ -38,6 +38,22 @@ # Convert internal data types to expanded_json (lambda c: ld_container.is_json_id(c) or ld_container.is_json_value(c), dict(expanded_json=lambda c, **_: [c])), (ld_dict.is_json_dict, dict(expanded_json=lambda c, **kw: ld_dict.from_dict(c, **kw).ld_value)), + (ld_dict.is_ld_dict, dict(expanded_json=lambda c, **kw: ld_dict.from_dict(c[0], **kw).ld_value)), + ( + ld_list.is_container, + dict( + expanded_json=lambda c, **kw: ld_list.from_list( + ld_list([c]).item_list, container=ld_list([c]).container, **kw + ).ld_value + ), + ), + ( + ld_list.is_ld_list, + dict( + expanded_json=lambda c, **kw: ld_list.from_list( + ld_list(c).item_list, container=ld_list(c).container, **kw + ).ld_value + ), ), (lambda c: isinstance(c, list), dict(expanded_json=lambda c, **kw: ld_list.from_list(c, **kw).ld_value)), (lambda v: isinstance(v, (int, float, str, bool)), dict(expanded_json=lambda v, **_: [{"@value": v}])), ( From 2ef89d397d1bc4154a7eb66239af56b3c42f127c Mon Sep 17 00:00:00 2001 From: notactuallyfinn Date: Fri, 26 Sep 2025 14:33:18 +0200 Subject: [PATCH 125/247] always return a list when getting an item from ld_dict --- src/hermes/model/types/ld_dict.py | 18 +++++++++--------- test/hermes_test/model/types/test_ld_dict.py | 9 ++++----- 2 files changed, 13 insertions(+), 14 deletions(-) diff --git a/src/hermes/model/types/ld_dict.py b/src/hermes/model/types/ld_dict.py index 49461894..439e0043 100644 --- a/src/hermes/model/types/ld_dict.py +++ b/src/hermes/model/types/ld_dict.py @@ -21,7 +21,11 @@ def __init__(self, data, *, parent=None, key=None, index=None, context=None): def __getitem__(self, key): full_iri = self.ld_proc.expand_iri(self.active_ctx, key) - ld_value = self.data_dict[full_iri] + try: + ld_value = self.data_dict[full_iri] + except KeyError: + self.data_dict.update({full_iri: [{"@list": []}]}) + ld_value = self.data_dict[full_iri] return self._to_python(full_iri, ld_value) def __setitem__(self, key, value): @@ -35,16 +39,12 @@ def __delitem__(self, key): def __contains__(self, key): full_iri = self.ld_proc.expand_iri(self.active_ctx, key) - return full_iri in self.data_dict + return len(self[full_iri]) != 0 def get(self, key, default=_NO_DEFAULT): - try: - value = self[key] - return value - except KeyError as e: - if default is not ld_dict._NO_DEFAULT: - return default - raise e + if key not in self and default is not ld_dict._NO_DEFAULT: + return default + return self[key] def update(self, other): for key, value in other.items(): diff --git a/test/hermes_test/model/types/test_ld_dict.py b/test/hermes_test/model/types/test_ld_dict.py index 7e7099bd..b31c24a6 100644 --- a/test/hermes_test/model/types/test_ld_dict.py +++ b/test/hermes_test/model/types/test_ld_dict.py @@ -28,7 +28,8 @@ def test_malformed_input(): def test_build_in_get(): di = ld_dict([{"http://schema.org/name": [{"@value": "Manu Sporny"}], "http://schema.org/homepage": [{"@id": "http://manu.sporny.org/"}], - "http://schema.org/foo": [{"http://schema.org/foobar": "bar", "http://schema.org/barfoo": "foo"}]}], + "http://schema.org/foo": [{"http://schema.org/foobar": [{"@value": "bar"}], + "http://schema.org/barfoo": [{"@value": "foo"}]}]}], context=[{"schema": "http://schema.org/"}]) assert isinstance(di["schema:name"], ld_list) and di["schema:name"].item_list == [{"@value": "Manu Sporny"}] assert isinstance(di["schema:homepage"], ld_list) @@ -36,8 +37,7 @@ def test_build_in_get(): assert isinstance(di["http://schema.org/foo"], ld_list) and isinstance(di["http://schema.org/foo"][0], ld_dict) assert di["http://schema.org/foo"][0].data_dict == {"http://schema.org/foobar": [{"@value": "bar"}], "http://schema.org/barfoo": [{"@value": "foo"}]} - with pytest.raises(KeyError): - di["bar"] + assert isinstance(di["bar"], ld_list) and len(di["bar"]) == 0 di = ld_dict([{"http://xmlns.com/foaf/0.1/name": [{"@value": "Manu Sporny"}]}], context={"xmlns": "http://xmlns.com/foaf/0.1/"}) @@ -124,8 +124,7 @@ def test_get(): assert di.get("https://schema.org/name").item_list == [{"@value": "Manu Sporny"}] assert di.get("schema:name").item_list == [{"@value": "Manu Sporny"}] assert di.get("bar", None) is None - with pytest.raises(KeyError): - di.get("bar") + assert isinstance(di["bar"], ld_list) and len(di["bar"]) == 0 def test_update(): From 2db93cffe94a3a2234c9650bbb5448d4902abd00 Mon Sep 17 00:00:00 2001 From: notactuallyfinn Date: Fri, 26 Sep 2025 14:33:56 +0200 Subject: [PATCH 126/247] added tests and fixed issues --- src/hermes/model/types/__init__.py | 10 ++-- test/hermes_test/model/test_api.py | 83 ++++++++++++++++-------------- 2 files changed, 52 insertions(+), 41 deletions(-) diff --git a/src/hermes/model/types/__init__.py b/src/hermes/model/types/__init__.py index e533a069..6c02662c 100644 --- a/src/hermes/model/types/__init__.py +++ b/src/hermes/model/types/__init__.py @@ -26,14 +26,17 @@ # Wrap item from ld_dict in ld_list (ld_list.is_ld_list, dict(ld_container=ld_list)), - (lambda c: isinstance(c, list) and all(isinstance(item, dict) for item in c), dict(ld_container=ld_list.from_list)), + ( + lambda c: isinstance(c, list) and all(isinstance(item, dict) for item in c), + dict(ld_container=lambda c, **kw: ld_list([{"@list": c}], **kw)) + ), # pythonize items from lists (expanded set is already handled above) (ld_container.is_json_id, dict(python=lambda c, **_: c["@id"])), (ld_container.is_typed_json_value, dict(python=ld_container.typed_ld_to_py)), (ld_container.is_json_value, dict(python=lambda c, **_: c["@value"])), (ld_list.is_container, dict(ld_container=lambda c, **kw: ld_list([c], **kw))), - (ld_dict.is_json_dict, dict(ld_container=ld_dict.from_dict)), + (ld_dict.is_json_dict, dict(ld_container=lambda c, **kw: ld_dict([c], **kw))), # Convert internal data types to expanded_json (lambda c: ld_container.is_json_id(c) or ld_container.is_json_value(c), dict(expanded_json=lambda c, **_: [c])), @@ -53,7 +56,8 @@ expanded_json=lambda c, **kw: ld_list.from_list( ld_list(c).item_list, container=ld_list(c).container, **kw ).ld_value - ), ), + ), + ), (lambda c: isinstance(c, list), dict(expanded_json=lambda c, **kw: ld_list.from_list(c, **kw).ld_value)), (lambda v: isinstance(v, (int, float, str, bool)), dict(expanded_json=lambda v, **_: [{"@value": v}])), ( diff --git a/test/hermes_test/model/test_api.py b/test/hermes_test/model/test_api.py index ce3fe4e7..0ee910db 100644 --- a/test/hermes_test/model/test_api.py +++ b/test/hermes_test/model/test_api.py @@ -32,40 +32,42 @@ def test_init_no_data(metadata, full_context, request): ]) def test_init_with_data(metadata, full_context, request): assert metadata.full_context == request.getfixturevalue(full_context)["@context"] - assert metadata["funding"] == "foo" + assert metadata["funding"][0] == "foo" def test_init_nested_object(): my_software = { - "foo:softwareName": "MySoftware", + "schema:softwareName": "MySoftware", "foo:egg": "spam", "foo:ham": "eggs", "maintainer": {"name": "Some Name", "email": "maintainer@example.com"}, "author": [{"name": "Foo"}, {"name": "Bar"}], } data = SoftwareMetadata(my_software, extra_vocabs={"foo": "https://foo.bar"}) - assert data["foo:softwareName"] == ["MySoftware"] - assert data["maintainer"]["name"] == ["Some Name"] + assert data["schema:softwareName"][0] == "MySoftware" + assert data["maintainer"][0]["name"][0] == "Some Name" for author in data["author"]: - assert author["name"] in [["Foo"], ["Bar"]] + for name in author["name"]: + assert name in ["Foo", "Bar"] -def test_add(): +def test_append(): data = SoftwareMetadata() - data.add("foo", "a") - assert data["foo"] == "a" - data.add("foo", "b") + data["foo"].append("a") + assert type(data["foo"]) is ld_list and data["foo"][0] == "a" and data["foo"].item_list == [{"@value": "a"}] + data["foo"].append("b") assert type(data["foo"]) is ld_list and data["foo"].item_list == [{"@value": "a"}, {"@value": "b"}] - data.add("foo", "c") + data["foo"].append("c") assert data["foo"].item_list == [{"@value": "a"}, {"@value": "b"}, {"@value": "c"}] data = SoftwareMetadata() # FIXME: #433 will fix this - data.add("foo", {"bar": "foo"}) - assert type(data["foo"]) is ld_dict and data["foo"].data_dict == {"bar": "foo"} - data.add("foo", {"bar": "foo"}) - assert type(data["foo"]) is ld_list and data["foo"].item_list == 2 * [{"bar": "foo"}] - data.add("foo", {"bar": "foo"}) - assert data["foo"].item_list == 3 * [{"bar": "foo"}] + data["foo"].append({"schema:name": "foo"}) + assert type(data["foo"]) is ld_list and type(data["foo"][0]) is ld_dict + assert data["foo"][0].data_dict == {"http://schema.org/name": [{"@value": "foo"}]} + data["foo"].append({"schema:name": "foo"}) + assert type(data["foo"]) is ld_list and data["foo"].item_list == 2*[{"http://schema.org/name": [{"@value": "foo"}]}] + data["foo"].append({"schema:name": "foo"}) + assert data["foo"].item_list == 3 * [{"http://schema.org/name": [{"@value": "foo"}]}] def test_iterative_assignment(): @@ -76,7 +78,7 @@ def test_iterative_assignment(): data["author"] = {"name": "Foo"} # Look, a squirrel! authors = data["author"] - assert isinstance(authors, list) + assert isinstance(authors, ld_list) author1 = authors[0] author1["email"] = "author@example.com" authors[0] = author1 @@ -92,36 +94,41 @@ def test_usage(): data["author"][0]["email"] = "foo@bar.net" data["author"][0]["email"].append("foo@baz.com") assert len(data["author"]) == 2 - assert len(data["author"][1]["email"]) == 2 - assert len(data["author"][0]["email"]) == 0 + assert len(data["author"][0]["email"]) == 2 + assert len(data["author"][1]["email"]) == 0 harvest = { "authors": [ - {"name": "Foo", "affiliations": ["Uni A", "Lab B"], "kw": ["a", "b", "c"]}, - {"name": "Bar", "affiliations": ["Uni C"], "email": "bar@c.edu"}, - {"name": "Baz", "affiliations": ["Lab E"]}, + {"name": "Foo", "affiliation": ["Uni A", "Lab B"], "kw": ["a", "b", "c"]}, + {"name": "Bar", "affiliation": ["Uni C"], "email": "bar@c.edu"}, + {"name": "Baz", "affiliation": ["Lab E"]}, ] } for author in harvest["authors"]: for exist_author in data["author"]: - if author["name"] == exist_author["name"]: - exist_author["affiliation"] = author["affiliations"] - exist_author["email"].append(author["email"]) - exist_author["schema:knowsAbout"].append(kw for kw in author["kw"]) + if author["name"] == exist_author["name"][0]: + exist_author["affiliation"] = author["affiliation"] + if "email" in author: + exist_author["email"].append(author["email"]) + if "kw" in author: + exist_author["schema:knowsAbout"].extend(author["kw"]) + break + else: + data["author"].append(author) assert len(data["author"]) == 3 foo, bar, baz = data["author"] - assert foo["name"] == "Foo" - assert foo["affiliation"] == ["Uni A", "Lab B"] - assert foo["schema:knowsAbout"] == ["a", "b", "c"] - assert foo["email"] == ["foo@bar.net", "foo@baz.com"] - assert bar["name"] == "Bar" - assert bar["affiliation"] == ["Uni C"] - assert bar["email"] == ["bar@c.edu"] - assert baz["name"] == "Baz" - assert baz["affiliation"] == ["Lab E"] - assert baz["schema:knowsAbout"] is None - assert baz["email"] is None + assert foo["name"][0] == "Foo" + assert foo["affiliation"].to_python() == ["Uni A", "Lab B"] + assert foo["schema:knowsAbout"].to_python() == ["a", "b", "c"] + assert foo["email"].to_python() == ["foo@bar.net", "foo@baz.com"] + assert bar["name"][0] == "Bar" + assert bar["affiliation"].to_python() == ["Uni C"] + assert bar["email"].to_python() == ["bar@c.edu"] + assert baz["name"][0] == "Baz" + assert baz["affiliation"].to_python() == ["Lab E"] + assert len(baz["schema:knowsAbout"]) == 0 + assert len(baz["email"]) == 0 assert data["@type"] == "SoftwareSourceCode" - assert data["@context"] == ALL_CONTEXTS + assert data["@context"] == ALL_CONTEXTS # FIXME: #435 will solve this issue for author in data["author"]: assert "name" in author assert "email" in author From f38b662a107e434ab4399a9903be969e216b4cae Mon Sep 17 00:00:00 2001 From: notactuallyfinn Date: Fri, 26 Sep 2025 14:51:37 +0200 Subject: [PATCH 127/247] changed type conversions as discussed in #423 and #433 and tweaked the tests to represent that change --- src/hermes/model/types/__init__.py | 64 +++++++++++------ src/hermes/model/types/ld_list.py | 9 ++- test/hermes_test/model/types/test_ld_list.py | 75 +++++++++++--------- 3 files changed, 91 insertions(+), 57 deletions(-) diff --git a/src/hermes/model/types/__init__.py b/src/hermes/model/types/__init__.py index a10d58a3..6c02662c 100644 --- a/src/hermes/model/types/__init__.py +++ b/src/hermes/model/types/__init__.py @@ -19,37 +19,59 @@ lambda c: isinstance(c, ld_container), { "ld_container": lambda c, **_: c, - "json": lambda c, **_: c.compact(), "expanded_json": lambda c, **_: c.ld_value, - } + }, ), - # Wrap expanded_json to ld_container - (ld_container.is_ld_id, dict(python=lambda c, **_: c[0]['@id'])), - (ld_container.is_typed_ld_value, dict(python=ld_container.typed_ld_to_py)), - (ld_container.is_ld_value, dict(python=lambda c, **_: c[0]['@value'])), + # Wrap item from ld_dict in ld_list (ld_list.is_ld_list, dict(ld_container=ld_list)), - (ld_dict.is_ld_dict, dict(ld_container=ld_dict)), + ( + lambda c: isinstance(c, list) and all(isinstance(item, dict) for item in c), + dict(ld_container=lambda c, **kw: ld_list([{"@list": c}], **kw)) + ), - # Expand and access JSON data - (ld_container.is_json_id, dict(python=lambda c: c["@id"], expanded_json=lambda c, **_: [c])), + # pythonize items from lists (expanded set is already handled above) + (ld_container.is_json_id, dict(python=lambda c, **_: c["@id"])), (ld_container.is_typed_json_value, dict(python=ld_container.typed_ld_to_py)), - (ld_container.is_json_value, dict(python=lambda c, **_: c["@value"], expanded_json=lambda c, **_: [c])), + (ld_container.is_json_value, dict(python=lambda c, **_: c["@value"])), (ld_list.is_container, dict(ld_container=lambda c, **kw: ld_list([c], **kw))), - (ld_dict.is_json_dict, dict(ld_container=ld_dict.from_dict)), - - (lambda c: isinstance(c, list), dict(ld_container=ld_list.from_list)), + (ld_dict.is_json_dict, dict(ld_container=lambda c, **kw: ld_dict([c], **kw))), - # Wrap internal data types + # Convert internal data types to expanded_json + (lambda c: ld_container.is_json_id(c) or ld_container.is_json_value(c), dict(expanded_json=lambda c, **_: [c])), + (ld_dict.is_json_dict, dict(expanded_json=lambda c, **kw: ld_dict.from_dict(c, **kw).ld_value)), + (ld_dict.is_ld_dict, dict(expanded_json=lambda c, **kw: ld_dict.from_dict(c[0], **kw).ld_value)), + ( + ld_list.is_container, + dict( + expanded_json=lambda c, **kw: ld_list.from_list( + ld_list([c]).item_list, container=ld_list([c]).container, **kw + ).ld_value + ), + ), + ( + ld_list.is_ld_list, + dict( + expanded_json=lambda c, **kw: ld_list.from_list( + ld_list(c).item_list, container=ld_list(c).container, **kw + ).ld_value + ), + ), + (lambda c: isinstance(c, list), dict(expanded_json=lambda c, **kw: ld_list.from_list(c, **kw).ld_value)), (lambda v: isinstance(v, (int, float, str, bool)), dict(expanded_json=lambda v, **_: [{"@value": v}])), - - (lambda v: isinstance(v, datetime), - dict(expanded_json=lambda v, **_: [{"@value": v.isoformat(), "@type": iri_map["schema:DateTime"]}])), - (lambda v: isinstance(v, date), - dict(expanded_json=lambda v, **_: [{"@value": v.isoformat(), "@type": iri_map["schema:Date"]}])), - (lambda v: isinstance(v, time), - dict(expanded_json=lambda v, **_: [{"@value": v.isoformat(), "@type": iri_map["schema:Time"]}])), + ( + lambda v: isinstance(v, datetime), + dict(expanded_json=lambda v, **_: [{"@value": v.isoformat(), "@type": iri_map["schema:DateTime"]}]), + ), + ( + lambda v: isinstance(v, date), + dict(expanded_json=lambda v, **_: [{"@value": v.isoformat(), "@type": iri_map["schema:Date"]}]), + ), + ( + lambda v: isinstance(v, time), + dict(expanded_json=lambda v, **_: [{"@value": v.isoformat(), "@type": iri_map["schema:Time"]}]), + ), ] diff --git a/src/hermes/model/types/ld_list.py b/src/hermes/model/types/ld_list.py index 5ac4d021..cbfefaf7 100644 --- a/src/hermes/model/types/ld_list.py +++ b/src/hermes/model/types/ld_list.py @@ -36,7 +36,7 @@ def __getitem__(self, index): if isinstance(index, slice): return [self[i] for i in [*range(len(self))][index]] - item = self._to_python(self.key, [self.item_list[index]]) + item = self._to_python(self.key, self.item_list[index]) if isinstance(item, ld_container): item.index = index return item @@ -69,7 +69,7 @@ def __len__(self): def __iter__(self): for index, value in enumerate(self.item_list): - item = self._to_python(self.key, [value]) + item = self._to_python(self.key, value) if isinstance(item, ld_container): item.index = index yield item @@ -96,7 +96,10 @@ def is_ld_list(cls, ld_value): @classmethod def is_container(cls, value): # FIXME: "@set" will never be inside a dictionary of an expanded json ld object - return isinstance(value, dict) and any(ct in value for ct in cls.container_types) + return ( + isinstance(value, dict) + and len([1 for ct in cls.container_types if isinstance(value.get(ct, None), list)]) == 1 + ) @classmethod def from_list(cls, value, *, parent=None, key=None, context=None, container=None): diff --git a/test/hermes_test/model/types/test_ld_list.py b/test/hermes_test/model/types/test_ld_list.py index b3ce093d..4e9ad2f0 100644 --- a/test/hermes_test/model/types/test_ld_list.py +++ b/test/hermes_test/model/types/test_ld_list.py @@ -30,9 +30,9 @@ def test_list_basics(): def test_build_in_get(): - li = ld_list([{"@list": [{"@value": "foo"}, {"@value": "bar"}, {"@value": "foobar"}]}]) + li = ld_list([{"@list": [{"@value": "foo"}, {"@value": "bar"}, {"@value": "foobar"}]}], key="name") assert li[0] == "foo" and li[-1] == "foobar" - assert li[:2] == ["foo", "bar"] and li[1:-1] == ["bar"] + assert li[:2] == ["foo", "bar"] and li[1:-1] == ["bar"] # FIXME: maybe ld_list instead? assert li[::2] == ["foo", "foobar"] and li[::-1] == ["foobar", "bar", "foo"] li = ld_list([{"@list": [{"@type": "A", "schema:name": "a"}, {"@list": [{"@type": "A", "schema:name": "a"}]}]}]) @@ -43,7 +43,7 @@ def test_build_in_get(): def test_build_in_set(): li = ld_list([{"@list": [{"@value": "foo"}, {"@value": "bar"}, {"@value": "foobar"}]}], - key="https://schema.org/name", context={"schema": "https://schema.org/"}) + key="https://schema.org/name", context=[{"schema": "https://schema.org/"}]) li[0] = "bar" li[-1] = "barfoo" assert li.item_list[0] == {"@value": "bar"} and li.item_list[-1] == {"@value": "barfoo"} @@ -59,14 +59,19 @@ def test_build_in_set(): li[::2] = "foo" with pytest.raises(TypeError): li[:2] = 1 - li[0] = ld_dict([{"@type": "schema:Thing", "schema:name": "a"}], parent=li, key=li.key) - assert isinstance(li[0], ld_dict) and li[0].data_dict == {"@type": "schema:Thing", "schema:name": "a"} - li[0] = {"@type": "schema:Thing", "schema:name": "a"} - assert isinstance(li[0], ld_dict) and li[0].data_dict == {"@type": "schema:Thing", "schema:name": "a"} - li[0] = ld_list([{"@set": [{"@type": "schema:Thing", "schema:name": "a"}]}], parent=li, key=li.key) - assert isinstance(li[0], ld_list) and li[0].item_list == [{"@type": "schema:Thing", "schema:name": "a"}] - li[0] = {"@set": [{"@type": "schema:Thing", "schema:name": "a"}]} - assert isinstance(li[0], ld_list) and li[0].item_list == [{"@type": "schema:Thing", "schema:name": "a"}] + li[0] = ld_dict([{"@type": ["https://schema.org/Thing"], "https://schema.org/name": [{"@value": "a"}]}], parent=li, + key=li.key) + assert isinstance(li[0], ld_dict) + assert li[0].data_dict == {"@type": ["https://schema.org/Thing"], "https://schema.org/name": [{"@value": "a"}]} + li[0] = {"@type": "schema:Thing", "schema:name": "b"} + assert isinstance(li[0], ld_dict) + assert li[0].data_dict == {"@type": ["https://schema.org/Thing"], "https://schema.org/name": [{"@value": "b"}]} + li[0] = ld_list.from_list([{"@type": "schema:Thing", "schema:name": "a"}], parent=li, key=li.key, container="@set") + assert isinstance(li[0], ld_list) + assert li[0].item_list == [{"@type": ["https://schema.org/Thing"], "https://schema.org/name": [{"@value": "a"}]}] + li[0] = {"@set": [{"@type": "schema:Thing", "schema:name": "b"}]} + assert isinstance(li[0], ld_list) + assert li[0].item_list == [{"@type": ["https://schema.org/Thing"], "https://schema.org/name": [{"@value": "b"}]}] def test_build_in_len(): @@ -75,51 +80,54 @@ def test_build_in_len(): def test_build_in_iter(): - li = ld_list([{"@list": [{"@value": "foo"}, {"@type": "A", "schema:name": "a"}, {"@list": [{"@value": "bar"}]}]}], - key="https://schema.org/name", context={"schema": "https://schema.org/"}) + li = ld_list([{"@list": [{"@value": "foo"}, {"@type": ["A"], "https://schema.org/name": [{"@value": "a"}]}, + {"@list": [{"@value": "bar"}]}]}], key="https://schema.org/name", + context=[{"schema": "https://schema.org/"}]) li = [val for val in li] - assert li[0] == "foo" and li[1].data_dict == {"@type": "A", "schema:name": "a"} and li[1].index == 1 + assert li[0] == "foo" + assert li[1].data_dict == {"@type": ["A"], "https://schema.org/name": [{"@value": "a"}]} and li[1].index == 1 assert isinstance(li[2], ld_list) and li[2].item_list == [{"@value": "bar"}] and li[2].index == 2 assert li[2].key == "https://schema.org/name" def test_append(): - li = ld_list([{"@list": []}], key="https://schema.org/name", context={"schema": "https://schema.org/"}) + li = ld_list([{"@list": []}], key="https://schema.org/name", context=[{"schema": "https://schema.org/"}]) li.append("foo") assert li[0] == "foo" and li.item_list[0] == {"@value": "foo"} and len(li) == 1 li.append("bar") assert li[0:2] == ["foo", "bar"] and li.item_list[1] == {"@value": "bar"} and len(li) == 2 - li.append(ld_dict([{"@type": "A", "schema:name": "a"}])) - assert li.item_list[2] == {"@type": "A", "schema:name": "a"} and len(li) == 3 + li.append(ld_dict.from_dict({"@type": "A", "schema:name": "a"}, parent=li, key=li.key)) + assert li.item_list[2] == {"@type": ["A"], "https://schema.org/name": [{"@value": "a"}]} and len(li) == 3 li.append({"@type": "A", "schema:name": "a"}) assert li.item_list[2] == li.item_list[3] - li.append(ld_list([{"@list": [{"@type": "A", "schema:name": "a"}]}])) - li.append([{"@type": "A", "schema:name": "a"}]) + li.append(ld_list([{"@list": [{"@type": ["A"], "https://schema.org/name": [{"@value": "a"}]}]}], parent=li, + key=li.key)) + li.append([{"@type": "A", "schema:name": "a"}]) # FIXME: should that be interpreted as a list or expanded dict? li.append(2 * [{"@type": "A", "schema:name": "a"}]) - assert 2 * li.item_list[4] == 2 * li.item_list[5] == li.item_list[6] + assert 2 * li[4].item_list == 2 * [li[5].data_dict] == li[6].item_list def test_extend(): - li = ld_list([{"@list": []}], key="https://schema.org/name", context={"schema": "https://schema.org/"}) + li = ld_list([{"@list": []}], key="https://schema.org/name", context=[{"schema": "https://schema.org/"}]) li.extend([]) assert len(li) == 0 li.extend(["foo"]) assert li[0] == "foo" and li.item_list[0] == {"@value": "foo"} and len(li) == 1 li.extend(["bar"]) assert li[0:2] == ["foo", "bar"] and li.item_list[1] == {"@value": "bar"} and len(li) == 2 - li.extend([ld_dict([{"@type": "A", "schema:name": "a"}])]) - assert li[-1].data_dict == {"@type": "A", "schema:name": "a"} and len(li) == 3 + li.extend([ld_dict([{"@type": ["A"], "https://schema.org/name": [{"@value": "a"}]}])]) + assert li[-1].data_dict == {"@type": ["A"], "https://schema.org/name": [{"@value": "a"}]} and len(li) == 3 - li = ld_list([{"@list": []}], key="https://schema.org/name", context={"schema": "https://schema.org/"}) - li.extend(["foo", "bar", ld_dict([{"@type": "A", "schema:name": "a"}])]) + li = ld_list([{"@list": []}], key="https://schema.org/name", context=[{"schema": "https://schema.org/"}]) + li.extend(["foo", "bar", ld_dict([{"@type": ["A"], "https://schema.org/name": [{"@value": "a"}]}])]) assert li[0:2] == ["foo", "bar"] and li.item_list[0:2] == [{"@value": "foo"}, {"@value": "bar"}] - assert li[-1].data_dict == {"@type": "A", "schema:name": "a"} and len(li) == 3 + assert li[-1].data_dict == {"@type": ["A"], "https://schema.org/name": [{"@value": "a"}]} and len(li) == 3 - li = ld_list([{"@list": []}], key="https://schema.org/name", context={"schema": "https://schema.org/"}) + li = ld_list([{"@list": []}], key="https://schema.org/name", context=[{"schema": "https://schema.org/"}]) li.append("foo") - li.extend(["bar", ld_dict([{"@type": "A", "schema:name": "a"}])]) + li.extend(["bar", ld_dict([{"@type": ["A"], "https://schema.org/name": [{"@value": "a"}]}])]) assert li[0:2] == ["foo", "bar"] and li.item_list[0:2] == [{"@value": "foo"}, {"@value": "bar"}] - assert li[-1].data_dict == {"@type": "A", "schema:name": "a"} and len(li) == 3 + assert li[-1].data_dict == {"@type": ["A"], "https://schema.org/name": [{"@value": "a"}]} and len(li) == 3 def test_is_ld_list(): @@ -138,11 +146,12 @@ def test_is_container(): def test_from_list(): li = ld_list.from_list([]) assert li.container == "@list" and li.item_list == li.context == [] and li.parent is li.key is li.index is None - li = ld_list.from_list([], parent=li, key="schema:name", context={"schema": "https://schema.org/"}, + li = ld_list.from_list([], parent=li, key="schema:name", context=[{"schema": "https://schema.org/"}], container="@set") assert li.container == "@set" and li.item_list == [] and li.parent is not None and li.key == "schema:name" - assert li.index is None and li.context == {"schema": "https://schema.org/"} + assert li.index is None and li.context == [{"schema": "https://schema.org/"}] li = ld_list.from_list(["a", {"@value": "b"}], parent=None, key="https://schema.org/name", - context={"schema": "https://schema.org/"}, container="@graph") + context=[{"schema": "https://schema.org/"}], container="@graph") assert li.container == "@graph" and li.item_list == [{"@value": "a"}, {"@value": "b"}] and li.parent is None - assert li.key == "https://schema.org/name" and li.index is None and li.context == {"schema": "https://schema.org/"} + assert li.key == "https://schema.org/name" and li.index is None + assert li.context == [{"schema": "https://schema.org/"}] From 17213251034ae8d177719d1f09d7d85e193ba1e4 Mon Sep 17 00:00:00 2001 From: notactuallyfinn Date: Thu, 2 Oct 2025 10:54:25 +0200 Subject: [PATCH 128/247] clean up --- src/hermes/model/api.py | 14 +------------- test/hermes_test/model/test_api.py | 2 -- 2 files changed, 1 insertion(+), 15 deletions(-) diff --git a/src/hermes/model/api.py b/src/hermes/model/api.py index eaee6a77..8b079544 100644 --- a/src/hermes/model/api.py +++ b/src/hermes/model/api.py @@ -1,4 +1,4 @@ -from hermes.model.types import ld_dict, ld_list +from hermes.model.types import ld_dict from hermes.model.types.ld_context import ALL_CONTEXTS @@ -8,15 +8,3 @@ class SoftwareMetadata(ld_dict): def __init__(self, data: dict = None, extra_vocabs: dict[str, str] = None) -> None: ctx = ALL_CONTEXTS + [{**extra_vocabs}] if extra_vocabs is not None else ALL_CONTEXTS super().__init__([ld_dict.from_dict(data, context=ctx).data_dict if data else {}], context=ctx) - - def add(self, key, value): - if key not in self: - self[key] = value - return - if isinstance(val := self[key], ld_list): - val.append(value) - else: - temp = ld_list([{"@list": []}], parent=self, key=self.ld_proc.expand_iri(self.active_ctx, key), - context=self.context) - temp.extend([val, value]) - self[key] = temp diff --git a/test/hermes_test/model/test_api.py b/test/hermes_test/model/test_api.py index 0ee910db..fd2288d4 100644 --- a/test/hermes_test/model/test_api.py +++ b/test/hermes_test/model/test_api.py @@ -60,7 +60,6 @@ def test_append(): data["foo"].append("c") assert data["foo"].item_list == [{"@value": "a"}, {"@value": "b"}, {"@value": "c"}] data = SoftwareMetadata() - # FIXME: #433 will fix this data["foo"].append({"schema:name": "foo"}) assert type(data["foo"]) is ld_list and type(data["foo"][0]) is ld_dict assert data["foo"][0].data_dict == {"http://schema.org/name": [{"@value": "foo"}]} @@ -83,7 +82,6 @@ def test_iterative_assignment(): author1["email"] = "author@example.com" authors[0] = author1 authors.append({"name": "Bar", "email": "author2@example.com"}) - data["author"] = authors assert len(authors) == 2 From 1ecd0f8a638c16082953d851b36f33105580d239 Mon Sep 17 00:00:00 2001 From: notactuallyfinn Date: Thu, 2 Oct 2025 12:18:25 +0200 Subject: [PATCH 129/247] suggestion for fix of #435 --- src/hermes/model/types/ld_container.py | 1 - src/hermes/model/types/ld_dict.py | 14 +++---- test/hermes_test/model/types/test_ld_dict.py | 43 +++++++++++++++----- 3 files changed, 39 insertions(+), 19 deletions(-) diff --git a/src/hermes/model/types/ld_container.py b/src/hermes/model/types/ld_container.py index 62716fe9..31445316 100644 --- a/src/hermes/model/types/ld_container.py +++ b/src/hermes/model/types/ld_container.py @@ -152,7 +152,6 @@ def merge_to_list(cls, *args): return [] head, *tail = args - # FIXME: #435 Context is not merged if isinstance(head, list): return [*head, *cls.merge_to_list(*tail)] else: diff --git a/src/hermes/model/types/ld_dict.py b/src/hermes/model/types/ld_dict.py index 8e8cc75f..19c95ce7 100644 --- a/src/hermes/model/types/ld_dict.py +++ b/src/hermes/model/types/ld_dict.py @@ -85,22 +85,20 @@ def from_dict(cls, value, *, parent=None, key=None, context=None, ld_type=None): ld_data["@type"] = ld_type data_context = ld_data.pop('@context', []) - full_context = ld_container.merge_to_list(context or [], data_context) - if parent is None and data_context: - ld_data["@context"] = data_context + merged_contexts = ld_container.merge_to_list(data_context, context or []) + full_context = [] + if parent is None and merged_contexts: + ld_data["@context"] = merged_contexts elif parent is not None: - full_context[:0] = parent.full_context + full_context = parent.full_context + merged_contexts ld_value = cls.ld_proc.expand(ld_data, {"expandContext": full_context, "documentLoader": bundled_loader}) - ld_value = cls(ld_value, parent=parent, key=key, context=data_context) + ld_value = cls(ld_value, parent=parent, key=key, context=merged_contexts) return ld_value @classmethod def is_ld_dict(cls, ld_value): - # FIXME: #435 maybe rename to is_expanded_ld_dict - # because this functions tests if ld_value could be an expanded json ld dict - # (is_ld_node returns False for every compacted json ld dictionary) return cls.is_ld_node(ld_value) and cls.is_json_dict(ld_value[0]) @classmethod diff --git a/test/hermes_test/model/types/test_ld_dict.py b/test/hermes_test/model/types/test_ld_dict.py index 8aacd13e..0bc15792 100644 --- a/test/hermes_test/model/types/test_ld_dict.py +++ b/test/hermes_test/model/types/test_ld_dict.py @@ -61,7 +61,6 @@ def test_build_in_set(): assert di.data_dict == {"http://xmlns.com/foaf/0.1/name": [{"@value": "Manu Sporny"}], "http://xmlns.com/foaf/0.1/homepage": [{"@id": "http://manu.sporny.org/"}]} - # FIXME: #435 TypeError for nested ld_container or mind in api di = ld_dict([{}], context=[{"xmlns": "http://xmlns.com/foaf/0.1/"}]) di2 = ld_list([{"@list": [{"@value": "Manu Sporny"}, {"@value": "foo"}]}], parent=di, key="http://xmlns.com/foaf/0.1/name") @@ -69,7 +68,7 @@ def test_build_in_set(): di["xmlns:name"] = di2 assert di.data_dict == {"http://xmlns.com/foaf/0.1/name": [{"@list": [{"@value": "Manu Sporny"}, {"@value": "foo"}]}]} - # FIXME: #435 context is not applied on value side + di = ld_dict([{}], context=[{"schema": "https://schema.org/"}]) di2 = ld_dict([{"@type": ["https://schema.org/Action"], "https://schema.org/name": [{"@value": "Test"}]}], context=[{"schema": "https://schema.org/"}], parent=di, key="https://schema.org/result") @@ -83,7 +82,7 @@ def test_build_in_set(): "https://schema.org/name": [{"@value": "Test"}] }] } - # FIXME: #435 nesting in defintion is very long but with intermediate steps you need to define with None + di = ld_dict([{}], context=[{"schema": "https://schema.org/"}]) di2 = ld_dict([{"@type": ["https://schema.org/Action"], "https://schema.org/error": None}], @@ -133,6 +132,21 @@ def test_build_in_set(): } assert isinstance(di["schema:result"]["schema:error"]["schema:name"], ld_list) + # FIXME: fixing #433 would fix this (setting nested python dicts) + di = ld_dict([{}], context=[{"schema": "https://schema.org/"}]) + di["@type"] = "schema:Thing" + di["schema:result"] = {"@type": "schema:Action", "schema:error": {"@type": "schema:Thing", "schema:name": "foo"}} + assert di.data_dict == { + "@type": ["https://schema.org/Thing"], + "https://schema.org/result": [{ + "@type": ["https://schema.org/Action"], + "https://schema.org/error": [{ + "@type": ["https://schema.org/Thing"], + "https://schema.org/name": [{"@value": "foo"}] + }] + }] + } + def test_build_in_delete(): di = ld_dict([{"http://xmlns.com/foaf/0.1/name": [{"@value": "Manu Sporny"}], @@ -180,7 +194,6 @@ def test_update(): "http://xmlns.com/foaf/0.1/barfoo": [{"@id": "foo"}]}], context=[{"xmlns": "http://xmlns.com/foaf/0.1/"}], parent=di, key="http://xmlns.com/foaf/0.1/foo") - # FIXME: #435 Allow nested ld_dicts? see commit before di.update({"http://xmlns.com/foaf/0.1/name": "foo", "xmlns:homepage": {"@id": "bar"}, "xmlns:foo": di2}) assert di.data_dict == {"http://xmlns.com/foaf/0.1/name": [{"@value": "foo"}], @@ -269,7 +282,6 @@ def test_from_dict(): assert di.active_ctx == {"mappings": {}} and di.context == di.full_context == [] assert di.index is di.key is di.parent is None - # FIXME: #435 When you can do short form from dict, then context should also be given in short form (as dict) di = ld_dict.from_dict({"@context": [{"schema": "https://schema.org/"}], "@type": "schema:Thing", "@id": "foo"}) assert di.data_dict == {"@type": ["https://schema.org/Thing"], "@id": "foo"} assert di.context == di.full_context == [{"schema": "https://schema.org/"}] @@ -280,7 +292,6 @@ def test_from_dict(): "schema:name": "foo"}, parent=outer_di, key="schema:result") assert di.data_dict == {"@type": ["https://schema.org/Action"], "https://schema.org/name": [{"@value": "foo"}]} - # FIXME: #435 Full Context with the same key_value pair twice? assert di.full_context == 2 * [{"schema": "https://schema.org/"}] assert di.context == [{"schema": "https://schema.org/"}] and di.key == "schema:result" and di.index is None @@ -297,15 +308,27 @@ def test_from_dict(): assert di.full_context == 2 * [{"schema": "https://schema.org/"}] assert di.context == [] and di.key == "schema:error" and di.index is None - # FIXME: #435 @context and context can be different and only @context is used here - di = ld_dict.from_dict({"@context": [{"schema": "https://schema.org/", "xmlns": "http://xmlns.com/foaf/0.1/"}], "@type": "schema:Thing", "xmlns:name": "fo"}, + di = ld_dict.from_dict({"@context": [{"schema": "https://schema.org/"}], "@type": "schema:Thing", "xmlns:name": "fo"}, context=[{"schema": "https://schema.org/", "xmlns": "http://xmlns.com/foaf/0.1/"}]) assert di["http://xmlns.com/foaf/0.1/name"] == di["xmlns:name"] == "fo" - assert di.context == [{"schema": "https://schema.org/", "xmlns": "http://xmlns.com/foaf/0.1/"}] + assert di.context == [{"schema": "https://schema.org/"}, + {"schema": "https://schema.org/", "xmlns": "http://xmlns.com/foaf/0.1/"}] + + outer_di = ld_dict.from_dict({"@context": [{"schema": "https://schema.org/"}], "@type": "schema:Thing", "@id": "foo"}) + di = ld_dict.from_dict({"@context": {"schema": "https://schema.org/"}, "@type": "schema:Action", + "schema:name": "foo"}, + parent=outer_di, key="schema:result") + assert di.data_dict == {"@type": ["https://schema.org/Action"], "https://schema.org/name": [{"@value": "foo"}]} + assert di.full_context == 2 * [{"schema": "https://schema.org/"}] + assert di.context == [{"schema": "https://schema.org/"}] and di.key == "schema:result" and di.index is None + + di = ld_dict.from_dict({"@context": {"schema": "https://schema.org/"}, "@type": "schema:Thing", "xmlns:name": "fo"}, + context={"xmlns": "http://xmlns.com/foaf/0.1/"}) + assert di["http://xmlns.com/foaf/0.1/name"] == di["xmlns:name"] == "fo" + assert di.context == [{"schema": "https://schema.org/"}, {"xmlns": "http://xmlns.com/foaf/0.1/"}] def test_is_ld_dict(): - # FIXME: #435 maybe rename to is_expanded_ld_dict assert not any(ld_dict.is_ld_dict(item) for item in [{}, {"foo": "bar"}, {"@id": "foo"}]) assert not any(ld_dict.is_ld_dict(item) for item in [[{"@id": "foo"}], [{"@set": "foo"}], [{}, {}], [], [""]]) assert all(ld_dict.is_ld_dict([item]) for item in [{"@id": "foo", "foobar": "bar"}, {"foo": "bar"}]) From 1fb7574f9105817e90166ed54aa57bc4a8423723 Mon Sep 17 00:00:00 2001 From: Stephan Druskat Date: Thu, 2 Oct 2025 23:43:39 +0200 Subject: [PATCH 130/247] Comment out local extension that breaks build --- docs/source/conf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/source/conf.py b/docs/source/conf.py index 9ee616da..7ab7b582 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -102,7 +102,7 @@ def read_version_from_pyproject(): 'sphinx_togglebutton', 'sphinxcontrib.datatemplates', # Custom extensions, see `_ext` directory. - 'plugin_markup', + # 'plugin_markup', ] language = 'en' From 8d147a8ac0cb3708933d739ff64db645b74f10ce Mon Sep 17 00:00:00 2001 From: Stephan Druskat Date: Thu, 2 Oct 2025 23:44:03 +0200 Subject: [PATCH 131/247] Document data model API --- docs/source/dev/data_model.md | 305 ++++++++++++++++++++++++++++++++-- 1 file changed, 292 insertions(+), 13 deletions(-) diff --git a/docs/source/dev/data_model.md b/docs/source/dev/data_model.md index a5f4c727..c51f3401 100644 --- a/docs/source/dev/data_model.md +++ b/docs/source/dev/data_model.md @@ -1,27 +1,306 @@ -# HERMES Data Model +# Data model -*hermes* uses an internal data model to store the output of the different stages. -All the data is collected in a directory called `.hermes` located in the root of the project directory. +`hermes`' internal data model acts like a contract between `hermes` and plugins. +It is based on [**JSON-LD (JSON Linked Data)**](https://json-ld.org/), and +the public API simplifies interaction with the data model through Python code. -You should not need to interact with this data directly. -Instead, use {class}`hermes.model.context.HermesContext` and respective subclasses to access the data in a consistent way. +Output of the different `hermes` commands consequently is valid JSON-LD, serialized as JSON, that is cached in +subdirectories of the `.hermes/` directory that is created in the root of the project directory. +The cache is purely for internal purposes, its data should not be interacted with. -## Harvest Data +As JSON-LD can be confusing to work with directly, the following sections provide documentation of the data model. +Depending on whether you develop a plugin for `hermes`, or you develop `hermes` itself, you need to know either _some_, +or _quite a few_ things about JSON-LD. -The data of the havesters is cached in the sub-directory `.hermes/harvest`. -Each harvester has a separate cache file to allow parallel harvesting. -The cache file is encoded in JSON and stored in `.hermes/harvest/HARVESTER_NAME.json` -where `HARVESTER_NAME` corresponds to the entry point name. +## The data model for plugin developers -{class}`hermes.model.context.HermesHarvestContext` encapsulates these harvester caches. +If you develop a plugin for `hermes`, you will only need to work with a single Python class and the public API +it provides: {class}`hermes.model.SoftwareMetadata`. + +Nevertheless, it is still necessary that you know _some_ things about JSON-LD. + +### JSON-LD for plugin developers + +```{attention} +Work in progress. +``` + + +### Working with the `hermes` data model in plugins + +> **Goal** +> Understand how plugins access and interact with the `hermes` data model. + +`hermes` aims to hide as much of the data model as possible behind a public API +to avoid that plugin developers have to deal with the complexities and intricacies of JSON-LD. + +#### Model instances in different types of plugin + +You can extend `hermes` with plugins for three different commands: `harvest`, `curate`, `deposit`. + +The commands differ in how they work with instances of the data model. + +- `harvest` plugins _create_ a single new model instance and return it. +- `curate` plugins are passed a single existing model instance (the output of `process`), +and return a single model instance. +- `deposit` plugins are passed a single existing model instance (the output of `curate`), +and return a single model instance. + +#### How plugins work with the API + +```{important} +Plugins access the data model _exclusively_ through the API class {class}`hermes.model.SoftwareMetadata`. +``` + +The following sections show how this class works. + +##### Creating a data model instance + +Model instances are primarily created in `harvest` plugins, but may also be created in other plugins to map +existing data into. + +To create a new model instance, initialize {class}`hermes.model.SoftwareMetadata`: + +```{code-block} python +:caption: Initializing a default data model instance +from hermes.model import SoftwareMetadata + +data = SoftwareMetadata() +``` + +`SoftwareMetadata` objects initialized without arguments provide the default _context_ +(see [_JSON-LD for plugin developers_](#json-ld-for-plugin-developers)). +This means that now, you can use terms from the schemas included in the default context to describe software metadata. + +Terms from [_CodeMeta_](https://codemeta.github.io/terms/) can be used without a prefix: + +```{code-block} python +:caption: Using terms from the default schema +data["readme"] = ... +``` + +Terms from [_Schema.org_](https://schema.org/) can be used with the prefix `schema`: + +```{code-block} python +:caption: Using terms from a non-default schema +data["schema:copyrightNotice"] = ... +``` + +You can also use other linked data vocabularies. To do this, you need to identify them with a prefix and register them +with the data model by passing it `extra_vocabs` as a `dict` mapping prefixes to URLs where the vocabularies are +provided as JSON-LD: + +```{code-block} python +:caption: Injecting additional schemas +from hermes.model import SoftwareMetadata + +# Contents served at https://bar.net/schema.jsonld: +# { +# "@context": +# { +# "baz": "https://schema.org/Thing" +# } +# } + +data = SoftwareMetadata(extra_vocabs={"foo": "https://bar.net/schema.jsonld"}) + +data["foo:baz"] = ... +``` + +##### Adding data + +Once you have an instance of {class}`hermes.model.SoftwareMetadata`, you can add data to it, +i.e., metadata that describes software: + +```{code-block} python +:caption: Setting data values +data["name"] = "My Research Software" # A simple "Text"-type value +data["author"] = {"name": "Foo"} # An object value that uses terms available in the defined context +``` + +##### Accessing data + +You need to be able to access data in the data model instance to add, edit or remove data. +Data can be accessed by using term strings, similar to how values in Python `dict`s are accessed by keys. + +```{important} +When you access data from a data model instance, +it will always be returned in a **list**-like object! +``` + +The reason for providing data in list-like objects is that JSON-LD treats all property values as arrays. +Even if you add "single value" data to a `hermes` data model instance via the API, the underlying JSON-LD model +will treat it as an array, i.e., a list-like object: + +```{code-block} python +:caption: Internal data values are arrays +data["name"] = "My Research Software" # → [ "My Research Software" ] +data["author"] = {"name": "Foo"} # → [ { "name": [ "Foo" ] } ] +``` + +The fact that you will always be returned a list-like object has consequences for accessing and creating data: + +1. You need to access single values using indices, e.g., `data["name"][0]`. +2. You can use list-like API to interact with data objects, e.g., +`data["name"].append("Bar")`, `data["name"].extend(["Bar", "Baz"])`. + +##### Interacting with data + +The following longer example shows different ways that you can interact with `SoftwareMetadata` objects and the data API. + +```{code-block} python +:caption: Building the data model +from hermes.model import SoftwareMetadata + +data = SoftwareMetadata() + +# Let's create author metadata for our software! +# Below each line of code, the value of `data["author"]` is given. + +data["author"] = {"name": "Foo"} +# → [{'name': ['Foo']}] + +data["author"].append({"name": "Bar"}) +# [{'name': ['Foo']}, {'name': ['Bar']}] + +data["author"][0]["email"] = "foo@baz.net" +# [{'name': ['Foo'], 'email': ['foo@baz.net']}, {'name': ['Bar']}] + +data["author"][1]["email"].append("bar@baz.net") +# [{'name': ['Foo'], 'email': ['foo@baz.net']}, {'name': ['Bar'], 'email': ['bar@baz.net']}] + +data["author"][1]["email"].extend(["bar@spam.org", "bar@eggs.com"]) +# [ +# {'name': ['Foo'], 'email': ['foo@baz.net']}, +# {'name': ['Bar'], 'email': ['bar@baz.net', 'bar@spam.org', 'bar@eggs.com']} +# ] +``` + +The example continues to show how to iterate through data. + +```{code-block} python +:caption: for-loop, containment check +for i, author in enumerate(data["author"]): + if author["name"][0] in ["Foo", "Bar"]: + print(f"Author {i + 1} has expected name.") + else: + raise ValueError("Unexpected author name found!", author["name"][0]) + +# Mock output: +# $> Author 1 has expected name. +# $> Author 2 has expected name. +``` + +```{code-block} python +:caption: Value check +for email in data["author"][0]["email"]: + if email.endswith(".edu"): + print("Author has an email address at an educational institution.") + else: + print("Cannot confirm affiliation with educational institution for author.") + +# Mock output +# $> Cannot confirm affiliation with educational institution for author. +``` + +```{code-block} python +:caption: Value check and list comprehension +if ["bar" in email for email in data["author"][1]["email"]]: + print("Author has only emails with their name in it.") + +# Mock output +# $> Author has only emails with their name in it. +``` + +The example continues to show how to assert data values. + +As mentioned in the [introduction to the data model](#data-model), +`hermes` uses a JSON-LD-like internal data model. +The API class {class}`hermes.model.SoftwareMetadata` hides many +of the more complex aspects of JSON-LD and makes it easy to work +with the data model. + +Assertions, however, operate on the internal model objects. +Therefore, they may not work as you would expect from plain +Python data: + +```{code-block} python +:caption: Naive containment assertion that raises +:emphasize-lines: 5,13 +try: + assert ( + {'name': ['Foo'], 'email': ['foo@baz.net']} + in + data["author"] + ) + print("The author was found!") +except AssertionError: + print("The author could not be found.") + raise + +# Mock output +# $> The author could not be found. +# $> AssertionError: +# assert +# {'email': ['foo@baz.net'], 'name': ['Foo']} +# in +# _LDList( +# {'@list': [ +# { +# 'http://schema.org/name': [{'@value': 'Foo'}], +# 'http://schema.org/email': [{'@value': 'foo@baz.net'}] +# }, +# { +# 'http://schema.org/name': [{'@value': 'Bar'}], +# 'http://schema.org/email': [ +# {'@list': [ +# {'@value': 'bar@baz.net'}, {'@value': 'bar@spam.org'}, {'@value': 'bar@eggs.com'} +# ]} +# ] +# }] +# } +# ) +``` + +The mock output in the example above shows the inequality of the expected and the actual value. +The actual value is an internal data type wrapping the more complex JSON-LD data. + +The complex data structure of JSON-LD is internally constructed in the `hermes` data +model, and to make it possible to work with only the data that is important - the actual terms +and their values - the internal data model types provide a function `.to_python()`. +This function can be used in assertions to assert full data integrity: + +```{code-block} python +:caption: Containment assertion with `to_python()` +:emphasize-lines: 5,13 +try: + assert ( + {'name': ['Foo'], 'email': ['foo@baz.net']} + in + data["author"].to_python() + ) + print("The author was found!") +except AssertionError: + print("The author could not be found.") + raise + +# Mock output +# $> The author was found! +``` + +--- + +## See Also + +- Reference: {class}`hermes.model.SoftwareMetadata` API From 7e1ac6441a440c4d79d87a817af486a6c312c24c Mon Sep 17 00:00:00 2001 From: Stephan Druskat Date: Thu, 2 Oct 2025 23:51:43 +0200 Subject: [PATCH 132/247] Update dependency lock --- poetry.lock | 75 ++++++++++++++++++++++++++++++++++++++++++++++++++--- 1 file changed, 72 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index 0eaf104b..34fa117f 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 2.1.4 and should not be changed by hand. +# This file is automatically @generated by Poetry 2.1.3 and should not be changed by hand. [[package]] name = "accessible-pygments" @@ -700,6 +700,19 @@ files = [ {file = "iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7"}, ] +[[package]] +name = "isodate" +version = "0.7.2" +description = "An ISO 8601 date/time/duration parser and formatter" +optional = false +python-versions = ">=3.7" +groups = ["main"] +markers = "python_version == \"3.10\"" +files = [ + {file = "isodate-0.7.2-py3-none-any.whl", hash = "sha256:28009937d8031054830160fce6d409ed342816b543597cece116d966c6d99e15"}, + {file = "isodate-0.7.2.tar.gz", hash = "sha256:4cd1aa0f43ca76f4a6c6c0292a85f40b35ec2e43e315b59f06e6d32171a953e6"}, +] + [[package]] name = "jinja2" version = "3.1.6" @@ -1139,6 +1152,24 @@ files = [ dev = ["abi3audit", "black", "check-manifest", "coverage", "packaging", "pylint", "pyperf", "pypinfo", "pytest-cov", "requests", "rstcheck", "ruff", "sphinx", "sphinx_rtd_theme", "toml-sort", "twine", "virtualenv", "vulture", "wheel"] test = ["pytest", "pytest-xdist", "setuptools"] +[[package]] +name = "pyaml" +version = "25.7.0" +description = "PyYAML-based module to produce a bit more pretty and readable YAML-serialized data" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "pyaml-25.7.0-py3-none-any.whl", hash = "sha256:ce5d7867cc2b455efdb9b0448324ff7b9f74d99f64650f12ca570102db6b985f"}, + {file = "pyaml-25.7.0.tar.gz", hash = "sha256:e113a64ec16881bf2b092e2beb84b7dcf1bd98096ad17f5f14e8fb782a75d99b"}, +] + +[package.dependencies] +PyYAML = "*" + +[package.extras] +anchors = ["unidecode"] + [[package]] name = "pycodestyle" version = "2.9.1" @@ -1594,7 +1625,7 @@ version = "6.0.2" description = "YAML parser and emitter for Python" optional = false python-versions = ">=3.8" -groups = ["docs"] +groups = ["main", "docs"] files = [ {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, @@ -1651,6 +1682,29 @@ files = [ {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, ] +[[package]] +name = "rdflib" +version = "7.2.1" +description = "RDFLib is a Python library for working with RDF, a simple yet powerful language for representing information." +optional = false +python-versions = ">=3.8.1" +groups = ["main"] +files = [ + {file = "rdflib-7.2.1-py3-none-any.whl", hash = "sha256:1a175bc1386a167a42fbfaba003bfa05c164a2a3ca3cb9c0c97f9c9638ca6ac2"}, + {file = "rdflib-7.2.1.tar.gz", hash = "sha256:cf9b7fa25234e8925da8b1fb09700f8349b5f0f100e785fb4260e737308292ac"}, +] + +[package.dependencies] +isodate = {version = ">=0.7.2,<1.0.0", markers = "python_version < \"3.11\""} +pyparsing = ">=2.1.0,<4" + +[package.extras] +berkeleydb = ["berkeleydb (>=18.1.0,<19.0.0)"] +html = ["html5rdf (>=1.2,<2)"] +lxml = ["lxml (>=4.3,<6.0)"] +networkx = ["networkx (>=2,<4)"] +orjson = ["orjson (>=3.9.14,<4)"] + [[package]] name = "requests" version = "2.32.4" @@ -1806,6 +1860,21 @@ files = [ {file = "ruamel.yaml.clib-0.2.12.tar.gz", hash = "sha256:6c8fbb13ec503f99a91901ab46e0b07ae7941cd527393187039aec586fdfd36f"}, ] +[[package]] +name = "schemaorg" +version = "0.1.1" +description = "Python functions for applied use of schema.org" +optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "schemaorg-0.1.1.tar.gz", hash = "sha256:567f1735df666221c893d2c206dd70f9cddcc983c8cdc39f3a7b7726884d2c51"}, +] + +[package.dependencies] +lxml = ">=4.1.1" +pyaml = ">=17.12.1" + [[package]] name = "setuptools" version = "80.9.0" @@ -2477,4 +2546,4 @@ files = [ [metadata] lock-version = "2.1" python-versions = ">=3.10, <4.0.0" -content-hash = "58304fd33d6ec1ce3400b43ecffb16b3f48a5621e513c3e8057f9e3e050835e8" +content-hash = "e76de51d1f5dd86486d4cc24a5cdf7d007b16ce5d9d0cc3f7d0f353cf0defff0" From 5fde82ad936d5784cb463c7930f92c5d90496644 Mon Sep 17 00:00:00 2001 From: notactuallyfinn Date: Mon, 6 Oct 2025 12:04:08 +0200 Subject: [PATCH 133/247] added contains and equals check for ld_list with tests and added issue number to fixmes --- src/hermes/model/types/ld_list.py | 30 ++++++++++++++++---- test/hermes_test/model/types/test_ld_list.py | 25 ++++++++++++++++ 2 files changed, 49 insertions(+), 6 deletions(-) diff --git a/src/hermes/model/types/ld_list.py b/src/hermes/model/types/ld_list.py index cbfefaf7..d915842a 100644 --- a/src/hermes/model/types/ld_list.py +++ b/src/hermes/model/types/ld_list.py @@ -15,10 +15,10 @@ class ld_list(ld_container): def __init__(self, data, *, parent=None, key=None, index=None, context=None): """ Create a new ld_list.py container. - # FIXME: there is no parameter container + # FIXME: #439 there is no parameter container :param container: The container type for this list. """ - # FIXME: A set container does not contain "@set" in the expected data format (expanded json ld) + # FIXME: #439 A set container does not contain "@set" in the expected data format (expanded json ld) # Instead it is just a list of dicts and therefor would raise a ValueError here (and fail ld_list.is_ld_list) super().__init__(data, parent=parent, key=key, index=index, context=context) @@ -42,7 +42,7 @@ def __getitem__(self, index): return item def __setitem__(self, index, value): - # FIXME: what should your_ld_list[index] = [{"@type": "foo", "name": "bar"}] mean? + # FIXME: #439 what should your_ld_list[index] = [{"@type": "foo", "name": "bar"}] mean? # set your_ld_list[index] to the dict {"@type": "foo", "name": "bar"} given in expanded form or # set your_ld_list[index] to the list [{"@type": "foo", "name": "bar"}] given in non expanded form or # set your_ld_list[index] to the set [{"@type": "foo", "name": "bar"}] given in expanded form @@ -52,7 +52,7 @@ def __setitem__(self, index, value): # This is relevent because nested sets get unnested when being expanded and lists not. # Moreover a set inside a list gets automaticaly converted to a list when expanded) - # FIXME: what happens when a ld_list is put inside another also depends on their container types + # FIXME: #439 what happens when a ld_list is put inside another also depends on their container types if not isinstance(index, slice): self.item_list[index] = val[0] if isinstance(val := self._to_expanded_json(self.key, value), list) else val @@ -74,6 +74,24 @@ def __iter__(self): item.index = index yield item + def __contains__(self, value): + expanded_value = val[0] if isinstance(val := self._to_expanded_json(self.key, value), list) else val + return expanded_value in self.item_list + + def __eq__(self, other): + if isinstance(other, ld_list): + # FIXME: #439 When are ld_lists equal? + return self.item_list == other.item_list and self.container == other.container + if isinstance(other, list): + return self.item_list == self._to_expanded_json(self.key, other)[0]["@list"] + return NotImplemented + + def __ne__(self, other): + x = self.__eq__(other) + if x is NotImplemented: + return NotImplemented + return not x + def append(self, value): ld_value = self._to_expanded_json(self.key, value) self.item_list.extend(ld_value) @@ -90,12 +108,12 @@ def to_python(self): @classmethod def is_ld_list(cls, ld_value): - # FIXME: every python list that contains at least one dict can be considerd a set in expanded json form + # FIXME: #439 every python list that contains at least one dict can be considerd a set in expanded json form return cls.is_ld_node(ld_value) and cls.is_container(ld_value[0]) @classmethod def is_container(cls, value): - # FIXME: "@set" will never be inside a dictionary of an expanded json ld object + # FIXME: #439 "@set" will never be inside a dictionary of an expanded json ld object return ( isinstance(value, dict) and len([1 for ct in cls.container_types if isinstance(value.get(ct, None), list)]) == 1 diff --git a/test/hermes_test/model/types/test_ld_list.py b/test/hermes_test/model/types/test_ld_list.py index 4e9ad2f0..377815a8 100644 --- a/test/hermes_test/model/types/test_ld_list.py +++ b/test/hermes_test/model/types/test_ld_list.py @@ -107,6 +107,31 @@ def test_append(): assert 2 * li[4].item_list == 2 * [li[5].data_dict] == li[6].item_list +def test_build_in_contains(): + li = ld_list([{"@list": []}], key="https://schema.org/name", context=[{"schema": "https://schema.org/"}]) + li.append("foo") + li.append({"@type": "A", "schema:name": "a"}) + assert "foo" in li and {"@type": "A", "schema:name": "a"} in li + assert {"@value": "foo"} in li and {"@type": "A", "https://schema.org/name": "a"} in li + + +def test_build_in_comparison(): + li = ld_list([{"@list": []}], key="https://schema.org/name", context=[{"schema": "https://schema.org/"}]) + li2 = ld_list([{"@set": []}], key="https://schema.org/name", context=[{"schema": "https://schema.org/"}]) + li3 = ld_list([{"@list": []}], key="https://schema.org/name", context=[{"schema2": "https://schema.org/"}]) + assert li == [] and li2 == [] and [] == li and [] == li2 + assert li != li2 and li == li3 + li.append("foo") + li.append({"@type": "A", "schema:name": "a"}) + assert li != li3 and ["foo", {"@type": "A", "schema:name": "a"}] == li and ["foo"] != li3 + assert ["foo", {"@type": "A", "https://schema.org/name": "a"}] == li + li3.extend(["foo", {"@type": "A", "schema2:name": "a"}]) + assert li == li3 + li4 = ld_list([{"@list": []}], key="https://schema.org/name", context=[{"schema": "https://schema.org/"}]) + li4.extend([{"@type": "A", "schema:name": "a"}, "foo"]) + assert li != li4 + + def test_extend(): li = ld_list([{"@list": []}], key="https://schema.org/name", context=[{"schema": "https://schema.org/"}]) li.extend([]) From 9be8041d613cca5492cb54d843bc0157e27a6921 Mon Sep 17 00:00:00 2001 From: notactuallyfinn Date: Mon, 6 Oct 2025 12:12:04 +0200 Subject: [PATCH 134/247] removed tests of unclear matters (@type and @context fields) and added fixme --- test/hermes_test/model/test_api.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/test/hermes_test/model/test_api.py b/test/hermes_test/model/test_api.py index fd2288d4..4b2c614b 100644 --- a/test/hermes_test/model/test_api.py +++ b/test/hermes_test/model/test_api.py @@ -71,8 +71,6 @@ def test_append(): def test_iterative_assignment(): # This tests iterative assignments/traversals to edit/appending values - # This requires SoftwareMetadata.__getitem__ to return a plain dict. SoftwareMetadata.__setitem__ can then - # implement the isinstanceof checks that @notactuallyfinn suggested. data = SoftwareMetadata(extra_vocabs={"foo": "https://foo.bar"}) data["author"] = {"name": "Foo"} # Look, a squirrel! @@ -125,11 +123,10 @@ def test_usage(): assert baz["affiliation"].to_python() == ["Lab E"] assert len(baz["schema:knowsAbout"]) == 0 assert len(baz["email"]) == 0 - assert data["@type"] == "SoftwareSourceCode" - assert data["@context"] == ALL_CONTEXTS # FIXME: #435 will solve this issue for author in data["author"]: assert "name" in author assert "email" in author if "schema:knowsAbout" not in author: + # FIXME: None has to be discussed author["schema:knowsAbout"] = None author["schema:pronouns"] = "they/them" From dd854c7b9bb19547c8c6fb64e39974781e42f8a9 Mon Sep 17 00:00:00 2001 From: Stephan Druskat Date: Fri, 17 Oct 2025 09:24:12 +0200 Subject: [PATCH 135/247] Track data in model in simplified form --- docs/source/dev/data_model.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/docs/source/dev/data_model.md b/docs/source/dev/data_model.md index c51f3401..9d989eb9 100644 --- a/docs/source/dev/data_model.md +++ b/docs/source/dev/data_model.md @@ -126,7 +126,11 @@ i.e., metadata that describes software: ```{code-block} python :caption: Setting data values data["name"] = "My Research Software" # A simple "Text"-type value +# → Simplified model representation : { "name": [ "My Research Software" ] } +# Cf. "Accessing data" below data["author"] = {"name": "Foo"} # An object value that uses terms available in the defined context +# → Simplified model representation : { "name": [ "My Research Software" ], "author": [ { "name": "Foo" } ] } +# Cf. "Accessing data" below ``` ##### Accessing data From f4c1e7d3eef9cef19693160f37d5fdd77a1cb8cc Mon Sep 17 00:00:00 2001 From: Stephan Druskat Date: Fri, 17 Oct 2025 09:30:00 +0200 Subject: [PATCH 136/247] Link to dummy section --- docs/source/dev/data_model.md | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/docs/source/dev/data_model.md b/docs/source/dev/data_model.md index 9d989eb9..de313e40 100644 --- a/docs/source/dev/data_model.md +++ b/docs/source/dev/data_model.md @@ -19,8 +19,7 @@ subdirectories of the `.hermes/` directory that is created in the root of the pr The cache is purely for internal purposes, its data should not be interacted with. -As JSON-LD can be confusing to work with directly, the following sections provide documentation of the data model. -Depending on whether you develop a plugin for `hermes`, or you develop `hermes` itself, you need to know either _some_, +Depending on whether you develop a plugin for `hermes`, or you develop `hermes` itself, you need to know either [_some_](#json-ld-for-plugin-developers), or _quite a few_ things about JSON-LD. ## The data model for plugin developers From 97ebad4cfaf984e44d3f26623196f6e297b6571f Mon Sep 17 00:00:00 2001 From: Stephan Druskat Date: Fri, 17 Oct 2025 09:39:50 +0200 Subject: [PATCH 137/247] Make tone less intimidating, more neutral/positive --- docs/source/dev/data_model.md | 21 +++++++++++++-------- 1 file changed, 13 insertions(+), 8 deletions(-) diff --git a/docs/source/dev/data_model.md b/docs/source/dev/data_model.md index de313e40..8f008dc4 100644 --- a/docs/source/dev/data_model.md +++ b/docs/source/dev/data_model.md @@ -22,12 +22,16 @@ The cache is purely for internal purposes, its data should not be interacted wit Depending on whether you develop a plugin for `hermes`, or you develop `hermes` itself, you need to know either [_some_](#json-ld-for-plugin-developers), or _quite a few_ things about JSON-LD. +The following sections provide documentation of the data model. +They aim to help you get started with `hermes` plugin and core development, +even if you have no previous experience with JSON-LD. + ## The data model for plugin developers If you develop a plugin for `hermes`, you will only need to work with a single Python class and the public API it provides: {class}`hermes.model.SoftwareMetadata`. -Nevertheless, it is still necessary that you know _some_ things about JSON-LD. +To work with this class, it is necessary that you know _some_ things about JSON-LD. ### JSON-LD for plugin developers @@ -39,10 +43,10 @@ Work in progress. ### Working with the `hermes` data model in plugins > **Goal** -> Understand how plugins access and interact with the `hermes` data model. +> Understand how plugins access the `hermes` data model and interact with it. `hermes` aims to hide as much of the data model as possible behind a public API -to avoid that plugin developers have to deal with the complexities and intricacies of JSON-LD. +to avoid that plugin developers have to deal with some of the more complex features of JSON-LD. #### Model instances in different types of plugin @@ -152,11 +156,11 @@ data["name"] = "My Research Software" # → [ "My Research Software" ] data["author"] = {"name": "Foo"} # → [ { "name": [ "Foo" ] } ] ``` -The fact that you will always be returned a list-like object has consequences for accessing and creating data: +Therefore, you access data in the same way you would access data from a Python `list`: -1. You need to access single values using indices, e.g., `data["name"][0]`. -2. You can use list-like API to interact with data objects, e.g., -`data["name"].append("Bar")`, `data["name"].extend(["Bar", "Baz"])`. +1. You access single values using indices, e.g., `data["name"][0]`. +2. You can use a list-like API to interact with data objects, e.g., +`data["name"].append("Bar")`, `data["name"].extend(["Bar", "Baz"])`, `for name in data["name"]: ...`, etc. ##### Interacting with data @@ -166,6 +170,7 @@ The following longer example shows different ways that you can interact with `So :caption: Building the data model from hermes.model import SoftwareMetadata +# Create the model object with the default context data = SoftwareMetadata() # Let's create author metadata for our software! @@ -306,4 +311,4 @@ except AssertionError: ## See Also -- Reference: {class}`hermes.model.SoftwareMetadata` API +- API reference: {class}`hermes.model.SoftwareMetadata` From da136c3ce0351f92d6fc47f1b4d50ac372227b8c Mon Sep 17 00:00:00 2001 From: notactuallyfinn Date: Fri, 17 Oct 2025 13:48:18 +0200 Subject: [PATCH 138/247] changed ld_list according to issue #439 --- src/hermes/model/types/__init__.py | 24 ++++------ src/hermes/model/types/ld_list.py | 49 +++++++------------- test/hermes_test/model/types/test_ld_list.py | 48 ++++++++++--------- 3 files changed, 51 insertions(+), 70 deletions(-) diff --git a/src/hermes/model/types/__init__.py b/src/hermes/model/types/__init__.py index 6c02662c..4bb63152 100644 --- a/src/hermes/model/types/__init__.py +++ b/src/hermes/model/types/__init__.py @@ -21,14 +21,14 @@ "ld_container": lambda c, **_: c, "json": lambda c, **_: c.compact(), "expanded_json": lambda c, **_: c.ld_value, - }, + } ), # Wrap item from ld_dict in ld_list (ld_list.is_ld_list, dict(ld_container=ld_list)), ( - lambda c: isinstance(c, list) and all(isinstance(item, dict) for item in c), - dict(ld_container=lambda c, **kw: ld_list([{"@list": c}], **kw)) + lambda c: isinstance(c, list) and all(isinstance(item, dict) for item in c), + dict(ld_container=lambda c, **kw: ld_list([{"@list": c}], **kw)) ), # pythonize items from lists (expanded set is already handled above) @@ -44,33 +44,25 @@ (ld_dict.is_ld_dict, dict(expanded_json=lambda c, **kw: ld_dict.from_dict(c[0], **kw).ld_value)), ( ld_list.is_container, - dict( - expanded_json=lambda c, **kw: ld_list.from_list( - ld_list([c]).item_list, container=ld_list([c]).container, **kw - ).ld_value - ), + dict(expanded_json=lambda c, **kw: ld_list.from_list(ld_list.get_item_list_from_container(c), **kw).ld_value) ), ( ld_list.is_ld_list, - dict( - expanded_json=lambda c, **kw: ld_list.from_list( - ld_list(c).item_list, container=ld_list(c).container, **kw - ).ld_value - ), + dict(expanded_json=lambda c, **kw: ld_list.from_list(ld_list.get_item_list_from_container(c[0]), **kw).ld_value) ), (lambda c: isinstance(c, list), dict(expanded_json=lambda c, **kw: ld_list.from_list(c, **kw).ld_value)), (lambda v: isinstance(v, (int, float, str, bool)), dict(expanded_json=lambda v, **_: [{"@value": v}])), ( lambda v: isinstance(v, datetime), - dict(expanded_json=lambda v, **_: [{"@value": v.isoformat(), "@type": iri_map["schema:DateTime"]}]), + dict(expanded_json=lambda v, **_: [{"@value": v.isoformat(), "@type": iri_map["schema:DateTime"]}]) ), ( lambda v: isinstance(v, date), - dict(expanded_json=lambda v, **_: [{"@value": v.isoformat(), "@type": iri_map["schema:Date"]}]), + dict(expanded_json=lambda v, **_: [{"@value": v.isoformat(), "@type": iri_map["schema:Date"]}]) ), ( lambda v: isinstance(v, time), - dict(expanded_json=lambda v, **_: [{"@value": v.isoformat(), "@type": iri_map["schema:Time"]}]), + dict(expanded_json=lambda v, **_: [{"@value": v.isoformat(), "@type": iri_map["schema:Time"]}]) ), ] diff --git a/src/hermes/model/types/ld_list.py b/src/hermes/model/types/ld_list.py index d915842a..17145374 100644 --- a/src/hermes/model/types/ld_list.py +++ b/src/hermes/model/types/ld_list.py @@ -10,27 +10,12 @@ class ld_list(ld_container): """ An JSON-LD container resembling a list. """ - container_types = ['@list', '@set', '@graph'] - def __init__(self, data, *, parent=None, key=None, index=None, context=None): - """ Create a new ld_list.py container. - - # FIXME: #439 there is no parameter container - :param container: The container type for this list. - """ - # FIXME: #439 A set container does not contain "@set" in the expected data format (expanded json ld) - # Instead it is just a list of dicts and therefor would raise a ValueError here (and fail ld_list.is_ld_list) - + if not (self.is_ld_list(data) and "@list" in data[0]): + raise ValueError("The given data does not represent a ld_list.") super().__init__(data, parent=parent, key=key, index=index, context=context) - # Determine container and correct item list - for container in self.container_types: - if container in self._data[0]: - self.item_list = self._data[0][container] - self.container = container - break - else: - raise ValueError(f"Unexpected dict: {data}") + self.item_list = data[0]["@list"] def __getitem__(self, index): if isinstance(index, slice): @@ -44,15 +29,7 @@ def __getitem__(self, index): def __setitem__(self, index, value): # FIXME: #439 what should your_ld_list[index] = [{"@type": "foo", "name": "bar"}] mean? # set your_ld_list[index] to the dict {"@type": "foo", "name": "bar"} given in expanded form or - # set your_ld_list[index] to the list [{"@type": "foo", "name": "bar"}] given in non expanded form or - # set your_ld_list[index] to the set [{"@type": "foo", "name": "bar"}] given in expanded form - # (ld_list.fromlist([{"@type": "foo", "name": "bar"}]) defaults to container type list - # which would have the object as an expanded form whereas the expanded form of a list would be - # ["@list": [{"@type": "foo", "name": "bar"}]] - # This is relevent because nested sets get unnested when being expanded and lists not. - # Moreover a set inside a list gets automaticaly converted to a list when expanded) - - # FIXME: #439 what happens when a ld_list is put inside another also depends on their container types + # set your_ld_list[index] to the list [{"@type": "foo", "name": "bar"}] given in non expanded form if not isinstance(index, slice): self.item_list[index] = val[0] if isinstance(val := self._to_expanded_json(self.key, value), list) else val @@ -81,7 +58,7 @@ def __contains__(self, value): def __eq__(self, other): if isinstance(other, ld_list): # FIXME: #439 When are ld_lists equal? - return self.item_list == other.item_list and self.container == other.container + return self.item_list == other.item_list if isinstance(other, list): return self.item_list == self._to_expanded_json(self.key, other)[0]["@list"] return NotImplemented @@ -108,19 +85,25 @@ def to_python(self): @classmethod def is_ld_list(cls, ld_value): - # FIXME: #439 every python list that contains at least one dict can be considerd a set in expanded json form return cls.is_ld_node(ld_value) and cls.is_container(ld_value[0]) @classmethod def is_container(cls, value): - # FIXME: #439 "@set" will never be inside a dictionary of an expanded json ld object return ( isinstance(value, dict) - and len([1 for ct in cls.container_types if isinstance(value.get(ct, None), list)]) == 1 + and [*value.keys()] in [["@list"], ["@set"], ["@graph"]] + and any(isinstance(value.get(cont, None), list) for cont in {"@list", "@set", "@graph"}) ) @classmethod - def from_list(cls, value, *, parent=None, key=None, context=None, container=None): - new_list = cls([{container or "@list": []}], parent=parent, key=key, context=context) + def from_list(cls, value, *, parent=None, key=None, context=None): + new_list = cls([{"@list": []}], parent=parent, key=key, context=context) new_list.extend(value) return new_list + + @classmethod + def get_item_list_from_container(cls, ld_value): + for cont in {"@list", "@set", "@graph"}: + if cont in ld_value: + return ld_value[cont] + raise ValueError("The given data does not represent a container.") diff --git a/test/hermes_test/model/types/test_ld_list.py b/test/hermes_test/model/types/test_ld_list.py index 377815a8..fcae0459 100644 --- a/test/hermes_test/model/types/test_ld_list.py +++ b/test/hermes_test/model/types/test_ld_list.py @@ -16,6 +16,8 @@ def test_undefined_list(): ld_list([{}]) with pytest.raises(ValueError): ld_list([{"spam": [{"@value": "bacon"}]}]) + with pytest.raises(ValueError): + ld_list([{"@list": [0], "spam": [{"@value": "bacon"}]}]) with pytest.raises(ValueError): ld_list([{"@list": ["a", "b"], "@set": ["foo", "bar"]}]) with pytest.raises(ValueError): @@ -23,16 +25,15 @@ def test_undefined_list(): def test_list_basics(): - li = ld_list([{"@list": [0], "spam": [{"@value": "bacon"}]}]) - assert li._data == [{"@list": [0], "spam": [{"@value": "bacon"}]}] - assert li.container == '@list' + li = ld_list([{"@list": [0]}]) + assert li._data == [{"@list": [0]}] assert li.item_list == [0] def test_build_in_get(): li = ld_list([{"@list": [{"@value": "foo"}, {"@value": "bar"}, {"@value": "foobar"}]}], key="name") assert li[0] == "foo" and li[-1] == "foobar" - assert li[:2] == ["foo", "bar"] and li[1:-1] == ["bar"] # FIXME: maybe ld_list instead? + assert li[:2] == ["foo", "bar"] and li[1:-1] == ["bar"] assert li[::2] == ["foo", "foobar"] and li[::-1] == ["foobar", "bar", "foo"] li = ld_list([{"@list": [{"@type": "A", "schema:name": "a"}, {"@list": [{"@type": "A", "schema:name": "a"}]}]}]) @@ -66,7 +67,7 @@ def test_build_in_set(): li[0] = {"@type": "schema:Thing", "schema:name": "b"} assert isinstance(li[0], ld_dict) assert li[0].data_dict == {"@type": ["https://schema.org/Thing"], "https://schema.org/name": [{"@value": "b"}]} - li[0] = ld_list.from_list([{"@type": "schema:Thing", "schema:name": "a"}], parent=li, key=li.key, container="@set") + li[0] = ld_list.from_list([{"@type": "schema:Thing", "schema:name": "a"}], parent=li, key=li.key) assert isinstance(li[0], ld_list) assert li[0].item_list == [{"@type": ["https://schema.org/Thing"], "https://schema.org/name": [{"@value": "a"}]}] li[0] = {"@set": [{"@type": "schema:Thing", "schema:name": "b"}]} @@ -117,19 +118,18 @@ def test_build_in_contains(): def test_build_in_comparison(): li = ld_list([{"@list": []}], key="https://schema.org/name", context=[{"schema": "https://schema.org/"}]) - li2 = ld_list([{"@set": []}], key="https://schema.org/name", context=[{"schema": "https://schema.org/"}]) - li3 = ld_list([{"@list": []}], key="https://schema.org/name", context=[{"schema2": "https://schema.org/"}]) - assert li == [] and li2 == [] and [] == li and [] == li2 - assert li != li2 and li == li3 + li2 = ld_list([{"@list": []}], key="https://schema.org/name", context=[{"schema2": "https://schema.org/"}]) + assert li == [] and [] == li + assert li == li2 li.append("foo") li.append({"@type": "A", "schema:name": "a"}) - assert li != li3 and ["foo", {"@type": "A", "schema:name": "a"}] == li and ["foo"] != li3 + assert li != li2 and ["foo", {"@type": "A", "schema:name": "a"}] == li and ["foo"] != li2 assert ["foo", {"@type": "A", "https://schema.org/name": "a"}] == li - li3.extend(["foo", {"@type": "A", "schema2:name": "a"}]) - assert li == li3 - li4 = ld_list([{"@list": []}], key="https://schema.org/name", context=[{"schema": "https://schema.org/"}]) - li4.extend([{"@type": "A", "schema:name": "a"}, "foo"]) - assert li != li4 + li2.extend(["foo", {"@type": "A", "schema2:name": "a"}]) + assert li == li2 + li3 = ld_list([{"@list": []}], key="https://schema.org/name", context=[{"schema": "https://schema.org/"}]) + li3.extend([{"@type": "A", "schema:name": "a"}, "foo"]) + assert li != li3 def test_extend(): @@ -170,13 +170,19 @@ def test_is_container(): def test_from_list(): li = ld_list.from_list([]) - assert li.container == "@list" and li.item_list == li.context == [] and li.parent is li.key is li.index is None - li = ld_list.from_list([], parent=li, key="schema:name", context=[{"schema": "https://schema.org/"}], - container="@set") - assert li.container == "@set" and li.item_list == [] and li.parent is not None and li.key == "schema:name" + assert li.item_list == li.context == [] and li.parent is li.key is li.index is None + assert li._data == [{"@list": []}] + li = ld_list.from_list([], parent=li, key="schema:name", context=[{"schema": "https://schema.org/"}]) + assert li.item_list == [] and li.parent is not None and li.key == "schema:name" assert li.index is None and li.context == [{"schema": "https://schema.org/"}] li = ld_list.from_list(["a", {"@value": "b"}], parent=None, key="https://schema.org/name", - context=[{"schema": "https://schema.org/"}], container="@graph") - assert li.container == "@graph" and li.item_list == [{"@value": "a"}, {"@value": "b"}] and li.parent is None + context=[{"schema": "https://schema.org/"}]) + assert li.item_list == [{"@value": "a"}, {"@value": "b"}] and li.parent is None assert li.key == "https://schema.org/name" and li.index is None assert li.context == [{"schema": "https://schema.org/"}] + + +def test_get_item_list_from_container(): + assert ld_list.get_item_list_from_container({"@list": ["a"]}) == ["a"] + assert ld_list.get_item_list_from_container({"@set": ["a"]}) == ["a"] + assert ld_list.get_item_list_from_container({"@graph": ["a"]}) == ["a"] From 03ebd32fd36df233a0ffec169e35c95b40aa263a Mon Sep 17 00:00:00 2001 From: Michael Fritzsche Date: Mon, 20 Oct 2025 10:06:24 +0200 Subject: [PATCH 139/247] implemented tests for problem 2 of the issue and more tests --- test/hermes_test/model/types/test_ld_list.py | 16 ++++++++++++++-- 1 file changed, 14 insertions(+), 2 deletions(-) diff --git a/test/hermes_test/model/types/test_ld_list.py b/test/hermes_test/model/types/test_ld_list.py index fcae0459..f27b36a1 100644 --- a/test/hermes_test/model/types/test_ld_list.py +++ b/test/hermes_test/model/types/test_ld_list.py @@ -103,9 +103,9 @@ def test_append(): assert li.item_list[2] == li.item_list[3] li.append(ld_list([{"@list": [{"@type": ["A"], "https://schema.org/name": [{"@value": "a"}]}]}], parent=li, key=li.key)) - li.append([{"@type": "A", "schema:name": "a"}]) # FIXME: should that be interpreted as a list or expanded dict? + li.append([{"@type": "A", "schema:name": "a"}]) li.append(2 * [{"@type": "A", "schema:name": "a"}]) - assert 2 * li[4].item_list == 2 * [li[5].data_dict] == li[6].item_list + assert 2 * li[4].item_list == 2 * li[5].item_list == li[6].item_list def test_build_in_contains(): @@ -130,6 +130,8 @@ def test_build_in_comparison(): li3 = ld_list([{"@list": []}], key="https://schema.org/name", context=[{"schema": "https://schema.org/"}]) li3.extend([{"@type": "A", "schema:name": "a"}, "foo"]) assert li != li3 + assert not li == 3 + assert li != 3 def test_extend(): @@ -154,6 +156,14 @@ def test_extend(): assert li[0:2] == ["foo", "bar"] and li.item_list[0:2] == [{"@value": "foo"}, {"@value": "bar"}] assert li[-1].data_dict == {"@type": ["A"], "https://schema.org/name": [{"@value": "a"}]} and len(li) == 3 +def test_to_python(): + li = ld_list([{"@list": []}], key="https://schema.org/name", context=[{"schema": "https://schema.org/"}]) + li.append("foo") + li.append(ld_dict([{"@type": ["A"], "https://schema.org/name": [{"@value": "a"}]}])) + li.append(["a"]) + assert li[1]["@type"].item_list == ["A"] + assert li.to_python() == ["foo", {"@type": ["A"], "schema:name": ["a"]}, ["a"]] + def test_is_ld_list(): assert not any(ld_list.is_ld_list(item) for item in [1, "", [], {}, {"@list": []}, [{}], [{"a": "b"}]]) @@ -186,3 +196,5 @@ def test_get_item_list_from_container(): assert ld_list.get_item_list_from_container({"@list": ["a"]}) == ["a"] assert ld_list.get_item_list_from_container({"@set": ["a"]}) == ["a"] assert ld_list.get_item_list_from_container({"@graph": ["a"]}) == ["a"] + with pytest.raises(ValueError): + ld_list.get_item_list_from_container(["a"]) From d1eb6128eec251edc4e97ab11367f3baf7be0691 Mon Sep 17 00:00:00 2001 From: Michael Fritzsche Date: Mon, 20 Oct 2025 10:14:37 +0200 Subject: [PATCH 140/247] added a few missing conversions and formatted the file a little bit different --- src/hermes/model/types/__init__.py | 37 ++++++++++++++++-------------- 1 file changed, 20 insertions(+), 17 deletions(-) diff --git a/src/hermes/model/types/__init__.py b/src/hermes/model/types/__init__.py index 4bb63152..37203ce4 100644 --- a/src/hermes/model/types/__init__.py +++ b/src/hermes/model/types/__init__.py @@ -25,44 +25,47 @@ ), # Wrap item from ld_dict in ld_list - (ld_list.is_ld_list, dict(ld_container=ld_list)), + (ld_list.is_ld_list, {"ld_container": ld_list}), ( lambda c: isinstance(c, list) and all(isinstance(item, dict) for item in c), - dict(ld_container=lambda c, **kw: ld_list([{"@list": c}], **kw)) + {"ld_container": lambda c, **kw: ld_list([{"@list": c}], **kw)} ), # pythonize items from lists (expanded set is already handled above) - (ld_container.is_json_id, dict(python=lambda c, **_: c["@id"])), - (ld_container.is_typed_json_value, dict(python=ld_container.typed_ld_to_py)), - (ld_container.is_json_value, dict(python=lambda c, **_: c["@value"])), - (ld_list.is_container, dict(ld_container=lambda c, **kw: ld_list([c], **kw))), - (ld_dict.is_json_dict, dict(ld_container=lambda c, **kw: ld_dict([c], **kw))), + (ld_container.is_json_id, {"python": lambda c, **_: c["@id"]}), + (ld_container.is_typed_json_value, {"python": ld_container.typed_ld_to_py}), + (ld_container.is_json_value, {"python": lambda c, **_: c["@value"]}), + (ld_list.is_container, {"ld_container": lambda c, **kw: ld_list([c], **kw)}), + (ld_dict.is_json_dict, {"ld_container": lambda c, **kw: ld_dict([c], **kw)}), # Convert internal data types to expanded_json - (lambda c: ld_container.is_json_id(c) or ld_container.is_json_value(c), dict(expanded_json=lambda c, **_: [c])), - (ld_dict.is_json_dict, dict(expanded_json=lambda c, **kw: ld_dict.from_dict(c, **kw).ld_value)), - (ld_dict.is_ld_dict, dict(expanded_json=lambda c, **kw: ld_dict.from_dict(c[0], **kw).ld_value)), + (lambda c: ld_container.is_json_id(c), {"expanded_json": lambda c, **_: c}), + (lambda c: ld_container.is_ld_id(c), {"expanded_json": lambda c, **_: c[0]}), + (lambda c: ld_container.is_json_value(c), {"expanded_json": lambda c, **_: [c]}), + (lambda c: ld_container.is_ld_value(c), {"expanded_json": lambda c, **_: c}), + (ld_dict.is_json_dict, {"expanded_json": lambda c, **kw: ld_dict.from_dict(c, **kw).ld_value}), + (ld_dict.is_ld_dict, {"expanded_json": lambda c, **kw: ld_dict.from_dict(c[0], **kw).ld_value}), ( ld_list.is_container, - dict(expanded_json=lambda c, **kw: ld_list.from_list(ld_list.get_item_list_from_container(c), **kw).ld_value) + {"expanded_json": lambda c, **kw: ld_list.from_list(ld_list.get_item_list_from_container(c), **kw).ld_value} ), ( ld_list.is_ld_list, - dict(expanded_json=lambda c, **kw: ld_list.from_list(ld_list.get_item_list_from_container(c[0]), **kw).ld_value) + {"expanded_json": lambda c, **kw: ld_list.from_list(ld_list.get_item_list_from_container(c[0]), **kw).ld_value} ), - (lambda c: isinstance(c, list), dict(expanded_json=lambda c, **kw: ld_list.from_list(c, **kw).ld_value)), - (lambda v: isinstance(v, (int, float, str, bool)), dict(expanded_json=lambda v, **_: [{"@value": v}])), + (lambda c: isinstance(c, list), {"expanded_json": lambda c, **kw: ld_list.from_list(c, **kw).ld_value}), + (lambda v: isinstance(v, (int, float, str, bool)), {"expanded_json": lambda v, **_: [{"@value": v}]}), ( lambda v: isinstance(v, datetime), - dict(expanded_json=lambda v, **_: [{"@value": v.isoformat(), "@type": iri_map["schema:DateTime"]}]) + {"expanded_json": lambda v, **_: [{"@value": v.isoformat(), "@type": iri_map["schema:DateTime"]}]} ), ( lambda v: isinstance(v, date), - dict(expanded_json=lambda v, **_: [{"@value": v.isoformat(), "@type": iri_map["schema:Date"]}]) + {"expanded_json": lambda v, **_: [{"@value": v.isoformat(), "@type": iri_map["schema:Date"]}]} ), ( lambda v: isinstance(v, time), - dict(expanded_json=lambda v, **_: [{"@value": v.isoformat(), "@type": iri_map["schema:Time"]}]) + {"expanded_json": lambda v, **_: [{"@value": v.isoformat(), "@type": iri_map["schema:Time"]}]} ), ] From d28dc10d048be521abf50935fd43019832d36e8f Mon Sep 17 00:00:00 2001 From: Michael Fritzsche Date: Mon, 20 Oct 2025 10:16:34 +0200 Subject: [PATCH 141/247] removed one conversion, added one and changed _to_python to fix conversion of @type values --- src/hermes/model/types/__init__.py | 7 ++----- src/hermes/model/types/ld_container.py | 7 ------- 2 files changed, 2 insertions(+), 12 deletions(-) diff --git a/src/hermes/model/types/__init__.py b/src/hermes/model/types/__init__.py index 37203ce4..8b6a2930 100644 --- a/src/hermes/model/types/__init__.py +++ b/src/hermes/model/types/__init__.py @@ -26,10 +26,7 @@ # Wrap item from ld_dict in ld_list (ld_list.is_ld_list, {"ld_container": ld_list}), - ( - lambda c: isinstance(c, list) and all(isinstance(item, dict) for item in c), - {"ld_container": lambda c, **kw: ld_list([{"@list": c}], **kw)} - ), + (lambda c: isinstance(c, list), {"ld_container": lambda c, **kw: ld_list([{"@list": c}], **kw)}), # pythonize items from lists (expanded set is already handled above) (ld_container.is_json_id, {"python": lambda c, **_: c["@id"]}), @@ -37,6 +34,7 @@ (ld_container.is_json_value, {"python": lambda c, **_: c["@value"]}), (ld_list.is_container, {"ld_container": lambda c, **kw: ld_list([c], **kw)}), (ld_dict.is_json_dict, {"ld_container": lambda c, **kw: ld_dict([c], **kw)}), + (lambda v: isinstance(v, str), {"python": lambda v, parent, **_: parent.ld_proc.compact_iri(parent.active_ctx, v)}), # Convert internal data types to expanded_json (lambda c: ld_container.is_json_id(c), {"expanded_json": lambda c, **_: c}), @@ -44,7 +42,6 @@ (lambda c: ld_container.is_json_value(c), {"expanded_json": lambda c, **_: [c]}), (lambda c: ld_container.is_ld_value(c), {"expanded_json": lambda c, **_: c}), (ld_dict.is_json_dict, {"expanded_json": lambda c, **kw: ld_dict.from_dict(c, **kw).ld_value}), - (ld_dict.is_ld_dict, {"expanded_json": lambda c, **kw: ld_dict.from_dict(c[0], **kw).ld_value}), ( ld_list.is_container, {"expanded_json": lambda c, **kw: ld_list.from_list(ld_list.get_item_list_from_container(c), **kw).ld_value} diff --git a/src/hermes/model/types/ld_container.py b/src/hermes/model/types/ld_container.py index 55b62cd3..86b42088 100644 --- a/src/hermes/model/types/ld_container.py +++ b/src/hermes/model/types/ld_container.py @@ -83,13 +83,6 @@ def ld_value(self): def _to_python(self, full_iri, ld_value): if full_iri == "@id": value = self.ld_proc.compact_iri(self.active_ctx, ld_value, vocab=False) - elif full_iri == "@type": - value = [ - self.ld_proc.compact_iri(self.active_ctx, ld_type) - for ld_type in ld_value - ] - if len(value) == 1: - value = value[0] else: value, ld_output = self.ld_proc.apply_typemap(ld_value, "python", "ld_container", parent=self, key=full_iri) From fb5aa6474c45a3c448e8b35289f34c2c1baee3f4 Mon Sep 17 00:00:00 2001 From: Michael Fritzsche Date: Mon, 20 Oct 2025 10:19:40 +0200 Subject: [PATCH 142/247] fixed formatting errors --- test/hermes_test/model/types/test_ld_list.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/test/hermes_test/model/types/test_ld_list.py b/test/hermes_test/model/types/test_ld_list.py index f27b36a1..98bd7b68 100644 --- a/test/hermes_test/model/types/test_ld_list.py +++ b/test/hermes_test/model/types/test_ld_list.py @@ -156,6 +156,7 @@ def test_extend(): assert li[0:2] == ["foo", "bar"] and li.item_list[0:2] == [{"@value": "foo"}, {"@value": "bar"}] assert li[-1].data_dict == {"@type": ["A"], "https://schema.org/name": [{"@value": "a"}]} and len(li) == 3 + def test_to_python(): li = ld_list([{"@list": []}], key="https://schema.org/name", context=[{"schema": "https://schema.org/"}]) li.append("foo") @@ -163,7 +164,7 @@ def test_to_python(): li.append(["a"]) assert li[1]["@type"].item_list == ["A"] assert li.to_python() == ["foo", {"@type": ["A"], "schema:name": ["a"]}, ["a"]] - + def test_is_ld_list(): assert not any(ld_list.is_ld_list(item) for item in [1, "", [], {}, {"@list": []}, [{}], [{"a": "b"}]]) From 524885382489850d1a1039a61d7fa403e7775748 Mon Sep 17 00:00:00 2001 From: notactuallyfinn Date: Fri, 24 Oct 2025 09:51:16 +0200 Subject: [PATCH 143/247] fixed ld_container tests, a type conversion and equals of ld_list --- src/hermes/model/types/__init__.py | 2 +- src/hermes/model/types/ld_list.py | 4 +++- .../model/types/test_ld_container.py | 17 ++++++++--------- 3 files changed, 12 insertions(+), 11 deletions(-) diff --git a/src/hermes/model/types/__init__.py b/src/hermes/model/types/__init__.py index 8b6a2930..8f47fe44 100644 --- a/src/hermes/model/types/__init__.py +++ b/src/hermes/model/types/__init__.py @@ -30,7 +30,7 @@ # pythonize items from lists (expanded set is already handled above) (ld_container.is_json_id, {"python": lambda c, **_: c["@id"]}), - (ld_container.is_typed_json_value, {"python": ld_container.typed_ld_to_py}), + (ld_container.is_typed_json_value, {"python": lambda c, **kw: ld_container.typed_ld_to_py([c], **kw)}), (ld_container.is_json_value, {"python": lambda c, **_: c["@value"]}), (ld_list.is_container, {"ld_container": lambda c, **kw: ld_list([c], **kw)}), (ld_dict.is_json_dict, {"ld_container": lambda c, **kw: ld_dict([c], **kw)}), diff --git a/src/hermes/model/types/ld_list.py b/src/hermes/model/types/ld_list.py index 17145374..fc93f074 100644 --- a/src/hermes/model/types/ld_list.py +++ b/src/hermes/model/types/ld_list.py @@ -60,7 +60,9 @@ def __eq__(self, other): # FIXME: #439 When are ld_lists equal? return self.item_list == other.item_list if isinstance(other, list): - return self.item_list == self._to_expanded_json(self.key, other)[0]["@list"] + if ld_list.is_ld_list(other): + other = ld_list.get_item_list_from_container(other) + return self.item_list == self.from_list(other, key=self.key, context=self.full_context).item_list return NotImplemented def __ne__(self, other): diff --git a/test/hermes_test/model/types/test_ld_container.py b/test/hermes_test/model/types/test_ld_container.py index 9da5b461..2d6687f1 100644 --- a/test/hermes_test/model/types/test_ld_container.py +++ b/test/hermes_test/model/types/test_ld_container.py @@ -96,27 +96,26 @@ def test_to_python_id_with_prefix(self, mock_context): def test_to_python_type(self, mock_context): cont = ld_container([{}], context=[mock_context]) - assert cont._to_python("@type", ["@id"]) == '@id' + assert cont._to_python("@type", ["@id"]) == ['@id'] assert cont._to_python("@type", ["@id", "http://spam.eggs/Egg"]) == ["@id", "Egg"] def test_to_python_id_value(self, mock_context): cont = ld_container([{}], context=[mock_context]) + assert cont._to_python("http://spam.eggs/ham", [{"@id": "http://spam.eggs/spam"}]) == ["http://spam.eggs/spam"] assert cont._to_python("http://spam.eggs/ham", - [{"@id": "http://spam.eggs/spam"}]) == "http://spam.eggs/spam" - assert cont._to_python("http://spam.eggs/ham", - [{"@id": "http://spam.eggs/identifier"}]) == "http://spam.eggs/identifier" + {"@id": "http://spam.eggs/identifier"}) == "http://spam.eggs/identifier" def test_to_python_basic_value(self, mock_context): cont = ld_container([{}], context=[mock_context]) - assert cont._to_python("http://soam.eggs/spam", [{"@value": "bacon"}]) == 'bacon' - assert cont._to_python("http://spam.eggs/spam", [{"@value": True}]) is True - assert cont._to_python("http://spam.eggs/spam", [{"@value": 123}]) == 123 + assert cont._to_python("http://soam.eggs/spam", {"@value": "bacon"}) == 'bacon' + assert cont._to_python("http://spam.eggs/spam", {"@value": True}) is True + assert cont._to_python("http://spam.eggs/spam", {"@value": 123}) == 123 def test_to_python_datetime_value(self, mock_context): cont = ld_container([{}], context=[mock_context]) - assert cont._to_python("http://spam.eggs/eggs", [{ + assert cont._to_python("http://spam.eggs/eggs", { "@value": "2022-02-22T00:00:00", "@type": "https://schema.org/DateTime" - }]) == "2022-02-22T00:00:00" + }) == "2022-02-22T00:00:00" # TODO: #434 typed date is returned as string instead of date def test_to_expanded_id(self, mock_context): cont = ld_container([{}], context=[mock_context]) From c233573313553f321dce444904fa60ac311e1e7e Mon Sep 17 00:00:00 2001 From: "Kernchen, Sophie" Date: Fri, 24 Oct 2025 16:59:50 +0200 Subject: [PATCH 144/247] First tests for ld_proc --- .../hermes_test/model/types/test_pyld_util.py | 38 +++++++++++++++++++ 1 file changed, 38 insertions(+) diff --git a/test/hermes_test/model/types/test_pyld_util.py b/test/hermes_test/model/types/test_pyld_util.py index 1a1c6f4d..74b93641 100644 --- a/test/hermes_test/model/types/test_pyld_util.py +++ b/test/hermes_test/model/types/test_pyld_util.py @@ -6,6 +6,7 @@ import pytest +from pyld import jsonld from hermes.model.types import pyld_util @@ -22,3 +23,40 @@ def test_mock_document_compact(ld_proc, mock_document): def test_mock_document_expanded(ld_proc, mock_document): expanded_document = ld_proc.expand(mock_document.compact(), {}) assert expanded_document == mock_document.expanded() + + +def test_initial_context(ld_proc, httpserver, mock_document): + with pytest.raises(jsonld.JsonLdError): + active_ctx = ld_proc.initial_ctx( + [{"s": "www.spam.de"}], + {"documentLoader": pyld_util.bundled_loader} + ) + url = httpserver.url_for("/") + httpserver.expect_request("/").respond_with_json({"@context": mock_document.vocabulary(url)}) + active_ctx = ld_proc.initial_ctx( + [url], + {"documentLoader": pyld_util.bundled_loader} + ) + assert "spam" in active_ctx["mappings"] + assert active_ctx["mappings"]["spam"]["@id"] == url + "spam" + assert active_ctx["mappings"]["ham"]["@id"] == url + "ham" + assert active_ctx["mappings"]["use_until"]["@id"] == url + "use_until" + assert active_ctx["mappings"]["Egg"]["@id"] == url + "Egg" + assert active_ctx["processingMode"] == "json-ld-1.1" + + +def test_expand_iri(ld_proc, mock_context): + active_ctx = {'processingMode': 'json-ld-1.1', + 'mappings': mock_context} + assert ld_proc.expand_iri(active_ctx, "spam") == "http://spam.eggs/" + "spam" + + +def test_compact_iri(ld_proc, mock_context): + active_ctx = {'mappings': {'spam': {'reverse': False, 'protected': False, '_prefix': False, + '_term_has_colon': False, '@id': 'http://localhost:62391/spam'}, + 'ham': {'reverse': False, 'protected': False, '_prefix': False, + '_term_has_colon': False, '@id': 'http://localhost:62391/ham', '@type': '@id'}, + }, + 'processingMode': 'json-ld-1.1', '_uuid': 'c641b9db-b0e8-11f0-bc68-9cfce89fd5b3'} + + assert ld_proc.compact_iri(active_ctx, "http://spam.eggs/spam") == "spam" From d5f64e135608838979fd740642ea2654d8dcdd2e Mon Sep 17 00:00:00 2001 From: "Kernchen, Sophie" Date: Fri, 7 Nov 2025 09:42:52 +0100 Subject: [PATCH 145/247] Happy case test jsonldproc --- .../hermes_test/model/types/test_pyld_util.py | 26 +++++++++++++++++-- 1 file changed, 24 insertions(+), 2 deletions(-) diff --git a/test/hermes_test/model/types/test_pyld_util.py b/test/hermes_test/model/types/test_pyld_util.py index 74b93641..46e3eab1 100644 --- a/test/hermes_test/model/types/test_pyld_util.py +++ b/test/hermes_test/model/types/test_pyld_util.py @@ -6,6 +6,8 @@ import pytest +from unittest import mock + from pyld import jsonld from hermes.model.types import pyld_util @@ -53,10 +55,30 @@ def test_expand_iri(ld_proc, mock_context): def test_compact_iri(ld_proc, mock_context): active_ctx = {'mappings': {'spam': {'reverse': False, 'protected': False, '_prefix': False, - '_term_has_colon': False, '@id': 'http://localhost:62391/spam'}, + '_term_has_colon': False, '@id': 'http://spam.eggs/spam'}, 'ham': {'reverse': False, 'protected': False, '_prefix': False, - '_term_has_colon': False, '@id': 'http://localhost:62391/ham', '@type': '@id'}, + '_term_has_colon': False, '@id': 'http://spam.eggs/ham', '@type': '@id'}, }, 'processingMode': 'json-ld-1.1', '_uuid': 'c641b9db-b0e8-11f0-bc68-9cfce89fd5b3'} assert ld_proc.compact_iri(active_ctx, "http://spam.eggs/spam") == "spam" + assert ld_proc.compact_iri(active_ctx, "http://spam.eggs/bacon") == "http://spam.eggs/bacon" + + +def test_register_typemap(): + len_typemap = len(pyld_util.JsonLdProcessor._type_map) + pyld_util.JsonLdProcessor.register_typemap("function", **dict(spam="hallo")) + assert len(pyld_util.JsonLdProcessor._type_map) == len_typemap + 1 + assert pyld_util.JsonLdProcessor._type_map["spam"] == [("function", "hallo")] + + +def test_apply_typemap(): + pyld_util.JsonLdProcessor._type_map["spam"] = [(lambda c: isinstance(c, list), lambda c, **_: c[0]+"eggs")] + ld_value, ld_output = pyld_util.JsonLdProcessor.apply_typemap(["ham"], "spam") + assert ld_output == "spam" + assert ld_value == "hameggs" + ld_value, ld_output = pyld_util.JsonLdProcessor.apply_typemap(["eggs", "ham"], "spam") + assert ld_output == "spam" + assert ld_value == "eggseggs" + ld_value, ld_output = pyld_util.JsonLdProcessor.apply_typemap("ham", "spam") + assert ld_value == "ham" From e2c1bba599237bafab6240ab77b3fcae7cf320d5 Mon Sep 17 00:00:00 2001 From: notactuallyfinn Date: Fri, 7 Nov 2025 13:42:06 +0100 Subject: [PATCH 146/247] added detailed comparison for ld_list, ld_dict and json_values and added tests for them --- src/hermes/model/types/__init__.py | 4 +- src/hermes/model/types/ld_container.py | 11 ++++++ src/hermes/model/types/ld_dict.py | 37 ++++++++++++++++++ src/hermes/model/types/ld_list.py | 39 ++++++++++++++----- .../model/types/test_ld_container.py | 11 ++++++ test/hermes_test/model/types/test_ld_dict.py | 26 +++++++++++++ test/hermes_test/model/types/test_ld_list.py | 18 +++++++++ 7 files changed, 134 insertions(+), 12 deletions(-) create mode 100644 test/hermes_test/model/types/test_ld_dict.py diff --git a/src/hermes/model/types/__init__.py b/src/hermes/model/types/__init__.py index 8f47fe44..497ee44a 100644 --- a/src/hermes/model/types/__init__.py +++ b/src/hermes/model/types/__init__.py @@ -37,8 +37,8 @@ (lambda v: isinstance(v, str), {"python": lambda v, parent, **_: parent.ld_proc.compact_iri(parent.active_ctx, v)}), # Convert internal data types to expanded_json - (lambda c: ld_container.is_json_id(c), {"expanded_json": lambda c, **_: c}), - (lambda c: ld_container.is_ld_id(c), {"expanded_json": lambda c, **_: c[0]}), + (lambda c: ld_container.is_json_id(c), {"expanded_json": lambda c, **_: [c]}), + (lambda c: ld_container.is_ld_id(c), {"expanded_json": lambda c, **_: c}), (lambda c: ld_container.is_json_value(c), {"expanded_json": lambda c, **_: [c]}), (lambda c: ld_container.is_ld_value(c), {"expanded_json": lambda c, **_: c}), (ld_dict.is_json_dict, {"expanded_json": lambda c, **kw: ld_dict.from_dict(c, **kw).ld_value}), diff --git a/src/hermes/model/types/ld_container.py b/src/hermes/model/types/ld_container.py index 86b42088..8b0ef437 100644 --- a/src/hermes/model/types/ld_container.py +++ b/src/hermes/model/types/ld_container.py @@ -182,3 +182,14 @@ def typed_ld_to_py(cls, data, **kwargs): ld_value = data[0]['@value'] return ld_value + + @classmethod + def are_values_equal(cls, first, second): + if "@id" in first and "@id" in second: + return first["@id"] == second["@id"] + for key in {"@value", "@type"}: + if (key in first) ^ (key in second): + return False + if key in first and key in second and first[key] != second[key]: + return False + return True diff --git a/src/hermes/model/types/ld_dict.py b/src/hermes/model/types/ld_dict.py index d134b99e..1686b9a6 100644 --- a/src/hermes/model/types/ld_dict.py +++ b/src/hermes/model/types/ld_dict.py @@ -3,6 +3,7 @@ # SPDX-License-Identifier: Apache-2.0 # SPDX-FileContributor: Michael Meinel +# SPDX-FileContributor: Michael Fritzsche from .ld_container import ld_container @@ -35,6 +36,42 @@ def __contains__(self, key): full_iri = self.ld_proc.expand_iri(self.active_ctx, key) return full_iri in self.data_dict + def __eq__(self, other): + if not isinstance(other, (dict, ld_dict)): + return NotImplemented + if ld_container.is_json_id(other): + if "@id" in self: + return self["@id"] == other["@id"] + return self.data_dict == {} + if ld_container.is_json_value(other): + if {*self.keys()}.issubset({"@id", *other.keys()}): + return ld_container.are_values_equal(self.data_dict, other) + return False + if isinstance(other, dict): + other = self.from_dict(other, parent=self.parent, key=self.key, context=self.context) + if "@id" in self and "@id" in other: + return self["@id"] == other["@id"] + keys_self = {*self.keys()} + keys_other = {*other.keys()} + unique_keys = keys_self.symmetric_difference(keys_other) + if unique_keys and unique_keys != {"@id"}: + return False + for key in keys_self.intersection(keys_other): + item = self[key] + other_item = other[key] + res = item.__eq__(other_item) + if res == NotImplemented: + res = other_item.__eq__(item) + if res is False or res == NotImplemented: # res is not True + return False + return True + + def __ne__(self, other): + x = self.__eq__(other) + if x is NotImplemented: + return NotImplemented + return not x + def get(self, key, default=_NO_DEFAULT): try: value = self[key] diff --git a/src/hermes/model/types/ld_list.py b/src/hermes/model/types/ld_list.py index fc93f074..18e3cdff 100644 --- a/src/hermes/model/types/ld_list.py +++ b/src/hermes/model/types/ld_list.py @@ -3,6 +3,7 @@ # SPDX-License-Identifier: Apache-2.0 # SPDX-FileContributor: Michael Meinel +# SPDX-FileContributor: Michael Fritzsche from .ld_container import ld_container @@ -27,10 +28,6 @@ def __getitem__(self, index): return item def __setitem__(self, index, value): - # FIXME: #439 what should your_ld_list[index] = [{"@type": "foo", "name": "bar"}] mean? - # set your_ld_list[index] to the dict {"@type": "foo", "name": "bar"} given in expanded form or - # set your_ld_list[index] to the list [{"@type": "foo", "name": "bar"}] given in non expanded form - if not isinstance(index, slice): self.item_list[index] = val[0] if isinstance(val := self._to_expanded_json(self.key, value), list) else val return @@ -56,14 +53,36 @@ def __contains__(self, value): return expanded_value in self.item_list def __eq__(self, other): - if isinstance(other, ld_list): - # FIXME: #439 When are ld_lists equal? - return self.item_list == other.item_list + if not (isinstance(other, (list, ld_list)) or ld_list.is_container(other)): + return NotImplemented + if isinstance(other, dict): + other = [other] if isinstance(other, list): if ld_list.is_ld_list(other): - other = ld_list.get_item_list_from_container(other) - return self.item_list == self.from_list(other, key=self.key, context=self.full_context).item_list - return NotImplemented + other = ld_list.get_item_list_from_container(other[0]) + other = self.from_list(other, parent=self.parent, key=self.key, context=self.context) + if len(self.item_list) != len(other.item_list): + return False + if (self.key == "@type") ^ (other.key == "@type"): + return False + if self.key == other.key == "@type": + return self.item_list == other.item_list + for index, (item, other_item) in enumerate(zip(self.item_list, other.item_list)): + if ((ld_container.is_typed_json_value(item) or ld_container.is_json_value(item)) and + (ld_container.is_typed_json_value(other_item) or ld_container.is_json_value(other_item))): + if not ld_container.are_values_equal(item, other_item): + return False + continue + if "@id" in item and "@id" in other_item: + return item["@id"] == other_item["@id"] + item = self[index] + other_item = other[index] + res = item.__eq__(other_item) + if res == NotImplemented: + res = other_item.__eq__(item) + if res is False or res == NotImplemented: # res is not True + return False + return True def __ne__(self, other): x = self.__eq__(other) diff --git a/test/hermes_test/model/types/test_ld_container.py b/test/hermes_test/model/types/test_ld_container.py index 2d6687f1..53cb15c8 100644 --- a/test/hermes_test/model/types/test_ld_container.py +++ b/test/hermes_test/model/types/test_ld_container.py @@ -4,6 +4,7 @@ # SPDX-FileContributor: Sophie Kernchen # SPDX-FileContributor: Michael Meinel +# SPDX-FileContributor: Michael Fritzsche from datetime import datetime @@ -152,3 +153,13 @@ def test_to_expanded_datetime_value(self, mock_context): assert cont._to_expanded_json("eggs", datetime(2022, 2, 22)) == [ {"@value": "2022-02-22T00:00:00", "@type": "http://schema.org/DateTime"} ] + + def test_are_values_equal(self): + assert ld_container.are_values_equal({"@id": "foo"}, {"@id": "foo"}) + assert not ld_container.are_values_equal({"@id": "foo"}, {"@id": "bar"}) + assert ld_container.are_values_equal({"@id": "foo"}, {"@id": "foo", "@value": "bar"}) + assert ld_container.are_values_equal({"@value": "foo"}, {"@value": "foo"}) + assert ld_container.are_values_equal({"@value": "bar"}, {"@id": "foo", "@value": "bar"}) + assert not ld_container.are_values_equal({"@value": "foo"}, {"@value": "bar"}) + assert not ld_container.are_values_equal({"@type": "bar", "@value": "foo"}, {"@value": "foo"}) + assert ld_container.are_values_equal({"@type": "bar", "@value": "foo"}, {"@type": "bar", "@value": "foo"}) diff --git a/test/hermes_test/model/types/test_ld_dict.py b/test/hermes_test/model/types/test_ld_dict.py new file mode 100644 index 00000000..b696ff5b --- /dev/null +++ b/test/hermes_test/model/types/test_ld_dict.py @@ -0,0 +1,26 @@ +# SPDX-FileCopyrightText: 2025 German Aerospace Center (DLR) +# +# SPDX-License-Identifier: Apache-2.0 + +# SPDX-FileContributor: Michael Fritzsche + +from hermes.model.types.ld_dict import ld_dict + + +def test_build_in_comparison(): + di = ld_dict([{}], context={"schema": "https://schema.org/"}) + assert di != 1 and di != [] and di != "" + di["@id"] = "foo" + di["schema:name"] = "bar" + assert di == {"@id": "foo"} + # Fail probably because of bug in ld_dict + # that is fixed on refactor/data-model after merge of refactor/384-test-ld_dict + assert di == {"@id": "foo", "schema:name": "bar"} + assert di == {"@id": "foo", "name": "b"} + assert di == {"schema:name": "bar"} + di = ld_dict([{}], context={"schema": "https://schema.org/"}) + di["schema:Person"] = {"schema:name": "foo"} + assert di == {"schema:Person": {"schema:name": "foo"}} + di["schema:Person"].append({"schema:name": "bar"}) + assert di == {"schema:Person": [{"schema:name": "foo"}, {"schema:name": "bar"}]} + assert di != {"schema:name": "foo"} diff --git a/test/hermes_test/model/types/test_ld_list.py b/test/hermes_test/model/types/test_ld_list.py index 98bd7b68..8a5bd7d2 100644 --- a/test/hermes_test/model/types/test_ld_list.py +++ b/test/hermes_test/model/types/test_ld_list.py @@ -73,6 +73,9 @@ def test_build_in_set(): li[0] = {"@set": [{"@type": "schema:Thing", "schema:name": "b"}]} assert isinstance(li[0], ld_list) assert li[0].item_list == [{"@type": ["https://schema.org/Thing"], "https://schema.org/name": [{"@value": "b"}]}] + li[0] = [{"@type": "schema:Thing", "schema:name": "b"}] + assert isinstance(li[0], ld_list) + assert li[0].item_list == [{"@type": ["https://schema.org/Thing"], "https://schema.org/name": [{"@value": "b"}]}] def test_build_in_len(): @@ -132,6 +135,21 @@ def test_build_in_comparison(): assert li != li3 assert not li == 3 assert li != 3 + li = ld_list([{"@list": []}], key="https://schema.org/Person", context=[{"schema": "https://schema.org/"}]) + li.append({"@id": "foo"}) + assert li == [{"@id": "foo"}] and li == [{"@id": "foo", "schema:name": "bar"}] and li == {"@list": [{"@id": "foo"}]} + li2 = ld_list([{"@list": []}], key="@type", context=[{"schema": "https://schema.org/"}]) + li2.append("schema:name") + assert li != li2 + li = ld_list([{"@list": []}], key="https://schema.org/name", context=[{"schema": "https://schema.org/"}]) + li2 = ld_list([{"@list": []}], key="https://schema.org/name", context=[{"schema2": "https://schema.org/"}]) + li.append("foo") + li2.append("bar") + assert li != li2 + li[0] = {"@type": "foo", "@value": "bar"} + assert li != li2 + li[0] = {"@type": "foobar", "@value": "bar"} + assert li != li2 def test_extend(): From aeb88fb8f0172e609b3e4e1261e1f5061db1c579 Mon Sep 17 00:00:00 2001 From: notactuallyfinn Date: Fri, 21 Nov 2025 10:57:32 +0100 Subject: [PATCH 147/247] added support for graph and set again and improved expansion of json-ld --- src/hermes/model/types/__init__.py | 2 +- src/hermes/model/types/ld_container.py | 92 +++++++++++++++++++- src/hermes/model/types/ld_dict.py | 6 +- src/hermes/model/types/ld_list.py | 65 +++++++++++--- test/hermes_test/model/types/test_ld_dict.py | 2 +- test/hermes_test/model/types/test_ld_list.py | 54 ++++++++---- 6 files changed, 187 insertions(+), 34 deletions(-) diff --git a/src/hermes/model/types/__init__.py b/src/hermes/model/types/__init__.py index 497ee44a..bf47bdfb 100644 --- a/src/hermes/model/types/__init__.py +++ b/src/hermes/model/types/__init__.py @@ -26,7 +26,7 @@ # Wrap item from ld_dict in ld_list (ld_list.is_ld_list, {"ld_container": ld_list}), - (lambda c: isinstance(c, list), {"ld_container": lambda c, **kw: ld_list([{"@list": c}], **kw)}), + (lambda c: isinstance(c, list), {"ld_container": lambda c, **kw: ld_list(c, **kw)}), # pythonize items from lists (expanded set is already handled above) (ld_container.is_json_id, {"python": lambda c, **_: c["@id"]}), diff --git a/src/hermes/model/types/ld_container.py b/src/hermes/model/types/ld_container.py index 8b0ef437..50e38df4 100644 --- a/src/hermes/model/types/ld_container.py +++ b/src/hermes/model/types/ld_container.py @@ -6,6 +6,8 @@ from .pyld_util import JsonLdProcessor, bundled_loader +from datetime import date, time, datetime + class ld_container: """ @@ -91,7 +93,95 @@ def _to_python(self, full_iri, ld_value): return value - def _to_expanded_json(self, key, value): + def _to_expanded_json(self, value): + """ + The item_lists contents/ the data_dict will be substituted with value. + Value can be an ld_container or contain zero or more. + Then the _data of the inner most ld_dict that contains or is self will be expanded. + If self is not an ld_dict and none of self's parents is, use the key from ld_list to generate a minimal dict + + The result of this function is what value has turned into + (always a list for type(self) == ld_dict and list or dict for type(self) == ld_list). + If self is an ld_list and value was assimilated by self the returned value is list otherwise it is a dict + (e.g. in a set the inner sets values are put directly into the outer one). + """ + if self.__class__.__name__ == "ld_list": + value = [value] + parent = self + path = [] + while parent.__class__.__name__ != "ld_dict": + if parent.container_type == "@list": + path.extend(["@list", 0]) + elif parent.container_type == "@graph": + path.extend(["@graph", 0]) + path.append(self.ld_proc.expand_iri(parent.active_ctx, parent.key) if self.index is None else self.index) + if parent.parent is None: + break + parent = parent.parent + if parent.__class__.__name__ != "ld_dict": + key = self.ld_proc.expand_iri(parent.active_ctx, parent.key) + parent = ld_container([{key: parent._data}]) + path.append(0) + + key_and_reference_todo_list = [] + if isinstance(value, ld_container): + if parent.__class__.__name__ == "ld_list" and parent.container_type == "@set": + value = value._data + else: + value = value._data[0] + elif isinstance(value, date): + value = {"@value": value.isoformat(), "@type": "schema:Date"} + elif isinstance(value, datetime): + value = {"@value": value.isoformat(), "@type": "schema:DateTime"} + elif isinstance(value, time): + value = {"@value": value.isoformat(), "@type": "schema:Time"} + else: + key_and_reference_todo_list = [(0, [value])] + special_types = (list, dict, ld_container, date, datetime, time) + while True: + if len(key_and_reference_todo_list) == 0: + break + key, ref = key_and_reference_todo_list.pop() + temp = ref[key] + if isinstance(temp, list): + key_and_reference_todo_list.extend([(index, temp) for index, val in enumerate(temp) if isinstance(val, special_types)]) + elif isinstance(temp, dict): + key_and_reference_todo_list.extend([(new_key, temp) for new_key in temp.keys() if isinstance(temp[new_key], special_types)]) + elif isinstance(temp, ld_container): + ref[key] = temp._data[0] + elif isinstance(temp, date): + ref[key] = {"@value": temp.isoformat(), "@type": "schema:Date"} + elif isinstance(temp, datetime): + ref[key] = {"@value": temp.isoformat(), "@type": "schema:DateTime"} + elif isinstance(temp, time): + ref[key] = {"@value": temp.isoformat(), "@type": "schema:Time"} + + current_data = parent._data + for index in range(len(path) - 1, 0, -1): + current_data = current_data[path[index]] + if current_data == []: + self_data = None + current_data.append(value) + else: + self_data = current_data[path[0]] + current_data[path[0]] = value + expanded_data = self.ld_proc.expand(parent._data, {"expandContext": self.full_context, + "documentLoader": bundled_loader, + "keepFreeFloatingNodes": True}) + if self_data is not None: + current_data[path[0]] = self_data + else: + current_data.clear() + for index in range(len(path) - 1, -1, -1): + expanded_data = expanded_data[path[index]] + + if self.__class__.__name__ == "ld_dict": + return expanded_data + if self.__class__.__name__ == "ld_list" and len(expanded_data) != 1: + return expanded_data + return expanded_data[0] + + def _to_expanded_json_deprecated(self, key, value): if key == "@id": ld_value = self.ld_proc.expand_iri(self.active_ctx, value, vocab=False) elif key == "@type": diff --git a/src/hermes/model/types/ld_dict.py b/src/hermes/model/types/ld_dict.py index 1686b9a6..e2848869 100644 --- a/src/hermes/model/types/ld_dict.py +++ b/src/hermes/model/types/ld_dict.py @@ -25,8 +25,8 @@ def __getitem__(self, key): def __setitem__(self, key, value): full_iri = self.ld_proc.expand_iri(self.active_ctx, key) - ld_value = self._to_expanded_json(full_iri, value) - self.data_dict.update({full_iri: ld_value}) + ld_value = self._to_expanded_json({full_iri: value}) + self.data_dict.update(ld_value) def __delitem__(self, key): full_iri = self.ld_proc.expand_iri(self.active_ctx, key) @@ -127,7 +127,7 @@ def from_dict(cls, value, *, parent=None, key=None, context=None, ld_type=None): full_context[:0] = parent.full_context ld_value = cls.ld_proc.expand(ld_data, {"expandContext": full_context, "documentLoader": bundled_loader}) - ld_value = cls(ld_value, parent=parent, key=key, context=data_context) + ld_value = cls(ld_value, parent=parent, key=key, context=full_context) return ld_value diff --git a/src/hermes/model/types/ld_list.py b/src/hermes/model/types/ld_list.py index 18e3cdff..a636bcb0 100644 --- a/src/hermes/model/types/ld_list.py +++ b/src/hermes/model/types/ld_list.py @@ -12,12 +12,28 @@ class ld_list(ld_container): """ An JSON-LD container resembling a list. """ def __init__(self, data, *, parent=None, key=None, index=None, context=None): - if not (self.is_ld_list(data) and "@list" in data[0]): - raise ValueError("The given data does not represent a ld_list.") + if not isinstance(key, str): + raise ValueError("The key is not a string or was omitted.") + if not isinstance(data, list): + raise ValueError("The given data does not represent an ld_list.") + if self.is_ld_list(data): + if "@list" in data[0]: + self.container_type = "@list" + self.item_list = data[0]["@list"] + elif "@graph" in data[0]: + self.container_type = "@graph" + self.item_list = data[0]["@graph"] + else: + raise ValueError("The given @set is not fully expanded.") + else: + self.container_type = "@set" + self.item_list = data + if key == "@type" and not all(isinstance(item, str) for item in self.item_list): + raise ValueError("A given value for @type is not a string.") + if key != "@type" and not all(isinstance(item, dict) for item in self.item_list): + raise ValueError("A given value is not properly expanded.") super().__init__(data, parent=parent, key=key, index=index, context=context) - self.item_list = data[0]["@list"] - def __getitem__(self, index): if isinstance(index, slice): return [self[i] for i in [*range(len(self))][index]] @@ -29,13 +45,22 @@ def __getitem__(self, index): def __setitem__(self, index, value): if not isinstance(index, slice): - self.item_list[index] = val[0] if isinstance(val := self._to_expanded_json(self.key, value), list) else val + value = self._to_expanded_json(value) + if not isinstance(value, list): + self.item_list[index] = value + return + if index < 0: + self.item_list[index-1:index] = value + else: + self.item_list[index:index+1] = value return try: iter(value) except TypeError as exc: raise TypeError("must assign iterable to extended slice") from exc - expanded_value = [self._to_expanded_json(self.key, val) for val in value] + expanded_value = [self._to_expanded_json(val) for val in value] + # TODO: the slice should work if all items including assimilated ones in the given order can be set via slice + # Implement this self.item_list[index] = [val[0] if isinstance(val, list) else val for val in expanded_value] def __len__(self): @@ -49,7 +74,9 @@ def __iter__(self): yield item def __contains__(self, value): - expanded_value = val[0] if isinstance(val := self._to_expanded_json(self.key, value), list) else val + # TODO: Update to use new _to_expanded_json + # and return True if value would be assimilated by self and all those items are in self + expanded_value = val[0] if isinstance(val := self._to_expanded_json_deprecated(self.key, value), list) else val return expanded_value in self.item_list def __eq__(self, other): @@ -91,7 +118,7 @@ def __ne__(self, other): return not x def append(self, value): - ld_value = self._to_expanded_json(self.key, value) + ld_value = val if isinstance(val:= self._to_expanded_json(value), list) else [val] self.item_list.extend(ld_value) def extend(self, value): @@ -117,10 +144,24 @@ def is_container(cls, value): ) @classmethod - def from_list(cls, value, *, parent=None, key=None, context=None): - new_list = cls([{"@list": []}], parent=parent, key=key, context=context) - new_list.extend(value) - return new_list + def from_list(cls, value, *, parent=None, key=None, context=None, container_type="@set"): + if key == "@type": + container_type = "@set" + if container_type == "@set": + temp_list = [] + else: + value = [{container_type: value}] + temp_list = [{container_type: value}] + if parent is not None: + expanded_value = parent._to_expanded_json(value) + # TODO: what should happen if value is assimilated by parent? + # -> return parent with added values + else: + expanded_value = cls([], parent=None, key=key, context=context)._to_expanded_json(value) + # we don't care if it is assimilated by the temporary object as expanded_value is its replacement + if not isinstance(expanded_value, list): + expanded_value = [expanded_value] + return cls(expanded_value, parent=parent, key=key, context=context) @classmethod def get_item_list_from_container(cls, ld_value): diff --git a/test/hermes_test/model/types/test_ld_dict.py b/test/hermes_test/model/types/test_ld_dict.py index b696ff5b..bc5c9916 100644 --- a/test/hermes_test/model/types/test_ld_dict.py +++ b/test/hermes_test/model/types/test_ld_dict.py @@ -16,7 +16,7 @@ def test_build_in_comparison(): # Fail probably because of bug in ld_dict # that is fixed on refactor/data-model after merge of refactor/384-test-ld_dict assert di == {"@id": "foo", "schema:name": "bar"} - assert di == {"@id": "foo", "name": "b"} + assert di == {"@id": "foo", "schema:name": "b"} assert di == {"schema:name": "bar"} di = ld_dict([{}], context={"schema": "https://schema.org/"}) di["schema:Person"] = {"schema:name": "foo"} diff --git a/test/hermes_test/model/types/test_ld_list.py b/test/hermes_test/model/types/test_ld_list.py index 8a5bd7d2..7e0fa390 100644 --- a/test/hermes_test/model/types/test_ld_list.py +++ b/test/hermes_test/model/types/test_ld_list.py @@ -5,6 +5,7 @@ # SPDX-FileContributor: Sophie Kernchen # SPDX-FileContributor: Michael Fritzsche +from datetime import date import pytest from hermes.model.types.ld_list import ld_list @@ -20,14 +21,17 @@ def test_undefined_list(): ld_list([{"@list": [0], "spam": [{"@value": "bacon"}]}]) with pytest.raises(ValueError): ld_list([{"@list": ["a", "b"], "@set": ["foo", "bar"]}]) + with pytest.raises(ValueError): + ld_list([{"@list": ["a", "b"]}]) # no given key with pytest.raises(ValueError): ld_list([{"@list": ["a", "b"]}, {"@set": ["foo", "bar"]}]) def test_list_basics(): - li = ld_list([{"@list": [0]}]) - assert li._data == [{"@list": [0]}] - assert li.item_list == [0] + li_data = [{"@list": [{"@value": "bar"}]}] + li = ld_list(li_data, key="foo") + assert li._data is li_data + assert li.item_list is li_data[0]["@list"] def test_build_in_get(): @@ -36,7 +40,8 @@ def test_build_in_get(): assert li[:2] == ["foo", "bar"] and li[1:-1] == ["bar"] assert li[::2] == ["foo", "foobar"] and li[::-1] == ["foobar", "bar", "foo"] - li = ld_list([{"@list": [{"@type": "A", "schema:name": "a"}, {"@list": [{"@type": "A", "schema:name": "a"}]}]}]) + li = ld_list([{"@list": [{"@type": "A", "schema:name": "a"}, {"@list": [{"@type": "A", "schema:name": "a"}]}]}], + key="schema:person") assert isinstance(li[0], ld_dict) and li[0].data_dict == {"@type": "A", "schema:name": "a"} and li[0].index == 0 assert isinstance(li[1], ld_list) and li[1].item_list == [{"@type": "A", "schema:name": "a"}] and li[1].index == 1 assert li[1].key == li.key @@ -78,9 +83,21 @@ def test_build_in_set(): assert li[0].item_list == [{"@type": ["https://schema.org/Thing"], "https://schema.org/name": [{"@value": "b"}]}] +def test_build_in_set_complex(): + di = ld_dict([{"https://schema.org/name": [{"@list": [{"@value": "c"}]}]}], + context=[{"schema": "https://schema.org/"}]) + temp = di["schema:name"] + di["schema:name"][0] = {"@list": ["a", "b"]} + assert di["schema:name"][0] == ["a", "b"] and temp._data is di["schema:name"]._data + li = ld_list([{"@list": []}], key="schema:time", context=[{"schema": "https://schema.org/"}]) + date_obj = date(year=2025, month=12, day=31) + li.append(date_obj) + assert li.item_list == [{"@value": date_obj.isoformat(), "@type": "https://schema.org/Date"}] + + def test_build_in_len(): - assert len(ld_list([{"@list": []}])) == 0 - assert len(ld_list([{"@list": [{"@value": "foo"}, {"@value": "bar"}, {"@value": "foobar"}]}])) == 3 + assert len(ld_list([{"@list": []}], key="foo")) == 0 + assert len(ld_list([{"@list": [{"@value": "foo"}, {"@value": "bar"}, {"@value": "foobar"}]}], key="foo")) == 3 def test_build_in_iter(): @@ -126,7 +143,9 @@ def test_build_in_comparison(): assert li == li2 li.append("foo") li.append({"@type": "A", "schema:name": "a"}) - assert li != li2 and ["foo", {"@type": "A", "schema:name": "a"}] == li and ["foo"] != li2 + assert li != li2 + assert ["foo", {"@type": "A", "schema:name": "a"}] == li + assert ["foo"] != li2 assert ["foo", {"@type": "A", "https://schema.org/name": "a"}] == li li2.extend(["foo", {"@type": "A", "schema2:name": "a"}]) assert li == li2 @@ -138,7 +157,7 @@ def test_build_in_comparison(): li = ld_list([{"@list": []}], key="https://schema.org/Person", context=[{"schema": "https://schema.org/"}]) li.append({"@id": "foo"}) assert li == [{"@id": "foo"}] and li == [{"@id": "foo", "schema:name": "bar"}] and li == {"@list": [{"@id": "foo"}]} - li2 = ld_list([{"@list": []}], key="@type", context=[{"schema": "https://schema.org/"}]) + li2 = ld_list([], key="@type", context=[{"schema": "https://schema.org/"}]) li2.append("schema:name") assert li != li2 li = ld_list([{"@list": []}], key="https://schema.org/name", context=[{"schema": "https://schema.org/"}]) @@ -146,9 +165,9 @@ def test_build_in_comparison(): li.append("foo") li2.append("bar") assert li != li2 - li[0] = {"@type": "foo", "@value": "bar"} + li[0] = {"@type": "schema:foo", "@value": "bar"} assert li != li2 - li[0] = {"@type": "foobar", "@value": "bar"} + li[0] = {"@type": "schema:foobar", "@value": "bar"} assert li != li2 @@ -178,7 +197,7 @@ def test_extend(): def test_to_python(): li = ld_list([{"@list": []}], key="https://schema.org/name", context=[{"schema": "https://schema.org/"}]) li.append("foo") - li.append(ld_dict([{"@type": ["A"], "https://schema.org/name": [{"@value": "a"}]}])) + li.append(ld_dict([{"@type": ["A"], "https://schema.org/name": [{"@value": "a"}]}], parent=li)) li.append(["a"]) assert li[1]["@type"].item_list == ["A"] assert li.to_python() == ["foo", {"@type": ["A"], "schema:name": ["a"]}, ["a"]] @@ -192,23 +211,26 @@ def test_is_ld_list(): def test_is_container(): assert not any(ld_list.is_container(item) for item in [1, "", [], {}, {"a": "b"}]) - assert not any(ld_list.is_container(item) for item in [ld_dict([{"a": "b"}]), ld_list([{"@list": ["a"]}])]) + assert not any(ld_list.is_container(item) for item in [ld_dict([{"a": "b"}]), + ld_list([{"@list": [{"@value": "a"}]}], key="foo")]) assert not any(ld_list.is_container({"@list": value}) for value in ["", 1, {}]) assert all(ld_list.is_container({container_type: []}) for container_type in ["@list", "@graph", "@set"]) def test_from_list(): - li = ld_list.from_list([]) - assert li.item_list == li.context == [] and li.parent is li.key is li.index is None - assert li._data == [{"@list": []}] + li = ld_list.from_list([], key="schema:foo") + assert li.item_list == li.context == [] and li.parent is li.index is None and li.key == "schema:foo" + assert li._data == [] and li.container_type == "@set" li = ld_list.from_list([], parent=li, key="schema:name", context=[{"schema": "https://schema.org/"}]) assert li.item_list == [] and li.parent is not None and li.key == "schema:name" assert li.index is None and li.context == [{"schema": "https://schema.org/"}] - li = ld_list.from_list(["a", {"@value": "b"}], parent=None, key="https://schema.org/name", + li_data = ["a", {"@value": "b"}] + li = ld_list.from_list(li_data, parent=None, key="https://schema.org/name", context=[{"schema": "https://schema.org/"}]) assert li.item_list == [{"@value": "a"}, {"@value": "b"}] and li.parent is None assert li.key == "https://schema.org/name" and li.index is None assert li.context == [{"schema": "https://schema.org/"}] + assert li.item_list is not li_data # as li_data is expected to change they should not be the same object def test_get_item_list_from_container(): From 007e3a32efd6f097a263c505e36a68a3fe3b2479 Mon Sep 17 00:00:00 2001 From: notactuallyfinn Date: Fri, 21 Nov 2025 10:58:06 +0100 Subject: [PATCH 148/247] removed unnecessary lambda --- src/hermes/model/types/__init__.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/hermes/model/types/__init__.py b/src/hermes/model/types/__init__.py index bf47bdfb..3cf51f35 100644 --- a/src/hermes/model/types/__init__.py +++ b/src/hermes/model/types/__init__.py @@ -37,10 +37,10 @@ (lambda v: isinstance(v, str), {"python": lambda v, parent, **_: parent.ld_proc.compact_iri(parent.active_ctx, v)}), # Convert internal data types to expanded_json - (lambda c: ld_container.is_json_id(c), {"expanded_json": lambda c, **_: [c]}), - (lambda c: ld_container.is_ld_id(c), {"expanded_json": lambda c, **_: c}), - (lambda c: ld_container.is_json_value(c), {"expanded_json": lambda c, **_: [c]}), - (lambda c: ld_container.is_ld_value(c), {"expanded_json": lambda c, **_: c}), + (ld_container.is_json_id, {"expanded_json": lambda c, **_: [c]}), + (ld_container.is_ld_id, {"expanded_json": lambda c, **_: c}), + (ld_container.is_json_value, {"expanded_json": lambda c, **_: [c]}), + (ld_container.is_ld_value, {"expanded_json": lambda c, **_: c}), (ld_dict.is_json_dict, {"expanded_json": lambda c, **kw: ld_dict.from_dict(c, **kw).ld_value}), ( ld_list.is_container, From a68f2ae8abb4a81efa09784da6abce0c4b65a97e Mon Sep 17 00:00:00 2001 From: Stephan Druskat Date: Fri, 21 Nov 2025 11:25:12 +0100 Subject: [PATCH 149/247] Add test file --- test/hermes_test/model/test_api_e2e.py | 0 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 test/hermes_test/model/test_api_e2e.py diff --git a/test/hermes_test/model/test_api_e2e.py b/test/hermes_test/model/test_api_e2e.py new file mode 100644 index 00000000..e69de29b From 3c9ea6184fb6af97d782349b21c53d5f89bd1378 Mon Sep 17 00:00:00 2001 From: notactuallyfinn Date: Fri, 21 Nov 2025 12:03:02 +0100 Subject: [PATCH 150/247] fixed set and from_list and added del --- src/hermes/model/types/ld_list.py | 21 ++++++++++---------- test/hermes_test/model/types/test_ld_list.py | 19 +++++++++++++++--- 2 files changed, 27 insertions(+), 13 deletions(-) diff --git a/src/hermes/model/types/ld_list.py b/src/hermes/model/types/ld_list.py index a636bcb0..1fca70e6 100644 --- a/src/hermes/model/types/ld_list.py +++ b/src/hermes/model/types/ld_list.py @@ -58,11 +58,12 @@ def __setitem__(self, index, value): iter(value) except TypeError as exc: raise TypeError("must assign iterable to extended slice") from exc - expanded_value = [self._to_expanded_json(val) for val in value] - # TODO: the slice should work if all items including assimilated ones in the given order can be set via slice - # Implement this + expanded_value = ld_container.merge_to_list(*[self._to_expanded_json(val) for val in value]) self.item_list[index] = [val[0] if isinstance(val, list) else val for val in expanded_value] + def __delitem__(self, index): + del self.item_list[index] + def __len__(self): return len(self.item_list) @@ -147,18 +148,18 @@ def is_container(cls, value): def from_list(cls, value, *, parent=None, key=None, context=None, container_type="@set"): if key == "@type": container_type = "@set" - if container_type == "@set": - temp_list = [] - else: + if container_type != "@set": value = [{container_type: value}] - temp_list = [{container_type: value}] if parent is not None: expanded_value = parent._to_expanded_json(value) - # TODO: what should happen if value is assimilated by parent? - # -> return parent with added values + if isinstance(expanded_value, list) or not cls.is_container(expanded_value): + # parent has to be an ld_list because an ld_dict won't assimilate an list + parent.extend(expanded_value if isinstance(expanded_value, list) else [expanded_value]) + # TODO: is there a need to add the context to the parent as well? + return parent else: expanded_value = cls([], parent=None, key=key, context=context)._to_expanded_json(value) - # we don't care if it is assimilated by the temporary object as expanded_value is its replacement + # the object has to be a list for further use but does not have to be returned by _to_expanded_json as a list if not isinstance(expanded_value, list): expanded_value = [expanded_value] return cls(expanded_value, parent=parent, key=key, context=context) diff --git a/test/hermes_test/model/types/test_ld_list.py b/test/hermes_test/model/types/test_ld_list.py index 7e0fa390..d1d52484 100644 --- a/test/hermes_test/model/types/test_ld_list.py +++ b/test/hermes_test/model/types/test_ld_list.py @@ -89,10 +89,23 @@ def test_build_in_set_complex(): temp = di["schema:name"] di["schema:name"][0] = {"@list": ["a", "b"]} assert di["schema:name"][0] == ["a", "b"] and temp._data is di["schema:name"]._data - li = ld_list([{"@list": []}], key="schema:time", context=[{"schema": "https://schema.org/"}]) + li = ld_list([], key="schema:time", context=[{"schema": "https://schema.org/"}]) date_obj = date(year=2025, month=12, day=31) li.append(date_obj) assert li.item_list == [{"@value": date_obj.isoformat(), "@type": "https://schema.org/Date"}] + del li[0] + li[0:1] = ["a", "b", "c"] + assert li == ["a", "b", "c"] + li[0:3:2] = [["aa", "bb"]] + assert li == ["aa", "b", "bb"] + +def test_build_in_del(): + li = ld_list([{"@list": [{"@value": "foo"}, {"@value": "bar"}, {"@value": "foobar"}]}], + key="https://schema.org/name", context=[{"schema": "https://schema.org/"}]) + del li[0:3:2] + assert li == ["bar"] + del li[0] + assert li == [] def test_build_in_len(): @@ -222,8 +235,8 @@ def test_from_list(): assert li.item_list == li.context == [] and li.parent is li.index is None and li.key == "schema:foo" assert li._data == [] and li.container_type == "@set" li = ld_list.from_list([], parent=li, key="schema:name", context=[{"schema": "https://schema.org/"}]) - assert li.item_list == [] and li.parent is not None and li.key == "schema:name" - assert li.index is None and li.context == [{"schema": "https://schema.org/"}] + assert li.item_list == [] and li.parent is None and li.key == "schema:foo" + assert li.index is None and li.context == [] li_data = ["a", {"@value": "b"}] li = ld_list.from_list(li_data, parent=None, key="https://schema.org/name", context=[{"schema": "https://schema.org/"}]) From 66e03dffaf87f927e84eccbc3e0b7291577d1cc0 Mon Sep 17 00:00:00 2001 From: notactuallyfinn Date: Fri, 21 Nov 2025 13:05:21 +0100 Subject: [PATCH 151/247] updated tests of ld_container and fixed found bugs --- src/hermes/model/types/ld_container.py | 14 ++--- src/hermes/model/types/ld_list.py | 8 +-- test/hermes_test/model/types/conftest.py | 1 + .../model/types/test_ld_container.py | 52 ++++++++++--------- 4 files changed, 40 insertions(+), 35 deletions(-) diff --git a/src/hermes/model/types/ld_container.py b/src/hermes/model/types/ld_container.py index 50e38df4..ecff840b 100644 --- a/src/hermes/model/types/ld_container.py +++ b/src/hermes/model/types/ld_container.py @@ -129,15 +129,15 @@ def _to_expanded_json(self, value): value = value._data else: value = value._data[0] - elif isinstance(value, date): - value = {"@value": value.isoformat(), "@type": "schema:Date"} elif isinstance(value, datetime): value = {"@value": value.isoformat(), "@type": "schema:DateTime"} + elif isinstance(value, date): + value = {"@value": value.isoformat(), "@type": "schema:Date"} elif isinstance(value, time): value = {"@value": value.isoformat(), "@type": "schema:Time"} else: key_and_reference_todo_list = [(0, [value])] - special_types = (list, dict, ld_container, date, datetime, time) + special_types = (list, dict, ld_container, datetime, date, time) while True: if len(key_and_reference_todo_list) == 0: break @@ -149,10 +149,10 @@ def _to_expanded_json(self, value): key_and_reference_todo_list.extend([(new_key, temp) for new_key in temp.keys() if isinstance(temp[new_key], special_types)]) elif isinstance(temp, ld_container): ref[key] = temp._data[0] - elif isinstance(temp, date): - ref[key] = {"@value": temp.isoformat(), "@type": "schema:Date"} elif isinstance(temp, datetime): ref[key] = {"@value": temp.isoformat(), "@type": "schema:DateTime"} + elif isinstance(temp, date): + ref[key] = {"@value": temp.isoformat(), "@type": "schema:Date"} elif isinstance(temp, time): ref[key] = {"@value": temp.isoformat(), "@type": "schema:Time"} @@ -177,7 +177,7 @@ def _to_expanded_json(self, value): if self.__class__.__name__ == "ld_dict": return expanded_data - if self.__class__.__name__ == "ld_list" and len(expanded_data) != 1: + if len(expanded_data) != 1: return expanded_data return expanded_data[0] @@ -213,7 +213,7 @@ def _to_expanded_json_deprecated(self, key, value): return ld_value def __repr__(self): - return f'{type(self).__name__}({self._data[0]})' + return f'{type(self).__name__}({self._data})' def __str__(self): return str(self.to_python()) diff --git a/src/hermes/model/types/ld_list.py b/src/hermes/model/types/ld_list.py index 1fca70e6..b102f477 100644 --- a/src/hermes/model/types/ld_list.py +++ b/src/hermes/model/types/ld_list.py @@ -151,9 +151,11 @@ def from_list(cls, value, *, parent=None, key=None, context=None, container_type if container_type != "@set": value = [{container_type: value}] if parent is not None: - expanded_value = parent._to_expanded_json(value) - if isinstance(expanded_value, list) or not cls.is_container(expanded_value): - # parent has to be an ld_list because an ld_dict won't assimilate an list + if isinstance(parent, ld_list): + expanded_value = parent._to_expanded_json(value) + else: + expanded_value = parent._to_expanded_json({key: value})[cls.ld_proc.expand_iri(parent.active_ctx, key)] + if isinstance(parent, cls) and (isinstance(expanded_value, list) or not cls.is_container(expanded_value)): parent.extend(expanded_value if isinstance(expanded_value, list) else [expanded_value]) # TODO: is there a need to add the context to the parent as well? return parent diff --git a/test/hermes_test/model/types/conftest.py b/test/hermes_test/model/types/conftest.py index 8a1c7c2e..7d7e52d5 100644 --- a/test/hermes_test/model/types/conftest.py +++ b/test/hermes_test/model/types/conftest.py @@ -25,6 +25,7 @@ def vocabulary(cls, base_url: str = "http://spam.eggs/") -> dict: "ham": {"@id": f"{base_url}ham", "@type": "@id"}, "eggs": {"@id": f"{base_url}eggs", "@container": "@list"}, "use_until": {"@id": f"{base_url}use_until", "@type": "http://schema.org/DateTime"}, + "schema": "https://schema.org/", "Egg": {"@id": f"{base_url}Egg"}, } diff --git a/test/hermes_test/model/types/test_ld_container.py b/test/hermes_test/model/types/test_ld_container.py index 53cb15c8..dc5d8f8f 100644 --- a/test/hermes_test/model/types/test_ld_container.py +++ b/test/hermes_test/model/types/test_ld_container.py @@ -6,11 +6,12 @@ # SPDX-FileContributor: Michael Meinel # SPDX-FileContributor: Michael Fritzsche -from datetime import datetime +from datetime import date, datetime, time import pytest from hermes.model.types.ld_container import ld_container +from hermes.model.types.ld_dict import ld_dict '''we expect user of this class to give the right input data types @@ -83,7 +84,7 @@ def test_container_full_context_and_path(self, httpserver): def test_container_str_and_repr(self): cont = ld_container([{"spam": [{"@value": "bacon"}]}]) - assert repr(cont) == "ld_container({'spam': [{'@value': 'bacon'}]})" + assert repr(cont) == "ld_container([{'spam': [{'@value': 'bacon'}]}])" with pytest.raises(NotImplementedError): str(cont) @@ -96,13 +97,14 @@ def test_to_python_id_with_prefix(self, mock_context): assert cont._to_python("@id", f"{self.url}identifier") == "prefix:identifier" def test_to_python_type(self, mock_context): - cont = ld_container([{}], context=[mock_context]) + cont = ld_dict([{"@type": ["@id"]}], context=[mock_context]) assert cont._to_python("@type", ["@id"]) == ['@id'] + cont = ld_dict([{"@type": ["@id", "http://spam.eggs/Egg"]}], context=[mock_context]) assert cont._to_python("@type", ["@id", "http://spam.eggs/Egg"]) == ["@id", "Egg"] def test_to_python_id_value(self, mock_context): - cont = ld_container([{}], context=[mock_context]) - assert cont._to_python("http://spam.eggs/ham", [{"@id": "http://spam.eggs/spam"}]) == ["http://spam.eggs/spam"] + cont = ld_dict([{}], context=[mock_context]) + assert cont._to_python("http://spam.eggs/ham", [{"@id": "http://spam.eggs/spam"}]) == [{"@id": "http://spam.eggs/spam"}] assert cont._to_python("http://spam.eggs/ham", {"@id": "http://spam.eggs/identifier"}) == "http://spam.eggs/identifier" @@ -119,40 +121,40 @@ def test_to_python_datetime_value(self, mock_context): }) == "2022-02-22T00:00:00" # TODO: #434 typed date is returned as string instead of date def test_to_expanded_id(self, mock_context): - cont = ld_container([{}], context=[mock_context]) - assert cont._to_expanded_json("@id", f"{self.url}identifier") == f"{self.url}identifier" + cont = ld_dict([{}], context=[mock_context]) + assert cont._to_expanded_json({"@id": f"{self.url}identifier"}) == {"@id": f"{self.url}identifier"} # Regression test: "ham" is vocabulary and must not be expanded. - assert cont._to_expanded_json("@id", "ham") == "ham" + assert cont._to_expanded_json({"@id": "ham"}) == {"@id": "ham"} def test_to_expanded_id_with_prefix(self, mock_context): - cont = ld_container([{}], context=[mock_context, {"prefix": self.url}]) - assert cont._to_expanded_json("@id", "prefix:identifier") == f"{self.url}identifier" + cont = ld_dict([{}], context=[mock_context, {"prefix": self.url}]) + assert cont._to_expanded_json({"@id": "prefix:identifier"}) == {"@id": f"{self.url}identifier"} # Regression test: "ham" should still not be expaned, but "prefix:ham" should be. - assert cont._to_expanded_json("@id", "ham") == "ham" - assert cont._to_expanded_json("@id", "prefix:ham") == f"{self.url}ham" + assert cont._to_expanded_json({"@id": "ham"}) == {"@id": "ham"} + assert cont._to_expanded_json({"@id": "prefix:ham"}) == {"@id": f"{self.url}ham"} def test_to_expanded_type(self, mock_context): - cont = ld_container([{}], context=[mock_context]) - assert cont._to_expanded_json("@type", "Egg") == ["http://spam.eggs/Egg"] - assert cont._to_expanded_json("@type", ["Egg", "@id"]) == ["http://spam.eggs/Egg", "@id"] + cont = ld_dict([{}], context=[mock_context]) + assert cont._to_expanded_json({"@type": "Egg"}) == {"@type": ["http://spam.eggs/Egg"]} + assert cont._to_expanded_json({"@type": ["Egg", "@id"]}) =={"@type": ["http://spam.eggs/Egg", "@id"]} def test_to_expanded_id_value(self, mock_context): - cont = ld_container([{}], context=[mock_context]) - assert cont._to_expanded_json("ham", "spam") == [{"@id": "spam"}] + cont = ld_dict([{}], context=[mock_context]) + assert cont._to_expanded_json({"ham": "spam"}) == {"http://spam.eggs/ham": [{"@id": "spam"}]} def test_to_expanded_basic_value(self, mock_context): - cont = ld_container([{}], context=[mock_context]) - assert cont._to_expanded_json("spam", "bacon") == [{"@value": "bacon"}] - assert cont._to_expanded_json("spam", 123) == [{"@value": 123}] - assert cont._to_expanded_json("spam", True) == [{"@value": True}] + cont = ld_dict([{}], context=[mock_context]) + assert cont._to_expanded_json({"spam": "bacon"}) == {"http://spam.eggs/spam": [{"@value": "bacon"}]} + assert cont._to_expanded_json({"spam": 123}) == {"http://spam.eggs/spam": [{"@value": 123}]} + assert cont._to_expanded_json({"spam": True}) == {"http://spam.eggs/spam": [{"@value": True}]} def test_to_expanded_datetime_value(self, mock_context): - cont = ld_container([{}], context=[mock_context]) - assert cont._to_expanded_json("eggs", datetime(2022, 2, 22)) == [ - {"@value": "2022-02-22T00:00:00", "@type": "http://schema.org/DateTime"} - ] + cont = ld_dict([{}], context=[mock_context]) + assert cont._to_expanded_json({"eggs": datetime(2022, 2, 22)}) == {"http://spam.eggs/eggs": [{"@list": [ + {"@value": "2022-02-22T00:00:00", "@type": "https://schema.org/DateTime"} + ]}]} def test_are_values_equal(self): assert ld_container.are_values_equal({"@id": "foo"}, {"@id": "foo"}) From c4aea657b8893c3850081dbbfc0a97a5cffc88e4 Mon Sep 17 00:00:00 2001 From: notactuallyfinn Date: Fri, 21 Nov 2025 13:10:50 +0100 Subject: [PATCH 152/247] pacified flake8 --- src/hermes/model/types/ld_container.py | 10 ++++++---- src/hermes/model/types/ld_list.py | 2 +- test/hermes_test/model/types/test_ld_container.py | 7 ++++--- test/hermes_test/model/types/test_ld_list.py | 1 + 4 files changed, 12 insertions(+), 8 deletions(-) diff --git a/src/hermes/model/types/ld_container.py b/src/hermes/model/types/ld_container.py index ecff840b..c136d476 100644 --- a/src/hermes/model/types/ld_container.py +++ b/src/hermes/model/types/ld_container.py @@ -144,9 +144,11 @@ def _to_expanded_json(self, value): key, ref = key_and_reference_todo_list.pop() temp = ref[key] if isinstance(temp, list): - key_and_reference_todo_list.extend([(index, temp) for index, val in enumerate(temp) if isinstance(val, special_types)]) + key_and_reference_todo_list.extend([(index, temp) for index, val in enumerate(temp) + if isinstance(val, special_types)]) elif isinstance(temp, dict): - key_and_reference_todo_list.extend([(new_key, temp) for new_key in temp.keys() if isinstance(temp[new_key], special_types)]) + key_and_reference_todo_list.extend([(new_key, temp) for new_key in temp.keys() + if isinstance(temp[new_key], special_types)]) elif isinstance(temp, ld_container): ref[key] = temp._data[0] elif isinstance(temp, datetime): @@ -166,8 +168,8 @@ def _to_expanded_json(self, value): self_data = current_data[path[0]] current_data[path[0]] = value expanded_data = self.ld_proc.expand(parent._data, {"expandContext": self.full_context, - "documentLoader": bundled_loader, - "keepFreeFloatingNodes": True}) + "documentLoader": bundled_loader, + "keepFreeFloatingNodes": True}) if self_data is not None: current_data[path[0]] = self_data else: diff --git a/src/hermes/model/types/ld_list.py b/src/hermes/model/types/ld_list.py index b102f477..ac8af6b5 100644 --- a/src/hermes/model/types/ld_list.py +++ b/src/hermes/model/types/ld_list.py @@ -119,7 +119,7 @@ def __ne__(self, other): return not x def append(self, value): - ld_value = val if isinstance(val:= self._to_expanded_json(value), list) else [val] + ld_value = val if isinstance(val := self._to_expanded_json(value), list) else [val] self.item_list.extend(ld_value) def extend(self, value): diff --git a/test/hermes_test/model/types/test_ld_container.py b/test/hermes_test/model/types/test_ld_container.py index dc5d8f8f..ddc98405 100644 --- a/test/hermes_test/model/types/test_ld_container.py +++ b/test/hermes_test/model/types/test_ld_container.py @@ -6,7 +6,7 @@ # SPDX-FileContributor: Michael Meinel # SPDX-FileContributor: Michael Fritzsche -from datetime import date, datetime, time +from datetime import datetime import pytest @@ -104,7 +104,8 @@ def test_to_python_type(self, mock_context): def test_to_python_id_value(self, mock_context): cont = ld_dict([{}], context=[mock_context]) - assert cont._to_python("http://spam.eggs/ham", [{"@id": "http://spam.eggs/spam"}]) == [{"@id": "http://spam.eggs/spam"}] + assert cont._to_python("http://spam.eggs/ham", + [{"@id": "http://spam.eggs/spam"}]) == [{"@id": "http://spam.eggs/spam"}] assert cont._to_python("http://spam.eggs/ham", {"@id": "http://spam.eggs/identifier"}) == "http://spam.eggs/identifier" @@ -138,7 +139,7 @@ def test_to_expanded_id_with_prefix(self, mock_context): def test_to_expanded_type(self, mock_context): cont = ld_dict([{}], context=[mock_context]) assert cont._to_expanded_json({"@type": "Egg"}) == {"@type": ["http://spam.eggs/Egg"]} - assert cont._to_expanded_json({"@type": ["Egg", "@id"]}) =={"@type": ["http://spam.eggs/Egg", "@id"]} + assert cont._to_expanded_json({"@type": ["Egg", "@id"]}) == {"@type": ["http://spam.eggs/Egg", "@id"]} def test_to_expanded_id_value(self, mock_context): cont = ld_dict([{}], context=[mock_context]) diff --git a/test/hermes_test/model/types/test_ld_list.py b/test/hermes_test/model/types/test_ld_list.py index d1d52484..5f29017c 100644 --- a/test/hermes_test/model/types/test_ld_list.py +++ b/test/hermes_test/model/types/test_ld_list.py @@ -99,6 +99,7 @@ def test_build_in_set_complex(): li[0:3:2] = [["aa", "bb"]] assert li == ["aa", "b", "bb"] + def test_build_in_del(): li = ld_list([{"@list": [{"@value": "foo"}, {"@value": "bar"}, {"@value": "foobar"}]}], key="https://schema.org/name", context=[{"schema": "https://schema.org/"}]) From 7d30b169561a5583e747aec82d74352218894fd1 Mon Sep 17 00:00:00 2001 From: Michael Fritzsche Date: Mon, 24 Nov 2025 09:02:38 +0100 Subject: [PATCH 153/247] improved ld_list.__contains__ and added tests for it --- src/hermes/model/types/ld_list.py | 13 +++++++++---- test/hermes_test/model/types/test_ld_list.py | 8 ++++++-- 2 files changed, 15 insertions(+), 6 deletions(-) diff --git a/src/hermes/model/types/ld_list.py b/src/hermes/model/types/ld_list.py index ac8af6b5..ea693053 100644 --- a/src/hermes/model/types/ld_list.py +++ b/src/hermes/model/types/ld_list.py @@ -75,10 +75,15 @@ def __iter__(self): yield item def __contains__(self, value): - # TODO: Update to use new _to_expanded_json - # and return True if value would be assimilated by self and all those items are in self - expanded_value = val[0] if isinstance(val := self._to_expanded_json_deprecated(self.key, value), list) else val - return expanded_value in self.item_list + expanded_value = self._to_expanded_json(value) + if isinstance(expanded_value, list): + return all(val in self for val in expanded_value) + self_attributes = {"parent": self.parent, "key": self.key, "index": self.index, "context": self.full_context} + if self.container_type == "@set": + temp_list = ld_list([expanded_value], **self_attributes) + return any(temp_list == ld_list([val], **self_attributes) for val in self.item_list) + temp_list = ld_list([{self.container_type: [expanded_value]}], **self_attributes) + return any(temp_list == ld_list([{self.container_type: [val]}], **self_attributes) for val in self.item_list) def __eq__(self, other): if not (isinstance(other, (list, ld_list)) or ld_list.is_container(other)): diff --git a/test/hermes_test/model/types/test_ld_list.py b/test/hermes_test/model/types/test_ld_list.py index 5f29017c..1b0a4127 100644 --- a/test/hermes_test/model/types/test_ld_list.py +++ b/test/hermes_test/model/types/test_ld_list.py @@ -93,7 +93,6 @@ def test_build_in_set_complex(): date_obj = date(year=2025, month=12, day=31) li.append(date_obj) assert li.item_list == [{"@value": date_obj.isoformat(), "@type": "https://schema.org/Date"}] - del li[0] li[0:1] = ["a", "b", "c"] assert li == ["a", "b", "c"] li[0:3:2] = [["aa", "bb"]] @@ -143,11 +142,16 @@ def test_append(): def test_build_in_contains(): - li = ld_list([{"@list": []}], key="https://schema.org/name", context=[{"schema": "https://schema.org/"}]) + li = ld_list([], key="https://schema.org/name", context=[{"schema": "https://schema.org/"}]) li.append("foo") li.append({"@type": "A", "schema:name": "a"}) assert "foo" in li and {"@type": "A", "schema:name": "a"} in li assert {"@value": "foo"} in li and {"@type": "A", "https://schema.org/name": "a"} in li + assert ["foo", {"@type": "A", "schema:name": "a"}] in li + assert [{"@list": ["foo", {"@type": "A", "schema:name": "a"}]}] not in li + li.append({"@id": "schema:foo", "schema:name": "foo"}) + assert {"@id": "schema:foo"} in li and {"@id": "schema:foo", "schema:name": "foobar"} in li + assert {"schema:name": "foo"} in li def test_build_in_comparison(): From f8d259fca9862ddef681d435387b4746e2482e08 Mon Sep 17 00:00:00 2001 From: Michael Fritzsche Date: Mon, 24 Nov 2025 10:56:23 +0100 Subject: [PATCH 154/247] added one TODO item to ld_list --- src/hermes/model/types/ld_list.py | 1 + 1 file changed, 1 insertion(+) diff --git a/src/hermes/model/types/ld_list.py b/src/hermes/model/types/ld_list.py index ea693053..6766f1c9 100644 --- a/src/hermes/model/types/ld_list.py +++ b/src/hermes/model/types/ld_list.py @@ -86,6 +86,7 @@ def __contains__(self, value): return any(temp_list == ld_list([{self.container_type: [val]}], **self_attributes) for val in self.item_list) def __eq__(self, other): + # TODO: ld_lists with container_type "@set" have to be considered unordered if not (isinstance(other, (list, ld_list)) or ld_list.is_container(other)): return NotImplemented if isinstance(other, dict): From 2b98627b2687f87fc4c49e7abada6435baa15944 Mon Sep 17 00:00:00 2001 From: Michael Fritzsche Date: Mon, 24 Nov 2025 10:56:58 +0100 Subject: [PATCH 155/247] began adding doc strings, type hints and comments for ld_container --- src/hermes/model/types/ld_container.py | 108 ++++++++++++++++++++++--- 1 file changed, 97 insertions(+), 11 deletions(-) diff --git a/src/hermes/model/types/ld_container.py b/src/hermes/model/types/ld_container.py index c136d476..359a4ebe 100644 --- a/src/hermes/model/types/ld_container.py +++ b/src/hermes/model/types/ld_container.py @@ -7,6 +7,9 @@ from .pyld_util import JsonLdProcessor, bundled_loader from datetime import date, time, datetime +from typing import Self + +JSON_LD_CONTEXT_DICT = dict[str, str | 'JSON_LD_CONTEXT_DICT'] class ld_container: @@ -20,14 +23,33 @@ class ld_container: ld_proc = JsonLdProcessor() - def __init__(self, data, *, parent=None, key=None, index=None, context=None): + def __init__( + self: Self, + data: list, + *, + parent: "ld_container" | None = None, + key: str | None = None, + index: int | None = None, + context: list[str | JSON_LD_CONTEXT_DICT] | None = None, + ) -> None: """ Create a new instance of an ld_container. + :param self: The instance of ld_container to be initialized. + :type self: Self :param data: The expanded json-ld data that is mapped. - :param parent: Optional parent node of this container. - :param key: Optional key into the parent container. - :param context: Optional local context for this container. + :type data: list + :param parent: parent node of this container. + :type parent: ld_container | None + :param key: key into the parent container. + :type key: str | None + :param key: index into the parent container. + :type index: int | None + :param context: local context for this container. + :type context: list[str | JSON_LD_CONTEXT_DICT] + + :return: + :rtype: None """ # Store basic data @@ -54,7 +76,18 @@ def __init__(self, data, *, parent=None, key=None, index=None, context=None): {"documentLoader": bundled_loader} ) - def add_context(self, context): + def add_context(self: Self, context: list[str | JSON_LD_CONTEXT_DICT]) -> None: + """ + Add the given context to the ld_container. + + :param self: The ld_container the context should be added to. + :type self: Self + :param context: The context to be added to self. + :type context: list[str | JSON_LD_CONTEXT_DICT] + + :return: + :rtype: None + """ self.context = self.merge_to_list(self.context, context) self.active_ctx = self.ld_proc.process_context( self.active_ctx, @@ -63,31 +96,84 @@ def add_context(self, context): ) @property - def full_context(self): + def full_context(self: Self) -> list[str, JSON_LD_CONTEXT_DICT] | None: + """ + Return the context of the ld_container merged with the full_context of its parent. + + :param self: The ld_container whose full_context is returned + :type self: Self + + :return: The context of the ld_container merged with the full_context of its parent via + ld_container.merge_to_list or just the context of this ld_container if self.parent is None. + :rtype: list[str | JSON_LD_CONTEXT_DICT] | None + """ if self.parent is not None: return self.merge_to_list(self.parent.full_context, self.context) else: return self.context @property - def path(self): - """ Create a path representation for this item. """ + def path(self: Self) -> list[str | int]: + """ + Create a path representation for this item. + + :param self: The ld_container the path leads to from its outer most parent container. + :type self: Self + + :return: The path from selfs outer most parent to it self. + Let parent be the outer most parent of self. + Start with index = 1 and iteratively set parent to parent[path[index]] and then increment index + until index == len(path) to get parent is self == true. + :rtype: list[str | int] + """ if self.parent: return self.parent.path + [self.key if self.index is None else self.index] else: return ['$'] @property - def ld_value(self): - """ Retrun a representation that is suitable as a value in expanded JSON-LD. """ + def ld_value(self: Self) -> list: + """ + Return a representation that is suitable as a value in expanded JSON-LD of this ld_container. + + :param self: The ld_container whose expanded JSON-LD representation is returned. + :type self: Self + + :return: The expanded JSON-LD value of this container. + This value is the basis of all operations and a reference to the original is returned and not a copy. + Do **not** modify unless strictly necessary and you know what you do. + Otherwise unexpected behavior may occur. + :rtype: list + """ return self._data - def _to_python(self, full_iri, ld_value): + def _to_python( + self: Self, + full_iri: str, + ld_value: list | dict | str + ) -> "ld_container" | str | int | float | bool | date | datetime | time: + """ + Returns a pythonized version of the given value pretending the value is in self and full_iri its key. + + :param self: the ld_container ld_value is considered to be in. + :type self: Self + :param full_iri: The expanded iri of the key of ld_value / self (later if self is not a dictionary). + :type full_iri: str + :param ld_value: The value thats pythonized value is requested. ld_value has to be valid expanded JSON-LD if it + was embeded in self._data. + :type ld_value: list | dict | str + + :return: The pythonized value of the ld_value. + :rtype: ld_container | str | int | float | bool | date | datetime | time + """ if full_iri == "@id": + # values of key "@id" only have to be compacted value = self.ld_proc.compact_iri(self.active_ctx, ld_value, vocab=False) else: + # use the type map from src/hermes/model/types/__init__.py to convert all other values. value, ld_output = self.ld_proc.apply_typemap(ld_value, "python", "ld_container", parent=self, key=full_iri) + # check if conversion was successful if ld_output is None: raise TypeError(full_iri, ld_value) From 1958518bc914d1fc9e1e192146352ea4590627c7 Mon Sep 17 00:00:00 2001 From: notactuallyfinn Date: Fri, 28 Nov 2025 11:14:02 +0100 Subject: [PATCH 156/247] improved _to_expanded_json --- src/hermes/model/types/ld_container.py | 27 ++++++++---------- src/hermes/model/types/ld_list.py | 39 +++++++++++--------------- 2 files changed, 29 insertions(+), 37 deletions(-) diff --git a/src/hermes/model/types/ld_container.py b/src/hermes/model/types/ld_container.py index 359a4ebe..4f0d6f41 100644 --- a/src/hermes/model/types/ld_container.py +++ b/src/hermes/model/types/ld_container.py @@ -181,18 +181,19 @@ def _to_python( def _to_expanded_json(self, value): """ - The item_lists contents/ the data_dict will be substituted with value. - Value can be an ld_container or contain zero or more. - Then the _data of the inner most ld_dict that contains or is self will be expanded. - If self is not an ld_dict and none of self's parents is, use the key from ld_list to generate a minimal dict + Returns an expanded version of the given value. - The result of this function is what value has turned into - (always a list for type(self) == ld_dict and list or dict for type(self) == ld_list). - If self is an ld_list and value was assimilated by self the returned value is list otherwise it is a dict - (e.g. in a set the inner sets values are put directly into the outer one). + The item_list/ data_dict of self will be substituted with value. + Value can be an ld_container or contain zero or more. + Then the _data of the inner most ld_dict that contains or is self will be expanded + using the JSON_LD-Processor. + If self and none of self's parents is an ld_dict, use the key from outer most ld_list + to generate a minimal dict. + + The result of this function is what value has turned into: + - If type(self) == ld_dict: the returned value is a dict + - If type(self) == ld_list: the returned value is a list """ - if self.__class__.__name__ == "ld_list": - value = [value] parent = self path = [] while parent.__class__.__name__ != "ld_dict": @@ -263,11 +264,7 @@ def _to_expanded_json(self, value): for index in range(len(path) - 1, -1, -1): expanded_data = expanded_data[path[index]] - if self.__class__.__name__ == "ld_dict": - return expanded_data - if len(expanded_data) != 1: - return expanded_data - return expanded_data[0] + return expanded_data def _to_expanded_json_deprecated(self, key, value): if key == "@id": diff --git a/src/hermes/model/types/ld_list.py b/src/hermes/model/types/ld_list.py index 6766f1c9..5402f85d 100644 --- a/src/hermes/model/types/ld_list.py +++ b/src/hermes/model/types/ld_list.py @@ -45,20 +45,17 @@ def __getitem__(self, index): def __setitem__(self, index, value): if not isinstance(index, slice): - value = self._to_expanded_json(value) - if not isinstance(value, list): - self.item_list[index] = value - return - if index < 0: - self.item_list[index-1:index] = value - else: + value = self._to_expanded_json([value]) + if index != -1: self.item_list[index:index+1] = value + else: + self.item_list[index:] = value return try: iter(value) except TypeError as exc: raise TypeError("must assign iterable to extended slice") from exc - expanded_value = ld_container.merge_to_list(*[self._to_expanded_json(val) for val in value]) + expanded_value = ld_container.merge_to_list(*[self._to_expanded_json([val]) for val in value]) self.item_list[index] = [val[0] if isinstance(val, list) else val for val in expanded_value] def __delitem__(self, index): @@ -75,14 +72,16 @@ def __iter__(self): yield item def __contains__(self, value): - expanded_value = self._to_expanded_json(value) - if isinstance(expanded_value, list): + expanded_value = self._to_expanded_json([value]) + if len(expanded_value) == 0: + return True + if len(expanded_value) > 1: return all(val in self for val in expanded_value) self_attributes = {"parent": self.parent, "key": self.key, "index": self.index, "context": self.full_context} if self.container_type == "@set": - temp_list = ld_list([expanded_value], **self_attributes) + temp_list = ld_list(expanded_value, **self_attributes) return any(temp_list == ld_list([val], **self_attributes) for val in self.item_list) - temp_list = ld_list([{self.container_type: [expanded_value]}], **self_attributes) + temp_list = ld_list([{self.container_type: expanded_value}], **self_attributes) return any(temp_list == ld_list([{self.container_type: [val]}], **self_attributes) for val in self.item_list) def __eq__(self, other): @@ -125,8 +124,7 @@ def __ne__(self, other): return not x def append(self, value): - ld_value = val if isinstance(val := self._to_expanded_json(value), list) else [val] - self.item_list.extend(ld_value) + self.item_list.extend(self._to_expanded_json([value])) def extend(self, value): for item in value: @@ -158,18 +156,15 @@ def from_list(cls, value, *, parent=None, key=None, context=None, container_type value = [{container_type: value}] if parent is not None: if isinstance(parent, ld_list): - expanded_value = parent._to_expanded_json(value) + expanded_value = parent._to_expanded_json([value]) + if (len(expanded_value) != 1 or + not (isinstance(expanded_value[0], list) or cls.is_container(expanded_value[0]))): + parent.extend(expanded_value) + return parent else: expanded_value = parent._to_expanded_json({key: value})[cls.ld_proc.expand_iri(parent.active_ctx, key)] - if isinstance(parent, cls) and (isinstance(expanded_value, list) or not cls.is_container(expanded_value)): - parent.extend(expanded_value if isinstance(expanded_value, list) else [expanded_value]) - # TODO: is there a need to add the context to the parent as well? - return parent else: expanded_value = cls([], parent=None, key=key, context=context)._to_expanded_json(value) - # the object has to be a list for further use but does not have to be returned by _to_expanded_json as a list - if not isinstance(expanded_value, list): - expanded_value = [expanded_value] return cls(expanded_value, parent=parent, key=key, context=context) @classmethod From 4acf8de1a1f81c2349ce325130bff710ebdc4614 Mon Sep 17 00:00:00 2001 From: notactuallyfinn Date: Fri, 28 Nov 2025 12:48:47 +0100 Subject: [PATCH 157/247] added doc-strings and comments and pacified flake8 --- src/hermes/model/types/ld_container.py | 228 ++++++++++++++++--------- src/hermes/model/types/ld_list.py | 2 +- 2 files changed, 145 insertions(+), 85 deletions(-) diff --git a/src/hermes/model/types/ld_container.py b/src/hermes/model/types/ld_container.py index 4f0d6f41..b443ae1d 100644 --- a/src/hermes/model/types/ld_container.py +++ b/src/hermes/model/types/ld_container.py @@ -7,9 +7,22 @@ from .pyld_util import JsonLdProcessor, bundled_loader from datetime import date, time, datetime -from typing import Self +from typing import Union, Self, Any -JSON_LD_CONTEXT_DICT = dict[str, str | 'JSON_LD_CONTEXT_DICT'] + +JSON_LD_CONTEXT_DICT = dict[str, Union[str, "JSON_LD_CONTEXT_DICT"]] +BASIC_TYPE = Union[str, float, int, bool] +EXPANDED_JSON_LD_VALUE = Union[ + list[Union["EXPANDED_JSON_LD_VALUE", BASIC_TYPE]], dict[str, Union["EXPANDED_JSON_LD_VALUE", BASIC_TYPE]] +] +COMPACTED_JSON_LD_VALUE = Union[ + list[Union["COMPACTED_JSON_LD_VALUE", BASIC_TYPE]], dict[str, Union["COMPACTED_JSON_LD_VALUE", BASIC_TYPE]] +] +TIME_TYPE = Union[datetime, date, time] +JSON_LD_VALUE = Union[ + list[Union["JSON_LD_VALUE", BASIC_TYPE, TIME_TYPE, "ld_container"]], + dict[str, Union["JSON_LD_VALUE", BASIC_TYPE, TIME_TYPE, "ld_container"]], +] class ld_container: @@ -27,10 +40,10 @@ def __init__( self: Self, data: list, *, - parent: "ld_container" | None = None, - key: str | None = None, - index: int | None = None, - context: list[str | JSON_LD_CONTEXT_DICT] | None = None, + parent: Union["ld_container", None] = None, + key: Union[str, None] = None, + index: Union[int, None] = None, + context: Union[list[Union[str, JSON_LD_CONTEXT_DICT]], None] = None, ) -> None: """ Create a new instance of an ld_container. @@ -46,7 +59,7 @@ def __init__( :param key: index into the parent container. :type index: int | None :param context: local context for this container. - :type context: list[str | JSON_LD_CONTEXT_DICT] + :type context: list[str | JSON_LD_CONTEXT_DICT] | None :return: :rtype: None @@ -65,18 +78,14 @@ def __init__( if self.parent: if self.context: self.active_ctx = self.ld_proc.process_context( - self.parent.active_ctx, - self.context, - {"documentLoader": bundled_loader}) + self.parent.active_ctx, self.context, {"documentLoader": bundled_loader} + ) else: self.active_ctx = parent.active_ctx else: - self.active_ctx = self.ld_proc.initial_ctx( - self.full_context, - {"documentLoader": bundled_loader} - ) + self.active_ctx = self.ld_proc.initial_ctx(self.full_context, {"documentLoader": bundled_loader}) - def add_context(self: Self, context: list[str | JSON_LD_CONTEXT_DICT]) -> None: + def add_context(self: Self, context: list[Union[str | JSON_LD_CONTEXT_DICT]]) -> None: """ Add the given context to the ld_container. @@ -89,14 +98,10 @@ def add_context(self: Self, context: list[str | JSON_LD_CONTEXT_DICT]) -> None: :rtype: None """ self.context = self.merge_to_list(self.context, context) - self.active_ctx = self.ld_proc.process_context( - self.active_ctx, - context, - {"documentLoader": bundled_loader} - ) + self.active_ctx = self.ld_proc.process_context(self.active_ctx, context, {"documentLoader": bundled_loader}) @property - def full_context(self: Self) -> list[str, JSON_LD_CONTEXT_DICT] | None: + def full_context(self: Self) -> Union[list[Union[str, JSON_LD_CONTEXT_DICT]], None]: """ Return the context of the ld_container merged with the full_context of its parent. @@ -113,7 +118,7 @@ def full_context(self: Self) -> list[str, JSON_LD_CONTEXT_DICT] | None: return self.context @property - def path(self: Self) -> list[str | int]: + def path(self: Self) -> list[Union[str, int]]: """ Create a path representation for this item. @@ -129,10 +134,10 @@ def path(self: Self) -> list[str | int]: if self.parent: return self.parent.path + [self.key if self.index is None else self.index] else: - return ['$'] + return ["$"] @property - def ld_value(self: Self) -> list: + def ld_value(self: Self) -> EXPANDED_JSON_LD_VALUE: """ Return a representation that is suitable as a value in expanded JSON-LD of this ld_container. @@ -143,15 +148,13 @@ def ld_value(self: Self) -> list: This value is the basis of all operations and a reference to the original is returned and not a copy. Do **not** modify unless strictly necessary and you know what you do. Otherwise unexpected behavior may occur. - :rtype: list + :rtype: EXPANDED_JSON_LD_VALUE """ return self._data def _to_python( - self: Self, - full_iri: str, - ld_value: list | dict | str - ) -> "ld_container" | str | int | float | bool | date | datetime | time: + self: Self, full_iri: str, ld_value: Union[list, dict, str] + ) -> Union["ld_container", BASIC_TYPE, TIME_TYPE]: """ Returns a pythonized version of the given value pretending the value is in self and full_iri its key. @@ -164,36 +167,49 @@ def _to_python( :type ld_value: list | dict | str :return: The pythonized value of the ld_value. - :rtype: ld_container | str | int | float | bool | date | datetime | time + :rtype: ld_container | BASIC_TYPE | TIME_TYPE """ if full_iri == "@id": # values of key "@id" only have to be compacted value = self.ld_proc.compact_iri(self.active_ctx, ld_value, vocab=False) else: # use the type map from src/hermes/model/types/__init__.py to convert all other values. - value, ld_output = self.ld_proc.apply_typemap(ld_value, "python", "ld_container", - parent=self, key=full_iri) + value, ld_output = self.ld_proc.apply_typemap(ld_value, "python", "ld_container", parent=self, key=full_iri) # check if conversion was successful if ld_output is None: raise TypeError(full_iri, ld_value) return value - def _to_expanded_json(self, value): + def _to_expanded_json(self: Self, value: JSON_LD_VALUE) -> EXPANDED_JSON_LD_VALUE: """ - Returns an expanded version of the given value. - - The item_list/ data_dict of self will be substituted with value. - Value can be an ld_container or contain zero or more. - Then the _data of the inner most ld_dict that contains or is self will be expanded - using the JSON_LD-Processor. - If self and none of self's parents is an ld_dict, use the key from outer most ld_list - to generate a minimal dict. - - The result of this function is what value has turned into: - - If type(self) == ld_dict: the returned value is a dict - - If type(self) == ld_list: the returned value is a list + Returns an expanded version of the given value. + + The item_list/ data_dict of self will be substituted with value. + Value can be an ld_container or contain zero or more. + Then the _data of the inner most ld_dict that contains or is self will be expanded + using the JSON_LD-Processor. + If self and none of self's parents is an ld_dict, use the key from outer most ld_list + to generate a minimal dict. + + The result of this function is what value has turned into. + + :param self: The ld_dict or ld_list in which value gets expanded + :type self: Self + :param value: The value that is to be expanded. Different types are expected based on the type of self: +
  • If type(self) == ld_dict: value must be a dict
  • +
  • If type(self) == ld_list: value must be a list
+ value will be exapnded as if it was the data_dict/ the item_list of self. + :type value: JSON_LD_VALUE + + :return: The expanded version of value i.e. the data_dict/ item_list of self if it had been value. + The return type is based on the type of self: +
  • If type(self) == ld_dict: the returned values type is dict
  • +
  • If type(self) == ld_list: the returned values type is list
+ :rtype: EXPANDED_JSON_LD_VALUE """ + # search for an ld_dict that is either self or the inner most parents parent of self that is an ld_dict + # while searching build a path such that it leads from the found ld_dicts ld_value to selfs data_dict/ item_list parent = self path = [] while parent.__class__.__name__ != "ld_dict": @@ -205,37 +221,36 @@ def _to_expanded_json(self, value): if parent.parent is None: break parent = parent.parent + + # if neither self nor any of its parents is a ld_dict: + # create a dict with the key of the outer most parent of self and this parents ld_value as a value + # this dict is stored in an ld_container and simulates the most minimal JSON-LD object possible if parent.__class__.__name__ != "ld_dict": key = self.ld_proc.expand_iri(parent.active_ctx, parent.key) parent = ld_container([{key: parent._data}]) path.append(0) - key_and_reference_todo_list = [] - if isinstance(value, ld_container): - if parent.__class__.__name__ == "ld_list" and parent.container_type == "@set": - value = value._data - else: - value = value._data[0] - elif isinstance(value, datetime): - value = {"@value": value.isoformat(), "@type": "schema:DateTime"} - elif isinstance(value, date): - value = {"@value": value.isoformat(), "@type": "schema:Date"} - elif isinstance(value, time): - value = {"@value": value.isoformat(), "@type": "schema:Time"} - else: - key_and_reference_todo_list = [(0, [value])] + # all ld_container (ld_dicts and ld_lists) and datetime, date as well as time objects in value have to dissolved + # because the JSON-LD processor can't handle them + # to do this traverse value in a BFS and replace all items with a type in 'special_types' with a usable values + key_and_reference_todo_list = [(0, [value])] special_types = (list, dict, ld_container, datetime, date, time) while True: + # check if ready if len(key_and_reference_todo_list) == 0: break + # get next item key, ref = key_and_reference_todo_list.pop() temp = ref[key] + # replace item if necessary and add childs to the todo list if isinstance(temp, list): - key_and_reference_todo_list.extend([(index, temp) for index, val in enumerate(temp) - if isinstance(val, special_types)]) + key_and_reference_todo_list.extend( + [(index, temp) for index, val in enumerate(temp) if isinstance(val, special_types)] + ) elif isinstance(temp, dict): - key_and_reference_todo_list.extend([(new_key, temp) for new_key in temp.keys() - if isinstance(temp[new_key], special_types)]) + key_and_reference_todo_list.extend( + [(new_key, temp) for new_key in temp.keys() if isinstance(temp[new_key], special_types)] + ) elif isinstance(temp, ld_container): ref[key] = temp._data[0] elif isinstance(temp, datetime): @@ -245,27 +260,40 @@ def _to_expanded_json(self, value): elif isinstance(temp, time): ref[key] = {"@value": temp.isoformat(), "@type": "schema:Time"} + # traverse the ld_value of parent with the previously generated path current_data = parent._data for index in range(len(path) - 1, 0, -1): current_data = current_data[path[index]] + # replace the data_dict/ item_list so that value is now inside of the ld_value of parent and store the old value if current_data == []: + # itemlist of an empty ld_list: + # The item_list can't be replaced like in all other cases self_data = None current_data.append(value) else: self_data = current_data[path[0]] current_data[path[0]] = value - expanded_data = self.ld_proc.expand(parent._data, {"expandContext": self.full_context, - "documentLoader": bundled_loader, - "keepFreeFloatingNodes": True}) + + # expand the ld_value of parent to implicitly expand value + # important the ld_value of parent is not modified because the processor makes a deep copy + expanded_data = self.ld_proc.expand( + parent._data, + {"expandContext": self.full_context, "documentLoader": bundled_loader, "keepFreeFloatingNodes": True}, + ) + + # restore the data_dict/ item_list to its former state if self_data is not None: current_data[path[0]] = self_data else: current_data.clear() + + # use the path to get the expansion of value for index in range(len(path) - 1, -1, -1): expanded_data = expanded_data[path[index]] return expanded_data + # TODO: remove this method and all other unused methods/ functions def _to_expanded_json_deprecated(self, key, value): if key == "@id": ld_value = self.ld_proc.expand_iri(self.active_ctx, value, vocab=False) @@ -275,11 +303,12 @@ def _to_expanded_json_deprecated(self, key, value): ld_value = [self.ld_proc.expand_iri(self.active_ctx, ld_type) for ld_type in value] else: short_key = self.ld_proc.compact_iri(self.active_ctx, key) - if ':' in short_key: - prefix, short_key = short_key.split(':', 1) + if ":" in short_key: + prefix, short_key = short_key.split(":", 1) ctx_value = self.ld_proc.get_context_value(self.active_ctx, prefix, "@id") - active_ctx = self.ld_proc.process_context(self.active_ctx, [ctx_value], - {"documentLoader": bundled_loader}) + active_ctx = self.ld_proc.process_context( + self.active_ctx, [ctx_value], {"documentLoader": bundled_loader} + ) else: active_ctx = self.active_ctx ld_type = self.ld_proc.get_context_value(active_ctx, short_key, "@type") @@ -287,38 +316,50 @@ def _to_expanded_json_deprecated(self, key, value): ld_value = [{"@id": value}] ld_output = "expanded_json" else: - ld_value, ld_output = self.ld_proc.apply_typemap(value, "expanded_json", "json", - parent=self, key=key) + ld_value, ld_output = self.ld_proc.apply_typemap(value, "expanded_json", "json", parent=self, key=key) if ld_output == "json": - ld_value = self.ld_proc.expand(ld_value, {"expandContext": self.full_context, - "documentLoader": bundled_loader}) + ld_value = self.ld_proc.expand( + ld_value, {"expandContext": self.full_context, "documentLoader": bundled_loader} + ) elif ld_output != "expanded_json": raise TypeError(f"Cannot convert {type(value)}") return ld_value - def __repr__(self): - return f'{type(self).__name__}({self._data})' + def __repr__(self: Self) -> str: + return f"{type(self).__name__}({self._data})" - def __str__(self): + def __str__(self: Self) -> str: return str(self.to_python()) - def compact(self, context=None): + def compact( + self: Self, context: Union[list[JSON_LD_CONTEXT_DICT], JSON_LD_CONTEXT_DICT] = None + ) -> COMPACTED_JSON_LD_VALUE: return self.ld_proc.compact( - self.ld_value, - context or self.context, - {"documentLoader": bundled_loader, "skipExpand": True} + self.ld_value, context or self.context, {"documentLoader": bundled_loader, "skipExpand": True} ) def to_python(self): raise NotImplementedError() @classmethod - def merge_to_list(cls, *args): + def merge_to_list(cls: Self, *args: tuple[Any]) -> list[Any]: + """ + Returns a list that is contains all non-list items from args and all items in the lists in args. + + :param *args: The items that should be put into one list. + :type *args: tuple[Any] + + :return: A list containing all non-list items and all items from lists in args. (Same order as in args) + :rytpe: list[Any] + """ + # base case for recursion if not args: return [] + # split args into first and all other items head, *tail = args + # recursion calls if isinstance(head, list): return [*head, *cls.merge_to_list(*tail)] else: @@ -354,14 +395,33 @@ def is_typed_json_value(cls, ld_value): @classmethod def typed_ld_to_py(cls, data, **kwargs): - ld_value = data[0]['@value'] + ld_value = data[0]["@value"] return ld_value @classmethod - def are_values_equal(cls, first, second): + def are_values_equal( + cls: Self, first: dict[str, Union[BASIC_TYPE, TIME_TYPE]], second: dict[str, Union[BASIC_TYPE, TIME_TYPE]] + ) -> bool: + """ + Returns whether or not the given expanded JSON-LD values are considered equal. + The comparison compares the "@id" values first and returns the result if it is conclusive. + + If the comparison is inconclusive i.e. exactly one or zero of both values have an "@id" value: + Return whether or not all other keys exist in both values and all values of the keys are the same. + + :param first: The first value of the comparison + :type first: dict[str, Union[BASIC_TYPE, TIME_TYPE]] + :param second: The second value of the comparison + :type second: dict[str, Union[BASIC_TYPE, TIME_TYPE]] + + :return: Whether the values are considered equal or not. + :rtype: bool + """ + # compare @id's if "@id" in first and "@id" in second: return first["@id"] == second["@id"] + # compare all other values and keys (@id-comparison was inconclusive) for key in {"@value", "@type"}: if (key in first) ^ (key in second): return False diff --git a/src/hermes/model/types/ld_list.py b/src/hermes/model/types/ld_list.py index 5402f85d..fd958b65 100644 --- a/src/hermes/model/types/ld_list.py +++ b/src/hermes/model/types/ld_list.py @@ -158,7 +158,7 @@ def from_list(cls, value, *, parent=None, key=None, context=None, container_type if isinstance(parent, ld_list): expanded_value = parent._to_expanded_json([value]) if (len(expanded_value) != 1 or - not (isinstance(expanded_value[0], list) or cls.is_container(expanded_value[0]))): + not (isinstance(expanded_value[0], list) or cls.is_container(expanded_value[0]))): parent.extend(expanded_value) return parent else: From b751130f303378c6f59a01d8fd16e5c854501d33 Mon Sep 17 00:00:00 2001 From: notactuallyfinn Date: Fri, 28 Nov 2025 13:52:45 +0100 Subject: [PATCH 158/247] began commenting and adding doc-strings to ld_list --- src/hermes/model/types/__init__.py | 1 + src/hermes/model/types/ld_container.py | 8 +-- src/hermes/model/types/ld_list.py | 97 +++++++++++++++++++++++--- 3 files changed, 93 insertions(+), 13 deletions(-) diff --git a/src/hermes/model/types/__init__.py b/src/hermes/model/types/__init__.py index 3cf51f35..8ab05171 100644 --- a/src/hermes/model/types/__init__.py +++ b/src/hermes/model/types/__init__.py @@ -3,6 +3,7 @@ # SPDX-License-Identifier: Apache-2.0 # SPDX-FileContributor: Michael Meinel +# SPDX-FileContributor: Michael Fritzsche from datetime import date, time, datetime diff --git a/src/hermes/model/types/ld_container.py b/src/hermes/model/types/ld_container.py index b443ae1d..36a37e9e 100644 --- a/src/hermes/model/types/ld_container.py +++ b/src/hermes/model/types/ld_container.py @@ -3,6 +3,7 @@ # SPDX-License-Identifier: Apache-2.0 # SPDX-FileContributor: Michael Meinel +# SPDX-FileContributor: Michael Fritzsche from .pyld_util import JsonLdProcessor, bundled_loader @@ -38,7 +39,7 @@ class ld_container: def __init__( self: Self, - data: list, + data: list[EXPANDED_JSON_LD_VALUE], *, parent: Union["ld_container", None] = None, key: Union[str, None] = None, @@ -51,12 +52,12 @@ def __init__( :param self: The instance of ld_container to be initialized. :type self: Self :param data: The expanded json-ld data that is mapped. - :type data: list + :type data: list[EXPANDED_JSON_LD_VALUE] :param parent: parent node of this container. :type parent: ld_container | None :param key: key into the parent container. :type key: str | None - :param key: index into the parent container. + :param index: index into the parent container. :type index: int | None :param context: local context for this container. :type context: list[str | JSON_LD_CONTEXT_DICT] | None @@ -64,7 +65,6 @@ def __init__( :return: :rtype: None """ - # Store basic data self.parent = parent self.key = key diff --git a/src/hermes/model/types/ld_list.py b/src/hermes/model/types/ld_list.py index fd958b65..9fec82fc 100644 --- a/src/hermes/model/types/ld_list.py +++ b/src/hermes/model/types/ld_list.py @@ -5,17 +5,56 @@ # SPDX-FileContributor: Michael Meinel # SPDX-FileContributor: Michael Fritzsche -from .ld_container import ld_container +from .ld_container import ( + ld_container, + JSON_LD_CONTEXT_DICT, + EXPANDED_JSON_LD_VALUE, + COMPACTED_JSON_LD_VALUE, + JSON_LD_VALUE, + TIME_TYPE, + BASIC_TYPE, +) +from typing import Union, Self, Any -class ld_list(ld_container): - """ An JSON-LD container resembling a list. """ - def __init__(self, data, *, parent=None, key=None, index=None, context=None): +class ld_list(ld_container): + """ An JSON-LD container resembling a list ("@set", "@list" or "@graph"). """ + + def __init__( + self: Self, + data: Union[list[str], list[dict[str, Union[BASIC_TYPE, EXPANDED_JSON_LD_VALUE]]]], + *, + parent: Union["ld_container", None] = None, + key: Union[str, None] = None, + index: Union[int, None] = None, + context: Union[list[Union[str, JSON_LD_CONTEXT_DICT]], None] = None, + ) -> None: + """ + Create a new ld_list.py container. + + :param self: The instance of ld_list to be initialized. + :type self: Self + :param data: The expanded json-ld data that is mapped (must be valid for @set, @list or @graph) + :type data: list[str] | list[dict[str, BASIC_TYPE | EXPANDED_JSON_LD_VALUE]] + :param parent: parent node of this container. + :type parent: ld_container | None + :param key: key into the parent container. + :type key: str | None + :param index: index into the parent container. + :type index: int | None + :param context: local context for this container. + :type context: list[str | JSON_LD_CONTEXT_DICT] | None + + :return: + :rtype: None + """ + # check for validity of data if not isinstance(key, str): raise ValueError("The key is not a string or was omitted.") if not isinstance(data, list): raise ValueError("The given data does not represent an ld_list.") + # infer the container type and item_list from data if self.is_ld_list(data): if "@list" in data[0]: self.container_type = "@list" @@ -28,34 +67,74 @@ def __init__(self, data, *, parent=None, key=None, index=None, context=None): else: self.container_type = "@set" self.item_list = data - if key == "@type" and not all(isinstance(item, str) for item in self.item_list): - raise ValueError("A given value for @type is not a string.") - if key != "@type" and not all(isinstance(item, dict) for item in self.item_list): + # further validity checks + if key == "@type": + if any(not isinstance(item, str) for item in self.item_list) or self.container_type != "@set": + raise ValueError("A given value for @type is not a string.") + elif any(not isinstance(item, dict) for item in self.item_list): raise ValueError("A given value is not properly expanded.") + # call super constructor super().__init__(data, parent=parent, key=key, index=index, context=context) - def __getitem__(self, index): + def __getitem__( + self: Self, index: Union[int, slice] + ) -> Union[BASIC_TYPE, TIME_TYPE, ld_container, list[Union[BASIC_TYPE, TIME_TYPE, ld_container]]]: + """ + Get the item(s) at position index in a pythonized form. + + :param self: The ld_list the items are taken from. + :type self: Self + :param index: The positon(s) from which the item(s) is/ are taken. + :type index: int | slice + + :return: The pythonized item(s) at index. + :rtype: BASIC_TYPE | TIME_TYPE | ld_container | list[BASIC_TYPE | TIME_TYPE | ld_container]] + """ + # handle slices by applying them to a list of indices and then getting the items at those if isinstance(index, slice): return [self[i] for i in [*range(len(self))][index]] + # get the item from the item_list and pythonize it. If necessary add the index. item = self._to_python(self.key, self.item_list[index]) if isinstance(item, ld_container): item.index = index return item - def __setitem__(self, index, value): + def __setitem__( + self: Self, index: Union[int, slice], value: Union[JSON_LD_VALUE, BASIC_TYPE, TIME_TYPE, ld_container] + ) -> None: + """ + Set the item(s) at position index to the given value(s). + All given values are expanded. If any are assimilated by self all items that would be added by this are added. + + :param self: The ld_list the items are set in. + :type self: Self + :param index: The positon(s) at which the item(s) is/ are set. + :type index: int | slice + :param value: The new value(s). + :type value: Union[JSON_LD_VALUE, BASIC_TYPE, TIME_TYPE, ld_container] + + :return: + :rtype: None + """ if not isinstance(index, slice): + # expand the value value = self._to_expanded_json([value]) + # the returned value is always a list but my contain more then one item + # therefor a slice on the item_list is used to add the expanded value(s) if index != -1: self.item_list[index:index+1] = value else: self.item_list[index:] = value return + # check if the given values can be iterated (value does not have to be a list) try: iter(value) except TypeError as exc: raise TypeError("must assign iterable to extended slice") from exc + # expand the values and merge all expanded values into one list expanded_value = ld_container.merge_to_list(*[self._to_expanded_json([val]) for val in value]) + # set the values at index to the expanded values self.item_list[index] = [val[0] if isinstance(val, list) else val for val in expanded_value] def __delitem__(self, index): From 3c243f281964799df65071f83aac54329e473632 Mon Sep 17 00:00:00 2001 From: notactuallyfinn Date: Fri, 5 Dec 2025 09:14:03 +0100 Subject: [PATCH 159/247] fixed bug in _to_expanded_json --- src/hermes/model/types/ld_container.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/src/hermes/model/types/ld_container.py b/src/hermes/model/types/ld_container.py index 36a37e9e..feff2742 100644 --- a/src/hermes/model/types/ld_container.py +++ b/src/hermes/model/types/ld_container.py @@ -252,7 +252,10 @@ def _to_expanded_json(self: Self, value: JSON_LD_VALUE) -> EXPANDED_JSON_LD_VALU [(new_key, temp) for new_key in temp.keys() if isinstance(temp[new_key], special_types)] ) elif isinstance(temp, ld_container): - ref[key] = temp._data[0] + if temp.__class__.__name__ == "ld_list" and temp.container_type == "@set": + ref[key] = temp._data + else: + ref[key] = temp._data[0] elif isinstance(temp, datetime): ref[key] = {"@value": temp.isoformat(), "@type": "schema:DateTime"} elif isinstance(temp, date): From e0f0fc9b4fd1800f92412c8e871fc4afc424504d Mon Sep 17 00:00:00 2001 From: notactuallyfinn Date: Fri, 5 Dec 2025 09:14:29 +0100 Subject: [PATCH 160/247] added a few test cases to delete of ld_list --- test/hermes_test/model/types/test_ld_list.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/test/hermes_test/model/types/test_ld_list.py b/test/hermes_test/model/types/test_ld_list.py index 1b0a4127..aaeb548f 100644 --- a/test/hermes_test/model/types/test_ld_list.py +++ b/test/hermes_test/model/types/test_ld_list.py @@ -106,6 +106,16 @@ def test_build_in_del(): assert li == ["bar"] del li[0] assert li == [] + di = ld_dict([{}], context=[{"schema": "https://schema.org/"}]) + di["schema:name"] = [{"@value": "foo"}, {"@value": "bar"}, {"@value": "foobar"}] + li = di["schema:name"] + del li[0] + assert len(di["schema:name"]) == 2 + di = ld_dict([{}], context=[{"schema": "https://schema.org/"}]) + di["schema:name"] = [{"@list": [{"@value": "foo"}, {"@value": "bar"}, {"@value": "foobar"}]}] + li = di["schema:name"] + del di["schema:name"][0:3:2] + assert len(di["schema:name"]) == 1 and len(li) == 1 def test_build_in_len(): From 1b5656ed0b5c79102c634f7979d1ffcedaeb2963 Mon Sep 17 00:00:00 2001 From: notactuallyfinn Date: Fri, 5 Dec 2025 09:14:44 +0100 Subject: [PATCH 161/247] added more comments to ld_list --- src/hermes/model/types/ld_list.py | 150 +++++++++++++++++++++++++++--- 1 file changed, 138 insertions(+), 12 deletions(-) diff --git a/src/hermes/model/types/ld_list.py b/src/hermes/model/types/ld_list.py index 9fec82fc..e26a16ee 100644 --- a/src/hermes/model/types/ld_list.py +++ b/src/hermes/model/types/ld_list.py @@ -5,6 +5,7 @@ # SPDX-FileContributor: Michael Meinel # SPDX-FileContributor: Michael Fritzsche +from types import NotImplementedType from .ld_container import ( ld_container, JSON_LD_CONTEXT_DICT, @@ -15,7 +16,7 @@ BASIC_TYPE, ) -from typing import Union, Self, Any +from typing import Generator, Union, Self class ld_list(ld_container): @@ -58,15 +59,15 @@ def __init__( if self.is_ld_list(data): if "@list" in data[0]: self.container_type = "@list" - self.item_list = data[0]["@list"] + self.item_list: list = data[0]["@list"] elif "@graph" in data[0]: self.container_type = "@graph" - self.item_list = data[0]["@graph"] + self.item_list: list = data[0]["@graph"] else: raise ValueError("The given @set is not fully expanded.") else: self.container_type = "@set" - self.item_list = data + self.item_list: list = data # further validity checks if key == "@type": if any(not isinstance(item, str) for item in self.item_list) or self.container_type != "@set": @@ -137,67 +138,192 @@ def __setitem__( # set the values at index to the expanded values self.item_list[index] = [val[0] if isinstance(val, list) else val for val in expanded_value] - def __delitem__(self, index): + def __delitem__(self: Self, index: Union[int, slice]) -> None: + """ + Delete the item(s) at position index. + Note that if a deleted object is represented by an ld_container druing this process it will still exist + and not be modified afterwards. + + :param self: The ld_list the items are deleted from. + :type self: Self + :param index: The positon(s) at which the item(s) is/ are deleted. + :type index: int | slice + + :return: + :rtype: None + """ del self.item_list[index] - def __len__(self): + def __len__(self: Self) -> int: + """ + Returns the number of items in this ld_list. + + :param self: The ld_list whose length is to be returned. + :type self: Self + + :return: The length of self. + :rtype: int + """ return len(self.item_list) - def __iter__(self): + def __iter__(self: Self) -> Generator[Union[BASIC_TYPE | TIME_TYPE | ld_container], None, None]: + """ + Returns an iterator over the pythonized values contained in self. + + :param self: The ld_list over whose items is iterated. + :type self: Self + + :return: The Iterator over self's values. + :rtype: Generator[Union[BASIC_TYPE | TIME_TYPE | ld_container], None, None] + """ + # return an Iterator over each value in self in its pythonized from for index, value in enumerate(self.item_list): item = self._to_python(self.key, value) + # add which entry an ld_container is stored at, if item is an ld_container if isinstance(item, ld_container): item.index = index yield item - def __contains__(self, value): + def __contains__(self: Self, value: JSON_LD_VALUE) -> bool: + """ + Returns whether or not value is contained in self. + Note that it is not directly checked if value is in self.item_list. + First value is expanded then it is checked if value is in self.item_list. + If however value is assimilated by self it is checked if all values are contained in self.item_list. + Also note that the checks whether the expanded value is in self.item_list is based on ld_list.__eq__. + That means that this value is 'contained' in self.item_list if any object in self.item_list + has the same @id like it or it xor the object in the item_list has an id an all other values are the same. + + :param self: The ld_list that is checked if it contains value. + :type self: Self + :param value: The object being checked whether or not it is in self. + :type value: JSON_LD_VALUE + + :return: Whether or not value is being considered to be contained in self. + :rtype: bool + """ + # expand value expanded_value = self._to_expanded_json([value]) + # empty list -> no value to check if len(expanded_value) == 0: return True + # call contains on all items in the expanded list if it contains more then one item + # and return true only if all calls return true if len(expanded_value) > 1: return all(val in self for val in expanded_value) self_attributes = {"parent": self.parent, "key": self.key, "index": self.index, "context": self.full_context} + # create a temporary list containing the expanded value + # check for equality with a list containg exactly one item from self.item_list for every item in self.item_list + # return true if for any item in self.item_list this check returns true if self.container_type == "@set": temp_list = ld_list(expanded_value, **self_attributes) return any(temp_list == ld_list([val], **self_attributes) for val in self.item_list) temp_list = ld_list([{self.container_type: expanded_value}], **self_attributes) return any(temp_list == ld_list([{self.container_type: [val]}], **self_attributes) for val in self.item_list) - def __eq__(self, other): + def __eq__( + self: Self, + other: Union[ + "ld_list", + list[Union[JSON_LD_VALUE, BASIC_TYPE, TIME_TYPE, ld_container]], + dict[str, list[Union[JSON_LD_VALUE, BASIC_TYPE, TIME_TYPE, ld_container]]], + ], + ) -> Union[bool, NotImplementedType]: + """ + Returns wheter or not self is considered to be equal to other. + If other is not an ld_list, it is converted first. + For each index it is checked if the ids of the items at index in self and other match if both have one, + if only one has an id all other values are compared. + If self or other is considered unordered the comparison is more difficult and ... + + :param self: The ld_list other is compared to. + :type self: Self + :param other: The list/ container/ ld_list self is compared to. + :type other: ld_list | list[JSON_LD_VALUE | BASIC_TYPE | TIME_TYPE | ld_container] + | dict[str, list[JSON_LD_VALUE | BASIC_TYPE | TIME_TYPE | ld_container]] + + :return: Whether or not self and other are considered equal. + If other is of the wrong type return NotImplemented instead. + :rtype: bool | NotImplementedType + """ # TODO: ld_lists with container_type "@set" have to be considered unordered + # check if other has an acceptable type if not (isinstance(other, (list, ld_list)) or ld_list.is_container(other)): return NotImplemented + + # convert other into an ld_list if it isn't one already if isinstance(other, dict): other = [other] if isinstance(other, list): if ld_list.is_ld_list(other): other = ld_list.get_item_list_from_container(other[0]) other = self.from_list(other, parent=self.parent, key=self.key, context=self.context) + + # check if the length matches if len(self.item_list) != len(other.item_list): return False + + # check for special case (= key is @type) if (self.key == "@type") ^ (other.key == "@type"): return False if self.key == other.key == "@type": + # lists will only contain string return self.item_list == other.item_list + + # check if at each index the items are considered equal for index, (item, other_item) in enumerate(zip(self.item_list, other.item_list)): + # check if items are values if ((ld_container.is_typed_json_value(item) or ld_container.is_json_value(item)) and (ld_container.is_typed_json_value(other_item) or ld_container.is_json_value(other_item))): if not ld_container.are_values_equal(item, other_item): return False continue + # check if both contain an id and compare if "@id" in item and "@id" in other_item: - return item["@id"] == other_item["@id"] + if item["@id"] != other_item["@id"]: + return False + continue + # get the 'real' items (i.e. can also be ld_dicts or ld_lists) item = self[index] other_item = other[index] + # compare using the correct equals method res = item.__eq__(other_item) if res == NotImplemented: + # swap order if first try returned NotImplemented res = other_item.__eq__(item) - if res is False or res == NotImplemented: # res is not True + # return false if the second comparison also fails or one of them returned false + if res is False or res == NotImplemented: return False + + # return true because no unequal elements where found return True - def __ne__(self, other): + def __ne__( + self: Self, + other: Union[ + "ld_list", + list[Union[JSON_LD_VALUE, BASIC_TYPE, TIME_TYPE, ld_container]], + dict[str, list[Union[JSON_LD_VALUE, BASIC_TYPE, TIME_TYPE, ld_container]]], + ], + ) -> Union[bool, NotImplementedType]: + """ + Returns whether or not self and other not considered to be equal. + (Returns not self.__eq__(other) if the return type is bool. + See ld_list.__eq__ for more details on the comparison.) + + :param self: The ld_list other is compared to. + :type self: Self + :param other: The list/ container/ ld_list self is compared to. + :type other: ld_list | list[JSON_LD_VALUE | BASIC_TYPE | TIME_TYPE | ld_container] + | dict[str, list[JSON_LD_VALUE | BASIC_TYPE | TIME_TYPE | ld_container]] + + :return: Whether or not self and other are not considered equal. + If other is of the wrong type return NotImplemented instead. + :rtype: bool | NotImplementedType + """ + # compare self and other using __eq__ x = self.__eq__(other) + # return NotImplemented if __eq__ did so and else the inverted result of __eq__ if x is NotImplemented: return NotImplemented return not x From 071630ce03d52f5c5a6839fb3260837154c678a9 Mon Sep 17 00:00:00 2001 From: notactuallyfinn Date: Fri, 5 Dec 2025 14:04:09 +0100 Subject: [PATCH 162/247] added more doc strings --- src/hermes/model/types/ld_container.py | 45 +++++-- src/hermes/model/types/ld_list.py | 167 ++++++++++++++++++++++--- 2 files changed, 187 insertions(+), 25 deletions(-) diff --git a/src/hermes/model/types/ld_container.py b/src/hermes/model/types/ld_container.py index feff2742..b1b55af4 100644 --- a/src/hermes/model/types/ld_container.py +++ b/src/hermes/model/types/ld_container.py @@ -13,17 +13,20 @@ JSON_LD_CONTEXT_DICT = dict[str, Union[str, "JSON_LD_CONTEXT_DICT"]] BASIC_TYPE = Union[str, float, int, bool] -EXPANDED_JSON_LD_VALUE = Union[ - list[Union["EXPANDED_JSON_LD_VALUE", BASIC_TYPE]], dict[str, Union["EXPANDED_JSON_LD_VALUE", BASIC_TYPE]] -] +EXPANDED_JSON_LD_VALUE = list[dict[str, Union["EXPANDED_JSON_LD_VALUE", BASIC_TYPE]]] COMPACTED_JSON_LD_VALUE = Union[ - list[Union["COMPACTED_JSON_LD_VALUE", BASIC_TYPE]], dict[str, Union["COMPACTED_JSON_LD_VALUE", BASIC_TYPE]] + list[Union[dict[str, Union["COMPACTED_JSON_LD_VALUE", BASIC_TYPE]], BASIC_TYPE]], + dict[str, Union["COMPACTED_JSON_LD_VALUE", BASIC_TYPE]], ] TIME_TYPE = Union[datetime, date, time] JSON_LD_VALUE = Union[ list[Union["JSON_LD_VALUE", BASIC_TYPE, TIME_TYPE, "ld_container"]], dict[str, Union["JSON_LD_VALUE", BASIC_TYPE, TIME_TYPE, "ld_container"]], ] +PYTHONIZED_LD_CONTAINER = Union[ + list[Union["PYTHONIZED_LD_CONTAINER", BASIC_TYPE, TIME_TYPE]], + dict[str, Union["PYTHONIZED_LD_CONTAINER", BASIC_TYPE, TIME_TYPE]], +] class ld_container: @@ -33,13 +36,33 @@ class ld_container: A linked data container impelements a view on the expanded form of an JSON-LD document. It allows to easily interacts them by hinding all the nesting and automatically mapping between different forms. + + :ivar active_ctx: The active context that is used by the json-ld processor. + :ivar context: The context exclusive to this ld_container and all its childs + (it can still be the same as e.g. parent.context) + :ivartype context: list[str | JSON_LD_CONTEXT_DICT] + :ivar full_context: The context of this ld_container and all its parents merged into one list. + :ivartype full_context: list[str | JSON_LD_CONTEXT_DICT] + :ivar index: The index into the parent container if it is a list. + :ivartype index: int + :ivar key: The key into the inner most parent that is a dict of this ld_container. + :ivartype key: str + :ivar ld_value: The expanded JSON-LD value this object represents. + :ivartype ld_value: EXPANDED_JSON_LD_VALUE + :ivar parent: The ld_container this one is directly contained in. + :ivartype parent: ld_container + :ivar path: The path from the outer most parent to this ld_container. + :ivartype path: list[str | int] + + :cvar ld_proc: The JSON-LD processor object for all ld_container. + :cvartype ld_proc: JsonLdProcessor """ ld_proc = JsonLdProcessor() def __init__( self: Self, - data: list[EXPANDED_JSON_LD_VALUE], + data: EXPANDED_JSON_LD_VALUE, *, parent: Union["ld_container", None] = None, key: Union[str, None] = None, @@ -52,7 +75,7 @@ def __init__( :param self: The instance of ld_container to be initialized. :type self: Self :param data: The expanded json-ld data that is mapped. - :type data: list[EXPANDED_JSON_LD_VALUE] + :type data: EXPANDED_JSON_LD_VALUE :param parent: parent node of this container. :type parent: ld_container | None :param key: key into the parent container. @@ -101,7 +124,7 @@ def add_context(self: Self, context: list[Union[str | JSON_LD_CONTEXT_DICT]]) -> self.active_ctx = self.ld_proc.process_context(self.active_ctx, context, {"documentLoader": bundled_loader}) @property - def full_context(self: Self) -> Union[list[Union[str, JSON_LD_CONTEXT_DICT]], None]: + def full_context(self: Self) -> list[Union[str, JSON_LD_CONTEXT_DICT]]: """ Return the context of the ld_container merged with the full_context of its parent. @@ -110,7 +133,7 @@ def full_context(self: Self) -> Union[list[Union[str, JSON_LD_CONTEXT_DICT]], No :return: The context of the ld_container merged with the full_context of its parent via ld_container.merge_to_list or just the context of this ld_container if self.parent is None. - :rtype: list[str | JSON_LD_CONTEXT_DICT] | None + :rtype: list[str | JSON_LD_CONTEXT_DICT] """ if self.parent is not None: return self.merge_to_list(self.parent.full_context, self.context) @@ -181,7 +204,9 @@ def _to_python( return value - def _to_expanded_json(self: Self, value: JSON_LD_VALUE) -> EXPANDED_JSON_LD_VALUE: + def _to_expanded_json( + self: Self, value: JSON_LD_VALUE + ) -> Union[EXPANDED_JSON_LD_VALUE, dict[str, EXPANDED_JSON_LD_VALUE]]: """ Returns an expanded version of the given value. @@ -206,7 +231,7 @@ def _to_expanded_json(self: Self, value: JSON_LD_VALUE) -> EXPANDED_JSON_LD_VALU The return type is based on the type of self:
  • If type(self) == ld_dict: the returned values type is dict
  • If type(self) == ld_list: the returned values type is list
- :rtype: EXPANDED_JSON_LD_VALUE + :rtype: EXPANDED_JSON_LD_VALUE | dict[str, EXPANDED_JSON_LD_VALUE] """ # search for an ld_dict that is either self or the inner most parents parent of self that is an ld_dict # while searching build a path such that it leads from the found ld_dicts ld_value to selfs data_dict/ item_list diff --git a/src/hermes/model/types/ld_list.py b/src/hermes/model/types/ld_list.py index e26a16ee..c10a6aa8 100644 --- a/src/hermes/model/types/ld_list.py +++ b/src/hermes/model/types/ld_list.py @@ -10,21 +10,29 @@ ld_container, JSON_LD_CONTEXT_DICT, EXPANDED_JSON_LD_VALUE, - COMPACTED_JSON_LD_VALUE, + PYTHONIZED_LD_CONTAINER, JSON_LD_VALUE, TIME_TYPE, BASIC_TYPE, ) -from typing import Generator, Union, Self +from typing import Generator, Union, Self, Any class ld_list(ld_container): - """ An JSON-LD container resembling a list ("@set", "@list" or "@graph"). """ + """ + An JSON-LD container resembling a list ("@set", "@list" or "@graph"). + See also :class:`ld_container` + + :ivar container_type: The type of JSON-LD container the list is representing. ("@set", "@list", "graph") + :ivartype container_type: str + :ivar item_list: The list of items (in expanded JSON-LD form) that are contained in this ld_list. + :ivartype item_list: EXPANDED_JSON_LD_VALUE + """ def __init__( self: Self, - data: Union[list[str], list[dict[str, Union[BASIC_TYPE, EXPANDED_JSON_LD_VALUE]]]], + data: Union[list[str], list[dict[str, EXPANDED_JSON_LD_VALUE]]], *, parent: Union["ld_container", None] = None, key: Union[str, None] = None, @@ -32,7 +40,7 @@ def __init__( context: Union[list[Union[str, JSON_LD_CONTEXT_DICT]], None] = None, ) -> None: """ - Create a new ld_list.py container. + Create a new ld_list container. :param self: The instance of ld_list to be initialized. :type self: Self @@ -49,6 +57,9 @@ def __init__( :return: :rtype: None + + :raises ValueError: bla + :raises ValueError: bla """ # check for validity of data if not isinstance(key, str): @@ -278,7 +289,7 @@ def __eq__( if not ld_container.are_values_equal(item, other_item): return False continue - # check if both contain an id and compare + # check if both contain an id and compare if "@id" in item and "@id" in other_item: if item["@id"] != other_item["@id"]: return False @@ -328,25 +339,78 @@ def __ne__( return NotImplemented return not x - def append(self, value): + def append(self: Self, value: Union[JSON_LD_VALUE, BASIC_TYPE, TIME_TYPE, ld_container]) -> None: + """ + Append the item to the given ld_list self. + The given value is expanded. If it is assimilated by self all items that would be added by this are added. + + :param self: The ld_list the item is appended to. + :type self: Self + :param value: The new value. + :type value: Union[JSON_LD_VALUE, BASIC_TYPE, TIME_TYPE, ld_container] + + :return: + :rtype: None + """ self.item_list.extend(self._to_expanded_json([value])) - def extend(self, value): + def extend(self: Self, value: list[Union[JSON_LD_VALUE, BASIC_TYPE, TIME_TYPE, ld_container]]) -> None: + """ + Append the items in value to the given ld_list self. + The given values are expanded. If any are assimilated by self all items that would be added by this are added. + + :param self: The ld_list the items are appended to. + :type self: Self + :param value: The new values. + :type value: list[Union[JSON_LD_VALUE, BASIC_TYPE, TIME_TYPE, ld_container]] + + :return: + :rtype: None + """ for item in value: self.append(item) - def to_python(self): + def to_python(self: Self) -> list[PYTHONIZED_LD_CONTAINER]: + """ + Return a fully pythonized version of this object where all ld_container are replaced by lists and dicts. + + :param self: The ld_list whose fully pythonized version is returned. + :type self: Self + + :return: The fully pythonized version of self. + :rtype: list[PYTHONIZED_LD_CONTAINER] + """ return [ item.to_python() if isinstance(item, ld_container) else item for item in self ] @classmethod - def is_ld_list(cls, ld_value): + def is_ld_list(cls: Self, ld_value: Any) -> bool: + """ + Returns wheter the given value is considered to be possible of representing an ld_list.
+ I.e. if ld_value is of the form [{container_type: [...]}] where container_type is '@set', '@list' or '@graph'. + + :param ld_value: The value that is checked. + :type ld_value: Any + + :returns: Wheter or not ld_value could represent an ld_list. + :rtype: bool + """ return cls.is_ld_node(ld_value) and cls.is_container(ld_value[0]) @classmethod def is_container(cls, value): + """ + Returns wheter the given value is considered to be possible of representing an json-ld container.
+ I.e. if ld_value is of the form {container_type: [...]} where container_type is '@set', '@list' or '@graph'. + + :param ld_value: The value that is checked. + :type ld_value: Any + + :returns: Wheter or not ld_value could represent a json-ld container. + :rtype: bool + """ return ( isinstance(value, dict) and [*value.keys()] in [["@list"], ["@set"], ["@graph"]] @@ -354,27 +418,100 @@ def is_container(cls, value): ) @classmethod - def from_list(cls, value, *, parent=None, key=None, context=None, container_type="@set"): + def from_list( + cls: Self, + value: list[Union[JSON_LD_VALUE, BASIC_TYPE, TIME_TYPE]], + *, + parent: Union[ld_container, None] = None, + key: Union[str, None] = None, + context: Union[str, JSON_LD_CONTEXT_DICT, list[Union[str, JSON_LD_CONTEXT_DICT]], None] = None, + container_type: str = "@set" + ) -> "ld_list": + """ + Creates a ld_list from the given list with the given parent, key, context and container_type.
+ Note that only container_type '@set' is valid for key '@type'.
+ Further more note that if parent would assimilate the values in value no new ld_list is created + and the given values are appended to parent instead and parent is returned. + + :param value: The list of values the ld_list should be created from. + :type value: list[JSON_LD_VALUE | BASIC_TYPE | TIME_TYPE] + :param parent: The parent container of the new ld_list.
If value is assimilated by parent druing JSON-LD + expansion parent is extended by value and parent is returned. + :type parent: ls_container | None + :param key: The key into the inner most parent container representing a dict of the new ld_list. + :type: key: str | None + :param context: The context for the new list (is will also inherit the context of parent).
+ Note that this context won't be added to parent if value is assimilated by parent and parent is returned. + :type context: str | JSON_LD_CONTEXT_DICT | list[str | JSON_LD_CONTEXT_DICT] | None + :param container_type: The container type of the new list valid are '@set', '@list' and '@graph'.
+ If value is assimilated by parent and parent is returned the given container_type won't affect + the container type of parent.
Also note that only '@set' is valid if key is '@type'. + :type container_type: str + + :return: The new ld_list build from value or if value is assimilated by parent, parent extended by value. + :rtype: ld_list + + :raises ValueError: If key is '@type' and container_type is not '@set'. + """ + # TODO: handle context if not of type list or None + # validate container_type if key == "@type": - container_type = "@set" - if container_type != "@set": + if container_type != "@set": + raise ValueError(f"The given container type is {container_type} which is invalid for a list" + " containing values for '@type' (valid is only '@set').") + if container_type in {"@list", "@graph"}: + # construct json-ld container that indicates the container type value = [{container_type: value}] + elif container_type != "@set": + raise ValueError(f"Invalid container type: {container_type}. (valid are only '@set', '@list' and '@graph')") + if parent is not None: + # expand value in the "context" of parent if isinstance(parent, ld_list): expanded_value = parent._to_expanded_json([value]) if (len(expanded_value) != 1 or not (isinstance(expanded_value[0], list) or cls.is_container(expanded_value[0]))): + # parent assimilated value druing expansion. Therefor the values are appended and parent returned + # if value is assimilated but contained only one list after expansion this list is used for + # the new list instead of expanding parent parent.extend(expanded_value) return parent else: expanded_value = parent._to_expanded_json({key: value})[cls.ld_proc.expand_iri(parent.active_ctx, key)] else: + # create a temporary ld_list which is necessary for expansion + # value is not passed in a list as usual because value should be treated like the item list of the + # temporary object and not like a item in it expanded_value = cls([], parent=None, key=key, context=context)._to_expanded_json(value) + + # construct and return the final ld_list from the expanded_value return cls(expanded_value, parent=parent, key=key, context=context) @classmethod - def get_item_list_from_container(cls, ld_value): + def get_item_list_from_container(cls: Self, ld_value: dict[str, list[Any]]) -> list[Any]: + """ + Returns the item list from a container, the given ld_value, (i.e. {container_type: item_list}).
+ Only '@set', '@list' and '@graph' are valid container types. + + :param ld_value: The container whose item list is to be returned. + :type ld_value: dict[str, list[Any]] + + :returns: The list the container holds. + :rtype: list[Any] + + :raises ValueError: If the item_container is not a dict. + :raises ValueError: If the container_type is not exactly one of '@set', '@list' and '@graph'. + :raises ValueError: If the item_list is no list. + """ + if type(ld_value) != dict: + raise ValueError(f"The given data {ld_value} is not a dictionary and therefor no container.") + if len(ld_value.keys()) != 1: + raise ValueError(f"The given data contains two many or few entries ({len(ld_value.keys())})." + " It should be only one entry: '@set', '@list' or '@graph' as key and a list as value.") + # find the container type to return the item_list for cont in {"@list", "@set", "@graph"}: if cont in ld_value: + if type(ld_value[cont]) != list: + raise ValueError(f"The item list of {ld_value} is of type {type(ld_value[cont])} and not list.") return ld_value[cont] - raise ValueError("The given data does not represent a container.") + raise ValueError(f"The given data {ld_value} does not represent a container.") From d4b34b41922fc02230ea1cd3c588a490b8b0aea3 Mon Sep 17 00:00:00 2001 From: Michael Fritzsche Date: Mon, 8 Dec 2025 08:39:21 +0100 Subject: [PATCH 163/247] corrected type hints --- src/hermes/model/types/ld_list.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/hermes/model/types/ld_list.py b/src/hermes/model/types/ld_list.py index c10a6aa8..bba5afcb 100644 --- a/src/hermes/model/types/ld_list.py +++ b/src/hermes/model/types/ld_list.py @@ -386,7 +386,7 @@ def to_python(self: Self) -> list[PYTHONIZED_LD_CONTAINER]: ] @classmethod - def is_ld_list(cls: Self, ld_value: Any) -> bool: + def is_ld_list(cls: type[Self], ld_value: Any) -> bool: """ Returns wheter the given value is considered to be possible of representing an ld_list.
I.e. if ld_value is of the form [{container_type: [...]}] where container_type is '@set', '@list' or '@graph'. @@ -400,7 +400,7 @@ def is_ld_list(cls: Self, ld_value: Any) -> bool: return cls.is_ld_node(ld_value) and cls.is_container(ld_value[0]) @classmethod - def is_container(cls, value): + def is_container(cls: type[Self], value: Any) -> bool: """ Returns wheter the given value is considered to be possible of representing an json-ld container.
I.e. if ld_value is of the form {container_type: [...]} where container_type is '@set', '@list' or '@graph'. @@ -419,7 +419,7 @@ def is_container(cls, value): @classmethod def from_list( - cls: Self, + cls: type[Self], value: list[Union[JSON_LD_VALUE, BASIC_TYPE, TIME_TYPE]], *, parent: Union[ld_container, None] = None, @@ -488,7 +488,7 @@ def from_list( return cls(expanded_value, parent=parent, key=key, context=context) @classmethod - def get_item_list_from_container(cls: Self, ld_value: dict[str, list[Any]]) -> list[Any]: + def get_item_list_from_container(cls: type[Self], ld_value: dict[str, list[Any]]) -> list[Any]: """ Returns the item list from a container, the given ld_value, (i.e. {container_type: item_list}).
Only '@set', '@list' and '@graph' are valid container types. From f0f18188037599fe4db69af58723929760110b45 Mon Sep 17 00:00:00 2001 From: Michael Fritzsche Date: Mon, 8 Dec 2025 09:02:33 +0100 Subject: [PATCH 164/247] documented the rest of the methods in ld_container --- src/hermes/model/types/ld_container.py | 136 +++++++++++++++++++++++-- 1 file changed, 125 insertions(+), 11 deletions(-) diff --git a/src/hermes/model/types/ld_container.py b/src/hermes/model/types/ld_container.py index b1b55af4..a0a0bfa5 100644 --- a/src/hermes/model/types/ld_container.py +++ b/src/hermes/model/types/ld_container.py @@ -355,14 +355,43 @@ def _to_expanded_json_deprecated(self, key, value): return ld_value def __repr__(self: Self) -> str: + """ + Returns a short string representation of this object. + + :param self: The object whose representation is returned. + :type self: Self + + :returns: The short representation of self. + :rtype: str + """ return f"{type(self).__name__}({self._data})" def __str__(self: Self) -> str: + """ + Returns a string representation of this object. + + :param self: The object whose representation is returned. + :type self: Self + + :returns: The representation of self. + :rtype: str + """ return str(self.to_python()) def compact( - self: Self, context: Union[list[JSON_LD_CONTEXT_DICT], JSON_LD_CONTEXT_DICT] = None + self: Self, context: Union[list[Union[JSON_LD_CONTEXT_DICT, str]], JSON_LD_CONTEXT_DICT, str, None] = None ) -> COMPACTED_JSON_LD_VALUE: + """ + Returns the compacted version of the given ld_container using its context only if none was supplied. + + :param self: The ld_container that is to be compacted. + :type self: Self + :param context: The context to use for the compaction. If None the context of self is used. + :type context: list[JSON_LD_CONTEXT_DICT | str] | JSON_LD_CONTEXT_DICT | str | None + + :returns: The compacted version of selfs JSON-LD representation. + :rtype: COMPACTED_JSON_LD_VALUE + """ return self.ld_proc.compact( self.ld_value, context or self.context, {"documentLoader": bundled_loader, "skipExpand": True} ) @@ -371,7 +400,7 @@ def to_python(self): raise NotImplementedError() @classmethod - def merge_to_list(cls: Self, *args: tuple[Any]) -> list[Any]: + def merge_to_list(cls: type[Self], *args: tuple[Any]) -> list[Any]: """ Returns a list that is contains all non-list items from args and all items in the lists in args. @@ -394,42 +423,127 @@ def merge_to_list(cls: Self, *args: tuple[Any]) -> list[Any]: return [head, *cls.merge_to_list(*tail)] @classmethod - def is_ld_node(cls, ld_value): + def is_ld_node(cls: type[Self], ld_value: Any) -> bool: + """ + Returns wheter the given value is considered to be possible of representing an expanded JSON-LD node.
+ I.e. if ld_value is of the form [{a: b, ..., y: z}]. + + :param ld_value: The value that is checked. + :type ld_value: Any + + :returns: Wheter or not ld_value could represent an expanded JSON-LD node. + :rtype: bool + """ return isinstance(ld_value, list) and len(ld_value) == 1 and isinstance(ld_value[0], dict) @classmethod - def is_ld_id(cls, ld_value): + def is_ld_id(cls: type[Self], ld_value: Any) -> bool: + """ + Returns wheter the given value is considered to be possible of representing an expanded JSON-LD node + containing only an @id value.
+ I.e. if ld_value is of the form [{"@id": ...}]. + + :param ld_value: The value that is checked. + :type ld_value: Any + + :returns: Wheter or not ld_value could represent an expanded JSON-LD node containing only an @id value. + :rtype: bool + """ return cls.is_ld_node(ld_value) and cls.is_json_id(ld_value[0]) @classmethod - def is_ld_value(cls, ld_value): + def is_ld_value(cls: type[Self], ld_value: Any) -> bool: + """ + Returns wheter the given value is considered to be possible of representing an expanded JSON-LD value.
+ I.e. if ld_value is of the form [{"@value": a, ..., x: z}]. + + :param ld_value: The value that is checked. + :type ld_value: Any + + :returns: Wheter or not ld_value could represent an expanded JSON-LD value. + :rtype: bool + """ return cls.is_ld_node(ld_value) and "@value" in ld_value[0] @classmethod - def is_typed_ld_value(cls, ld_value): + def is_typed_ld_value(cls: type[Self], ld_value: Any) -> bool: + """ + Returns wheter the given value is considered to be possible of representing an expanded JSON-LD value + containing a value type.
+ I.e. if ld_value is of the form [{"@value": a, "@type": b, ..., x: z}]. + + :param ld_value: The value that is checked. + :type ld_value: Any + + :returns: Wheter or not ld_value could represent an expanded JSON-LD value containing a value type. + :rtype: bool + """ return cls.is_ld_value(ld_value) and "@type" in ld_value[0] @classmethod - def is_json_id(cls, ld_value): + def is_json_id(cls: type[Self], ld_value: Any) -> bool: + """ + Returns wheter the given value is considered to be possible of representing a non-expanded JSON-LD node + containing only an @id value.
+ I.e. if ld_value is of the form {"@id": ...}. + + :param ld_value: The value that is checked. + :type ld_value: Any + + :returns: Wheter or not ld_value could represent a non-expanded JSON-LD node containing only an @id value. + :rtype: bool + """ return isinstance(ld_value, dict) and ["@id"] == [*ld_value.keys()] @classmethod - def is_json_value(cls, ld_value): + def is_json_value(cls: type[Self], ld_value: Any) -> bool: + """ + Returns wheter the given value is considered to be possible of representing a non-expanded JSON-LD value.
+ I.e. if ld_value is of the form {"@value": b, ..., x: z}. + + :param ld_value: The value that is checked. + :type ld_value: Any + + :returns: Wheter or not ld_value could represent a non-expanded JSON-LD value. + :rtype: bool + """ return isinstance(ld_value, dict) and "@value" in ld_value @classmethod - def is_typed_json_value(cls, ld_value): + def is_typed_json_value(cls: type[Self], ld_value: Any) -> bool: + """ + Returns wheter the given value is considered to be possible of representing a non-expanded JSON-LD value + containing a value type.
+ I.e. if ld_value is of the form {"@value": a, "@type": b, ..., x: z}. + + :param ld_value: The value that is checked. + :type ld_value: Any + + :returns: Wheter or not ld_value could represent a non-expanded JSON-LD value containing a value type. + :rtype: bool + """ return cls.is_json_value(ld_value) and "@type" in ld_value @classmethod - def typed_ld_to_py(cls, data, **kwargs): + def typed_ld_to_py(cls: type[Self], data: list[dict[str, BASIC_TYPE]], **kwargs) -> Union[BASIC_TYPE, TIME_TYPE]: + """ + Returns the value of the given expanded JSON-LD value containing a value type converted into that type. + Meaning the pythonized version of the JSON-LD value data is returned.
+ ld_container.is_typed_ld_value(data) must return True. + + :param data: The value that is that is converted into its pythonized from. + :type data: list[dict[str, BASIC_TYPE]] + + :returns: The pythonized version of data. + :rtype: BASIC_TYPE | TIME_TYPE + """ ld_value = data[0]["@value"] return ld_value @classmethod def are_values_equal( - cls: Self, first: dict[str, Union[BASIC_TYPE, TIME_TYPE]], second: dict[str, Union[BASIC_TYPE, TIME_TYPE]] + cls: type[Self], first: dict[str, Union[BASIC_TYPE, TIME_TYPE]], second: dict[str, Union[BASIC_TYPE, TIME_TYPE]] ) -> bool: """ Returns whether or not the given expanded JSON-LD values are considered equal. From bcc233d0f76afef417cedb72f1cf94c79aba8aa7 Mon Sep 17 00:00:00 2001 From: notactuallyfinn Date: Fri, 12 Dec 2025 14:00:57 +0100 Subject: [PATCH 165/247] implemented list comparison and added tests for it --- src/hermes/model/types/ld_list.py | 247 ++++++++++++++++--- test/hermes_test/model/types/test_ld_list.py | 23 ++ 2 files changed, 240 insertions(+), 30 deletions(-) diff --git a/src/hermes/model/types/ld_list.py b/src/hermes/model/types/ld_list.py index bba5afcb..490ac4b3 100644 --- a/src/hermes/model/types/ld_list.py +++ b/src/hermes/model/types/ld_list.py @@ -5,6 +5,7 @@ # SPDX-FileContributor: Michael Meinel # SPDX-FileContributor: Michael Fritzsche +from collections import deque from types import NotImplementedType from .ld_container import ( ld_container, @@ -16,7 +17,7 @@ BASIC_TYPE, ) -from typing import Generator, Union, Self, Any +from typing import Generator, Hashable, Union, Self, Any class ld_list(ld_container): @@ -241,11 +242,16 @@ def __eq__( ], ) -> Union[bool, NotImplementedType]: """ - Returns wheter or not self is considered to be equal to other. + Returns wheter or not self is considered to be equal to other.
If other is not an ld_list, it is converted first. For each index it is checked if the ids of the items at index in self and other match if both have one, - if only one has an id all other values are compared. - If self or other is considered unordered the comparison is more difficult and ... + if only one has or neither have an id all other values are compared.
+ Note that due to those circumstances equality is not transitve + meaning if a == b and b == c is is not guaranteed that a == c.
+ If self or other is considered unordered the comparison is more difficult. All items in self are compared + with all items in other. On the resulting graph given by the realtion == the Hopcroft-Karp algoritm is used + to determine if there exists a bijection reordering self so that the ordered comparison of self with other + returns true. :param self: The ld_list other is compared to. :type self: Self @@ -257,7 +263,6 @@ def __eq__( If other is of the wrong type return NotImplemented instead. :rtype: bool | NotImplementedType """ - # TODO: ld_lists with container_type "@set" have to be considered unordered # check if other has an acceptable type if not (isinstance(other, (list, ld_list)) or ld_list.is_container(other)): return NotImplemented @@ -281,33 +286,215 @@ def __eq__( # lists will only contain string return self.item_list == other.item_list - # check if at each index the items are considered equal - for index, (item, other_item) in enumerate(zip(self.item_list, other.item_list)): - # check if items are values - if ((ld_container.is_typed_json_value(item) or ld_container.is_json_value(item)) and - (ld_container.is_typed_json_value(other_item) or ld_container.is_json_value(other_item))): - if not ld_container.are_values_equal(item, other_item): + if self.container_type == other.container_type == "@list": + # check if at each index the items are considered equal + for index, (item, other_item) in enumerate(zip(self.item_list, other.item_list)): + # check if items are values + if ((ld_container.is_typed_json_value(item) or ld_container.is_json_value(item)) and + (ld_container.is_typed_json_value(other_item) or ld_container.is_json_value(other_item))): + if not ld_container.are_values_equal(item, other_item): + return False + continue + # check if both contain an id and compare + if "@id" in item and "@id" in other_item: + if item["@id"] != other_item["@id"]: + return False + continue + # get the 'real' items (i.e. can also be ld_dicts or ld_lists) + item = self[index] + other_item = other[index] + # compare using the correct equals method + res = item.__eq__(other_item) + if res == NotImplemented: + # swap order if first try returned NotImplemented + res = other_item.__eq__(item) + # return false if the second comparison also fails or one of them returned false + if res is False or res == NotImplemented: return False - continue - # check if both contain an id and compare - if "@id" in item and "@id" in other_item: - if item["@id"] != other_item["@id"]: + # return true because no unequal elements where found + return True + else: + # check which items in self are equal the which in other + equality_pairs = [[] for i in range(len(self))] # j in equality_pairs[i] <=> self[i] == other[j] + for index, item in enumerate(self.item_list): + for other_index, other_item in enumerate(other.item_list): + # check if items are values + if ((ld_container.is_typed_json_value(item) or ld_container.is_json_value(item)) and + (ld_container.is_typed_json_value(other_item) or ld_container.is_json_value(other_item))): + if ld_container.are_values_equal(item, other_item): + equality_pairs[index] += [other_index] + continue + # check if both contain an id and compare + if "@id" in item and "@id" in other_item: + if item["@id"] == other_item["@id"]: + equality_pairs[index] += [other_index] + continue + # get the 'real' items (i.e. can also be ld_dicts or ld_lists) + item = self[index] + other_item = other[index] + # compare using the correct equals method + res = item.__eq__(other_item) + if res == NotImplemented: + # swap order if first try returned NotImplemented + res = other_item.__eq__(item) + # if one of both comparisons returned true the elements are equal + if res: + equality_pairs[index] += [other_index] + if len(equality_pairs[index]) == 0: + # there exists no element in other that is equal to item return False - continue - # get the 'real' items (i.e. can also be ld_dicts or ld_lists) - item = self[index] - other_item = other[index] - # compare using the correct equals method - res = item.__eq__(other_item) - if res == NotImplemented: - # swap order if first try returned NotImplemented - res = other_item.__eq__(item) - # return false if the second comparison also fails or one of them returned false - if res is False or res == NotImplemented: - return False - - # return true because no unequal elements where found - return True + # check if there is a way to chose one index from equality_pairs[i] for every i + # so that there are no two i's with the same chosen index. + # If such a way exists self and other are considered equal. If not they are considered to be not equal. + # solved via a Hopcroft-Karp algorithm variant: + # The bipartite graph is the disjoint union of the vertices 1 to len(self) and + # freely chosen ids for each list in equality_pairs. + # The graph has an edge from i to the id of a list if i is contained in the list. + item_count = len(self) + verticies_set1 = {*range(item_count)} + verticies_set2 = {*range(item_count, 2 * item_count)} + edges = {i: tuple(j for j in verticies_set2 if i in equality_pairs[j - item_count]) for i in verticies_set1} + return ld_list._hopcroft_karp(verticies_set1, verticies_set2, edges) == len(self) + + @classmethod + def _bfs_step( + cls: Self, verticies1: set[Hashable], edges: dict[Hashable, tuple[Hashable]], matches: dict[Hashable, Hashable], + distances: dict[Hashable, Union[int, float]] + ) -> bool: + """ + Completes the BFS step of Hopcroft-Karp. I.e.:
+ Finds the shortest path from all unmatched verticies in verticies1 to any unmatched vertex in any value in edges + where the connecting paths are alternating between matches and its complement.
+ It also marks each vertex in verticies1 with how few verticies from verticies1 have to be passed + to reach the vertex from an unmatched one in verticies1. This is stored in distances. + + :param verticies1: The set of verticies in the left partition of the bipartite graph. + :type verticies1: set[Hashable] + :param edges: The edges in the bipartite graph. (As the edges are bidirectional they are expected to be given in + this format: Dictionary with keys being the vertices in the left partition and values being tuples + of verticies in the right partition.) + :type edges: dict[Hashable, tuple[Hashable]] + :param matches: The current matching of verticies in the left partition with the ones in the right partition. + :type matches: dict[Hashable, Hashable] + :param distances: The reference to the dictionary mapping verticies of the left partition to the minimal + number of verticies in the left partition that will be passed on a path from an unmatched vertex of the left + partition to the vertex that is the key. + :type distances: dict[Hashable, Union[int, float]] + + :returns: Wheter or not a alternating path from an unmatched vertex in the left partition to an unmatched vertex + in the right partition exists. + :rtype: bool + """ + # initialize the queue and set the distances to zero for unmatched vertices and to inf for all others + queue = deque() + for ver in verticies1: + if matches[ver] is None: + distances[ver] = 0 + queue.append(ver) + else: + distances[ver] = float("inf") + distances[None] = float("inf") + # begin BFS + while len(queue) != 0: + ver1 = queue.popleft() + # if the current vertex has a distance less then the current minimal one from an unmatched vertex in the + # left partition to an unmatched one in the right partition + if distances[ver1] < distances[None]: + # iterate over all vertices in the right partition connected to ver1 + for ver2 in edges[ver1]: + # if the vertex ver2 is matched with (or None if not matched) wasn't visited yet + if distances[matches[ver2]] == float("inf"): + # initialize the distance and queue the vertex for further search + distances[matches[ver2]] = distances[ver1] + 1 + queue.append(matches[ver2]) + # if a path to None i.e. an unmatched vertex in the right partition was found return true otherwise false + return distances[None] != float("inf") + + @classmethod + def _dfs_step( + cls: Self, ver: Hashable, edges: dict[Hashable, tuple[Hashable]], matches: dict[Hashable, Hashable], + distances: dict[Hashable, Union[int, float]] + ) -> bool: + """ + Completes the DFS step of Hopcroft-Karp. I.e.:
+ Adds all edges on every path with the minimal path length to matches if they would be in the symmetric + difference of matches and the set of edges on the union of the paths. + + :param ver: The set of verticies in the left partition of the bipartite graph. + :type vert: Hashable + :param edges: The edges in the bipartite graph. (As the edges are bidirectional they are expected to be given in + this format: Dictionary with keys being the vertices in the left partition and values being tuples + of verticies in the right partition.) + :type edges: dict[Hashable, tuple[Hashable]] + :param matches: The current matching of verticies in the left partition with the ones in the right partition. + :type matches: dict[Hashable, Hashable] + :param distances: The reference to the dictionary mapping verticies of the left partition to the minimal + number of verticies in the left partition that will be passed on a path from an unmatched vertex of the left + partition to the vertex that is the key. The values will be replaced with float("inf") to mark already + visited vertices. + :type distances: dict[Hashable, Union[int, float]] + + :returns: Wheter or not a path from the unmatched vertex ver in the left partition to an unmatched vertex + in the right partition could still exist. + :rtype: bool + """ + # recursion base case: None always has a shortest possible path to itself + if ver is None: + return True + # iterate over all vertices connected to ver in the right partition + for ver2 in edges[ver]: + # if ver2 is on a path with minimal length and not all subtrees have been searched already + if distances[matches[ver2]] == distances[ver] + 1: + if cls._dfs_step(matches[ver], edges, matches, distances): + # add the edge to the matches and return true + matches[ver2] = ver + matches[ver] = ver2 + return True + # mark this vertex as completly searched + distances[ver] = float("inf") + return False + + @classmethod + def _hopcroft_karp( + cls: Self, verticies1: set[Hashable], verticies2: set[Hashable], edges: dict[Hashable, tuple[Hashable]] + ) -> int: + """ + Implementation of Hopcroft-Karp. I.e.:
+ Finds how maximal number of edges with the property that no two edges share an endpoint (and startpoint) + in the given bipartite graph.
+ Note that verticies1 and verticies2 have to be disjoint. + + :param verticies1: The set of verticies in the left partition of the bipartite graph. + :type verticies1: set[Hashable] + :param verticies2: The set of verticies in the right partition of the bipartite graph. + :type verticies2: set[Hashable] + :param edges: The edges in the bipartite graph. (As the edges are bidirectional they are expected to be given in + this format: Dictionary with keys being the vertices in the left partition and values being tuples + of verticies in the right partition.) + :type edges: dict[Hashable, tuple[Hashable]] + + :returns: The number of edges. + :rtype: int + """ + # initializes the first matching. None is a imaginary vertex to denote unmatched vertices. + matches = dict() + for ver in verticies1: + matches[ver] = None + for ver in verticies2: + matches[ver] = None + matching_size = 0 + distances = dict() + while cls._bfs_step(verticies1, edges, matches, distances): + # while a alternating path from an unmatched vertex in the left partition exits + # recalculate the distances and + # iterate over all unmatched vertices in the left partition. + for ver in verticies1: + if matches[ver] is None: + # create the new matches dict and if a new edge was added increase the size of the matching + if cls._dfs_step(ver, edges, matches, distances): + matching_size += 1 + # return the size of the matching + return matching_size def __ne__( self: Self, diff --git a/test/hermes_test/model/types/test_ld_list.py b/test/hermes_test/model/types/test_ld_list.py index aaeb548f..e1785b7a 100644 --- a/test/hermes_test/model/types/test_ld_list.py +++ b/test/hermes_test/model/types/test_ld_list.py @@ -197,6 +197,29 @@ def test_build_in_comparison(): assert li != li2 li[0] = {"@type": "schema:foobar", "@value": "bar"} assert li != li2 + li = ld_list([], key="https://schema.org/name", context=[{"schema": "https://schema.org/"}]) + li2 = ld_list([{"@list": []}], key="https://schema.org/name", context=[{"schema2": "https://schema.org/"}]) + li.extend(["foo", "bar"]) + li2.extend(["bar", "foo"]) + assert li == li2 + li.append("bar") + li2.append("foo") + assert li != li2 + + +def test_hopcroft_karp(): + ver1 = {0, 1, 2, 3, 4} + ver2 = {10, 11, 12, 13, 14} + edges = {0: (10, 11), 1: (10, 14), 2: (12, 13), 3: (10, 14), 4: tuple([11])} + assert ld_list._hopcroft_karp(ver1, ver2, edges) == 4 + edges[4] = (11, 13) + assert ld_list._hopcroft_karp(ver1, ver2, edges) == 5 + ver1 = {0, 1, 2, 3, 4} + ver2 = {(0, 1, 3), (0, 4), (2, ), (2, 4), (1, 3)} + edges = { + 0: ((0, 1, 3), (0, 4)), 1: ((0, 1, 3), (1, 3)), 2: ((2,), (2, 4)), 3: ((0, 1, 3), (1, 3)), 4: ((0, 4), (2, 4)) + } + assert ld_list._hopcroft_karp(ver1, ver2, edges) == 5 def test_extend(): From 287d37a3af7aa1c495bacd2b4c600c13a9ad2795 Mon Sep 17 00:00:00 2001 From: Michael Fritzsche Date: Mon, 15 Dec 2025 10:55:06 +0100 Subject: [PATCH 166/247] added tests and fixed small bug and fixed typos --- src/hermes/model/types/ld_container.py | 17 ++----- src/hermes/model/types/ld_list.py | 24 +++++++--- .../model/types/test_ld_container.py | 36 ++++++++++++++- test/hermes_test/model/types/test_ld_list.py | 45 ++++++++++++++++--- 4 files changed, 96 insertions(+), 26 deletions(-) diff --git a/src/hermes/model/types/ld_container.py b/src/hermes/model/types/ld_container.py index a0a0bfa5..8ddb0876 100644 --- a/src/hermes/model/types/ld_container.py +++ b/src/hermes/model/types/ld_container.py @@ -224,7 +224,7 @@ def _to_expanded_json( :param value: The value that is to be expanded. Different types are expected based on the type of self:
  • If type(self) == ld_dict: value must be a dict
  • If type(self) == ld_list: value must be a list
- value will be exapnded as if it was the data_dict/ the item_list of self. + value will be expanded as if it was the data_dict/ the item_list of self. :type value: JSON_LD_VALUE :return: The expanded version of value i.e. the data_dict/ item_list of self if it had been value. @@ -293,14 +293,8 @@ def _to_expanded_json( for index in range(len(path) - 1, 0, -1): current_data = current_data[path[index]] # replace the data_dict/ item_list so that value is now inside of the ld_value of parent and store the old value - if current_data == []: - # itemlist of an empty ld_list: - # The item_list can't be replaced like in all other cases - self_data = None - current_data.append(value) - else: - self_data = current_data[path[0]] - current_data[path[0]] = value + self_data = current_data[path[0]] + current_data[path[0]] = value # expand the ld_value of parent to implicitly expand value # important the ld_value of parent is not modified because the processor makes a deep copy @@ -310,10 +304,7 @@ def _to_expanded_json( ) # restore the data_dict/ item_list to its former state - if self_data is not None: - current_data[path[0]] = self_data - else: - current_data.clear() + current_data[path[0]] = self_data # use the path to get the expansion of value for index in range(len(path) - 1, -1, -1): diff --git a/src/hermes/model/types/ld_list.py b/src/hermes/model/types/ld_list.py index 490ac4b3..07cf3248 100644 --- a/src/hermes/model/types/ld_list.py +++ b/src/hermes/model/types/ld_list.py @@ -59,8 +59,12 @@ def __init__( :return: :rtype: None - :raises ValueError: bla - :raises ValueError: bla + :raises ValueError: If the given key is not a string or None was given. + :raises ValueError: If the given data is not a list. + :raises ValueError: If the data represents an unexpanded @set. I.e. is of the form [{"@set": [...]}] + :raises ValueError: If the given key is "@type" but the container_type not "@set" + or a value in the item_list not a string. + :raises ValueError: If the given key is not "@type" and any value in the item_list not a dict. """ # check for validity of data if not isinstance(key, str): @@ -272,8 +276,16 @@ def __eq__( other = [other] if isinstance(other, list): if ld_list.is_ld_list(other): - other = ld_list.get_item_list_from_container(other[0]) - other = self.from_list(other, parent=self.parent, key=self.key, context=self.context) + if "@list" in other[0]: + cont = "@list" + elif "@graph" in other[0]: + cont = "@graph" + else: + cont = "@set" + other = other[0][cont] + else: + cont = "@set" + other = self.from_list(other, parent=self.parent, key=self.key, context=self.context, container_type=cont) # check if the length matches if len(self.item_list) != len(other.item_list): @@ -338,7 +350,7 @@ def __eq__( # swap order if first try returned NotImplemented res = other_item.__eq__(item) # if one of both comparisons returned true the elements are equal - if res: + if res is not NotImplemented and res: equality_pairs[index] += [other_index] if len(equality_pairs[index]) == 0: # there exists no element in other that is equal to item @@ -624,7 +636,7 @@ def from_list( :type value: list[JSON_LD_VALUE | BASIC_TYPE | TIME_TYPE] :param parent: The parent container of the new ld_list.
If value is assimilated by parent druing JSON-LD expansion parent is extended by value and parent is returned. - :type parent: ls_container | None + :type parent: ld_container | None :param key: The key into the inner most parent container representing a dict of the new ld_list. :type: key: str | None :param context: The context for the new list (is will also inherit the context of parent).
diff --git a/test/hermes_test/model/types/test_ld_container.py b/test/hermes_test/model/types/test_ld_container.py index ddc98405..f73fdcd9 100644 --- a/test/hermes_test/model/types/test_ld_container.py +++ b/test/hermes_test/model/types/test_ld_container.py @@ -6,7 +6,7 @@ # SPDX-FileContributor: Michael Meinel # SPDX-FileContributor: Michael Fritzsche -from datetime import datetime +from datetime import datetime, time import pytest @@ -121,6 +121,11 @@ def test_to_python_datetime_value(self, mock_context): "@value": "2022-02-22T00:00:00", "@type": "https://schema.org/DateTime" }) == "2022-02-22T00:00:00" # TODO: #434 typed date is returned as string instead of date + def test_to_python_error(self, mock_context): + cont = ld_container([{}], context=[mock_context]) + with pytest.raises(TypeError): + cont._to_python("http://spam.eggs/eggs", set()) + def test_to_expanded_id(self, mock_context): cont = ld_dict([{}], context=[mock_context]) assert cont._to_expanded_json({"@id": f"{self.url}identifier"}) == {"@id": f"{self.url}identifier"} @@ -156,6 +161,35 @@ def test_to_expanded_datetime_value(self, mock_context): assert cont._to_expanded_json({"eggs": datetime(2022, 2, 22)}) == {"http://spam.eggs/eggs": [{"@list": [ {"@value": "2022-02-22T00:00:00", "@type": "https://schema.org/DateTime"} ]}]} + cont = ld_dict([{}], context=[mock_context]) + assert cont._to_expanded_json({"eggs": time(5, 4, 3)}) == {"http://spam.eggs/eggs": [{"@list": [ + {"@value": "05:04:03", "@type": "https://schema.org/Time"} + ]}]} + + def test_compact(self, mock_context): + cont = ld_container([{"http://spam.eggs/eggs": [{"@list": [{"@value": "a"}]}], + "http://spam.eggs/spam": [{"@value": "bacon"}]}]) + assert cont.compact([mock_context]) == {"@context": mock_context, "spam": "bacon", "eggs": ["a"]} + + def test_is_ld_id(self): + assert ld_container.is_ld_id([{"@id": "foo"}]) + assert not ld_container.is_ld_id([{"@id": "foo", "bar": "barfoo"}]) + assert not ld_container.is_ld_id({"@id": "foo"}) + assert not ld_container.is_ld_id([{"bar": "foo"}]) + + def test_is_ld_value(self): + assert ld_container.is_ld_value([{"@value": "foo"}]) + assert ld_container.is_ld_value([{"@value": "foo", "bar": "barfoo"}]) + assert not ld_container.is_ld_value({"@value": "foo"}) + assert not ld_container.is_ld_value([{"bar": "foo"}]) + + def test_is_typed_ld_value(self): + assert ld_container.is_typed_ld_value([{"@value": "foo", "@type": "bar"}]) + assert ld_container.is_typed_ld_value([{"@value": "foo", "@type": "bar", "bar": "barfoo"}]) + assert not ld_container.is_typed_ld_value([{"@type": "bar"}]) + assert not ld_container.is_typed_ld_value([{"@value": "foo"}]) + assert not ld_container.is_typed_ld_value({"@value": "foo", "@type": "bar"}) + assert not ld_container.is_typed_ld_value([{"bar": "foo"}]) def test_are_values_equal(self): assert ld_container.are_values_equal({"@id": "foo"}, {"@id": "foo"}) diff --git a/test/hermes_test/model/types/test_ld_list.py b/test/hermes_test/model/types/test_ld_list.py index e1785b7a..fc9ca6a5 100644 --- a/test/hermes_test/model/types/test_ld_list.py +++ b/test/hermes_test/model/types/test_ld_list.py @@ -14,17 +14,15 @@ def test_undefined_list(): with pytest.raises(ValueError): - ld_list([{}]) + ld_list({}, key="foo") with pytest.raises(ValueError): - ld_list([{"spam": [{"@value": "bacon"}]}]) + ld_list([{"@set": [{"@value": "bacon"}]}], key="foo") with pytest.raises(ValueError): - ld_list([{"@list": [0], "spam": [{"@value": "bacon"}]}]) + ld_list([{"@value": "bacon"}], key="@type") with pytest.raises(ValueError): - ld_list([{"@list": ["a", "b"], "@set": ["foo", "bar"]}]) + ld_list(["bacon"], key="eggs") with pytest.raises(ValueError): ld_list([{"@list": ["a", "b"]}]) # no given key - with pytest.raises(ValueError): - ld_list([{"@list": ["a", "b"]}, {"@set": ["foo", "bar"]}]) def test_list_basics(): @@ -32,6 +30,15 @@ def test_list_basics(): li = ld_list(li_data, key="foo") assert li._data is li_data assert li.item_list is li_data[0]["@list"] + li_data = [{"@graph": [{"@value": "bar"}]}] + li = ld_list(li_data, key="foo") + assert li._data is li_data + assert li.item_list is li_data[0]["@graph"] + li_data = [{"@value": "bar"}] + li = ld_list(li_data, key="foo") + assert li._data is li_data + assert li.item_list is li_data + assert li.container_type == "@set" def test_build_in_get(): @@ -135,6 +142,12 @@ def test_build_in_iter(): def test_append(): + li = ld_list([{"@list": []}], key="https://schema.org/name", context=[{"schema": "https://schema.org/"}]) + li.append(ld_list([{"@value": "foo"}], key="https://schema.org/name")) + assert isinstance(li[0], ld_list) and li[0].container_type == "@list" + li = ld_list([{"@graph": []}], key="https://schema.org/name", context=[{"schema": "https://schema.org/"}]) + li.append({"schema:name": "foo"}) + assert li[0] == {"https://schema.org/name": "foo"} and len(li) == 1 li = ld_list([{"@list": []}], key="https://schema.org/name", context=[{"schema": "https://schema.org/"}]) li.append("foo") assert li[0] == "foo" and li.item_list[0] == {"@value": "foo"} and len(li) == 1 @@ -153,6 +166,7 @@ def test_append(): def test_build_in_contains(): li = ld_list([], key="https://schema.org/name", context=[{"schema": "https://schema.org/"}]) + assert [] in li li.append("foo") li.append({"@type": "A", "schema:name": "a"}) assert "foo" in li and {"@type": "A", "schema:name": "a"} in li @@ -162,9 +176,18 @@ def test_build_in_contains(): li.append({"@id": "schema:foo", "schema:name": "foo"}) assert {"@id": "schema:foo"} in li and {"@id": "schema:foo", "schema:name": "foobar"} in li assert {"schema:name": "foo"} in li + li = ld_list([{"@list": []}], key="https://schema.org/name", context=[{"schema": "https://schema.org/"}]) + li.append("foo") + assert "foo" in li def test_build_in_comparison(): + li = ld_list([{"@list": []}], key="https://schema.org/name", context=[{"schema": "https://schema.org/"}]) + li.append({"@id": "foo", "schema:bar": "foobar"}) + assert [{"@list": [{"@id": "foo", "schema:bar": "barfoo"}]}] == li + assert [{"@list": [{"@id": "bar", "schema:bar": "foobar"}]}] != li + assert [{"@set": [{"@id": "foo", "schema:bar": "barfoo"}]}] == li + assert [{"@graph": [{"@id": "foo", "schema:bar": "barfoo"}]}] == li li = ld_list([{"@list": []}], key="https://schema.org/name", context=[{"schema": "https://schema.org/"}]) li2 = ld_list([{"@list": []}], key="https://schema.org/name", context=[{"schema2": "https://schema.org/"}]) assert li == [] and [] == li @@ -269,6 +292,10 @@ def test_is_container(): def test_from_list(): + with pytest.raises(ValueError): + ld_list.from_list([], key="@type", container_type="@list") + with pytest.raises(ValueError): + ld_list.from_list([], container_type="foo") li = ld_list.from_list([], key="schema:foo") assert li.item_list == li.context == [] and li.parent is li.index is None and li.key == "schema:foo" assert li._data == [] and li.container_type == "@set" @@ -290,3 +317,9 @@ def test_get_item_list_from_container(): assert ld_list.get_item_list_from_container({"@graph": ["a"]}) == ["a"] with pytest.raises(ValueError): ld_list.get_item_list_from_container(["a"]) + with pytest.raises(ValueError): + ld_list.get_item_list_from_container({"@list": [], "@set": []}) + with pytest.raises(ValueError): + ld_list.get_item_list_from_container({"@list": {}}) + with pytest.raises(ValueError): + ld_list.get_item_list_from_container({"foo": []}) From 20fbefca1fdd07461e4a18ccc396f537a83dd353 Mon Sep 17 00:00:00 2001 From: notactuallyfinn Date: Fri, 19 Dec 2025 09:42:44 +0100 Subject: [PATCH 167/247] fixed ld_dict tests --- test/hermes_test/model/types/test_ld_dict.py | 62 +++++++++++--------- 1 file changed, 34 insertions(+), 28 deletions(-) diff --git a/test/hermes_test/model/types/test_ld_dict.py b/test/hermes_test/model/types/test_ld_dict.py index bb25b674..fec2368b 100644 --- a/test/hermes_test/model/types/test_ld_dict.py +++ b/test/hermes_test/model/types/test_ld_dict.py @@ -27,26 +27,26 @@ def test_malformed_input(): def test_build_in_get(): - di = ld_dict([{"name": [{"@value": "Manu Sporny"}], - "homepage": [{"@id": "http://manu.sporny.org/"}], - "foo": [{"foobar": "bar", "barfoo": "foo"}]}]) - assert di["name"] == "Manu Sporny" - assert di["homepage"] == "http://manu.sporny.org/" - assert di["foo"].data_dict == ld_dict([{"foobar": "bar", "barfoo": "foo"}]).data_dict + di = ld_dict([{"http://xmlns.com/foaf/0.1/name": [{"@value": "Manu Sporny"}], + "http://xmlns.com/foaf/0.1/homepage": [{"@id": "http://manu.sporny.org/"}], + "http://xmlns.com/foaf/0.1/foo": [{"http://xmlns.com/foaf/0.1/foobar": [{"@value": "bar"}], + "http://xmlns.com/foaf/0.1/barfoo": [{"@value": "foo"}]}]}], + context=[{"xmlns": "http://xmlns.com/foaf/0.1/"}]) + assert di["xmlns:name"] == ["Manu Sporny"] + assert di["xmlns:homepage"] == ["http://manu.sporny.org/"] + assert di["xmlns:foo"] == [{"xmlns:foobar": ["bar"], "xmlns:barfoo": ["foo"]}] with pytest.raises(KeyError): di["bar"] di = ld_dict([{"http://xmlns.com/foaf/0.1/name": [{"@value": "Manu Sporny"}]}], context=[{"xmlns": "http://xmlns.com/foaf/0.1/"}]) - assert di["xmlns:name"] == "Manu Sporny" + assert di["xmlns:name"] == ["Manu Sporny"] - # FIXME: fixing #433 would fix this - # get -> list to python -> create empty list -> to fill dicts -> expand them -> no expansion method for dicts di = ld_dict([{"http://xmlns.com/foaf/0.1/name": [{"@value": "foo"}], "http://xmlns.com/foaf/0.1/foo": [{"http://xmlns.com/foaf/0.1/barfoo": [{"@id": "foo"}], "http://xmlns.com/foaf/0.1/fooba": [{"@value": "ba"}]}, {"http://xmlns.com/foaf/0.1/barfoo": [{"@id": "foo"}], - "http://xmlns.com/foaf/0.1/fooba": [{"@value": "ba"}]}]}], + "http://xmlns.com/foaf/0.1/fooba": [{"@value": "ba"}]}]}], context=[{"xmlns": "http://xmlns.com/foaf/0.1/"}]) assert isinstance(di["http://xmlns.com/foaf/0.1/foo"], ld_list) @@ -131,7 +131,7 @@ def test_build_in_set(): }] }] } - assert isinstance(di["schema:result"]["schema:error"]["schema:name"], ld_list) + assert isinstance(di["schema:result"][0]["schema:error"][0]["schema:name"], ld_list) # FIXME: fixing #433 would fix this (setting nested python dicts) di = ld_dict([{}], context=[{"schema": "https://schema.org/"}]) @@ -165,6 +165,7 @@ def test_build_in_contains(): assert "http://xmlns.com/foaf/0.1/name" in di and "xmlns:homepage" in di assert "xmlns:foo" not in di and "homepage" not in di and "foo" not in di + def test_build_in_comparison(): di = ld_dict([{}], context={"schema": "https://schema.org/"}) assert di != 1 and di != [] and di != "" @@ -183,10 +184,12 @@ def test_build_in_comparison(): assert di == {"schema:Person": [{"schema:name": "foo"}, {"schema:name": "bar"}]} assert di != {"schema:name": "foo"} + def test_get(): - di = ld_dict([{"https://schema.org/name": [{"@value": "Manu Sporny"}]}], context=[{"schema": "https://schema.org/"}]) - assert di.get("https://schema.org/name") == "Manu Sporny" - assert di.get("schema:name") == "Manu Sporny" + di = ld_dict([{"https://schema.org/name": [{"@value": "Manu Sporny"}]}], + context=[{"schema": "https://schema.org/"}]) + assert di.get("https://schema.org/name") == ["Manu Sporny"] + assert di.get("schema:name") == ["Manu Sporny"] assert di.get("bar", None) is None with pytest.raises(KeyError): di.get("bar") @@ -240,7 +243,8 @@ def test_compact_keys(): "http://xmlns.com/foaf/0.1/homepage": {"@id": "http://manu.sporny.org/"}}) assert {*di.compact_keys()} == {"http://xmlns.com/foaf/0.1/name", "homepage"} - di = ld_dict([{}], context=[{"xmls": "http://xmlns.com/foaf/0.1/", "homepage": "http://xmlns.com/foaf/0.1/homepage"}]) + di = ld_dict([{}], + context=[{"xmls": "http://xmlns.com/foaf/0.1/", "homepage": "http://xmlns.com/foaf/0.1/homepage"}]) di.update({"http://xmlns.com/foaf/0.1/name": "Manu Sporny", "http://xmlns.com/foaf/0.1/homepage": {"@id": "http://manu.sporny.org/"}}) assert {*di.compact_keys()} == {"xmls:name", "homepage"} @@ -251,11 +255,11 @@ def test_items(): inner_di = ld_dict([{}], parent=di, key="http://xmlns.com/foaf/0.1/foo") inner_di.update({"xmlns:foobar": "bar", "http://xmlns.com/foaf/0.1/barfoo": {"@id": "foo"}}) di.update({"http://xmlns.com/foaf/0.1/name": "foo", "xmlns:homepage": {"@id": "bar"}, "xmlns:foo": inner_di}) - assert [*di.items()][0:2] == [("http://xmlns.com/foaf/0.1/name", "foo"), - ("http://xmlns.com/foaf/0.1/homepage", "bar")] + assert [*di.items()][0:2] == [("http://xmlns.com/foaf/0.1/name", ["foo"]), + ("http://xmlns.com/foaf/0.1/homepage", ["bar"])] assert [*di.items()][2][0] == "http://xmlns.com/foaf/0.1/foo" - assert [*di.items()][2][1].data_dict == {"http://xmlns.com/foaf/0.1/foobar": [{"@value": "bar"}], - "http://xmlns.com/foaf/0.1/barfoo": [{"@id": "foo"}]} + assert [*di.items()][2][1][0] == {"http://xmlns.com/foaf/0.1/foobar": [{"@value": "bar"}], + "http://xmlns.com/foaf/0.1/barfoo": [{"@id": "foo"}]} def test_ref(): @@ -273,14 +277,14 @@ def test_to_python(): inner_di = ld_dict([{}], parent=di) inner_di.update({"xmlns:foobar": "bar", "http://xmlns.com/foaf/0.1/barfoo": {"@id": "foo"}}) di.update({"http://xmlns.com/foaf/0.1/name": "foo", "xmlns:homepage": {"@id": "bar"}, "xmlns:foo": inner_di}) - assert di.to_python() == {"xmlns:name": "foo", "xmlns:homepage": "bar", - "xmlns:foo": {"xmlns:foobar": "bar", "xmlns:barfoo": "foo"}} + assert di.to_python() == {"xmlns:name": ["foo"], "xmlns:homepage": ["bar"], + "xmlns:foo": [{"xmlns:foobar": ["bar"], "xmlns:barfoo": ["foo"]}]} di.update({"http://spam.eggs/eggs": { "@value": "2022-02-22T00:00:00", "@type": "https://schema.org/DateTime" }}) - assert di.to_python() == {"xmlns:name": "foo", "xmlns:homepage": "bar", - "xmlns:foo": {"xmlns:foobar": "bar", "xmlns:barfoo": "foo"}, - "http://spam.eggs/eggs": "2022-02-22T00:00:00"} + assert di.to_python() == {"xmlns:name": ["foo"], "xmlns:homepage": ["bar"], + "xmlns:foo": [{"xmlns:foobar": ["bar"], "xmlns:barfoo": ["foo"]}], + "http://spam.eggs/eggs": ["2022-02-22T00:00:00"]} def test_from_dict(): @@ -326,13 +330,15 @@ def test_from_dict(): assert di.full_context == 2 * [{"schema": "https://schema.org/"}] assert di.context == [] and di.key == "schema:error" and di.index is None - di = ld_dict.from_dict({"@context": [{"schema": "https://schema.org/"}], "@type": "schema:Thing", "xmlns:name": "fo"}, + di = ld_dict.from_dict({"@context": [{"schema": "https://schema.org/"}], + "@type": "schema:Thing", "xmlns:name": "fo"}, context=[{"schema": "https://schema.org/", "xmlns": "http://xmlns.com/foaf/0.1/"}]) - assert di["http://xmlns.com/foaf/0.1/name"] == di["xmlns:name"] == "fo" + assert di["http://xmlns.com/foaf/0.1/name"] == di["xmlns:name"] == ["fo"] assert di.context == [{"schema": "https://schema.org/"}, {"schema": "https://schema.org/", "xmlns": "http://xmlns.com/foaf/0.1/"}] - outer_di = ld_dict.from_dict({"@context": [{"schema": "https://schema.org/"}], "@type": "schema:Thing", "@id": "foo"}) + outer_di = ld_dict.from_dict({"@context": [{"schema": "https://schema.org/"}], + "@type": "schema:Thing", "@id": "foo"}) di = ld_dict.from_dict({"@context": {"schema": "https://schema.org/"}, "@type": "schema:Action", "schema:name": "foo"}, parent=outer_di, key="schema:result") @@ -342,7 +348,7 @@ def test_from_dict(): di = ld_dict.from_dict({"@context": {"schema": "https://schema.org/"}, "@type": "schema:Thing", "xmlns:name": "fo"}, context={"xmlns": "http://xmlns.com/foaf/0.1/"}) - assert di["http://xmlns.com/foaf/0.1/name"] == di["xmlns:name"] == "fo" + assert di["http://xmlns.com/foaf/0.1/name"] == di["xmlns:name"] == ["fo"] assert di.context == [{"schema": "https://schema.org/"}, {"xmlns": "http://xmlns.com/foaf/0.1/"}] From 504a345148532c5212d7be112c5d7b3e98d9a516 Mon Sep 17 00:00:00 2001 From: notactuallyfinn Date: Fri, 19 Dec 2025 09:54:19 +0100 Subject: [PATCH 168/247] removed unnecessary fixme --- test/hermes_test/model/types/test_ld_dict.py | 1 - 1 file changed, 1 deletion(-) diff --git a/test/hermes_test/model/types/test_ld_dict.py b/test/hermes_test/model/types/test_ld_dict.py index fec2368b..545b704f 100644 --- a/test/hermes_test/model/types/test_ld_dict.py +++ b/test/hermes_test/model/types/test_ld_dict.py @@ -133,7 +133,6 @@ def test_build_in_set(): } assert isinstance(di["schema:result"][0]["schema:error"][0]["schema:name"], ld_list) - # FIXME: fixing #433 would fix this (setting nested python dicts) di = ld_dict([{}], context=[{"schema": "https://schema.org/"}]) di["@type"] = "schema:Thing" di["schema:result"] = {"@type": "schema:Action", "schema:error": {"@type": "schema:Thing", "schema:name": "foo"}} From 0999f44edb4191d7c5820206a532a5b9504de617 Mon Sep 17 00:00:00 2001 From: "Kernchen, Sophie" Date: Fri, 19 Dec 2025 12:08:24 +0100 Subject: [PATCH 169/247] Delete deprecated function --- src/hermes/model/types/ld_container.py | 33 -------------------------- src/hermes/model/types/ld_list.py | 2 +- 2 files changed, 1 insertion(+), 34 deletions(-) diff --git a/src/hermes/model/types/ld_container.py b/src/hermes/model/types/ld_container.py index 136d774c..88d92795 100644 --- a/src/hermes/model/types/ld_container.py +++ b/src/hermes/model/types/ld_container.py @@ -312,39 +312,6 @@ def _to_expanded_json( return expanded_data - # TODO: remove this method and all other unused methods/ functions - def _to_expanded_json_deprecated(self, key, value): - if key == "@id": - ld_value = self.ld_proc.expand_iri(self.active_ctx, value, vocab=False) - elif key == "@type": - if not isinstance(value, list): - value = [value] - ld_value = [self.ld_proc.expand_iri(self.active_ctx, ld_type) for ld_type in value] - else: - short_key = self.ld_proc.compact_iri(self.active_ctx, key) - if ":" in short_key: - prefix, short_key = short_key.split(":", 1) - ctx_value = self.ld_proc.get_context_value(self.active_ctx, prefix, "@id") - active_ctx = self.ld_proc.process_context( - self.active_ctx, [ctx_value], {"documentLoader": bundled_loader} - ) - else: - active_ctx = self.active_ctx - ld_type = self.ld_proc.get_context_value(active_ctx, short_key, "@type") - if ld_type == "@id": - ld_value = [{"@id": value}] - ld_output = "expanded_json" - else: - ld_value, ld_output = self.ld_proc.apply_typemap(value, "expanded_json", "json", parent=self, key=key) - if ld_output == "json": - ld_value = self.ld_proc.expand( - ld_value, {"expandContext": self.full_context, "documentLoader": bundled_loader} - ) - elif ld_output != "expanded_json": - raise TypeError(f"Cannot convert {type(value)}") - - return ld_value - def __repr__(self: Self) -> str: """ Returns a short string representation of this object. diff --git a/src/hermes/model/types/ld_list.py b/src/hermes/model/types/ld_list.py index 07cf3248..c4d1c450 100644 --- a/src/hermes/model/types/ld_list.py +++ b/src/hermes/model/types/ld_list.py @@ -137,7 +137,7 @@ def __setitem__( if not isinstance(index, slice): # expand the value value = self._to_expanded_json([value]) - # the returned value is always a list but my contain more then one item + # the returned value is always a list but may contain more then one item # therefor a slice on the item_list is used to add the expanded value(s) if index != -1: self.item_list[index:index+1] = value From 6f039e84e10b8b9e4134b2d30c858928010c7dba Mon Sep 17 00:00:00 2001 From: notactuallyfinn Date: Fri, 19 Dec 2025 13:28:43 +0100 Subject: [PATCH 170/247] slightly adjusted tests and fixed miniature bugs in ld_container and ld_dict --- src/hermes/model/types/ld_container.py | 4 +- src/hermes/model/types/ld_dict.py | 21 ++++--- test/hermes_test/model/test_api.py | 64 +++++++++++--------- test/hermes_test/model/types/test_ld_dict.py | 6 +- 4 files changed, 53 insertions(+), 42 deletions(-) diff --git a/src/hermes/model/types/ld_container.py b/src/hermes/model/types/ld_container.py index 88d92795..ef47715f 100644 --- a/src/hermes/model/types/ld_container.py +++ b/src/hermes/model/types/ld_container.py @@ -237,7 +237,7 @@ def _to_expanded_json( # while searching build a path such that it leads from the found ld_dicts ld_value to selfs data_dict/ item_list parent = self path = [] - while parent.__class__.__name__ != "ld_dict": + while parent.__class__.__name__ not in {"ld_dict", "SoftwareMetadata"}: if parent.container_type == "@list": path.extend(["@list", 0]) elif parent.container_type == "@graph": @@ -250,7 +250,7 @@ def _to_expanded_json( # if neither self nor any of its parents is a ld_dict: # create a dict with the key of the outer most parent of self and this parents ld_value as a value # this dict is stored in an ld_container and simulates the most minimal JSON-LD object possible - if parent.__class__.__name__ != "ld_dict": + if parent.__class__.__name__ not in {"ld_dict", "SoftwareMetadata"}: key = self.ld_proc.expand_iri(parent.active_ctx, parent.key) parent = ld_container([{key: parent._data}]) path.append(0) diff --git a/src/hermes/model/types/ld_dict.py b/src/hermes/model/types/ld_dict.py index 9b707864..e2a3eb1b 100644 --- a/src/hermes/model/types/ld_dict.py +++ b/src/hermes/model/types/ld_dict.py @@ -22,15 +22,16 @@ def __init__(self, data, *, parent=None, key=None, index=None, context=None): def __getitem__(self, key): full_iri = self.ld_proc.expand_iri(self.active_ctx, key) - try: - ld_value = self.data_dict[full_iri] - except KeyError: - self.data_dict.update({full_iri: [{"@list": []}]}) - ld_value = self.data_dict[full_iri] + if full_iri not in self.data_dict: + self[full_iri] = [] + ld_value = self.data_dict[full_iri] return self._to_python(full_iri, ld_value) def __setitem__(self, key, value): full_iri = self.ld_proc.expand_iri(self.active_ctx, key) + if value is None: + del self[full_iri] + return ld_value = self._to_expanded_json({full_iri: value}) self.data_dict.update(ld_value) @@ -40,7 +41,8 @@ def __delitem__(self, key): def __contains__(self, key): full_iri = self.ld_proc.expand_iri(self.active_ctx, key) - return len(self[full_iri]) != 0 + # FIXME: is that good? + return full_iri in self.data_dict def __eq__(self, other): if not isinstance(other, (dict, ld_dict)): @@ -79,9 +81,12 @@ def __ne__(self, other): return not x def get(self, key, default=_NO_DEFAULT): - if key not in self and default is not ld_dict._NO_DEFAULT: + try: + return self[key] + except KeyError as e: + if default is self._NO_DEFAULT: + raise e return default - return self[key] def update(self, other): for key, value in other.items(): diff --git a/test/hermes_test/model/test_api.py b/test/hermes_test/model/test_api.py index 4b2c614b..b4bec276 100644 --- a/test/hermes_test/model/test_api.py +++ b/test/hermes_test/model/test_api.py @@ -44,8 +44,8 @@ def test_init_nested_object(): "author": [{"name": "Foo"}, {"name": "Bar"}], } data = SoftwareMetadata(my_software, extra_vocabs={"foo": "https://foo.bar"}) - assert data["schema:softwareName"][0] == "MySoftware" - assert data["maintainer"][0]["name"][0] == "Some Name" + assert data["schema:softwareName"] == ["MySoftware"] + assert len(data["maintainer"]) == 1 and data["maintainer"][0]["name"] == ["Some Name"] for author in data["author"]: for name in author["name"]: assert name in ["Foo", "Bar"] @@ -53,20 +53,23 @@ def test_init_nested_object(): def test_append(): data = SoftwareMetadata() - data["foo"].append("a") - assert type(data["foo"]) is ld_list and data["foo"][0] == "a" and data["foo"].item_list == [{"@value": "a"}] - data["foo"].append("b") - assert type(data["foo"]) is ld_list and data["foo"].item_list == [{"@value": "a"}, {"@value": "b"}] - data["foo"].append("c") - assert data["foo"].item_list == [{"@value": "a"}, {"@value": "b"}, {"@value": "c"}] + data["schema:foo"].append("a") + assert type(data["schema:foo"]) is ld_list + assert data["schema:foo"][0] == "a" and data["schema:foo"].item_list == [{"@value": "a"}] + data["schema:foo"].append("b") + assert type(data["schema:foo"]) is ld_list + assert data["schema:foo"] == [{"@value": "a"}, {"@value": "b"}] + data["schema:foo"].append("c") + assert data["schema:foo"] == [{"@value": "a"}, {"@value": "b"}, {"@value": "c"}] data = SoftwareMetadata() - data["foo"].append({"schema:name": "foo"}) - assert type(data["foo"]) is ld_list and type(data["foo"][0]) is ld_dict - assert data["foo"][0].data_dict == {"http://schema.org/name": [{"@value": "foo"}]} - data["foo"].append({"schema:name": "foo"}) - assert type(data["foo"]) is ld_list and data["foo"].item_list == 2*[{"http://schema.org/name": [{"@value": "foo"}]}] - data["foo"].append({"schema:name": "foo"}) - assert data["foo"].item_list == 3 * [{"http://schema.org/name": [{"@value": "foo"}]}] + data["schema:foo"].append({"schema:name": "bar"}) + assert type(data["schema:foo"]) is ld_list and type(data["schema:foo"][0]) is ld_dict + assert data["schema:foo"] == [{"http://schema.org/name": [{"@value": "bar"}]}] + data["schema:foo"].append({"schema:name": "bar"}) + assert type(data["schema:foo"]) is ld_list + assert data["schema:foo"] == 2 * [{"http://schema.org/name": [{"@value": "bar"}]}] + data["schema:foo"].append({"schema:name": "bar"}) + assert data["schema:foo"] == 3 * [{"http://schema.org/name": [{"@value": "bar"}]}] def test_iterative_assignment(): @@ -78,9 +81,10 @@ def test_iterative_assignment(): assert isinstance(authors, ld_list) author1 = authors[0] author1["email"] = "author@example.com" - authors[0] = author1 authors.append({"name": "Bar", "email": "author2@example.com"}) assert len(authors) == 2 + del authors[0] + assert len(authors) == 1 def test_usage(): @@ -95,38 +99,38 @@ def test_usage(): harvest = { "authors": [ {"name": "Foo", "affiliation": ["Uni A", "Lab B"], "kw": ["a", "b", "c"]}, - {"name": "Bar", "affiliation": ["Uni C"], "email": "bar@c.edu"}, + {"name": "Bar", "affiliation": ["Uni C"], "email": "bar@c.edu", "kw": "egg"}, {"name": "Baz", "affiliation": ["Lab E"]}, ] } for author in harvest["authors"]: for exist_author in data["author"]: - if author["name"] == exist_author["name"][0]: - exist_author["affiliation"] = author["affiliation"] - if "email" in author: - exist_author["email"].append(author["email"]) - if "kw" in author: - exist_author["schema:knowsAbout"].extend(author["kw"]) + if author["name"] in exist_author["name"]: + exist_author["affiliation"] = author.get("affiliation", []) + exist_author["email"].extend(email if isinstance((email := author.get("email", [])), list) else [email]) + exist_author["schema:knowsAbout"].extend(kw if isinstance((kw := author.get("kw", [])), list) else [kw]) break else: data["author"].append(author) assert len(data["author"]) == 3 foo, bar, baz = data["author"] assert foo["name"][0] == "Foo" - assert foo["affiliation"].to_python() == ["Uni A", "Lab B"] - assert foo["schema:knowsAbout"].to_python() == ["a", "b", "c"] - assert foo["email"].to_python() == ["foo@bar.net", "foo@baz.com"] + assert foo["affiliation"] == ["Uni A", "Lab B"] + assert foo["schema:knowsAbout"] == ["a", "b", "c"] + assert foo["email"] == ["foo@bar.net", "foo@baz.com"] assert bar["name"][0] == "Bar" - assert bar["affiliation"].to_python() == ["Uni C"] - assert bar["email"].to_python() == ["bar@c.edu"] + assert bar["affiliation"] == ["Uni C"] + assert bar["email"] == ["bar@c.edu"] assert baz["name"][0] == "Baz" - assert baz["affiliation"].to_python() == ["Lab E"] + assert baz["affiliation"] == ["Lab E"] assert len(baz["schema:knowsAbout"]) == 0 assert len(baz["email"]) == 0 for author in data["author"]: assert "name" in author assert "email" in author - if "schema:knowsAbout" not in author: + if author["schema:knowsAbout"] == ["egg"]: # FIXME: None has to be discussed + # json-ld processor just removes it in expansion author["schema:knowsAbout"] = None author["schema:pronouns"] = "they/them" + assert len(bar["schema:knowsAbout"]) == 0 diff --git a/test/hermes_test/model/types/test_ld_dict.py b/test/hermes_test/model/types/test_ld_dict.py index 4346f0f4..66095295 100644 --- a/test/hermes_test/model/types/test_ld_dict.py +++ b/test/hermes_test/model/types/test_ld_dict.py @@ -189,8 +189,10 @@ def test_get(): context=[{"schema": "https://schema.org/"}]) assert di.get("https://schema.org/name") == ["Manu Sporny"] assert di.get("schema:name") == ["Manu Sporny"] - assert di.get("bar", None) is None - assert isinstance(di["bar"], ld_list) and len(di["bar"]) == 0 + assert di.get("bar", None) is None # invalid key + with pytest.raises(KeyError): + di.get("bar") + assert isinstance(di.get("schema:bar", None), ld_list) and len(di.get("schema:bar", None)) == 0 def test_update(): From c2b9c4fa71b300c8d78df0f737fdae650b63b679 Mon Sep 17 00:00:00 2001 From: Michael Fritzsche Date: Mon, 5 Jan 2026 09:07:04 +0100 Subject: [PATCH 171/247] cleaned up __init__.py --- src/hermes/model/types/__init__.py | 47 +++--------------------------- src/hermes/model/types/ld_dict.py | 7 +---- src/hermes/model/types/ld_list.py | 24 +++------------ 3 files changed, 9 insertions(+), 69 deletions(-) diff --git a/src/hermes/model/types/__init__.py b/src/hermes/model/types/__init__.py index 9e4b1bf5..df6aed71 100644 --- a/src/hermes/model/types/__init__.py +++ b/src/hermes/model/types/__init__.py @@ -5,29 +5,19 @@ # SPDX-FileContributor: Michael Meinel # SPDX-FileContributor: Michael Fritzsche -from datetime import date, time, datetime - from .ld_container import ld_container -from .ld_list import ld_list from .ld_dict import ld_dict -from .ld_context import iri_map +from .ld_list import ld_list from .pyld_util import JsonLdProcessor _TYPEMAP = [ - # Conversion routines for ld_container - ( - lambda c: isinstance(c, ld_container), - { - "ld_container": lambda c, **_: c, - "json": lambda c, **_: c.compact(), - "expanded_json": lambda c, **_: c.ld_value, - }, - ), + # Conversion routine for ld_container + (lambda c: isinstance(c, ld_container), {"ld_container": lambda c, **_: c}), # Wrap item from ld_dict in ld_list (ld_list.is_ld_list, {"ld_container": ld_list}), - (lambda c: isinstance(c, list), {"ld_container": lambda c, **kw: ld_list(c, **kw)}), + (lambda c: isinstance(c, list), {"ld_container": ld_list}), # pythonize items from lists (expanded set is already handled above) (ld_container.is_json_id, {"python": lambda c, **_: c["@id"]}), @@ -36,35 +26,6 @@ (ld_list.is_container, {"ld_container": lambda c, **kw: ld_list([c], **kw)}), (ld_dict.is_json_dict, {"ld_container": lambda c, **kw: ld_dict([c], **kw)}), (lambda v: isinstance(v, str), {"python": lambda v, parent, **_: parent.ld_proc.compact_iri(parent.active_ctx, v)}), - - # Convert internal data types to expanded_json - (ld_container.is_json_id, {"expanded_json": lambda c, **_: [c]}), - (ld_container.is_ld_id, {"expanded_json": lambda c, **_: c}), - (ld_container.is_json_value, {"expanded_json": lambda c, **_: [c]}), - (ld_container.is_ld_value, {"expanded_json": lambda c, **_: c}), - (ld_dict.is_json_dict, {"expanded_json": lambda c, **kw: ld_dict.from_dict(c, **kw).ld_value}), - ( - ld_list.is_container, - {"expanded_json": lambda c, **kw: ld_list.from_list(ld_list.get_item_list_from_container(c), **kw).ld_value} - ), - ( - ld_list.is_ld_list, - {"expanded_json": lambda c, **kw: ld_list.from_list(ld_list.get_item_list_from_container(c[0]), **kw).ld_value} - ), - (lambda c: isinstance(c, list), {"expanded_json": lambda c, **kw: ld_list.from_list(c, **kw).ld_value}), - (lambda v: isinstance(v, (int, float, str, bool)), {"expanded_json": lambda v, **_: [{"@value": v}]}), - ( - lambda v: isinstance(v, datetime), - {"expanded_json": lambda v, **_: [{"@value": v.isoformat(), "@type": iri_map["schema:DateTime"]}]} - ), - ( - lambda v: isinstance(v, date), - {"expanded_json": lambda v, **_: [{"@value": v.isoformat(), "@type": iri_map["schema:Date"]}]} - ), - ( - lambda v: isinstance(v, time), - {"expanded_json": lambda v, **_: [{"@value": v.isoformat(), "@type": iri_map["schema:Time"]}]} - ), ] diff --git a/src/hermes/model/types/ld_dict.py b/src/hermes/model/types/ld_dict.py index e2a3eb1b..7493b897 100644 --- a/src/hermes/model/types/ld_dict.py +++ b/src/hermes/model/types/ld_dict.py @@ -65,12 +65,7 @@ def __eq__(self, other): if unique_keys and unique_keys != {"@id"}: return False for key in keys_self.intersection(keys_other): - item = self[key] - other_item = other[key] - res = item.__eq__(other_item) - if res == NotImplemented: - res = other_item.__eq__(item) - if res is False or res == NotImplemented: # res is not True + if self[key] != other[key]: return False return True diff --git a/src/hermes/model/types/ld_list.py b/src/hermes/model/types/ld_list.py index c4d1c450..002bbd6d 100644 --- a/src/hermes/model/types/ld_list.py +++ b/src/hermes/model/types/ld_list.py @@ -312,16 +312,8 @@ def __eq__( if item["@id"] != other_item["@id"]: return False continue - # get the 'real' items (i.e. can also be ld_dicts or ld_lists) - item = self[index] - other_item = other[index] - # compare using the correct equals method - res = item.__eq__(other_item) - if res == NotImplemented: - # swap order if first try returned NotImplemented - res = other_item.__eq__(item) - # return false if the second comparison also fails or one of them returned false - if res is False or res == NotImplemented: + # compare the 'real' items (i.e. can also be ld_dicts or ld_lists) + if self[index] != other[index]: return False # return true because no unequal elements where found return True @@ -341,16 +333,8 @@ def __eq__( if item["@id"] == other_item["@id"]: equality_pairs[index] += [other_index] continue - # get the 'real' items (i.e. can also be ld_dicts or ld_lists) - item = self[index] - other_item = other[index] - # compare using the correct equals method - res = item.__eq__(other_item) - if res == NotImplemented: - # swap order if first try returned NotImplemented - res = other_item.__eq__(item) - # if one of both comparisons returned true the elements are equal - if res is not NotImplemented and res: + # compare the 'real' items (i.e. can also be ld_dicts or ld_lists) + if self[index] == other[other_index]: equality_pairs[index] += [other_index] if len(equality_pairs[index]) == 0: # there exists no element in other that is equal to item From bd1a19fd281502a04adc921970d3755390edb834 Mon Sep 17 00:00:00 2001 From: Michael Fritzsche Date: Mon, 5 Jan 2026 09:14:04 +0100 Subject: [PATCH 172/247] ran 'poetry lock' --- poetry.lock | 74 +++++++++++++++++++++++++++++++++++++++++++++++++++-- 1 file changed, 72 insertions(+), 2 deletions(-) diff --git a/poetry.lock b/poetry.lock index 067f4087..476df72e 100644 --- a/poetry.lock +++ b/poetry.lock @@ -700,6 +700,19 @@ files = [ {file = "iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7"}, ] +[[package]] +name = "isodate" +version = "0.7.2" +description = "An ISO 8601 date/time/duration parser and formatter" +optional = false +python-versions = ">=3.7" +groups = ["main"] +markers = "python_version == \"3.10\"" +files = [ + {file = "isodate-0.7.2-py3-none-any.whl", hash = "sha256:28009937d8031054830160fce6d409ed342816b543597cece116d966c6d99e15"}, + {file = "isodate-0.7.2.tar.gz", hash = "sha256:4cd1aa0f43ca76f4a6c6c0292a85f40b35ec2e43e315b59f06e6d32171a953e6"}, +] + [[package]] name = "jinja2" version = "3.1.6" @@ -1139,6 +1152,24 @@ files = [ dev = ["abi3audit", "black", "check-manifest", "coverage", "packaging", "pylint", "pyperf", "pypinfo", "pytest-cov", "requests", "rstcheck", "ruff", "sphinx", "sphinx_rtd_theme", "toml-sort", "twine", "virtualenv", "vulture", "wheel"] test = ["pytest", "pytest-xdist", "setuptools"] +[[package]] +name = "pyaml" +version = "25.7.0" +description = "PyYAML-based module to produce a bit more pretty and readable YAML-serialized data" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "pyaml-25.7.0-py3-none-any.whl", hash = "sha256:ce5d7867cc2b455efdb9b0448324ff7b9f74d99f64650f12ca570102db6b985f"}, + {file = "pyaml-25.7.0.tar.gz", hash = "sha256:e113a64ec16881bf2b092e2beb84b7dcf1bd98096ad17f5f14e8fb782a75d99b"}, +] + +[package.dependencies] +PyYAML = "*" + +[package.extras] +anchors = ["unidecode"] + [[package]] name = "pycodestyle" version = "2.9.1" @@ -1594,7 +1625,7 @@ version = "6.0.2" description = "YAML parser and emitter for Python" optional = false python-versions = ">=3.8" -groups = ["docs"] +groups = ["main", "docs"] files = [ {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, @@ -1651,6 +1682,30 @@ files = [ {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, ] +[[package]] +name = "rdflib" +version = "7.5.0" +description = "RDFLib is a Python library for working with RDF, a simple yet powerful language for representing information." +optional = false +python-versions = ">=3.8.1" +groups = ["main"] +files = [ + {file = "rdflib-7.5.0-py3-none-any.whl", hash = "sha256:b011dfc40d0fc8a44252e906dcd8fc806a7859bc231be190c37e9568a31ac572"}, + {file = "rdflib-7.5.0.tar.gz", hash = "sha256:663083443908b1830e567350d72e74d9948b310f827966358d76eebdc92bf592"}, +] + +[package.dependencies] +isodate = {version = ">=0.7.2,<1.0.0", markers = "python_version < \"3.11\""} +pyparsing = ">=2.1.0,<4" + +[package.extras] +berkeleydb = ["berkeleydb (>=18.1.0,<19.0.0)"] +html = ["html5rdf (>=1.2,<2)"] +lxml = ["lxml (>=4.3,<6.0)"] +networkx = ["networkx (>=2,<4)"] +orjson = ["orjson (>=3.9.14,<4)"] +rdf4j = ["httpx (>=0.28.1,<0.29.0)"] + [[package]] name = "requests" version = "2.32.4" @@ -1806,6 +1861,21 @@ files = [ {file = "ruamel.yaml.clib-0.2.12.tar.gz", hash = "sha256:6c8fbb13ec503f99a91901ab46e0b07ae7941cd527393187039aec586fdfd36f"}, ] +[[package]] +name = "schemaorg" +version = "0.1.1" +description = "Python functions for applied use of schema.org" +optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "schemaorg-0.1.1.tar.gz", hash = "sha256:567f1735df666221c893d2c206dd70f9cddcc983c8cdc39f3a7b7726884d2c51"}, +] + +[package.dependencies] +lxml = ">=4.1.1" +pyaml = ">=17.12.1" + [[package]] name = "setuptools" version = "80.9.0" @@ -2477,4 +2547,4 @@ files = [ [metadata] lock-version = "2.1" python-versions = ">=3.10, <4.0.0" -content-hash = "2e2405b30c3dee4416a6e77828c7cff1197a8be71665770bcbdb308c19ef4358" +content-hash = "e76de51d1f5dd86486d4cc24a5cdf7d007b16ce5d9d0cc3f7d0f353cf0defff0" From 9527e260b122229bdd62cd769ee722703853fa8a Mon Sep 17 00:00:00 2001 From: Michael Fritzsche Date: Mon, 5 Jan 2026 09:52:06 +0100 Subject: [PATCH 173/247] updated type hints to be supported by python 3.10 --- src/hermes/model/types/ld_container.py | 5 ++- src/hermes/model/types/ld_list.py | 55 +++++++++++++++----------- 2 files changed, 36 insertions(+), 24 deletions(-) diff --git a/src/hermes/model/types/ld_container.py b/src/hermes/model/types/ld_container.py index ef47715f..92e2ed11 100644 --- a/src/hermes/model/types/ld_container.py +++ b/src/hermes/model/types/ld_container.py @@ -8,7 +8,8 @@ from .pyld_util import JsonLdProcessor, bundled_loader from datetime import date, time, datetime -from typing import Union, Self, Any +from typing import Union, Any +from typing_extensions import Self JSON_LD_CONTEXT_DICT = dict[str, Union[str, "JSON_LD_CONTEXT_DICT"]] @@ -64,7 +65,7 @@ def __init__( self: Self, data: EXPANDED_JSON_LD_VALUE, *, - parent: Union["ld_container", None] = None, + parent: Union[Self, None] = None, key: Union[str, None] = None, index: Union[int, None] = None, context: Union[list[Union[str, JSON_LD_CONTEXT_DICT]], None] = None, diff --git a/src/hermes/model/types/ld_list.py b/src/hermes/model/types/ld_list.py index 002bbd6d..0b89ed00 100644 --- a/src/hermes/model/types/ld_list.py +++ b/src/hermes/model/types/ld_list.py @@ -17,7 +17,9 @@ BASIC_TYPE, ) -from typing import Generator, Hashable, Union, Self, Any +from collections.abc import Generator, Hashable +from typing import Union, Any +from typing_extensions import Self class ld_list(ld_container): @@ -32,10 +34,10 @@ class ld_list(ld_container): """ def __init__( - self: Self, + self: "ld_list", data: Union[list[str], list[dict[str, EXPANDED_JSON_LD_VALUE]]], *, - parent: Union["ld_container", None] = None, + parent: Union[ld_container, None] = None, key: Union[str, None] = None, index: Union[int, None] = None, context: Union[list[Union[str, JSON_LD_CONTEXT_DICT]], None] = None, @@ -44,7 +46,7 @@ def __init__( Create a new ld_list container. :param self: The instance of ld_list to be initialized. - :type self: Self + :type self: ld_list :param data: The expanded json-ld data that is mapped (must be valid for @set, @list or @graph) :type data: list[str] | list[dict[str, BASIC_TYPE | EXPANDED_JSON_LD_VALUE]] :param parent: parent node of this container. @@ -100,7 +102,7 @@ def __getitem__( Get the item(s) at position index in a pythonized form. :param self: The ld_list the items are taken from. - :type self: Self + :type self: ld_list :param index: The positon(s) from which the item(s) is/ are taken. :type index: int | slice @@ -125,7 +127,7 @@ def __setitem__( All given values are expanded. If any are assimilated by self all items that would be added by this are added. :param self: The ld_list the items are set in. - :type self: Self + :type self: ld_list :param index: The positon(s) at which the item(s) is/ are set. :type index: int | slice :param value: The new value(s). @@ -161,7 +163,7 @@ def __delitem__(self: Self, index: Union[int, slice]) -> None: and not be modified afterwards. :param self: The ld_list the items are deleted from. - :type self: Self + :type self: ld_list :param index: The positon(s) at which the item(s) is/ are deleted. :type index: int | slice @@ -175,22 +177,22 @@ def __len__(self: Self) -> int: Returns the number of items in this ld_list. :param self: The ld_list whose length is to be returned. - :type self: Self + :type self: ld_list :return: The length of self. :rtype: int """ return len(self.item_list) - def __iter__(self: Self) -> Generator[Union[BASIC_TYPE | TIME_TYPE | ld_container], None, None]: + def __iter__(self: Self) -> Generator[Union[BASIC_TYPE, TIME_TYPE, ld_container], None, None]: """ Returns an iterator over the pythonized values contained in self. :param self: The ld_list over whose items is iterated. - :type self: Self + :type self: ld_list :return: The Iterator over self's values. - :rtype: Generator[Union[BASIC_TYPE | TIME_TYPE | ld_container], None, None] + :rtype: Generator[BASIC_TYPE | TIME_TYPE | ld_container, None, None] """ # return an Iterator over each value in self in its pythonized from for index, value in enumerate(self.item_list): @@ -211,7 +213,7 @@ def __contains__(self: Self, value: JSON_LD_VALUE) -> bool: has the same @id like it or it xor the object in the item_list has an id an all other values are the same. :param self: The ld_list that is checked if it contains value. - :type self: Self + :type self: ld_list :param value: The object being checked whether or not it is in self. :type value: JSON_LD_VALUE @@ -240,7 +242,7 @@ def __contains__(self: Self, value: JSON_LD_VALUE) -> bool: def __eq__( self: Self, other: Union[ - "ld_list", + Self, list[Union[JSON_LD_VALUE, BASIC_TYPE, TIME_TYPE, ld_container]], dict[str, list[Union[JSON_LD_VALUE, BASIC_TYPE, TIME_TYPE, ld_container]]], ], @@ -258,7 +260,7 @@ def __eq__( returns true. :param self: The ld_list other is compared to. - :type self: Self + :type self: ld_list :param other: The list/ container/ ld_list self is compared to. :type other: ld_list | list[JSON_LD_VALUE | BASIC_TYPE | TIME_TYPE | ld_container] | dict[str, list[JSON_LD_VALUE | BASIC_TYPE | TIME_TYPE | ld_container]] @@ -354,7 +356,10 @@ def __eq__( @classmethod def _bfs_step( - cls: Self, verticies1: set[Hashable], edges: dict[Hashable, tuple[Hashable]], matches: dict[Hashable, Hashable], + cls: type[Self], + verticies1: set[Hashable], + edges: dict[Hashable, tuple[Hashable]], + matches: dict[Hashable, Hashable], distances: dict[Hashable, Union[int, float]] ) -> bool: """ @@ -408,7 +413,10 @@ def _bfs_step( @classmethod def _dfs_step( - cls: Self, ver: Hashable, edges: dict[Hashable, tuple[Hashable]], matches: dict[Hashable, Hashable], + cls: type[Self], + ver: Hashable, + edges: dict[Hashable, tuple[Hashable]], + matches: dict[Hashable, Hashable], distances: dict[Hashable, Union[int, float]] ) -> bool: """ @@ -452,7 +460,10 @@ def _dfs_step( @classmethod def _hopcroft_karp( - cls: Self, verticies1: set[Hashable], verticies2: set[Hashable], edges: dict[Hashable, tuple[Hashable]] + cls: type[Self], + verticies1: set[Hashable], + verticies2: set[Hashable], + edges: dict[Hashable, tuple[Hashable]] ) -> int: """ Implementation of Hopcroft-Karp. I.e.:
@@ -495,7 +506,7 @@ def _hopcroft_karp( def __ne__( self: Self, other: Union[ - "ld_list", + Self, list[Union[JSON_LD_VALUE, BASIC_TYPE, TIME_TYPE, ld_container]], dict[str, list[Union[JSON_LD_VALUE, BASIC_TYPE, TIME_TYPE, ld_container]]], ], @@ -506,7 +517,7 @@ def __ne__( See ld_list.__eq__ for more details on the comparison.) :param self: The ld_list other is compared to. - :type self: Self + :type self: ld_list :param other: The list/ container/ ld_list self is compared to. :type other: ld_list | list[JSON_LD_VALUE | BASIC_TYPE | TIME_TYPE | ld_container] | dict[str, list[JSON_LD_VALUE | BASIC_TYPE | TIME_TYPE | ld_container]] @@ -528,7 +539,7 @@ def append(self: Self, value: Union[JSON_LD_VALUE, BASIC_TYPE, TIME_TYPE, ld_con The given value is expanded. If it is assimilated by self all items that would be added by this are added. :param self: The ld_list the item is appended to. - :type self: Self + :type self: ld_list :param value: The new value. :type value: Union[JSON_LD_VALUE, BASIC_TYPE, TIME_TYPE, ld_container] @@ -543,7 +554,7 @@ def extend(self: Self, value: list[Union[JSON_LD_VALUE, BASIC_TYPE, TIME_TYPE, l The given values are expanded. If any are assimilated by self all items that would be added by this are added. :param self: The ld_list the items are appended to. - :type self: Self + :type self: ld_list :param value: The new values. :type value: list[Union[JSON_LD_VALUE, BASIC_TYPE, TIME_TYPE, ld_container]] @@ -558,7 +569,7 @@ def to_python(self: Self) -> list[PYTHONIZED_LD_CONTAINER]: Return a fully pythonized version of this object where all ld_container are replaced by lists and dicts. :param self: The ld_list whose fully pythonized version is returned. - :type self: Self + :type self: ld_list :return: The fully pythonized version of self. :rtype: list[PYTHONIZED_LD_CONTAINER] From 97d9d95c0756e2380ef7239f5e42d04a751b2902 Mon Sep 17 00:00:00 2001 From: Michael Fritzsche Date: Mon, 5 Jan 2026 11:00:28 +0100 Subject: [PATCH 174/247] update type hints and began commenting ld_dict --- src/hermes/model/types/ld_container.py | 6 ++- src/hermes/model/types/ld_dict.py | 56 ++++++++++++++++++++------ src/hermes/model/types/ld_list.py | 8 ++-- 3 files changed, 53 insertions(+), 17 deletions(-) diff --git a/src/hermes/model/types/ld_container.py b/src/hermes/model/types/ld_container.py index 92e2ed11..766205a9 100644 --- a/src/hermes/model/types/ld_container.py +++ b/src/hermes/model/types/ld_container.py @@ -14,7 +14,11 @@ JSON_LD_CONTEXT_DICT = dict[str, Union[str, "JSON_LD_CONTEXT_DICT"]] BASIC_TYPE = Union[str, float, int, bool] -EXPANDED_JSON_LD_VALUE = list[dict[str, Union["EXPANDED_JSON_LD_VALUE", BASIC_TYPE]]] +EXPANDED_JSON_LD_VALUE = list[Union[ + dict[str, Union["EXPANDED_JSON_LD_VALUE", BASIC_TYPE]], + "EXPANDED_JSON_LD_VALUE", + str +]] COMPACTED_JSON_LD_VALUE = Union[ list[Union[dict[str, Union["COMPACTED_JSON_LD_VALUE", BASIC_TYPE]], BASIC_TYPE]], dict[str, Union["COMPACTED_JSON_LD_VALUE", BASIC_TYPE]], diff --git a/src/hermes/model/types/ld_dict.py b/src/hermes/model/types/ld_dict.py index 7493b897..3d01c711 100644 --- a/src/hermes/model/types/ld_dict.py +++ b/src/hermes/model/types/ld_dict.py @@ -5,29 +5,56 @@ # SPDX-FileContributor: Michael Meinel # SPDX-FileContributor: Michael Fritzsche -from .ld_container import ld_container - from .pyld_util import bundled_loader +from .ld_container import ( + ld_container, + JSON_LD_CONTEXT_DICT, + EXPANDED_JSON_LD_VALUE, + PYTHONIZED_LD_CONTAINER, + JSON_LD_VALUE, + TIME_TYPE, + BASIC_TYPE, +) + +from collections.abc import KeysView +from types import NotImplementedType +from typing import Union, Any +from typing_extensions import Self class ld_dict(ld_container): + """ + An JSON-LD container resembling a dict. + See also :class:`ld_container` + + :cvar container_type: A type used as a placeholder to represent "no default". + :cvartype container_type: type[str] + """ _NO_DEFAULT = type("NO DEFAULT") - def __init__(self, data, *, parent=None, key=None, index=None, context=None): + def __init__( + self: Self, + data: list[dict[str, EXPANDED_JSON_LD_VALUE]], + *, + parent: Union[ld_container, None] = None, + key: Union[str, None] = None, + index: Union[int, None] = None, + context: Union[list[Union[str, JSON_LD_CONTEXT_DICT]], None] = None + ) -> None: if not self.is_ld_dict(data): raise ValueError("The given data does not represent a ld_dict.") super().__init__(data, parent=parent, key=key, index=index, context=context) self.data_dict = data[0] - def __getitem__(self, key): + def __getitem__(self: Self, key: str) -> list[Union[BASIC_TYPE, TIME_TYPE, ld_container]]: full_iri = self.ld_proc.expand_iri(self.active_ctx, key) if full_iri not in self.data_dict: self[full_iri] = [] ld_value = self.data_dict[full_iri] return self._to_python(full_iri, ld_value) - def __setitem__(self, key, value): + def __setitem__(self: Self, key: str, value: Union[JSON_LD_VALUE, BASIC_TYPE, TIME_TYPE, ld_container]) -> None: full_iri = self.ld_proc.expand_iri(self.active_ctx, key) if value is None: del self[full_iri] @@ -35,16 +62,16 @@ def __setitem__(self, key, value): ld_value = self._to_expanded_json({full_iri: value}) self.data_dict.update(ld_value) - def __delitem__(self, key): + def __delitem__(self: Self, key: str) -> None: full_iri = self.ld_proc.expand_iri(self.active_ctx, key) del self.data_dict[full_iri] - def __contains__(self, key): + def __contains__(self: Self, key: str) -> bool: full_iri = self.ld_proc.expand_iri(self.active_ctx, key) # FIXME: is that good? return full_iri in self.data_dict - def __eq__(self, other): + def __eq__(self: Self, other: Any) -> Union[bool, NotImplementedType]: # FIXME: give another type hint to other? if not isinstance(other, (dict, ld_dict)): return NotImplemented if ld_container.is_json_id(other): @@ -69,13 +96,15 @@ def __eq__(self, other): return False return True - def __ne__(self, other): + def __ne__(self: Self, other: Any) -> Union[bool, NotImplementedType]: # FIXME: give another type hint to other? x = self.__eq__(other) if x is NotImplemented: return NotImplemented return not x - def get(self, key, default=_NO_DEFAULT): + def get( + self: Self, key: str, default: Any = _NO_DEFAULT + ) -> Union[list[Union[BASIC_TYPE, TIME_TYPE, ld_container]], Any]: try: return self[key] except KeyError as e: @@ -83,11 +112,14 @@ def get(self, key, default=_NO_DEFAULT): raise e return default - def update(self, other): + def update( + self: Self, + other: Union[dict[str, Union[JSON_LD_VALUE, BASIC_TYPE, TIME_TYPE, ld_container]], "ld_dict"] + ) -> None: for key, value in other.items(): self[key] = value - def keys(self): + def keys(self: Self) -> KeysView[str]: return self.data_dict.keys() def compact_keys(self): diff --git a/src/hermes/model/types/ld_list.py b/src/hermes/model/types/ld_list.py index 0b89ed00..e567dff8 100644 --- a/src/hermes/model/types/ld_list.py +++ b/src/hermes/model/types/ld_list.py @@ -34,8 +34,8 @@ class ld_list(ld_container): """ def __init__( - self: "ld_list", - data: Union[list[str], list[dict[str, EXPANDED_JSON_LD_VALUE]]], + self: Self, + data: EXPANDED_JSON_LD_VALUE, *, parent: Union[ld_container, None] = None, key: Union[str, None] = None, @@ -48,7 +48,7 @@ def __init__( :param self: The instance of ld_list to be initialized. :type self: ld_list :param data: The expanded json-ld data that is mapped (must be valid for @set, @list or @graph) - :type data: list[str] | list[dict[str, BASIC_TYPE | EXPANDED_JSON_LD_VALUE]] + :type data: EXPANDED_JSON_LD_VALUE :param parent: parent node of this container. :type parent: ld_container | None :param key: key into the parent container. @@ -253,7 +253,7 @@ def __eq__( For each index it is checked if the ids of the items at index in self and other match if both have one, if only one has or neither have an id all other values are compared.
Note that due to those circumstances equality is not transitve - meaning if a == b and b == c is is not guaranteed that a == c.
+ meaning if a == b and b == c it is not guaranteed that a == c.
If self or other is considered unordered the comparison is more difficult. All items in self are compared with all items in other. On the resulting graph given by the realtion == the Hopcroft-Karp algoritm is used to determine if there exists a bijection reordering self so that the ordered comparison of self with other From cd6e3d5564813e798217dbe62e1b0fa3ab1fe077 Mon Sep 17 00:00:00 2001 From: notactuallyfinn Date: Fri, 9 Jan 2026 13:17:20 +0100 Subject: [PATCH 175/247] added and updated comments --- src/hermes/model/types/ld_container.py | 64 +++--- src/hermes/model/types/ld_dict.py | 303 ++++++++++++++++++++++--- src/hermes/model/types/ld_list.py | 102 ++++----- 3 files changed, 355 insertions(+), 114 deletions(-) diff --git a/src/hermes/model/types/ld_container.py b/src/hermes/model/types/ld_container.py index 766205a9..c8ab051f 100644 --- a/src/hermes/model/types/ld_container.py +++ b/src/hermes/model/types/ld_container.py @@ -5,33 +5,37 @@ # SPDX-FileContributor: Michael Meinel # SPDX-FileContributor: Michael Fritzsche -from .pyld_util import JsonLdProcessor, bundled_loader +from __future__ import annotations -from datetime import date, time, datetime -from typing import Union, Any -from typing_extensions import Self - - -JSON_LD_CONTEXT_DICT = dict[str, Union[str, "JSON_LD_CONTEXT_DICT"]] -BASIC_TYPE = Union[str, float, int, bool] -EXPANDED_JSON_LD_VALUE = list[Union[ - dict[str, Union["EXPANDED_JSON_LD_VALUE", BASIC_TYPE]], - "EXPANDED_JSON_LD_VALUE", - str -]] -COMPACTED_JSON_LD_VALUE = Union[ - list[Union[dict[str, Union["COMPACTED_JSON_LD_VALUE", BASIC_TYPE]], BASIC_TYPE]], - dict[str, Union["COMPACTED_JSON_LD_VALUE", BASIC_TYPE]], -] -TIME_TYPE = Union[datetime, date, time] -JSON_LD_VALUE = Union[ - list[Union["JSON_LD_VALUE", BASIC_TYPE, TIME_TYPE, "ld_container"]], - dict[str, Union["JSON_LD_VALUE", BASIC_TYPE, TIME_TYPE, "ld_container"]], -] -PYTHONIZED_LD_CONTAINER = Union[ - list[Union["PYTHONIZED_LD_CONTAINER", BASIC_TYPE, TIME_TYPE]], - dict[str, Union["PYTHONIZED_LD_CONTAINER", BASIC_TYPE, TIME_TYPE]], -] +from .pyld_util import JsonLdProcessor, bundled_loader +from datetime import date, datetime, time + +from typing import TYPE_CHECKING +if TYPE_CHECKING: + from .ld_dict import ld_dict + from .ld_list import ld_list + from typing import Any, TypeAlias, Union + from typing_extensions import Self + JSON_LD_CONTEXT_DICT: TypeAlias = dict[str, Union[str, "JSON_LD_CONTEXT_DICT"]] + BASIC_TYPE: TypeAlias = Union[str, float, int, bool] + EXPANDED_JSON_LD_VALUE: TypeAlias = list[Union[ + dict[str, Union["EXPANDED_JSON_LD_VALUE", BASIC_TYPE]], + "EXPANDED_JSON_LD_VALUE", + str + ]] + COMPACTED_JSON_LD_VALUE: TypeAlias = Union[ + list[Union[dict[str, Union["COMPACTED_JSON_LD_VALUE", BASIC_TYPE]], BASIC_TYPE]], + dict[str, Union["COMPACTED_JSON_LD_VALUE", BASIC_TYPE]], + ] + TIME_TYPE: TypeAlias = Union[datetime, date, time] + JSON_LD_VALUE: TypeAlias = Union[ + list[Union["JSON_LD_VALUE", BASIC_TYPE, TIME_TYPE, ld_dict, ld_list]], + dict[str, Union["JSON_LD_VALUE", BASIC_TYPE, TIME_TYPE, ld_dict, ld_list]], + ] + PYTHONIZED_LD_CONTAINER: TypeAlias = Union[ + list[Union["PYTHONIZED_LD_CONTAINER", BASIC_TYPE, TIME_TYPE]], + dict[str, Union["PYTHONIZED_LD_CONTAINER", BASIC_TYPE, TIME_TYPE]], + ] class ld_container: @@ -69,7 +73,7 @@ def __init__( self: Self, data: EXPANDED_JSON_LD_VALUE, *, - parent: Union[Self, None] = None, + parent: Union[ld_dict, ld_list, None] = None, key: Union[str, None] = None, index: Union[int, None] = None, context: Union[list[Union[str, JSON_LD_CONTEXT_DICT]], None] = None, @@ -82,7 +86,7 @@ def __init__( :param data: The expanded json-ld data that is mapped. :type data: EXPANDED_JSON_LD_VALUE :param parent: parent node of this container. - :type parent: ld_container | None + :type parent: ld_dict | ld_list | None :param key: key into the parent container. :type key: str | None :param index: index into the parent container. @@ -182,7 +186,7 @@ def ld_value(self: Self) -> EXPANDED_JSON_LD_VALUE: def _to_python( self: Self, full_iri: str, ld_value: Union[list, dict, str] - ) -> Union["ld_container", BASIC_TYPE, TIME_TYPE]: + ) -> Union[ld_dict, ld_list, BASIC_TYPE, TIME_TYPE]: """ Returns a pythonized version of the given value pretending the value is in self and full_iri its key. @@ -195,7 +199,7 @@ def _to_python( :type ld_value: list | dict | str :return: The pythonized value of the ld_value. - :rtype: ld_container | BASIC_TYPE | TIME_TYPE + :rtype: ld_dict | ld_list | BASIC_TYPE | TIME_TYPE """ if full_iri == "@id": # values of key "@id" only have to be compacted diff --git a/src/hermes/model/types/ld_dict.py b/src/hermes/model/types/ld_dict.py index 3d01c711..ba29838c 100644 --- a/src/hermes/model/types/ld_dict.py +++ b/src/hermes/model/types/ld_dict.py @@ -5,21 +5,25 @@ # SPDX-FileContributor: Michael Meinel # SPDX-FileContributor: Michael Fritzsche +from __future__ import annotations + from .pyld_util import bundled_loader -from .ld_container import ( - ld_container, - JSON_LD_CONTEXT_DICT, - EXPANDED_JSON_LD_VALUE, - PYTHONIZED_LD_CONTAINER, - JSON_LD_VALUE, - TIME_TYPE, - BASIC_TYPE, -) - -from collections.abc import KeysView -from types import NotImplementedType -from typing import Union, Any -from typing_extensions import Self +from .ld_container import ld_container + +from typing import TYPE_CHECKING +if TYPE_CHECKING: + from collections.abc import Generator, Iterator, KeysView + from .ld_container import ( + JSON_LD_CONTEXT_DICT, + EXPANDED_JSON_LD_VALUE, + PYTHONIZED_LD_CONTAINER, + JSON_LD_VALUE, + TIME_TYPE, + BASIC_TYPE, + ) + from .ld_list import ld_list + from typing import Any, Union, Literal + from typing_extensions import Self class ld_dict(ld_container): @@ -27,6 +31,9 @@ class ld_dict(ld_container): An JSON-LD container resembling a dict. See also :class:`ld_container` + :ivar ref: A dict used to reference this object by its id. (Its form is {"@id": ...}) + :ivartype ref: dict[Literal["@id"], str] + :cvar container_type: A type used as a placeholder to represent "no default". :cvartype container_type: type[str] """ @@ -36,44 +43,143 @@ def __init__( self: Self, data: list[dict[str, EXPANDED_JSON_LD_VALUE]], *, - parent: Union[ld_container, None] = None, + parent: Union[ld_dict, ld_list, None] = None, key: Union[str, None] = None, index: Union[int, None] = None, context: Union[list[Union[str, JSON_LD_CONTEXT_DICT]], None] = None ) -> None: + """ + Create a new instance of an ld_dict. + + :param self: The instance of ld_container to be initialized. + :type self: Self + :param data: The expanded json-ld data that is mapped. + :type data: EXPANDED_JSON_LD_VALUE + :param parent: parent node of this container. + :type parent: ld_dict | ld_list | None + :param key: key into the parent container. + :type key: str | None + :param index: index into the parent container. + :type index: int | None + :param context: local context for this container. + :type context: list[str | JSON_LD_CONTEXT_DICT] | None + + :return: + :rtype: None + + :raises ValueError: If the given data doesn't represent an ld_dict. + """ + # check for validity of data if not self.is_ld_dict(data): raise ValueError("The given data does not represent a ld_dict.") + self.data_dict = data[0] + # call super constructor super().__init__(data, parent=parent, key=key, index=index, context=context) - self.data_dict = data[0] + def __getitem__(self: Self, key: str) -> ld_list: + """ + Get the item with the given key in a pythonized form. + If self contains no key, value pair with the given key, then an empty list is added as its value and returned. + + :param self: The ld_dict the item is taken from. + :type self: ld_dict + :param key: The key (compacted or expanded) to the item. + :type key: str - def __getitem__(self: Self, key: str) -> list[Union[BASIC_TYPE, TIME_TYPE, ld_container]]: + :return: The pythonized item at the key. + :rtype: ld_list + """ full_iri = self.ld_proc.expand_iri(self.active_ctx, key) if full_iri not in self.data_dict: self[full_iri] = [] ld_value = self.data_dict[full_iri] return self._to_python(full_iri, ld_value) - def __setitem__(self: Self, key: str, value: Union[JSON_LD_VALUE, BASIC_TYPE, TIME_TYPE, ld_container]) -> None: + def __setitem__(self: Self, key: str, value: Union[JSON_LD_VALUE, BASIC_TYPE, TIME_TYPE, ld_dict, ld_list]) -> None: + """ + Set the item at the given key to the given value or delete it if value is None. + The given value is expanded. + + :param self: The ld_dict the item is set in. + :type self: ld_dict + :param key: The key at which the item is set. + :type key: str + :param value: The new value. + :type value: JSON_LD_VALUE | BASIC_TYPE | TIME_TYPE | ld_dict | ld_list + + :return: + :rtype: None + """ + # expand key full_iri = self.ld_proc.expand_iri(self.active_ctx, key) + # if the value is None delete the entry instead of updating it if value is None: del self[full_iri] return + # expand the value and replace the key, value pair ld_value = self._to_expanded_json({full_iri: value}) self.data_dict.update(ld_value) def __delitem__(self: Self, key: str) -> None: + """ + Delete the key, value pair with the given value pair. + Note that if a deleted object is represented by an ld_container druing this process it will still exist + and not be modified afterwards. + + :param self: The ld_dict the key, value pair is deleted from. + :type self: ld_dict + :param key: The key (expanded or compacted) of the key, value pair that is deleted. + :type key: str + + :return: + :rtype: None + """ + # expand key and delete the key, value pair full_iri = self.ld_proc.expand_iri(self.active_ctx, key) del self.data_dict[full_iri] def __contains__(self: Self, key: str) -> bool: + """ + Returns whether or not self contains a key, value pair with the given key. + + :param self: The ld_dict that is checked if it a key, value pair with the given key. + :type self: ld_dict + :param key: The key for which it is checked if a key, value pair is contained in self. + :type key: str + + :return: Whether or not self contains a key, value pair with the given key. + :rtype: bool + """ + # expand the key and check if self contains a key, value pair with it full_iri = self.ld_proc.expand_iri(self.active_ctx, key) # FIXME: is that good? return full_iri in self.data_dict - def __eq__(self: Self, other: Any) -> Union[bool, NotImplementedType]: # FIXME: give another type hint to other? + def __eq__( + self: Self, other: Union[ld_dict, dict[str, Union[JSON_LD_VALUE, BASIC_TYPE, TIME_TYPE, ld_dict, ld_list]]] + ) -> bool: + """ + Returns wheter or not self is considered to be equal to other.
+ If other is not an ld_dict, it is converted first. + If an id check is possible return its result otherwise: + For each key, value pair its value is compared to the value with the same key in other. + Note that due to those circumstances equality is not transitve + meaning if a == b and b == c it is not guaranteed that a == c.
+ + :param self: The ld_dict other is compared to. + :type self: ld_dict + :param other: The dict/ ld_dict self is compared to. + :type other: ld_dict | dict[str, JSON_LD_VALUE | BASIC_TYPE | TIME_TYPE | ld_dict | ld_list] + + :return: Whether or not self and other are considered equal. + If other is of the wrong type return the NotImplemented singleton instead. + :rtype: bool + """ + # check if other has an acceptable type if not isinstance(other, (dict, ld_dict)): return NotImplemented + + # compare in the special case that other is a json_id or json_value if ld_container.is_json_id(other): if "@id" in self: return self["@id"] == other["@id"] @@ -82,29 +188,69 @@ def __eq__(self: Self, other: Any) -> Union[bool, NotImplementedType]: # FIXME: if {*self.keys()}.issubset({"@id", *other.keys()}): return ld_container.are_values_equal(self.data_dict, other) return False + + # convert into an ld_dict if other is not one if isinstance(other, dict): other = self.from_dict(other, parent=self.parent, key=self.key, context=self.context) + + # check for id equality if "@id" in self and "@id" in other: return self["@id"] == other["@id"] + + # test for value equality keys_self = {*self.keys()} keys_other = {*other.keys()} unique_keys = keys_self.symmetric_difference(keys_other) if unique_keys and unique_keys != {"@id"}: + # there is a key that isn't "@id" that is only in other or self return False + # check if the values with the same key are equal for key in keys_self.intersection(keys_other): if self[key] != other[key]: return False return True - def __ne__(self: Self, other: Any) -> Union[bool, NotImplementedType]: # FIXME: give another type hint to other? + def __ne__( + self: Self, other: Union[ld_dict, dict[str, Union[JSON_LD_VALUE, BASIC_TYPE, TIME_TYPE, ld_dict, ld_list]]] + ) -> bool: + """ + Returns whether or not self and other not considered to be equal. + (Returns not self.__eq__(other) if the return type is bool. + See ld_list.__eq__ for more details on the comparison.) + + :param self: The ld_dict other is compared to. + :type self: ld_dict + :param other: The dict/ ld_dict self is compared to. + :type other: ld_dict | dict[str, JSON_LD_VALUE | BASIC_TYPE | TIME_TYPE | ld_dict | ld_list] + + :return: Whether or not self and other are not considered equal. + If other is of the wrong type return the NotImplemented singleton instead. + :rtype: bool + """ + # compare self and other using __eq__ x = self.__eq__(other) + # return NotImplemented if __eq__ did so and else the inverted result of __eq__ if x is NotImplemented: return NotImplemented return not x def get( self: Self, key: str, default: Any = _NO_DEFAULT - ) -> Union[list[Union[BASIC_TYPE, TIME_TYPE, ld_container]], Any]: + ) -> Union[ld_list, Any]: + """ + Get the item with the given key in a pythonized form using the build in get. + If a KeyError is raised, return the default or reraise it if no default is given. + + :param self: The ld_dict the item is taken from. + :type self: ld_dict + :param key: The key (compacted or expanded) to the item. + :type key: str + + :return: The pythonized item at the key. + :rtype: ld_list + + :raises KeyError: If the build in get raised a KeyError. + """ try: return self[key] except KeyError as e: @@ -114,29 +260,75 @@ def get( def update( self: Self, - other: Union[dict[str, Union[JSON_LD_VALUE, BASIC_TYPE, TIME_TYPE, ld_container]], "ld_dict"] + other: Union[ld_dict, dict[str, Union[JSON_LD_VALUE, BASIC_TYPE, TIME_TYPE, ld_dict, ld_list]]] ) -> None: + """ + Set the items at the given keys to the given values or delete it if value is None by using build in set. + + :param self: The ld_dict the items are set in. + :type self: ld_dict + :param other: The key, value pairs giving the new values and their keys. + :type other: ld_dict | dict[str, JSON_LD_VALUE | BASIC_TYPE | TIME_TYPE | ld_dict | ld_list] + + :return: + :rtype: None + """ for key, value in other.items(): self[key] = value def keys(self: Self) -> KeysView[str]: + """ + Return the keys of the key, value pairs of self. + + :param self: The ld_dict whose keys are returned. + :type self: ld_dict + """ return self.data_dict.keys() - def compact_keys(self): + def compact_keys(self: Self) -> Iterator[str]: + """ + Return an iterator of the compacted keys of the key, value pairs of self. + + :param self: The ld_dict whose compacted keys are returned. + :type self: ld_dict + """ return map( lambda k: self.ld_proc.compact_iri(self.active_ctx, k), self.data_dict.keys() ) - def items(self): + def items(self: Self) -> Generator[tuple[str, ld_list], None, None]: + """ + Return an generator of tuples of keys and their values in self. + + :param self: The ld_dict whose items are returned. + :type self: ld_dict + """ for k in self.data_dict.keys(): yield k, self[k] @property - def ref(self): + def ref(self: Self) -> dict[Literal["@id"], str]: + """ + Return the dict used to reference this object by its id. (Its form is {"@id": ...}) + + :param self: The ld_dict whose reference is returned. + :type self: ld_dict + + :raises KeyError: If self has no id. + """ return {"@id": self.data_dict['@id']} - def to_python(self): + def to_python(self: Self) -> dict[str, Union[BASIC_TYPE, TIME_TYPE, PYTHONIZED_LD_CONTAINER]]: + """ + Return a fully pythonized version of this object where all ld_container are replaced by lists and dicts. + + :param self: The ld_dict whose fully pythonized version is returned. + :type self: ld_dict + + :return: The fully pythonized version of self. + :rtype: dict[str, BASIC_TYPE | TIME_TYPE | PYTHONIZED_LD_CONTAINER] + """ res = {} for key in self.compact_keys(): value = self[key] @@ -145,14 +337,42 @@ def to_python(self): res[key] = value return res + # FIXME: Allow from_dict to handle dicts containing ld_dicts and ld_lists @classmethod - def from_dict(cls, value, *, parent=None, key=None, context=None, ld_type=None): - ld_data = value.copy() + def from_dict( + cls: type[Self], + value: dict[str, PYTHONIZED_LD_CONTAINER], + *, + parent: Union[ld_dict, ld_list, None] = None, + key: Union[str, None] = None, + context: Union[str, JSON_LD_CONTEXT_DICT, list[Union[str, JSON_LD_CONTEXT_DICT]], None] = None, + ld_type: Union[str, list[str], None] = None + ) -> ld_dict: + """ + Creates a ld_dict from the given dict with the given parent, key, context and ld_type.
+ Uses the expansion of the JSON-LD Processor and not the one of ld_container. + + :param value: The dict of values the ld_dict should be created from. + :type value: dict[str, PYTHONIZED_LD_CONTAINER] + :param parent: The parent container of the new ld_list. + :type parent: ld_dict | ld_list | None + :param key: The key into the inner most parent container representing a dict of the new ld_list. + :type: key: str | None + :param context: The context for the new dict (it will also inherit the context of parent). + :type context: str | JSON_LD_CONTEXT_DICT | list[str | JSON_LD_CONTEXT_DICT] | None + :param ld_type: Additional value(s) for the new dict. + :type ld_type: str | list[str] | None + :return: The new ld_dict build from value. + :rtype: ld_dict + """ + # make a copy of value and add the new type to it. + ld_data = value.copy() ld_type = ld_container.merge_to_list(ld_type or [], ld_data.get('@type', [])) if ld_type: ld_data["@type"] = ld_type + # generate the context from value, context and parent data_context = ld_data.pop('@context', []) merged_contexts = ld_container.merge_to_list(data_context, context or []) full_context = [] @@ -161,17 +381,40 @@ def from_dict(cls, value, *, parent=None, key=None, context=None, ld_type=None): elif parent is not None: full_context = parent.full_context + merged_contexts + # expand value and create an ld_dict from it ld_value = cls.ld_proc.expand(ld_data, {"expandContext": full_context, "documentLoader": bundled_loader}) ld_value = cls(ld_value, parent=parent, key=key, context=merged_contexts) return ld_value @classmethod - def is_ld_dict(cls, ld_value): + def is_ld_dict(cls: type[Self], ld_value: Any) -> bool: + """ + Returns wheter the given value is considered to be possible of representing an expanded json-ld dict.
+ I.e. if ld_value is a list containing a dict containing none of the keys "@set", "@graph", "@list" and "@value" + and not only the key "@id". + + :param ld_value: The value that is checked. + :type ld_value: Any + + :returns: Wheter or not ld_value could represent an expanded json-ld dict. + :rtype: bool + """ return cls.is_ld_node(ld_value) and cls.is_json_dict(ld_value[0]) @classmethod - def is_json_dict(cls, ld_value): + def is_json_dict(cls: type[Self], ld_value: Any) -> bool: + """ + Returns wheter the given value is considered to be possible of representing an expanded json-ld dict.
+ I.e. if ld_value is a dict containing none of the keys "@set", "@graph", "@list" and "@value" + and not only the key "@id". + + :param ld_value: The value that is checked. + :type ld_value: Any + + :returns: Wheter or not ld_value could represent an expanded json-ld dict. + :rtype: bool + """ if not isinstance(ld_value, dict): return False diff --git a/src/hermes/model/types/ld_list.py b/src/hermes/model/types/ld_list.py index e567dff8..23ebe4d0 100644 --- a/src/hermes/model/types/ld_list.py +++ b/src/hermes/model/types/ld_list.py @@ -5,21 +5,25 @@ # SPDX-FileContributor: Michael Meinel # SPDX-FileContributor: Michael Fritzsche +from __future__ import annotations + +from .ld_container import ld_container from collections import deque -from types import NotImplementedType -from .ld_container import ( - ld_container, - JSON_LD_CONTEXT_DICT, - EXPANDED_JSON_LD_VALUE, - PYTHONIZED_LD_CONTAINER, - JSON_LD_VALUE, - TIME_TYPE, - BASIC_TYPE, -) - -from collections.abc import Generator, Hashable -from typing import Union, Any -from typing_extensions import Self + +from typing import TYPE_CHECKING +if TYPE_CHECKING: + from collections.abc import Generator, Hashable + from .ld_dict import ld_dict + from .ld_container import ( + JSON_LD_CONTEXT_DICT, + EXPANDED_JSON_LD_VALUE, + PYTHONIZED_LD_CONTAINER, + JSON_LD_VALUE, + TIME_TYPE, + BASIC_TYPE, + ) + from typing import Any, Union + from typing_extensions import Self class ld_list(ld_container): @@ -37,20 +41,20 @@ def __init__( self: Self, data: EXPANDED_JSON_LD_VALUE, *, - parent: Union[ld_container, None] = None, + parent: Union[ld_dict, ld_list, None] = None, key: Union[str, None] = None, index: Union[int, None] = None, context: Union[list[Union[str, JSON_LD_CONTEXT_DICT]], None] = None, ) -> None: """ - Create a new ld_list container. + Create a new instance of an ld_list. :param self: The instance of ld_list to be initialized. :type self: ld_list :param data: The expanded json-ld data that is mapped (must be valid for @set, @list or @graph) :type data: EXPANDED_JSON_LD_VALUE :param parent: parent node of this container. - :type parent: ld_container | None + :type parent: ld_dict | ld_list | None :param key: key into the parent container. :type key: str | None :param index: index into the parent container. @@ -97,7 +101,7 @@ def __init__( def __getitem__( self: Self, index: Union[int, slice] - ) -> Union[BASIC_TYPE, TIME_TYPE, ld_container, list[Union[BASIC_TYPE, TIME_TYPE, ld_container]]]: + ) -> Union[BASIC_TYPE, TIME_TYPE, ld_dict, ld_list, list[Union[BASIC_TYPE, TIME_TYPE, ld_dict, ld_list]]]: """ Get the item(s) at position index in a pythonized form. @@ -107,7 +111,7 @@ def __getitem__( :type index: int | slice :return: The pythonized item(s) at index. - :rtype: BASIC_TYPE | TIME_TYPE | ld_container | list[BASIC_TYPE | TIME_TYPE | ld_container]] + :rtype: BASIC_TYPE | TIME_TYPE | ld_dict | ld_list | list[BASIC_TYPE | TIME_TYPE | ld_dict | ld_list] """ # handle slices by applying them to a list of indices and then getting the items at those if isinstance(index, slice): @@ -120,7 +124,7 @@ def __getitem__( return item def __setitem__( - self: Self, index: Union[int, slice], value: Union[JSON_LD_VALUE, BASIC_TYPE, TIME_TYPE, ld_container] + self: Self, index: Union[int, slice], value: Union[JSON_LD_VALUE, BASIC_TYPE, TIME_TYPE, ld_dict, ld_list] ) -> None: """ Set the item(s) at position index to the given value(s). @@ -131,7 +135,7 @@ def __setitem__( :param index: The positon(s) at which the item(s) is/ are set. :type index: int | slice :param value: The new value(s). - :type value: Union[JSON_LD_VALUE, BASIC_TYPE, TIME_TYPE, ld_container] + :type value: JSON_LD_VALUE | BASIC_TYPE | TIME_TYPE | ld_dict | ld_list :return: :rtype: None @@ -184,7 +188,7 @@ def __len__(self: Self) -> int: """ return len(self.item_list) - def __iter__(self: Self) -> Generator[Union[BASIC_TYPE, TIME_TYPE, ld_container], None, None]: + def __iter__(self: Self) -> Generator[Union[BASIC_TYPE, TIME_TYPE, ld_dict, ld_list], None, None]: """ Returns an iterator over the pythonized values contained in self. @@ -192,7 +196,7 @@ def __iter__(self: Self) -> Generator[Union[BASIC_TYPE, TIME_TYPE, ld_container] :type self: ld_list :return: The Iterator over self's values. - :rtype: Generator[BASIC_TYPE | TIME_TYPE | ld_container, None, None] + :rtype: Generator[BASIC_TYPE | TIME_TYPE | ld_dict | ld_list, None, None] """ # return an Iterator over each value in self in its pythonized from for index, value in enumerate(self.item_list): @@ -241,12 +245,9 @@ def __contains__(self: Self, value: JSON_LD_VALUE) -> bool: def __eq__( self: Self, - other: Union[ - Self, - list[Union[JSON_LD_VALUE, BASIC_TYPE, TIME_TYPE, ld_container]], - dict[str, list[Union[JSON_LD_VALUE, BASIC_TYPE, TIME_TYPE, ld_container]]], - ], - ) -> Union[bool, NotImplementedType]: + other: Union[ld_list, list[Union[JSON_LD_VALUE, BASIC_TYPE, TIME_TYPE, ld_dict, ld_list]], + dict[str, Union[JSON_LD_VALUE, BASIC_TYPE, TIME_TYPE, ld_dict, ld_list]]] + ) -> bool: """ Returns wheter or not self is considered to be equal to other.
If other is not an ld_list, it is converted first. @@ -262,12 +263,11 @@ def __eq__( :param self: The ld_list other is compared to. :type self: ld_list :param other: The list/ container/ ld_list self is compared to. - :type other: ld_list | list[JSON_LD_VALUE | BASIC_TYPE | TIME_TYPE | ld_container] - | dict[str, list[JSON_LD_VALUE | BASIC_TYPE | TIME_TYPE | ld_container]] + :type other: ld_list | list[JSON_LD_VALUE | BASIC_TYPE | TIME_TYPE | ld_dict | ld_list] :return: Whether or not self and other are considered equal. - If other is of the wrong type return NotImplemented instead. - :rtype: bool | NotImplementedType + If other is of the wrong type return the NotImplemented singleton instead. + :rtype: bool """ # check if other has an acceptable type if not (isinstance(other, (list, ld_list)) or ld_list.is_container(other)): @@ -504,13 +504,8 @@ def _hopcroft_karp( return matching_size def __ne__( - self: Self, - other: Union[ - Self, - list[Union[JSON_LD_VALUE, BASIC_TYPE, TIME_TYPE, ld_container]], - dict[str, list[Union[JSON_LD_VALUE, BASIC_TYPE, TIME_TYPE, ld_container]]], - ], - ) -> Union[bool, NotImplementedType]: + self: Self, other: Union[ld_list, list[Union[JSON_LD_VALUE, BASIC_TYPE, TIME_TYPE, ld_dict, ld_list]]] + ) -> bool: """ Returns whether or not self and other not considered to be equal. (Returns not self.__eq__(other) if the return type is bool. @@ -519,12 +514,11 @@ def __ne__( :param self: The ld_list other is compared to. :type self: ld_list :param other: The list/ container/ ld_list self is compared to. - :type other: ld_list | list[JSON_LD_VALUE | BASIC_TYPE | TIME_TYPE | ld_container] - | dict[str, list[JSON_LD_VALUE | BASIC_TYPE | TIME_TYPE | ld_container]] + :type other: ld_list | list[JSON_LD_VALUE | BASIC_TYPE | TIME_TYPE | ld_dict | ld_list] :return: Whether or not self and other are not considered equal. - If other is of the wrong type return NotImplemented instead. - :rtype: bool | NotImplementedType + If other is of the wrong type return the NotImplemented singleton instead. + :rtype: bool """ # compare self and other using __eq__ x = self.__eq__(other) @@ -533,7 +527,7 @@ def __ne__( return NotImplemented return not x - def append(self: Self, value: Union[JSON_LD_VALUE, BASIC_TYPE, TIME_TYPE, ld_container]) -> None: + def append(self: Self, value: Union[JSON_LD_VALUE, BASIC_TYPE, TIME_TYPE, ld_dict, ld_list]) -> None: """ Append the item to the given ld_list self. The given value is expanded. If it is assimilated by self all items that would be added by this are added. @@ -541,14 +535,14 @@ def append(self: Self, value: Union[JSON_LD_VALUE, BASIC_TYPE, TIME_TYPE, ld_con :param self: The ld_list the item is appended to. :type self: ld_list :param value: The new value. - :type value: Union[JSON_LD_VALUE, BASIC_TYPE, TIME_TYPE, ld_container] + :type value: JSON_LD_VALUE | BASIC_TYPE | TIME_TYPE | ld_dict | ld_list :return: :rtype: None """ self.item_list.extend(self._to_expanded_json([value])) - def extend(self: Self, value: list[Union[JSON_LD_VALUE, BASIC_TYPE, TIME_TYPE, ld_container]]) -> None: + def extend(self: Self, value: list[Union[JSON_LD_VALUE, BASIC_TYPE, TIME_TYPE, ld_dict, ld_list]]) -> None: """ Append the items in value to the given ld_list self. The given values are expanded. If any are assimilated by self all items that would be added by this are added. @@ -556,7 +550,7 @@ def extend(self: Self, value: list[Union[JSON_LD_VALUE, BASIC_TYPE, TIME_TYPE, l :param self: The ld_list the items are appended to. :type self: ld_list :param value: The new values. - :type value: list[Union[JSON_LD_VALUE, BASIC_TYPE, TIME_TYPE, ld_container]] + :type value: list[JSON_LD_VALUE | BASIC_TYPE | TIME_TYPE | ld_dcit | ld_list] :return: :rtype: None @@ -564,7 +558,7 @@ def extend(self: Self, value: list[Union[JSON_LD_VALUE, BASIC_TYPE, TIME_TYPE, l for item in value: self.append(item) - def to_python(self: Self) -> list[PYTHONIZED_LD_CONTAINER]: + def to_python(self: Self) -> list[Union[BASIC_TYPE, TIME_TYPE, PYTHONIZED_LD_CONTAINER]]: """ Return a fully pythonized version of this object where all ld_container are replaced by lists and dicts. @@ -572,7 +566,7 @@ def to_python(self: Self) -> list[PYTHONIZED_LD_CONTAINER]: :type self: ld_list :return: The fully pythonized version of self. - :rtype: list[PYTHONIZED_LD_CONTAINER] + :rtype: list[BASIC_TYPE | TIME_TYPE | PYTHONIZED_LD_CONTAINER] """ return [ item.to_python() if isinstance(item, ld_container) else item @@ -616,11 +610,11 @@ def from_list( cls: type[Self], value: list[Union[JSON_LD_VALUE, BASIC_TYPE, TIME_TYPE]], *, - parent: Union[ld_container, None] = None, + parent: Union[ld_dict, ld_list, None] = None, key: Union[str, None] = None, context: Union[str, JSON_LD_CONTEXT_DICT, list[Union[str, JSON_LD_CONTEXT_DICT]], None] = None, container_type: str = "@set" - ) -> "ld_list": + ) -> ld_list: """ Creates a ld_list from the given list with the given parent, key, context and container_type.
Note that only container_type '@set' is valid for key '@type'.
@@ -631,10 +625,10 @@ def from_list( :type value: list[JSON_LD_VALUE | BASIC_TYPE | TIME_TYPE] :param parent: The parent container of the new ld_list.
If value is assimilated by parent druing JSON-LD expansion parent is extended by value and parent is returned. - :type parent: ld_container | None + :type parent: ld_dict | ld_list | None :param key: The key into the inner most parent container representing a dict of the new ld_list. :type: key: str | None - :param context: The context for the new list (is will also inherit the context of parent).
+ :param context: The context for the new list (it will also inherit the context of parent).
Note that this context won't be added to parent if value is assimilated by parent and parent is returned. :type context: str | JSON_LD_CONTEXT_DICT | list[str | JSON_LD_CONTEXT_DICT] | None :param container_type: The container type of the new list valid are '@set', '@list' and '@graph'.
From 95288a2f68ab090605b25aa58e25c82dca9ecb24 Mon Sep 17 00:00:00 2001 From: Michael Fritzsche Date: Mon, 12 Jan 2026 10:59:59 +0100 Subject: [PATCH 176/247] added first e2e harvest test for SoftwareMetadata --- src/hermes/commands/__init__.py | 16 +++--- src/hermes/commands/base.py | 77 +++++++++----------------- src/hermes/commands/harvest/base.py | 44 +++++++-------- src/hermes/commands/harvest/cff.py | 33 +++++------ src/hermes/model/__init__.py | 4 ++ test/hermes_test/model/test_api_e2e.py | 56 +++++++++++++++++++ 6 files changed, 132 insertions(+), 98 deletions(-) diff --git a/src/hermes/commands/__init__.py b/src/hermes/commands/__init__.py index 5203ac18..14f77741 100644 --- a/src/hermes/commands/__init__.py +++ b/src/hermes/commands/__init__.py @@ -8,12 +8,12 @@ # "unused import" errors. # flake8: noqa -from hermes.commands.base import HermesHelpCommand -from hermes.commands.base import HermesVersionCommand -from hermes.commands.clean.base import HermesCleanCommand -from hermes.commands.init.base import HermesInitCommand -from hermes.commands.curate.base import HermesCurateCommand +# from hermes.commands.base import HermesHelpCommand +# from hermes.commands.base import HermesVersionCommand +# from hermes.commands.clean.base import HermesCleanCommand +# from hermes.commands.init.base import HermesInitCommand +# from hermes.commands.curate.base import HermesCurateCommand from hermes.commands.harvest.base import HermesHarvestCommand -from hermes.commands.process.base import HermesProcessCommand -from hermes.commands.deposit.base import HermesDepositCommand -from hermes.commands.postprocess.base import HermesPostprocessCommand +# from hermes.commands.process.base import HermesProcessCommand +# from hermes.commands.deposit.base import HermesDepositCommand +# from hermes.commands.postprocess.base import HermesPostprocessCommand diff --git a/src/hermes/commands/base.py b/src/hermes/commands/base.py index 3ae9030b..d64581de 100644 --- a/src/hermes/commands/base.py +++ b/src/hermes/commands/base.py @@ -9,19 +9,20 @@ import logging import pathlib from importlib import metadata -from typing import Dict, Optional, Type +from typing import Type, Union import toml from pydantic import BaseModel from pydantic_settings import BaseSettings, SettingsConfigDict -class _HermesSettings(BaseSettings): + +class HermesSettings(BaseSettings): """Root class for HERMES configuration model.""" model_config = SettingsConfigDict(env_file_encoding='utf-8') - logging: Dict = {} + logging: dict = {} class HermesCommand(abc.ABC): @@ -31,7 +32,7 @@ class HermesCommand(abc.ABC): """ command_name: str = "" - settings_class: Type = _HermesSettings + settings_class: Type = HermesSettings def __init__(self, parser: argparse.ArgumentParser): """Initialize a new instance of any HERMES command. @@ -45,28 +46,27 @@ def __init__(self, parser: argparse.ArgumentParser): self.log = logging.getLogger(f"hermes.{self.command_name}") self.errors = [] - @classmethod - def init_plugins(cls): + def init_plugins(self): """Collect and initialize the plugins available for the HERMES command.""" # Collect all entry points for this group (i.e., all valid plug-ins for the step) - entry_point_group = f"hermes.{cls.command_name}" - group_plugins = { - entry_point.name: entry_point.load() - for entry_point in metadata.entry_points(group=entry_point_group) - } - - # Collect the plug-in specific configurations - cls.derive_settings_class({ - plugin_name: plugin_class.settings_class - for plugin_name, plugin_class in group_plugins.items() - if hasattr(plugin_class, "settings_class") and plugin_class.settings_class is not None - }) + entry_point_group = f"hermes.{self.command_name}" + group_plugins = {} + group_settings = {} + + for entry_point in metadata.entry_points(group=entry_point_group): + plugin_cls = entry_point.load() + + group_plugins[entry_point.name] = plugin_cls + if hasattr(plugin_cls, 'settings_class') and plugin_cls.settings_class is not None: + group_settings[entry_point.name] = plugin_cls.settings_class + + self.derive_settings_class(group_settings) return group_plugins @classmethod - def derive_settings_class(cls, setting_types: Dict[str, Type]) -> None: + def derive_settings_class(cls, setting_types: dict[str, Type]) -> None: """Build a new Pydantic data model class for configuration. This will create a new class that includes all settings from the plugins available. @@ -131,13 +131,10 @@ def init_command_parser(self, command_parser: argparse.ArgumentParser) -> None: def load_settings(self, args: argparse.Namespace): """Load settings from the configuration file (passed in from command line).""" - try: - toml_data = toml.load(args.path / args.config) - self.root_settings = HermesCommand.settings_class.model_validate(toml_data) - self.settings = getattr(self.root_settings, self.command_name) - except FileNotFoundError as e: - self.log.error("hermes.toml was not found. Try to run 'hermes init' first or create one manually.") - raise e # This will lead to our default error message & sys.exit + + toml_data = toml.load(args.path / args.config) + self.root_settings = HermesCommand.settings_class.model_validate(toml_data) + self.settings = getattr(self.root_settings, self.command_name) def patch_settings(self, args: argparse.Namespace): """Process command line options for the settings.""" @@ -164,7 +161,9 @@ def __call__(self, args: argparse.Namespace): class HermesPlugin(abc.ABC): """Base class for all HERMES plugins.""" - settings_class: Optional[Type] = None + pluing_node = None + + settings_class: Union[Type, None] = None @abc.abstractmethod def __call__(self, command: HermesCommand) -> None: @@ -202,27 +201,3 @@ def __call__(self, args: argparse.Namespace) -> None: # Otherwise, simply show the general help and exit (cleanly). self.parser.print_help() self.parser.exit() - - def load_settings(self, args: argparse.Namespace): - """No settings are needed for the help command.""" - pass - - -class HermesVersionSettings(BaseModel): - """Intentionally empty settings class for the version command.""" - pass - - -class HermesVersionCommand(HermesCommand): - """Show HERMES version and exit.""" - - command_name = "version" - settings_class = HermesVersionSettings - - def load_settings(self, args: argparse.Namespace): - """Pass loading settings as not necessary for this command.""" - pass - - def __call__(self, args: argparse.Namespace) -> None: - self.log.info(metadata.version("hermes")) - self.parser.exit() diff --git a/src/hermes/commands/harvest/base.py b/src/hermes/commands/harvest/base.py index 59fad8f1..28a62301 100644 --- a/src/hermes/commands/harvest/base.py +++ b/src/hermes/commands/harvest/base.py @@ -5,14 +5,13 @@ # SPDX-FileContributor: Michael Meinel import argparse -import typing as t -from datetime import datetime from pydantic import BaseModel from hermes.commands.base import HermesCommand, HermesPlugin -from hermes.model.context import HermesContext, HermesHarvestContext -from hermes.model.error import HermesValidationError, HermesMergeError +from hermes.model.context_manager import HermesContext +from hermes.model.error import HermesValidationError +from hermes.model import SoftwareMetadata class HermesHarvestPlugin(HermesPlugin): @@ -21,11 +20,11 @@ class HermesHarvestPlugin(HermesPlugin): TODO: describe the harvesting process and how this is mapped to this plugin. """ - def __call__(self, command: HermesCommand) -> t.Tuple[t.Dict, t.Dict]: + def __call__(self, command: HermesCommand) -> tuple[SoftwareMetadata, dict]: pass -class _HarvestSettings(BaseModel): +class HarvestSettings(BaseModel): """Generic harvesting settings.""" sources: list[str] = [] @@ -35,32 +34,31 @@ class HermesHarvestCommand(HermesCommand): """ Harvest metadata from configured sources. """ command_name = "harvest" - settings_class = _HarvestSettings + settings_class = HarvestSettings def __call__(self, args: argparse.Namespace) -> None: self.args = args - ctx = HermesContext() # Initialize the harvest cache directory here to indicate the step ran - ctx.init_cache("harvest") + ctx = HermesContext() + ctx.prepare_step('harvest') for plugin_name in self.settings.sources: + plugin_cls = self.plugins[plugin_name] + try: - plugin_func = self.plugins[plugin_name]() - harvested_data, tags = plugin_func(self) - - with HermesHarvestContext(ctx, plugin_name) as harvest_ctx: - harvest_ctx.update_from(harvested_data, - plugin=plugin_name, - timestamp=datetime.now().isoformat(), **tags) - for _key, ((_value, _tag), *_trace) in harvest_ctx._data.items(): - if any(v != _value and t == _tag for v, t in _trace): - raise HermesMergeError(_key, None, _value) - - except KeyError as e: - self.log.error("Plugin '%s' not found.", plugin_name) - self.errors.append(e) + # Load plugin and run the harvester + plugin_func = plugin_cls() + harvested_data = plugin_func(self) + + with ctx[plugin_name] as plugin_ctx: + plugin_ctx["codemeta"] = harvested_data.compact() + plugin_ctx["context"] = {"@context": harvested_data.full_context} + + plugin_ctx["expanded"] = harvested_data.ld_value except HermesValidationError as e: self.log.error("Error while executing %s: %s", plugin_name, e) self.errors.append(e) + + ctx.finalize_step('harvest') diff --git a/src/hermes/commands/harvest/cff.py b/src/hermes/commands/harvest/cff.py index e333b27c..6c2b6594 100644 --- a/src/hermes/commands/harvest/cff.py +++ b/src/hermes/commands/harvest/cff.py @@ -9,16 +9,16 @@ import logging import pathlib import urllib.request -import typing as t from pydantic import BaseModel from ruamel.yaml import YAML import jsonschema from cffconvert import Citation +from typing import Any, Union -from hermes.model.context import ContextPath -from hermes.model.errors import HermesValidationError +from hermes.model.error import HermesValidationError from hermes.commands.harvest.base import HermesHarvestPlugin, HermesHarvestCommand +from hermes.model import SoftwareMetadata # TODO: should this be configurable via a CLI option? @@ -35,7 +35,7 @@ class CffHarvestSettings(BaseModel): class CffHarvestPlugin(HermesHarvestPlugin): settings_class = CffHarvestSettings - def __call__(self, command: HermesHarvestCommand) -> t.Tuple[t.Dict, t.Dict]: + def __call__(self, command: HermesHarvestCommand) -> tuple[SoftwareMetadata, dict]: # Get source files cff_file = self._get_single_cff(command.args.path) if not cff_file: @@ -44,23 +44,24 @@ def __call__(self, command: HermesHarvestCommand) -> t.Tuple[t.Dict, t.Dict]: # Read the content cff_data = cff_file.read_text() - - # Validate the content to be correct CFF cff_dict = self._load_cff_from_file(cff_data) - if command.settings.cff.enable_validation and not self._validate(cff_file, cff_dict): - raise HermesValidationError(cff_file) + if command.settings.cff.enable_validation: + # Validate the content to be correct CFF + if not self._validate(cff_file, cff_dict): + raise HermesValidationError(cff_file) # Convert to CodeMeta using cffconvert codemeta_dict = self._convert_cff_to_codemeta(cff_data) - # TODO Replace the following temp patch for #112 once there is a new cffconvert version with cffconvert#309 - codemeta_dict = self._patch_author_emails(cff_dict, codemeta_dict) if "version" in codemeta_dict: codemeta_dict["version"] = str(codemeta_dict["version"]) # Convert Version to string - return codemeta_dict, {'local_path': str(cff_file)} + # TODO Replace the following temp patch for #112 once there is a new cffconvert version with cffconvert#309 + codemeta_dict = self._patch_author_emails(cff_dict, codemeta_dict) + ld_codemeta = SoftwareMetadata(codemeta_dict, extra_vocabs={'legalName': {'@id': "http://schema.org/name"}}) + return ld_codemeta, {} - def _load_cff_from_file(self, cff_data: str) -> t.Any: + def _load_cff_from_file(self, cff_data: str) -> Any: yaml = YAML(typ='safe') yaml.constructor.yaml_constructors[u'tag:yaml.org,2002:timestamp'] = yaml.constructor.yaml_constructors[ u'tag:yaml.org,2002:str'] @@ -73,11 +74,11 @@ def _patch_author_emails(self, cff: dict, codemeta: dict) -> dict: codemeta["author"][i]["email"] = author["email"] return codemeta - def _convert_cff_to_codemeta(self, cff_data: str) -> t.Any: + def _convert_cff_to_codemeta(self, cff_data: str) -> Any: codemeta_str = Citation(cff_data).as_codemeta() return json.loads(codemeta_str) - def _validate(self, cff_file: pathlib.Path, cff_dict: t.Dict) -> bool: + def _validate(self, cff_file: pathlib.Path, cff_dict: dict) -> bool: audit_log = logging.getLogger('audit.cff') cff_schema_url = f'https://citation-file-format.github.io/{_CFF_VERSION}/schema.json' @@ -93,7 +94,7 @@ def _validate(self, cff_file: pathlib.Path, cff_dict: t.Dict) -> bool: audit_log.warning('!!! warning "%s is not valid according to <%s>"', cff_file, cff_schema_url) for error in errors: - path = ContextPath.make(error.absolute_path or ['root']) + path = error.absolute_path or ['root'] audit_log.info(' Invalid input for `%s`.', str(path)) audit_log.info(' !!! message "%s"', error.message) audit_log.debug(' !!! value "%s"', error.instance) @@ -108,7 +109,7 @@ def _validate(self, cff_file: pathlib.Path, cff_dict: t.Dict) -> bool: audit_log.info('- Found valid Citation File Format file at: %s', cff_file) return True - def _get_single_cff(self, path: pathlib.Path) -> t.Optional[pathlib.Path]: + def _get_single_cff(self, path: pathlib.Path) -> Union[pathlib.Path, None]: # Find CFF files in directories and subdirectories cff_file = path / 'CITATION.cff' if cff_file.exists(): diff --git a/src/hermes/model/__init__.py b/src/hermes/model/__init__.py index 4a4bca25..febdb0ff 100644 --- a/src/hermes/model/__init__.py +++ b/src/hermes/model/__init__.py @@ -2,4 +2,8 @@ # # SPDX-License-Identifier: Apache-2.0 +# This is an interface file that only provides a public interface, hence linter is disabled to avoid +# "unused import" errors. +# flake8: noqa + from hermes.model.api import SoftwareMetadata diff --git a/test/hermes_test/model/test_api_e2e.py b/test/hermes_test/model/test_api_e2e.py index e69de29b..11101722 100644 --- a/test/hermes_test/model/test_api_e2e.py +++ b/test/hermes_test/model/test_api_e2e.py @@ -0,0 +1,56 @@ +import pytest +from hermes.commands.harvest.cff import CffHarvestPlugin, CffHarvestSettings +from hermes.model import SoftwareMetadata + + +@pytest.mark.parametrize( + "cff, res", + [ + ( + """cff-version: 1.2.0 +title: Temp\nmessage: >- + If you use this software, please cite it using the + metadata from this file. +type: software +authors: + - given-names: Max + family-names: Mustermann + email: max@muster.mann""", + SoftwareMetadata({ + "@type": "SoftwareSourceCode", + "schema:author": { + "@list": [{ + "@type": "Person", + "email": ["max@muster.mann"], + "familyName": ["Mustermann"], + "givenName": ["Max"] + }] + }, + "schema:name": ["Temp"] + }) + ) + ] +) +def test_cff_harvest(tmp_path, cff, res): + class Args: + def __init__(self, path): + self.path = path + + class Settings: + def __init__(self, cff_settings): + self.cff = cff_settings + + class Command: + def __init__(self, args, settings): + self.args = args + self.settings = settings + + command = Command(Args(tmp_path), Settings(CffHarvestSettings())) + + cff_file = tmp_path / "CITATION.cff" + cff_file.write_text(cff) + + result = CffHarvestPlugin().__call__(command) + # FIXME: update to compare the SoftwareMetadata objects instead of the data_dicts + # after merge with refactor/data-model and/or refactor/423-implement-public-api + assert result[0].data_dict == res.data_dict From de457e3a11d4bca59bf6f304034654991b8e86a7 Mon Sep 17 00:00:00 2001 From: "Kernchen, Sophie" Date: Tue, 13 Jan 2026 11:32:41 +0100 Subject: [PATCH 177/247] Apply Style Changes (Author names instead of foo etc.) --- docs/source/dev/data_model.md | 56 +++++++++++++++++------------------ 1 file changed, 28 insertions(+), 28 deletions(-) diff --git a/docs/source/dev/data_model.md b/docs/source/dev/data_model.md index 8f008dc4..1a746a1e 100644 --- a/docs/source/dev/data_model.md +++ b/docs/source/dev/data_model.md @@ -112,13 +112,13 @@ from hermes.model import SoftwareMetadata # { # "@context": # { -# "baz": "https://schema.org/Thing" +# "name": "https://schema.org/name" # } # } data = SoftwareMetadata(extra_vocabs={"foo": "https://bar.net/schema.jsonld"}) -data["foo:baz"] = ... +data["foo:name"] = ... ``` ##### Adding data @@ -131,8 +131,8 @@ i.e., metadata that describes software: data["name"] = "My Research Software" # A simple "Text"-type value # → Simplified model representation : { "name": [ "My Research Software" ] } # Cf. "Accessing data" below -data["author"] = {"name": "Foo"} # An object value that uses terms available in the defined context -# → Simplified model representation : { "name": [ "My Research Software" ], "author": [ { "name": "Foo" } ] } +data["author"] = {"name": "Shakespeare"} # An object value that uses terms available in the defined context +# → Simplified model representation : { "name": [ "My Research Software" ], "author": [ { "name": "Shakespeare" } ] } # Cf. "Accessing data" below ``` @@ -153,14 +153,14 @@ will treat it as an array, i.e., a list-like object: ```{code-block} python :caption: Internal data values are arrays data["name"] = "My Research Software" # → [ "My Research Software" ] -data["author"] = {"name": "Foo"} # → [ { "name": [ "Foo" ] } ] +data["author"] = {"name": "Shakespeare"} # → [ { "name": [ "Shakespeare" ] } ] ``` Therefore, you access data in the same way you would access data from a Python `list`: 1. You access single values using indices, e.g., `data["name"][0]`. 2. You can use a list-like API to interact with data objects, e.g., -`data["name"].append("Bar")`, `data["name"].extend(["Bar", "Baz"])`, `for name in data["name"]: ...`, etc. +`data["name"].append("Hamilton")`, `data["name"].extend(["Hamilton", "Knuth"])`, `for name in data["name"]: ...`, etc. ##### Interacting with data @@ -176,22 +176,22 @@ data = SoftwareMetadata() # Let's create author metadata for our software! # Below each line of code, the value of `data["author"]` is given. -data["author"] = {"name": "Foo"} -# → [{'name': ['Foo']}] +data["author"] = {"name": "Shakespeare"} +# → [{'name': ['Shakespeare']}] -data["author"].append({"name": "Bar"}) -# [{'name': ['Foo']}, {'name': ['Bar']}] +data["author"].append({"name": "Hamilton"}) +# [{'name': ['Shakespeare']}, {'name': ['Hamilton']}] -data["author"][0]["email"] = "foo@baz.net" -# [{'name': ['Foo'], 'email': ['foo@baz.net']}, {'name': ['Bar']}] +data["author"][0]["email"] = "Shakespeare@baz.net" +# [{'name': ['Shakespeare'], 'email': ['shakespeare@baz.net']}, {'name': ['Hamilton']}] -data["author"][1]["email"].append("bar@baz.net") -# [{'name': ['Foo'], 'email': ['foo@baz.net']}, {'name': ['Bar'], 'email': ['bar@baz.net']}] +data["author"][1]["email"].append("Hamilton@baz.net") +# [{'name': ['Shakespeare'], 'email': ['shakespeare@baz.net']}, {'name': ['Hamilton'], 'email': ['hamilton@baz.net']}] -data["author"][1]["email"].extend(["bar@spam.org", "bar@eggs.com"]) +data["author"][1]["email"].extend(["hamilton@spam.org", "hamilton@eggs.com"]) # [ -# {'name': ['Foo'], 'email': ['foo@baz.net']}, -# {'name': ['Bar'], 'email': ['bar@baz.net', 'bar@spam.org', 'bar@eggs.com']} +# {'name': ['Shakespeare'], 'email': ['shakespeare@baz.net']}, +# {'name': ['Hamilton'], 'email': ['hamilton@baz.net', 'hamilton@spam.org', 'hamilton@eggs.com']} # ] ``` @@ -199,9 +199,9 @@ The example continues to show how to iterate through data. ```{code-block} python :caption: for-loop, containment check -for i, author in enumerate(data["author"]): - if author["name"][0] in ["Foo", "Bar"]: - print(f"Author {i + 1} has expected name.") +for i, author in enumerate(data["author"], start=1): + if author["name"][0] in ["Shakespeare", "Hamilton"]: + print(f"Author {i} has expected name.") else: raise ValueError("Unexpected author name found!", author["name"][0]) @@ -224,7 +224,7 @@ for email in data["author"][0]["email"]: ```{code-block} python :caption: Value check and list comprehension -if ["bar" in email for email in data["author"][1]["email"]]: +if all(["hamilton" in email for email in data["author"][1]["email"]]): print("Author has only emails with their name in it.") # Mock output @@ -248,7 +248,7 @@ Python data: :emphasize-lines: 5,13 try: assert ( - {'name': ['Foo'], 'email': ['foo@baz.net']} + {'name': ['Shakespeare'], 'email': ['shakespeare@baz.net']} in data["author"] ) @@ -261,19 +261,19 @@ except AssertionError: # $> The author could not be found. # $> AssertionError: # assert -# {'email': ['foo@baz.net'], 'name': ['Foo']} +# {'email': ['shakespeare@baz.net'], 'name': ['Shakespeare']} # in # _LDList( # {'@list': [ # { -# 'http://schema.org/name': [{'@value': 'Foo'}], -# 'http://schema.org/email': [{'@value': 'foo@baz.net'}] +# 'http://schema.org/name': [{'@value': 'Shakespeare'}], +# 'http://schema.org/email': [{'@value': 'shakespeare@baz.net'}] # }, # { -# 'http://schema.org/name': [{'@value': 'Bar'}], +# 'http://schema.org/name': [{'@value': 'Hamilton'}], # 'http://schema.org/email': [ # {'@list': [ -# {'@value': 'bar@baz.net'}, {'@value': 'bar@spam.org'}, {'@value': 'bar@eggs.com'} +# {'@value': 'hamilton@baz.net'}, {'@value': 'hamilton@spam.org'}, {'@value': 'hamilton@eggs.com'} # ]} # ] # }] @@ -294,7 +294,7 @@ This function can be used in assertions to assert full data integrity: :emphasize-lines: 5,13 try: assert ( - {'name': ['Foo'], 'email': ['foo@baz.net']} + {'name': ['Shakespeare'], 'email': ['Shakespeare@baz.net']} in data["author"].to_python() ) From e9d010f0fabb1e87c74727d72f9080cfa0ab3fb8 Mon Sep 17 00:00:00 2001 From: "Kernchen, Sophie" Date: Tue, 13 Jan 2026 11:47:47 +0100 Subject: [PATCH 178/247] Correct lower letters for emails --- docs/source/dev/data_model.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/source/dev/data_model.md b/docs/source/dev/data_model.md index 1a746a1e..e788fd44 100644 --- a/docs/source/dev/data_model.md +++ b/docs/source/dev/data_model.md @@ -182,10 +182,10 @@ data["author"] = {"name": "Shakespeare"} data["author"].append({"name": "Hamilton"}) # [{'name': ['Shakespeare']}, {'name': ['Hamilton']}] -data["author"][0]["email"] = "Shakespeare@baz.net" +data["author"][0]["email"] = "shakespeare@baz.net" # [{'name': ['Shakespeare'], 'email': ['shakespeare@baz.net']}, {'name': ['Hamilton']}] -data["author"][1]["email"].append("Hamilton@baz.net") +data["author"][1]["email"].append("hamilton@baz.net") # [{'name': ['Shakespeare'], 'email': ['shakespeare@baz.net']}, {'name': ['Hamilton'], 'email': ['hamilton@baz.net']}] data["author"][1]["email"].extend(["hamilton@spam.org", "hamilton@eggs.com"]) @@ -294,7 +294,7 @@ This function can be used in assertions to assert full data integrity: :emphasize-lines: 5,13 try: assert ( - {'name': ['Shakespeare'], 'email': ['Shakespeare@baz.net']} + {'name': ['Shakespeare'], 'email': ['shakespeare@baz.net']} in data["author"].to_python() ) From 605201d396a045a1880786e7e56f0501d0c05447 Mon Sep 17 00:00:00 2001 From: notactuallyfinn Date: Fri, 16 Jan 2026 10:54:09 +0100 Subject: [PATCH 179/247] fixed small bug in set_item of ld_dict --- src/hermes/model/types/ld_dict.py | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/src/hermes/model/types/ld_dict.py b/src/hermes/model/types/ld_dict.py index ba29838c..a6b9de09 100644 --- a/src/hermes/model/types/ld_dict.py +++ b/src/hermes/model/types/ld_dict.py @@ -110,14 +110,12 @@ def __setitem__(self: Self, key: str, value: Union[JSON_LD_VALUE, BASIC_TYPE, TI :return: :rtype: None """ - # expand key - full_iri = self.ld_proc.expand_iri(self.active_ctx, key) # if the value is None delete the entry instead of updating it if value is None: - del self[full_iri] + del self[self.ld_proc.expand_iri(self.active_ctx, key)] return - # expand the value and replace the key, value pair - ld_value = self._to_expanded_json({full_iri: value}) + # expand the key, value pair and update data_dict + ld_value = self._to_expanded_json({key: value}) self.data_dict.update(ld_value) def __delitem__(self: Self, key: str) -> None: From 4920090d2db1793ccedd6fab6b710ed3ba1a24ee Mon Sep 17 00:00:00 2001 From: notactuallyfinn Date: Fri, 16 Jan 2026 10:58:24 +0100 Subject: [PATCH 180/247] added more tests --- src/hermes/commands/harvest/codemeta.py | 11 ++- test/hermes_test/model/test_api_e2e.py | 122 +++++++++++++++++++++++- 2 files changed, 125 insertions(+), 8 deletions(-) diff --git a/src/hermes/commands/harvest/codemeta.py b/src/hermes/commands/harvest/codemeta.py index b75bb002..5f211222 100644 --- a/src/hermes/commands/harvest/codemeta.py +++ b/src/hermes/commands/harvest/codemeta.py @@ -8,15 +8,16 @@ import glob import json import pathlib -import typing as t +from typing import Union from hermes.commands.harvest.base import HermesHarvestCommand, HermesHarvestPlugin from hermes.commands.harvest.util.validate_codemeta import validate_codemeta -from hermes.model.errors import HermesValidationError +from hermes.model.error import HermesValidationError +from hermes.model import SoftwareMetadata class CodeMetaHarvestPlugin(HermesHarvestPlugin): - def __call__(self, command: HermesHarvestCommand) -> t.Tuple[t.Dict, t.Dict]: + def __call__(self, command: HermesHarvestCommand) -> tuple[SoftwareMetadata, dict]: """ Implementation of a harvester that provides data from a codemeta.json file format. @@ -39,7 +40,7 @@ def __call__(self, command: HermesHarvestCommand) -> t.Tuple[t.Dict, t.Dict]: raise HermesValidationError(codemeta_file) codemeta = json.loads(codemeta_str) - return codemeta, {'local_path': str(codemeta_file)} + return SoftwareMetadata(codemeta), {'local_path': str(codemeta_file)} def _validate(self, codemeta_file: pathlib.Path) -> bool: with open(codemeta_file, "r") as fi: @@ -55,7 +56,7 @@ def _validate(self, codemeta_file: pathlib.Path) -> bool: return True - def _get_single_codemeta(self, path: pathlib.Path) -> t.Optional[pathlib.Path]: + def _get_single_codemeta(self, path: pathlib.Path) -> Union[pathlib.Path, None]: # Find CodeMeta files in directories and subdirectories # TODO: Do we really want to search recursive? Maybe add another option to enable pointing to a single file? # (So this stays "convention over configuration") diff --git a/test/hermes_test/model/test_api_e2e.py b/test/hermes_test/model/test_api_e2e.py index 11101722..67b40f7b 100644 --- a/test/hermes_test/model/test_api_e2e.py +++ b/test/hermes_test/model/test_api_e2e.py @@ -1,5 +1,12 @@ +# SPDX-FileCopyrightText: 2026 German Aerospace Center (DLR) +# +# SPDX-License-Identifier: Apache-2.0 + +# SPDX-FileContributor: Michael Fritzsche + import pytest from hermes.commands.harvest.cff import CffHarvestPlugin, CffHarvestSettings +from hermes.commands.harvest.codemeta import CodeMetaHarvestPlugin from hermes.model import SoftwareMetadata @@ -18,7 +25,7 @@ email: max@muster.mann""", SoftwareMetadata({ "@type": "SoftwareSourceCode", - "schema:author": { + "author": { "@list": [{ "@type": "Person", "email": ["max@muster.mann"], @@ -26,7 +33,80 @@ "givenName": ["Max"] }] }, - "schema:name": ["Temp"] + "name": ["Temp"] + }) + ), + ( + """# SPDX-FileCopyrightText: 2022 German Aerospace Center (DLR), Helmholtz-Zentrum Dresden-Rossendorf +# +# SPDX-License-Identifier: CC0-1.0 + +# SPDX-FileContributor: Michael Meinel + +cff-version: 1.2.0 +title: hermes +message: >- + If you use this software, please cite it using the + metadata from this file. +version: 0.9.0 +license: "Apache-2.0" +abstract: "Tool to automate software publication. Not stable yet." +type: software +authors: + - given-names: Michael + family-names: Meinel + email: michael.meinel@dlr.de + affiliation: German Aerospace Center (DLR) + orcid: "https://orcid.org/0000-0001-6372-3853" + - given-names: Stephan + family-names: Druskat + email: stephan.druskat@dlr.de + affiliation: German Aerospace Center (DLR) + orcid: "https://orcid.org/0000-0003-4925-7248" +identifiers: + - type: doi + value: 10.5281/zenodo.13221384 + description: Version 0.8.1b1 +""", + SoftwareMetadata({ + "@type": ["http://schema.org/SoftwareSourceCode"], + "http://schema.org/author": [ + { + "@list": [ + { + "@id": "https://orcid.org/0000-0001-6372-3853", + "@type": ["http://schema.org/Person"], + "http://schema.org/affiliation": [ + { + "@type": ["http://schema.org/Organization"], + "http://schema.org/name": [{"@value": "German Aerospace Center (DLR)"}] + } + ], + "http://schema.org/email": [{"@value": "michael.meinel@dlr.de"}], + "http://schema.org/familyName": [{"@value": "Meinel"}], + "http://schema.org/givenName": [{"@value": "Michael"}] + }, + { + "@id": "https://orcid.org/0000-0003-4925-7248", + "@type": ["http://schema.org/Person"], + "http://schema.org/affiliation": [ + { + "@type": ["http://schema.org/Organization"], + "http://schema.org/name": [{"@value": "German Aerospace Center (DLR)"}] + } + ], + "http://schema.org/email": [{"@value": "stephan.druskat@dlr.de"}], + "http://schema.org/familyName": [{"@value": "Druskat"}], + "http://schema.org/givenName": [{"@value": "Stephan"}] + } + ] + } + ], + "http://schema.org/description": [{"@value": "Tool to automate software publication. Not stable yet."}], + "http://schema.org/identifier": [{"@id": "https://doi.org/10.5281/zenodo.13221384"}], + "http://schema.org/license": [{"@id": "https://spdx.org/licenses/Apache-2.0"}], + "http://schema.org/name": [{"@value": "hermes"}], + "http://schema.org/version": [{"@value": "0.9.0"}] }) ) ] @@ -51,6 +131,42 @@ def __init__(self, args, settings): cff_file.write_text(cff) result = CffHarvestPlugin().__call__(command) - # FIXME: update to compare the SoftwareMetadata objects instead of the data_dicts + # FIXME: update to compare the SoftwareMetadata objects instead of the data_dicts (in multiple places) # after merge with refactor/data-model and/or refactor/423-implement-public-api assert result[0].data_dict == res.data_dict + + +@pytest.mark.parametrize( + "codemeta, res", + [ + ( + """{ + "@context": "https://doi.org/10.5063/schema/codemeta-2.0", + "type": "SoftwareSourceCode", + "description": "for testing", + "name": "Test" +}""", + SoftwareMetadata({ + "@type": ["http://schema.org/SoftwareSourceCode"], + "http://schema.org/description": [{"@value": "for testing"}], + "http://schema.org/name": [{"@value": "Test"}] + }) + ) + ] +) +def test_codemeta_harvest(tmp_path, codemeta, res): + class Args: + def __init__(self, path): + self.path = path + + class Command: + def __init__(self, args): + self.args = args + + command = Command(Args(tmp_path)) + + codemeta_file = tmp_path / "codemeta.json" + codemeta_file.write_text(codemeta) + + result = CodeMetaHarvestPlugin().__call__(command) + assert result[0].data_dict == res.data_dict From 38ef40e67e1a0dc0ab031eab7d28eab2dec0a49a Mon Sep 17 00:00:00 2001 From: notactuallyfinn Date: Fri, 16 Jan 2026 14:01:15 +0100 Subject: [PATCH 181/247] refactored end to end tests --- src/hermes/commands/base.py | 1 - src/hermes/commands/cli.py | 24 +-- src/hermes/commands/harvest/base.py | 6 +- test/hermes_test/model/test_api_e2e.py | 249 +++++++++++++++++++++---- 4 files changed, 232 insertions(+), 48 deletions(-) diff --git a/src/hermes/commands/base.py b/src/hermes/commands/base.py index d64581de..2d182267 100644 --- a/src/hermes/commands/base.py +++ b/src/hermes/commands/base.py @@ -16,7 +16,6 @@ from pydantic_settings import BaseSettings, SettingsConfigDict - class HermesSettings(BaseSettings): """Root class for HERMES configuration model.""" diff --git a/src/hermes/commands/cli.py b/src/hermes/commands/cli.py index 06a18ca7..db109a5e 100644 --- a/src/hermes/commands/cli.py +++ b/src/hermes/commands/cli.py @@ -12,9 +12,11 @@ import sys from hermes import logger -from hermes.commands import (HermesHelpCommand, HermesVersionCommand, HermesCleanCommand, - HermesHarvestCommand, HermesProcessCommand, HermesCurateCommand, - HermesDepositCommand, HermesPostprocessCommand, HermesInitCommand) +# FIXME: remove comments after new implementation of modules is available +# from hermes.commands import (HermesHelpCommand, HermesVersionCommand, HermesCleanCommand, +# HermesHarvestCommand, HermesProcessCommand, HermesCurateCommand, +# HermesDepositCommand, HermesPostprocessCommand, HermesInitCommand) +from hermes.commands import HermesHarvestCommand from hermes.commands.base import HermesCommand @@ -36,15 +38,15 @@ def main() -> None: setting_types = {} for command in ( - HermesHelpCommand(parser), - HermesVersionCommand(parser), - HermesInitCommand(parser), - HermesCleanCommand(parser), + # HermesHelpCommand(parser), + # HermesVersionCommand(parser), + # HermesInitCommand(parser), + # HermesCleanCommand(parser), HermesHarvestCommand(parser), - HermesProcessCommand(parser), - HermesCurateCommand(parser), - HermesDepositCommand(parser), - HermesPostprocessCommand(parser), + # HermesProcessCommand(parser), + # HermesCurateCommand(parser), + # HermesDepositCommand(parser), + # HermesPostprocessCommand(parser), ): if command.settings_class is not None: setting_types[command.command_name] = command.settings_class diff --git a/src/hermes/commands/harvest/base.py b/src/hermes/commands/harvest/base.py index 28a62301..19ccc623 100644 --- a/src/hermes/commands/harvest/base.py +++ b/src/hermes/commands/harvest/base.py @@ -52,10 +52,10 @@ def __call__(self, args: argparse.Namespace) -> None: harvested_data = plugin_func(self) with ctx[plugin_name] as plugin_ctx: - plugin_ctx["codemeta"] = harvested_data.compact() - plugin_ctx["context"] = {"@context": harvested_data.full_context} + plugin_ctx["codemeta"] = harvested_data[0].compact() + plugin_ctx["context"] = {"@context": harvested_data[0].full_context} - plugin_ctx["expanded"] = harvested_data.ld_value + plugin_ctx["expanded"] = harvested_data[0].ld_value except HermesValidationError as e: self.log.error("Error while executing %s: %s", plugin_name, e) diff --git a/test/hermes_test/model/test_api_e2e.py b/test/hermes_test/model/test_api_e2e.py index 67b40f7b..650747e0 100644 --- a/test/hermes_test/model/test_api_e2e.py +++ b/test/hermes_test/model/test_api_e2e.py @@ -5,9 +5,9 @@ # SPDX-FileContributor: Michael Fritzsche import pytest -from hermes.commands.harvest.cff import CffHarvestPlugin, CffHarvestSettings -from hermes.commands.harvest.codemeta import CodeMetaHarvestPlugin -from hermes.model import SoftwareMetadata +import sys +from hermes.model import context_manager, SoftwareMetadata +from hermes.commands import cli @pytest.mark.parametrize( @@ -108,32 +108,93 @@ "http://schema.org/name": [{"@value": "hermes"}], "http://schema.org/version": [{"@value": "0.9.0"}] }) + ), + ( + """cff-version: 1.2.0 +title: Test +message: None +type: software +authors: + - given-names: Test + family-names: Testi + email: test.testi@test.testi + affiliation: German Aerospace Center (DLR) +identifiers: + - type: url + value: "https://arxiv.org/abs/2201.09015" + - type: doi + value: 10.5281/zenodo.13221384 +repository-code: "https://github.com/softwarepub/hermes" +abstract: for testing +url: "https://docs.software-metadata.pub/en/latest" +keywords: + - testing + - more testing +license: Apache-2.0 +version: 9.0.1 +date-released: "2026-01-16" """, + SoftwareMetadata({ + "@type": ["http://schema.org/SoftwareSourceCode"], + "http://schema.org/author": [ + { + "@list": [ + { + "@type": ["http://schema.org/Person"], + "http://schema.org/affiliation": [ + { + "@type": ["http://schema.org/Organization"], + "http://schema.org/name": [{"@value": "German Aerospace Center (DLR)"}] + } + ], + "http://schema.org/email": [{"@value": "test.testi@test.testi"}], + "http://schema.org/familyName": [{"@value": "Testi"}], + "http://schema.org/givenName": [{"@value": "Test"}] + } + ] + } + ], + "http://schema.org/codeRepository": [{"@id": "https://github.com/softwarepub/hermes"}], + "http://schema.org/datePublished": [{"@type": "http://schema.org/Date", "@value": "2026-01-16"}], + "http://schema.org/description": [{"@value": "for testing"}], + "http://schema.org/identifier": [{"@id": "https://doi.org/10.5281/zenodo.13221384"}], + "http://schema.org/keywords": [{"@value": "testing"}, {"@value": "more testing"}], + "http://schema.org/license": [{"@id": "https://spdx.org/licenses/Apache-2.0"}], + "http://schema.org/name": [{"@value": "Test"}], + "http://schema.org/url": [ + {"@id": 'https://arxiv.org/abs/2201.09015'}, + {"@id": "https://docs.software-metadata.pub/en/latest"} + ], + "http://schema.org/version": [{"@value": "9.0.1"}] + }) ) ] ) -def test_cff_harvest(tmp_path, cff, res): - class Args: - def __init__(self, path): - self.path = path - - class Settings: - def __init__(self, cff_settings): - self.cff = cff_settings - - class Command: - def __init__(self, args, settings): - self.args = args - self.settings = settings - - command = Command(Args(tmp_path), Settings(CffHarvestSettings())) - +def test_cff_harvest(tmp_path, monkeypatch, cff, res): + monkeypatch.chdir(tmp_path) cff_file = tmp_path / "CITATION.cff" cff_file.write_text(cff) - result = CffHarvestPlugin().__call__(command) + config_file = tmp_path / "hermes.toml" + config_file.write_text("[harvest]\nsources = [ \"cff\" ]") + + orig_argv = sys.argv[:] + sys.argv = ["hermes", "harvest", "--path", str(tmp_path), "--config", str(config_file)] + result = {} + try: + monkeypatch.setattr(context_manager.HermesContext.__init__, "__defaults__", (tmp_path.cwd(),)) + cli.main() + except SystemExit: + manager = context_manager.HermesContext() + manager.prepare_step("harvest") + with manager["cff"] as cache: + result = SoftwareMetadata(cache["expanded"][0], cache["context"]["@context"][1]) + manager.finalize_step("harvest") + finally: + sys.argv = orig_argv + # FIXME: update to compare the SoftwareMetadata objects instead of the data_dicts (in multiple places) # after merge with refactor/data-model and/or refactor/423-implement-public-api - assert result[0].data_dict == res.data_dict + assert result.data_dict == res.data_dict @pytest.mark.parametrize( @@ -151,22 +212,144 @@ def __init__(self, args, settings): "http://schema.org/description": [{"@value": "for testing"}], "http://schema.org/name": [{"@value": "Test"}] }) + ), + ( + """{ + "@context": "https://doi.org/10.5063/schema/codemeta-2.0", + "type": "SoftwareSourceCode", + "applicationCategory": "Testing", + "author": [ + { + "id": "_:author_1", + "type": "Person", + "email": "test.testi@test.testi", + "familyName": "Testi", + "givenName": "Test" + } + ], + "codeRepository": "https://github.com/softwarepub/hermes", + "contributor": { + "id": "_:contributor_1", + "type": "Person", + "email": "test.testi@test.testi", + "familyName": "Testi", + "givenName": "Test" + }, + "dateCreated": "2026-01-16", + "dateModified": "2026-01-16", + "datePublished": "2026-01-16", + "description": "for testing", + "funder": { + "type": "Organization", + "name": "TestsTests" + }, + "keywords": [ + "testing", + "more testing" + ], + "license": [ + "https://spdx.org/licenses/Adobe-2006", + "https://spdx.org/licenses/Abstyles", + "https://spdx.org/licenses/AGPL-1.0-only" + ], + "name": "Test", + "operatingSystem": "Windows", + "programmingLanguage": [ + "Python", + "Python 3" + ], + "relatedLink": "https://docs.software-metadata.pub/en/latest", + "schema:releaseNotes": "get it now", + "version": "1.1.1", + "developmentStatus": "abandoned", + "funding": "none :(", + "codemeta:isSourceCodeOf": { + "id": "HERMES" + }, + "issueTracker": "https://github.com/softwarepub/hermes/issues", + "referencePublication": "https://arxiv.org/abs/2201.09015" +}""", + SoftwareMetadata({ + "@type": ["http://schema.org/SoftwareSourceCode"], + "http://schema.org/applicationCategory": [{"@id": "Testing"}], + "http://schema.org/author": [ + { + "@list": [ + { + "@id": "_:author_1", + "@type": ["http://schema.org/Person"], + "http://schema.org/email": [{"@value": "test.testi@test.testi"}], + "http://schema.org/familyName": [{"@value": "Testi"}], + "http://schema.org/givenName": [{"@value": "Test"}] + } + ] + } + ], + "http://schema.org/codeRepository": [{"@id": "https://github.com/softwarepub/hermes"}], + "http://schema.org/contributor": [ + { + "@id": "_:contributor_1", + "@type": ["http://schema.org/Person"], + "http://schema.org/email": [{"@value": "test.testi@test.testi"}], + "http://schema.org/familyName": [{"@value": "Testi"}], + "http://schema.org/givenName": [{"@value": "Test"}] + } + ], + "http://schema.org/dateCreated": [{"@type": "http://schema.org/Date", "@value": "2026-01-16"}], + "http://schema.org/dateModified": [{"@type": "http://schema.org/Date", "@value": "2026-01-16"}], + "http://schema.org/datePublished": [{"@type": "http://schema.org/Date", "@value": "2026-01-16"}], + "http://schema.org/description": [{"@value": "for testing"}], + "http://schema.org/funder": [ + { + "@type": ["http://schema.org/Organization"], + "http://schema.org/name": [{"@value": "TestsTests"}] + } + ], + "http://schema.org/keywords": [{"@value": "testing"}, {"@value": "more testing"}], + "http://schema.org/license": [ + {"@id": "https://spdx.org/licenses/Adobe-2006"}, + {"@id": "https://spdx.org/licenses/Abstyles"}, + {"@id": "https://spdx.org/licenses/AGPL-1.0-only"} + ], + "http://schema.org/name": [{"@value": "Test"}], + "http://schema.org/operatingSystem": [{"@value": "Windows"}], + "http://schema.org/programmingLanguage": [{"@value": "Python"}, {"@value": "Python 3"}], + "http://schema.org/relatedLink": [{"@id": "https://docs.software-metadata.pub/en/latest"}], + "http://schema.org/releaseNotes": [{"@value": "get it now"}], + "http://schema.org/version": [{"@value": "1.1.1"}], + "https://codemeta.github.io/terms/developmentStatus": [{"@id": "abandoned"}], + "https://codemeta.github.io/terms/funding": [{"@value": "none :("}], + "https://codemeta.github.io/terms/isSourceCodeOf": [{"@id": "HERMES"}], + "https://codemeta.github.io/terms/issueTracker": [ + {"@id": "https://github.com/softwarepub/hermes/issues"} + ], + "https://codemeta.github.io/terms/referencePublication": [{"@id": "https://arxiv.org/abs/2201.09015"}] + }) ) ] ) -def test_codemeta_harvest(tmp_path, codemeta, res): - class Args: - def __init__(self, path): - self.path = path - - class Command: - def __init__(self, args): - self.args = args - - command = Command(Args(tmp_path)) +def test_codemeta_harvest(tmp_path, monkeypatch, codemeta, res): + monkeypatch.chdir(tmp_path) codemeta_file = tmp_path / "codemeta.json" codemeta_file.write_text(codemeta) - result = CodeMetaHarvestPlugin().__call__(command) - assert result[0].data_dict == res.data_dict + config_file = tmp_path / "hermes.toml" + config_file.write_text("[harvest]\nsources = [ \"codemeta\" ]") + + orig_argv = sys.argv[:] + sys.argv = ["hermes", "harvest", "--path", str(tmp_path), "--config", str(config_file)] + result = {} + try: + monkeypatch.setattr(context_manager.HermesContext.__init__, "__defaults__", (tmp_path.cwd(),)) + cli.main() + except SystemExit: + manager = context_manager.HermesContext() + manager.prepare_step("harvest") + with manager["codemeta"] as cache: + result = SoftwareMetadata(cache["expanded"][0], cache["context"]["@context"][1]) + manager.finalize_step("harvest") + finally: + sys.argv = orig_argv + + assert result.data_dict == res.data_dict From eb6f587126b42f343f75a545655cc414a8c27b54 Mon Sep 17 00:00:00 2001 From: "Kernchen, Sophie" Date: Fri, 16 Jan 2026 15:48:48 +0100 Subject: [PATCH 182/247] Fix compact_iri for schema elements with containers --- src/hermes/model/types/pyld_util.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/src/hermes/model/types/pyld_util.py b/src/hermes/model/types/pyld_util.py index 3cf9862b..10b9b7c5 100644 --- a/src/hermes/model/types/pyld_util.py +++ b/src/hermes/model/types/pyld_util.py @@ -103,8 +103,12 @@ class JsonLdProcessor(jsonld.JsonLdProcessor): def expand_iri(self, active_ctx: t.Any, short_iri: str, vocab: bool = True) -> str: return self._expand_iri(active_ctx, short_iri, vocab=vocab) - def compact_iri(self, active_ctx: t.Any, long_iri: str, vocab: bool = True) -> str: - return self._compact_iri(active_ctx, long_iri, vocab=vocab) + def compact_iri(self, active_ctx: t.Any, long_iri: str, vocab: bool = True, value: list = None) -> str: + # print(active_ctx, long_iri, vocab) + for mapping in active_ctx["mappings"].values(): + if ("@container" in mapping and long_iri): + value = {x: "none" for x in mapping["@container"]} + return self._compact_iri(active_ctx, long_iri, vocab=vocab, value=value) def initial_ctx(self, local_ctx, options=None): return self.process_context(self._INITIAL_CONTEXT, local_ctx, options or {}) From ddcd26a44777c3d6d8ab58afa3179fb0101ced3c Mon Sep 17 00:00:00 2001 From: Michael Fritzsche Date: Mon, 19 Jan 2026 08:31:36 +0100 Subject: [PATCH 183/247] updated creation of SoftwareMetadata objects in e2e tests --- test/hermes_test/model/test_api_e2e.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/test/hermes_test/model/test_api_e2e.py b/test/hermes_test/model/test_api_e2e.py index 650747e0..f4ec7fd6 100644 --- a/test/hermes_test/model/test_api_e2e.py +++ b/test/hermes_test/model/test_api_e2e.py @@ -187,7 +187,7 @@ def test_cff_harvest(tmp_path, monkeypatch, cff, res): manager = context_manager.HermesContext() manager.prepare_step("harvest") with manager["cff"] as cache: - result = SoftwareMetadata(cache["expanded"][0], cache["context"]["@context"][1]) + result = SoftwareMetadata(cache["codemeta"]) manager.finalize_step("harvest") finally: sys.argv = orig_argv @@ -347,7 +347,7 @@ def test_codemeta_harvest(tmp_path, monkeypatch, codemeta, res): manager = context_manager.HermesContext() manager.prepare_step("harvest") with manager["codemeta"] as cache: - result = SoftwareMetadata(cache["expanded"][0], cache["context"]["@context"][1]) + result = SoftwareMetadata(cache["codemeta"]) manager.finalize_step("harvest") finally: sys.argv = orig_argv From 9b44b53f4b2948f898b22b3738f7933e2b47dc1e Mon Sep 17 00:00:00 2001 From: Michael Fritzsche Date: Mon, 19 Jan 2026 09:50:50 +0100 Subject: [PATCH 184/247] added tests and support for curate step --- src/hermes/commands/__init__.py | 2 +- src/hermes/commands/cli.py | 4 +- src/hermes/commands/curate/base.py | 38 +++++---- test/hermes_test/model/test_api_e2e.py | 111 +++++++++++++++++++++++-- 4 files changed, 131 insertions(+), 24 deletions(-) diff --git a/src/hermes/commands/__init__.py b/src/hermes/commands/__init__.py index 14f77741..d2116ef2 100644 --- a/src/hermes/commands/__init__.py +++ b/src/hermes/commands/__init__.py @@ -12,7 +12,7 @@ # from hermes.commands.base import HermesVersionCommand # from hermes.commands.clean.base import HermesCleanCommand # from hermes.commands.init.base import HermesInitCommand -# from hermes.commands.curate.base import HermesCurateCommand +from hermes.commands.curate.base import HermesCurateCommand from hermes.commands.harvest.base import HermesHarvestCommand # from hermes.commands.process.base import HermesProcessCommand # from hermes.commands.deposit.base import HermesDepositCommand diff --git a/src/hermes/commands/cli.py b/src/hermes/commands/cli.py index db109a5e..565381fc 100644 --- a/src/hermes/commands/cli.py +++ b/src/hermes/commands/cli.py @@ -16,7 +16,7 @@ # from hermes.commands import (HermesHelpCommand, HermesVersionCommand, HermesCleanCommand, # HermesHarvestCommand, HermesProcessCommand, HermesCurateCommand, # HermesDepositCommand, HermesPostprocessCommand, HermesInitCommand) -from hermes.commands import HermesHarvestCommand +from hermes.commands import HermesCurateCommand, HermesHarvestCommand from hermes.commands.base import HermesCommand @@ -44,7 +44,7 @@ def main() -> None: # HermesCleanCommand(parser), HermesHarvestCommand(parser), # HermesProcessCommand(parser), - # HermesCurateCommand(parser), + HermesCurateCommand(parser), # HermesDepositCommand(parser), # HermesPostprocessCommand(parser), ): diff --git a/src/hermes/commands/curate/base.py b/src/hermes/commands/curate/base.py index 4c990bc7..15d7c8db 100644 --- a/src/hermes/commands/curate/base.py +++ b/src/hermes/commands/curate/base.py @@ -5,17 +5,16 @@ # SPDX-FileContributor: Michael Meinel import argparse -import os -import shutil -import sys from pydantic import BaseModel from hermes.commands.base import HermesCommand -from hermes.model.context import CodeMetaContext +from hermes.model import SoftwareMetadata +from hermes.model.context_manager import HermesContext +from hermes.model.error import HermesValidationError -class _CurateSettings(BaseModel): +class CurateSettings(BaseModel): """Generic deposition settings.""" pass @@ -25,23 +24,30 @@ class HermesCurateCommand(HermesCommand): """ Curate the unified metadata before deposition. """ command_name = "curate" - settings_class = _CurateSettings + settings_class = CurateSettings def init_command_parser(self, command_parser: argparse.ArgumentParser) -> None: pass def __call__(self, args: argparse.Namespace) -> None: - self.log.info("# Metadata curation") - ctx = CodeMetaContext() - process_output = ctx.hermes_dir / 'process' / (ctx.hermes_name + ".json") + ctx = HermesContext() + ctx.prepare_step("curate") + + ctx.prepare_step("process") + with ctx["result"] as process_ctx: + expanded_data = process_ctx["expanded"] + context_data = process_ctx["context"] + ctx.finalize_step("process") + + try: + data = SoftwareMetadata(expanded_data[0], context_data["@context"][1]) + except Exception as e: + raise HermesValidationError("The results of the process step are invalid.") from e - if not process_output.is_file(): - self.log.error( - "No processed metadata found. Please run `hermes process` before curation." - ) - sys.exit(1) + with ctx["result"] as curate_ctx: + curate_ctx["expanded"] = data.ld_value + curate_ctx["context"] = {"@context": data.full_context} - os.makedirs(ctx.hermes_dir / 'curate', exist_ok=True) - shutil.copy(process_output, ctx.hermes_dir / 'curate' / (ctx.hermes_name + '.json')) + ctx.finalize_step("curate") diff --git a/test/hermes_test/model/test_api_e2e.py b/test/hermes_test/model/test_api_e2e.py index f4ec7fd6..3e43073d 100644 --- a/test/hermes_test/model/test_api_e2e.py +++ b/test/hermes_test/model/test_api_e2e.py @@ -6,7 +6,8 @@ import pytest import sys -from hermes.model import context_manager, SoftwareMetadata +from hermes.model import SoftwareMetadata +from hermes.model.context_manager import HermesContext from hermes.commands import cli @@ -181,10 +182,10 @@ def test_cff_harvest(tmp_path, monkeypatch, cff, res): sys.argv = ["hermes", "harvest", "--path", str(tmp_path), "--config", str(config_file)] result = {} try: - monkeypatch.setattr(context_manager.HermesContext.__init__, "__defaults__", (tmp_path.cwd(),)) + monkeypatch.setattr(HermesContext.__init__, "__defaults__", (tmp_path.cwd(),)) cli.main() except SystemExit: - manager = context_manager.HermesContext() + manager = HermesContext() manager.prepare_step("harvest") with manager["cff"] as cache: result = SoftwareMetadata(cache["codemeta"]) @@ -341,10 +342,10 @@ def test_codemeta_harvest(tmp_path, monkeypatch, codemeta, res): sys.argv = ["hermes", "harvest", "--path", str(tmp_path), "--config", str(config_file)] result = {} try: - monkeypatch.setattr(context_manager.HermesContext.__init__, "__defaults__", (tmp_path.cwd(),)) + monkeypatch.setattr(HermesContext.__init__, "__defaults__", (tmp_path.cwd(),)) cli.main() except SystemExit: - manager = context_manager.HermesContext() + manager = HermesContext() manager.prepare_step("harvest") with manager["codemeta"] as cache: result = SoftwareMetadata(cache["codemeta"]) @@ -353,3 +354,103 @@ def test_codemeta_harvest(tmp_path, monkeypatch, codemeta, res): sys.argv = orig_argv assert result.data_dict == res.data_dict + + +@pytest.mark.parametrize( + "process_result, res", + [ + 2 * ( + SoftwareMetadata({ + "@type": ["http://schema.org/SoftwareSourceCode"], + "http://schema.org/description": [{"@value": "for testing"}], + "http://schema.org/name": [{"@value": "Test"}] + }), + ), + 2 * ( + SoftwareMetadata({ + "@type": ["http://schema.org/SoftwareSourceCode"], + "http://schema.org/applicationCategory": [{"@id": "Testing"}], + "http://schema.org/author": [ + { + "@list": [ + { + "@id": "_:author_1", + "@type": ["http://schema.org/Person"], + "http://schema.org/email": [{"@value": "test.testi@test.testi"}], + "http://schema.org/familyName": [{"@value": "Testi"}], + "http://schema.org/givenName": [{"@value": "Test"}] + } + ] + } + ], + "http://schema.org/codeRepository": [{"@id": "https://github.com/softwarepub/hermes"}], + "http://schema.org/contributor": [ + { + "@id": "_:contributor_1", + "@type": ["http://schema.org/Person"], + "http://schema.org/email": [{"@value": "test.testi@test.testi"}], + "http://schema.org/familyName": [{"@value": "Testi"}], + "http://schema.org/givenName": [{"@value": "Test"}] + } + ], + "http://schema.org/dateCreated": [{"@type": "http://schema.org/Date", "@value": "2026-01-16"}], + "http://schema.org/dateModified": [{"@type": "http://schema.org/Date", "@value": "2026-01-16"}], + "http://schema.org/datePublished": [{"@type": "http://schema.org/Date", "@value": "2026-01-16"}], + "http://schema.org/description": [{"@value": "for testing"}], + "http://schema.org/funder": [ + { + "@type": ["http://schema.org/Organization"], + "http://schema.org/name": [{"@value": "TestsTests"}] + } + ], + "http://schema.org/keywords": [{"@value": "testing"}, {"@value": "more testing"}], + "http://schema.org/license": [ + {"@id": "https://spdx.org/licenses/Adobe-2006"}, + {"@id": "https://spdx.org/licenses/Abstyles"}, + {"@id": "https://spdx.org/licenses/AGPL-1.0-only"} + ], + "http://schema.org/name": [{"@value": "Test"}], + "http://schema.org/operatingSystem": [{"@value": "Windows"}], + "http://schema.org/programmingLanguage": [{"@value": "Python"}, {"@value": "Python 3"}], + "http://schema.org/relatedLink": [{"@id": "https://docs.software-metadata.pub/en/latest"}], + "http://schema.org/releaseNotes": [{"@value": "get it now"}], + "http://schema.org/version": [{"@value": "1.1.1"}], + "https://codemeta.github.io/terms/developmentStatus": [{"@id": "abandoned"}], + "https://codemeta.github.io/terms/funding": [{"@value": "none :("}], + "https://codemeta.github.io/terms/isSourceCodeOf": [{"@id": "HERMES"}], + "https://codemeta.github.io/terms/issueTracker": [ + {"@id": "https://github.com/softwarepub/hermes/issues"} + ], + "https://codemeta.github.io/terms/referencePublication": [{"@id": "https://arxiv.org/abs/2201.09015"}] + }), + ), + ] +) +def test_do_nothing_curate(tmp_path, monkeypatch, process_result, res): + monkeypatch.chdir(tmp_path) + + manager = HermesContext(tmp_path) + manager.prepare_step("process") + with manager["result"] as cache: + cache["expanded"] = process_result.ld_value + cache["context"] = {"@context": process_result.full_context} + manager.finalize_step("process") + + config_file = tmp_path / "hermes.toml" + config_file.write_text("") + + orig_argv = sys.argv[:] + sys.argv = ["hermes", "curate", "--path", str(tmp_path), "--config", str(config_file)] + result = {} + try: + monkeypatch.setattr(HermesContext.__init__, "__defaults__", (tmp_path.cwd(),)) + cli.main() + except SystemExit: + manager.prepare_step("curate") + with manager["result"] as cache: + result = SoftwareMetadata(cache["expanded"][0], cache["context"]["@context"][1]) + manager.finalize_step("curate") + finally: + sys.argv = orig_argv + + assert result.data_dict == res.data_dict From 6c3ba13f4dcb6ff714fbc76c4bca596e91116abd Mon Sep 17 00:00:00 2001 From: Michael Fritzsche Date: Mon, 19 Jan 2026 11:00:38 +0100 Subject: [PATCH 185/247] started to add support for deposit step and added useful method for SoftwareMetadata --- src/hermes/commands/__init__.py | 2 +- src/hermes/commands/cli.py | 4 +- src/hermes/commands/deposit/base.py | 41 ++++++---------- src/hermes/commands/deposit/file.py | 9 +--- src/hermes/commands/deposit/invenio.py | 68 ++++++++++++-------------- src/hermes/error.py | 2 +- src/hermes/model/api.py | 21 +++++++- 7 files changed, 72 insertions(+), 75 deletions(-) diff --git a/src/hermes/commands/__init__.py b/src/hermes/commands/__init__.py index 14f77741..278faddf 100644 --- a/src/hermes/commands/__init__.py +++ b/src/hermes/commands/__init__.py @@ -15,5 +15,5 @@ # from hermes.commands.curate.base import HermesCurateCommand from hermes.commands.harvest.base import HermesHarvestCommand # from hermes.commands.process.base import HermesProcessCommand -# from hermes.commands.deposit.base import HermesDepositCommand +from hermes.commands.deposit.base import HermesDepositCommand # from hermes.commands.postprocess.base import HermesPostprocessCommand diff --git a/src/hermes/commands/cli.py b/src/hermes/commands/cli.py index db109a5e..0ec2d1ae 100644 --- a/src/hermes/commands/cli.py +++ b/src/hermes/commands/cli.py @@ -16,7 +16,7 @@ # from hermes.commands import (HermesHelpCommand, HermesVersionCommand, HermesCleanCommand, # HermesHarvestCommand, HermesProcessCommand, HermesCurateCommand, # HermesDepositCommand, HermesPostprocessCommand, HermesInitCommand) -from hermes.commands import HermesHarvestCommand +from hermes.commands import HermesDepositCommand, HermesHarvestCommand from hermes.commands.base import HermesCommand @@ -45,7 +45,7 @@ def main() -> None: HermesHarvestCommand(parser), # HermesProcessCommand(parser), # HermesCurateCommand(parser), - # HermesDepositCommand(parser), + HermesDepositCommand(parser), # HermesPostprocessCommand(parser), ): if command.settings_class is not None: diff --git a/src/hermes/commands/deposit/base.py b/src/hermes/commands/deposit/base.py index 75018579..800c15e9 100644 --- a/src/hermes/commands/deposit/base.py +++ b/src/hermes/commands/deposit/base.py @@ -7,15 +7,13 @@ import abc import argparse -import json -import sys from pydantic import BaseModel from hermes.commands.base import HermesCommand, HermesPlugin -from hermes.model.context import CodeMetaContext -from hermes.model.path import ContextPath -from hermes.model.errors import HermesValidationError +from hermes.model.context_manager import HermesContext +from hermes.model import SoftwareMetadata +from hermes.model.error import HermesValidationError class BaseDepositPlugin(HermesPlugin): @@ -24,16 +22,19 @@ class BaseDepositPlugin(HermesPlugin): TODO: describe workflow... needs refactoring to be less stateful! """ - def __init__(self, command, ctx): - self.command = command - self.ctx = ctx - def __call__(self, command: HermesCommand) -> None: """Initiate the deposition process. This calls a list of additional methods on the class, none of which need to be implemented. """ self.command = command + self.ctx = HermesContext() + + self.ctx.prepare_step("curate") + self.metadata = SoftwareMetadata.load_from_cache(self.ctx, "result") + self.ctx.finalize_step("curate") + + self.ctx.prepare_step("deposit") self.prepare() self.map_metadata() @@ -106,7 +107,7 @@ def publish(self) -> None: pass -class _DepositSettings(BaseModel): +class DepositSettings(BaseModel): """Generic deposition settings.""" target: str = "" @@ -116,7 +117,7 @@ class HermesDepositCommand(HermesCommand): """ Deposit the curated metadata to repositories. """ command_name = "deposit" - settings_class = _DepositSettings + settings_class = DepositSettings def init_command_parser(self, command_parser: argparse.ArgumentParser) -> None: command_parser.add_argument('--file', '-f', nargs=1, action='append', @@ -128,26 +129,12 @@ def __call__(self, args: argparse.Namespace) -> None: self.args = args plugin_name = self.settings.target - ctx = CodeMetaContext() - codemeta_file = ctx.get_cache("curate", ctx.hermes_name) - if not codemeta_file.exists(): - self.log.error("You must run the 'curate' command before deposit") - sys.exit(1) - - codemeta_path = ContextPath("codemeta") - with open(codemeta_file) as codemeta_fh: - ctx.update(codemeta_path, json.load(codemeta_fh)) - try: - plugin_func = self.plugins[plugin_name](self, ctx) - + plugin_func = self.plugins[plugin_name]() + plugin_func(self) except KeyError as e: self.log.error("Plugin '%s' not found.", plugin_name) self.errors.append(e) - - try: - plugin_func(self) - except HermesValidationError as e: self.log.error("Error while executing %s: %s", plugin_name, e) self.errors.append(e) diff --git a/src/hermes/commands/deposit/file.py b/src/hermes/commands/deposit/file.py index 6c5d6419..5ce8d8e0 100644 --- a/src/hermes/commands/deposit/file.py +++ b/src/hermes/commands/deposit/file.py @@ -11,22 +11,17 @@ from pydantic import BaseModel from hermes.commands.deposit.base import BaseDepositPlugin -from hermes.model.path import ContextPath class FileDepositSettings(BaseModel): - filename: str = 'hermes.json' + filename: str = 'codemeta.json' class FileDepositPlugin(BaseDepositPlugin): settings_class = FileDepositSettings - def map_metadata(self) -> None: - self.ctx.update(ContextPath.parse('deposit.file'), self.ctx['codemeta']) - def publish(self) -> None: file_config = self.command.settings.file - output_data = self.ctx['deposit.file'] with open(file_config.filename, 'w') as deposition_file: - json.dump(output_data, deposition_file, indent=2) + json.dump(self.metadata.compact(), deposition_file, indent=2) diff --git a/src/hermes/commands/deposit/invenio.py b/src/hermes/commands/deposit/invenio.py index 69fb87a0..aafe51b7 100644 --- a/src/hermes/commands/deposit/invenio.py +++ b/src/hermes/commands/deposit/invenio.py @@ -17,11 +17,10 @@ import requests from pydantic import BaseModel -from hermes.commands.deposit.base import BaseDepositPlugin, HermesDepositCommand +from hermes.commands.deposit.base import BaseDepositPlugin from hermes.commands.deposit.error import DepositionUnauthorizedError from hermes.error import MisconfigurationError -from hermes.model.context import CodeMetaContext -from hermes.model.path import ContextPath +from hermes.model.context_manager import HermesContext from hermes.utils import hermes_doi, hermes_user_agent @@ -258,11 +257,13 @@ class InvenioDepositPlugin(BaseDepositPlugin): invenio_resolver_class = InvenioResolver settings_class = InvenioDepositSettings - def __init__(self, command: HermesDepositCommand, ctx: CodeMetaContext, client=None, resolver=None) -> None: - super().__init__(command, ctx) + def __init__(self) -> None: + super().__init__() - self.invenio_context_path = ContextPath.parse(f"deposit.{self.platform_name}") self.invenio_ctx = None + + def __call__(self, command, *, client=None, resolver=None): + self.command = command self.config = getattr(self.command.settings, self.platform_name) if client is None: @@ -292,7 +293,9 @@ def __init__(self, command: HermesDepositCommand, ctx: CodeMetaContext, client=N self.resolver = resolver or self.invenio_resolver_class(self.client) self.links = {} - # TODO: Populate some data structure here? Or move more of this into __init__? + super().__call__(command) + + # TODO: Populate some data structure here? Or move more of this into __init__.py? def prepare(self) -> None: """Prepare the deposition on an Invenio-based platform. @@ -305,49 +308,42 @@ def prepare(self) -> None: - check access modalities (access right, access conditions, embargo data, existence of license) - check whether required configuration options are present - - update ``self.ctx`` with metadata collected during the checks + - update ``self.metadata`` with metadata collected during the checks """ rec_id = self.config.record_id doi = self.config.doi - try: - codemeta_identifier = self.ctx["codemeta.identifier"] - except KeyError: - codemeta_identifier = None - + codemeta_identifier = self.metadata.get("identifier", None) rec_id, rec_meta = self.resolver.resolve_latest_id( record_id=rec_id, doi=doi, codemeta_identifier=codemeta_identifier ) - version = self.ctx["codemeta"].get("version") + version = self.metadata["version"] if rec_meta and (version == rec_meta.get("version")): raise ValueError(f"Version {version} already deposited.") - self.ctx.update(self.invenio_context_path['latestRecord'], {'id': rec_id, 'metadata': rec_meta}) - - license = self._get_license_identifier() - self.ctx.update(self.invenio_context_path["license"], license) - - communities = self._get_community_identifiers() - self.ctx.update(self.invenio_context_path["communities"], communities) + deposition_data = {} + deposition_data["latestRecord"] = {'id': rec_id, 'metadata': rec_meta} + deposition_data["license"] = self._get_license_identifier() + deposition_data["communities"] = self._get_community_identifiers() access_right, embargo_date, access_conditions = self._get_access_modalities(license) - self.ctx.update(self.invenio_context_path["access_right"], access_right) - self.ctx.update(self.invenio_context_path["embargo_date"], embargo_date) - self.ctx.update(self.invenio_context_path["access_conditions"], access_conditions) + deposition_data["access_right"] = access_right + deposition_data["embargo_date"] = embargo_date + deposition_data["access_conditions"] = access_conditions - self.invenio_ctx = self.ctx[self.invenio_context_path] + self.invenio_ctx = deposition_data def map_metadata(self) -> None: """Map the harvested metadata onto the Invenio schema.""" deposition_metadata = self._codemeta_to_invenio_deposition() - self.ctx.update(self.invenio_context_path["depositionMetadata"], deposition_metadata) - - # Store a snapshot of the mapped data within the cache, useful for analysis, debugging, etc - with open(self.ctx.get_cache("deposit", self.platform_name, create=True), 'w') as invenio_json: - json.dump(deposition_metadata, invenio_json, indent=' ') + ctx = HermesContext() + ctx.prepare_step("deposit") + with ctx[self.platform_name] as deposit_ctx: + deposit_ctx["deposit"] = deposition_metadata + ctx.finalize_step("deposit") def is_initial_publication(self) -> bool: latest_record_id = self.invenio_ctx.get("latestRecord", {}).get("id") @@ -426,7 +422,7 @@ def update_metadata(self) -> None: self.links.update(deposit["links"]) _log.debug("Created new version deposit: %s", self.links["html"]) - with open(self.ctx.get_cache('deposit', 'deposit', create=True), 'w') as deposit_file: + with open(self.metadata.get_cache('deposit', 'deposit', create=True), 'w') as deposit_file: json.dump(deposit, deposit_file, indent=4) def delete_artifacts(self) -> None: @@ -505,7 +501,7 @@ def _codemeta_to_invenio_deposition(self) -> dict: differences between Invenio-based platforms. """ - metadata = self.ctx["codemeta"] + metadata = self.metadata license = self.invenio_ctx["license"] communities = self.invenio_ctx["communities"] access_right = self.invenio_ctx["access_right"] @@ -520,7 +516,7 @@ def _codemeta_to_invenio_deposition(self) -> dict: "affiliation": author.get("affiliation", {"legalName": None}).get("legalName"), # Invenio wants "family, given". author.get("name") might not have this format. "name": f"{author.get('familyName')}, {author.get('givenName')}" - if author.get("familyName") and author.get("givenName") + if "familyName" in author and "givenName" in author else author.get("name"), # Invenio expects the ORCID without the URL part "orcid": author.get("@id", "").replace("https://orcid.org/", "") or None, @@ -538,7 +534,7 @@ def _codemeta_to_invenio_deposition(self) -> dict: "affiliation": contributor.get("affiliation", {"legalName": None}).get("legalName"), # Invenio wants "family, given". contributor.get("name") might not have this format. "name": f"{contributor.get('familyName')}, {contributor.get('givenName')}" - if contributor.get("familyName") and contributor.get("givenName") + if "familyName" in contributor and "givenName" in contributor else contributor.get("name"), # Invenio expects the ORCID without the URL part "orcid": contributor.get("@id", "").replace("https://orcid.org/", "") or None, @@ -604,7 +600,7 @@ def _get_license_identifier(self) -> t.Optional[str]: If no license is configured, ``None`` will be returned. """ - license_url = self.ctx["codemeta"].get("license") + license_url = self.metadata["license"] return self.resolver.resolve_license_id(license_url) def _get_community_identifiers(self): @@ -612,7 +608,7 @@ def _get_community_identifiers(self): This function gets the communities to be used for the deposition on an Invenio-based site from the config and checks their validity against the site's API. If one of the - identifiers can not be found on the site, a :class:`HermesMisconfigurationError` is + identifiers can not be found on the site, a :class:`MisconfigurationError` is raised. """ diff --git a/src/hermes/error.py b/src/hermes/error.py index e56c2499..1669ed39 100644 --- a/src/hermes/error.py +++ b/src/hermes/error.py @@ -4,5 +4,5 @@ # SPDX-FileContributor: David Pape -class HermesMisconfigurationError(Exception): +class MisconfigurationError(Exception): pass diff --git a/src/hermes/model/api.py b/src/hermes/model/api.py index 8b079544..24f1405e 100644 --- a/src/hermes/model/api.py +++ b/src/hermes/model/api.py @@ -1,6 +1,7 @@ +from hermes.model.context_manager import HermesContext, HermesContexError from hermes.model.types import ld_dict - from hermes.model.types.ld_context import ALL_CONTEXTS +from hermes.model.types.ld_dict import bundled_loader class SoftwareMetadata(ld_dict): @@ -8,3 +9,21 @@ class SoftwareMetadata(ld_dict): def __init__(self, data: dict = None, extra_vocabs: dict[str, str] = None) -> None: ctx = ALL_CONTEXTS + [{**extra_vocabs}] if extra_vocabs is not None else ALL_CONTEXTS super().__init__([ld_dict.from_dict(data, context=ctx).data_dict if data else {}], context=ctx) + + @classmethod + def load_from_cache(cls, ctx: HermesContext, source: str) -> "SoftwareMetadata": + with ctx[source] as cache: + try: + return SoftwareMetadata(cache["codemeta"]) + except Exception: + pass + try: + context = cache["context"]["@context"] + data = SoftwareMetadata() + data.active_ctx = data.ld_proc.initial_ctx(context, {"documentLoader": bundled_loader}) + data.context = context + for key, value in cache["expanded"][0]: + data[key] = value + return data + except Exception as e: + raise HermesContexError("There is no (valid) data stored in the cache.") from e From 0284b01186b26670fbb0351d37445562420e6a50 Mon Sep 17 00:00:00 2001 From: "Kernchen, Sophie" Date: Tue, 20 Jan 2026 09:29:58 +0100 Subject: [PATCH 186/247] Correct Docs for newer functionality --- docs/source/dev/data_model.md | 44 +++++++---------------------------- 1 file changed, 8 insertions(+), 36 deletions(-) diff --git a/docs/source/dev/data_model.md b/docs/source/dev/data_model.md index e788fd44..9077b0d3 100644 --- a/docs/source/dev/data_model.md +++ b/docs/source/dev/data_model.md @@ -214,9 +214,9 @@ for i, author in enumerate(data["author"], start=1): :caption: Value check for email in data["author"][0]["email"]: if email.endswith(".edu"): - print("Author has an email address at an educational institution.") + print("Shakespeare has an email address at an educational institution.") else: - print("Cannot confirm affiliation with educational institution for author.") + print("Cannot confirm affiliation with educational institution for Shakespeare.") # Mock output # $> Cannot confirm affiliation with educational institution for author. @@ -239,8 +239,8 @@ The API class {class}`hermes.model.SoftwareMetadata` hides many of the more complex aspects of JSON-LD and makes it easy to work with the data model. -Assertions, however, operate on the internal model objects. -Therefore, they may not work as you would expect from plain +So the API class hides the internal model objects. +Therefore, they work as you would expect from plain Python data: ```{code-block} python @@ -258,12 +258,10 @@ except AssertionError: raise # Mock output -# $> The author could not be found. -# $> AssertionError: -# assert -# {'email': ['shakespeare@baz.net'], 'name': ['Shakespeare']} -# in -# _LDList( +# $> The author was found! +# +# +# Internal Model from data["author"]: # {'@list': [ # { # 'http://schema.org/name': [{'@value': 'Shakespeare'}], @@ -281,32 +279,6 @@ except AssertionError: # ) ``` -The mock output in the example above shows the inequality of the expected and the actual value. -The actual value is an internal data type wrapping the more complex JSON-LD data. - -The complex data structure of JSON-LD is internally constructed in the `hermes` data -model, and to make it possible to work with only the data that is important - the actual terms -and their values - the internal data model types provide a function `.to_python()`. -This function can be used in assertions to assert full data integrity: - -```{code-block} python -:caption: Containment assertion with `to_python()` -:emphasize-lines: 5,13 -try: - assert ( - {'name': ['Shakespeare'], 'email': ['shakespeare@baz.net']} - in - data["author"].to_python() - ) - print("The author was found!") -except AssertionError: - print("The author could not be found.") - raise - -# Mock output -# $> The author was found! -``` - --- ## See Also From d46394e551b6265c0b7a34d301a2c4663537123b Mon Sep 17 00:00:00 2001 From: "Kernchen, Sophie" Date: Fri, 23 Jan 2026 09:13:20 +0100 Subject: [PATCH 187/247] Correct type of value --- src/hermes/model/types/pyld_util.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/src/hermes/model/types/pyld_util.py b/src/hermes/model/types/pyld_util.py index 10b9b7c5..8bcef253 100644 --- a/src/hermes/model/types/pyld_util.py +++ b/src/hermes/model/types/pyld_util.py @@ -103,10 +103,9 @@ class JsonLdProcessor(jsonld.JsonLdProcessor): def expand_iri(self, active_ctx: t.Any, short_iri: str, vocab: bool = True) -> str: return self._expand_iri(active_ctx, short_iri, vocab=vocab) - def compact_iri(self, active_ctx: t.Any, long_iri: str, vocab: bool = True, value: list = None) -> str: - # print(active_ctx, long_iri, vocab) + def compact_iri(self, active_ctx: t.Any, long_iri: str, vocab: bool = True, value: dict = None) -> str: for mapping in active_ctx["mappings"].values(): - if ("@container" in mapping and long_iri): + if "@container" in mapping and long_iri: value = {x: "none" for x in mapping["@container"]} return self._compact_iri(active_ctx, long_iri, vocab=vocab, value=value) From feeb16b9263849f14a0cfe9b34bfd6ab12b3e7b7 Mon Sep 17 00:00:00 2001 From: notactuallyfinn Date: Fri, 23 Jan 2026 14:18:39 +0100 Subject: [PATCH 188/247] worked on invenio deposit --- src/hermes/commands/deposit/base.py | 29 ++++--- src/hermes/commands/deposit/file.py | 5 +- src/hermes/commands/deposit/invenio.py | 94 ++++++++++++++------- test/hermes_test/model/test_api_e2e.py | 108 +++++++++++++++++++++++++ 4 files changed, 195 insertions(+), 41 deletions(-) diff --git a/src/hermes/commands/deposit/base.py b/src/hermes/commands/deposit/base.py index 800c15e9..4a996eaa 100644 --- a/src/hermes/commands/deposit/base.py +++ b/src/hermes/commands/deposit/base.py @@ -34,17 +34,25 @@ def __call__(self, command: HermesCommand) -> None: self.metadata = SoftwareMetadata.load_from_cache(self.ctx, "result") self.ctx.finalize_step("curate") - self.ctx.prepare_step("deposit") - self.prepare() - self.map_metadata() + deposit = self.map_metadata() + self.ctx.prepare_step("deposit") + with self.ctx[command.settings.target] as cache: + cache["deposit"] = deposit.compact() + self.ctx.finalize_step("deposit") if self.is_initial_publication(): self.create_initial_version() else: self.create_new_version() - self.update_metadata() + deposit = self.update_metadata() + self.ctx.prepare_step("deposit") + with self.ctx[command.settings.target] as cache: + cache["codemeta"] = deposit.compact() + cache["expanded"] = deposit.ld_value + cache["context"] = {"@context": deposit.full_context} + self.ctx.finalize_step("deposit") self.delete_artifacts() self.upload_artifacts() self.publish() @@ -59,8 +67,8 @@ def prepare(self) -> None: pass @abc.abstractmethod - def map_metadata(self) -> None: - """Map the given metadata to the target schema of the deposition platform. + def map_metadata(self) -> SoftwareMetadata: + """Map the given metadata to the target schema of the deposition platform and return it. When mapping metadata, make sure to add traces to the HERMES software, e.g. via DataCite's ``relatedIdentifier`` using the ``isCompiledBy`` relation. Ideally, the value @@ -89,9 +97,9 @@ def create_new_version(self) -> None: """Create a new version of an existing publication on the target platform.""" pass - def update_metadata(self) -> None: - """Update the metadata of the newly created version.""" - pass + def update_metadata(self) -> SoftwareMetadata: + """Update the metadata of the newly created version and return it even if it hasn't changed.""" + return self.metadata def delete_artifacts(self) -> None: """Delete any superfluous artifacts taken from the previous version of the publication.""" @@ -131,10 +139,11 @@ def __call__(self, args: argparse.Namespace) -> None: try: plugin_func = self.plugins[plugin_name]() - plugin_func(self) except KeyError as e: self.log.error("Plugin '%s' not found.", plugin_name) self.errors.append(e) + try: + plugin_func(self) except HermesValidationError as e: self.log.error("Error while executing %s: %s", plugin_name, e) self.errors.append(e) diff --git a/src/hermes/commands/deposit/file.py b/src/hermes/commands/deposit/file.py index 5ce8d8e0..53876c53 100644 --- a/src/hermes/commands/deposit/file.py +++ b/src/hermes/commands/deposit/file.py @@ -11,7 +11,7 @@ from pydantic import BaseModel from hermes.commands.deposit.base import BaseDepositPlugin - +from hermes.model import SoftwareMetadata class FileDepositSettings(BaseModel): filename: str = 'codemeta.json' @@ -20,6 +20,9 @@ class FileDepositSettings(BaseModel): class FileDepositPlugin(BaseDepositPlugin): settings_class = FileDepositSettings + def map_metadata(self) -> SoftwareMetadata: + return self.metadata + def publish(self) -> None: file_config = self.command.settings.file diff --git a/src/hermes/commands/deposit/invenio.py b/src/hermes/commands/deposit/invenio.py index aafe51b7..2fd13f0d 100644 --- a/src/hermes/commands/deposit/invenio.py +++ b/src/hermes/commands/deposit/invenio.py @@ -6,21 +6,21 @@ # SPDX-FileContributor: Oliver Bertuch # SPDX-FileContributor: Michael Meinel -import json import logging import pathlib -import typing as t from datetime import date, datetime from pathlib import Path from urllib.parse import urlparse import requests from pydantic import BaseModel +from typing import Union from hermes.commands.deposit.base import BaseDepositPlugin from hermes.commands.deposit.error import DepositionUnauthorizedError from hermes.error import MisconfigurationError -from hermes.model.context_manager import HermesContext +from hermes.model import SoftwareMetadata +from hermes.model.error import HermesValidationError from hermes.utils import hermes_doi, hermes_user_agent @@ -108,7 +108,7 @@ def __init__(self, client=None): def resolve_latest_id( self, record_id=None, doi=None, codemeta_identifier=None - ) -> t.Tuple[t.Optional[str], dict]: + ) -> tuple[Union[str, None], dict]: """ Using the given metadata parameters, figure out the latest record id. @@ -166,7 +166,7 @@ def resolve_doi(self, doi) -> str: *_, record_id = page_url.path.split('/') return record_id - def resolve_record_id(self, record_id: str) -> t.Tuple[str, dict]: + def resolve_record_id(self, record_id: str) -> tuple[str, dict]: """ Find the latest version of a given record. @@ -185,7 +185,7 @@ def resolve_record_id(self, record_id: str) -> t.Tuple[str, dict]: res_json = res.json() return res_json['id'], res_json['metadata'] - def resolve_license_id(self, license_url: t.Optional[str]) -> t.Optional[str]: + def resolve_license_id(self, license_url: Union[str, None]) -> Union[str, None]: """Get Invenio license representation from CodeMeta. The license to use is extracted from the ``license`` field in the @@ -218,7 +218,7 @@ def resolve_license_id(self, license_url: t.Optional[str]) -> t.Optional[str]: parsed_url = urlparse(license_url) url_path = parsed_url.path.rstrip("/") - license_id = url_path.split("/")[-1] + license_id = str.lower(url_path.split("/")[-1]) response = self.client.get_license(license_id) if response.status_code == 404: @@ -230,7 +230,8 @@ def resolve_license_id(self, license_url: t.Optional[str]) -> t.Optional[str]: @staticmethod def _extract_license_id_from_response(data: dict) -> str: - return data["metadata"]["id"] + # TODO: find correct key, data["metadata"]["id"] did not work for me but data["id"] does + return data["id"] class InvenioDepositSettings(BaseModel): @@ -242,7 +243,7 @@ class InvenioDepositSettings(BaseModel): access_right: str = None embargo_date: str = None access_conditions: str = None - api_paths: t.Dict = {} + api_paths: dict = {} auth_token: str = '' files: list[pathlib.Path] = [] @@ -335,15 +336,10 @@ def prepare(self) -> None: self.invenio_ctx = deposition_data - def map_metadata(self) -> None: - """Map the harvested metadata onto the Invenio schema.""" - - deposition_metadata = self._codemeta_to_invenio_deposition() - ctx = HermesContext() - ctx.prepare_step("deposit") - with ctx[self.platform_name] as deposit_ctx: - deposit_ctx["deposit"] = deposition_metadata - ctx.finalize_step("deposit") + def map_metadata(self) -> SoftwareMetadata: + """Map the harvested metadata onto the Invenio schema and return it.""" + self.invenio_ctx["depositionMetadata"] = self._codemeta_to_invenio_deposition() + return SoftwareMetadata(self.invenio_ctx["depositionMetadata"]) def is_initial_publication(self) -> bool: latest_record_id = self.invenio_ctx.get("latestRecord", {}).get("id") @@ -402,8 +398,8 @@ def related_identifiers(self): }, ] - def update_metadata(self) -> None: - """Update the metadata of a draft.""" + def update_metadata(self) -> SoftwareMetadata: + """Update the metadata of a draft and return it.""" draft_url = self.links["latest_draft"] @@ -422,8 +418,7 @@ def update_metadata(self) -> None: self.links.update(deposit["links"]) _log.debug("Created new version deposit: %s", self.links["html"]) - with open(self.metadata.get_cache('deposit', 'deposit', create=True), 'w') as deposit_file: - json.dump(deposit, deposit_file, indent=4) + return SoftwareMetadata(deposit.get("metadata", {})) def delete_artifacts(self) -> None: """Delete existing file artifacts. @@ -444,7 +439,10 @@ def upload_artifacts(self) -> None: bucket_url = self.links["bucket"] - files = *self.config.files, *[f[0] for f in self.command.args.file] + if self.command.args.file: + files = *self.config.files, *[f[0] for f in self.command.args.file] + else: + files = tuple(*self.config.files) for path_arg in files: path = Path(path_arg) @@ -508,7 +506,22 @@ def _codemeta_to_invenio_deposition(self) -> dict: embargo_date = self.invenio_ctx["embargo_date"] access_conditions = self.invenio_ctx["access_conditions"] - creators = [ + creators = [] + for author in metadata["author"]: + creator = {} + if len(affils := [name for affil in author["affiliation"] for name in affil["legalname"]]) != 0: + creator["affiliation"] = affils + given_names_str = " ".join(author["givenName"]) + names = [f"{family_name}, {given_names_str}" for family_name in author["familyName"]] + names.extend(author["names"]) + if len(names) != 0: + creator["name"] = names + if (id := author.get("@id", None)) is not None: + creator["orcid"] = id.replace("https://orcid.org/", "") + if creator: + creators.append(creator) + + """creators = [ # TODO: Distinguish between @type "Person" and others { k: v for k, v in { @@ -523,7 +536,7 @@ def _codemeta_to_invenio_deposition(self) -> dict: }.items() if v is not None } for author in metadata["author"] - ] + ]""" # This is not used at the moment. See comment below in `deposition_metadata` dict. contributors = [ # noqa: F841 @@ -546,6 +559,27 @@ def _codemeta_to_invenio_deposition(self) -> dict: for contributor in metadata.get("contributor", []) if contributor.get("name") != "GitHub" ] + if len(metadata["name"]) != 1: + _log.error("More than one or zero names for the Software are given.") + raise HermesValidationError("More than one or zerno names for the Software.") + name = metadata["name"][0] + + if len(metadata["schema:description"]) > 1: + _log.error("More than one descriptions of the Software are given.") + raise HermesValidationError("More than one descriptions of the Software are given.") + if len(metadata["schema:description"]) == 1: + description = metadata["schema:description"][0] + else: + description = None + + if len(metadata["schema:version"]) > 1: + _log.error("More than one version of the Software are given.") + raise HermesValidationError("More than one version of the Software are given.") + if len(metadata["schema:version"]) == 1: + version = metadata["schema:version"][0] + else: + version = None + # TODO: Use the fields currently set to `None`. # Some more fields are available but they most likely don't relate to software # publications targeted by hermes. @@ -559,12 +593,12 @@ def _codemeta_to_invenio_deposition(self) -> dict: # TODO: Maybe we want a different date? Then make this configurable. If not, # this can be removed as it defaults to today. "publication_date": date.today().isoformat(), - "title": metadata["name"], + "title": name, "creators": creators, # TODO: Use a real description here. Possible sources could be # `tool.poetry.description` from pyproject.toml or `abstract` from # CITATION.cff. This should then be stored in codemeta description field. - "description": metadata["name"], + "description": description, "access_right": access_right, "license": license, "embargo_date": embargo_date, @@ -590,17 +624,17 @@ def _codemeta_to_invenio_deposition(self) -> dict: "communities": communities, "grants": None, "subjects": None, - "version": metadata.get('version'), + "version": version, }.items() if v is not None} return deposition_metadata - def _get_license_identifier(self) -> t.Optional[str]: + def _get_license_identifier(self) -> Union[str, None]: """Get Invenio license identifier that matches the given license URL. If no license is configured, ``None`` will be returned. """ - license_url = self.metadata["license"] + license_url = self.metadata["license"][0] return self.resolver.resolve_license_id(license_url) def _get_community_identifiers(self): diff --git a/test/hermes_test/model/test_api_e2e.py b/test/hermes_test/model/test_api_e2e.py index f4ec7fd6..1202572e 100644 --- a/test/hermes_test/model/test_api_e2e.py +++ b/test/hermes_test/model/test_api_e2e.py @@ -4,10 +4,21 @@ # SPDX-FileContributor: Michael Fritzsche +import json import pytest import sys from hermes.model import context_manager, SoftwareMetadata from hermes.commands import cli +from pathlib import Path + + +@pytest.fixture +def sandbox_auth(): + path = Path("./../auth.txt") + if not path.exists(): + pytest.skip("Local auth token file does not exist.") + with path.open() as f: + yield f.read() @pytest.mark.parametrize( @@ -353,3 +364,100 @@ def test_codemeta_harvest(tmp_path, monkeypatch, codemeta, res): sys.argv = orig_argv assert result.data_dict == res.data_dict + + +@pytest.mark.parametrize( + "deposit, res", + [ + 2 * ( + SoftwareMetadata({ + "@type": ["http://schema.org/SoftwareSourceCode"], + "http://schema.org/description": [{"@value": "for testing"}], + "http://schema.org/name": [{"@value": "Test"}] + }), + ) + ] +) +def test_file_deposit(tmp_path, monkeypatch, deposit, res): + monkeypatch.chdir(tmp_path) + + manager = context_manager.HermesContext(tmp_path) + manager.prepare_step("curate") + with manager["result"] as cache: + cache["codemeta"] = deposit.compact() + manager.finalize_step("curate") + + config_file = tmp_path / "hermes.toml" + config_file.write_text("[deposit]\ntarget = \"file\"") + + orig_argv = sys.argv[:] + sys.argv = ["hermes", "deposit", "--path", str(tmp_path), "--config", str(config_file)] + result = {} + try: + monkeypatch.setattr(context_manager.HermesContext.__init__, "__defaults__", (tmp_path.cwd(),)) + cli.main() + except SystemExit: + with open('codemeta.json', 'r') as cache: + result = SoftwareMetadata(json.load(cache)) + finally: + sys.argv = orig_argv + + assert result.data_dict == res.data_dict + + +@pytest.mark.parametrize( + "metadata", + [ + SoftwareMetadata({ + "@type": ["http://schema.org/SoftwareSourceCode"], + "http://schema.org/description": [{"@value": "for testing"}], + "http://schema.org/name": [{"@value": "Test"}], + "http://schema.org/author": [{ + "@type": "http://schema.org/Person", + "http://schema.org/familyName": [{"@value": "Test"}], + "http://schema.org/givenName": [{"@value": "Testi"}] + }], + "http://schema.org/license": [{"@id": "https://spdx.org/licenses/apache-2.0"}] + }), + ] +) +def test_invenio_deposit(tmp_path, monkeypatch, sandbox_auth, metadata): + monkeypatch.chdir(tmp_path) + + manager = context_manager.HermesContext(tmp_path) + manager.prepare_step("curate") + with manager["result"] as cache: + cache["codemeta"] = metadata.compact() + manager.finalize_step("curate") + + config_file = tmp_path / "hermes.toml" + config_file.write_text(f"""[deposit] +target = \"invenio\" +[deposit.invenio] +site_url = \"https://sandbox.zenodo.org\" +access_right = \"closed\" +auth_token = \"{sandbox_auth}\" +file = [] +[deposit.invenio.api_paths] +licenses = "api/vocabularies/licenses" +""") + + orig_argv = sys.argv[:] + sys.argv = ["hermes", "deposit", "--path", str(tmp_path), "--config", str(config_file), "--initial"] + result = {} + try: + monkeypatch.setattr(context_manager.HermesContext.__init__, "__defaults__", (tmp_path.cwd(),)) + cli.main() + except SystemExit: + manager.prepare_step("deposit") + result = SoftwareMetadata.load_from_cache(manager, "invenio") + manager.finalize_step("deposit") + finally: + sys.argv = orig_argv + + assert result.data_dict == metadata.data_dict + +# TODO: +# - handle get() on Softwaremetadata objects in invenio.py +# - Sophie genaueres bezüglich Zeiten für Arbeitszeiterhöhung und -zeitraumerweiterung schicken + From ed0916baa4b9c75983ad3ced5bf9da200b20d0ff Mon Sep 17 00:00:00 2001 From: notactuallyfinn Date: Mon, 26 Jan 2026 10:12:20 +0100 Subject: [PATCH 189/247] fixed bugs in invenio.py --- src/hermes/commands/deposit/invenio.py | 19 +++++++++++++------ test/hermes_test/model/test_api_e2e.py | 22 ++++++++++------------ 2 files changed, 23 insertions(+), 18 deletions(-) diff --git a/src/hermes/commands/deposit/invenio.py b/src/hermes/commands/deposit/invenio.py index 2fd13f0d..01211e5a 100644 --- a/src/hermes/commands/deposit/invenio.py +++ b/src/hermes/commands/deposit/invenio.py @@ -442,7 +442,8 @@ def upload_artifacts(self) -> None: if self.command.args.file: files = *self.config.files, *[f[0] for f in self.command.args.file] else: - files = tuple(*self.config.files) + files = tuple(self.config.files) + for path_arg in files: path = Path(path_arg) @@ -511,11 +512,17 @@ def _codemeta_to_invenio_deposition(self) -> dict: creator = {} if len(affils := [name for affil in author["affiliation"] for name in affil["legalname"]]) != 0: creator["affiliation"] = affils - given_names_str = " ".join(author["givenName"]) - names = [f"{family_name}, {given_names_str}" for family_name in author["familyName"]] - names.extend(author["names"]) - if len(names) != 0: - creator["name"] = names + if len(author["familyName"]) > 1: + raise HermesValidationError(f"Author has too many family names: {author.to_python()}") + if len(author["familyName"]) == 1: + given_names_str = " ".join(author["givenName"]) + name = f"{author["familyName"][0]}, {given_names_str}" + elif len(author["name"]) != 1: + raise HermesValidationError(f"Author has too many names: {author.to_python()}") + else: + name = author["name"][0] + if len(name) != 0: + creator["name"] = name if (id := author.get("@id", None)) is not None: creator["orcid"] = id.replace("https://orcid.org/", "") if creator: diff --git a/test/hermes_test/model/test_api_e2e.py b/test/hermes_test/model/test_api_e2e.py index 1202572e..fa8f4ac8 100644 --- a/test/hermes_test/model/test_api_e2e.py +++ b/test/hermes_test/model/test_api_e2e.py @@ -205,7 +205,7 @@ def test_cff_harvest(tmp_path, monkeypatch, cff, res): # FIXME: update to compare the SoftwareMetadata objects instead of the data_dicts (in multiple places) # after merge with refactor/data-model and/or refactor/423-implement-public-api - assert result.data_dict == res.data_dict + assert result == res @pytest.mark.parametrize( @@ -363,7 +363,7 @@ def test_codemeta_harvest(tmp_path, monkeypatch, codemeta, res): finally: sys.argv = orig_argv - assert result.data_dict == res.data_dict + assert result == res @pytest.mark.parametrize( @@ -402,7 +402,7 @@ def test_file_deposit(tmp_path, monkeypatch, deposit, res): finally: sys.argv = orig_argv - assert result.data_dict == res.data_dict + assert result == res @pytest.mark.parametrize( @@ -432,12 +432,12 @@ def test_invenio_deposit(tmp_path, monkeypatch, sandbox_auth, metadata): config_file = tmp_path / "hermes.toml" config_file.write_text(f"""[deposit] -target = \"invenio\" +target = "invenio" [deposit.invenio] -site_url = \"https://sandbox.zenodo.org\" -access_right = \"closed\" -auth_token = \"{sandbox_auth}\" -file = [] +site_url = "https://sandbox.zenodo.org" +access_right = "closed" +auth_token = "{sandbox_auth}" +files = ["hermes.toml"] [deposit.invenio.api_paths] licenses = "api/vocabularies/licenses" """) @@ -455,9 +455,7 @@ def test_invenio_deposit(tmp_path, monkeypatch, sandbox_auth, metadata): finally: sys.argv = orig_argv - assert result.data_dict == metadata.data_dict + assert result == metadata -# TODO: -# - handle get() on Softwaremetadata objects in invenio.py -# - Sophie genaueres bezüglich Zeiten für Arbeitszeiterhöhung und -zeitraumerweiterung schicken +# TODO: handle get() on Softwaremetadata objects in invenio.py From 382e2c3e3f55c95bf1a9908208cea061eaf7b17e Mon Sep 17 00:00:00 2001 From: notactuallyfinn Date: Fri, 30 Jan 2026 14:07:09 +0100 Subject: [PATCH 190/247] fixed bug and adjusted tests --- src/hermes/commands/deposit/base.py | 15 ++--- src/hermes/commands/deposit/file.py | 9 ++- src/hermes/commands/deposit/invenio.py | 68 ++++++++++++-------- src/hermes/model/types/ld_dict.py | 27 ++++---- test/hermes_test/model/test_api.py | 20 +++--- test/hermes_test/model/test_api_e2e.py | 61 +++++++++--------- test/hermes_test/model/types/test_ld_dict.py | 19 ++++++ 7 files changed, 129 insertions(+), 90 deletions(-) diff --git a/src/hermes/commands/deposit/base.py b/src/hermes/commands/deposit/base.py index 4a996eaa..6fbf3625 100644 --- a/src/hermes/commands/deposit/base.py +++ b/src/hermes/commands/deposit/base.py @@ -38,7 +38,7 @@ def __call__(self, command: HermesCommand) -> None: deposit = self.map_metadata() self.ctx.prepare_step("deposit") with self.ctx[command.settings.target] as cache: - cache["deposit"] = deposit.compact() + cache["deposit"] = deposit self.ctx.finalize_step("deposit") if self.is_initial_publication(): @@ -48,10 +48,8 @@ def __call__(self, command: HermesCommand) -> None: deposit = self.update_metadata() self.ctx.prepare_step("deposit") - with self.ctx[command.settings.target] as cache: - cache["codemeta"] = deposit.compact() - cache["expanded"] = deposit.ld_value - cache["context"] = {"@context": deposit.full_context} + with self.ctx["deposit"] as cache: + cache["result"] = deposit self.ctx.finalize_step("deposit") self.delete_artifacts() self.upload_artifacts() @@ -67,7 +65,7 @@ def prepare(self) -> None: pass @abc.abstractmethod - def map_metadata(self) -> SoftwareMetadata: + def map_metadata(self) -> dict: """Map the given metadata to the target schema of the deposition platform and return it. When mapping metadata, make sure to add traces to the HERMES software, e.g. via @@ -97,9 +95,10 @@ def create_new_version(self) -> None: """Create a new version of an existing publication on the target platform.""" pass - def update_metadata(self) -> SoftwareMetadata: + @abc.abstractmethod + def update_metadata(self) -> dict: """Update the metadata of the newly created version and return it even if it hasn't changed.""" - return self.metadata + pass def delete_artifacts(self) -> None: """Delete any superfluous artifacts taken from the previous version of the publication.""" diff --git a/src/hermes/commands/deposit/file.py b/src/hermes/commands/deposit/file.py index 53876c53..ed6bd570 100644 --- a/src/hermes/commands/deposit/file.py +++ b/src/hermes/commands/deposit/file.py @@ -11,7 +11,7 @@ from pydantic import BaseModel from hermes.commands.deposit.base import BaseDepositPlugin -from hermes.model import SoftwareMetadata + class FileDepositSettings(BaseModel): filename: str = 'codemeta.json' @@ -20,8 +20,11 @@ class FileDepositSettings(BaseModel): class FileDepositPlugin(BaseDepositPlugin): settings_class = FileDepositSettings - def map_metadata(self) -> SoftwareMetadata: - return self.metadata + def map_metadata(self) -> dict: + return self.metadata.compact() + + def update_metadata(self) -> dict: + return self.metadata.compact() def publish(self) -> None: file_config = self.command.settings.file diff --git a/src/hermes/commands/deposit/invenio.py b/src/hermes/commands/deposit/invenio.py index 01211e5a..9434beca 100644 --- a/src/hermes/commands/deposit/invenio.py +++ b/src/hermes/commands/deposit/invenio.py @@ -19,7 +19,6 @@ from hermes.commands.deposit.base import BaseDepositPlugin from hermes.commands.deposit.error import DepositionUnauthorizedError from hermes.error import MisconfigurationError -from hermes.model import SoftwareMetadata from hermes.model.error import HermesValidationError from hermes.utils import hermes_doi, hermes_user_agent @@ -320,7 +319,12 @@ def prepare(self) -> None: record_id=rec_id, doi=doi, codemeta_identifier=codemeta_identifier ) - version = self.metadata["version"] + if len(self.metadata.get("version", [])) > 1: + raise HermesValidationError("Too many licenses for invenio deposit.") + if len(self.metadata.get("version", [])) == 1: + version = self.metadata["version"][0] + else: + version = None if rec_meta and (version == rec_meta.get("version")): raise ValueError(f"Version {version} already deposited.") @@ -336,10 +340,10 @@ def prepare(self) -> None: self.invenio_ctx = deposition_data - def map_metadata(self) -> SoftwareMetadata: + def map_metadata(self) -> dict: """Map the harvested metadata onto the Invenio schema and return it.""" self.invenio_ctx["depositionMetadata"] = self._codemeta_to_invenio_deposition() - return SoftwareMetadata(self.invenio_ctx["depositionMetadata"]) + return self.invenio_ctx["depositionMetadata"] def is_initial_publication(self) -> bool: latest_record_id = self.invenio_ctx.get("latestRecord", {}).get("id") @@ -398,7 +402,7 @@ def related_identifiers(self): }, ] - def update_metadata(self) -> SoftwareMetadata: + def update_metadata(self) -> dict: """Update the metadata of a draft and return it.""" draft_url = self.links["latest_draft"] @@ -418,7 +422,7 @@ def update_metadata(self) -> SoftwareMetadata: self.links.update(deposit["links"]) _log.debug("Created new version deposit: %s", self.links["html"]) - return SoftwareMetadata(deposit.get("metadata", {})) + return deposit def delete_artifacts(self) -> None: """Delete existing file artifacts. @@ -508,21 +512,25 @@ def _codemeta_to_invenio_deposition(self) -> dict: access_conditions = self.invenio_ctx["access_conditions"] creators = [] - for author in metadata["author"]: + for author in metadata.get("author", []): creator = {} - if len(affils := [name for affil in author["affiliation"] for name in affil["legalname"]]) != 0: + if len( + affils := [ + name for affil in author.get("affiliation", []) for name in affil.get("legalname", []) + ] + ) != 0: creator["affiliation"] = affils - if len(author["familyName"]) > 1: - raise HermesValidationError(f"Author has too many family names: {author.to_python()}") - if len(author["familyName"]) == 1: - given_names_str = " ".join(author["givenName"]) + + if len(author.get("familyName", [])) > 1: + raise HermesValidationError(f"Author has too many family names: {author}") + if len(author.get("familyName", [])) == 1: + given_names_str = " ".join(author.get("givenName", [])) name = f"{author["familyName"][0]}, {given_names_str}" - elif len(author["name"]) != 1: - raise HermesValidationError(f"Author has too many names: {author.to_python()}") + elif len(author.get("name", [])) != 1: + raise HermesValidationError(f"Author has too many or no names: {author}") else: name = author["name"][0] - if len(name) != 0: - creator["name"] = name + creator["name"] = name if (id := author.get("@id", None)) is not None: creator["orcid"] = id.replace("https://orcid.org/", "") if creator: @@ -545,6 +553,7 @@ def _codemeta_to_invenio_deposition(self) -> dict: for author in metadata["author"] ]""" + # TODO: reimplement with new api # This is not used at the moment. See comment below in `deposition_metadata` dict. contributors = [ # noqa: F841 # TODO: Distinguish between @type "Person" and others @@ -566,27 +575,33 @@ def _codemeta_to_invenio_deposition(self) -> dict: for contributor in metadata.get("contributor", []) if contributor.get("name") != "GitHub" ] - if len(metadata["name"]) != 1: + if len(metadata.get("name", [])) != 1: _log.error("More than one or zero names for the Software are given.") raise HermesValidationError("More than one or zerno names for the Software.") name = metadata["name"][0] - if len(metadata["schema:description"]) > 1: + if len(metadata.get("schema:description", [])) > 1: _log.error("More than one descriptions of the Software are given.") raise HermesValidationError("More than one descriptions of the Software are given.") - if len(metadata["schema:description"]) == 1: + if len(metadata.get("schema:description", [])) == 1: description = metadata["schema:description"][0] else: description = None - if len(metadata["schema:version"]) > 1: + if len(metadata.get("schema:version", [])) > 1: _log.error("More than one version of the Software are given.") raise HermesValidationError("More than one version of the Software are given.") - if len(metadata["schema:version"]) == 1: + if len(metadata.get("schema:version", [])) == 1: version = metadata["schema:version"][0] else: version = None + keywords = metadata.get("schema:keywords", []) + if len(keywords) == 0: + keywords = None + else: + keywords = keywords.to_python() + # TODO: Use the fields currently set to `None`. # Some more fields are available but they most likely don't relate to software # publications targeted by hermes. @@ -602,9 +617,6 @@ def _codemeta_to_invenio_deposition(self) -> dict: "publication_date": date.today().isoformat(), "title": name, "creators": creators, - # TODO: Use a real description here. Possible sources could be - # `tool.poetry.description` from pyproject.toml or `abstract` from - # CITATION.cff. This should then be stored in codemeta description field. "description": description, "access_right": access_right, "license": license, @@ -618,8 +630,8 @@ def _codemeta_to_invenio_deposition(self) -> dict: # them. # TODO: Use the DOI we get back from this. "prereserve_doi": True, - # TODO: A good source for this could be `tool.poetry.keywords` in pyproject.toml. - "keywords": None, + "keywords": keywords, + # TODO: Is there a good codemeta/ schema field? "notes": None, "related_identifiers": self.related_identifiers(), # TODO: Use `contributors`. In the case of the hermes workflow itself, the @@ -641,6 +653,10 @@ def _get_license_identifier(self) -> Union[str, None]: If no license is configured, ``None`` will be returned. """ + if "license" not in self.metadata: + raise HermesValidationError("No license is given.") + if len(self.metadata["license"]) > 1: + raise HermesValidationError("Too many licenses for invenio deposit.") license_url = self.metadata["license"][0] return self.resolver.resolve_license_id(license_url) diff --git a/src/hermes/model/types/ld_dict.py b/src/hermes/model/types/ld_dict.py index 8311b67f..f368ec73 100644 --- a/src/hermes/model/types/ld_dict.py +++ b/src/hermes/model/types/ld_dict.py @@ -22,14 +22,7 @@ def __init__(self, data, *, parent=None, key=None, index=None, context=None): def __getitem__(self, key): full_iri = self.ld_proc.expand_iri(self.active_ctx, key) - if full_iri == "@id": - return self._to_python(full_iri, self.data_dict[full_iri]) - try: - ld_value = self.data_dict[full_iri] - except KeyError: - self[key] = [] - ld_value = self.data_dict[full_iri] - return self._to_python(full_iri, ld_value) + return self._to_python(full_iri, self.data_dict[full_iri]) def __setitem__(self, key, value): ld_value = self._to_expanded_json({key: value}) @@ -41,12 +34,7 @@ def __delitem__(self, key): def __contains__(self, key): full_iri = self.ld_proc.expand_iri(self.active_ctx, key) - if full_iri == "@id": - return "@id" in self.data_dict - try: - return len(self[full_iri]) != 0 - except KeyError: - return False + return full_iri in self.data_dict def __eq__(self, other): if not isinstance(other, (dict, ld_dict)): @@ -89,6 +77,15 @@ def get(self, key, default=_NO_DEFAULT): return default return self[key] + def setdefault(self, key, default): + if key not in self: + self[key] = default + return self[key] + + def emplace(self, key): + if key not in self: + self[key] = [] + def update(self, other): for key, value in other.items(): self[key] = value @@ -136,7 +133,7 @@ def from_dict(cls, value, *, parent=None, key=None, context=None, ld_type=None): full_context = parent.full_context + merged_contexts ld_value = cls.ld_proc.expand(ld_data, {"expandContext": full_context, "documentLoader": bundled_loader}) - ld_value = cls(ld_value, parent=parent, key=key, context=merged_contexts) + ld_value = ld_dict(ld_value, parent=parent, key=key, context=merged_contexts) return ld_value diff --git a/test/hermes_test/model/test_api.py b/test/hermes_test/model/test_api.py index 6845a210..895968d7 100644 --- a/test/hermes_test/model/test_api.py +++ b/test/hermes_test/model/test_api.py @@ -53,16 +53,18 @@ def test_init_nested_object(): def test_append(): data = SoftwareMetadata() + data.emplace("schema:name") data["schema:name"].append("a") assert type(data["schema:name"]) is ld_list assert data["schema:name"][0] == "a" and data["schema:name"].item_list == [{"@value": "a"}] data["schema:name"].append("b") assert type(data["schema:name"]) is ld_list and data["schema:name"].item_list == [{"@value": "a"}, {"@value": "b"}] + data.emplace("schema:name") data["schema:name"].append("c") assert data["schema:name"].item_list == [{"@value": "a"}, {"@value": "b"}, {"@value": "c"}] data = SoftwareMetadata() - data["schema:Person"].append({"schema:name": "foo"}) + data.setdefault("schema:Person", []).append({"schema:name": "foo"}) assert type(data["schema:Person"]) is ld_list and type(data["schema:Person"][0]) is ld_dict assert data["schema:Person"][0].data_dict == {"http://schema.org/name": [{"@value": "foo"}]} data["schema:Person"].append({"schema:name": "foo"}) @@ -94,7 +96,7 @@ def test_usage(): data["author"][0]["email"].append("foo@baz.com") assert len(data["author"]) == 2 assert len(data["author"][0]["email"]) == 2 - assert len(data["author"][1]["email"]) == 0 + assert len(data["author"][1].get("email", [])) == 0 harvest = { "authors": [ {"name": "Foo", "affiliation": ["Uni A", "Lab B"], "kw": ["a", "b", "c"]}, @@ -103,17 +105,19 @@ def test_usage(): ] } for author in harvest["authors"]: - for exist_author in data["author"]: - if author["name"] == exist_author["name"][0]: + for exist_author in data.get("author", []): + if author["name"] in exist_author.get("name", []): exist_author["affiliation"] = author["affiliation"] if "email" in author: + exist_author.emplace("email") exist_author["email"].append(author["email"]) if "kw" in author: + exist_author.emplace("schema:knowsAbout") exist_author["schema:knowsAbout"].extend(author["kw"]) break else: - data["author"].append(author) - assert len(data["author"]) == 3 + data.setdefault("author", []).append(author) + assert len(data.get("author", [])) == 3 foo, bar, baz = data["author"] assert foo["name"][0] == "Foo" assert foo["affiliation"].to_python() == ["Uni A", "Lab B"] @@ -124,8 +128,8 @@ def test_usage(): assert bar["email"].to_python() == ["bar@c.edu"] assert baz["name"][0] == "Baz" assert baz["affiliation"].to_python() == ["Lab E"] - assert len(baz["schema:knowsAbout"]) == 0 - assert len(baz["email"]) == 0 + assert len(baz.get("schema:knowsAbout", [])) == 0 + assert len(baz.get("email", [])) == 0 for author in data["author"]: assert "name" in author if "Baz" not in author["name"]: diff --git a/test/hermes_test/model/test_api_e2e.py b/test/hermes_test/model/test_api_e2e.py index fa8f4ac8..16302000 100644 --- a/test/hermes_test/model/test_api_e2e.py +++ b/test/hermes_test/model/test_api_e2e.py @@ -194,17 +194,16 @@ def test_cff_harvest(tmp_path, monkeypatch, cff, res): try: monkeypatch.setattr(context_manager.HermesContext.__init__, "__defaults__", (tmp_path.cwd(),)) cli.main() - except SystemExit: + except SystemExit as e: + if e.code != 0: + raise e + finally: manager = context_manager.HermesContext() manager.prepare_step("harvest") - with manager["cff"] as cache: - result = SoftwareMetadata(cache["codemeta"]) + result = SoftwareMetadata.load_from_cache(manager, "cff") manager.finalize_step("harvest") - finally: sys.argv = orig_argv - # FIXME: update to compare the SoftwareMetadata objects instead of the data_dicts (in multiple places) - # after merge with refactor/data-model and/or refactor/423-implement-public-api assert result == res @@ -354,37 +353,36 @@ def test_codemeta_harvest(tmp_path, monkeypatch, codemeta, res): try: monkeypatch.setattr(context_manager.HermesContext.__init__, "__defaults__", (tmp_path.cwd(),)) cli.main() - except SystemExit: + except SystemExit as e: + if e.code != 0: + raise e + finally: manager = context_manager.HermesContext() manager.prepare_step("harvest") - with manager["codemeta"] as cache: - result = SoftwareMetadata(cache["codemeta"]) + result = SoftwareMetadata.load_from_cache(manager, "codemeta") manager.finalize_step("harvest") - finally: sys.argv = orig_argv assert result == res @pytest.mark.parametrize( - "deposit, res", + "metadata", [ - 2 * ( - SoftwareMetadata({ - "@type": ["http://schema.org/SoftwareSourceCode"], - "http://schema.org/description": [{"@value": "for testing"}], - "http://schema.org/name": [{"@value": "Test"}] - }), - ) + SoftwareMetadata({ + "@type": ["http://schema.org/SoftwareSourceCode"], + "http://schema.org/description": [{"@value": "for testing"}], + "http://schema.org/name": [{"@value": "Test"}] + }), ] ) -def test_file_deposit(tmp_path, monkeypatch, deposit, res): +def test_file_deposit(tmp_path, monkeypatch, metadata): monkeypatch.chdir(tmp_path) manager = context_manager.HermesContext(tmp_path) manager.prepare_step("curate") with manager["result"] as cache: - cache["codemeta"] = deposit.compact() + cache["codemeta"] = metadata.compact() manager.finalize_step("curate") config_file = tmp_path / "hermes.toml" @@ -396,13 +394,15 @@ def test_file_deposit(tmp_path, monkeypatch, deposit, res): try: monkeypatch.setattr(context_manager.HermesContext.__init__, "__defaults__", (tmp_path.cwd(),)) cli.main() - except SystemExit: + except SystemExit as e: + if e.code != 0: + raise e + finally: with open('codemeta.json', 'r') as cache: result = SoftwareMetadata(json.load(cache)) - finally: sys.argv = orig_argv - assert result == res + assert result == metadata @pytest.mark.parametrize( @@ -448,14 +448,15 @@ def test_invenio_deposit(tmp_path, monkeypatch, sandbox_auth, metadata): try: monkeypatch.setattr(context_manager.HermesContext.__init__, "__defaults__", (tmp_path.cwd(),)) cli.main() - except SystemExit: + except SystemExit as e: + if e.code != 0: + raise e + finally: manager.prepare_step("deposit") - result = SoftwareMetadata.load_from_cache(manager, "invenio") + with manager["deposit"] as cache: + result = cache["result"] manager.finalize_step("deposit") - finally: sys.argv = orig_argv - assert result == metadata - -# TODO: handle get() on Softwaremetadata objects in invenio.py - + # TODO: compare to actually expected value + assert result == {} diff --git a/test/hermes_test/model/types/test_ld_dict.py b/test/hermes_test/model/types/test_ld_dict.py index c7a7a183..8736439d 100644 --- a/test/hermes_test/model/types/test_ld_dict.py +++ b/test/hermes_test/model/types/test_ld_dict.py @@ -197,6 +197,25 @@ def test_get(): di["bar"] +def test_setdefault(): + di = ld_dict([{"https://schema.org/name": [{"@value": "Manu Sporny"}]}], + context=[{"schema": "https://schema.org/"}]) + assert di.setdefault("schema:name", []) == [{"@value": "Manu Sporny"}] + assert di.setdefault("schema:email", []) == [] + assert di["schema:email"] == [] + + +def test_emplace(): + di = ld_dict([{"https://schema.org/name": [{"@value": "Manu Sporny"}]}], + context=[{"schema": "https://schema.org/"}]) + di.emplace("schema:name") + assert di["schema:name"] == [{"@value": "Manu Sporny"}] + with pytest.raises(KeyError): + di["schema:email"] + di.emplace("schema:email") + assert di["schema:email"] == [] + + def test_update(): di = ld_dict([{"http://xmlns.com/foaf/0.1/name": [{"@value": "Manu Sporny"}], "http://xmlns.com/foaf/0.1/homepage": [{"@id": "http://manu.sporny.org/"}]}], From 96861ec750f8ef4553a34c062e2b9604b021ff32 Mon Sep 17 00:00:00 2001 From: notactuallyfinn Date: Mon, 2 Feb 2026 10:45:50 +0100 Subject: [PATCH 191/247] adjusted invenio.py and its test a bit --- src/hermes/commands/deposit/invenio.py | 2 + src/hermes/commands/deposit/invenio_rdm.py | 14 ++++-- test/hermes_test/model/test_api_e2e.py | 51 ++++++++++++++-------- 3 files changed, 45 insertions(+), 22 deletions(-) diff --git a/src/hermes/commands/deposit/invenio.py b/src/hermes/commands/deposit/invenio.py index 9434beca..3915d536 100644 --- a/src/hermes/commands/deposit/invenio.py +++ b/src/hermes/commands/deposit/invenio.py @@ -513,6 +513,8 @@ def _codemeta_to_invenio_deposition(self) -> dict: creators = [] for author in metadata.get("author", []): + if not "Person" in author.get("@type", []): + continue creator = {} if len( affils := [ diff --git a/src/hermes/commands/deposit/invenio_rdm.py b/src/hermes/commands/deposit/invenio_rdm.py index a381db90..01e08371 100644 --- a/src/hermes/commands/deposit/invenio_rdm.py +++ b/src/hermes/commands/deposit/invenio_rdm.py @@ -6,9 +6,8 @@ # SPDX-FileContributor: Oliver Bertuch # SPDX-FileContributor: Michael Meinel -import typing as t - from requests import HTTPError +from typing import Union from hermes.commands.deposit.invenio import InvenioClient, InvenioDepositPlugin, InvenioResolver @@ -27,7 +26,7 @@ def get_licenses(self): class InvenioRDMResolver(InvenioResolver): invenio_client_class = InvenioRDMClient - def resolve_license_id(self, license_url: t.Optional[str]) -> t.Optional[dict]: + def resolve_license_id(self, license_url: Union[str, None]) -> Union[dict, None]: """Deliberately try to resolve the license URL to a valid InvenioRDM license information record from the vocabulary. @@ -47,6 +46,12 @@ def resolve_license_id(self, license_url: t.Optional[str]) -> t.Optional[dict]: except HTTPError: pass + # FIXME: Why not get all license_cross_refs and then use a query parameter like this: + # ?q=props.url:("license_url" OR "license_cross_ref[1]" OR ...)&size=1000 + # That would be able to replace _search_license_info. + # FIXME: Some licenses in valid_licenses["hits"]["hits"]["props"]["url"] are only http although + # https://spdx.org/licenses/license.json lists them in crossRef as https + # If the easy "mapping" did not work, we really need to "search" for the correct license ID. response = self.client.get_licenses() response.raise_for_status() @@ -65,6 +70,7 @@ def resolve_license_id(self, license_url: t.Optional[str]) -> t.Optional[dict]: if license_info is not None: break else: + # FIXME: Why is this only raised here and not always when license_info is None? raise RuntimeError(f"Could not resolve license URL {license_url} to a valid identifier.") return license_info @@ -73,7 +79,7 @@ def resolve_license_id(self, license_url: t.Optional[str]) -> t.Optional[dict]: def _extract_license_id_from_response(data: dict) -> str: return data["id"] - def _search_license_info(self, _url: str, valid_licenses: dict) -> t.Optional[dict]: + def _search_license_info(self, _url: str, valid_licenses: dict) -> Union[dict, None]: for license_info in valid_licenses['hits']['hits']: try: if license_info['props']['url'] == _url: diff --git a/test/hermes_test/model/test_api_e2e.py b/test/hermes_test/model/test_api_e2e.py index 16302000..18dc973c 100644 --- a/test/hermes_test/model/test_api_e2e.py +++ b/test/hermes_test/model/test_api_e2e.py @@ -172,7 +172,7 @@ def sandbox_auth(): "http://schema.org/license": [{"@id": "https://spdx.org/licenses/Apache-2.0"}], "http://schema.org/name": [{"@value": "Test"}], "http://schema.org/url": [ - {"@id": 'https://arxiv.org/abs/2201.09015'}, + {"@id": "https://arxiv.org/abs/2201.09015"}, {"@id": "https://docs.software-metadata.pub/en/latest"} ], "http://schema.org/version": [{"@value": "9.0.1"}] @@ -398,7 +398,7 @@ def test_file_deposit(tmp_path, monkeypatch, metadata): if e.code != 0: raise e finally: - with open('codemeta.json', 'r') as cache: + with open("codemeta.json", "r") as cache: result = SoftwareMetadata(json.load(cache)) sys.argv = orig_argv @@ -406,22 +406,37 @@ def test_file_deposit(tmp_path, monkeypatch, metadata): @pytest.mark.parametrize( - "metadata", + "metadata, invenio_metadata", [ - SoftwareMetadata({ - "@type": ["http://schema.org/SoftwareSourceCode"], - "http://schema.org/description": [{"@value": "for testing"}], - "http://schema.org/name": [{"@value": "Test"}], - "http://schema.org/author": [{ - "@type": "http://schema.org/Person", - "http://schema.org/familyName": [{"@value": "Test"}], - "http://schema.org/givenName": [{"@value": "Testi"}] - }], - "http://schema.org/license": [{"@id": "https://spdx.org/licenses/apache-2.0"}] - }), + ( + SoftwareMetadata({ + "@type": ["http://schema.org/SoftwareSourceCode"], + "http://schema.org/description": [{"@value": "for testing"}], + "http://schema.org/name": [{"@value": "Test"}], + "http://schema.org/author": [{ + "@type": "http://schema.org/Person", + "http://schema.org/familyName": [{"@value": "Test"}], + "http://schema.org/givenName": [{"@value": "Testi"}] + }], + "http://schema.org/license": [{"@id": "https://spdx.org/licenses/Apache-2.0"}] + }), + { + "upload_type": "software", + "publication_date": "2026-02-02", + "title": "Test", + "creators": [{"name": "Test, Testi"}], + "description": "for testing", + "access_right": "closed", + "license": "apache-2.0", + "prereserve_doi": True, + "related_identifiers": [ + {"identifier": "10.5281/zenodo.13311079", "relation": "isCompiledBy", "scheme": "doi"} + ] + } + ) ] ) -def test_invenio_deposit(tmp_path, monkeypatch, sandbox_auth, metadata): +def test_invenio_deposit(tmp_path, monkeypatch, sandbox_auth, metadata, invenio_metadata): monkeypatch.chdir(tmp_path) manager = context_manager.HermesContext(tmp_path) @@ -453,10 +468,10 @@ def test_invenio_deposit(tmp_path, monkeypatch, sandbox_auth, metadata): raise e finally: manager.prepare_step("deposit") - with manager["deposit"] as cache: - result = cache["result"] + with manager["invenio"] as cache: + result = cache["deposit"] manager.finalize_step("deposit") sys.argv = orig_argv # TODO: compare to actually expected value - assert result == {} + assert result == invenio_metadata From 248ae33b8f094c361a8280b83241fc780f4629f7 Mon Sep 17 00:00:00 2001 From: notactuallyfinn Date: Fri, 6 Feb 2026 13:12:19 +0100 Subject: [PATCH 192/247] added adjusted files from feature/153-refactor-datamodel for process --- src/hermes/commands/process/base.py | 51 ++++------- src/hermes/model/merge/__init__.py | 3 + src/hermes/model/merge/action.py | 83 ++++++++++++++++++ src/hermes/model/merge/container.py | 116 +++++++++++++++++++++++++ src/hermes/model/merge/match.py | 17 ++++ src/hermes/model/merge/strategy.py | 42 +++++++++ src/hermes/model/types/ld_container.py | 6 +- 7 files changed, 279 insertions(+), 39 deletions(-) create mode 100644 src/hermes/model/merge/__init__.py create mode 100644 src/hermes/model/merge/action.py create mode 100644 src/hermes/model/merge/container.py create mode 100644 src/hermes/model/merge/match.py create mode 100644 src/hermes/model/merge/strategy.py diff --git a/src/hermes/commands/process/base.py b/src/hermes/commands/process/base.py index 9e29d1e6..83480056 100644 --- a/src/hermes/commands/process/base.py +++ b/src/hermes/commands/process/base.py @@ -5,13 +5,13 @@ # SPDX-FileContributor: Michael Meinel import argparse -import json -import sys from pydantic import BaseModel from hermes.commands.base import HermesCommand, HermesPlugin -from hermes.model.context import HermesHarvestContext, CodeMetaContext +from hermes.model.api import SoftwareMetadata +from hermes.model.context_manager import HermesContext +from hermes.model.merge.container import ld_merge_dict class HermesProcessPlugin(HermesPlugin): @@ -33,42 +33,21 @@ class HermesProcessCommand(HermesCommand): def __call__(self, args: argparse.Namespace) -> None: self.args = args - ctx = CodeMetaContext() - - if not (ctx.hermes_dir / "harvest").exists(): - self.log.error("You must run the harvest command before process") - sys.exit(1) + ctx = HermesContext() + merged_doc = ld_merge_dict([{}]) # Get all harvesters harvester_names = self.root_settings.harvest.sources - harvester_names.reverse() # Switch order for priority handling + ctx.prepare_step('harvest') for harvester in harvester_names: self.log.info("## Process data from %s", harvester) - - harvest_context = HermesHarvestContext(ctx, harvester, {}) - try: - harvest_context.load_cache() - # when the harvest step ran, but there is no cache file, this is a serious flaw - except FileNotFoundError: - self.log.warning("No output data from harvester %s found, skipping", harvester) - continue - - ctx.merge_from(harvest_context) - ctx.merge_contexts_from(harvest_context) - - if ctx._errors: - self.log.error('Errors during merge') - self.errors.extend(ctx._errors) - - for ep, error in ctx._errors: - self.log.info(" - %s: %s", ep.name, error) - - tags_path = ctx.get_cache('process', 'tags', create=True) - with tags_path.open('w') as tags_file: - json.dump(ctx.tags, tags_file, indent=2) - - ctx.prepare_codemeta() - - with open(ctx.get_cache("process", ctx.hermes_name, create=True), 'w') as codemeta_file: - json.dump(ctx._data, codemeta_file, indent=2) + merged_doc.update(SoftwareMetadata.load_from_cache(ctx, harvester)) + ctx.finalize_step("harvest") + + ctx.prepare_step("process") + with ctx["result"] as result_ctx: + result_ctx["codemeta"] = merged_doc.compact() + result_ctx["context"] = {"@context": merged_doc.full_context} + result_ctx["expanded"] = merged_doc.ld_value + ctx.finalize_step("process") diff --git a/src/hermes/model/merge/__init__.py b/src/hermes/model/merge/__init__.py new file mode 100644 index 00000000..1741dca8 --- /dev/null +++ b/src/hermes/model/merge/__init__.py @@ -0,0 +1,3 @@ +# SPDX-FileCopyrightText: 2022 German Aerospace Center (DLR) +# +# SPDX-License-Identifier: Apache-2.0 \ No newline at end of file diff --git a/src/hermes/model/merge/action.py b/src/hermes/model/merge/action.py new file mode 100644 index 00000000..80f45591 --- /dev/null +++ b/src/hermes/model/merge/action.py @@ -0,0 +1,83 @@ +# SPDX-FileCopyrightText: 2025 German Aerospace Center (DLR) +# +# SPDX-License-Identifier: Apache-2.0 + +# SPDX-FileContributor: Michael Meinel + +from hermes.model.types import ld_list + + +class MergeError(ValueError): + pass + + +class MergeAction: + def merge(self, target, key, value, update): + raise NotImplementedError() + + +class Reject(MergeAction): + @classmethod + def merge(cls, target, key, value, update): + if value != update: + target.reject(key, update) + return value + + +class Replace(MergeAction): + @classmethod + def merge(cls, target, key, value, update): + if value != update: + target.replace(key, value) + return update + + +class Concat(MergeAction): + @classmethod + def merge(cls, target, key, value, update): + return cls.merge_to_list(value, update) + + @classmethod + def merge_to_list(cls, head, tail): + if not isinstance(head, (list, ld_list)): + head = [head] + if not isinstance(tail, (list, ld_list)): + head.append(tail) + else: + head.extend(tail) + return head + + +class Collect(MergeAction): + def __init__(self, match): + self.match = match + + def merge(self, target, key, value, update): + if not isinstance(value, list): + value = [value] + if not isinstance(update, list): + update = [update] + + for update_item in update: + if not any(self.match(item, update_item) for item in value): + value.append(update_item) + + if len(value) == 1: + return value[0] + else: + return value + + +class MergeSet(MergeAction): + def __init__(self, match, merge_items=True): + self.match = match + self.merge_items = merge_items + + def merge(self, target, key, value, update): + for item in update: + target_item = target.match(key[-1], item, self.match) + if target_item and self.merge_items: + target_item.update(item) + else: + value.append(item) + return value diff --git a/src/hermes/model/merge/container.py b/src/hermes/model/merge/container.py new file mode 100644 index 00000000..80395d87 --- /dev/null +++ b/src/hermes/model/merge/container.py @@ -0,0 +1,116 @@ +# SPDX-FileCopyrightText: 2025 German Aerospace Center (DLR) +# +# SPDX-License-Identifier: Apache-2.0 + +# SPDX-FileContributor: Michael Meinel + +from hermes.model.types import ld_context, ld_dict, ld_list + +from .strategy import CODEMETA_STRATEGY, PROV_STRATEGY, REPLACE_STRATEGY +from ..types.pyld_util import bundled_loader + + +class _ld_merge_container: + def _to_python(self, full_iri, ld_value): + value = super()._to_python(full_iri, ld_value) + if isinstance(value, ld_dict) and not isinstance(value, ld_merge_dict): + value = ld_merge_dict( + value.ld_value, + parent=value.parent, + key=value.key, + index=value.index, + context=value.context + ) + if isinstance(value, ld_list) and not isinstance(value, ld_merge_list): + value = ld_merge_list( + value.ld_value, + parent=value.parent, + key=value.key, + index=value.index, + context=value.context + ) + return value + + +class ld_merge_list(_ld_merge_container, ld_list): + def __init__(self, data, *, parent=None, key=None, index=None, context=None): + super().__init__(data, parent=parent, key=key, index=index, context=context) + + +class ld_merge_dict(_ld_merge_container, ld_dict): + def __init__(self, data, *, parent=None, key=None, index=None, context=None): + super().__init__(data, parent=parent, key=key, index=index, context=context) + + self.update_context(ld_context.HERMES_PROV_CONTEXT) + + self.strategies = {**REPLACE_STRATEGY} + self.add_strategy(CODEMETA_STRATEGY) + self.add_strategy(PROV_STRATEGY) + + def update_context(self, other_context): + if other_context: + if len(self.context) < 1 or not isinstance(self.context[-1], dict): + self.context.append({}) + + if not isinstance(other_context, list): + other_context = [other_context] + for ctx in other_context: + if isinstance(ctx, dict): + # FIXME: Shouldn't the dict be appended instead? + # How it is implemented currently results in anomalies like this: + # other_context = [{"codemeta": "https://doi.org/10.5063/schema/codemeta-1.0/"}] + # self.context = [{"codemeta": "https://doi.org/10.5063/schema/codemeta-2.0/"}] + # resulting context is only [{"codemeta": "https://doi.org/10.5063/schema/codemeta-1.0/"}] + # values that start with "https://doi.org/10.5063/schema/codemeta-2.0/" can't be compacted anymore + self.context[-1].update(ctx) + elif ctx not in self.context: + self.context.insert(0, ctx) + + self.active_ctx = self.ld_proc.initial_ctx(self.context, {"documentLoader": bundled_loader}) + + def update(self, other): + if isinstance(other, ld_dict): + self.update_context(other.context) + + super().update(other) + + def add_strategy(self, strategy): + for key, value in strategy.items(): + self.strategies[key] = {**value, **self.strategies.get(key, {})} + + def __setitem__(self, key, value): + if key in self: + value = self._merge_item(key, value) + super().__setitem__(key, value) + + def match(self, key, value, match): + for index, item in enumerate(self[key]): + if match(item, value): + if isinstance(item, ld_dict) and not isinstance(item, ld_merge_dict): + item = ld_merge_dict( + item.ld_value, parent=item.parent, key=item.key, index=index, context=item.context + ) + elif isinstance(item, ld_list) and not isinstance(item, ld_merge_list): + item = ld_merge_list( + item.ld_value, parent=item.parent, key=item.key, index=index, context=item.context + ) + return item + + def _merge_item(self, key, value): + strategy = {**self.strategies[None]} + ld_types = self.data_dict.get('@type', []) + for ld_type in ld_types: + strategy.update(self.strategies.get(ld_type, {})) + + merger = strategy.get(key, strategy[None]) + return merger.merge(self, [*self.path, key], self[key], value) + + def _add_related(self, rel, key, value): + self.emplace(rel) + self[rel].append({"@type": "schema:PropertyValue", "schema:name": str(key), "schema:value": str(value)}) + + def reject(self, key, value): + self._add_related("hermes-rt:reject", key, value) + + def replace(self, key, value): + self._add_related("hermes-rt:replace", key, value) diff --git a/src/hermes/model/merge/match.py b/src/hermes/model/merge/match.py new file mode 100644 index 00000000..03b9f9ef --- /dev/null +++ b/src/hermes/model/merge/match.py @@ -0,0 +1,17 @@ +# SPDX-FileCopyrightText: 2025 German Aerospace Center (DLR) +# +# SPDX-License-Identifier: Apache-2.0 + +# SPDX-FileContributor: Michael Meinel + + +def match_equals(a, b): + return a == b + + +def match_keys(*keys): + def match_func(left, right): + active_keys = [key for key in keys if key in left and key in right] + pairs = [(left[key] == right[key]) for key in active_keys] + return len(active_keys) > 0 and all(pairs) + return match_func diff --git a/src/hermes/model/merge/strategy.py b/src/hermes/model/merge/strategy.py new file mode 100644 index 00000000..12681fe6 --- /dev/null +++ b/src/hermes/model/merge/strategy.py @@ -0,0 +1,42 @@ +# SPDX-FileCopyrightText: 2025 German Aerospace Center (DLR) +# +# SPDX-License-Identifier: Apache-2.0 + +# SPDX-FileContributor: Michael Meinel + +from hermes.model.types.ld_context import iri_map as iri + +from .action import Reject, Replace, Collect, Concat, MergeSet +from .match import match_equals, match_keys + + +REPLACE_STRATEGY = { + None: { + None: Replace, + "@type": Collect(match_equals), + }, +} + + +REJECT_STRATEGY = { + None: { + None: Reject, + "@type": Collect(match_equals), + }, +} + + +PROV_STRATEGY = { + None: { + iri["hermes-rt:graph"]: Concat, + iri["hermes-rt:replace"]: Concat, + iri["hermes-rt:reject"]: Concat, + }, +} + + +CODEMETA_STRATEGY = { + iri["schema:SoftwareSourceCode"]: { + iri["schema:author"]: MergeSet(match_keys('@id', iri['schema:email'])), + }, +} diff --git a/src/hermes/model/types/ld_container.py b/src/hermes/model/types/ld_container.py index a18c886d..f97868d9 100644 --- a/src/hermes/model/types/ld_container.py +++ b/src/hermes/model/types/ld_container.py @@ -237,7 +237,7 @@ def _to_expanded_json( # while searching build a path such that it leads from the found ld_dicts ld_value to selfs data_dict/ item_list parent = self path = [] - while parent.__class__.__name__ not in ("ld_dict", "SoftwareMetadata"): + while parent.__class__.__name__ not in ("ld_dict", "SoftwareMetadata", "ld_merge_dict"): if parent.container_type == "@list": path.extend(["@list", 0]) elif parent.container_type == "@graph": @@ -250,7 +250,7 @@ def _to_expanded_json( # if neither self nor any of its parents is a ld_dict: # create a dict with the key of the outer most parent of self and this parents ld_value as a value # this dict is stored in an ld_container and simulates the most minimal JSON-LD object possible - if parent.__class__.__name__ not in ("ld_dict", "SoftwareMetadata"): + if parent.__class__.__name__ not in ("ld_dict", "SoftwareMetadata", "ld_merge_dict"): key = self.ld_proc.expand_iri(parent.active_ctx, parent.key) parent = ld_container([{key: parent._data}]) path.append(0) @@ -277,7 +277,7 @@ def _to_expanded_json( [(new_key, temp) for new_key in temp.keys() if isinstance(temp[new_key], special_types)] ) elif isinstance(temp, ld_container): - if temp.__class__.__name__ == "ld_list" and temp.container_type == "@set": + if temp.__class__.__name__ in ("ld_list", "ld_merge_list") and temp.container_type == "@set": ref[key] = temp._data else: ref[key] = temp._data[0] From ebebca4e5099c1a856acfbf755077ca5d0a2aa45 Mon Sep 17 00:00:00 2001 From: notactuallyfinn Date: Fri, 6 Feb 2026 14:00:09 +0100 Subject: [PATCH 193/247] added first tests --- src/hermes/commands/__init__.py | 2 +- src/hermes/commands/cli.py | 4 +- test/hermes_test/model/test_api_e2e.py | 103 +++++++++++++++++++++++++ 3 files changed, 106 insertions(+), 3 deletions(-) diff --git a/src/hermes/commands/__init__.py b/src/hermes/commands/__init__.py index 278faddf..e1ddf036 100644 --- a/src/hermes/commands/__init__.py +++ b/src/hermes/commands/__init__.py @@ -14,6 +14,6 @@ # from hermes.commands.init.base import HermesInitCommand # from hermes.commands.curate.base import HermesCurateCommand from hermes.commands.harvest.base import HermesHarvestCommand -# from hermes.commands.process.base import HermesProcessCommand +from hermes.commands.process.base import HermesProcessCommand from hermes.commands.deposit.base import HermesDepositCommand # from hermes.commands.postprocess.base import HermesPostprocessCommand diff --git a/src/hermes/commands/cli.py b/src/hermes/commands/cli.py index 0ec2d1ae..d465f3b8 100644 --- a/src/hermes/commands/cli.py +++ b/src/hermes/commands/cli.py @@ -16,7 +16,7 @@ # from hermes.commands import (HermesHelpCommand, HermesVersionCommand, HermesCleanCommand, # HermesHarvestCommand, HermesProcessCommand, HermesCurateCommand, # HermesDepositCommand, HermesPostprocessCommand, HermesInitCommand) -from hermes.commands import HermesDepositCommand, HermesHarvestCommand +from hermes.commands import HermesDepositCommand, HermesHarvestCommand, HermesProcessCommand from hermes.commands.base import HermesCommand @@ -43,7 +43,7 @@ def main() -> None: # HermesInitCommand(parser), # HermesCleanCommand(parser), HermesHarvestCommand(parser), - # HermesProcessCommand(parser), + HermesProcessCommand(parser), # HermesCurateCommand(parser), HermesDepositCommand(parser), # HermesPostprocessCommand(parser), diff --git a/test/hermes_test/model/test_api_e2e.py b/test/hermes_test/model/test_api_e2e.py index 18dc973c..0eddc59b 100644 --- a/test/hermes_test/model/test_api_e2e.py +++ b/test/hermes_test/model/test_api_e2e.py @@ -475,3 +475,106 @@ def test_invenio_deposit(tmp_path, monkeypatch, sandbox_auth, metadata, invenio_ # TODO: compare to actually expected value assert result == invenio_metadata + + +@pytest.mark.parametrize( + "metadata_in, metadata_out", + [ + ( + { + "cff": SoftwareMetadata({ + "@type": ["http://schema.org/SoftwareSourceCode"], + "http://schema.org/description": [{"@value": "for testing"}], + "http://schema.org/name": [{"@value": "Test"}], + "http://schema.org/author": [{ + "@type": "http://schema.org/Person", + "http://schema.org/familyName": [{"@value": "Test"}], + "http://schema.org/givenName": [{"@value": "Testi"}] + }], + "http://schema.org/license": [{"@id": "https://spdx.org/licenses/Apache-2.0"}] + }) + }, + SoftwareMetadata({ + "@type": ["http://schema.org/SoftwareSourceCode"], + "http://schema.org/description": [{"@value": "for testing"}], + "http://schema.org/name": [{"@value": "Test"}], + "http://schema.org/author": [{ + "@type": "http://schema.org/Person", + "http://schema.org/familyName": [{"@value": "Test"}], + "http://schema.org/givenName": [{"@value": "Testi"}] + }], + "http://schema.org/license": [{"@id": "https://spdx.org/licenses/Apache-2.0"}] + }) + ), + ( + { + "cff": SoftwareMetadata({ + "@type": ["http://schema.org/SoftwareSourceCode"], + "http://schema.org/name": [{"@value": "Test"}], + "http://schema.org/author": [{ + "@type": "http://schema.org/Person", + "http://schema.org/familyName": [{"@value": "Test"}], + "http://schema.org/givenName": [{"@value": "Testi"}], + "http://schema.org/email": [{"@value": "test.testi@testis.tests"}] + }], + "http://schema.org/license": [{"@id": "https://spdx.org/licenses/Apache-2.0"}] + }), + "codemeta": SoftwareMetadata({ + "@type": ["http://schema.org/SoftwareSourceCode"], + "http://schema.org/description": [{"@value": "for testing"}], + "http://schema.org/name": [{"@value": "Test"}], + "http://schema.org/author": [{ + "@type": "http://schema.org/Person", + "http://schema.org/familyName": [{"@value": "Test"}], + "http://schema.org/givenName": [{"@value": "Testi"}], + "http://schema.org/email": [{"@value": "test.testi@testis.tests"}] + }] + }) + }, + SoftwareMetadata({ + "@type": ["http://schema.org/SoftwareSourceCode"], + "http://schema.org/description": [{"@value": "for testing"}], + "http://schema.org/name": [{"@value": "Test"}], + "http://schema.org/author": [{ + "@type": "http://schema.org/Person", + "http://schema.org/familyName": [{"@value": "Test"}], + "http://schema.org/givenName": [{"@value": "Testi"}], + "http://schema.org/email": [{"@value": "test.testi@testis.tests"}] + }], + "http://schema.org/license": [{"@id": "https://spdx.org/licenses/Apache-2.0"}] + }) + ) + ] +) +def test_process(tmp_path, monkeypatch, metadata_in, metadata_out): + monkeypatch.chdir(tmp_path) + + manager = context_manager.HermesContext(tmp_path) + manager.prepare_step("harvest") + for harvester, result in metadata_in.items(): + with manager[harvester] as cache: + cache["codemeta"] = result.compact() + cache["context"] = {"@context": result.full_context} + cache["expanded"] = result.ld_value + manager.finalize_step("harvest") + + config_file = tmp_path / "hermes.toml" + config_file.write_text(f"[harvest]\nsources = [{", ".join(f"\"{harvester}\"" for harvester in metadata_in)}]") + + orig_argv = sys.argv[:] + sys.argv = ["hermes", "process", "--path", str(tmp_path), "--config", str(config_file)] + result = {} + try: + monkeypatch.setattr(context_manager.HermesContext.__init__, "__defaults__", (tmp_path.cwd(),)) + cli.main() + except SystemExit as e: + if e.code != 0: + raise e + finally: + manager.prepare_step("process") + result = SoftwareMetadata.load_from_cache(manager, "result") + manager.finalize_step("process") + sys.argv = orig_argv + + assert result.ld_value == metadata_out.ld_value + assert result == metadata_out From f21df496ef85d61341dfa31ff15f4cbf54d42a87 Mon Sep 17 00:00:00 2001 From: Michael Fritzsche Date: Mon, 9 Feb 2026 09:16:05 +0100 Subject: [PATCH 194/247] (re)added version and help commands to the available commands --- src/hermes/commands/__init__.py | 6 +++--- src/hermes/commands/base.py | 21 +++++++++++++++++++++ src/hermes/commands/cli.py | 8 +++++--- 3 files changed, 29 insertions(+), 6 deletions(-) diff --git a/src/hermes/commands/__init__.py b/src/hermes/commands/__init__.py index e1ddf036..d239cb0e 100644 --- a/src/hermes/commands/__init__.py +++ b/src/hermes/commands/__init__.py @@ -8,9 +8,9 @@ # "unused import" errors. # flake8: noqa -# from hermes.commands.base import HermesHelpCommand -# from hermes.commands.base import HermesVersionCommand -# from hermes.commands.clean.base import HermesCleanCommand +from hermes.commands.base import HermesHelpCommand +from hermes.commands.base import HermesVersionCommand +from hermes.commands.clean.base import HermesCleanCommand # from hermes.commands.init.base import HermesInitCommand # from hermes.commands.curate.base import HermesCurateCommand from hermes.commands.harvest.base import HermesHarvestCommand diff --git a/src/hermes/commands/base.py b/src/hermes/commands/base.py index 2d182267..12e3c994 100644 --- a/src/hermes/commands/base.py +++ b/src/hermes/commands/base.py @@ -175,6 +175,7 @@ def __call__(self, command: HermesCommand) -> None: class HermesHelpSettings(BaseModel): + """Intentionally empty settings class for the help command.""" pass @@ -200,3 +201,23 @@ def __call__(self, args: argparse.Namespace) -> None: # Otherwise, simply show the general help and exit (cleanly). self.parser.print_help() self.parser.exit() + + +class HermesVersionSettings(BaseModel): + """Intentionally empty settings class for the version command.""" + pass + + +class HermesVersionCommand(HermesCommand): + """Show HERMES version and exit.""" + + command_name = "version" + settings_class = HermesVersionSettings + + def load_settings(self, args: argparse.Namespace): + """Pass loading settings as not necessary for this command.""" + pass + + def __call__(self, args: argparse.Namespace) -> None: + self.log.info(metadata.version("hermes")) + self.parser.exit() diff --git a/src/hermes/commands/cli.py b/src/hermes/commands/cli.py index d465f3b8..debe6f62 100644 --- a/src/hermes/commands/cli.py +++ b/src/hermes/commands/cli.py @@ -16,7 +16,9 @@ # from hermes.commands import (HermesHelpCommand, HermesVersionCommand, HermesCleanCommand, # HermesHarvestCommand, HermesProcessCommand, HermesCurateCommand, # HermesDepositCommand, HermesPostprocessCommand, HermesInitCommand) -from hermes.commands import HermesDepositCommand, HermesHarvestCommand, HermesProcessCommand +from hermes.commands import ( + HermesDepositCommand, HermesHarvestCommand, HermesHelpCommand, HermesProcessCommand, HermesVersionCommand +) from hermes.commands.base import HermesCommand @@ -38,8 +40,8 @@ def main() -> None: setting_types = {} for command in ( - # HermesHelpCommand(parser), - # HermesVersionCommand(parser), + HermesHelpCommand(parser), + HermesVersionCommand(parser), # HermesInitCommand(parser), # HermesCleanCommand(parser), HermesHarvestCommand(parser), From d4d9ca8d6e84edf137cf739483816a346139a151 Mon Sep 17 00:00:00 2001 From: Michael Fritzsche Date: Mon, 9 Feb 2026 09:16:50 +0100 Subject: [PATCH 195/247] made test for process step more complex --- test/hermes_test/model/test_api_e2e.py | 37 +++++++++++++++++--------- 1 file changed, 25 insertions(+), 12 deletions(-) diff --git a/test/hermes_test/model/test_api_e2e.py b/test/hermes_test/model/test_api_e2e.py index 0eddc59b..7a65098b 100644 --- a/test/hermes_test/model/test_api_e2e.py +++ b/test/hermes_test/model/test_api_e2e.py @@ -511,12 +511,18 @@ def test_invenio_deposit(tmp_path, monkeypatch, sandbox_auth, metadata, invenio_ "cff": SoftwareMetadata({ "@type": ["http://schema.org/SoftwareSourceCode"], "http://schema.org/name": [{"@value": "Test"}], - "http://schema.org/author": [{ - "@type": "http://schema.org/Person", - "http://schema.org/familyName": [{"@value": "Test"}], - "http://schema.org/givenName": [{"@value": "Testi"}], - "http://schema.org/email": [{"@value": "test.testi@testis.tests"}] - }], + "http://schema.org/author": [ + { + "@type": "http://schema.org/Person", + "http://schema.org/familyName": [{"@value": "Test"}], + "http://schema.org/email": [{"@value": "test.testi@testis.tests"}] + }, + { + "@type": "http://schema.org/Person", + "http://schema.org/familyName": [{"@value": "Tester"}], + "http://schema.org/email": [{"@value": "test@tester.tests"}] + } + ], "http://schema.org/license": [{"@id": "https://spdx.org/licenses/Apache-2.0"}] }), "codemeta": SoftwareMetadata({ @@ -535,12 +541,19 @@ def test_invenio_deposit(tmp_path, monkeypatch, sandbox_auth, metadata, invenio_ "@type": ["http://schema.org/SoftwareSourceCode"], "http://schema.org/description": [{"@value": "for testing"}], "http://schema.org/name": [{"@value": "Test"}], - "http://schema.org/author": [{ - "@type": "http://schema.org/Person", - "http://schema.org/familyName": [{"@value": "Test"}], - "http://schema.org/givenName": [{"@value": "Testi"}], - "http://schema.org/email": [{"@value": "test.testi@testis.tests"}] - }], + "http://schema.org/author": [ + { + "@type": "http://schema.org/Person", + "http://schema.org/familyName": [{"@value": "Test"}], + "http://schema.org/givenName": [{"@value": "Testi"}], + "http://schema.org/email": [{"@value": "test.testi@testis.tests"}] + }, + { + "@type": "http://schema.org/Person", + "http://schema.org/familyName": [{"@value": "Tester"}], + "http://schema.org/email": [{"@value": "test@tester.tests"}] + } + ], "http://schema.org/license": [{"@id": "https://spdx.org/licenses/Apache-2.0"}] }) ) From 7cfa7bcc7be101dd6580ead1d933f762e768d280 Mon Sep 17 00:00:00 2001 From: Michael Fritzsche Date: Mon, 9 Feb 2026 09:18:19 +0100 Subject: [PATCH 196/247] made process step and ld_container._to_expanded_json more robust --- src/hermes/commands/process/base.py | 9 ++++++++- src/hermes/model/types/ld_container.py | 6 +++--- 2 files changed, 11 insertions(+), 4 deletions(-) diff --git a/src/hermes/commands/process/base.py b/src/hermes/commands/process/base.py index 83480056..1aae0dab 100644 --- a/src/hermes/commands/process/base.py +++ b/src/hermes/commands/process/base.py @@ -11,6 +11,7 @@ from hermes.commands.base import HermesCommand, HermesPlugin from hermes.model.api import SoftwareMetadata from hermes.model.context_manager import HermesContext +from hermes.model.error import HermesContextError from hermes.model.merge.container import ld_merge_dict @@ -42,7 +43,13 @@ def __call__(self, args: argparse.Namespace) -> None: ctx.prepare_step('harvest') for harvester in harvester_names: self.log.info("## Process data from %s", harvester) - merged_doc.update(SoftwareMetadata.load_from_cache(ctx, harvester)) + try: + metadata = SoftwareMetadata.load_from_cache(ctx, harvester) + except HermesContextError as e: + self.log.error("Error while trying to load data from harvest plugin '%s': %s", harvester, e) + self.errors.append(e) + continue + merged_doc.update(metadata) ctx.finalize_step("harvest") ctx.prepare_step("process") diff --git a/src/hermes/model/types/ld_container.py b/src/hermes/model/types/ld_container.py index f97868d9..756f2033 100644 --- a/src/hermes/model/types/ld_container.py +++ b/src/hermes/model/types/ld_container.py @@ -237,7 +237,7 @@ def _to_expanded_json( # while searching build a path such that it leads from the found ld_dicts ld_value to selfs data_dict/ item_list parent = self path = [] - while parent.__class__.__name__ not in ("ld_dict", "SoftwareMetadata", "ld_merge_dict"): + while not "ld_dict" in [sub_cls.__name__ for sub_cls in type(parent).mro()]: if parent.container_type == "@list": path.extend(["@list", 0]) elif parent.container_type == "@graph": @@ -250,7 +250,7 @@ def _to_expanded_json( # if neither self nor any of its parents is a ld_dict: # create a dict with the key of the outer most parent of self and this parents ld_value as a value # this dict is stored in an ld_container and simulates the most minimal JSON-LD object possible - if parent.__class__.__name__ not in ("ld_dict", "SoftwareMetadata", "ld_merge_dict"): + if not "ld_dict" in [sub_cls.__name__ for sub_cls in type(parent).mro()]: key = self.ld_proc.expand_iri(parent.active_ctx, parent.key) parent = ld_container([{key: parent._data}]) path.append(0) @@ -277,7 +277,7 @@ def _to_expanded_json( [(new_key, temp) for new_key in temp.keys() if isinstance(temp[new_key], special_types)] ) elif isinstance(temp, ld_container): - if temp.__class__.__name__ in ("ld_list", "ld_merge_list") and temp.container_type == "@set": + if "ld_list" in [sub_cls.__name__ for sub_cls in type(temp).mro()] and temp.container_type == "@set": ref[key] = temp._data else: ref[key] = temp._data[0] From 520ef39bf267643f32ab13da06d10db22a014565 Mon Sep 17 00:00:00 2001 From: Michael Fritzsche Date: Mon, 9 Feb 2026 09:26:51 +0100 Subject: [PATCH 197/247] improved flake8 rating --- src/hermes/model/merge/__init__.py | 2 +- src/hermes/model/types/ld_container.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/src/hermes/model/merge/__init__.py b/src/hermes/model/merge/__init__.py index 1741dca8..faf5a2f5 100644 --- a/src/hermes/model/merge/__init__.py +++ b/src/hermes/model/merge/__init__.py @@ -1,3 +1,3 @@ # SPDX-FileCopyrightText: 2022 German Aerospace Center (DLR) # -# SPDX-License-Identifier: Apache-2.0 \ No newline at end of file +# SPDX-License-Identifier: Apache-2.0 diff --git a/src/hermes/model/types/ld_container.py b/src/hermes/model/types/ld_container.py index 756f2033..f30a212c 100644 --- a/src/hermes/model/types/ld_container.py +++ b/src/hermes/model/types/ld_container.py @@ -237,7 +237,7 @@ def _to_expanded_json( # while searching build a path such that it leads from the found ld_dicts ld_value to selfs data_dict/ item_list parent = self path = [] - while not "ld_dict" in [sub_cls.__name__ for sub_cls in type(parent).mro()]: + while "ld_dict" not in [sub_cls.__name__ for sub_cls in type(parent).mro()]: if parent.container_type == "@list": path.extend(["@list", 0]) elif parent.container_type == "@graph": @@ -250,7 +250,7 @@ def _to_expanded_json( # if neither self nor any of its parents is a ld_dict: # create a dict with the key of the outer most parent of self and this parents ld_value as a value # this dict is stored in an ld_container and simulates the most minimal JSON-LD object possible - if not "ld_dict" in [sub_cls.__name__ for sub_cls in type(parent).mro()]: + if "ld_dict" not in [sub_cls.__name__ for sub_cls in type(parent).mro()]: key = self.ld_proc.expand_iri(parent.active_ctx, parent.key) parent = ld_container([{key: parent._data}]) path.append(0) From d04b0e2a1f66198481e122a37d463206f46fe9a8 Mon Sep 17 00:00:00 2001 From: "Kernchen, Sophie" Date: Fri, 13 Feb 2026 10:05:48 +0100 Subject: [PATCH 198/247] Remove SysExit call --- test/hermes_test/model/test_api_e2e.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/test/hermes_test/model/test_api_e2e.py b/test/hermes_test/model/test_api_e2e.py index f4ec7fd6..6f3733fc 100644 --- a/test/hermes_test/model/test_api_e2e.py +++ b/test/hermes_test/model/test_api_e2e.py @@ -184,12 +184,13 @@ def test_cff_harvest(tmp_path, monkeypatch, cff, res): monkeypatch.setattr(context_manager.HermesContext.__init__, "__defaults__", (tmp_path.cwd(),)) cli.main() except SystemExit: + print("TODO: Delete wenn package is working again or mock cli") + finally: manager = context_manager.HermesContext() manager.prepare_step("harvest") with manager["cff"] as cache: result = SoftwareMetadata(cache["codemeta"]) manager.finalize_step("harvest") - finally: sys.argv = orig_argv # FIXME: update to compare the SoftwareMetadata objects instead of the data_dicts (in multiple places) @@ -344,12 +345,13 @@ def test_codemeta_harvest(tmp_path, monkeypatch, codemeta, res): monkeypatch.setattr(context_manager.HermesContext.__init__, "__defaults__", (tmp_path.cwd(),)) cli.main() except SystemExit: + print("TODO: Delete wenn package is working again or mock cli") + finally: manager = context_manager.HermesContext() manager.prepare_step("harvest") with manager["codemeta"] as cache: result = SoftwareMetadata(cache["codemeta"]) manager.finalize_step("harvest") - finally: sys.argv = orig_argv assert result.data_dict == res.data_dict From 13a095243758680376e8c2854909753146f6db3b Mon Sep 17 00:00:00 2001 From: "Kernchen, Sophie" Date: Fri, 13 Feb 2026 10:15:35 +0100 Subject: [PATCH 199/247] Fix typo --- test/hermes_test/model/test_api_e2e.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/test/hermes_test/model/test_api_e2e.py b/test/hermes_test/model/test_api_e2e.py index 6f3733fc..7a1f7b2c 100644 --- a/test/hermes_test/model/test_api_e2e.py +++ b/test/hermes_test/model/test_api_e2e.py @@ -184,7 +184,7 @@ def test_cff_harvest(tmp_path, monkeypatch, cff, res): monkeypatch.setattr(context_manager.HermesContext.__init__, "__defaults__", (tmp_path.cwd(),)) cli.main() except SystemExit: - print("TODO: Delete wenn package is working again or mock cli") + print("TODO: Delete when package is working again or mock cli") finally: manager = context_manager.HermesContext() manager.prepare_step("harvest") @@ -345,7 +345,7 @@ def test_codemeta_harvest(tmp_path, monkeypatch, codemeta, res): monkeypatch.setattr(context_manager.HermesContext.__init__, "__defaults__", (tmp_path.cwd(),)) cli.main() except SystemExit: - print("TODO: Delete wenn package is working again or mock cli") + print("TODO: Delete when package is working again or mock cli") finally: manager = context_manager.HermesContext() manager.prepare_step("harvest") From bcdc82124a1a6f3cacd0398bcf3a978ae8a18b57 Mon Sep 17 00:00:00 2001 From: notactuallyfinn Date: Fri, 13 Feb 2026 13:54:47 +0100 Subject: [PATCH 200/247] added lots of comments and fixed small inconsistencies --- src/hermes/commands/deposit/invenio.py | 4 +- src/hermes/model/merge/container.py | 301 +++++++++++++++++++++++-- src/hermes/model/merge/match.py | 53 ++++- src/hermes/model/types/ld_container.py | 6 +- src/hermes/model/types/ld_list.py | 4 +- test/hermes_test/model/test_api_e2e.py | 9 +- 6 files changed, 342 insertions(+), 35 deletions(-) diff --git a/src/hermes/commands/deposit/invenio.py b/src/hermes/commands/deposit/invenio.py index 3915d536..ba45c146 100644 --- a/src/hermes/commands/deposit/invenio.py +++ b/src/hermes/commands/deposit/invenio.py @@ -513,7 +513,7 @@ def _codemeta_to_invenio_deposition(self) -> dict: creators = [] for author in metadata.get("author", []): - if not "Person" in author.get("@type", []): + if "Person" not in author.get("@type", []): continue creator = {} if len( @@ -527,7 +527,7 @@ def _codemeta_to_invenio_deposition(self) -> dict: raise HermesValidationError(f"Author has too many family names: {author}") if len(author.get("familyName", [])) == 1: given_names_str = " ".join(author.get("givenName", [])) - name = f"{author["familyName"][0]}, {given_names_str}" + name = f"{author['familyName'][0]}, {given_names_str}" elif len(author.get("name", [])) != 1: raise HermesValidationError(f"Author has too many or no names: {author}") else: diff --git a/src/hermes/model/merge/container.py b/src/hermes/model/merge/container.py index 80395d87..ec9fedd9 100644 --- a/src/hermes/model/merge/container.py +++ b/src/hermes/model/merge/container.py @@ -3,16 +3,49 @@ # SPDX-License-Identifier: Apache-2.0 # SPDX-FileContributor: Michael Meinel +# SPDX-FileContributor: Michael Fritzsche -from hermes.model.types import ld_context, ld_dict, ld_list +from typing import Callable, Union +from typing_extensions import Self + +from hermes.model.merge.action import MergeAction +from hermes.model.types import ld_container, ld_context, ld_dict, ld_list +from hermes.model.types.ld_container import ( + BASIC_TYPE, EXPANDED_JSON_LD_VALUE, JSON_LD_CONTEXT_DICT, JSON_LD_VALUE, TIME_TYPE +) from .strategy import CODEMETA_STRATEGY, PROV_STRATEGY, REPLACE_STRATEGY from ..types.pyld_util import bundled_loader class _ld_merge_container: - def _to_python(self, full_iri, ld_value): + """ + Abstract base class for ld_merge_dict and ld_merge_list, + providing the merge containers with overrides of ld_container._to_python(). + See also :class:`ld_dict`, :class:`ld_list` and :class:`ld_container`. + """ + + def _to_python( + self: Self, + full_iri: str, + ld_value: Union[EXPANDED_JSON_LD_VALUE, dict[str, EXPANDED_JSON_LD_VALUE], list[str], str] + ) -> Union["ld_merge_dict", "ld_merge_list", BASIC_TYPE, TIME_TYPE]: + """ + Returns a pythonized version of the given value pretending the value is in self and full_iri its key. + + :param self: the ld_container ld_value is considered to be in. + :type self: Self + :param full_iri: The expanded iri of the key of ld_value / self (later if self is not a dictionary). + :type full_iri: str + :param ld_value: The value thats pythonized value is requested. ld_value has to be valid expanded JSON-LD if it + was embeded in self._data. + :type ld_value: EXPANDED_JSON_LD_VALUE | dict[str, EXPANDED_JSON_LD_VALUE] | list[str] | str + + :return: The pythonized value of the ld_value. + :rtype: ld_merge_dict | ld_merge_list | BASIC_TYPE | TIME_TYPE + """ value = super()._to_python(full_iri, ld_value) + # replace ld_dicts with ld_merge_dicts if isinstance(value, ld_dict) and not isinstance(value, ld_merge_dict): value = ld_merge_dict( value.ld_value, @@ -21,6 +54,7 @@ def _to_python(self, full_iri, ld_value): index=value.index, context=value.context ) + # replace ld_lists with ld_merge_lists if isinstance(value, ld_list) and not isinstance(value, ld_merge_list): value = ld_merge_list( value.ld_value, @@ -33,21 +67,108 @@ def _to_python(self, full_iri, ld_value): class ld_merge_list(_ld_merge_container, ld_list): - def __init__(self, data, *, parent=None, key=None, index=None, context=None): + """ + ld_list wrapper to ensure the 'merge_container'-property does not get lost, while merging. + See also :class:`ld_list` and :class:`ld_merge_container`. + """ + + def __init__( + self: "ld_merge_list", + data: Union[list[str], list[dict[str, EXPANDED_JSON_LD_VALUE]]], + *, + parent: Union[ld_container, None] = None, + key: Union[str, None] = None, + index: Union[int, None] = None, + context: Union[list[Union[str, JSON_LD_CONTEXT_DICT]], None] = None + ) -> None: + """ + Create a new ld_merge_list. + For further information on this function and the errors it throws see :meth:`ld_list.__init__`. + + :param self: The instance of ld_merge_list to be initialized. + :type self: Self + :param data: The expanded json-ld data that is mapped (must be valid for @set, @list or @graph) + :type data: list[str] | list[dict[str, BASIC_TYPE | EXPANDED_JSON_LD_VALUE]] + :param parent: parent node of this container. + :type parent: ld_container | None + :param key: key into the parent container. + :type key: str | None + :param index: index into the parent container. + :type index: int | None + :param context: local context for this container. + :type context: list[str | JSON_LD_CONTEXT_DICT] | None + + :return: + :rtype: None + """ super().__init__(data, parent=parent, key=key, index=index, context=context) class ld_merge_dict(_ld_merge_container, ld_dict): - def __init__(self, data, *, parent=None, key=None, index=None, context=None): + """ + ld_dict wrapper providing methods to merge an object of this class with an ld_dict object. + See also :class:`ld_dict` and :class:`ld_merge_container`. + + :ivar strategies: The strategies for merging different types of values in the ld_dicts. + :ivartype strategies: dict[str | None, dict[str | None, MergeAction]] + """ + + def __init__( + self: Self, + data: list[dict[str, EXPANDED_JSON_LD_VALUE]], + *, + parent: Union[ld_dict, ld_list, None] = None, + key: Union[str, None] = None, + index: Union[int, None] = None, + context: Union[list[Union[str, JSON_LD_CONTEXT_DICT]], None] = None + ) -> None: + """ + Create a new instance of an ld_merge_dict. + See also :meth:`ld_dict.__init__`. + + :param self: The instance of ld_container to be initialized. + :type self: Self + :param data: The expanded json-ld data that is mapped. + :type data: EXPANDED_JSON_LD_VALUE + :param parent: parent node of this container. + :type parent: ld_dict | ld_list | None + :param key: key into the parent container. + :type key: str | None + :param index: index into the parent container. + :type index: int | None + :param context: local context for this container. + :type context: list[str | JSON_LD_CONTEXT_DICT] | None + + :return: + :rtype: None + + :raises ValueError: If the given data doesn't represent an ld_dict. + """ super().__init__(data, parent=parent, key=key, index=index, context=context) + # add provernance context self.update_context(ld_context.HERMES_PROV_CONTEXT) + # add strategies self.strategies = {**REPLACE_STRATEGY} self.add_strategy(CODEMETA_STRATEGY) self.add_strategy(PROV_STRATEGY) - def update_context(self, other_context): + def update_context( + self: Self, other_context: Union[list[Union[str, JSON_LD_CONTEXT_DICT]], None] + ) -> None: + """ + Updates selfs context with other_context. + JSON-LD processing prioritizes the context values in order (first least important, last most important). + + :param self: The instance of the ld_merge_dict context is added to. + :type self: Self + :param other_context: The context object that is added to selfs context. + :type other_context: list[str | JSON_LD_CONTEXT_DICT] | None + + :return: + :rtype: None + """ if other_context: if len(self.context) < 1 or not isinstance(self.context[-1], dict): self.context.append({}) @@ -56,7 +177,7 @@ def update_context(self, other_context): other_context = [other_context] for ctx in other_context: if isinstance(ctx, dict): - # FIXME: Shouldn't the dict be appended instead? + # FIXME #471: Shouldn't the dict be appended instead? # How it is implemented currently results in anomalies like this: # other_context = [{"codemeta": "https://doi.org/10.5063/schema/codemeta-1.0/"}] # self.context = [{"codemeta": "https://doi.org/10.5063/schema/codemeta-2.0/"}] @@ -64,53 +185,187 @@ def update_context(self, other_context): # values that start with "https://doi.org/10.5063/schema/codemeta-2.0/" can't be compacted anymore self.context[-1].update(ctx) elif ctx not in self.context: + # FIXME #471: If multiple string values are in self.context, the others are prefered + # if the new one is inserted at the beginning. But with the dictionaries the order is reversed. self.context.insert(0, ctx) + # update the active context that is used for compaction/ expansion self.active_ctx = self.ld_proc.initial_ctx(self.context, {"documentLoader": bundled_loader}) - def update(self, other): + def update(self: Self, other: ld_dict) -> None: + """ + Updates/ Merges this ld_merge dict with the given ld_dict other. + This overwrites :meth:`ld_dict.update`, and may cause unexpected behavior if not used carefully. + + :param self: The ld_merge_dict that is updated with other. + :type self: Self + :param other: The ld_container that is merged into self. + :type other: ld_dict + + :return: + :rtype: None + """ + # update add all new context if isinstance(other, ld_dict): self.update_context(other.context) + # add the acutal values based on the MergeAction strategies + # this works implicitly because ld_dict.update invokes self.__setitem__ which is overwritten by ld_merge_dict super().update(other) - def add_strategy(self, strategy): + def add_strategy(self: Self, strategy: dict[Union[str, None], dict[Union[str, None], MergeAction]]) -> None: + """ + Adds the given strategy to the self.strategies. + + :param self: The ld_merge_dict the strategy is added to. + :type self: Self + :param strategy: The object describing how which object types are supposed to be merged. + :type strategy: dict[str | None, dict[str | None, MergeAction]] + """ for key, value in strategy.items(): self.strategies[key] = {**value, **self.strategies.get(key, {})} - def __setitem__(self, key, value): + def __setitem__(self: Self, key: str, value: Union[JSON_LD_VALUE, BASIC_TYPE, TIME_TYPE, ld_dict, ld_list]): + """ + Creates the new entry for self[key] using self.strategies on the values in self[key] and value. + Wraps :meth:`ld_dict.__setitem__`, and may cause unexpected behavior if not used carefully. + + :param self: The ld_merge_dict whose value at key gets updated/ merged with value. + :type self: Self + :param key: The key at whicht the value is updated/ merged at in self. + :type key: str + :param value: The value that is merged into self[key]. + :type value: JSON_LD_VALUE | BASIC_TYPE | TIME_TYPE | ld_dict | ld_list + """ + # create the new item if self[key] and value have to be merged. if key in self: value = self._merge_item(key, value) + # update the entry of self[key] super().__setitem__(key, value) - def match(self, key, value, match): - for index, item in enumerate(self[key]): + def match( + self: Self, + key: str, + value: Union[BASIC_TYPE, TIME_TYPE, ld_dict, ld_list], + match: Union[ + Callable[ + [ + Union[BASIC_TYPE, TIME_TYPE, "ld_merge_dict", ld_merge_list], + Union[BASIC_TYPE, TIME_TYPE, ld_dict, ld_list] + ], + bool + ], + Callable[["ld_merge_dict", ld_dict], bool] + ] + ) -> Union[BASIC_TYPE, TIME_TYPE, "ld_merge_dict", ld_merge_list]: + """ + Returns the first item in self[key] for which match(item, value) returns true. + If no such item is found None is returned instead. + + :param self: The ld_merge_dict in whose entry for key a match for value is searched. + :type self: Self + :param key: The key to the items in self in which a match for value is searched. + :type key: str + :param value: The value a match is searched for in self[key]. + :type value: Union[JSON_LD_VALUE, BASIC_TYPE, TIME_TYPE, ld_dict, ld_list] + :param match: The method defining if two objects are a match. + :type match: Callable[ + [ + BASIC_TYPE | TIME_TYPE | ld_merge_dict | ld_merge_list, + BASIC_TYPE | TIME_TYPE | ld_dict | ld_list + ], + bool + ] | Callable[[ld_merge_dict, ld_dict], bool] + + :return: The item in self[key] that is a match to value if one exists else None + :rtype: BASIC_TYPE | TIME_TYPE | ld_merge_dict | ld_merge_list + """ + # iterate over all items in self[key] and return the first that is a match + for item in self[key]: if match(item, value): - if isinstance(item, ld_dict) and not isinstance(item, ld_merge_dict): - item = ld_merge_dict( - item.ld_value, parent=item.parent, key=item.key, index=index, context=item.context - ) - elif isinstance(item, ld_list) and not isinstance(item, ld_merge_list): - item = ld_merge_list( - item.ld_value, parent=item.parent, key=item.key, index=index, context=item.context - ) return item - def _merge_item(self, key, value): + def _merge_item( + self: Self, key: str, value: Union[BASIC_TYPE, TIME_TYPE, ld_dict, ld_list] + ) -> Union[BASIC_TYPE, TIME_TYPE, "ld_merge_dict", ld_merge_list]: + """ + Applies the most suitable merge strategy to merge self[key] and value and then returns the result. + + :param self: The ld_merge_dict whose entry at key is to be merged with value. + :type self: Self + :param key: The key to the entry in self that is to be merged with value. + :type key: str + :param value: The value that is to be merged with self[key]. + :type value: BASIC_TYPE | TIME_TYPE | ld_dict | ld_list + + :return: The result of the merge from self[key] with value. + :rtype: BASIC_TYPE | TIME_TYPE | ld_merge_dict | ld_merge_list + """ + # search for all applicable strategies strategy = {**self.strategies[None]} ld_types = self.data_dict.get('@type', []) for ld_type in ld_types: strategy.update(self.strategies.get(ld_type, {})) + # choose one merge strategy and return the item returned by following the merge startegy merger = strategy.get(key, strategy[None]) return merger.merge(self, [*self.path, key], self[key], value) - def _add_related(self, rel, key, value): + def _add_related( + self: Self, rel: str, key: str, value: Union[BASIC_TYPE, TIME_TYPE, ld_dict, ld_list] + ) -> None: + """ + Adds an entry for rel to self containing which key and value is affected. + + :param self: The ld_merge_container the special entry is added to. + :type self: Self + :param rel: The "type" of the special entry (used as the key). + :type rel: str + :param key: The key of the affected key, value pair in self. + :type key: str + :param value: The value of the affected key, value pair in self. + :type value: BASIC_TYPE | TIME_TYPE | ld_dict | ld_list + + :return: + :rtype: None + """ + # make sure appending is possible self.emplace(rel) + # append the new entry self[rel].append({"@type": "schema:PropertyValue", "schema:name": str(key), "schema:value": str(value)}) - def reject(self, key, value): + def reject(self: Self, key: str, value: Union[BASIC_TYPE, TIME_TYPE, ld_dict, ld_list]) -> None: + """ + Adds an entry to self containing containing information that the key, value pair + key, value has been rejected in the merge. + For further information see :meth:`ld_merge_dict._add_related`. + + :param self: The ld_merge_container the special entry is added to. + :type self: Self + :param key: The key of the rejected key, value pair in self. + :type key: str + :param value: The value of the rejected key, value pair in self. + :type value: BASIC_TYPE | TIME_TYPE | ld_dict | ld_list + + :return: + :rtype: None + """ self._add_related("hermes-rt:reject", key, value) - def replace(self, key, value): + def replace(self: Self, key: str, value: Union[BASIC_TYPE, TIME_TYPE, ld_dict, ld_list]) -> None: + """ + Adds an entry to self containing containing information that the key, value pair + key, value was replaced in the merge. + For further information see :meth:`ld_merge_dict._add_related`. + + :param self: The ld_merge_container the special entry is added to. + :type self: Self + :param key: The key of the old key, value pair in self. + :type key: str + :param value: The value of the old key, value pair in self. + :type value: BASIC_TYPE | TIME_TYPE | ld_dict | ld_list + + :return: + :rtype: None + """ self._add_related("hermes-rt:replace", key, value) diff --git a/src/hermes/model/merge/match.py b/src/hermes/model/merge/match.py index 03b9f9ef..77abca35 100644 --- a/src/hermes/model/merge/match.py +++ b/src/hermes/model/merge/match.py @@ -4,14 +4,61 @@ # SPDX-FileContributor: Michael Meinel +from typing import Any, Callable -def match_equals(a, b): +from hermes.model.merge.container import ld_merge_dict +from hermes.model.types import ld_dict + + +def match_equals(a: Any, b: Any) -> bool: + """ + Wrapper method for normal == comparison. + + :param a: First item for the comparison. + :type a: Any + :param b: Second item for the comparison. + :type b: Any + + :return: Truth value of a == b. + :rtype: bool + """ return a == b -def match_keys(*keys): - def match_func(left, right): +def match_keys( + *keys: list[str] +) -> Callable[[ld_merge_dict, ld_dict], bool]: + """ + Creates a function taking to parameters that returns true + if both given parameter have at least one common key in the given list of keys + and for all common keys in the given list of keys the values of both objects are the same. + + :param keys: The list of important keys for the comparison method. + :type keys: list[str] + + :return: A function comparing two given objects values for the keys in keys. + :rtype: Callable[[ld_merge_dict, ld_dict], bool] + """ + + # create and return the match function using the given keys + def match_func(left: ld_merge_dict, right: ld_dict) -> bool: + """ + Compares left to right by checking if a) they have at least one common key in a predetermined list of keys and + b) testing if both objects have equal values for all common keys in the predetermined key list. + + :param left: The first object for the comparison. + :type left: ld_merge_dict + :param right: The second object for the comparison. + :type right: ld_dict + + :return: The result of the comparison. + :rtype: bool + """ + # create a list of all common important keys active_keys = [key for key in keys if key in left and key in right] + # check if both objects have the same values for all active keys pairs = [(left[key] == right[key]) for key in active_keys] + # return whether or not both objects had the same values for all active keys + # and there was at least one active key return len(active_keys) > 0 and all(pairs) return match_func diff --git a/src/hermes/model/types/ld_container.py b/src/hermes/model/types/ld_container.py index f30a212c..b2456017 100644 --- a/src/hermes/model/types/ld_container.py +++ b/src/hermes/model/types/ld_container.py @@ -176,7 +176,9 @@ def ld_value(self: Self) -> EXPANDED_JSON_LD_VALUE: return self._data def _to_python( - self: Self, full_iri: str, ld_value: Union[list, dict, str] + self: Self, + full_iri: str, + ld_value: Union[EXPANDED_JSON_LD_VALUE, dict[str, EXPANDED_JSON_LD_VALUE], list[str], str] ) -> Union["ld_container", BASIC_TYPE, TIME_TYPE]: """ Returns a pythonized version of the given value pretending the value is in self and full_iri its key. @@ -187,7 +189,7 @@ def _to_python( :type full_iri: str :param ld_value: The value thats pythonized value is requested. ld_value has to be valid expanded JSON-LD if it was embeded in self._data. - :type ld_value: list | dict | str + :type ld_value: EXPANDED_JSON_LD_VALUE | dict[str, EXPANDED_JSON_LD_VALUE] | list[str] | str :return: The pythonized value of the ld_value. :rtype: ld_container | BASIC_TYPE | TIME_TYPE diff --git a/src/hermes/model/types/ld_list.py b/src/hermes/model/types/ld_list.py index c4d1c450..a76db3b6 100644 --- a/src/hermes/model/types/ld_list.py +++ b/src/hermes/model/types/ld_list.py @@ -23,7 +23,7 @@ class ld_list(ld_container): """ An JSON-LD container resembling a list ("@set", "@list" or "@graph"). - See also :class:`ld_container` + See also :class:`ld_container`. :ivar container_type: The type of JSON-LD container the list is representing. ("@set", "@list", "graph") :ivartype container_type: str @@ -35,7 +35,7 @@ def __init__( self: Self, data: Union[list[str], list[dict[str, EXPANDED_JSON_LD_VALUE]]], *, - parent: Union["ld_container", None] = None, + parent: Union[ld_container, None] = None, key: Union[str, None] = None, index: Union[int, None] = None, context: Union[list[Union[str, JSON_LD_CONTEXT_DICT]], None] = None, diff --git a/test/hermes_test/model/test_api_e2e.py b/test/hermes_test/model/test_api_e2e.py index 7a65098b..f756f101 100644 --- a/test/hermes_test/model/test_api_e2e.py +++ b/test/hermes_test/model/test_api_e2e.py @@ -4,6 +4,7 @@ # SPDX-FileContributor: Michael Fritzsche +from datetime import date import json import pytest import sys @@ -422,7 +423,7 @@ def test_file_deposit(tmp_path, monkeypatch, metadata): }), { "upload_type": "software", - "publication_date": "2026-02-02", + "publication_date": date.today().isoformat(), "title": "Test", "creators": [{"name": "Test, Testi"}], "description": "for testing", @@ -445,6 +446,8 @@ def test_invenio_deposit(tmp_path, monkeypatch, sandbox_auth, metadata, invenio_ cache["codemeta"] = metadata.compact() manager.finalize_step("curate") + (tmp_path / "test.txt").write_text("Test, oh wonderful test!\n") + config_file = tmp_path / "hermes.toml" config_file.write_text(f"""[deposit] target = "invenio" @@ -452,7 +455,7 @@ def test_invenio_deposit(tmp_path, monkeypatch, sandbox_auth, metadata, invenio_ site_url = "https://sandbox.zenodo.org" access_right = "closed" auth_token = "{sandbox_auth}" -files = ["hermes.toml"] +files = ["test.txt"] [deposit.invenio.api_paths] licenses = "api/vocabularies/licenses" """) @@ -572,7 +575,7 @@ def test_process(tmp_path, monkeypatch, metadata_in, metadata_out): manager.finalize_step("harvest") config_file = tmp_path / "hermes.toml" - config_file.write_text(f"[harvest]\nsources = [{", ".join(f"\"{harvester}\"" for harvester in metadata_in)}]") + config_file.write_text(f"[harvest]\nsources = [{', '.join(f'\"{harvester}\"' for harvester in metadata_in)}]") orig_argv = sys.argv[:] sys.argv = ["hermes", "process", "--path", str(tmp_path), "--config", str(config_file)] From 1c10dcab898d4e5c31b33b383a64af4b7430ba20 Mon Sep 17 00:00:00 2001 From: notactuallyfinn Date: Fri, 27 Feb 2026 12:30:41 +0100 Subject: [PATCH 201/247] added coments and fix small bug --- src/hermes/model/api.py | 7 + src/hermes/model/merge/action.py | 259 ++++++++++++++++++++++--- src/hermes/model/merge/container.py | 44 ++--- src/hermes/model/merge/match.py | 14 +- src/hermes/model/merge/strategy.py | 13 +- src/hermes/model/types/ld_container.py | 14 +- test/hermes_test/model/test_api.py | 7 + 7 files changed, 280 insertions(+), 78 deletions(-) diff --git a/src/hermes/model/api.py b/src/hermes/model/api.py index 24f1405e..db582656 100644 --- a/src/hermes/model/api.py +++ b/src/hermes/model/api.py @@ -1,3 +1,10 @@ +# SPDX-FileCopyrightText: 2026 German Aerospace Center (DLR) +# +# SPDX-License-Identifier: Apache-2.0 + +# SPDX-FileContributor: Michael Fritzsche +# SPDX-FileContributor: Stephan Druskat + from hermes.model.context_manager import HermesContext, HermesContexError from hermes.model.types import ld_dict from hermes.model.types.ld_context import ALL_CONTEXTS diff --git a/src/hermes/model/merge/action.py b/src/hermes/model/merge/action.py index 80f45591..08a2c084 100644 --- a/src/hermes/model/merge/action.py +++ b/src/hermes/model/merge/action.py @@ -3,81 +3,282 @@ # SPDX-License-Identifier: Apache-2.0 # SPDX-FileContributor: Michael Meinel +# SPDX-FileContributor: Michael Fritzsche -from hermes.model.types import ld_list +from __future__ import annotations + +from typing import TYPE_CHECKING, Callable, Union +from typing_extensions import Self + +from ..types import ld_dict, ld_list +from ..types.ld_container import BASIC_TYPE, JSON_LD_VALUE, TIME_TYPE + +if TYPE_CHECKING: + from .container import ld_merge_dict, ld_merge_list class MergeError(ValueError): + """ Class for any error while merging. """ pass class MergeAction: - def merge(self, target, key, value, update): + """ Base class for the different actions occuring druing a merge. """ + def merge( + self: Self, + target: ld_merge_dict, + key: list[Union[str, int]], + value: ld_merge_list, + update: Union[BASIC_TYPE, TIME_TYPE, ld_dict, ld_list] + ) -> Union[JSON_LD_VALUE, BASIC_TYPE, TIME_TYPE, ld_dict, ld_list]: + """ + An abstract method that needs to be implemented by all subclasses + to have a generic way to use the merge actions. + + :param target: The ld_merge_dict inside of which the items are merged. + :type target: ld_merge_dict + :param key: The "path" of keys so that parent[key[-1]] is value and + for the outermost parent of target out_parent out_parent[key[0]]...[key[-1]] results in value. + :type key: list[str | int] + :param value: The value inside target that is to be merged with update. + :type value: ld_merge_list + :param update: The value that is to be merged into target with value. + :type update: BASIC_TYPE | TIME_TYPE | ld_dict | ld_list + + :return: The merged value in an arbitrary format that is supported by :meth:`ld_dict.__setitem__`. + :rtype: JSON_LD_VALUE | BASIC_TYPE | TIME_TYPE | ld_dict | ld_list + """ raise NotImplementedError() class Reject(MergeAction): - @classmethod - def merge(cls, target, key, value, update): + def merge( + self: Self, + target: ld_merge_dict, + key: list[Union[str, int]], + value: ld_merge_list, + update: Union[BASIC_TYPE, TIME_TYPE, ld_dict, ld_list] + ) -> ld_merge_list: + """ + Rejects the new data ``update`` and lets target add an entry to itself documenting what data has been rejected. + + :param target: The ld_merge_dict inside of which the items are merged. + :type target: ld_merge_dict + :param key: The "path" of keys so that parent[key[-1]] is value and + for the outermost parent of target out_parent out_parent[key[0]]...[key[-1]] results in value. + :type key: list[str | int] + :param value: The value inside target that is to be merged with update.
This value won't be changed. + :type value: ld_merge_list + :param update: The value that is to be merged into target with value.
This value will be rejected. + :type update: BASIC_TYPE | TIME_TYPE | ld_dict | ld_list + + :return: The merged value.
+ This value will always be value. + :rtype: ld_merge_list + """ + # If necessary, add the entry that data has been rejected. if value != update: target.reject(key, update) + # Return value unchanged. return value class Replace(MergeAction): - @classmethod - def merge(cls, target, key, value, update): + def merge( + self: Self, + target: ld_merge_dict, + key: list[Union[str, int]], + value: ld_merge_list, + update: Union[BASIC_TYPE, TIME_TYPE, ld_dict, ld_list] + ) -> Union[BASIC_TYPE, TIME_TYPE, ld_dict, ld_list]: + """ + Replaces the old data ``value`` with the new data ``update`` + and lets target add an entry to itself documenting what data has been replaced. + + :param target: The ld_merge_dict inside of which the items are merged. + :type target: ld_merge_dict + :param key: The "path" of keys so that parent[key[-1]] is value and + for the outermost parent of target out_parent out_parent[key[0]]...[key[-1]] results in value. + :type key: list[str | int] + :param value: The value inside target that is to be merged with update.
This value will bew replaced. + :type value: ld_merge_list + :param update: The value that is to be merged into target with value.
+ This value will be used instead of value. + :type update: BASIC_TYPE | TIME_TYPE | ld_dict | ld_list + + :return: The merged value.
+ This value will be update. + :rtype: BASIC_TYPE | TIME_TYPE | ld_dict | ld_list + """ + # If necessary, add the entry that data has been replaced. if value != update: target.replace(key, value) + # Return the new value. return update class Concat(MergeAction): - @classmethod - def merge(cls, target, key, value, update): - return cls.merge_to_list(value, update) - - @classmethod - def merge_to_list(cls, head, tail): - if not isinstance(head, (list, ld_list)): - head = [head] - if not isinstance(tail, (list, ld_list)): - head.append(tail) + def merge( + self: Self, + target: ld_merge_dict, + key: list[Union[str, int]], + value: ld_merge_list, + update: Union[BASIC_TYPE, TIME_TYPE, ld_dict, ld_list] + ) -> ld_merge_list: + """ + Concatenates the new data ``update`` to the old data ``value``. + + :param target: The ld_merge_dict inside of which the items are merged. + :type target: ld_merge_dict + :param key: The "path" of keys so that parent[key[-1]] is value and + for the outermost parent of target out_parent out_parent[key[0]]...[key[-1]] results in value. + :type key: list[str | int] + :param value: The value inside target that is to be merged with update. + :type value: ld_merge_list + :param update: The value that is to be merged into target with value. + :type update: BASIC_TYPE | TIME_TYPE | ld_dict | ld_list + + :return: The merged value.
+ ``value`` concatenated with ``update``. + :rtype: ld_merge_list + """ + # Concatenate the items and return the result. + if isinstance(update, (list, ld_list)): + value.extend(update) else: - head.extend(tail) - return head + value.append(update) + return value class Collect(MergeAction): - def __init__(self, match): + def __init__( + self: Self, + match: Union[ + Callable[ + [ + Union[BASIC_TYPE, TIME_TYPE, ld_merge_dict, ld_merge_list], + Union[BASIC_TYPE, TIME_TYPE, ld_dict, ld_list] + ], + bool + ], + Callable[[ld_merge_dict, ld_dict], bool] + ] + ) -> None: + """ + Set the match function for this collect merge action. + + :param match: The function used to evaluate equality while merging. + :type match: Callable[ + [BASIC_TYPE | TIME_TYPE | ld_merge_dict | ld_merge_list, BASIC_TYPE | TIME_TYPE | ld_dict | ld_list], + bool + ] | Callable[[ld_merge_dict, ld_dict], bool] + + :return: + :rtype: None + """ self.match = match - def merge(self, target, key, value, update): - if not isinstance(value, list): - value = [value] - if not isinstance(update, list): + def merge( + self: Self, + target: ld_merge_dict, + key: list[Union[str, int]], + value: ld_merge_list, + update: Union[BASIC_TYPE, TIME_TYPE, ld_dict, ld_list] + ) -> ld_merge_list: + """ + Collects the unique items (according to :attr:`match`) from ``value`` and ``update``. + + :param target: The ld_merge_dict inside of which the items are merged. + :type target: ld_merge_dict + :param key: The "path" of keys so that parent[key[-1]] is value and + for the outermost parent of target out_parent out_parent[key[0]]...[key[-1]] results in value. + :type key: list[str | int] + :param value: The value inside target that is to be merged with update. + :type value: ld_merge_list + :param update: The value that is to be merged into target with value. + :type update: BASIC_TYPE | TIME_TYPE | ld_dict | ld_list + + :return: The merged value. + :rtype: ld_merge_list + """ + if not isinstance(update, (list, ld_list)): update = [update] + # iterate over all new items for update_item in update: + # If the current new item has no occurence in value (according to self.match) add it to value. if not any(self.match(item, update_item) for item in value): value.append(update_item) - if len(value) == 1: - return value[0] - else: - return value + return value class MergeSet(MergeAction): - def __init__(self, match, merge_items=True): + def __init__( + self: Self, + match: Union[ + Callable[ + [ + Union[BASIC_TYPE, TIME_TYPE, ld_merge_dict, ld_merge_list], + Union[BASIC_TYPE, TIME_TYPE, ld_dict, ld_list] + ], + bool + ], + Callable[[ld_merge_dict, ld_dict], bool] + ], + merge_items: bool = True + ) -> None: + """ + Set the match function for this collect merge action. + + :param match: The function used to evaluate equality while merging. + :type match: Callable[ + [BASIC_TYPE | TIME_TYPE | ld_merge_dict | ld_merge_list, BASIC_TYPE | TIME_TYPE | ld_dict | ld_list], + bool + ] | Callable[[ld_merge_dict, ld_dict], bool] + :param merge_items: Whether or to to merge similar items. (If false this is basically :class:`Concat`) + :type merge_items: bool + + :return: + :rtype: None + """ self.match = match self.merge_items = merge_items - def merge(self, target, key, value, update): + def merge( + self: Self, + target: ld_merge_dict, + key: list[Union[str, int]], + value: ld_merge_list, + update: Union[BASIC_TYPE, TIME_TYPE, ld_dict, ld_list] + ) -> ld_merge_list: + """ + Merges similar items (according to :attr:`match`) from ``value`` and ``update``. + + :param target: The ld_merge_dict inside of which the items are merged. + :type target: ld_merge_dict + :param key: The "path" of keys so that parent[key[-1]] is value and + for the outermost parent of target out_parent out_parent[key[0]]...[key[-1]] results in value. + :type key: list[str | int] + :param value: The value inside target that is to be merged with update. + :type value: ld_merge_list + :param update: The value that is to be merged into target with value. + :type update: BASIC_TYPE | TIME_TYPE | ld_dict | ld_list + + :return: The merged value. + :rtype: ld_merge_list + """ + if not isinstance(update, (list, ld_list)): + update = [update] + for item in update: + # For each new item merge it into a similar item (according to match) inside target[key[-1]] + # (aka inside value) if such an item exists and merging is permitted. + # Otherwise append it to target[key[-1]] (aka to value). target_item = target.match(key[-1], item, self.match) if target_item and self.merge_items: target_item.update(item) else: value.append(item) + # Return the merged values. return value diff --git a/src/hermes/model/merge/container.py b/src/hermes/model/merge/container.py index ec9fedd9..30af9aea 100644 --- a/src/hermes/model/merge/container.py +++ b/src/hermes/model/merge/container.py @@ -5,17 +5,20 @@ # SPDX-FileContributor: Michael Meinel # SPDX-FileContributor: Michael Fritzsche -from typing import Callable, Union +from __future__ import annotations + +from typing import Callable, Union, TYPE_CHECKING from typing_extensions import Self -from hermes.model.merge.action import MergeAction -from hermes.model.types import ld_container, ld_context, ld_dict, ld_list -from hermes.model.types.ld_container import ( +from ..types import ld_container, ld_context, ld_dict, ld_list +from ..types.ld_container import ( BASIC_TYPE, EXPANDED_JSON_LD_VALUE, JSON_LD_CONTEXT_DICT, JSON_LD_VALUE, TIME_TYPE ) - -from .strategy import CODEMETA_STRATEGY, PROV_STRATEGY, REPLACE_STRATEGY from ..types.pyld_util import bundled_loader +from .strategy import CODEMETA_STRATEGY, PROV_STRATEGY, REPLACE_STRATEGY + +if TYPE_CHECKING: + from .action import MergeAction class _ld_merge_container: @@ -170,24 +173,12 @@ def update_context( :rtype: None """ if other_context: - if len(self.context) < 1 or not isinstance(self.context[-1], dict): - self.context.append({}) - - if not isinstance(other_context, list): - other_context = [other_context] - for ctx in other_context: - if isinstance(ctx, dict): - # FIXME #471: Shouldn't the dict be appended instead? - # How it is implemented currently results in anomalies like this: - # other_context = [{"codemeta": "https://doi.org/10.5063/schema/codemeta-1.0/"}] - # self.context = [{"codemeta": "https://doi.org/10.5063/schema/codemeta-2.0/"}] - # resulting context is only [{"codemeta": "https://doi.org/10.5063/schema/codemeta-1.0/"}] - # values that start with "https://doi.org/10.5063/schema/codemeta-2.0/" can't be compacted anymore - self.context[-1].update(ctx) - elif ctx not in self.context: - # FIXME #471: If multiple string values are in self.context, the others are prefered - # if the new one is inserted at the beginning. But with the dictionaries the order is reversed. - self.context.insert(0, ctx) + if not isinstance(self.context, list): + self.context = [self.context] + if isinstance(other_context, list): + self.context = [*other_context, *self.context] + else: + self.context = [other_context, *self.context] # update the active context that is used for compaction/ expansion self.active_ctx = self.ld_proc.initial_ctx(self.context, {"documentLoader": bundled_loader}) @@ -270,10 +261,7 @@ def match( :type value: Union[JSON_LD_VALUE, BASIC_TYPE, TIME_TYPE, ld_dict, ld_list] :param match: The method defining if two objects are a match. :type match: Callable[ - [ - BASIC_TYPE | TIME_TYPE | ld_merge_dict | ld_merge_list, - BASIC_TYPE | TIME_TYPE | ld_dict | ld_list - ], + [BASIC_TYPE | TIME_TYPE | ld_merge_dict | ld_merge_list, BASIC_TYPE | TIME_TYPE | ld_dict | ld_list], bool ] | Callable[[ld_merge_dict, ld_dict], bool] diff --git a/src/hermes/model/merge/match.py b/src/hermes/model/merge/match.py index 77abca35..453bfba1 100644 --- a/src/hermes/model/merge/match.py +++ b/src/hermes/model/merge/match.py @@ -3,11 +3,16 @@ # SPDX-License-Identifier: Apache-2.0 # SPDX-FileContributor: Michael Meinel +# SPDX-FileContributor: Michael Fritzsche -from typing import Any, Callable +from __future__ import annotations -from hermes.model.merge.container import ld_merge_dict -from hermes.model.types import ld_dict +from typing import Any, Callable, TYPE_CHECKING + +from ..types import ld_dict + +if TYPE_CHECKING: + from .container import ld_merge_dict def match_equals(a: Any, b: Any) -> bool: @@ -22,6 +27,8 @@ def match_equals(a: Any, b: Any) -> bool: :return: Truth value of a == b. :rtype: bool """ + print(f"a: {a}") + print(f"b: {b}") return a == b @@ -54,6 +61,7 @@ def match_func(left: ld_merge_dict, right: ld_dict) -> bool: :return: The result of the comparison. :rtype: bool """ + # TODO: This method maybe should try == comparison instead of returning false if active_keys == []. # create a list of all common important keys active_keys = [key for key in keys if key in left and key in right] # check if both objects have the same values for all active keys diff --git a/src/hermes/model/merge/strategy.py b/src/hermes/model/merge/strategy.py index 12681fe6..40c7757d 100644 --- a/src/hermes/model/merge/strategy.py +++ b/src/hermes/model/merge/strategy.py @@ -4,15 +4,14 @@ # SPDX-FileContributor: Michael Meinel -from hermes.model.types.ld_context import iri_map as iri - +from ..types.ld_context import iri_map as iri from .action import Reject, Replace, Collect, Concat, MergeSet from .match import match_equals, match_keys REPLACE_STRATEGY = { None: { - None: Replace, + None: Replace(), "@type": Collect(match_equals), }, } @@ -20,7 +19,7 @@ REJECT_STRATEGY = { None: { - None: Reject, + None: Reject(), "@type": Collect(match_equals), }, } @@ -28,9 +27,9 @@ PROV_STRATEGY = { None: { - iri["hermes-rt:graph"]: Concat, - iri["hermes-rt:replace"]: Concat, - iri["hermes-rt:reject"]: Concat, + iri["hermes-rt:graph"]: Concat(), + iri["hermes-rt:replace"]: Concat(), + iri["hermes-rt:reject"]: Concat(), }, } diff --git a/src/hermes/model/types/ld_container.py b/src/hermes/model/types/ld_container.py index b2456017..14f16161 100644 --- a/src/hermes/model/types/ld_container.py +++ b/src/hermes/model/types/ld_container.py @@ -96,17 +96,9 @@ def __init__( self.context = context or [] - # Create active context (to use with pyld) depending on the initial variables - # Re-use active context from parent if available - if self.parent: - if self.context: - self.active_ctx = self.ld_proc.process_context( - self.parent.active_ctx, self.context, {"documentLoader": bundled_loader} - ) - else: - self.active_ctx = parent.active_ctx - else: - self.active_ctx = self.ld_proc.initial_ctx(self.full_context, {"documentLoader": bundled_loader}) + # Create active context (to use with pyld) depending on the initial variables. + # Don't re-use active context from parent (created some weird in the process step when context is often added). + self.active_ctx = self.ld_proc.initial_ctx(self.full_context, {"documentLoader": bundled_loader}) def add_context(self: Self, context: list[Union[str | JSON_LD_CONTEXT_DICT]]) -> None: """ diff --git a/test/hermes_test/model/test_api.py b/test/hermes_test/model/test_api.py index 895968d7..a7495c4f 100644 --- a/test/hermes_test/model/test_api.py +++ b/test/hermes_test/model/test_api.py @@ -1,3 +1,10 @@ +# SPDX-FileCopyrightText: 2026 German Aerospace Center (DLR) +# +# SPDX-License-Identifier: Apache-2.0 + +# SPDX-FileContributor: Michael Fritzsche +# SPDX-FileContributor: Stephan Druskat + import pytest from hermes.model import SoftwareMetadata From aa4284ebf81bfb2a1cc7de383401ba069e03153c Mon Sep 17 00:00:00 2001 From: notactuallyfinn Date: Fri, 27 Feb 2026 13:14:05 +0100 Subject: [PATCH 202/247] removed unnecessary print statements --- src/hermes/model/merge/match.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/src/hermes/model/merge/match.py b/src/hermes/model/merge/match.py index 453bfba1..629fbee2 100644 --- a/src/hermes/model/merge/match.py +++ b/src/hermes/model/merge/match.py @@ -27,8 +27,6 @@ def match_equals(a: Any, b: Any) -> bool: :return: Truth value of a == b. :rtype: bool """ - print(f"a: {a}") - print(f"b: {b}") return a == b From 4080091be9b8bdab0de237d802fc17402f6cbcf4 Mon Sep 17 00:00:00 2001 From: notactuallyfinn Date: Tue, 3 Mar 2026 14:28:21 +0100 Subject: [PATCH 203/247] json_ids are now returned as ld_dicts instead of the id string --- src/hermes/model/types/__init__.py | 1 - src/hermes/model/types/ld_dict.py | 3 --- .../model/types/test_ld_container.py | 2 +- test/hermes_test/model/types/test_ld_dict.py | 17 ++++++++++------- 4 files changed, 11 insertions(+), 12 deletions(-) diff --git a/src/hermes/model/types/__init__.py b/src/hermes/model/types/__init__.py index 9e4b1bf5..ba6085f7 100644 --- a/src/hermes/model/types/__init__.py +++ b/src/hermes/model/types/__init__.py @@ -30,7 +30,6 @@ (lambda c: isinstance(c, list), {"ld_container": lambda c, **kw: ld_list(c, **kw)}), # pythonize items from lists (expanded set is already handled above) - (ld_container.is_json_id, {"python": lambda c, **_: c["@id"]}), (ld_container.is_typed_json_value, {"python": lambda c, **kw: ld_container.typed_ld_to_py([c], **kw)}), (ld_container.is_json_value, {"python": lambda c, **_: c["@value"]}), (ld_list.is_container, {"ld_container": lambda c, **kw: ld_list([c], **kw)}), diff --git a/src/hermes/model/types/ld_dict.py b/src/hermes/model/types/ld_dict.py index f368ec73..42bc3ed9 100644 --- a/src/hermes/model/types/ld_dict.py +++ b/src/hermes/model/types/ld_dict.py @@ -149,7 +149,4 @@ def is_json_dict(cls, ld_value): if any(k in ld_value for k in ["@set", "@graph", "@list", "@value"]): return False - if ['@id'] == [*ld_value.keys()]: - return False - return True diff --git a/test/hermes_test/model/types/test_ld_container.py b/test/hermes_test/model/types/test_ld_container.py index f73fdcd9..f0844ecd 100644 --- a/test/hermes_test/model/types/test_ld_container.py +++ b/test/hermes_test/model/types/test_ld_container.py @@ -107,7 +107,7 @@ def test_to_python_id_value(self, mock_context): assert cont._to_python("http://spam.eggs/ham", [{"@id": "http://spam.eggs/spam"}]) == [{"@id": "http://spam.eggs/spam"}] assert cont._to_python("http://spam.eggs/ham", - {"@id": "http://spam.eggs/identifier"}) == "http://spam.eggs/identifier" + {"@id": "http://spam.eggs/identifier"}) == {"@id": "http://spam.eggs/identifier"} def test_to_python_basic_value(self, mock_context): cont = ld_container([{}], context=[mock_context]) diff --git a/test/hermes_test/model/types/test_ld_dict.py b/test/hermes_test/model/types/test_ld_dict.py index 8736439d..239f92ed 100644 --- a/test/hermes_test/model/types/test_ld_dict.py +++ b/test/hermes_test/model/types/test_ld_dict.py @@ -299,13 +299,13 @@ def test_to_python(): inner_di = ld_dict([{}], parent=di) inner_di.update({"xmlns:foobar": "bar", "http://xmlns.com/foaf/0.1/barfoo": {"@id": "foo"}}) di.update({"http://xmlns.com/foaf/0.1/name": "foo", "xmlns:homepage": {"@id": "bar"}, "xmlns:foo": inner_di}) - assert di.to_python() == {"xmlns:name": ["foo"], "xmlns:homepage": ["bar"], - "xmlns:foo": [{"xmlns:foobar": ["bar"], "xmlns:barfoo": ["foo"]}]} + assert di.to_python() == {"xmlns:name": ["foo"], "xmlns:homepage": [{"@id": "bar"}], + "xmlns:foo": [{"xmlns:foobar": ["bar"], "xmlns:barfoo": [{"@id": "foo"}]}]} di.update({"http://spam.eggs/eggs": { "@value": "2022-02-22T00:00:00", "@type": "https://schema.org/DateTime" }}) - assert di.to_python() == {"xmlns:name": ["foo"], "xmlns:homepage": ["bar"], - "xmlns:foo": [{"xmlns:foobar": ["bar"], "xmlns:barfoo": ["foo"]}], + assert di.to_python() == {"xmlns:name": ["foo"], "xmlns:homepage": [{"@id": "bar"}], + "xmlns:foo": [{"xmlns:foobar": ["bar"], "xmlns:barfoo": [{"@id": "foo"}]}], "http://spam.eggs/eggs": ["2022-02-22T00:00:00"]} @@ -376,13 +376,16 @@ def test_from_dict(): def test_is_ld_dict(): assert not any(ld_dict.is_ld_dict(item) for item in [{}, {"foo": "bar"}, {"@id": "foo"}]) - assert not any(ld_dict.is_ld_dict(item) for item in [[{"@id": "foo"}], [{"@set": "foo"}], [{}, {}], [], [""]]) - assert all(ld_dict.is_ld_dict([item]) for item in [{"@id": "foo", "foobar": "bar"}, {"foo": "bar"}]) + assert not any(ld_dict.is_ld_dict(item) for item in [[{"@set": "foo"}], [{}, {}], [], [""]]) + assert all( + ld_dict.is_ld_dict([item]) + for item in [{"@id": "foo"}, {"@id": "foo", "foobar": "bar"}, {"foo": "bar"}] + ) def test_is_json_dict(): assert not any(ld_dict.is_json_dict(item) for item in [1, "", [], {""}, ld_dict([{}])]) assert not any(ld_dict.is_json_dict({key: [], "foo": "bar"}) for key in ["@set", "@graph", "@list", "@value"]) - assert not ld_dict.is_json_dict({"@id": "foo"}) + assert ld_dict.is_json_dict({"@id": "foo"}) assert ld_dict.is_json_dict({"@id": "foo", "foobar": "bar"}) assert ld_dict.is_json_dict({"foo": "bar"}) From b7543ee0d374b722807ced29b069ec2c3eb7a9c5 Mon Sep 17 00:00:00 2001 From: Michael Fritzsche Date: Thu, 5 Mar 2026 14:35:36 +0100 Subject: [PATCH 204/247] reworked merging and added strategies --- src/hermes/model/merge/action.py | 84 ++-- src/hermes/model/merge/container.py | 26 +- src/hermes/model/merge/match.py | 39 +- src/hermes/model/merge/strategy.py | 616 ++++++++++++++++++++++++- src/hermes/model/types/ld_dict.py | 3 + test/hermes_test/model/test_api_e2e.py | 79 +++- 6 files changed, 731 insertions(+), 116 deletions(-) diff --git a/src/hermes/model/merge/action.py b/src/hermes/model/merge/action.py index 08a2c084..6108b9ea 100644 --- a/src/hermes/model/merge/action.py +++ b/src/hermes/model/merge/action.py @@ -7,7 +7,7 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Callable, Union +from typing import TYPE_CHECKING, Any, Callable, Union from typing_extensions import Self from ..types import ld_dict, ld_list @@ -76,9 +76,8 @@ def merge( This value will always be value. :rtype: ld_merge_list """ - # If necessary, add the entry that data has been rejected. - if value != update: - target.reject(key, update) + # Add the entry that data has been rejected. + target.reject(key, update) # Return value unchanged. return value @@ -111,8 +110,7 @@ def merge( :rtype: BASIC_TYPE | TIME_TYPE | ld_dict | ld_list """ # If necessary, add the entry that data has been replaced. - if value != update: - target.replace(key, value) + target.replace(key, value) # Return the new value. return update @@ -151,32 +149,21 @@ def merge( class Collect(MergeAction): - def __init__( - self: Self, - match: Union[ - Callable[ - [ - Union[BASIC_TYPE, TIME_TYPE, ld_merge_dict, ld_merge_list], - Union[BASIC_TYPE, TIME_TYPE, ld_dict, ld_list] - ], - bool - ], - Callable[[ld_merge_dict, ld_dict], bool] - ] - ) -> None: + def __init__(self: Self, match: Callable[[Any, Any], bool], reject_incoming: bool = True) -> None: """ - Set the match function for this collect merge action. + Set the match function for this collect merge action. And the behaivior for matches. :param match: The function used to evaluate equality while merging. - :type match: Callable[ - [BASIC_TYPE | TIME_TYPE | ld_merge_dict | ld_merge_list, BASIC_TYPE | TIME_TYPE | ld_dict | ld_list], - bool - ] | Callable[[ld_merge_dict, ld_dict], bool] + :type match: Callable[[Any, Any], bool] + :param reject_incoming: If an incoming item matches an already collected one, if ``reject_incoming`` True, + the incoming item gets rejected, if ``reject_incoming`` False, the match of the incoming item gets replaced. + :type reject_incoming: bool :return: :rtype: None """ self.match = match + self.reject_incoming = reject_incoming def merge( self: Self, @@ -206,44 +193,31 @@ def merge( # iterate over all new items for update_item in update: - # If the current new item has no occurence in value (according to self.match) add it to value. - if not any(self.match(item, update_item) for item in value): + # Iterate over all items in value and if a match is found replace the first one or reject update_item. + for index, item in enumerate(value): + if self.match(item, update_item): + if not self.reject_incoming: + value[index] = update_item + break + else: + # If the current new item has no occurence in value (according to self.match) add it to value. value.append(update_item) return value class MergeSet(MergeAction): - def __init__( - self: Self, - match: Union[ - Callable[ - [ - Union[BASIC_TYPE, TIME_TYPE, ld_merge_dict, ld_merge_list], - Union[BASIC_TYPE, TIME_TYPE, ld_dict, ld_list] - ], - bool - ], - Callable[[ld_merge_dict, ld_dict], bool] - ], - merge_items: bool = True - ) -> None: + def __init__(self: Self, match: Callable[[Any, Any], bool]) -> None: """ Set the match function for this collect merge action. :param match: The function used to evaluate equality while merging. - :type match: Callable[ - [BASIC_TYPE | TIME_TYPE | ld_merge_dict | ld_merge_list, BASIC_TYPE | TIME_TYPE | ld_dict | ld_list], - bool - ] | Callable[[ld_merge_dict, ld_dict], bool] - :param merge_items: Whether or to to merge similar items. (If false this is basically :class:`Concat`) - :type merge_items: bool + :type match: Callable[[ANy, Any], bool] :return: :rtype: None """ self.match = match - self.merge_items = merge_items def merge( self: Self, @@ -271,13 +245,19 @@ def merge( if not isinstance(update, (list, ld_list)): update = [update] - for item in update: + for update_item in update: # For each new item merge it into a similar item (according to match) inside target[key[-1]] - # (aka inside value) if such an item exists and merging is permitted. + # (aka inside value) if such an item exists. # Otherwise append it to target[key[-1]] (aka to value). - target_item = target.match(key[-1], item, self.match) - if target_item and self.merge_items: - target_item.update(item) + for index, item in enumerate(value): + if self.match(item, update_item): + if isinstance(item, ld_dict) and isinstance(update_item, ld_dict): + item.update(update_item) + elif isinstance(item, ld_list) and isinstance(update_item, ld_list): + self.merge(target, [*key, index], item, update_item) + elif isinstance(item, (ld_dict, ld_list)) or isinstance(update_item, (ld_dict, ld_list)): + """ FIXME: log error """ + break else: value.append(item) # Return the merged values. diff --git a/src/hermes/model/merge/container.py b/src/hermes/model/merge/container.py index 30af9aea..2be14694 100644 --- a/src/hermes/model/merge/container.py +++ b/src/hermes/model/merge/container.py @@ -7,7 +7,7 @@ from __future__ import annotations -from typing import Callable, Union, TYPE_CHECKING +from typing import TYPE_CHECKING, Any, Callable, Union from typing_extensions import Self from ..types import ld_container, ld_context, ld_dict, ld_list @@ -15,7 +15,7 @@ BASIC_TYPE, EXPANDED_JSON_LD_VALUE, JSON_LD_CONTEXT_DICT, JSON_LD_VALUE, TIME_TYPE ) from ..types.pyld_util import bundled_loader -from .strategy import CODEMETA_STRATEGY, PROV_STRATEGY, REPLACE_STRATEGY +from .strategy import CODEMETA_STRATEGY, PROV_STRATEGY if TYPE_CHECKING: from .action import MergeAction @@ -153,8 +153,7 @@ def __init__( self.update_context(ld_context.HERMES_PROV_CONTEXT) # add strategies - self.strategies = {**REPLACE_STRATEGY} - self.add_strategy(CODEMETA_STRATEGY) + self.strategies = {**CODEMETA_STRATEGY} self.add_strategy(PROV_STRATEGY) def update_context( @@ -238,16 +237,7 @@ def match( self: Self, key: str, value: Union[BASIC_TYPE, TIME_TYPE, ld_dict, ld_list], - match: Union[ - Callable[ - [ - Union[BASIC_TYPE, TIME_TYPE, "ld_merge_dict", ld_merge_list], - Union[BASIC_TYPE, TIME_TYPE, ld_dict, ld_list] - ], - bool - ], - Callable[["ld_merge_dict", ld_dict], bool] - ] + match: Callable[[Any, Any], bool] ) -> Union[BASIC_TYPE, TIME_TYPE, "ld_merge_dict", ld_merge_list]: """ Returns the first item in self[key] for which match(item, value) returns true. @@ -260,10 +250,7 @@ def match( :param value: The value a match is searched for in self[key]. :type value: Union[JSON_LD_VALUE, BASIC_TYPE, TIME_TYPE, ld_dict, ld_list] :param match: The method defining if two objects are a match. - :type match: Callable[ - [BASIC_TYPE | TIME_TYPE | ld_merge_dict | ld_merge_list, BASIC_TYPE | TIME_TYPE | ld_dict | ld_list], - bool - ] | Callable[[ld_merge_dict, ld_dict], bool] + :type match: Callable[[Any, Any], bool] :return: The item in self[key] that is a match to value if one exists else None :rtype: BASIC_TYPE | TIME_TYPE | ld_merge_dict | ld_merge_list @@ -317,6 +304,7 @@ def _add_related( :return: :rtype: None """ + # FIXME: key not only string # make sure appending is possible self.emplace(rel) # append the new entry @@ -338,6 +326,7 @@ def reject(self: Self, key: str, value: Union[BASIC_TYPE, TIME_TYPE, ld_dict, ld :return: :rtype: None """ + # FIXME: key not only string self._add_related("hermes-rt:reject", key, value) def replace(self: Self, key: str, value: Union[BASIC_TYPE, TIME_TYPE, ld_dict, ld_list]) -> None: @@ -356,4 +345,5 @@ def replace(self: Self, key: str, value: Union[BASIC_TYPE, TIME_TYPE, ld_dict, l :return: :rtype: None """ + # FIXME: key not only string self._add_related("hermes-rt:replace", key, value) diff --git a/src/hermes/model/merge/match.py b/src/hermes/model/merge/match.py index 629fbee2..3934b785 100644 --- a/src/hermes/model/merge/match.py +++ b/src/hermes/model/merge/match.py @@ -5,15 +5,10 @@ # SPDX-FileContributor: Michael Meinel # SPDX-FileContributor: Michael Fritzsche -from __future__ import annotations - -from typing import Any, Callable, TYPE_CHECKING +from typing import Any, Callable from ..types import ld_dict -if TYPE_CHECKING: - from .container import ld_merge_dict - def match_equals(a: Any, b: Any) -> bool: """ @@ -30,26 +25,29 @@ def match_equals(a: Any, b: Any) -> bool: return a == b -def match_keys( - *keys: list[str] -) -> Callable[[ld_merge_dict, ld_dict], bool]: +def match_keys(*keys: list[str], fall_back_to_equals: bool = False) -> Callable[[Any, Any], bool]: """ Creates a function taking to parameters that returns true if both given parameter have at least one common key in the given list of keys - and for all common keys in the given list of keys the values of both objects are the same. + and for all common keys in the given list of keys the values of both objects are the same.
+ If fall_back_to_equals is True, the returned function returns the value of normal == comparison + if no key from keys is in both objects. :param keys: The list of important keys for the comparison method. :type keys: list[str] + :param fall_back_to_equals: Whether or not a fall back option should be used. + :type fall_back_to_equals: bool :return: A function comparing two given objects values for the keys in keys. :rtype: Callable[[ld_merge_dict, ld_dict], bool] """ # create and return the match function using the given keys - def match_func(left: ld_merge_dict, right: ld_dict) -> bool: + def match_func(left: Any, right: Any) -> bool: """ Compares left to right by checking if a) they have at least one common key in a predetermined list of keys and - b) testing if both objects have equal values for all common keys in the predetermined key list. + b) testing if both objects have equal values for all common keys in the predetermined key list.
+ It may fall back on == if no common key in the predetermined list of keys exists. :param left: The first object for the comparison. :type left: ld_merge_dict @@ -59,12 +57,27 @@ def match_func(left: ld_merge_dict, right: ld_dict) -> bool: :return: The result of the comparison. :rtype: bool """ - # TODO: This method maybe should try == comparison instead of returning false if active_keys == []. + if not (isinstance(left, ld_dict) and isinstance(right, ld_dict)): + return fall_back_to_equals and (left == right) # create a list of all common important keys active_keys = [key for key in keys if key in left and key in right] + # fall back to == if no active keys + if fall_back_to_equals and not active_keys: + return left == right # check if both objects have the same values for all active keys pairs = [(left[key] == right[key]) for key in active_keys] # return whether or not both objects had the same values for all active keys # and there was at least one active key return len(active_keys) > 0 and all(pairs) return match_func + + +def match_person(left: Any, right: Any) -> bool: + if not (isinstance(left, ld_dict) and isinstance(right, ld_dict)): + return left == right + if "@id" in left and "@id" in right: + return left["@id"] == right["@id"] + if "schema:email" in left and "schema:email" in right: + mails_right = right["schema:email"] + return any((mail in mails_right) for mail in left["schema:email"]) + return left == right diff --git a/src/hermes/model/merge/strategy.py b/src/hermes/model/merge/strategy.py index 40c7757d..e928a4fc 100644 --- a/src/hermes/model/merge/strategy.py +++ b/src/hermes/model/merge/strategy.py @@ -3,39 +3,611 @@ # SPDX-License-Identifier: Apache-2.0 # SPDX-FileContributor: Michael Meinel +# SPDX-FileContributor: Michael Fritzsche from ..types.ld_context import iri_map as iri -from .action import Reject, Replace, Collect, Concat, MergeSet -from .match import match_equals, match_keys +from .action import Concat, MergeSet +from .match import match_keys, match_person -REPLACE_STRATEGY = { - None: { - None: Replace(), - "@type": Collect(match_equals), - }, +ACTIONS = { + "default": MergeSet(match_keys("@id", fall_back_to_equals=True)), + "merge_match_person": MergeSet(match_person) } -REJECT_STRATEGY = { - None: { - None: Reject(), - "@type": Collect(match_equals), - }, +PROV_STRATEGY = { + None: {iri["hermes-rt:graph"]: Concat(), iri["hermes-rt:replace"]: Concat(), iri["hermes-rt:reject"]: Concat()} } +# All troublesome marked entries can contain objects of different types, e.g. Person and Organization. +# This is troublesome because Persons may be compared using a different method than Organizations. -PROV_STRATEGY = { - None: { - iri["hermes-rt:graph"]: Concat(), - iri["hermes-rt:replace"]: Concat(), - iri["hermes-rt:reject"]: Concat(), - }, +# Filled with entries for every schema-type that can be found inside an JSON-LD dict of type +# SoftwareSourceCode or SoftwareApplication. +CODEMETA_STRATEGY = {None: {None: ACTIONS["default"]}} + +CODEMETA_STRATEGY[iri["schema:Thing"]] = {iri["schema:owner"]: None} # FIXME: troublesome Organization or Person + +CODEMETA_STRATEGY[iri["schema:CreativeWork"]] = { + **CODEMETA_STRATEGY[iri["schema:Thing"]], + iri["schema:accountablePerson"]: ACTIONS["merge_match_person"], + iri["schema:audio"]: None, # FIXME: troublesome AudioObject or Clip or MusicRecording + iri["schema:author"]: None, # FIXME: troublesome Organization or Person + iri["schema:character"]: ACTIONS["merge_match_person"], + iri["schema:contributor"]: None, # FIXME: troublesome Organization or Person + iri["schema:copyrightHolder"]: None, # FIXME: troublesome Organization or Person + iri["schema:creator"]: None, # FIXME: troublesome Organization or Person + iri["schema:editor"]: ACTIONS["merge_match_person"], + iri["schema:funder"]: None, # FIXME: troublesome Organization or Person + iri["schema:isBasedOn"]: None, # FIXME: troublesome CreativeWork or Product + iri["schema:maintainer"]: None, # FIXME: troublesome Organization or Person + iri["schema:offers"]: None, # FIXME: troublesome Demand or Offer + iri["schema:producer"]: None, # FIXME: troublesome Organization or Person + iri["schema:provider"]: None, # FIXME: troublesome Organization or Person + iri["schema:publisher"]: None, # FIXME: troublesome Organization or Person + iri["schema:sdPublisher"]: None, # FIXME: troublesome Organization or Person + iri["schema:size"]: None, # FIXME: troublesome DefinedTerm or QuantitativeValue or SizeSpecification + iri["schema:sponsor"]: None, # FIXME: troublesome Organization or Person + iri["schema:translator"]: None, # FIXME: troublesome Organization or Person + iri["schema:video"]: None # FIXME: troublesome Clip or VideoObject +} +CODEMETA_STRATEGY[iri["schema:SoftwareSourceCode"]] = { + **CODEMETA_STRATEGY[iri["schema:CreativeWork"]], + iri["maintainer"]: ACTIONS["merge_match_person"] +} +CODEMETA_STRATEGY[iri["schema:MediaObject"]] = { + **CODEMETA_STRATEGY[iri["schema:CreativeWork"]], + iri["schema:duration"]: None, # FIXME: troublesome Duration or QuantitativeValue + iri["schema:height"]: None, # FIXME: troublesome Distance or QuantitativeValue + iri["schema:ineligibleRegion"]: None, # FIXME: troublesome GeoShape or Place + iri["schema:width"]: None # FIXME: troublesome Distance or QuantitativeValue +} +CODEMETA_STRATEGY[iri["schema:AudioObject"]] = {**CODEMETA_STRATEGY[iri["schema:MediaObject"]]} +CODEMETA_STRATEGY[iri["schema:ImageObject"]] = {**CODEMETA_STRATEGY[iri["schema:MediaObject"]]} +CODEMETA_STRATEGY[iri["schema:VideoObject"]] = { + **CODEMETA_STRATEGY[iri["schema:MediaObject"]], + iri["schema:actor"]: None, # FIXME: troublesome PerformingGroup or Person + iri["schema:dircetor"]: ACTIONS["merge_match_person"], + iri["schema:musicBy"]: None # FIXME: troublesome MusicGroup or Person +} +CODEMETA_STRATEGY[iri["schema:DataDownload"]] = {**CODEMETA_STRATEGY[iri["schema:MediaObject"]]} +CODEMETA_STRATEGY[iri["schema:Certification"]] = {**CODEMETA_STRATEGY[iri["schema:CreativeWork"]]} +CODEMETA_STRATEGY[iri["schema:Claim"]] = { + **CODEMETA_STRATEGY[iri["schema:CreativeWork"]], + iri["schema:claimInterpreter"]: None # FIXME: troublesome Organization or Person +} +CODEMETA_STRATEGY[iri["schema:Clip"]] = { + **CODEMETA_STRATEGY[iri["schema:CreativeWork"]], + iri["schema:actor"]: None, # FIXME: troublesome PerformingGroup or Person + iri["schema:dircetor"]: ACTIONS["merge_match_person"], + iri["schema:musicBy"]: None # FIXME: troublesome MusicGroup or Person +} +CODEMETA_STRATEGY[iri["schema:Comment"]] = { + **CODEMETA_STRATEGY[iri["schema:CreativeWork"]], + iri["schema:parentItem"]: None # FIXME: troublesome Comment or CreativeWork +} +CODEMETA_STRATEGY[iri["schema:CorrectionComment"]] = {**CODEMETA_STRATEGY[iri["schema:Comment"]]} +CODEMETA_STRATEGY[iri["schema:CreativeWorkSeason"]] = { + **CODEMETA_STRATEGY[iri["schema:CreativeWork"]], + iri["schema:actor"]: None # FIXME: troublesome PerformingGroup or Person +} +CODEMETA_STRATEGY[iri["schema:DefinedTermSet"]] = {**CODEMETA_STRATEGY[iri["schema:CreativeWork"]]} +CODEMETA_STRATEGY[iri["schema:CategoryCodeSet"]] = {**CODEMETA_STRATEGY[iri["schema:DefinedTermSet"]]} +CODEMETA_STRATEGY[iri["schema:Episode"]] = { + **CODEMETA_STRATEGY[iri["schema:CreativeWork"]], + iri["schema:actor"]: None, # FIXME: troublesome PerformingGroup or Person + iri["schema:dircetor"]: ACTIONS["merge_match_person"], + iri["schema:duration"]: None, # FIXME: troublesome Duration or QuantitativeValue + iri["schema:musicBy"]: None # FIXME: troublesome MusicGroup or Person +} +CODEMETA_STRATEGY[iri["schema:HowTo"]] = { + **CODEMETA_STRATEGY[iri["schema:CreativeWork"]], + iri["schema:step"]: None # FIXME: troublesome CreativeWork or HowToSection or HowToStep +} +CODEMETA_STRATEGY[iri["schema:HyperTocEntry"]] = {**CODEMETA_STRATEGY[iri["schema:CreativeWork"]]} +CODEMETA_STRATEGY[iri["schema:Map"]] = {**CODEMETA_STRATEGY[iri["schema:CreativeWork"]]} +CODEMETA_STRATEGY[iri["schema:MenuSection"]] = {**CODEMETA_STRATEGY[iri["schema:CreativeWork"]]} +CODEMETA_STRATEGY[iri["schema:MusicRecording"]] = { + **CODEMETA_STRATEGY[iri["schema:CreativeWork"]], + iri["schema:byArtist"]: None, # FIXME: troublesome MusicGroup or Person + iri["schema:duration"]: None # FIXME: troublesome Duration or QuantitativeValue +} +CODEMETA_STRATEGY[iri["schema:WebPage"]] = { + **CODEMETA_STRATEGY[iri["schema:CreativeWork"]], + iri["schema:reviewedBy"]: None # FIXME: troublesome Organization or Person +} +CODEMETA_STRATEGY[iri["schema:AboutPage"]] = {**CODEMETA_STRATEGY[iri["schema:WebPage"]]} +CODEMETA_STRATEGY[iri["schema:Article"]] = {**CODEMETA_STRATEGY[iri["schema:CreativeWork"]]} +CODEMETA_STRATEGY[iri["schema:NewsArticle"]] = {**CODEMETA_STRATEGY[iri["schema:Article"]]} +CODEMETA_STRATEGY[iri["schema:ScholarlyArticle"]] = {**CODEMETA_STRATEGY[iri["schema:Article"]]} +CODEMETA_STRATEGY[iri["schema:WebPageElement"]] = {**CODEMETA_STRATEGY[iri["schema:CreativeWork"]]} +CODEMETA_STRATEGY[iri["schema:EducationalOccupationalCredential"]] = {**CODEMETA_STRATEGY[iri["schema:CreativeWork"]]} +CODEMETA_STRATEGY[iri["schema:MusicPlaylist"]] = { + **CODEMETA_STRATEGY[iri["schema:CreativeWork"]], + iri["schema:track"]: None # FIXME: troublesome ItemList or MusicRecording +} +CODEMETA_STRATEGY[iri["schema:MusicAlbum"]] = { + **CODEMETA_STRATEGY[iri["schema:MusicPlaylist"]], + iri["schema:byArtist"]: None, # FIXME: troublesome MusicGroup or Person +} +CODEMETA_STRATEGY[iri["schema:MusicRelease"]] = { + **CODEMETA_STRATEGY[iri["schema:MusicPlaylist"]], + iri["schema:creditedTo"]: None, # FIXME: troublesome Organization or Person + iri["schema:duration"]: None # FIXME: troublesome Duration or QuantitativeValue +} +CODEMETA_STRATEGY[iri["schema:MusicComposition"]] = { + **CODEMETA_STRATEGY[iri["schema:CreativeWork"]], + iri["schema:composer"]: None, # FIXME: troublesome Organization or Person + iri["schema:lyricist"]: ACTIONS["merge_match_person"], +} +CODEMETA_STRATEGY[iri["schema:Photograph"]] = {**CODEMETA_STRATEGY[iri["schema:CreativeWork"]]} +CODEMETA_STRATEGY[iri["schema:Review"]] = { + **CODEMETA_STRATEGY[iri["schema:CreativeWork"]], + iri["schema:negativeNotes"]: None, # FIXME: troublesome ItemList or ListItem or WebContent + iri["schema:positiveNotes"]: None # FIXME: troublesome ItemList or ListItem or WebContent +} +CODEMETA_STRATEGY[iri["schema:SoftwareApplication"]] = {**CODEMETA_STRATEGY[iri["schema:CreativeWork"]]} +CODEMETA_STRATEGY[iri["schema:RuntimePlatform"]] = {**CODEMETA_STRATEGY[iri["schema:SoftwareApplication"]]} +CODEMETA_STRATEGY[iri["schema:OperatingSystem"]] = {**CODEMETA_STRATEGY[iri["schema:SoftwareApplication"]]} +CODEMETA_STRATEGY[iri["schema:WebSite"]] = {**CODEMETA_STRATEGY[iri["schema:CreativeWork"]]} +CODEMETA_STRATEGY[iri["schema:WebContent"]] = {**CODEMETA_STRATEGY[iri["schema:CreativeWork"]]} +CODEMETA_STRATEGY[iri["schema:DataCatalog"]] = {**CODEMETA_STRATEGY[iri["schema:CreativeWork"]]} +CODEMETA_STRATEGY[iri["schema:Dataset"]] = { + **CODEMETA_STRATEGY[iri["schema:CreativeWork"]], + iri["schema:variableMeasured"]: None # FIXME: troublesome Property or PropertyValue or StatisticalVariable +} +CODEMETA_STRATEGY[iri["schema:DataFeed"]] = { + **CODEMETA_STRATEGY[iri["schema:Dataset"]], + iri["schema:dataFeedElement"]: None # FIXME: troublesome DataFeedItem or Thing } +CODEMETA_STRATEGY[iri["schema:Action"]] = { + **CODEMETA_STRATEGY[iri["schema:Thing"]], + iri["schema:agent"]: None, # FIXME: troublesome Organization or Person + iri["schema:location"]: None, # FIXME: troublesome Place or PostalAddress or VirtualLocation + iri["schema:participant"]: None, # FIXME: troublesome Organization or Person + iri["schema:provider"]: None # FIXME: troublesome Organization or Person +} -CODEMETA_STRATEGY = { - iri["schema:SoftwareSourceCode"]: { - iri["schema:author"]: MergeSet(match_keys('@id', iri['schema:email'])), - }, +CODEMETA_STRATEGY[iri["schema:Intangible"]] = {**CODEMETA_STRATEGY[iri["schema:Thing"]]} +CODEMETA_STRATEGY[iri["schema:Rating"]] = { + **CODEMETA_STRATEGY[iri["schema:Intangible"]], + iri["schema:author"]: None # FIXME: troublesome Organization or Person +} +CODEMETA_STRATEGY[iri["schema:AggregateRating"]] = {**CODEMETA_STRATEGY[iri["schema:Rating"]]} +CODEMETA_STRATEGY[iri["schema:AlignmentObject"]] = {**CODEMETA_STRATEGY[iri["schema:Intangible"]]} +CODEMETA_STRATEGY[iri["schema:Audience"]] = {**CODEMETA_STRATEGY[iri["schema:Intangible"]]} +CODEMETA_STRATEGY[iri["schema:ComputerLanguage"]] = {**CODEMETA_STRATEGY[iri["schema:Intangible"]]} +CODEMETA_STRATEGY[iri["schema:Series"]] = {**CODEMETA_STRATEGY[iri["schema:Intangible"]]} +CODEMETA_STRATEGY[iri["schema:DefinedTerm"]] = {**CODEMETA_STRATEGY[iri["schema:Intangible"]]} +CODEMETA_STRATEGY[iri["schema:CategoryCode"]] = {**CODEMETA_STRATEGY[iri["schema:DefinedTerm"]]} +CODEMETA_STRATEGY[iri["schema:Demand"]] = { + **CODEMETA_STRATEGY[iri["schema:Intangible"]], + iri["schema:acceptedPaymentMethod"]: None, # FIXME: troublesome LoanOrCredit or PaymentMethod + iri["schema:areaServed"]: None, # FIXME: troublesome AdministrativeArea or GeoShape or Place + iri["schema:eligibleRegion"]: None, # FIXME: troublesome GeoShape or Place + iri["schema:ineligibleRegion"]: None, # FIXME: troublesome GeoShape or Place + iri["schema:itemOffered"]: None, # FIXME: troublesome AggregateOffer or CreativeWork or Event or MenuItem or Product or Service or Trip + iri["schema:seller"]: None # FIXME: troublesome Organization or Person +} +CODEMETA_STRATEGY[iri["schema:Offer"]] = { + **CODEMETA_STRATEGY[iri["schema:Intangible"]], + iri["schema:acceptedPaymentMethod"]: None, # FIXME: troublesome LoanOrCredit or PaymentMethod + iri["schema:areaServed"]: None, # FIXME: troublesome AdministrativeArea or GeoShape or Place + iri["schema:category"]: None, # FIXME: troublesome CategoryCode or Thing + iri["schema:eligibleRegion"]: None, # FIXME: troublesome GeoShape or Place + iri["schema:ineligibleRegion"]: None, # FIXME: troublesome GeoShape or Place + iri["schema:itemOffered"]: None, # FIXME: troublesome AggregateOffer or CreativeWork or Event or MenuItem or Product or Service or Trip + iri["schema:leaseLength"]: None, # FIXME: troublesome Duration or QuantitativeValue + iri["schema:offeredBy"]: None, # FIXME: troublesome Organization or Person + iri["schema:seller"]: None, # FIXME: troublesome Organization or Person +} +CODEMETA_STRATEGY[iri["schema:AggregateOffer"]] = { + **CODEMETA_STRATEGY[iri["schema:Offer"]], + iri["schema:offers"]: None # FIXME: troublesome Demand or Offer +} +CODEMETA_STRATEGY[iri["schema:Quantity"]] = {**CODEMETA_STRATEGY[iri["schema:Intangible"]]} +CODEMETA_STRATEGY[iri["schema:Duration"]] = {**CODEMETA_STRATEGY[iri["schema:Quantity"]]} +CODEMETA_STRATEGY[iri["schema:Energy"]] = {**CODEMETA_STRATEGY[iri["schema:Quantity"]]} +CODEMETA_STRATEGY[iri["schema:Mass"]] = {**CODEMETA_STRATEGY[iri["schema:Quantity"]]} +CODEMETA_STRATEGY[iri["schema:EntryPoint"]] = {**CODEMETA_STRATEGY[iri["schema:Intangible"]]} +CODEMETA_STRATEGY[iri["schema:StructuredValue"]] = {**CODEMETA_STRATEGY[iri["schema:Intangible"]]} +CODEMETA_STRATEGY[iri["schema:GeoCoordinates"]] = {**CODEMETA_STRATEGY[iri["schema:StructuredValue"]]} +CODEMETA_STRATEGY[iri["schema:GeoShape"]] = {**CODEMETA_STRATEGY[iri["schema:StructuredValue"]]} +CODEMETA_STRATEGY[iri["schema:NutritionInformation"]] = {**CODEMETA_STRATEGY[iri["schema:StructuredValue"]]} +CODEMETA_STRATEGY[iri["schema:MonetaryAmount"]] = {**CODEMETA_STRATEGY[iri["schema:StructuredValue"]]} +CODEMETA_STRATEGY[iri["schema:Distance"]] = {**CODEMETA_STRATEGY[iri["schema:StructuredValue"]]} +CODEMETA_STRATEGY[iri["schema:PostalCodeRangeSpecification"]] = {**CODEMETA_STRATEGY[iri["schema:StructuredValue"]]} +CODEMETA_STRATEGY[iri["schema:OpeningHoursSpecification"]] = {**CODEMETA_STRATEGY[iri["schema:StructuredValue"]]} +CODEMETA_STRATEGY[iri["schema:RepaymentSpecification"]] = {**CODEMETA_STRATEGY[iri["schema:StructuredValue"]]} +CODEMETA_STRATEGY[iri["schema:WarrantyPromise"]] = {**CODEMETA_STRATEGY[iri["schema:StructuredValue"]]} +CODEMETA_STRATEGY[iri["schema:ShippingRateSettings"]] = { + **CODEMETA_STRATEGY[iri["schema:StructuredValue"]], + iri["schema:shippingRate"]: None # FIXME: troublesome MonetaryAmount or ShippingRateSettings +} +CODEMETA_STRATEGY[iri["schema:InteractionCounter"]] = { + **CODEMETA_STRATEGY[iri["schema:StructuredValue"]], + iri["schema:interactionService"]: None, # FIXME: troublesome SoftwareApplication or WebSite + iri["schema:location"]: None # FIXME: troublesome Place or PostalAddress or VirtualLocation +} +CODEMETA_STRATEGY[iri["schema:PropertyValue"]] = { + **CODEMETA_STRATEGY[iri["schema:StructuredValue"]], + iri["schema:valueReference"]: None # FIXME: troublesome DefinedTerm or Enumeration or PropertyValue or QualitativeValue or QuantitativeValue or StructuredValue +} +CODEMETA_STRATEGY[iri["schema:ContactPoint"]] = { + **CODEMETA_STRATEGY[iri["schema:StructuredValue"]], + iri["schema:areaServed"]: None, # FIXME: troublesome AdministrativeArea or GeoShape or Place +} +CODEMETA_STRATEGY[iri["schema:PostalAddress"]] = {**CODEMETA_STRATEGY[iri["schema:ContactPoint"]]} +CODEMETA_STRATEGY[iri["schema:OfferShippingDetails"]] = { + **CODEMETA_STRATEGY[iri["schema:StructuredValue"]], + iri["schema:depth"]: None, # FIXME: troublesome Distance or QuantitativeValue + iri["schema:height"]: None, # FIXME: troublesome Distance or QuantitativeValue + iri["schema:shippingRate"]: None, # FIXME: troublesome MonetaryAmount or ShippingRateSettings + iri["schema:weight"]: None, # FIXME: troublesome Mass or QuantitativeValue + iri["schema:width"]: None # FIXME: troublesome Distance or QuantitativeValue +} +CODEMETA_STRATEGY[iri["schema:ShippingDeliveryTime"]] = { + **CODEMETA_STRATEGY[iri["schema:StructuredValue"]], + iri["schema:handlingTime"]: None, # FIXME: troublesome QuantitativeValue or ServicePeriod + iri["schema:transitTime"]: None # FIXME: troublesome QuantitativeValue or ServicePeriod +} +CODEMETA_STRATEGY[iri["schema:TypeAndQuantityNode"]] = { + **CODEMETA_STRATEGY[iri["schema:StructuredValue"]], + iri["schema:typeOfGood"]: None # FIXME: troublesome Product or Service +} +CODEMETA_STRATEGY[iri["schema:ServicePeriod"]] = { + **CODEMETA_STRATEGY[iri["schema:StructuredValue"]], + iri["schema:duration"]: None # FIXME: troublesome Duration or QuantitativeValue +} +CODEMETA_STRATEGY[iri["schema:QuantitativeValue"]] = { + **CODEMETA_STRATEGY[iri["schema:StructuredValue"]], + iri["schema:valueReference"]: None # FIXME: troublesome DefinedTerm or Enumeration or PropertyValue or QualitativeValue or QuantitativeValue or StructuredValue +} +CODEMETA_STRATEGY[iri["schema:ShippingService"]] = { + **CODEMETA_STRATEGY[iri["schema:StructuredValue"]], + iri["schema:handlingTime"]: None # FIXME: troublesome QuantitativeValue or ServicePeriod +} +CODEMETA_STRATEGY[iri["schema:ShippingConditions"]] = { + **CODEMETA_STRATEGY[iri["schema:StructuredValue"]], + iri["schema:depth"]: None, # FIXME: troublesome Distance or QuantitativeValue + iri["schema:height"]: None, # FIXME: troublesome Distance or QuantitativeValue + iri["schema:shippingRate"]: None, # FIXME: troublesome MonetaryAmount or ShippingRateSettings + iri["schema:transitTime"]: None, # FIXME: troublesome QuantitativeValue or ServicePeriod + iri["schema:weight"]: None, # FIXME: troublesome Mass or QuantitativeValue + iri["schema:width"]: None # FIXME: troublesome Distance or QuantitativeValue +} +CODEMETA_STRATEGY[iri["schema:QuantitativeValueDistribution"]] = { + **CODEMETA_STRATEGY[iri["schema:StructuredValue"]], + iri["schema:duration"]: None # FIXME: troublesome Duration or QuantitativeValue +} +CODEMETA_STRATEGY[iri["schema:MonetaryAmountDistribution"]] = { + **CODEMETA_STRATEGY[iri["schema:QuantitativeValueDistribution"]] +} +CODEMETA_STRATEGY[iri["schema:PriceSpecification"]] = {**CODEMETA_STRATEGY[iri["schema:StructuredValue"]]} +CODEMETA_STRATEGY[iri["schema:UnitPriceSpecification"]] = { + **CODEMETA_STRATEGY[iri["schema:PriceSpecification"]], + iri["schema:billingDuration"]: None, # FIXME: troublesome Duration or QuantitativeValue +} +CODEMETA_STRATEGY[iri["schema:DeliveryChargeSpecification"]] = { + **CODEMETA_STRATEGY[iri["schema:PriceSpecification"]], + iri["schema:areaServed"]: None, # FIXME: troublesome AdministrativeArea or GeoShape or Place + iri["schema:eligibleRegion"]: None, # FIXME: troublesome GeoShape or Place + iri["schema:ineligibleRegion"]: None # FIXME: troublesome GeoShape or Place +} +CODEMETA_STRATEGY[iri["schema:LocationFeatureSpecification"]] = {**CODEMETA_STRATEGY[iri["schema:PropertyValue"]]} +CODEMETA_STRATEGY[iri["schema:GeospatialGeometry"]] = { + **CODEMETA_STRATEGY[iri["schema:Intangible"]], + iri["schema:geoContains"]: None, # FIXME: troublesome GeospatialGeometry or Place + iri["schema:geoCoveredBy"]: None, # FIXME: troublesome GeospatialGeometry or Place + iri["schema:geoCovers"]: None, # FIXME: troublesome GeospatialGeometry or Place + iri["schema:geoCrosses"]: None, # FIXME: troublesome GeospatialGeometry or Place + iri["schema:geoDisjoint"]: None, # FIXME: troublesome GeospatialGeometry or Place + iri["schema:geoEquals"]: None, # FIXME: troublesome GeospatialGeometry or Place + iri["schema:geoIntersects"]: None, # FIXME: troublesome GeospatialGeometry or Place + iri["schema:geoOverlaps"]: None, # FIXME: troublesome GeospatialGeometry or Place + iri["schema:geoTouches"]: None, # FIXME: troublesome GeospatialGeometry or Place + iri["schema:geoWithin"]: None # FIXME: troublesome GeospatialGeometry or Place +} +CODEMETA_STRATEGY[iri["schema:Grant"]] = { + **CODEMETA_STRATEGY[iri["schema:Intangible"]], + iri["schema:fundedItem"]: None, # FIXME: troublesome BioChemEntity or CreativeWork or Event or MedicalEntity or Organization or Person or Product + iri["schema:funder"]: None, # FIXME: troublesome Organization or Person + iri["schema:sponsor"]: None # FIXME: troublesome Organization or Person +} +CODEMETA_STRATEGY[iri["schema:ItemList"]] = { + **CODEMETA_STRATEGY[iri["schema:Intangible"]], + iri["schema:itemListElement"]: None # FIXME: troublesome ListItem or Thing +} +CODEMETA_STRATEGY[iri["schema:OfferCatalog"]] = {**CODEMETA_STRATEGY[iri["schema:ItemList"]]} +CODEMETA_STRATEGY[iri["schema:BreadcrumbList"]] = {**CODEMETA_STRATEGY[iri["schema:ItemList"]]} +CODEMETA_STRATEGY[iri["schema:Language"]] = {**CODEMETA_STRATEGY[iri["schema:Intangible"]]} +CODEMETA_STRATEGY[iri["schema:Service"]] = { + **CODEMETA_STRATEGY[iri["schema:Intangible"]], + iri["schema:areaServed"]: None, # FIXME: troublesome AdministrativeArea or GeoShape or Place + iri["schema:brand"]: None, # FIXME: troublesome Brand or Organization + iri["schema:broker"]: None, # FIXME: troublesome Organization or Person + iri["schema:category"]: None, # FIXME: troublesome CategoryCode or Thing + iri["schema:isRelatedTo"]: None, # FIXME: troublesome Product or Service + iri["schema:isSimilarTo"]: None, # FIXME: troublesome Product or Service + iri["schema:offers Demand"]: None, # FIXME: troublesome or Offer + iri["schema:provider"]: None # FIXME: troublesome Organization or Person +} +CODEMETA_STRATEGY[iri["schema:FinancialProduct"]] = {**CODEMETA_STRATEGY[iri["schema:Service"]]} +CODEMETA_STRATEGY[iri["schema:BroadcastService"]] = {**CODEMETA_STRATEGY[iri["schema:Service"]]} +CODEMETA_STRATEGY[iri["schema:CableOrSatelliteService"]] = {**CODEMETA_STRATEGY[iri["schema:Service"]]} +CODEMETA_STRATEGY[iri["schema:LoanOrCredit"]] = {**CODEMETA_STRATEGY[iri["schema:FinancialProduct"]]} +CODEMETA_STRATEGY[iri["schema:MediaSubscription"]] = {**CODEMETA_STRATEGY[iri["schema:Intangible"]]} +CODEMETA_STRATEGY[iri["schema:Brand"]] = {**CODEMETA_STRATEGY[iri["schema:Intangible"]]} +CODEMETA_STRATEGY[iri["schema:HealthInsurancePlan"]] = {**CODEMETA_STRATEGY[iri["schema:Intangible"]]} +CODEMETA_STRATEGY[iri["schema:ListItem"]] = {**CODEMETA_STRATEGY[iri["schema:Intangible"]]} +CODEMETA_STRATEGY[iri["schema:HowToItem"]] = {**CODEMETA_STRATEGY[iri["schema:ListItem"]]} +CODEMETA_STRATEGY[iri["schema:HowToSupply"]] = {**CODEMETA_STRATEGY[iri["schema:HowToItem"]]} +CODEMETA_STRATEGY[iri["schema:HowToTool"]] = {**CODEMETA_STRATEGY[iri["schema:HowToItem"]]} +CODEMETA_STRATEGY[iri["schema:Enumeration"]] = { + **CODEMETA_STRATEGY[iri["schema:Intangible"]], + iri["schema:supersededBy"]: None # FIXME: troublesome Class or Enumeration } +CODEMETA_STRATEGY[iri["schema:QualitativeValue"]] = { + **CODEMETA_STRATEGY[iri["schema:Enumeration"]], + iri["schema:valueReference"]: None # FIXME: troublesome DefinedTerm or Enumeration or PropertyValue or QualitativeValue or QuantitativeValue or StructuredValue +} +CODEMETA_STRATEGY[iri["schema:SizeSpecification"]] = {**CODEMETA_STRATEGY[iri["schema:QualitativeValue"]]} +CODEMETA_STRATEGY[iri["schema:Class"]] = { + **CODEMETA_STRATEGY[iri["schema:Intangible"]], + iri["schema:supersededBy"]: None # FIXME: troublesome Class or Enumeration +} +CODEMETA_STRATEGY[iri["schema:HealthPlanFormulary"]] = {**CODEMETA_STRATEGY[iri["schema:Intangible"]]} +CODEMETA_STRATEGY[iri["schema:HealthPlanCostSharingSpecification"]] = {**CODEMETA_STRATEGY[iri["schema:Intangible"]]} +CODEMETA_STRATEGY[iri["schema:HealthPlanNetwork"]] = {**CODEMETA_STRATEGY[iri["schema:Intangible"]]} +CODEMETA_STRATEGY[iri["schema:MemberProgramTier"]] = { + **CODEMETA_STRATEGY[iri["schema:Intangible"]], + iri["schema:hasTierRequirement"]: None # FIXME: troublesome CreditCard or MonetaryAmount or UnitPriceSpecification +} +CODEMETA_STRATEGY[iri["schema:MemberProgram"]] = {**CODEMETA_STRATEGY[iri["schema:Intangible"]]} +CODEMETA_STRATEGY[iri["schema:MenuItem"]] = { + **CODEMETA_STRATEGY[iri["schema:Intangible"]], + iri["schema:menuAddOn"]: None, # FIXME: troublesome MenuItem or MenuSection + iri["schema:offers"]: None # FIXME: troublesome Demand or Offer +} +CODEMETA_STRATEGY[iri["schema:MerchantReturnPolicy"]] = {**CODEMETA_STRATEGY[iri["schema:Intangible"]]} +CODEMETA_STRATEGY[iri["schema:MerchantReturnPolicySeasonalOverride"]] = {**CODEMETA_STRATEGY[iri["schema:Intangible"]]} +CODEMETA_STRATEGY[iri["schema:SpeakableSpecification"]] = {**CODEMETA_STRATEGY[iri["schema:Intangible"]]} +CODEMETA_STRATEGY[iri["schema:PaymentMethod"]] = {**CODEMETA_STRATEGY[iri["schema:Intangible"]]} +CODEMETA_STRATEGY[iri["schema:ProgramMembership"]] = { + **CODEMETA_STRATEGY[iri["schema:Intangible"]], + iri["schema:member"]: None # FIXME: troublesome Organization or Person +} +CODEMETA_STRATEGY[iri["schema:Schedule"]] = { + **CODEMETA_STRATEGY[iri["schema:Intangible"]], + iri["schema:duration"]: None # FIXME: troublesome Duration or QuantitativeValue +} +CODEMETA_STRATEGY[iri["schema:ServiceChannel"]] = {**CODEMETA_STRATEGY[iri["schema:Intangible"]]} +CODEMETA_STRATEGY[iri["schema:VirtualLocation"]] = {**CODEMETA_STRATEGY[iri["schema:Intangible"]]} +CODEMETA_STRATEGY[iri["schema:Occupation"]] = { + **CODEMETA_STRATEGY[iri["schema:Intangible"]], + iri["schema:estimatedSalary"]: None # FIXME: troublesome MonetaryAmount or MonetaryAmountDistribution +} +CODEMETA_STRATEGY[iri["schema:EnergyConsumptionDetails"]] = {**CODEMETA_STRATEGY[iri["schema:Intangible"]]} +CODEMETA_STRATEGY[iri["schema:OccupationalExperienceRequirements"]] = {**CODEMETA_STRATEGY[iri["schema:Intangible"]]} +CODEMETA_STRATEGY[iri["schema:AlignmentObject"]] = {**CODEMETA_STRATEGY[iri["schema:Intangible"]]} +CODEMETA_STRATEGY[iri["schema:BroadcastFrequencySpecification"]] = {**CODEMETA_STRATEGY[iri["schema:Intangible"]]} +CODEMETA_STRATEGY[iri["schema:BroadcastChannel"]] = {**CODEMETA_STRATEGY[iri["schema:Intangible"]]} +CODEMETA_STRATEGY[iri["schema:ConstraintNode"]] = {**CODEMETA_STRATEGY[iri["schema:Intangible"]]} +CODEMETA_STRATEGY[iri["schema:StatisticalVariable"]] = {**CODEMETA_STRATEGY[iri["schema:ConstraintNode"]]} +CODEMETA_STRATEGY[iri["schema:Property"]] = { + **CODEMETA_STRATEGY[iri["schema:Intangible"]], + iri["schema:supersededBy"]: None, # FIXME: troublesome Class or Enumeration or Property +} + +CODEMETA_STRATEGY[iri["schema:Place"]] = { + **CODEMETA_STRATEGY[iri["schema:Thing"]], + iri["schema:geo"]: None, # FIXME: troublesome GeoCoordinates or GeoShape + iri["schema:geoContains"]: None, # FIXME: troublesome GeospatialGeometry or Place + iri["schema:geoCoveredBy"]: None, # FIXME: troublesome GeospatialGeometry or Place + iri["schema:geoCovers"]: None, # FIXME: troublesome GeospatialGeometry or Place + iri["schema:geoCrosses"]: None, # FIXME: troublesome GeospatialGeometry or Place + iri["schema:geoDisjoint"]: None, # FIXME: troublesome GeospatialGeometry or Place + iri["schema:geoEquals"]: None, # FIXME: troublesome GeospatialGeometry or Place + iri["schema:geoIntersects"]: None, # FIXME: troublesome GeospatialGeometry or Place + iri["schema:geoOverlaps"]: None, # FIXME: troublesome GeospatialGeometry or Place + iri["schema:geoTouches"]: None, # FIXME: troublesome GeospatialGeometry or Place + iri["schema:geoWithin"]: None, # FIXME: troublesome GeospatialGeometry or Place + iri["schema:photo"]: None # FIXME: troublesome ImageObject or Photograph +} +CODEMETA_STRATEGY[iri["schema:AdministrativeArea"]] = {**CODEMETA_STRATEGY[iri["schema:Place"]]} +CODEMETA_STRATEGY[iri["schema:Country"]] = {**CODEMETA_STRATEGY[iri["schema:AdministrativeArea"]]} +CODEMETA_STRATEGY[iri["schema:CivicStructure"]] = {**CODEMETA_STRATEGY[iri["schema:Place"]]} + +CODEMETA_STRATEGY[iri["schema:CreativeWorkSeries"]] = { + **CODEMETA_STRATEGY[iri["schema:CreativeWork"]], + **CODEMETA_STRATEGY[iri["schema:Series"]] +} + +CODEMETA_STRATEGY[iri["schema:HowToSection"]] = { + **CODEMETA_STRATEGY[iri["schema:CreativeWork"]], + **CODEMETA_STRATEGY[iri["schema:ItemList"]], + **CODEMETA_STRATEGY[iri["schema:ListItem"]] +} +CODEMETA_STRATEGY[iri["schema:HowToStep"]] = { + **CODEMETA_STRATEGY[iri["schema:CreativeWork"]], + **CODEMETA_STRATEGY[iri["schema:ItemList"]], + **CODEMETA_STRATEGY[iri["schema:ListItem"]] +} + +CODEMETA_STRATEGY[iri["schema:Event"]] = { + **CODEMETA_STRATEGY[iri["schema:Thing"]], + iri["schema:actor"]: None, # FIXME: troublesome PerformingGroup or Person + iri["schema:attendee"]: None, # FIXME: troublesome Organization or Person + iri["schema:composer"]: None, # FIXME: troublesome Organization or Person + iri["schema:contributor"]: None, # FIXME: troublesome Organization or Person + iri["schema:dircetor"]: ACTIONS["merge_match_person"], + iri["schema:duration"]: None, # FIXME: troublesome Duration or QuantitativeValue + iri["schema:funder"]: None, # FIXME: troublesome Organization or Person + iri["schema:location"]: None, # FIXME: troublesome Place or PostalAddress or VirtualLocation + iri["schema:offers"]: None, # FIXME: troublesome Demand or Offer + iri["schema:organizer"]: None, # FIXME: troublesome Organization or Person + iri["schema:performer"]: None, # FIXME: troublesome Organization or Person + iri["schema:sponsor"]: None, # FIXME: troublesome Organization or Person + iri["schema:translator"]: None # FIXME: troublesome Organization or Person +} +CODEMETA_STRATEGY[iri["schema:PublicationEvent"]] = { + **CODEMETA_STRATEGY[iri["schema:Event"]], + iri["schema:publishedBy"]: None, # FIXME: troublesome Organization or Person +} + +CODEMETA_STRATEGY[iri["schema:BioChemEntity"]] = { + **CODEMETA_STRATEGY[iri["schema:Thing"]], + iri["schema:associatedDisease"]: None, # FIXME: troublesome MedicalCondition or PropertyValue + iri["schema:hasMolecularFunction"]: None, # FIXME: troublesome DefinedTerm or PropertyValue + iri["schema:isInvolvedInBiologicalProcess"]: None, # FIXME: troublesome DefinedTerm or PropertyValue + iri["schema:isLocatedInSubcellularLocation"]: None, # FIXME: troublesome DefinedTerm or PropertyValue + iri["schema:taxonomicRange"]: None # FIXME: troublesome DefinedTerm or Taxon +} +CODEMETA_STRATEGY[iri["schema:Gene"]] = { + **CODEMETA_STRATEGY[iri["schema:BioChemEntity"]], + iri["schema:expressedIn"]: None # FIXME: troublesome AnatomicalStructure or AnatomicalSystem or BioChemEntity or DefinedTerm +} + +CODEMETA_STRATEGY[iri["schema:MedicalEntity"]] = {**CODEMETA_STRATEGY[iri["schema:Thing"]]} +CODEMETA_STRATEGY[iri["schema:MedicalIntangible"]] = {**CODEMETA_STRATEGY[iri["schema:MedicalEntity"]]} +CODEMETA_STRATEGY[iri["schema:DrugLegalStatus"]] = {**CODEMETA_STRATEGY[iri["schema:MedicalIntangible"]]} +CODEMETA_STRATEGY[iri["schema:DDxElement"]] = {**CODEMETA_STRATEGY[iri["schema:MedicalIntangible"]]} +CODEMETA_STRATEGY[iri["schema:MedicalConditionStage"]] = {**CODEMETA_STRATEGY[iri["schema:MedicalIntangible"]]} +CODEMETA_STRATEGY[iri["schema:DrugStrength"]] = {**CODEMETA_STRATEGY[iri["schema:MedicalIntangible"]]} +CODEMETA_STRATEGY[iri["schema:DoseSchedule"]] = {**CODEMETA_STRATEGY[iri["schema:MedicalIntangible"]]} +CODEMETA_STRATEGY[iri["schema:MaximumDoseSchedule"]] = {**CODEMETA_STRATEGY[iri["schema:MedicalIntangible"]]} +CODEMETA_STRATEGY[iri["schema:MedicalGuideline"]] = {**CODEMETA_STRATEGY[iri["schema:MedicalEntity"]]} +CODEMETA_STRATEGY[iri["schema:AnatomicalStructure"]] = {**CODEMETA_STRATEGY[iri["schema:MedicalEntity"]]} +CODEMETA_STRATEGY[iri["schema:MedicalCause"]] = {**CODEMETA_STRATEGY[iri["schema:MedicalEntity"]]} +CODEMETA_STRATEGY[iri["schema:DrugClass"]] = {**CODEMETA_STRATEGY[iri["schema:MedicalEntity"]]} +CODEMETA_STRATEGY[iri["schema:LifestyleModification"]] = {**CODEMETA_STRATEGY[iri["schema:MedicalEntity"]]} +CODEMETA_STRATEGY[iri["schema:MedicalRiskFactor"]] = {**CODEMETA_STRATEGY[iri["schema:MedicalEntity"]]} +CODEMETA_STRATEGY[iri["schema:MedicalTest"]] = {**CODEMETA_STRATEGY[iri["schema:MedicalEntity"]]} +CODEMETA_STRATEGY[iri["schema:MedicalDevice"]] = {**CODEMETA_STRATEGY[iri["schema:MedicalEntity"]]} +CODEMETA_STRATEGY[iri["schema:MedicalTest"]] = {**CODEMETA_STRATEGY[iri["schema:MedicalEntity"]]} +CODEMETA_STRATEGY[iri["schema:MedicalContraindication"]] = {**CODEMETA_STRATEGY[iri["schema:MedicalEntity"]]} +CODEMETA_STRATEGY[iri["schema:MedicalProcedure"]] = {**CODEMETA_STRATEGY[iri["schema:MedicalEntity"]]} +CODEMETA_STRATEGY[iri["schema:TherapeuticProcedure"]] = {**CODEMETA_STRATEGY[iri["schema:MedicalProcedure"]]} +CODEMETA_STRATEGY[iri["schema:MedicalTherapy"]] = {**CODEMETA_STRATEGY[iri["schema:TherapeuticProcedure"]]} +CODEMETA_STRATEGY[iri["schema:MedicalStudy"]] = { + **CODEMETA_STRATEGY[iri["schema:MedicalEntity"]], + iri["schema:sponsor"]: None # FIXME: troublesome Organization or Person +} +CODEMETA_STRATEGY[iri["schema:MedicalCondition"]] = { + **CODEMETA_STRATEGY[iri["schema:MedicalEntity"]], + iri["schema:associatedAnatomy"]: None, # FIXME: troublesome AnatomicalStructure or AnatomicalSystem or SuperficialAnatomy + iri["schema:possibleTreatment"]: None, # FIXME: troublesome Drug or DrugClass or LifestyleModification or MedicalTherapy + iri["schema:secondaryPrevention"]: None # FIXME: troublesome Drug or DrugClass or LifestyleModification or MedicalTherapy +} +CODEMETA_STRATEGY[iri["schema:MedicalSignOrSymptom"]] = { + **CODEMETA_STRATEGY[iri["schema:MedicalCondition"]], + iri["schema:possibleTreatment"]: None # FIXME: troublesome Drug or DrugClass or LifestyleModification or MedicalTherapy +} +CODEMETA_STRATEGY[iri["schema:MedicalSign"]] = {**CODEMETA_STRATEGY[iri["schema:MedicalSignOrSymptom"]]} +CODEMETA_STRATEGY[iri["schema:SuperficialAnatomy"]] = { + **CODEMETA_STRATEGY[iri["schema:MedicalEntity"]], + iri["schema:relatedAnatomy"]: None # FIXME: troublesome AnatomicalStructure or AnatomicalSystem +} +CODEMETA_STRATEGY[iri["schema:AnatomicalSystem"]] = { + **CODEMETA_STRATEGY[iri["schema:MedicalEntity"]], + iri["schema:comprisedOf"]: None # FIXME: troublesome AnatomicalStructure or AnatomicalSystem +} + +CODEMETA_STRATEGY[iri["schema:MedicalCode"]] = { + **CODEMETA_STRATEGY[iri["schema:CategoryCode"]], + **CODEMETA_STRATEGY[iri["schema:MedicalIntangible"]] +} + +CODEMETA_STRATEGY[iri["schema:Product"]] = { + **CODEMETA_STRATEGY[iri["schema:Thing"]], + iri["schema:brand"]: None, # FIXME: troublesome Brand or Organization + iri["schema:category"]: None, # FIXME: troublesome CategoryCode or Thing + iri["schema:depth"]: None, # FIXME: troublesome Distance or QuantitativeValue + iri["schema:height"]: None, # FIXME: troublesome Distance or QuantitativeValue + iri["schema:isRelatedTo"]: None, # FIXME: troublesome Product or Service + iri["schema:isSimilarTo"]: None, # FIXME: troublesome Product or Service + iri["schema:isVariantOf"]: None, # FIXME: troublesome ProductGroup or ProductModel + iri["schema:negativeNotes"]: None, # FIXME: troublesome ItemList or ListItem or WebContent + iri["schema:offers"]: None, # FIXME: troublesome Demand or Offer + iri["schema:positiveNotes"]: None, # FIXME: troublesome ItemList or ListItem or WebContent + iri["schema:size"]: None, # FIXME: troublesome DefinedTerm or QuantitativeValue or SizeSpecification + iri["schema:weight"]: None, # FIXME: troublesome Mass or QuantitativeValue + iri["schema:width"]: None, # FIXME: troublesome Distance or QuantitativeValue +} +CODEMETA_STRATEGY[iri["schema:ProductGroup"]] = {**CODEMETA_STRATEGY[iri["schema:Product"]]} +CODEMETA_STRATEGY[iri["schema:Drug"]] = { + **CODEMETA_STRATEGY[iri["schema:Product"]], + **CODEMETA_STRATEGY[iri["schema:MedicalEntity"]] +} +CODEMETA_STRATEGY[iri["schema:ProductModel"]] = { + **CODEMETA_STRATEGY[iri["schema:Product"]], + iri["schema:isVariantOf"]: None, # FIXME: troublesome ProductGroup or ProductModel +} + +CODEMETA_STRATEGY[iri["schema:PaymentCard"]] = { + **CODEMETA_STRATEGY[iri["schema:FinancialProduct"]], + **CODEMETA_STRATEGY[iri["schema:PaymentMethod"]] +} +CODEMETA_STRATEGY[iri["schema:CreditCard"]] = { + **CODEMETA_STRATEGY[iri["schema:LoanOrCredit"]], + **CODEMETA_STRATEGY[iri["schema:PaymentCard"]] +} + +CODEMETA_STRATEGY[iri["schema:Organization"]] = { + **CODEMETA_STRATEGY[iri["schema:Thing"]], + iri["schema:acceptedPaymentMethod"]: None, # FIXME: troublesome LoanOrCredit or PaymentMethod + iri["schema:alumni"]: ACTIONS["merge_match_person"], + iri["schema:areaServed"]: None, # FIXME: troublesome AdministrativeArea or GeoShape or Place + iri["schema:brand"]: None, # FIXME: troublesome Brand or Organization + iri["schema:employee"]: ACTIONS["merge_match_person"], + iri["schema:founder"]: None, # FIXME: troublesome Organization or Person + iri["schema:funder"]: None, # FIXME: troublesome Organization or Person + iri["schema:legalRepresentative"]: ACTIONS["merge_match_person"], + iri["schema:location"]: None, # FIXME: troublesome Place or PostalAddress or Text or VirtualLocation + iri["schema:member"]: None, # FIXME: troublesome Organization or Person + iri["schema:memberOf"]: None, # FIXME: troublesome MemberProgramTier or Organization or ProgramMembership + iri["schema:ownershipFundingInfo"]: None, # FIXME: troublesome AboutPage or CreativeWork + iri["schema:sponsor"]: None # FIXME: troublesome Organization or Person +} +CODEMETA_STRATEGY[iri["schema:PerformingGroup"]] = {**CODEMETA_STRATEGY[iri["schema:Organization"]]} +CODEMETA_STRATEGY[iri["schema:MusicGroup"]] = { + **CODEMETA_STRATEGY[iri["schema:PerformingGroup"]], + iri["schema:musicGroupMember"]: ACTIONS["merge_match_person"], + iri["schema:track"]: None # FIXME: troublesome ItemList or MusicRecording +} +CODEMETA_STRATEGY[iri["schema:EducationalOrganization"]] = { + **CODEMETA_STRATEGY[iri["schema:Organization"]], + **CODEMETA_STRATEGY[iri["schema:CivicStructure"]] +} + +CODEMETA_STRATEGY[iri["schema:DefinedRegion"]] = { + **CODEMETA_STRATEGY[iri["schema:Place"]], + **CODEMETA_STRATEGY[iri["schema:StructuredValue"]] +} + +CODEMETA_STRATEGY[iri["schema:Person"]] = { + **CODEMETA_STRATEGY[iri["schema:Thing"]], + iri["schema:alumniOf"]: None, # FIXME: troublesome EducationalOrganization or Organization + iri["schema:brand"]: None, # FIXME: troublesome Brand or Organization + iri["schema:children"]: ACTIONS["merge_match_person"], + iri["schema:colleague"]: ACTIONS["merge_match_person"], + iri["schema:follows"]: ACTIONS["merge_match_person"], + iri["schema:funder"]: None, # FIXME: troublesome Organization or Person + iri["schema:height"]: None, # FIXME: troublesome Distance or QuantitativeValue + iri["schema:homeLocation"]: None, # FIXME: troublesome ContactPoint or Place + iri["schema:knows"]: ACTIONS["merge_match_person"], + iri["schema:memberOf"]: None, # FIXME: troublesome MemberProgramTier or Organization or ProgramMembership + iri["schema:netWorth"]: None, # FIXME: troublesome MonetaryAmount or PriceSpecification + iri["schema:parent"]: ACTIONS["merge_match_person"], + iri["schema:pronouns"]: None, # FIXME: troublesome DefinedTerm or StructuredValue + iri["schema:relatedTo"]: ACTIONS["merge_match_person"], + iri["schema:sibling"]: ACTIONS["merge_match_person"], + iri["schema:sponsor"]: None, # FIXME: troublesome Organization or Person + iri["schema:spouse"]: ACTIONS["merge_match_person"], + iri["schema:weight"]: None, # FIXME: troublesome Mass or QuantitativeValue + iri["schema:workLocation"]: None # FIXME: troublesome ContactPoint or Place +} + +CODEMETA_STRATEGY[iri["schema:Taxon"]] = {**CODEMETA_STRATEGY[iri["schema:Thing"]]} diff --git a/src/hermes/model/types/ld_dict.py b/src/hermes/model/types/ld_dict.py index 42bc3ed9..2c88a520 100644 --- a/src/hermes/model/types/ld_dict.py +++ b/src/hermes/model/types/ld_dict.py @@ -72,6 +72,9 @@ def __ne__(self, other): return NotImplemented return not x + def __bool__(self): + return bool(self.data_dict) + def get(self, key, default=_NO_DEFAULT): if key not in self and default is not ld_dict._NO_DEFAULT: return default diff --git a/test/hermes_test/model/test_api_e2e.py b/test/hermes_test/model/test_api_e2e.py index f756f101..646f815a 100644 --- a/test/hermes_test/model/test_api_e2e.py +++ b/test/hermes_test/model/test_api_e2e.py @@ -508,7 +508,45 @@ def test_invenio_deposit(tmp_path, monkeypatch, sandbox_auth, metadata, invenio_ }], "http://schema.org/license": [{"@id": "https://spdx.org/licenses/Apache-2.0"}] }) - ), + ) + ] +) +def test_process(tmp_path, monkeypatch, metadata_in, metadata_out): + monkeypatch.chdir(tmp_path) + + manager = context_manager.HermesContext(tmp_path) + manager.prepare_step("harvest") + for harvester, result in metadata_in.items(): + with manager[harvester] as cache: + cache["codemeta"] = result.compact() + cache["context"] = {"@context": result.full_context} + cache["expanded"] = result.ld_value + manager.finalize_step("harvest") + + config_file = tmp_path / "hermes.toml" + config_file.write_text(f"[harvest]\nsources = [{', '.join(f'\"{harvester}\"' for harvester in metadata_in)}]") + + orig_argv = sys.argv[:] + sys.argv = ["hermes", "process", "--path", str(tmp_path), "--config", str(config_file)] + result = {} + try: + monkeypatch.setattr(context_manager.HermesContext.__init__, "__defaults__", (tmp_path.cwd(),)) + cli.main() + except SystemExit as e: + if e.code != 0: + raise e + finally: + manager.prepare_step("process") + result = SoftwareMetadata.load_from_cache(manager, "result") + manager.finalize_step("process") + sys.argv = orig_argv + + assert result == metadata_out + +@pytest.mark.xfail +@pytest.mark.parametrize( + "metadata_in, metadata_out", + [ ( { "cff": SoftwareMetadata({ @@ -520,6 +558,10 @@ def test_invenio_deposit(tmp_path, monkeypatch, sandbox_auth, metadata, invenio_ "http://schema.org/familyName": [{"@value": "Test"}], "http://schema.org/email": [{"@value": "test.testi@testis.tests"}] }, + { + "@type": "http://schema.org/Person", + "http://schema.org/familyName": [{"@value": "Testers"}] + }, { "@type": "http://schema.org/Person", "http://schema.org/familyName": [{"@value": "Tester"}], @@ -531,25 +573,41 @@ def test_invenio_deposit(tmp_path, monkeypatch, sandbox_auth, metadata, invenio_ "codemeta": SoftwareMetadata({ "@type": ["http://schema.org/SoftwareSourceCode"], "http://schema.org/description": [{"@value": "for testing"}], - "http://schema.org/name": [{"@value": "Test"}], - "http://schema.org/author": [{ - "@type": "http://schema.org/Person", - "http://schema.org/familyName": [{"@value": "Test"}], - "http://schema.org/givenName": [{"@value": "Testi"}], - "http://schema.org/email": [{"@value": "test.testi@testis.tests"}] - }] + "http://schema.org/name": [{"@value": "Test"}, {"@value": "Testis Test"}], + "http://schema.org/author": [ + { + "@type": "http://schema.org/Person", + "http://schema.org/familyName": [{"@value": "Test"}], + "http://schema.org/givenName": [{"@value": "Testi"}], + "http://schema.org/email": [ + {"@value": "test.testi@testis.tests"}, + {"@value": "test.testi@testis.tests2"} + ] + }, + { + "@type": "http://schema.org/Person", + "http://schema.org/familyName": [{"@value": "Testers"}] + } + ] }) }, SoftwareMetadata({ "@type": ["http://schema.org/SoftwareSourceCode"], "http://schema.org/description": [{"@value": "for testing"}], - "http://schema.org/name": [{"@value": "Test"}], + "http://schema.org/name": [{"@value": "Test"}, {"@value": "Testis Test"}], "http://schema.org/author": [ { "@type": "http://schema.org/Person", "http://schema.org/familyName": [{"@value": "Test"}], "http://schema.org/givenName": [{"@value": "Testi"}], - "http://schema.org/email": [{"@value": "test.testi@testis.tests"}] + "http://schema.org/email": [ + {"@value": "test.testi@testis.tests"}, + {"@value": "test.testi@testis.tests2"} + ] + }, + { + "@type": "http://schema.org/Person", + "http://schema.org/familyName": [{"@value": "Testers"}] }, { "@type": "http://schema.org/Person", @@ -592,5 +650,4 @@ def test_process(tmp_path, monkeypatch, metadata_in, metadata_out): manager.finalize_step("process") sys.argv = orig_argv - assert result.ld_value == metadata_out.ld_value assert result == metadata_out From 08619ee89ece119ec7a9f764dc72ef51d704d8ee Mon Sep 17 00:00:00 2001 From: Michael Fritzsche Date: Thu, 5 Mar 2026 14:40:46 +0100 Subject: [PATCH 205/247] fixed formation errors --- src/hermes/model/merge/strategy.py | 40 +++++++++++++++++++------- test/hermes_test/model/test_api_e2e.py | 3 +- 2 files changed, 31 insertions(+), 12 deletions(-) diff --git a/src/hermes/model/merge/strategy.py b/src/hermes/model/merge/strategy.py index e928a4fc..fb2aeaf6 100644 --- a/src/hermes/model/merge/strategy.py +++ b/src/hermes/model/merge/strategy.py @@ -188,7 +188,9 @@ iri["schema:areaServed"]: None, # FIXME: troublesome AdministrativeArea or GeoShape or Place iri["schema:eligibleRegion"]: None, # FIXME: troublesome GeoShape or Place iri["schema:ineligibleRegion"]: None, # FIXME: troublesome GeoShape or Place - iri["schema:itemOffered"]: None, # FIXME: troublesome AggregateOffer or CreativeWork or Event or MenuItem or Product or Service or Trip + iri[ + "schema:itemOffered" + ]: None, # FIXME: troublesome AggregateOffer or CreativeWork or Event or MenuItem or Product or Service or Trip iri["schema:seller"]: None # FIXME: troublesome Organization or Person } CODEMETA_STRATEGY[iri["schema:Offer"]] = { @@ -198,7 +200,9 @@ iri["schema:category"]: None, # FIXME: troublesome CategoryCode or Thing iri["schema:eligibleRegion"]: None, # FIXME: troublesome GeoShape or Place iri["schema:ineligibleRegion"]: None, # FIXME: troublesome GeoShape or Place - iri["schema:itemOffered"]: None, # FIXME: troublesome AggregateOffer or CreativeWork or Event or MenuItem or Product or Service or Trip + iri[ + "schema:itemOffered" + ]: None, # FIXME: troublesome AggregateOffer or CreativeWork or Event or MenuItem or Product or Service or Trip iri["schema:leaseLength"]: None, # FIXME: troublesome Duration or QuantitativeValue iri["schema:offeredBy"]: None, # FIXME: troublesome Organization or Person iri["schema:seller"]: None, # FIXME: troublesome Organization or Person @@ -233,7 +237,8 @@ } CODEMETA_STRATEGY[iri["schema:PropertyValue"]] = { **CODEMETA_STRATEGY[iri["schema:StructuredValue"]], - iri["schema:valueReference"]: None # FIXME: troublesome DefinedTerm or Enumeration or PropertyValue or QualitativeValue or QuantitativeValue or StructuredValue + iri["schema:valueReference"]: None # FIXME: troublesome DefinedTerm or Enumeration or PropertyValue + # or QualitativeValue or QuantitativeValue or StructuredValue } CODEMETA_STRATEGY[iri["schema:ContactPoint"]] = { **CODEMETA_STRATEGY[iri["schema:StructuredValue"]], @@ -263,7 +268,8 @@ } CODEMETA_STRATEGY[iri["schema:QuantitativeValue"]] = { **CODEMETA_STRATEGY[iri["schema:StructuredValue"]], - iri["schema:valueReference"]: None # FIXME: troublesome DefinedTerm or Enumeration or PropertyValue or QualitativeValue or QuantitativeValue or StructuredValue + iri["schema:valueReference"]: None # FIXME: troublesome DefinedTerm or Enumeration or PropertyValue + # or QualitativeValue or QuantitativeValue or StructuredValue } CODEMETA_STRATEGY[iri["schema:ShippingService"]] = { **CODEMETA_STRATEGY[iri["schema:StructuredValue"]], @@ -312,7 +318,8 @@ } CODEMETA_STRATEGY[iri["schema:Grant"]] = { **CODEMETA_STRATEGY[iri["schema:Intangible"]], - iri["schema:fundedItem"]: None, # FIXME: troublesome BioChemEntity or CreativeWork or Event or MedicalEntity or Organization or Person or Product + iri["schema:fundedItem"]: None, # FIXME: troublesome BioChemEntity or CreativeWork or Event or MedicalEntity + # or Organization or Person or Product iri["schema:funder"]: None, # FIXME: troublesome Organization or Person iri["schema:sponsor"]: None # FIXME: troublesome Organization or Person } @@ -351,7 +358,8 @@ } CODEMETA_STRATEGY[iri["schema:QualitativeValue"]] = { **CODEMETA_STRATEGY[iri["schema:Enumeration"]], - iri["schema:valueReference"]: None # FIXME: troublesome DefinedTerm or Enumeration or PropertyValue or QualitativeValue or QuantitativeValue or StructuredValue + iri["schema:valueReference"]: None # FIXME: troublesome DefinedTerm or Enumeration or PropertyValue + # or QualitativeValue or QuantitativeValue or StructuredValue } CODEMETA_STRATEGY[iri["schema:SizeSpecification"]] = {**CODEMETA_STRATEGY[iri["schema:QualitativeValue"]]} CODEMETA_STRATEGY[iri["schema:Class"]] = { @@ -467,7 +475,9 @@ } CODEMETA_STRATEGY[iri["schema:Gene"]] = { **CODEMETA_STRATEGY[iri["schema:BioChemEntity"]], - iri["schema:expressedIn"]: None # FIXME: troublesome AnatomicalStructure or AnatomicalSystem or BioChemEntity or DefinedTerm + iri[ + "schema:expressedIn" + ]: None # FIXME: troublesome AnatomicalStructure or AnatomicalSystem or BioChemEntity or DefinedTerm } CODEMETA_STRATEGY[iri["schema:MedicalEntity"]] = {**CODEMETA_STRATEGY[iri["schema:Thing"]]} @@ -497,13 +507,21 @@ } CODEMETA_STRATEGY[iri["schema:MedicalCondition"]] = { **CODEMETA_STRATEGY[iri["schema:MedicalEntity"]], - iri["schema:associatedAnatomy"]: None, # FIXME: troublesome AnatomicalStructure or AnatomicalSystem or SuperficialAnatomy - iri["schema:possibleTreatment"]: None, # FIXME: troublesome Drug or DrugClass or LifestyleModification or MedicalTherapy - iri["schema:secondaryPrevention"]: None # FIXME: troublesome Drug or DrugClass or LifestyleModification or MedicalTherapy + iri[ + "schema:associatedAnatomy" + ]: None, # FIXME: troublesome AnatomicalStructure or AnatomicalSystem or SuperficialAnatomy + iri[ + "schema:possibleTreatment" + ]: None, # FIXME: troublesome Drug or DrugClass or LifestyleModification or MedicalTherapy + iri[ + "schema:secondaryPrevention" + ]: None # FIXME: troublesome Drug or DrugClass or LifestyleModification or MedicalTherapy } CODEMETA_STRATEGY[iri["schema:MedicalSignOrSymptom"]] = { **CODEMETA_STRATEGY[iri["schema:MedicalCondition"]], - iri["schema:possibleTreatment"]: None # FIXME: troublesome Drug or DrugClass or LifestyleModification or MedicalTherapy + iri[ + "schema:possibleTreatment" + ]: None # FIXME: troublesome Drug or DrugClass or LifestyleModification or MedicalTherapy } CODEMETA_STRATEGY[iri["schema:MedicalSign"]] = {**CODEMETA_STRATEGY[iri["schema:MedicalSignOrSymptom"]]} CODEMETA_STRATEGY[iri["schema:SuperficialAnatomy"]] = { diff --git a/test/hermes_test/model/test_api_e2e.py b/test/hermes_test/model/test_api_e2e.py index 646f815a..30ecd11c 100644 --- a/test/hermes_test/model/test_api_e2e.py +++ b/test/hermes_test/model/test_api_e2e.py @@ -543,6 +543,7 @@ def test_process(tmp_path, monkeypatch, metadata_in, metadata_out): assert result == metadata_out + @pytest.mark.xfail @pytest.mark.parametrize( "metadata_in, metadata_out", @@ -620,7 +621,7 @@ def test_process(tmp_path, monkeypatch, metadata_in, metadata_out): ) ] ) -def test_process(tmp_path, monkeypatch, metadata_in, metadata_out): +def test_process_complex(tmp_path, monkeypatch, metadata_in, metadata_out): monkeypatch.chdir(tmp_path) manager = context_manager.HermesContext(tmp_path) From ac36a286a2bf86842fc6c6e04391221e535b7248 Mon Sep 17 00:00:00 2001 From: notactuallyfinn Date: Fri, 6 Mar 2026 17:48:50 +0100 Subject: [PATCH 206/247] updated strategies and match functions --- src/hermes/model/merge/action.py | 2 +- src/hermes/model/merge/match.py | 31 +- src/hermes/model/merge/strategy.py | 1094 ++++++++++++++---------- test/hermes_test/model/test_api_e2e.py | 1 - 4 files changed, 659 insertions(+), 469 deletions(-) diff --git a/src/hermes/model/merge/action.py b/src/hermes/model/merge/action.py index 6108b9ea..b9b516ef 100644 --- a/src/hermes/model/merge/action.py +++ b/src/hermes/model/merge/action.py @@ -259,6 +259,6 @@ def merge( """ FIXME: log error """ break else: - value.append(item) + value.append(update_item) # Return the merged values. return value diff --git a/src/hermes/model/merge/match.py b/src/hermes/model/merge/match.py index 3934b785..8a0aa9a1 100644 --- a/src/hermes/model/merge/match.py +++ b/src/hermes/model/merge/match.py @@ -10,21 +10,6 @@ from ..types import ld_dict -def match_equals(a: Any, b: Any) -> bool: - """ - Wrapper method for normal == comparison. - - :param a: First item for the comparison. - :type a: Any - :param b: Second item for the comparison. - :type b: Any - - :return: Truth value of a == b. - :rtype: bool - """ - return a == b - - def match_keys(*keys: list[str], fall_back_to_equals: bool = False) -> Callable[[Any, Any], bool]: """ Creates a function taking to parameters that returns true @@ -81,3 +66,19 @@ def match_person(left: Any, right: Any) -> bool: mails_right = right["schema:email"] return any((mail in mails_right) for mail in left["schema:email"]) return left == right + + +def match_multiple_types( + *functions_for_types: list[tuple[str, Callable[[Any, Any], bool]]], + fall_back_function: Callable[[Any, Any], bool] = match_keys("@id", fall_back_to_equals=True) +) -> Callable[[Any, Any], bool]: + def match_func(left: Any, right: Any) -> bool: + if not ((isinstance(left, ld_dict) and isinstance(right, ld_dict)) and "@type" in left and "@type" in right): + return fall_back_function(left, right) + types_left = left["@type"] + types_right = right["@type"] + for ld_type, func in functions_for_types: + if ld_type in types_left and ld_type in types_right: + return func(left, right) + return fall_back_function(left, right) + return match_func diff --git a/src/hermes/model/merge/strategy.py b/src/hermes/model/merge/strategy.py index fb2aeaf6..5aaa5d7f 100644 --- a/src/hermes/model/merge/strategy.py +++ b/src/hermes/model/merge/strategy.py @@ -7,625 +7,815 @@ from ..types.ld_context import iri_map as iri from .action import Concat, MergeSet -from .match import match_keys, match_person +from .match import match_keys, match_person, match_multiple_types +DEFAULT_MATCH = match_keys("@id", fall_back_to_equals=True) + +MATCH_FUNCTION_FOR_TYPE = {"schema:Person": match_person} + ACTIONS = { - "default": MergeSet(match_keys("@id", fall_back_to_equals=True)), - "merge_match_person": MergeSet(match_person) + "default": MergeSet(DEFAULT_MATCH), + "concat": Concat(), + "Person": MergeSet(MATCH_FUNCTION_FOR_TYPE["schema:Person"]), + **{ + "Or".join(types): MergeSet(match_multiple_types( + *(("schema:" + type, MATCH_FUNCTION_FOR_TYPE.get("schema:" + type, DEFAULT_MATCH)) for type in types) + )) + for types in [ + ("AboutPage", "CreativeWork"), + ("AdministrativeArea", "GeoShape", "Place"), + ("AggregateOffer", "CreativeWork", "Event", "MenuItem", "Product", "Service", "Trip"), + ("AnatomicalStructure", "AnatomicalSystem"), + ("AnatomicalStructure", "AnatomicalSystem", "BioChemEntity", "DefinedTerm"), + ("AnatomicalStructure", "AnatomicalSystem", "SuperficialAnatomy"), + ("AudioObject", "Clip", "MusicRecording"), + ("BioChemEntity", "CreativeWork", "Event", "MedicalEntity", "Organization", "Person", "Product"), + ("Brand", "Organization"), + ("CategoryCode", "Thing"), + ("Class", "Enumeration"), + ("Class", "Enumeration", "Property"), + ("Clip", "VideoObject"), + ("Comment", "CreativeWork"), + ("ContactPoint", "Place"), + ("CreativeWork", "HowToSection", "HowToStep"), + ("CreativeWork", "Product"), + ("CreditCard", "MonetaryAmount", "UnitPriceSpecification"), + ("DataFeedItem", "Thing"), + ("Demand", "Offer"), + ("DefinedTerm", "Enumeration", "PropertyValue", "QualitativeValue", "QuantitativeValue", "StructuredValue"), + ("DefinedTerm", "PropertyValue"), + ("DefinedTerm", "QuantitativeValue", "SizeSpecification"), + ("DefinedTerm", "StructuredValue"), + ("DefinedTerm", "Taxon"), + ("Distance", "QuantitativeValue"), + ("Drug", "DrugClass", "LifestyleModification", "MedicalTherapy"), + ("Duration", "QuantitativeValue"), + ("EducationalOrganization", "Organization"), + ("GeoCoordinates", "GeoShape"), + ("GeoShape", "Place"), + ("GeospatialGeometry", "Place"), + ("ImageObject", "Photograph"), + ("ItemList", "ListItem", "WebContent"), + ("ItemList", "MusicRecording"), + ("ListItem", "Thing"), + ("LoanOrCredit", "PaymentMethod"), + ("Mass", "QuantitativeValue"), + ("MedicalCondition", "PropertyValue"), + ("MemberProgramTier", "Organization", "ProgramMembership"), + ("MenuItem", "MenuSection"), + ("MonetaryAmount", "MonetaryAmountDistribution"), + ("MonetaryAmount", "PriceSpecification"), + ("MonetaryAmount", "ShippingRateSettings"), + ("MusicGroup", "Person"), + ("Organization", "Person"), + ("PerformingGroup", "Person"), + ("Place", "PostalAddress", "VirtualLocation"), + ("ProductGroup", "ProductModel"), + ("Property", "PropertyValue", "StatisticalVariable"), + ("Product", "Service"), + ("QuantitativeValue", "ServicePeriod"), + ("SoftwareApplication", "WebSite") + ] + } } PROV_STRATEGY = { - None: {iri["hermes-rt:graph"]: Concat(), iri["hermes-rt:replace"]: Concat(), iri["hermes-rt:reject"]: Concat()} + None: { + iri["hermes-rt:graph"]: ACTIONS["concat"], + iri["hermes-rt:replace"]: ACTIONS["concat"], + iri["hermes-rt:reject"]: ACTIONS["concat"] + } } -# All troublesome marked entries can contain objects of different types, e.g. Person and Organization. -# This is troublesome because Persons may be compared using a different method than Organizations. + # Filled with entries for every schema-type that can be found inside an JSON-LD dict of type # SoftwareSourceCode or SoftwareApplication. CODEMETA_STRATEGY = {None: {None: ACTIONS["default"]}} +CODEMETA_STRATEGY[iri["schema:Thing"]] = {iri["schema:owner"]: ACTIONS["OrganizationOrPerson"]} -CODEMETA_STRATEGY[iri["schema:Thing"]] = {iri["schema:owner"]: None} # FIXME: troublesome Organization or Person -CODEMETA_STRATEGY[iri["schema:CreativeWork"]] = { + +CODEMETA_STRATEGY[iri["schema:Action"]] = { **CODEMETA_STRATEGY[iri["schema:Thing"]], - iri["schema:accountablePerson"]: ACTIONS["merge_match_person"], - iri["schema:audio"]: None, # FIXME: troublesome AudioObject or Clip or MusicRecording - iri["schema:author"]: None, # FIXME: troublesome Organization or Person - iri["schema:character"]: ACTIONS["merge_match_person"], - iri["schema:contributor"]: None, # FIXME: troublesome Organization or Person - iri["schema:copyrightHolder"]: None, # FIXME: troublesome Organization or Person - iri["schema:creator"]: None, # FIXME: troublesome Organization or Person - iri["schema:editor"]: ACTIONS["merge_match_person"], - iri["schema:funder"]: None, # FIXME: troublesome Organization or Person - iri["schema:isBasedOn"]: None, # FIXME: troublesome CreativeWork or Product - iri["schema:maintainer"]: None, # FIXME: troublesome Organization or Person - iri["schema:offers"]: None, # FIXME: troublesome Demand or Offer - iri["schema:producer"]: None, # FIXME: troublesome Organization or Person - iri["schema:provider"]: None, # FIXME: troublesome Organization or Person - iri["schema:publisher"]: None, # FIXME: troublesome Organization or Person - iri["schema:sdPublisher"]: None, # FIXME: troublesome Organization or Person - iri["schema:size"]: None, # FIXME: troublesome DefinedTerm or QuantitativeValue or SizeSpecification - iri["schema:sponsor"]: None, # FIXME: troublesome Organization or Person - iri["schema:translator"]: None, # FIXME: troublesome Organization or Person - iri["schema:video"]: None # FIXME: troublesome Clip or VideoObject + iri["schema:agent"]: ACTIONS["OrganizationOrPerson"], + iri["schema:location"]: ACTIONS["PlaceOrPostalAddressOrVirtualLocation"], + iri["schema:participant"]: ACTIONS["OrganizationOrPerson"], + iri["schema:provider"]: ACTIONS["OrganizationOrPerson"] } -CODEMETA_STRATEGY[iri["schema:SoftwareSourceCode"]] = { - **CODEMETA_STRATEGY[iri["schema:CreativeWork"]], - iri["maintainer"]: ACTIONS["merge_match_person"] + + + +CODEMETA_STRATEGY[iri["schema:BioChemEntity"]] = { + **CODEMETA_STRATEGY[iri["schema:Thing"]], + iri["schema:associatedDisease"]: ACTIONS["MedicalConditionOrPropertyValue"], + iri["schema:hasMolecularFunction"]: ACTIONS["DefinedTermOrPropertyValue"], + iri["schema:isInvolvedInBiologicalProcess"]: ACTIONS["DefinedTermOrPropertyValue"], + iri["schema:isLocatedInSubcellularLocation"]: ACTIONS["DefinedTermOrPropertyValue"], + iri["schema:taxonomicRange"]: ACTIONS["DefinedTermOrTaxon"] } -CODEMETA_STRATEGY[iri["schema:MediaObject"]] = { - **CODEMETA_STRATEGY[iri["schema:CreativeWork"]], - iri["schema:duration"]: None, # FIXME: troublesome Duration or QuantitativeValue - iri["schema:height"]: None, # FIXME: troublesome Distance or QuantitativeValue - iri["schema:ineligibleRegion"]: None, # FIXME: troublesome GeoShape or Place - iri["schema:width"]: None # FIXME: troublesome Distance or QuantitativeValue + + +CODEMETA_STRATEGY[iri["schema:Gene"]] = { + **CODEMETA_STRATEGY[iri["schema:BioChemEntity"]], + iri["schema:expressedIn"]: ACTIONS["AnatomicalStructureOrAnatomicalSystemOrBioChemEntityOrDefinedTerm"] } -CODEMETA_STRATEGY[iri["schema:AudioObject"]] = {**CODEMETA_STRATEGY[iri["schema:MediaObject"]]} -CODEMETA_STRATEGY[iri["schema:ImageObject"]] = {**CODEMETA_STRATEGY[iri["schema:MediaObject"]]} -CODEMETA_STRATEGY[iri["schema:VideoObject"]] = { - **CODEMETA_STRATEGY[iri["schema:MediaObject"]], - iri["schema:actor"]: None, # FIXME: troublesome PerformingGroup or Person - iri["schema:dircetor"]: ACTIONS["merge_match_person"], - iri["schema:musicBy"]: None # FIXME: troublesome MusicGroup or Person + + + +CODEMETA_STRATEGY[iri["schema:CreativeWork"]] = { + **CODEMETA_STRATEGY[iri["schema:Thing"]], + iri["schema:accountablePerson"]: ACTIONS["Person"], + iri["schema:audio"]: ACTIONS["AudioObjectOrClipOrMusicRecording"], + iri["schema:author"]: ACTIONS["OrganizationOrPerson"], + iri["schema:character"]: ACTIONS["Person"], + iri["schema:contributor"]: ACTIONS["OrganizationOrPerson"], + iri["schema:copyrightHolder"]: ACTIONS["OrganizationOrPerson"], + iri["schema:creator"]: ACTIONS["OrganizationOrPerson"], + iri["schema:editor"]: ACTIONS["Person"], + iri["schema:funder"]: ACTIONS["OrganizationOrPerson"], + iri["schema:isBasedOn"]: ACTIONS["CreativeWorkOrProduct"], + iri["schema:maintainer"]: ACTIONS["OrganizationOrPerson"], + iri["schema:offers"]: ACTIONS["DemandOrOffer"], + iri["schema:producer"]: ACTIONS["OrganizationOrPerson"], + iri["schema:provider"]: ACTIONS["OrganizationOrPerson"], + iri["schema:publisher"]: ACTIONS["OrganizationOrPerson"], + iri["schema:sdPublisher"]: ACTIONS["OrganizationOrPerson"], + iri["schema:size"]: ACTIONS["DefinedTermOrQuantitativeValueOrSizeSpecification"], + iri["schema:sponsor"]: ACTIONS["OrganizationOrPerson"], + iri["schema:translator"]: ACTIONS["OrganizationOrPerson"], + iri["schema:video"]: ACTIONS["ClipOrVideoObject"] } -CODEMETA_STRATEGY[iri["schema:DataDownload"]] = {**CODEMETA_STRATEGY[iri["schema:MediaObject"]]} + + +CODEMETA_STRATEGY[iri["schema:Article"]] = {**CODEMETA_STRATEGY[iri["schema:CreativeWork"]]} +CODEMETA_STRATEGY[iri["schema:NewsArticle"]] = {**CODEMETA_STRATEGY[iri["schema:Article"]]} +CODEMETA_STRATEGY[iri["schema:ScholarlyArticle"]] = {**CODEMETA_STRATEGY[iri["schema:Article"]]} + CODEMETA_STRATEGY[iri["schema:Certification"]] = {**CODEMETA_STRATEGY[iri["schema:CreativeWork"]]} + CODEMETA_STRATEGY[iri["schema:Claim"]] = { **CODEMETA_STRATEGY[iri["schema:CreativeWork"]], - iri["schema:claimInterpreter"]: None # FIXME: troublesome Organization or Person + iri["schema:claimInterpreter"]: ACTIONS["OrganizationOrPerson"] } + CODEMETA_STRATEGY[iri["schema:Clip"]] = { **CODEMETA_STRATEGY[iri["schema:CreativeWork"]], - iri["schema:actor"]: None, # FIXME: troublesome PerformingGroup or Person - iri["schema:dircetor"]: ACTIONS["merge_match_person"], - iri["schema:musicBy"]: None # FIXME: troublesome MusicGroup or Person + iri["schema:actor"]: ACTIONS["PerformingGroupOrPerson"], + iri["schema:dircetor"]: ACTIONS["Person"], + iri["schema:musicBy"]: ACTIONS["MusicGroupOrPerson"] } + CODEMETA_STRATEGY[iri["schema:Comment"]] = { **CODEMETA_STRATEGY[iri["schema:CreativeWork"]], - iri["schema:parentItem"]: None # FIXME: troublesome Comment or CreativeWork + iri["schema:parentItem"]: ACTIONS["CommentOrCreativeWork"] } CODEMETA_STRATEGY[iri["schema:CorrectionComment"]] = {**CODEMETA_STRATEGY[iri["schema:Comment"]]} + CODEMETA_STRATEGY[iri["schema:CreativeWorkSeason"]] = { **CODEMETA_STRATEGY[iri["schema:CreativeWork"]], - iri["schema:actor"]: None # FIXME: troublesome PerformingGroup or Person + iri["schema:actor"]: ACTIONS["PerformingGroupOrPerson"] +} + +CODEMETA_STRATEGY[iri["schema:DataCatalog"]] = {**CODEMETA_STRATEGY[iri["schema:CreativeWork"]]} + +CODEMETA_STRATEGY[iri["schema:Dataset"]] = { + **CODEMETA_STRATEGY[iri["schema:CreativeWork"]], + iri["schema:variableMeasured"]: ACTIONS["PropertyOrPropertyValueOrStatisticalVariable"] +} +CODEMETA_STRATEGY[iri["schema:DataFeed"]] = { + **CODEMETA_STRATEGY[iri["schema:Dataset"]], + iri["schema:dataFeedElement"]: ACTIONS["DataFeedItemOrThing"] } + CODEMETA_STRATEGY[iri["schema:DefinedTermSet"]] = {**CODEMETA_STRATEGY[iri["schema:CreativeWork"]]} CODEMETA_STRATEGY[iri["schema:CategoryCodeSet"]] = {**CODEMETA_STRATEGY[iri["schema:DefinedTermSet"]]} + +CODEMETA_STRATEGY[iri["schema:EducationalOccupationalCredential"]] = {**CODEMETA_STRATEGY[iri["schema:CreativeWork"]]} + CODEMETA_STRATEGY[iri["schema:Episode"]] = { **CODEMETA_STRATEGY[iri["schema:CreativeWork"]], - iri["schema:actor"]: None, # FIXME: troublesome PerformingGroup or Person - iri["schema:dircetor"]: ACTIONS["merge_match_person"], - iri["schema:duration"]: None, # FIXME: troublesome Duration or QuantitativeValue - iri["schema:musicBy"]: None # FIXME: troublesome MusicGroup or Person + iri["schema:actor"]: ACTIONS["PerformingGroupOrPerson"], + iri["schema:dircetor"]: ACTIONS["Person"], + iri["schema:duration"]: ACTIONS["DurationOrQuantitativeValue"], + iri["schema:musicBy"]: ACTIONS["MusicGroupOrPerson"] } + CODEMETA_STRATEGY[iri["schema:HowTo"]] = { **CODEMETA_STRATEGY[iri["schema:CreativeWork"]], - iri["schema:step"]: None # FIXME: troublesome CreativeWork or HowToSection or HowToStep + iri["schema:step"]: ACTIONS["CreativeWorkOrHowToSectionOrHowToStep"] } + CODEMETA_STRATEGY[iri["schema:HyperTocEntry"]] = {**CODEMETA_STRATEGY[iri["schema:CreativeWork"]]} + CODEMETA_STRATEGY[iri["schema:Map"]] = {**CODEMETA_STRATEGY[iri["schema:CreativeWork"]]} -CODEMETA_STRATEGY[iri["schema:MenuSection"]] = {**CODEMETA_STRATEGY[iri["schema:CreativeWork"]]} -CODEMETA_STRATEGY[iri["schema:MusicRecording"]] = { + +CODEMETA_STRATEGY[iri["schema:MediaObject"]] = { **CODEMETA_STRATEGY[iri["schema:CreativeWork"]], - iri["schema:byArtist"]: None, # FIXME: troublesome MusicGroup or Person - iri["schema:duration"]: None # FIXME: troublesome Duration or QuantitativeValue + iri["schema:duration"]: ACTIONS["DurationOrQuantitativeValue"], + iri["schema:height"]: ACTIONS["DistanceOrQuantitativeValue"], + iri["schema:ineligibleRegion"]: ACTIONS["GeoShapeOrPlace"], + iri["schema:width"]: ACTIONS["DistanceOrQuantitativeValue"] } -CODEMETA_STRATEGY[iri["schema:WebPage"]] = { +CODEMETA_STRATEGY[iri["schema:AudioObject"]] = {**CODEMETA_STRATEGY[iri["schema:MediaObject"]]} +CODEMETA_STRATEGY[iri["schema:DataDownload"]] = {**CODEMETA_STRATEGY[iri["schema:MediaObject"]]} +CODEMETA_STRATEGY[iri["schema:ImageObject"]] = {**CODEMETA_STRATEGY[iri["schema:MediaObject"]]} +CODEMETA_STRATEGY[iri["schema:VideoObject"]] = { + **CODEMETA_STRATEGY[iri["schema:MediaObject"]], + iri["schema:actor"]: ACTIONS["PerformingGroupOrPerson"], + iri["schema:dircetor"]: ACTIONS["Person"], + iri["schema:musicBy"]: ACTIONS["MusicGroupOrPerson"] +} + +CODEMETA_STRATEGY[iri["schema:MenuSection"]] = {**CODEMETA_STRATEGY[iri["schema:CreativeWork"]]} + +CODEMETA_STRATEGY[iri["schema:MusicComposition"]] = { **CODEMETA_STRATEGY[iri["schema:CreativeWork"]], - iri["schema:reviewedBy"]: None # FIXME: troublesome Organization or Person + iri["schema:composer"]: ACTIONS["OrganizationOrPerson"], + iri["schema:lyricist"]: ACTIONS["Person"] } -CODEMETA_STRATEGY[iri["schema:AboutPage"]] = {**CODEMETA_STRATEGY[iri["schema:WebPage"]]} -CODEMETA_STRATEGY[iri["schema:Article"]] = {**CODEMETA_STRATEGY[iri["schema:CreativeWork"]]} -CODEMETA_STRATEGY[iri["schema:NewsArticle"]] = {**CODEMETA_STRATEGY[iri["schema:Article"]]} -CODEMETA_STRATEGY[iri["schema:ScholarlyArticle"]] = {**CODEMETA_STRATEGY[iri["schema:Article"]]} -CODEMETA_STRATEGY[iri["schema:WebPageElement"]] = {**CODEMETA_STRATEGY[iri["schema:CreativeWork"]]} -CODEMETA_STRATEGY[iri["schema:EducationalOccupationalCredential"]] = {**CODEMETA_STRATEGY[iri["schema:CreativeWork"]]} + CODEMETA_STRATEGY[iri["schema:MusicPlaylist"]] = { **CODEMETA_STRATEGY[iri["schema:CreativeWork"]], - iri["schema:track"]: None # FIXME: troublesome ItemList or MusicRecording + iri["schema:track"]: ACTIONS["ItemListOrMusicRecording"] } CODEMETA_STRATEGY[iri["schema:MusicAlbum"]] = { **CODEMETA_STRATEGY[iri["schema:MusicPlaylist"]], - iri["schema:byArtist"]: None, # FIXME: troublesome MusicGroup or Person + iri["schema:byArtist"]: ACTIONS["MusicGroupOrPerson"] } CODEMETA_STRATEGY[iri["schema:MusicRelease"]] = { **CODEMETA_STRATEGY[iri["schema:MusicPlaylist"]], - iri["schema:creditedTo"]: None, # FIXME: troublesome Organization or Person - iri["schema:duration"]: None # FIXME: troublesome Duration or QuantitativeValue + iri["schema:creditedTo"]: ACTIONS["OrganizationOrPerson"], + iri["schema:duration"]: ACTIONS["DurationOrQuantitativeValue"] } -CODEMETA_STRATEGY[iri["schema:MusicComposition"]] = { + +CODEMETA_STRATEGY[iri["schema:MusicRecording"]] = { **CODEMETA_STRATEGY[iri["schema:CreativeWork"]], - iri["schema:composer"]: None, # FIXME: troublesome Organization or Person - iri["schema:lyricist"]: ACTIONS["merge_match_person"], + iri["schema:byArtist"]: ACTIONS["MusicGroupOrPerson"], + iri["schema:duration"]: ACTIONS["DurationOrQuantitativeValue"] } + CODEMETA_STRATEGY[iri["schema:Photograph"]] = {**CODEMETA_STRATEGY[iri["schema:CreativeWork"]]} + CODEMETA_STRATEGY[iri["schema:Review"]] = { **CODEMETA_STRATEGY[iri["schema:CreativeWork"]], - iri["schema:negativeNotes"]: None, # FIXME: troublesome ItemList or ListItem or WebContent - iri["schema:positiveNotes"]: None # FIXME: troublesome ItemList or ListItem or WebContent + iri["schema:negativeNotes"]: ACTIONS["ItemListOrListItemOrWebContent"], + iri["schema:positiveNotes"]: ACTIONS["ItemListOrListItemOrWebContent"] } + CODEMETA_STRATEGY[iri["schema:SoftwareApplication"]] = {**CODEMETA_STRATEGY[iri["schema:CreativeWork"]]} -CODEMETA_STRATEGY[iri["schema:RuntimePlatform"]] = {**CODEMETA_STRATEGY[iri["schema:SoftwareApplication"]]} CODEMETA_STRATEGY[iri["schema:OperatingSystem"]] = {**CODEMETA_STRATEGY[iri["schema:SoftwareApplication"]]} -CODEMETA_STRATEGY[iri["schema:WebSite"]] = {**CODEMETA_STRATEGY[iri["schema:CreativeWork"]]} -CODEMETA_STRATEGY[iri["schema:WebContent"]] = {**CODEMETA_STRATEGY[iri["schema:CreativeWork"]]} -CODEMETA_STRATEGY[iri["schema:DataCatalog"]] = {**CODEMETA_STRATEGY[iri["schema:CreativeWork"]]} -CODEMETA_STRATEGY[iri["schema:Dataset"]] = { +CODEMETA_STRATEGY[iri["schema:RuntimePlatform"]] = {**CODEMETA_STRATEGY[iri["schema:SoftwareApplication"]]} + +CODEMETA_STRATEGY[iri["schema:SoftwareSourceCode"]] = { **CODEMETA_STRATEGY[iri["schema:CreativeWork"]], - iri["schema:variableMeasured"]: None # FIXME: troublesome Property or PropertyValue or StatisticalVariable + iri["maintainer"]: ACTIONS["Person"] } -CODEMETA_STRATEGY[iri["schema:DataFeed"]] = { - **CODEMETA_STRATEGY[iri["schema:Dataset"]], - iri["schema:dataFeedElement"]: None # FIXME: troublesome DataFeedItem or Thing + +CODEMETA_STRATEGY[iri["schema:WebContent"]] = {**CODEMETA_STRATEGY[iri["schema:CreativeWork"]]} + +CODEMETA_STRATEGY[iri["schema:WebPage"]] = { + **CODEMETA_STRATEGY[iri["schema:CreativeWork"]], + iri["schema:reviewedBy"]: ACTIONS["OrganizationOrPerson"] } +CODEMETA_STRATEGY[iri["schema:AboutPage"]] = {**CODEMETA_STRATEGY[iri["schema:WebPage"]]} -CODEMETA_STRATEGY[iri["schema:Action"]] = { +CODEMETA_STRATEGY[iri["schema:WebPageElement"]] = {**CODEMETA_STRATEGY[iri["schema:CreativeWork"]]} + +CODEMETA_STRATEGY[iri["schema:WebSite"]] = {**CODEMETA_STRATEGY[iri["schema:CreativeWork"]]} + + + +CODEMETA_STRATEGY[iri["schema:Event"]] = { **CODEMETA_STRATEGY[iri["schema:Thing"]], - iri["schema:agent"]: None, # FIXME: troublesome Organization or Person - iri["schema:location"]: None, # FIXME: troublesome Place or PostalAddress or VirtualLocation - iri["schema:participant"]: None, # FIXME: troublesome Organization or Person - iri["schema:provider"]: None # FIXME: troublesome Organization or Person + iri["schema:actor"]: ACTIONS["PerformingGroupOrPerson"], + iri["schema:attendee"]: ACTIONS["OrganizationOrPerson"], + iri["schema:composer"]: ACTIONS["OrganizationOrPerson"], + iri["schema:contributor"]: ACTIONS["OrganizationOrPerson"], + iri["schema:dircetor"]: ACTIONS["Person"], + iri["schema:duration"]: ACTIONS["DurationOrQuantitativeValue"], + iri["schema:funder"]: ACTIONS["OrganizationOrPerson"], + iri["schema:location"]: ACTIONS["PlaceOrPostalAddressOrVirtualLocation"], + iri["schema:offers"]: ACTIONS["DemandOrOffer"], + iri["schema:organizer"]: ACTIONS["OrganizationOrPerson"], + iri["schema:performer"]: ACTIONS["OrganizationOrPerson"], + iri["schema:sponsor"]: ACTIONS["OrganizationOrPerson"], + iri["schema:translator"]: ACTIONS["OrganizationOrPerson"] } -CODEMETA_STRATEGY[iri["schema:Intangible"]] = {**CODEMETA_STRATEGY[iri["schema:Thing"]]} -CODEMETA_STRATEGY[iri["schema:Rating"]] = { - **CODEMETA_STRATEGY[iri["schema:Intangible"]], - iri["schema:author"]: None # FIXME: troublesome Organization or Person + +CODEMETA_STRATEGY[iri["schema:PublicationEvent"]] = { + **CODEMETA_STRATEGY[iri["schema:Event"]], + iri["schema:publishedBy"]: ACTIONS["OrganizationOrPerson"] } -CODEMETA_STRATEGY[iri["schema:AggregateRating"]] = {**CODEMETA_STRATEGY[iri["schema:Rating"]]} + + + +CODEMETA_STRATEGY[iri["schema:Intangible"]] = {**CODEMETA_STRATEGY[iri["schema:Thing"]]} + + CODEMETA_STRATEGY[iri["schema:AlignmentObject"]] = {**CODEMETA_STRATEGY[iri["schema:Intangible"]]} + CODEMETA_STRATEGY[iri["schema:Audience"]] = {**CODEMETA_STRATEGY[iri["schema:Intangible"]]} + +CODEMETA_STRATEGY[iri["schema:Brand"]] = {**CODEMETA_STRATEGY[iri["schema:Intangible"]]} + +CODEMETA_STRATEGY[iri["schema:BroadcastChannel"]] = {**CODEMETA_STRATEGY[iri["schema:Intangible"]]} + +CODEMETA_STRATEGY[iri["schema:BroadcastFrequencySpecification"]] = {**CODEMETA_STRATEGY[iri["schema:Intangible"]]} + +CODEMETA_STRATEGY[iri["schema:Class"]] = { + **CODEMETA_STRATEGY[iri["schema:Intangible"]], + iri["schema:supersededBy"]: ACTIONS["ClassOrEnumeration"] +} + CODEMETA_STRATEGY[iri["schema:ComputerLanguage"]] = {**CODEMETA_STRATEGY[iri["schema:Intangible"]]} -CODEMETA_STRATEGY[iri["schema:Series"]] = {**CODEMETA_STRATEGY[iri["schema:Intangible"]]} + +CODEMETA_STRATEGY[iri["schema:ConstraintNode"]] = {**CODEMETA_STRATEGY[iri["schema:Intangible"]]} +CODEMETA_STRATEGY[iri["schema:StatisticalVariable"]] = {**CODEMETA_STRATEGY[iri["schema:ConstraintNode"]]} + CODEMETA_STRATEGY[iri["schema:DefinedTerm"]] = {**CODEMETA_STRATEGY[iri["schema:Intangible"]]} CODEMETA_STRATEGY[iri["schema:CategoryCode"]] = {**CODEMETA_STRATEGY[iri["schema:DefinedTerm"]]} + CODEMETA_STRATEGY[iri["schema:Demand"]] = { **CODEMETA_STRATEGY[iri["schema:Intangible"]], - iri["schema:acceptedPaymentMethod"]: None, # FIXME: troublesome LoanOrCredit or PaymentMethod - iri["schema:areaServed"]: None, # FIXME: troublesome AdministrativeArea or GeoShape or Place - iri["schema:eligibleRegion"]: None, # FIXME: troublesome GeoShape or Place - iri["schema:ineligibleRegion"]: None, # FIXME: troublesome GeoShape or Place - iri[ - "schema:itemOffered" - ]: None, # FIXME: troublesome AggregateOffer or CreativeWork or Event or MenuItem or Product or Service or Trip - iri["schema:seller"]: None # FIXME: troublesome Organization or Person -} -CODEMETA_STRATEGY[iri["schema:Offer"]] = { - **CODEMETA_STRATEGY[iri["schema:Intangible"]], - iri["schema:acceptedPaymentMethod"]: None, # FIXME: troublesome LoanOrCredit or PaymentMethod - iri["schema:areaServed"]: None, # FIXME: troublesome AdministrativeArea or GeoShape or Place - iri["schema:category"]: None, # FIXME: troublesome CategoryCode or Thing - iri["schema:eligibleRegion"]: None, # FIXME: troublesome GeoShape or Place - iri["schema:ineligibleRegion"]: None, # FIXME: troublesome GeoShape or Place - iri[ - "schema:itemOffered" - ]: None, # FIXME: troublesome AggregateOffer or CreativeWork or Event or MenuItem or Product or Service or Trip - iri["schema:leaseLength"]: None, # FIXME: troublesome Duration or QuantitativeValue - iri["schema:offeredBy"]: None, # FIXME: troublesome Organization or Person - iri["schema:seller"]: None, # FIXME: troublesome Organization or Person -} -CODEMETA_STRATEGY[iri["schema:AggregateOffer"]] = { - **CODEMETA_STRATEGY[iri["schema:Offer"]], - iri["schema:offers"]: None # FIXME: troublesome Demand or Offer + iri["schema:acceptedPaymentMethod"]: ACTIONS["LoanOrCreditOrPaymentMethod"], + iri["schema:areaServed"]: ACTIONS["AdministrativeAreaOrGeoShapeOrPlace"], + iri["schema:eligibleRegion"]: ACTIONS["GeoShapeOrPlace"], + iri["schema:ineligibleRegion"]: ACTIONS["GeoShapeOrPlace"], + iri["schema:itemOffered"]: ACTIONS["AggregateOfferOrCreativeWorkOrEventOrMenuItemOrProductOrServiceOrTrip"], + iri["schema:seller"]: ACTIONS["OrganizationOrPerson"] } -CODEMETA_STRATEGY[iri["schema:Quantity"]] = {**CODEMETA_STRATEGY[iri["schema:Intangible"]]} -CODEMETA_STRATEGY[iri["schema:Duration"]] = {**CODEMETA_STRATEGY[iri["schema:Quantity"]]} -CODEMETA_STRATEGY[iri["schema:Energy"]] = {**CODEMETA_STRATEGY[iri["schema:Quantity"]]} -CODEMETA_STRATEGY[iri["schema:Mass"]] = {**CODEMETA_STRATEGY[iri["schema:Quantity"]]} + +CODEMETA_STRATEGY[iri["schema:EnergyConsumptionDetails"]] = {**CODEMETA_STRATEGY[iri["schema:Intangible"]]} + CODEMETA_STRATEGY[iri["schema:EntryPoint"]] = {**CODEMETA_STRATEGY[iri["schema:Intangible"]]} -CODEMETA_STRATEGY[iri["schema:StructuredValue"]] = {**CODEMETA_STRATEGY[iri["schema:Intangible"]]} -CODEMETA_STRATEGY[iri["schema:GeoCoordinates"]] = {**CODEMETA_STRATEGY[iri["schema:StructuredValue"]]} -CODEMETA_STRATEGY[iri["schema:GeoShape"]] = {**CODEMETA_STRATEGY[iri["schema:StructuredValue"]]} -CODEMETA_STRATEGY[iri["schema:NutritionInformation"]] = {**CODEMETA_STRATEGY[iri["schema:StructuredValue"]]} -CODEMETA_STRATEGY[iri["schema:MonetaryAmount"]] = {**CODEMETA_STRATEGY[iri["schema:StructuredValue"]]} -CODEMETA_STRATEGY[iri["schema:Distance"]] = {**CODEMETA_STRATEGY[iri["schema:StructuredValue"]]} -CODEMETA_STRATEGY[iri["schema:PostalCodeRangeSpecification"]] = {**CODEMETA_STRATEGY[iri["schema:StructuredValue"]]} -CODEMETA_STRATEGY[iri["schema:OpeningHoursSpecification"]] = {**CODEMETA_STRATEGY[iri["schema:StructuredValue"]]} -CODEMETA_STRATEGY[iri["schema:RepaymentSpecification"]] = {**CODEMETA_STRATEGY[iri["schema:StructuredValue"]]} -CODEMETA_STRATEGY[iri["schema:WarrantyPromise"]] = {**CODEMETA_STRATEGY[iri["schema:StructuredValue"]]} -CODEMETA_STRATEGY[iri["schema:ShippingRateSettings"]] = { - **CODEMETA_STRATEGY[iri["schema:StructuredValue"]], - iri["schema:shippingRate"]: None # FIXME: troublesome MonetaryAmount or ShippingRateSettings -} -CODEMETA_STRATEGY[iri["schema:InteractionCounter"]] = { - **CODEMETA_STRATEGY[iri["schema:StructuredValue"]], - iri["schema:interactionService"]: None, # FIXME: troublesome SoftwareApplication or WebSite - iri["schema:location"]: None # FIXME: troublesome Place or PostalAddress or VirtualLocation -} -CODEMETA_STRATEGY[iri["schema:PropertyValue"]] = { - **CODEMETA_STRATEGY[iri["schema:StructuredValue"]], - iri["schema:valueReference"]: None # FIXME: troublesome DefinedTerm or Enumeration or PropertyValue - # or QualitativeValue or QuantitativeValue or StructuredValue -} -CODEMETA_STRATEGY[iri["schema:ContactPoint"]] = { - **CODEMETA_STRATEGY[iri["schema:StructuredValue"]], - iri["schema:areaServed"]: None, # FIXME: troublesome AdministrativeArea or GeoShape or Place -} -CODEMETA_STRATEGY[iri["schema:PostalAddress"]] = {**CODEMETA_STRATEGY[iri["schema:ContactPoint"]]} -CODEMETA_STRATEGY[iri["schema:OfferShippingDetails"]] = { - **CODEMETA_STRATEGY[iri["schema:StructuredValue"]], - iri["schema:depth"]: None, # FIXME: troublesome Distance or QuantitativeValue - iri["schema:height"]: None, # FIXME: troublesome Distance or QuantitativeValue - iri["schema:shippingRate"]: None, # FIXME: troublesome MonetaryAmount or ShippingRateSettings - iri["schema:weight"]: None, # FIXME: troublesome Mass or QuantitativeValue - iri["schema:width"]: None # FIXME: troublesome Distance or QuantitativeValue -} -CODEMETA_STRATEGY[iri["schema:ShippingDeliveryTime"]] = { - **CODEMETA_STRATEGY[iri["schema:StructuredValue"]], - iri["schema:handlingTime"]: None, # FIXME: troublesome QuantitativeValue or ServicePeriod - iri["schema:transitTime"]: None # FIXME: troublesome QuantitativeValue or ServicePeriod -} -CODEMETA_STRATEGY[iri["schema:TypeAndQuantityNode"]] = { - **CODEMETA_STRATEGY[iri["schema:StructuredValue"]], - iri["schema:typeOfGood"]: None # FIXME: troublesome Product or Service -} -CODEMETA_STRATEGY[iri["schema:ServicePeriod"]] = { - **CODEMETA_STRATEGY[iri["schema:StructuredValue"]], - iri["schema:duration"]: None # FIXME: troublesome Duration or QuantitativeValue -} -CODEMETA_STRATEGY[iri["schema:QuantitativeValue"]] = { - **CODEMETA_STRATEGY[iri["schema:StructuredValue"]], - iri["schema:valueReference"]: None # FIXME: troublesome DefinedTerm or Enumeration or PropertyValue - # or QualitativeValue or QuantitativeValue or StructuredValue -} -CODEMETA_STRATEGY[iri["schema:ShippingService"]] = { - **CODEMETA_STRATEGY[iri["schema:StructuredValue"]], - iri["schema:handlingTime"]: None # FIXME: troublesome QuantitativeValue or ServicePeriod -} -CODEMETA_STRATEGY[iri["schema:ShippingConditions"]] = { - **CODEMETA_STRATEGY[iri["schema:StructuredValue"]], - iri["schema:depth"]: None, # FIXME: troublesome Distance or QuantitativeValue - iri["schema:height"]: None, # FIXME: troublesome Distance or QuantitativeValue - iri["schema:shippingRate"]: None, # FIXME: troublesome MonetaryAmount or ShippingRateSettings - iri["schema:transitTime"]: None, # FIXME: troublesome QuantitativeValue or ServicePeriod - iri["schema:weight"]: None, # FIXME: troublesome Mass or QuantitativeValue - iri["schema:width"]: None # FIXME: troublesome Distance or QuantitativeValue -} -CODEMETA_STRATEGY[iri["schema:QuantitativeValueDistribution"]] = { - **CODEMETA_STRATEGY[iri["schema:StructuredValue"]], - iri["schema:duration"]: None # FIXME: troublesome Duration or QuantitativeValue -} -CODEMETA_STRATEGY[iri["schema:MonetaryAmountDistribution"]] = { - **CODEMETA_STRATEGY[iri["schema:QuantitativeValueDistribution"]] -} -CODEMETA_STRATEGY[iri["schema:PriceSpecification"]] = {**CODEMETA_STRATEGY[iri["schema:StructuredValue"]]} -CODEMETA_STRATEGY[iri["schema:UnitPriceSpecification"]] = { - **CODEMETA_STRATEGY[iri["schema:PriceSpecification"]], - iri["schema:billingDuration"]: None, # FIXME: troublesome Duration or QuantitativeValue + +CODEMETA_STRATEGY[iri["schema:Enumeration"]] = { + **CODEMETA_STRATEGY[iri["schema:Intangible"]], + iri["schema:supersededBy"]: ACTIONS["ClassOrEnumeration"] } -CODEMETA_STRATEGY[iri["schema:DeliveryChargeSpecification"]] = { - **CODEMETA_STRATEGY[iri["schema:PriceSpecification"]], - iri["schema:areaServed"]: None, # FIXME: troublesome AdministrativeArea or GeoShape or Place - iri["schema:eligibleRegion"]: None, # FIXME: troublesome GeoShape or Place - iri["schema:ineligibleRegion"]: None # FIXME: troublesome GeoShape or Place +CODEMETA_STRATEGY[iri["schema:QualitativeValue"]] = { + **CODEMETA_STRATEGY[iri["schema:Enumeration"]], + iri[ + "schema:valueReference" + ]: ACTIONS["DefinedTermOrEnumerationOrPropertyValueOrQualitativeValueOrQuantitativeValueOrStructuredValue"] } -CODEMETA_STRATEGY[iri["schema:LocationFeatureSpecification"]] = {**CODEMETA_STRATEGY[iri["schema:PropertyValue"]]} +CODEMETA_STRATEGY[iri["schema:SizeSpecification"]] = {**CODEMETA_STRATEGY[iri["schema:QualitativeValue"]]} + CODEMETA_STRATEGY[iri["schema:GeospatialGeometry"]] = { **CODEMETA_STRATEGY[iri["schema:Intangible"]], - iri["schema:geoContains"]: None, # FIXME: troublesome GeospatialGeometry or Place - iri["schema:geoCoveredBy"]: None, # FIXME: troublesome GeospatialGeometry or Place - iri["schema:geoCovers"]: None, # FIXME: troublesome GeospatialGeometry or Place - iri["schema:geoCrosses"]: None, # FIXME: troublesome GeospatialGeometry or Place - iri["schema:geoDisjoint"]: None, # FIXME: troublesome GeospatialGeometry or Place - iri["schema:geoEquals"]: None, # FIXME: troublesome GeospatialGeometry or Place - iri["schema:geoIntersects"]: None, # FIXME: troublesome GeospatialGeometry or Place - iri["schema:geoOverlaps"]: None, # FIXME: troublesome GeospatialGeometry or Place - iri["schema:geoTouches"]: None, # FIXME: troublesome GeospatialGeometry or Place - iri["schema:geoWithin"]: None # FIXME: troublesome GeospatialGeometry or Place + iri["schema:geoContains"]: ACTIONS["GeospatialGeometryOrPlace"], + iri["schema:geoCoveredBy"]: ACTIONS["GeospatialGeometryOrPlace"], + iri["schema:geoCovers"]: ACTIONS["GeospatialGeometryOrPlace"], + iri["schema:geoCrosses"]: ACTIONS["GeospatialGeometryOrPlace"], + iri["schema:geoDisjoint"]: ACTIONS["GeospatialGeometryOrPlace"], + iri["schema:geoEquals"]: ACTIONS["GeospatialGeometryOrPlace"], + iri["schema:geoIntersects"]: ACTIONS["GeospatialGeometryOrPlace"], + iri["schema:geoOverlaps"]: ACTIONS["GeospatialGeometryOrPlace"], + iri["schema:geoTouches"]: ACTIONS["GeospatialGeometryOrPlace"], + iri["schema:geoWithin"]: ACTIONS["GeospatialGeometryOrPlace"] } + CODEMETA_STRATEGY[iri["schema:Grant"]] = { **CODEMETA_STRATEGY[iri["schema:Intangible"]], - iri["schema:fundedItem"]: None, # FIXME: troublesome BioChemEntity or CreativeWork or Event or MedicalEntity - # or Organization or Person or Product - iri["schema:funder"]: None, # FIXME: troublesome Organization or Person - iri["schema:sponsor"]: None # FIXME: troublesome Organization or Person + iri[ + "schema:fundedItem" + ]: ACTIONS["BioChemEntityOrCreativeWorkOrEventOrMedicalEntityOrOrganizationOrPersonOrProduct"], + iri["schema:funder"]: ACTIONS["OrganizationOrPerson"], + iri["schema:sponsor"]: ACTIONS["OrganizationOrPerson"] } + +CODEMETA_STRATEGY[iri["schema:HealthInsurancePlan"]] = {**CODEMETA_STRATEGY[iri["schema:Intangible"]]} + +CODEMETA_STRATEGY[iri["schema:HealthPlanCostSharingSpecification"]] = {**CODEMETA_STRATEGY[iri["schema:Intangible"]]} + +CODEMETA_STRATEGY[iri["schema:HealthPlanFormulary"]] = {**CODEMETA_STRATEGY[iri["schema:Intangible"]]} + +CODEMETA_STRATEGY[iri["schema:HealthPlanNetwork"]] = {**CODEMETA_STRATEGY[iri["schema:Intangible"]]} + CODEMETA_STRATEGY[iri["schema:ItemList"]] = { **CODEMETA_STRATEGY[iri["schema:Intangible"]], - iri["schema:itemListElement"]: None # FIXME: troublesome ListItem or Thing + iri["schema:itemListElement"]: ACTIONS["ListItemOrThing"] } CODEMETA_STRATEGY[iri["schema:OfferCatalog"]] = {**CODEMETA_STRATEGY[iri["schema:ItemList"]]} CODEMETA_STRATEGY[iri["schema:BreadcrumbList"]] = {**CODEMETA_STRATEGY[iri["schema:ItemList"]]} + CODEMETA_STRATEGY[iri["schema:Language"]] = {**CODEMETA_STRATEGY[iri["schema:Intangible"]]} -CODEMETA_STRATEGY[iri["schema:Service"]] = { - **CODEMETA_STRATEGY[iri["schema:Intangible"]], - iri["schema:areaServed"]: None, # FIXME: troublesome AdministrativeArea or GeoShape or Place - iri["schema:brand"]: None, # FIXME: troublesome Brand or Organization - iri["schema:broker"]: None, # FIXME: troublesome Organization or Person - iri["schema:category"]: None, # FIXME: troublesome CategoryCode or Thing - iri["schema:isRelatedTo"]: None, # FIXME: troublesome Product or Service - iri["schema:isSimilarTo"]: None, # FIXME: troublesome Product or Service - iri["schema:offers Demand"]: None, # FIXME: troublesome or Offer - iri["schema:provider"]: None # FIXME: troublesome Organization or Person -} -CODEMETA_STRATEGY[iri["schema:FinancialProduct"]] = {**CODEMETA_STRATEGY[iri["schema:Service"]]} -CODEMETA_STRATEGY[iri["schema:BroadcastService"]] = {**CODEMETA_STRATEGY[iri["schema:Service"]]} -CODEMETA_STRATEGY[iri["schema:CableOrSatelliteService"]] = {**CODEMETA_STRATEGY[iri["schema:Service"]]} -CODEMETA_STRATEGY[iri["schema:LoanOrCredit"]] = {**CODEMETA_STRATEGY[iri["schema:FinancialProduct"]]} -CODEMETA_STRATEGY[iri["schema:MediaSubscription"]] = {**CODEMETA_STRATEGY[iri["schema:Intangible"]]} -CODEMETA_STRATEGY[iri["schema:Brand"]] = {**CODEMETA_STRATEGY[iri["schema:Intangible"]]} -CODEMETA_STRATEGY[iri["schema:HealthInsurancePlan"]] = {**CODEMETA_STRATEGY[iri["schema:Intangible"]]} + CODEMETA_STRATEGY[iri["schema:ListItem"]] = {**CODEMETA_STRATEGY[iri["schema:Intangible"]]} CODEMETA_STRATEGY[iri["schema:HowToItem"]] = {**CODEMETA_STRATEGY[iri["schema:ListItem"]]} CODEMETA_STRATEGY[iri["schema:HowToSupply"]] = {**CODEMETA_STRATEGY[iri["schema:HowToItem"]]} CODEMETA_STRATEGY[iri["schema:HowToTool"]] = {**CODEMETA_STRATEGY[iri["schema:HowToItem"]]} -CODEMETA_STRATEGY[iri["schema:Enumeration"]] = { + +CODEMETA_STRATEGY[iri["schema:MediaSubscription"]] = {**CODEMETA_STRATEGY[iri["schema:Intangible"]]} + +CODEMETA_STRATEGY[iri["schema:MemberProgram"]] = {**CODEMETA_STRATEGY[iri["schema:Intangible"]]} + +CODEMETA_STRATEGY[iri["schema:MemberProgramTier"]] = { **CODEMETA_STRATEGY[iri["schema:Intangible"]], - iri["schema:supersededBy"]: None # FIXME: troublesome Class or Enumeration -} -CODEMETA_STRATEGY[iri["schema:QualitativeValue"]] = { - **CODEMETA_STRATEGY[iri["schema:Enumeration"]], - iri["schema:valueReference"]: None # FIXME: troublesome DefinedTerm or Enumeration or PropertyValue - # or QualitativeValue or QuantitativeValue or StructuredValue + iri["schema:hasTierRequirement"]: ACTIONS["CreditCardOrMonetaryAmountOrUnitPriceSpecification"] } -CODEMETA_STRATEGY[iri["schema:SizeSpecification"]] = {**CODEMETA_STRATEGY[iri["schema:QualitativeValue"]]} -CODEMETA_STRATEGY[iri["schema:Class"]] = { + +CODEMETA_STRATEGY[iri["schema:MenuItem"]] = { **CODEMETA_STRATEGY[iri["schema:Intangible"]], - iri["schema:supersededBy"]: None # FIXME: troublesome Class or Enumeration + iri["schema:menuAddOn"]: ACTIONS["MenuItemOrMenuSection"], + iri["schema:offers"]: ACTIONS["DemandOrOffer"] } -CODEMETA_STRATEGY[iri["schema:HealthPlanFormulary"]] = {**CODEMETA_STRATEGY[iri["schema:Intangible"]]} -CODEMETA_STRATEGY[iri["schema:HealthPlanCostSharingSpecification"]] = {**CODEMETA_STRATEGY[iri["schema:Intangible"]]} -CODEMETA_STRATEGY[iri["schema:HealthPlanNetwork"]] = {**CODEMETA_STRATEGY[iri["schema:Intangible"]]} -CODEMETA_STRATEGY[iri["schema:MemberProgramTier"]] = { + +CODEMETA_STRATEGY[iri["schema:MerchantReturnPolicy"]] = {**CODEMETA_STRATEGY[iri["schema:Intangible"]]} + +CODEMETA_STRATEGY[iri["schema:MerchantReturnPolicySeasonalOverride"]] = {**CODEMETA_STRATEGY[iri["schema:Intangible"]]} + +CODEMETA_STRATEGY[iri["schema:Occupation"]] = { **CODEMETA_STRATEGY[iri["schema:Intangible"]], - iri["schema:hasTierRequirement"]: None # FIXME: troublesome CreditCard or MonetaryAmount or UnitPriceSpecification + iri["schema:estimatedSalary"]: ACTIONS["MonetaryAmountOrMonetaryAmountDistribution"] } -CODEMETA_STRATEGY[iri["schema:MemberProgram"]] = {**CODEMETA_STRATEGY[iri["schema:Intangible"]]} -CODEMETA_STRATEGY[iri["schema:MenuItem"]] = { + +CODEMETA_STRATEGY[iri["schema:OccupationalExperienceRequirements"]] = {**CODEMETA_STRATEGY[iri["schema:Intangible"]]} + +CODEMETA_STRATEGY[iri["schema:Offer"]] = { **CODEMETA_STRATEGY[iri["schema:Intangible"]], - iri["schema:menuAddOn"]: None, # FIXME: troublesome MenuItem or MenuSection - iri["schema:offers"]: None # FIXME: troublesome Demand or Offer + iri["schema:acceptedPaymentMethod"]: ACTIONS["LoanOrCreditOrPaymentMethod"], + iri["schema:areaServed"]: ACTIONS["AdministrativeAreaOrGeoShapeOrPlace"], + iri["schema:category"]: ACTIONS["CategoryCodeOrThing"], + iri["schema:eligibleRegion"]: ACTIONS["GeoShapeOrPlace"], + iri["schema:ineligibleRegion"]: ACTIONS["GeoShapeOrPlace"], + iri["schema:itemOffered"]: ACTIONS["AggregateOfferOrCreativeWorkOrEventOrMenuItemOrProductOrServiceOrTrip"], + iri["schema:leaseLength"]: ACTIONS["DurationOrQuantitativeValue"], + iri["schema:offeredBy"]: ACTIONS["OrganizationOrPerson"], + iri["schema:seller"]: ACTIONS["OrganizationOrPerson"] } -CODEMETA_STRATEGY[iri["schema:MerchantReturnPolicy"]] = {**CODEMETA_STRATEGY[iri["schema:Intangible"]]} -CODEMETA_STRATEGY[iri["schema:MerchantReturnPolicySeasonalOverride"]] = {**CODEMETA_STRATEGY[iri["schema:Intangible"]]} -CODEMETA_STRATEGY[iri["schema:SpeakableSpecification"]] = {**CODEMETA_STRATEGY[iri["schema:Intangible"]]} +CODEMETA_STRATEGY[iri["schema:AggregateOffer"]] = { + **CODEMETA_STRATEGY[iri["schema:Offer"]], + iri["schema:offers"]: ACTIONS["DemandOrOffer"] +} + CODEMETA_STRATEGY[iri["schema:PaymentMethod"]] = {**CODEMETA_STRATEGY[iri["schema:Intangible"]]} + CODEMETA_STRATEGY[iri["schema:ProgramMembership"]] = { **CODEMETA_STRATEGY[iri["schema:Intangible"]], - iri["schema:member"]: None # FIXME: troublesome Organization or Person + iri["schema:member"]: ACTIONS["OrganizationOrPerson"] } -CODEMETA_STRATEGY[iri["schema:Schedule"]] = { + +CODEMETA_STRATEGY[iri["schema:Property"]] = { **CODEMETA_STRATEGY[iri["schema:Intangible"]], - iri["schema:duration"]: None # FIXME: troublesome Duration or QuantitativeValue + iri["schema:supersededBy"]: ACTIONS["ClassOrEnumerationOrProperty"] } -CODEMETA_STRATEGY[iri["schema:ServiceChannel"]] = {**CODEMETA_STRATEGY[iri["schema:Intangible"]]} -CODEMETA_STRATEGY[iri["schema:VirtualLocation"]] = {**CODEMETA_STRATEGY[iri["schema:Intangible"]]} -CODEMETA_STRATEGY[iri["schema:Occupation"]] = { + +CODEMETA_STRATEGY[iri["schema:Quantity"]] = {**CODEMETA_STRATEGY[iri["schema:Intangible"]]} +CODEMETA_STRATEGY[iri["schema:Duration"]] = {**CODEMETA_STRATEGY[iri["schema:Quantity"]]} +CODEMETA_STRATEGY[iri["schema:Energy"]] = {**CODEMETA_STRATEGY[iri["schema:Quantity"]]} +CODEMETA_STRATEGY[iri["schema:Mass"]] = {**CODEMETA_STRATEGY[iri["schema:Quantity"]]} + +CODEMETA_STRATEGY[iri["schema:Rating"]] = { **CODEMETA_STRATEGY[iri["schema:Intangible"]], - iri["schema:estimatedSalary"]: None # FIXME: troublesome MonetaryAmount or MonetaryAmountDistribution + iri["schema:author"]: ACTIONS["OrganizationOrPerson"] } -CODEMETA_STRATEGY[iri["schema:EnergyConsumptionDetails"]] = {**CODEMETA_STRATEGY[iri["schema:Intangible"]]} -CODEMETA_STRATEGY[iri["schema:OccupationalExperienceRequirements"]] = {**CODEMETA_STRATEGY[iri["schema:Intangible"]]} -CODEMETA_STRATEGY[iri["schema:AlignmentObject"]] = {**CODEMETA_STRATEGY[iri["schema:Intangible"]]} -CODEMETA_STRATEGY[iri["schema:BroadcastFrequencySpecification"]] = {**CODEMETA_STRATEGY[iri["schema:Intangible"]]} -CODEMETA_STRATEGY[iri["schema:BroadcastChannel"]] = {**CODEMETA_STRATEGY[iri["schema:Intangible"]]} -CODEMETA_STRATEGY[iri["schema:ConstraintNode"]] = {**CODEMETA_STRATEGY[iri["schema:Intangible"]]} -CODEMETA_STRATEGY[iri["schema:StatisticalVariable"]] = {**CODEMETA_STRATEGY[iri["schema:ConstraintNode"]]} -CODEMETA_STRATEGY[iri["schema:Property"]] = { +CODEMETA_STRATEGY[iri["schema:AggregateRating"]] = {**CODEMETA_STRATEGY[iri["schema:Rating"]]} + +CODEMETA_STRATEGY[iri["schema:Schedule"]] = { **CODEMETA_STRATEGY[iri["schema:Intangible"]], - iri["schema:supersededBy"]: None, # FIXME: troublesome Class or Enumeration or Property + iri["schema:duration"]: ACTIONS["DurationOrQuantitativeValue"] } -CODEMETA_STRATEGY[iri["schema:Place"]] = { - **CODEMETA_STRATEGY[iri["schema:Thing"]], - iri["schema:geo"]: None, # FIXME: troublesome GeoCoordinates or GeoShape - iri["schema:geoContains"]: None, # FIXME: troublesome GeospatialGeometry or Place - iri["schema:geoCoveredBy"]: None, # FIXME: troublesome GeospatialGeometry or Place - iri["schema:geoCovers"]: None, # FIXME: troublesome GeospatialGeometry or Place - iri["schema:geoCrosses"]: None, # FIXME: troublesome GeospatialGeometry or Place - iri["schema:geoDisjoint"]: None, # FIXME: troublesome GeospatialGeometry or Place - iri["schema:geoEquals"]: None, # FIXME: troublesome GeospatialGeometry or Place - iri["schema:geoIntersects"]: None, # FIXME: troublesome GeospatialGeometry or Place - iri["schema:geoOverlaps"]: None, # FIXME: troublesome GeospatialGeometry or Place - iri["schema:geoTouches"]: None, # FIXME: troublesome GeospatialGeometry or Place - iri["schema:geoWithin"]: None, # FIXME: troublesome GeospatialGeometry or Place - iri["schema:photo"]: None # FIXME: troublesome ImageObject or Photograph -} -CODEMETA_STRATEGY[iri["schema:AdministrativeArea"]] = {**CODEMETA_STRATEGY[iri["schema:Place"]]} -CODEMETA_STRATEGY[iri["schema:Country"]] = {**CODEMETA_STRATEGY[iri["schema:AdministrativeArea"]]} -CODEMETA_STRATEGY[iri["schema:CivicStructure"]] = {**CODEMETA_STRATEGY[iri["schema:Place"]]} +CODEMETA_STRATEGY[iri["schema:Series"]] = {**CODEMETA_STRATEGY[iri["schema:Intangible"]]} -CODEMETA_STRATEGY[iri["schema:CreativeWorkSeries"]] = { - **CODEMETA_STRATEGY[iri["schema:CreativeWork"]], - **CODEMETA_STRATEGY[iri["schema:Series"]] +CODEMETA_STRATEGY[iri["schema:Service"]] = { + **CODEMETA_STRATEGY[iri["schema:Intangible"]], + iri["schema:areaServed"]: ACTIONS["AdministrativeAreaOrGeoShapeOrPlace"], + iri["schema:brand"]: ACTIONS["BrandOrOrganization"], + iri["schema:broker"]: ACTIONS["OrganizationOrPerson"], + iri["schema:category"]: ACTIONS["CategoryCodeOrThing"], + iri["schema:isRelatedTo"]: ACTIONS["ProductOrService"], + iri["schema:isSimilarTo"]: ACTIONS["ProductOrService"], + iri["schema:offers"]: ACTIONS["DemandOrOffer"], + iri["schema:provider"]: ACTIONS["OrganizationOrPerson"] } +CODEMETA_STRATEGY[iri["schema:BroadcastService"]] = {**CODEMETA_STRATEGY[iri["schema:Service"]]} +CODEMETA_STRATEGY[iri["schema:CableOrSatelliteService"]] = {**CODEMETA_STRATEGY[iri["schema:Service"]]} +CODEMETA_STRATEGY[iri["schema:FinancialProduct"]] = {**CODEMETA_STRATEGY[iri["schema:Service"]]} +CODEMETA_STRATEGY[iri["schema:LoanOrCredit"]] = {**CODEMETA_STRATEGY[iri["schema:FinancialProduct"]]} -CODEMETA_STRATEGY[iri["schema:HowToSection"]] = { - **CODEMETA_STRATEGY[iri["schema:CreativeWork"]], - **CODEMETA_STRATEGY[iri["schema:ItemList"]], - **CODEMETA_STRATEGY[iri["schema:ListItem"]] +CODEMETA_STRATEGY[iri["schema:ServiceChannel"]] = {**CODEMETA_STRATEGY[iri["schema:Intangible"]]} + +CODEMETA_STRATEGY[iri["schema:SpeakableSpecification"]] = {**CODEMETA_STRATEGY[iri["schema:Intangible"]]} + +CODEMETA_STRATEGY[iri["schema:StructuredValue"]] = {**CODEMETA_STRATEGY[iri["schema:Intangible"]]} +CODEMETA_STRATEGY[iri["schema:ContactPoint"]] = { + **CODEMETA_STRATEGY[iri["schema:StructuredValue"]], + iri["schema:areaServed"]: ACTIONS["AdministrativeAreaOrGeoShapeOrPlace"] } -CODEMETA_STRATEGY[iri["schema:HowToStep"]] = { - **CODEMETA_STRATEGY[iri["schema:CreativeWork"]], - **CODEMETA_STRATEGY[iri["schema:ItemList"]], - **CODEMETA_STRATEGY[iri["schema:ListItem"]] +CODEMETA_STRATEGY[iri["schema:PostalAddress"]] = {**CODEMETA_STRATEGY[iri["schema:ContactPoint"]]} +CODEMETA_STRATEGY[iri["schema:Distance"]] = {**CODEMETA_STRATEGY[iri["schema:StructuredValue"]]} +CODEMETA_STRATEGY[iri["schema:GeoCoordinates"]] = {**CODEMETA_STRATEGY[iri["schema:StructuredValue"]]} +CODEMETA_STRATEGY[iri["schema:GeoShape"]] = {**CODEMETA_STRATEGY[iri["schema:StructuredValue"]]} +CODEMETA_STRATEGY[iri["schema:InteractionCounter"]] = { + **CODEMETA_STRATEGY[iri["schema:StructuredValue"]], + iri["schema:interactionService"]: ACTIONS["SoftwareApplicationOrWebSite"], + iri["schema:location"]: ACTIONS["PlaceOrPostalAddressOrVirtualLocation"] } - -CODEMETA_STRATEGY[iri["schema:Event"]] = { - **CODEMETA_STRATEGY[iri["schema:Thing"]], - iri["schema:actor"]: None, # FIXME: troublesome PerformingGroup or Person - iri["schema:attendee"]: None, # FIXME: troublesome Organization or Person - iri["schema:composer"]: None, # FIXME: troublesome Organization or Person - iri["schema:contributor"]: None, # FIXME: troublesome Organization or Person - iri["schema:dircetor"]: ACTIONS["merge_match_person"], - iri["schema:duration"]: None, # FIXME: troublesome Duration or QuantitativeValue - iri["schema:funder"]: None, # FIXME: troublesome Organization or Person - iri["schema:location"]: None, # FIXME: troublesome Place or PostalAddress or VirtualLocation - iri["schema:offers"]: None, # FIXME: troublesome Demand or Offer - iri["schema:organizer"]: None, # FIXME: troublesome Organization or Person - iri["schema:performer"]: None, # FIXME: troublesome Organization or Person - iri["schema:sponsor"]: None, # FIXME: troublesome Organization or Person - iri["schema:translator"]: None # FIXME: troublesome Organization or Person +CODEMETA_STRATEGY[iri["schema:MonetaryAmount"]] = {**CODEMETA_STRATEGY[iri["schema:StructuredValue"]]} +CODEMETA_STRATEGY[iri["schema:NutritionInformation"]] = {**CODEMETA_STRATEGY[iri["schema:StructuredValue"]]} +CODEMETA_STRATEGY[iri["schema:OfferShippingDetails"]] = { + **CODEMETA_STRATEGY[iri["schema:StructuredValue"]], + iri["schema:depth"]: ACTIONS["DistanceOrQuantitativeValue"], + iri["schema:height"]: ACTIONS["DistanceOrQuantitativeValue"], + iri["schema:shippingRate"]: ACTIONS["MonetaryAmountOrShippingRateSettings"], + iri["schema:weight"]:ACTIONS["MassOrQuantitativeValue"], + iri["schema:width"]: ACTIONS["DistanceOrQuantitativeValue"] } -CODEMETA_STRATEGY[iri["schema:PublicationEvent"]] = { - **CODEMETA_STRATEGY[iri["schema:Event"]], - iri["schema:publishedBy"]: None, # FIXME: troublesome Organization or Person +CODEMETA_STRATEGY[iri["schema:OpeningHoursSpecification"]] = {**CODEMETA_STRATEGY[iri["schema:StructuredValue"]]} +CODEMETA_STRATEGY[iri["schema:PostalCodeRangeSpecification"]] = {**CODEMETA_STRATEGY[iri["schema:StructuredValue"]]} +CODEMETA_STRATEGY[iri["schema:PriceSpecification"]] = {**CODEMETA_STRATEGY[iri["schema:StructuredValue"]]} +CODEMETA_STRATEGY[iri["schema:DeliveryChargeSpecification"]] = { + **CODEMETA_STRATEGY[iri["schema:PriceSpecification"]], + iri["schema:areaServed"]: ACTIONS["AdministrativeAreaOrGeoShapeOrPlace"], + iri["schema:eligibleRegion"]: ACTIONS["GeoShapeOrPlace"], + iri["schema:ineligibleRegion"]: ACTIONS["GeoShapeOrPlace"] } - -CODEMETA_STRATEGY[iri["schema:BioChemEntity"]] = { - **CODEMETA_STRATEGY[iri["schema:Thing"]], - iri["schema:associatedDisease"]: None, # FIXME: troublesome MedicalCondition or PropertyValue - iri["schema:hasMolecularFunction"]: None, # FIXME: troublesome DefinedTerm or PropertyValue - iri["schema:isInvolvedInBiologicalProcess"]: None, # FIXME: troublesome DefinedTerm or PropertyValue - iri["schema:isLocatedInSubcellularLocation"]: None, # FIXME: troublesome DefinedTerm or PropertyValue - iri["schema:taxonomicRange"]: None # FIXME: troublesome DefinedTerm or Taxon +CODEMETA_STRATEGY[iri["schema:UnitPriceSpecification"]] = { + **CODEMETA_STRATEGY[iri["schema:PriceSpecification"]], + iri["schema:billingDuration"]: ACTIONS["DurationOrQuantitativeValue"] } -CODEMETA_STRATEGY[iri["schema:Gene"]] = { - **CODEMETA_STRATEGY[iri["schema:BioChemEntity"]], +CODEMETA_STRATEGY[iri["schema:PropertyValue"]] = { + **CODEMETA_STRATEGY[iri["schema:StructuredValue"]], iri[ - "schema:expressedIn" - ]: None # FIXME: troublesome AnatomicalStructure or AnatomicalSystem or BioChemEntity or DefinedTerm + "schema:valueReference" + ]: ACTIONS["DefinedTermOrEnumerationOrPropertyValueOrQualitativeValueOrQuantitativeValueOrStructuredValue"] +} +CODEMETA_STRATEGY[iri["schema:LocationFeatureSpecification"]] = {**CODEMETA_STRATEGY[iri["schema:PropertyValue"]]} +CODEMETA_STRATEGY[iri["schema:QuantitativeValue"]] = { + **CODEMETA_STRATEGY[iri["schema:StructuredValue"]], + iri[ + "schema:valueReference" + ]: ACTIONS["DefinedTermOrEnumerationOrPropertyValueOrQualitativeValueOrQuantitativeValueOrStructuredValue"] +} +CODEMETA_STRATEGY[iri["schema:QuantitativeValueDistribution"]] = { + **CODEMETA_STRATEGY[iri["schema:StructuredValue"]], + iri["schema:duration"]: ACTIONS["DurationOrQuantitativeValue"] +} +CODEMETA_STRATEGY[iri["schema:MonetaryAmountDistribution"]] = { + **CODEMETA_STRATEGY[iri["schema:QuantitativeValueDistribution"]] +} +CODEMETA_STRATEGY[iri["schema:RepaymentSpecification"]] = {**CODEMETA_STRATEGY[iri["schema:StructuredValue"]]} +CODEMETA_STRATEGY[iri["schema:ServicePeriod"]] = { + **CODEMETA_STRATEGY[iri["schema:StructuredValue"]], + iri["schema:duration"]: ACTIONS["DurationOrQuantitativeValue"] +} +CODEMETA_STRATEGY[iri["schema:ShippingConditions"]] = { + **CODEMETA_STRATEGY[iri["schema:StructuredValue"]], + iri["schema:depth"]: ACTIONS["DistanceOrQuantitativeValue"], + iri["schema:height"]: ACTIONS["DistanceOrQuantitativeValue"], + iri["schema:shippingRate"]: ACTIONS["MonetaryAmountOrShippingRateSettings"], + iri["schema:transitTime"]: ACTIONS["QuantitativeValueOrServicePeriod"], + iri["schema:weight"]:ACTIONS["MassOrQuantitativeValue"], + iri["schema:width"]: ACTIONS["DistanceOrQuantitativeValue"] +} +CODEMETA_STRATEGY[iri["schema:ShippingDeliveryTime"]] = { + **CODEMETA_STRATEGY[iri["schema:StructuredValue"]], + iri["schema:handlingTime"]: ACTIONS["QuantitativeValueOrServicePeriod"], + iri["schema:transitTime"]: ACTIONS["QuantitativeValueOrServicePeriod"] +} +CODEMETA_STRATEGY[iri["schema:ShippingRateSettings"]] = { + **CODEMETA_STRATEGY[iri["schema:StructuredValue"]], + iri["schema:shippingRate"]: ACTIONS["MonetaryAmountOrShippingRateSettings"] +} +CODEMETA_STRATEGY[iri["schema:ShippingService"]] = { + **CODEMETA_STRATEGY[iri["schema:StructuredValue"]], + iri["schema:handlingTime"]: ACTIONS["QuantitativeValueOrServicePeriod"] +} +CODEMETA_STRATEGY[iri["schema:TypeAndQuantityNode"]] = { + **CODEMETA_STRATEGY[iri["schema:StructuredValue"]], + iri["schema:typeOfGood"]: ACTIONS["ProductOrService"] } +CODEMETA_STRATEGY[iri["schema:WarrantyPromise"]] = {**CODEMETA_STRATEGY[iri["schema:StructuredValue"]]} + +CODEMETA_STRATEGY[iri["schema:VirtualLocation"]] = {**CODEMETA_STRATEGY[iri["schema:Intangible"]]} + + CODEMETA_STRATEGY[iri["schema:MedicalEntity"]] = {**CODEMETA_STRATEGY[iri["schema:Thing"]]} -CODEMETA_STRATEGY[iri["schema:MedicalIntangible"]] = {**CODEMETA_STRATEGY[iri["schema:MedicalEntity"]]} -CODEMETA_STRATEGY[iri["schema:DrugLegalStatus"]] = {**CODEMETA_STRATEGY[iri["schema:MedicalIntangible"]]} -CODEMETA_STRATEGY[iri["schema:DDxElement"]] = {**CODEMETA_STRATEGY[iri["schema:MedicalIntangible"]]} -CODEMETA_STRATEGY[iri["schema:MedicalConditionStage"]] = {**CODEMETA_STRATEGY[iri["schema:MedicalIntangible"]]} -CODEMETA_STRATEGY[iri["schema:DrugStrength"]] = {**CODEMETA_STRATEGY[iri["schema:MedicalIntangible"]]} -CODEMETA_STRATEGY[iri["schema:DoseSchedule"]] = {**CODEMETA_STRATEGY[iri["schema:MedicalIntangible"]]} -CODEMETA_STRATEGY[iri["schema:MaximumDoseSchedule"]] = {**CODEMETA_STRATEGY[iri["schema:MedicalIntangible"]]} -CODEMETA_STRATEGY[iri["schema:MedicalGuideline"]] = {**CODEMETA_STRATEGY[iri["schema:MedicalEntity"]]} + + CODEMETA_STRATEGY[iri["schema:AnatomicalStructure"]] = {**CODEMETA_STRATEGY[iri["schema:MedicalEntity"]]} -CODEMETA_STRATEGY[iri["schema:MedicalCause"]] = {**CODEMETA_STRATEGY[iri["schema:MedicalEntity"]]} + +CODEMETA_STRATEGY[iri["schema:AnatomicalSystem"]] = { + **CODEMETA_STRATEGY[iri["schema:MedicalEntity"]], + iri["schema:comprisedOf"]: ACTIONS["AnatomicalStructureOrAnatomicalSystem"] +} + CODEMETA_STRATEGY[iri["schema:DrugClass"]] = {**CODEMETA_STRATEGY[iri["schema:MedicalEntity"]]} + CODEMETA_STRATEGY[iri["schema:LifestyleModification"]] = {**CODEMETA_STRATEGY[iri["schema:MedicalEntity"]]} -CODEMETA_STRATEGY[iri["schema:MedicalRiskFactor"]] = {**CODEMETA_STRATEGY[iri["schema:MedicalEntity"]]} -CODEMETA_STRATEGY[iri["schema:MedicalTest"]] = {**CODEMETA_STRATEGY[iri["schema:MedicalEntity"]]} -CODEMETA_STRATEGY[iri["schema:MedicalDevice"]] = {**CODEMETA_STRATEGY[iri["schema:MedicalEntity"]]} -CODEMETA_STRATEGY[iri["schema:MedicalTest"]] = {**CODEMETA_STRATEGY[iri["schema:MedicalEntity"]]} + +CODEMETA_STRATEGY[iri["schema:MedicalCause"]] = {**CODEMETA_STRATEGY[iri["schema:MedicalEntity"]]} + +CODEMETA_STRATEGY[iri["schema:MedicalCondition"]] = { + **CODEMETA_STRATEGY[iri["schema:MedicalEntity"]], + iri["schema:associatedAnatomy"]: ACTIONS["AnatomicalStructureOrAnatomicalSystemOrSuperficialAnatomy"], + iri["schema:possibleTreatment"]: ACTIONS["DrugOrDrugClassOrLifestyleModificationOrMedicalTherapy"], + iri["schema:secondaryPrevention"]: ACTIONS["DrugOrDrugClassOrLifestyleModificationOrMedicalTherapy"] +} +CODEMETA_STRATEGY[iri["schema:MedicalSignOrSymptom"]] = { + **CODEMETA_STRATEGY[iri["schema:MedicalCondition"]], + iri["schema:possibleTreatment"]: ACTIONS["DrugOrDrugClassOrLifestyleModificationOrMedicalTherapy"] +} +CODEMETA_STRATEGY[iri["schema:MedicalSign"]] = {**CODEMETA_STRATEGY[iri["schema:MedicalSignOrSymptom"]]} + CODEMETA_STRATEGY[iri["schema:MedicalContraindication"]] = {**CODEMETA_STRATEGY[iri["schema:MedicalEntity"]]} + +CODEMETA_STRATEGY[iri["schema:MedicalDevice"]] = {**CODEMETA_STRATEGY[iri["schema:MedicalEntity"]]} + +CODEMETA_STRATEGY[iri["schema:MedicalGuideline"]] = {**CODEMETA_STRATEGY[iri["schema:MedicalEntity"]]} + +CODEMETA_STRATEGY[iri["schema:MedicalIntangible"]] = {**CODEMETA_STRATEGY[iri["schema:MedicalEntity"]]} +CODEMETA_STRATEGY[iri["schema:DDxElement"]] = {**CODEMETA_STRATEGY[iri["schema:MedicalIntangible"]]} +CODEMETA_STRATEGY[iri["schema:DrugLegalStatus"]] = {**CODEMETA_STRATEGY[iri["schema:MedicalIntangible"]]} +CODEMETA_STRATEGY[iri["schema:DoseSchedule"]] = {**CODEMETA_STRATEGY[iri["schema:MedicalIntangible"]]} +CODEMETA_STRATEGY[iri["schema:DrugStrength"]] = {**CODEMETA_STRATEGY[iri["schema:MedicalIntangible"]]} +CODEMETA_STRATEGY[iri["schema:MaximumDoseSchedule"]] = {**CODEMETA_STRATEGY[iri["schema:MedicalIntangible"]]} +CODEMETA_STRATEGY[iri["schema:MedicalConditionStage"]] = {**CODEMETA_STRATEGY[iri["schema:MedicalIntangible"]]} + CODEMETA_STRATEGY[iri["schema:MedicalProcedure"]] = {**CODEMETA_STRATEGY[iri["schema:MedicalEntity"]]} CODEMETA_STRATEGY[iri["schema:TherapeuticProcedure"]] = {**CODEMETA_STRATEGY[iri["schema:MedicalProcedure"]]} CODEMETA_STRATEGY[iri["schema:MedicalTherapy"]] = {**CODEMETA_STRATEGY[iri["schema:TherapeuticProcedure"]]} + +CODEMETA_STRATEGY[iri["schema:MedicalRiskFactor"]] = {**CODEMETA_STRATEGY[iri["schema:MedicalEntity"]]} + CODEMETA_STRATEGY[iri["schema:MedicalStudy"]] = { **CODEMETA_STRATEGY[iri["schema:MedicalEntity"]], - iri["schema:sponsor"]: None # FIXME: troublesome Organization or Person + iri["schema:sponsor"]: ACTIONS["OrganizationOrPerson"] } -CODEMETA_STRATEGY[iri["schema:MedicalCondition"]] = { + +CODEMETA_STRATEGY[iri["schema:MedicalTest"]] = {**CODEMETA_STRATEGY[iri["schema:MedicalEntity"]]} + +CODEMETA_STRATEGY[iri["schema:SuperficialAnatomy"]] = { **CODEMETA_STRATEGY[iri["schema:MedicalEntity"]], - iri[ - "schema:associatedAnatomy" - ]: None, # FIXME: troublesome AnatomicalStructure or AnatomicalSystem or SuperficialAnatomy - iri[ - "schema:possibleTreatment" - ]: None, # FIXME: troublesome Drug or DrugClass or LifestyleModification or MedicalTherapy - iri[ - "schema:secondaryPrevention" - ]: None # FIXME: troublesome Drug or DrugClass or LifestyleModification or MedicalTherapy + iri["schema:relatedAnatomy"]: ACTIONS["AnatomicalStructureOrAnatomicalSystem"] } -CODEMETA_STRATEGY[iri["schema:MedicalSignOrSymptom"]] = { - **CODEMETA_STRATEGY[iri["schema:MedicalCondition"]], - iri[ - "schema:possibleTreatment" - ]: None # FIXME: troublesome Drug or DrugClass or LifestyleModification or MedicalTherapy + + + +CODEMETA_STRATEGY[iri["schema:Organization"]] = { + **CODEMETA_STRATEGY[iri["schema:Thing"]], + iri["schema:acceptedPaymentMethod"]: ACTIONS["LoanOrCreditOrPaymentMethod"], + iri["schema:alumni"]: ACTIONS["Person"], + iri["schema:areaServed"]: ACTIONS["AdministrativeAreaOrGeoShapeOrPlace"], + iri["schema:brand"]: ACTIONS["BrandOrOrganization"], + iri["schema:employee"]: ACTIONS["Person"], + iri["schema:founder"]: ACTIONS["OrganizationOrPerson"], + iri["schema:funder"]: ACTIONS["OrganizationOrPerson"], + iri["schema:legalRepresentative"]: ACTIONS["Person"], + iri["schema:location"]: ACTIONS["PlaceOrPostalAddressOrVirtualLocation"], + iri["schema:member"]: ACTIONS["OrganizationOrPerson"], + iri["schema:memberOf"]: ACTIONS["MemberProgramTierOrOrganizationOrProgramMembership"], + iri["schema:ownershipFundingInfo"]: ACTIONS["AboutPageOrCreativeWork"], + iri["schema:sponsor"]: ACTIONS["OrganizationOrPerson"] } -CODEMETA_STRATEGY[iri["schema:MedicalSign"]] = {**CODEMETA_STRATEGY[iri["schema:MedicalSignOrSymptom"]]} -CODEMETA_STRATEGY[iri["schema:SuperficialAnatomy"]] = { - **CODEMETA_STRATEGY[iri["schema:MedicalEntity"]], - iri["schema:relatedAnatomy"]: None # FIXME: troublesome AnatomicalStructure or AnatomicalSystem + + +CODEMETA_STRATEGY[iri["schema:PerformingGroup"]] = {**CODEMETA_STRATEGY[iri["schema:Organization"]]} +CODEMETA_STRATEGY[iri["schema:MusicGroup"]] = { + **CODEMETA_STRATEGY[iri["schema:PerformingGroup"]], + iri["schema:musicGroupMember"]: ACTIONS["Person"], + iri["schema:track"]: ACTIONS["ItemListOrMusicRecording"] } -CODEMETA_STRATEGY[iri["schema:AnatomicalSystem"]] = { - **CODEMETA_STRATEGY[iri["schema:MedicalEntity"]], - iri["schema:comprisedOf"]: None # FIXME: troublesome AnatomicalStructure or AnatomicalSystem + + + +CODEMETA_STRATEGY[iri["schema:Person"]] = { + **CODEMETA_STRATEGY[iri["schema:Thing"]], + iri["schema:alumniOf"]: ACTIONS["EducationalOrganizationOrOrganization"], + iri["schema:brand"]: ACTIONS["BrandOrOrganization"], + iri["schema:children"]: ACTIONS["Person"], + iri["schema:colleague"]: ACTIONS["Person"], + iri["schema:follows"]: ACTIONS["Person"], + iri["schema:funder"]: ACTIONS["OrganizationOrPerson"], + iri["schema:height"]: ACTIONS["DistanceOrQuantitativeValue"], + iri["schema:homeLocation"]: ACTIONS["ContactPointOrPlace"], + iri["schema:knows"]: ACTIONS["Person"], + iri["schema:memberOf"]: ACTIONS["MemberProgramTierOrOrganizationOrProgramMembership"], + iri["schema:netWorth"]: ACTIONS["MonetaryAmountOrPriceSpecification"], + iri["schema:parent"]: ACTIONS["Person"], + iri["schema:pronouns"]: ACTIONS["DefinedTermOrStructuredValue"], + iri["schema:relatedTo"]: ACTIONS["Person"], + iri["schema:sibling"]: ACTIONS["Person"], + iri["schema:sponsor"]: ACTIONS["OrganizationOrPerson"], + iri["schema:spouse"]: ACTIONS["Person"], + iri["schema:weight"]:ACTIONS["MassOrQuantitativeValue"], + iri["schema:workLocation"]: ACTIONS["ContactPointOrPlace"] } -CODEMETA_STRATEGY[iri["schema:MedicalCode"]] = { - **CODEMETA_STRATEGY[iri["schema:CategoryCode"]], - **CODEMETA_STRATEGY[iri["schema:MedicalIntangible"]] + + +CODEMETA_STRATEGY[iri["schema:Place"]] = { + **CODEMETA_STRATEGY[iri["schema:Thing"]], + iri["schema:geo"]: ACTIONS["GeoCoordinatesOrGeoShape"], + iri["schema:geoContains"]: ACTIONS["GeospatialGeometryOrPlace"], + iri["schema:geoCoveredBy"]: ACTIONS["GeospatialGeometryOrPlace"], + iri["schema:geoCovers"]: ACTIONS["GeospatialGeometryOrPlace"], + iri["schema:geoCrosses"]: ACTIONS["GeospatialGeometryOrPlace"], + iri["schema:geoDisjoint"]: ACTIONS["GeospatialGeometryOrPlace"], + iri["schema:geoEquals"]: ACTIONS["GeospatialGeometryOrPlace"], + iri["schema:geoIntersects"]: ACTIONS["GeospatialGeometryOrPlace"], + iri["schema:geoOverlaps"]: ACTIONS["GeospatialGeometryOrPlace"], + iri["schema:geoTouches"]: ACTIONS["GeospatialGeometryOrPlace"], + iri["schema:geoWithin"]: ACTIONS["GeospatialGeometryOrPlace"], + iri["schema:photo"]: ACTIONS["ImageObjectOrPhotograph"] } + +CODEMETA_STRATEGY[iri["schema:AdministrativeArea"]] = {**CODEMETA_STRATEGY[iri["schema:Place"]]} +CODEMETA_STRATEGY[iri["schema:Country"]] = {**CODEMETA_STRATEGY[iri["schema:AdministrativeArea"]]} + +CODEMETA_STRATEGY[iri["schema:CivicStructure"]] = {**CODEMETA_STRATEGY[iri["schema:Place"]]} + + + CODEMETA_STRATEGY[iri["schema:Product"]] = { **CODEMETA_STRATEGY[iri["schema:Thing"]], - iri["schema:brand"]: None, # FIXME: troublesome Brand or Organization - iri["schema:category"]: None, # FIXME: troublesome CategoryCode or Thing - iri["schema:depth"]: None, # FIXME: troublesome Distance or QuantitativeValue - iri["schema:height"]: None, # FIXME: troublesome Distance or QuantitativeValue - iri["schema:isRelatedTo"]: None, # FIXME: troublesome Product or Service - iri["schema:isSimilarTo"]: None, # FIXME: troublesome Product or Service - iri["schema:isVariantOf"]: None, # FIXME: troublesome ProductGroup or ProductModel - iri["schema:negativeNotes"]: None, # FIXME: troublesome ItemList or ListItem or WebContent - iri["schema:offers"]: None, # FIXME: troublesome Demand or Offer - iri["schema:positiveNotes"]: None, # FIXME: troublesome ItemList or ListItem or WebContent - iri["schema:size"]: None, # FIXME: troublesome DefinedTerm or QuantitativeValue or SizeSpecification - iri["schema:weight"]: None, # FIXME: troublesome Mass or QuantitativeValue - iri["schema:width"]: None, # FIXME: troublesome Distance or QuantitativeValue + iri["schema:brand"]: ACTIONS["BrandOrOrganization"], + iri["schema:category"]: ACTIONS["CategoryCodeOrThing"], + iri["schema:depth"]: ACTIONS["DistanceOrQuantitativeValue"], + iri["schema:height"]: ACTIONS["DistanceOrQuantitativeValue"], + iri["schema:isRelatedTo"]: ACTIONS["ProductOrService"], + iri["schema:isSimilarTo"]: ACTIONS["ProductOrService"], + iri["schema:isVariantOf"]: ACTIONS["ProductGroupOrProductModel"], + iri["schema:negativeNotes"]: ACTIONS["ItemListOrListItemOrWebContent"], + iri["schema:offers"]: ACTIONS["DemandOrOffer"], + iri["schema:positiveNotes"]: ACTIONS["ItemListOrListItemOrWebContent"], + iri["schema:size"]: ACTIONS["DefinedTermOrQuantitativeValueOrSizeSpecification"], + iri["schema:weight"]:ACTIONS["MassOrQuantitativeValue"], + iri["schema:width"]: ACTIONS["DistanceOrQuantitativeValue"] } + + CODEMETA_STRATEGY[iri["schema:ProductGroup"]] = {**CODEMETA_STRATEGY[iri["schema:Product"]]} -CODEMETA_STRATEGY[iri["schema:Drug"]] = { - **CODEMETA_STRATEGY[iri["schema:Product"]], - **CODEMETA_STRATEGY[iri["schema:MedicalEntity"]] -} + CODEMETA_STRATEGY[iri["schema:ProductModel"]] = { **CODEMETA_STRATEGY[iri["schema:Product"]], - iri["schema:isVariantOf"]: None, # FIXME: troublesome ProductGroup or ProductModel + iri["schema:isVariantOf"]: ACTIONS["ProductGroupOrProductModel"] } -CODEMETA_STRATEGY[iri["schema:PaymentCard"]] = { - **CODEMETA_STRATEGY[iri["schema:FinancialProduct"]], - **CODEMETA_STRATEGY[iri["schema:PaymentMethod"]] -} -CODEMETA_STRATEGY[iri["schema:CreditCard"]] = { - **CODEMETA_STRATEGY[iri["schema:LoanOrCredit"]], - **CODEMETA_STRATEGY[iri["schema:PaymentCard"]] + + +CODEMETA_STRATEGY[iri["schema:Taxon"]] = {**CODEMETA_STRATEGY[iri["schema:Thing"]]} + + + +CODEMETA_STRATEGY[iri["schema:CreativeWorkSeries"]] = { + **CODEMETA_STRATEGY[iri["schema:CreativeWork"]], + **CODEMETA_STRATEGY[iri["schema:Series"]] } -CODEMETA_STRATEGY[iri["schema:Organization"]] = { - **CODEMETA_STRATEGY[iri["schema:Thing"]], - iri["schema:acceptedPaymentMethod"]: None, # FIXME: troublesome LoanOrCredit or PaymentMethod - iri["schema:alumni"]: ACTIONS["merge_match_person"], - iri["schema:areaServed"]: None, # FIXME: troublesome AdministrativeArea or GeoShape or Place - iri["schema:brand"]: None, # FIXME: troublesome Brand or Organization - iri["schema:employee"]: ACTIONS["merge_match_person"], - iri["schema:founder"]: None, # FIXME: troublesome Organization or Person - iri["schema:funder"]: None, # FIXME: troublesome Organization or Person - iri["schema:legalRepresentative"]: ACTIONS["merge_match_person"], - iri["schema:location"]: None, # FIXME: troublesome Place or PostalAddress or Text or VirtualLocation - iri["schema:member"]: None, # FIXME: troublesome Organization or Person - iri["schema:memberOf"]: None, # FIXME: troublesome MemberProgramTier or Organization or ProgramMembership - iri["schema:ownershipFundingInfo"]: None, # FIXME: troublesome AboutPage or CreativeWork - iri["schema:sponsor"]: None # FIXME: troublesome Organization or Person + +CODEMETA_STRATEGY[iri["schema:DefinedRegion"]] = { + **CODEMETA_STRATEGY[iri["schema:Place"]], + **CODEMETA_STRATEGY[iri["schema:StructuredValue"]] } -CODEMETA_STRATEGY[iri["schema:PerformingGroup"]] = {**CODEMETA_STRATEGY[iri["schema:Organization"]]} -CODEMETA_STRATEGY[iri["schema:MusicGroup"]] = { - **CODEMETA_STRATEGY[iri["schema:PerformingGroup"]], - iri["schema:musicGroupMember"]: ACTIONS["merge_match_person"], - iri["schema:track"]: None # FIXME: troublesome ItemList or MusicRecording + + +CODEMETA_STRATEGY[iri["schema:Drug"]] = { + **CODEMETA_STRATEGY[iri["schema:Product"]], + **CODEMETA_STRATEGY[iri["schema:MedicalEntity"]] } + + CODEMETA_STRATEGY[iri["schema:EducationalOrganization"]] = { **CODEMETA_STRATEGY[iri["schema:Organization"]], **CODEMETA_STRATEGY[iri["schema:CivicStructure"]] } -CODEMETA_STRATEGY[iri["schema:DefinedRegion"]] = { - **CODEMETA_STRATEGY[iri["schema:Place"]], - **CODEMETA_STRATEGY[iri["schema:StructuredValue"]] + +CODEMETA_STRATEGY[iri["schema:HowToSection"]] = { + **CODEMETA_STRATEGY[iri["schema:CreativeWork"]], + **CODEMETA_STRATEGY[iri["schema:ItemList"]], + **CODEMETA_STRATEGY[iri["schema:ListItem"]] } -CODEMETA_STRATEGY[iri["schema:Person"]] = { - **CODEMETA_STRATEGY[iri["schema:Thing"]], - iri["schema:alumniOf"]: None, # FIXME: troublesome EducationalOrganization or Organization - iri["schema:brand"]: None, # FIXME: troublesome Brand or Organization - iri["schema:children"]: ACTIONS["merge_match_person"], - iri["schema:colleague"]: ACTIONS["merge_match_person"], - iri["schema:follows"]: ACTIONS["merge_match_person"], - iri["schema:funder"]: None, # FIXME: troublesome Organization or Person - iri["schema:height"]: None, # FIXME: troublesome Distance or QuantitativeValue - iri["schema:homeLocation"]: None, # FIXME: troublesome ContactPoint or Place - iri["schema:knows"]: ACTIONS["merge_match_person"], - iri["schema:memberOf"]: None, # FIXME: troublesome MemberProgramTier or Organization or ProgramMembership - iri["schema:netWorth"]: None, # FIXME: troublesome MonetaryAmount or PriceSpecification - iri["schema:parent"]: ACTIONS["merge_match_person"], - iri["schema:pronouns"]: None, # FIXME: troublesome DefinedTerm or StructuredValue - iri["schema:relatedTo"]: ACTIONS["merge_match_person"], - iri["schema:sibling"]: ACTIONS["merge_match_person"], - iri["schema:sponsor"]: None, # FIXME: troublesome Organization or Person - iri["schema:spouse"]: ACTIONS["merge_match_person"], - iri["schema:weight"]: None, # FIXME: troublesome Mass or QuantitativeValue - iri["schema:workLocation"]: None # FIXME: troublesome ContactPoint or Place + +CODEMETA_STRATEGY[iri["schema:HowToStep"]] = { + **CODEMETA_STRATEGY[iri["schema:CreativeWork"]], + **CODEMETA_STRATEGY[iri["schema:ItemList"]], + **CODEMETA_STRATEGY[iri["schema:ListItem"]] } -CODEMETA_STRATEGY[iri["schema:Taxon"]] = {**CODEMETA_STRATEGY[iri["schema:Thing"]]} + +CODEMETA_STRATEGY[iri["schema:MedicalCode"]] = { + **CODEMETA_STRATEGY[iri["schema:CategoryCode"]], + **CODEMETA_STRATEGY[iri["schema:MedicalIntangible"]] +} + + +CODEMETA_STRATEGY[iri["schema:PaymentCard"]] = { + **CODEMETA_STRATEGY[iri["schema:FinancialProduct"]], + **CODEMETA_STRATEGY[iri["schema:PaymentMethod"]] +} + +CODEMETA_STRATEGY[iri["schema:CreditCard"]] = { + **CODEMETA_STRATEGY[iri["schema:LoanOrCredit"]], + **CODEMETA_STRATEGY[iri["schema:PaymentCard"]] +} diff --git a/test/hermes_test/model/test_api_e2e.py b/test/hermes_test/model/test_api_e2e.py index 30ecd11c..6d0ce325 100644 --- a/test/hermes_test/model/test_api_e2e.py +++ b/test/hermes_test/model/test_api_e2e.py @@ -544,7 +544,6 @@ def test_process(tmp_path, monkeypatch, metadata_in, metadata_out): assert result == metadata_out -@pytest.mark.xfail @pytest.mark.parametrize( "metadata_in, metadata_out", [ From 3291c4d6a713a04899e24d223bf46462a4784694 Mon Sep 17 00:00:00 2001 From: Michael Fritzsche Date: Mon, 9 Mar 2026 12:52:56 +0100 Subject: [PATCH 207/247] formatting and doc strings --- src/hermes/model/merge/match.py | 56 ++++++++++++- src/hermes/model/merge/strategy.py | 123 ++--------------------------- 2 files changed, 58 insertions(+), 121 deletions(-) diff --git a/src/hermes/model/merge/match.py b/src/hermes/model/merge/match.py index 8a0aa9a1..cbcad94d 100644 --- a/src/hermes/model/merge/match.py +++ b/src/hermes/model/merge/match.py @@ -58,13 +58,30 @@ def match_func(left: Any, right: Any) -> bool: def match_person(left: Any, right: Any) -> bool: + """ + Compares two objects assuming they are representing schema:Person's + if they are not ld_dicts, == is used as a fallback.
+ If both objects have an @id value, the truth value returned by this function is the comparison of both ids. + If either other has no @id value and both objects have at least one email value, + they are considered equal if they have one common email. + If the equality of the objects is not yet decided, == comparison of the objects is returned. + + :param left: The first object for the comparison. + :type left: ld_merge_dict + :param right: The second object for the comparison. + :type right: ld_dict + + :return: The result of the comparison. + :rtype: bool + """ if not (isinstance(left, ld_dict) and isinstance(right, ld_dict)): return left == right if "@id" in left and "@id" in right: return left["@id"] == right["@id"] if "schema:email" in left and "schema:email" in right: - mails_right = right["schema:email"] - return any((mail in mails_right) for mail in left["schema:email"]) + if len(left["schema:email"]) > 0 and len(right["schema:email"]) > 0: + mails_right = right["schema:email"] + return any((mail in mails_right) for mail in left["schema:email"]) return left == right @@ -72,13 +89,46 @@ def match_multiple_types( *functions_for_types: list[tuple[str, Callable[[Any, Any], bool]]], fall_back_function: Callable[[Any, Any], bool] = match_keys("@id", fall_back_to_equals=True) ) -> Callable[[Any, Any], bool]: + """ + Returns a function that compares two objects using the given functions. + + :param functions_for_types: Tuples of type and match_function. + The returned function will compare two objects of a the same, given type with the specified function. + :type functions_for_types: list[tuple[str, Callable[[Any, Any], bool]]] + :param fall_back_function: The fallback for comparison if the objects that are being compared don't have a common + type with specified compare function or at least one object is not a JSON-LD dictionary. + :type fall_back_function: Callable[[Any, Any], bool] + + :return: The function that compares the two given objects using the given functions. + :rtype: Callable[[Any, Any], bool] + """ + + # create and return the match function using the given keys def match_func(left: Any, right: Any) -> bool: - if not ((isinstance(left, ld_dict) and isinstance(right, ld_dict)) and "@type" in left and "@type" in right): + """ + Compares two objects using a predetermined function if either objects is not an ld_dict + or they don't have a common type in a predetermined list of types.
+ If the objects are ld_dicts and have the same type with a known comparison function this is used instead. + + :param left: The first object for the comparison. + :type left: ld_merge_dict + :param right: The second object for the comparison. + :type right: ld_dict + + :return: The result of the comparison. + :rtype: bool + """ + # If at least one of the objects is not an ld_dict or contains no value for the key "@type", use the fallback. + if not (isinstance(left, ld_dict) and isinstance(right, ld_dict) and "@type" in left and "@type" in right): return fall_back_function(left, right) + # Extract the list of types types_left = left["@type"] types_right = right["@type"] + # Iterate over all known type, match_function pairs. + # If one type is in both objects return the result of the comparison with the match_function. for ld_type, func in functions_for_types: if ld_type in types_left and ld_type in types_right: return func(left, right) + # No common type with known match_function: Fallback return fall_back_function(left, right) return match_func diff --git a/src/hermes/model/merge/strategy.py b/src/hermes/model/merge/strategy.py index 5aaa5d7f..ac78545c 100644 --- a/src/hermes/model/merge/strategy.py +++ b/src/hermes/model/merge/strategy.py @@ -90,14 +90,12 @@ } - # Filled with entries for every schema-type that can be found inside an JSON-LD dict of type -# SoftwareSourceCode or SoftwareApplication. +# SoftwareSourceCode or SoftwareApplication using schema and CodeMeta as Context. CODEMETA_STRATEGY = {None: {None: ACTIONS["default"]}} CODEMETA_STRATEGY[iri["schema:Thing"]] = {iri["schema:owner"]: ACTIONS["OrganizationOrPerson"]} - CODEMETA_STRATEGY[iri["schema:Action"]] = { **CODEMETA_STRATEGY[iri["schema:Thing"]], iri["schema:agent"]: ACTIONS["OrganizationOrPerson"], @@ -107,7 +105,6 @@ } - CODEMETA_STRATEGY[iri["schema:BioChemEntity"]] = { **CODEMETA_STRATEGY[iri["schema:Thing"]], iri["schema:associatedDisease"]: ACTIONS["MedicalConditionOrPropertyValue"], @@ -117,14 +114,12 @@ iri["schema:taxonomicRange"]: ACTIONS["DefinedTermOrTaxon"] } - CODEMETA_STRATEGY[iri["schema:Gene"]] = { **CODEMETA_STRATEGY[iri["schema:BioChemEntity"]], iri["schema:expressedIn"]: ACTIONS["AnatomicalStructureOrAnatomicalSystemOrBioChemEntityOrDefinedTerm"] } - CODEMETA_STRATEGY[iri["schema:CreativeWork"]] = { **CODEMETA_STRATEGY[iri["schema:Thing"]], iri["schema:accountablePerson"]: ACTIONS["Person"], @@ -149,38 +144,30 @@ iri["schema:video"]: ACTIONS["ClipOrVideoObject"] } - CODEMETA_STRATEGY[iri["schema:Article"]] = {**CODEMETA_STRATEGY[iri["schema:CreativeWork"]]} CODEMETA_STRATEGY[iri["schema:NewsArticle"]] = {**CODEMETA_STRATEGY[iri["schema:Article"]]} CODEMETA_STRATEGY[iri["schema:ScholarlyArticle"]] = {**CODEMETA_STRATEGY[iri["schema:Article"]]} - CODEMETA_STRATEGY[iri["schema:Certification"]] = {**CODEMETA_STRATEGY[iri["schema:CreativeWork"]]} - CODEMETA_STRATEGY[iri["schema:Claim"]] = { **CODEMETA_STRATEGY[iri["schema:CreativeWork"]], iri["schema:claimInterpreter"]: ACTIONS["OrganizationOrPerson"] } - CODEMETA_STRATEGY[iri["schema:Clip"]] = { **CODEMETA_STRATEGY[iri["schema:CreativeWork"]], iri["schema:actor"]: ACTIONS["PerformingGroupOrPerson"], iri["schema:dircetor"]: ACTIONS["Person"], iri["schema:musicBy"]: ACTIONS["MusicGroupOrPerson"] } - CODEMETA_STRATEGY[iri["schema:Comment"]] = { **CODEMETA_STRATEGY[iri["schema:CreativeWork"]], iri["schema:parentItem"]: ACTIONS["CommentOrCreativeWork"] } CODEMETA_STRATEGY[iri["schema:CorrectionComment"]] = {**CODEMETA_STRATEGY[iri["schema:Comment"]]} - CODEMETA_STRATEGY[iri["schema:CreativeWorkSeason"]] = { **CODEMETA_STRATEGY[iri["schema:CreativeWork"]], iri["schema:actor"]: ACTIONS["PerformingGroupOrPerson"] } - CODEMETA_STRATEGY[iri["schema:DataCatalog"]] = {**CODEMETA_STRATEGY[iri["schema:CreativeWork"]]} - CODEMETA_STRATEGY[iri["schema:Dataset"]] = { **CODEMETA_STRATEGY[iri["schema:CreativeWork"]], iri["schema:variableMeasured"]: ACTIONS["PropertyOrPropertyValueOrStatisticalVariable"] @@ -189,12 +176,9 @@ **CODEMETA_STRATEGY[iri["schema:Dataset"]], iri["schema:dataFeedElement"]: ACTIONS["DataFeedItemOrThing"] } - CODEMETA_STRATEGY[iri["schema:DefinedTermSet"]] = {**CODEMETA_STRATEGY[iri["schema:CreativeWork"]]} CODEMETA_STRATEGY[iri["schema:CategoryCodeSet"]] = {**CODEMETA_STRATEGY[iri["schema:DefinedTermSet"]]} - CODEMETA_STRATEGY[iri["schema:EducationalOccupationalCredential"]] = {**CODEMETA_STRATEGY[iri["schema:CreativeWork"]]} - CODEMETA_STRATEGY[iri["schema:Episode"]] = { **CODEMETA_STRATEGY[iri["schema:CreativeWork"]], iri["schema:actor"]: ACTIONS["PerformingGroupOrPerson"], @@ -202,16 +186,12 @@ iri["schema:duration"]: ACTIONS["DurationOrQuantitativeValue"], iri["schema:musicBy"]: ACTIONS["MusicGroupOrPerson"] } - CODEMETA_STRATEGY[iri["schema:HowTo"]] = { **CODEMETA_STRATEGY[iri["schema:CreativeWork"]], iri["schema:step"]: ACTIONS["CreativeWorkOrHowToSectionOrHowToStep"] } - CODEMETA_STRATEGY[iri["schema:HyperTocEntry"]] = {**CODEMETA_STRATEGY[iri["schema:CreativeWork"]]} - CODEMETA_STRATEGY[iri["schema:Map"]] = {**CODEMETA_STRATEGY[iri["schema:CreativeWork"]]} - CODEMETA_STRATEGY[iri["schema:MediaObject"]] = { **CODEMETA_STRATEGY[iri["schema:CreativeWork"]], iri["schema:duration"]: ACTIONS["DurationOrQuantitativeValue"], @@ -228,15 +208,12 @@ iri["schema:dircetor"]: ACTIONS["Person"], iri["schema:musicBy"]: ACTIONS["MusicGroupOrPerson"] } - CODEMETA_STRATEGY[iri["schema:MenuSection"]] = {**CODEMETA_STRATEGY[iri["schema:CreativeWork"]]} - CODEMETA_STRATEGY[iri["schema:MusicComposition"]] = { **CODEMETA_STRATEGY[iri["schema:CreativeWork"]], iri["schema:composer"]: ACTIONS["OrganizationOrPerson"], iri["schema:lyricist"]: ACTIONS["Person"] } - CODEMETA_STRATEGY[iri["schema:MusicPlaylist"]] = { **CODEMETA_STRATEGY[iri["schema:CreativeWork"]], iri["schema:track"]: ACTIONS["ItemListOrMusicRecording"] @@ -250,44 +227,34 @@ iri["schema:creditedTo"]: ACTIONS["OrganizationOrPerson"], iri["schema:duration"]: ACTIONS["DurationOrQuantitativeValue"] } - CODEMETA_STRATEGY[iri["schema:MusicRecording"]] = { **CODEMETA_STRATEGY[iri["schema:CreativeWork"]], iri["schema:byArtist"]: ACTIONS["MusicGroupOrPerson"], iri["schema:duration"]: ACTIONS["DurationOrQuantitativeValue"] } - CODEMETA_STRATEGY[iri["schema:Photograph"]] = {**CODEMETA_STRATEGY[iri["schema:CreativeWork"]]} - CODEMETA_STRATEGY[iri["schema:Review"]] = { **CODEMETA_STRATEGY[iri["schema:CreativeWork"]], iri["schema:negativeNotes"]: ACTIONS["ItemListOrListItemOrWebContent"], iri["schema:positiveNotes"]: ACTIONS["ItemListOrListItemOrWebContent"] } - CODEMETA_STRATEGY[iri["schema:SoftwareApplication"]] = {**CODEMETA_STRATEGY[iri["schema:CreativeWork"]]} CODEMETA_STRATEGY[iri["schema:OperatingSystem"]] = {**CODEMETA_STRATEGY[iri["schema:SoftwareApplication"]]} CODEMETA_STRATEGY[iri["schema:RuntimePlatform"]] = {**CODEMETA_STRATEGY[iri["schema:SoftwareApplication"]]} - CODEMETA_STRATEGY[iri["schema:SoftwareSourceCode"]] = { **CODEMETA_STRATEGY[iri["schema:CreativeWork"]], iri["maintainer"]: ACTIONS["Person"] } - CODEMETA_STRATEGY[iri["schema:WebContent"]] = {**CODEMETA_STRATEGY[iri["schema:CreativeWork"]]} - CODEMETA_STRATEGY[iri["schema:WebPage"]] = { **CODEMETA_STRATEGY[iri["schema:CreativeWork"]], iri["schema:reviewedBy"]: ACTIONS["OrganizationOrPerson"] } CODEMETA_STRATEGY[iri["schema:AboutPage"]] = {**CODEMETA_STRATEGY[iri["schema:WebPage"]]} - CODEMETA_STRATEGY[iri["schema:WebPageElement"]] = {**CODEMETA_STRATEGY[iri["schema:CreativeWork"]]} - CODEMETA_STRATEGY[iri["schema:WebSite"]] = {**CODEMETA_STRATEGY[iri["schema:CreativeWork"]]} - CODEMETA_STRATEGY[iri["schema:Event"]] = { **CODEMETA_STRATEGY[iri["schema:Thing"]], iri["schema:actor"]: ACTIONS["PerformingGroupOrPerson"], @@ -305,40 +272,28 @@ iri["schema:translator"]: ACTIONS["OrganizationOrPerson"] } - CODEMETA_STRATEGY[iri["schema:PublicationEvent"]] = { **CODEMETA_STRATEGY[iri["schema:Event"]], iri["schema:publishedBy"]: ACTIONS["OrganizationOrPerson"] } - CODEMETA_STRATEGY[iri["schema:Intangible"]] = {**CODEMETA_STRATEGY[iri["schema:Thing"]]} - CODEMETA_STRATEGY[iri["schema:AlignmentObject"]] = {**CODEMETA_STRATEGY[iri["schema:Intangible"]]} - CODEMETA_STRATEGY[iri["schema:Audience"]] = {**CODEMETA_STRATEGY[iri["schema:Intangible"]]} - CODEMETA_STRATEGY[iri["schema:Brand"]] = {**CODEMETA_STRATEGY[iri["schema:Intangible"]]} - CODEMETA_STRATEGY[iri["schema:BroadcastChannel"]] = {**CODEMETA_STRATEGY[iri["schema:Intangible"]]} - CODEMETA_STRATEGY[iri["schema:BroadcastFrequencySpecification"]] = {**CODEMETA_STRATEGY[iri["schema:Intangible"]]} - CODEMETA_STRATEGY[iri["schema:Class"]] = { **CODEMETA_STRATEGY[iri["schema:Intangible"]], iri["schema:supersededBy"]: ACTIONS["ClassOrEnumeration"] } - CODEMETA_STRATEGY[iri["schema:ComputerLanguage"]] = {**CODEMETA_STRATEGY[iri["schema:Intangible"]]} - CODEMETA_STRATEGY[iri["schema:ConstraintNode"]] = {**CODEMETA_STRATEGY[iri["schema:Intangible"]]} CODEMETA_STRATEGY[iri["schema:StatisticalVariable"]] = {**CODEMETA_STRATEGY[iri["schema:ConstraintNode"]]} - CODEMETA_STRATEGY[iri["schema:DefinedTerm"]] = {**CODEMETA_STRATEGY[iri["schema:Intangible"]]} CODEMETA_STRATEGY[iri["schema:CategoryCode"]] = {**CODEMETA_STRATEGY[iri["schema:DefinedTerm"]]} - CODEMETA_STRATEGY[iri["schema:Demand"]] = { **CODEMETA_STRATEGY[iri["schema:Intangible"]], iri["schema:acceptedPaymentMethod"]: ACTIONS["LoanOrCreditOrPaymentMethod"], @@ -348,11 +303,8 @@ iri["schema:itemOffered"]: ACTIONS["AggregateOfferOrCreativeWorkOrEventOrMenuItemOrProductOrServiceOrTrip"], iri["schema:seller"]: ACTIONS["OrganizationOrPerson"] } - CODEMETA_STRATEGY[iri["schema:EnergyConsumptionDetails"]] = {**CODEMETA_STRATEGY[iri["schema:Intangible"]]} - CODEMETA_STRATEGY[iri["schema:EntryPoint"]] = {**CODEMETA_STRATEGY[iri["schema:Intangible"]]} - CODEMETA_STRATEGY[iri["schema:Enumeration"]] = { **CODEMETA_STRATEGY[iri["schema:Intangible"]], iri["schema:supersededBy"]: ACTIONS["ClassOrEnumeration"] @@ -364,7 +316,6 @@ ]: ACTIONS["DefinedTermOrEnumerationOrPropertyValueOrQualitativeValueOrQuantitativeValueOrStructuredValue"] } CODEMETA_STRATEGY[iri["schema:SizeSpecification"]] = {**CODEMETA_STRATEGY[iri["schema:QualitativeValue"]]} - CODEMETA_STRATEGY[iri["schema:GeospatialGeometry"]] = { **CODEMETA_STRATEGY[iri["schema:Intangible"]], iri["schema:geoContains"]: ACTIONS["GeospatialGeometryOrPlace"], @@ -378,7 +329,6 @@ iri["schema:geoTouches"]: ACTIONS["GeospatialGeometryOrPlace"], iri["schema:geoWithin"]: ACTIONS["GeospatialGeometryOrPlace"] } - CODEMETA_STRATEGY[iri["schema:Grant"]] = { **CODEMETA_STRATEGY[iri["schema:Intangible"]], iri[ @@ -387,55 +337,39 @@ iri["schema:funder"]: ACTIONS["OrganizationOrPerson"], iri["schema:sponsor"]: ACTIONS["OrganizationOrPerson"] } - CODEMETA_STRATEGY[iri["schema:HealthInsurancePlan"]] = {**CODEMETA_STRATEGY[iri["schema:Intangible"]]} - CODEMETA_STRATEGY[iri["schema:HealthPlanCostSharingSpecification"]] = {**CODEMETA_STRATEGY[iri["schema:Intangible"]]} - CODEMETA_STRATEGY[iri["schema:HealthPlanFormulary"]] = {**CODEMETA_STRATEGY[iri["schema:Intangible"]]} - CODEMETA_STRATEGY[iri["schema:HealthPlanNetwork"]] = {**CODEMETA_STRATEGY[iri["schema:Intangible"]]} - CODEMETA_STRATEGY[iri["schema:ItemList"]] = { **CODEMETA_STRATEGY[iri["schema:Intangible"]], iri["schema:itemListElement"]: ACTIONS["ListItemOrThing"] } CODEMETA_STRATEGY[iri["schema:OfferCatalog"]] = {**CODEMETA_STRATEGY[iri["schema:ItemList"]]} CODEMETA_STRATEGY[iri["schema:BreadcrumbList"]] = {**CODEMETA_STRATEGY[iri["schema:ItemList"]]} - CODEMETA_STRATEGY[iri["schema:Language"]] = {**CODEMETA_STRATEGY[iri["schema:Intangible"]]} - CODEMETA_STRATEGY[iri["schema:ListItem"]] = {**CODEMETA_STRATEGY[iri["schema:Intangible"]]} CODEMETA_STRATEGY[iri["schema:HowToItem"]] = {**CODEMETA_STRATEGY[iri["schema:ListItem"]]} CODEMETA_STRATEGY[iri["schema:HowToSupply"]] = {**CODEMETA_STRATEGY[iri["schema:HowToItem"]]} CODEMETA_STRATEGY[iri["schema:HowToTool"]] = {**CODEMETA_STRATEGY[iri["schema:HowToItem"]]} - CODEMETA_STRATEGY[iri["schema:MediaSubscription"]] = {**CODEMETA_STRATEGY[iri["schema:Intangible"]]} - CODEMETA_STRATEGY[iri["schema:MemberProgram"]] = {**CODEMETA_STRATEGY[iri["schema:Intangible"]]} - CODEMETA_STRATEGY[iri["schema:MemberProgramTier"]] = { **CODEMETA_STRATEGY[iri["schema:Intangible"]], iri["schema:hasTierRequirement"]: ACTIONS["CreditCardOrMonetaryAmountOrUnitPriceSpecification"] } - CODEMETA_STRATEGY[iri["schema:MenuItem"]] = { **CODEMETA_STRATEGY[iri["schema:Intangible"]], iri["schema:menuAddOn"]: ACTIONS["MenuItemOrMenuSection"], iri["schema:offers"]: ACTIONS["DemandOrOffer"] } - CODEMETA_STRATEGY[iri["schema:MerchantReturnPolicy"]] = {**CODEMETA_STRATEGY[iri["schema:Intangible"]]} - CODEMETA_STRATEGY[iri["schema:MerchantReturnPolicySeasonalOverride"]] = {**CODEMETA_STRATEGY[iri["schema:Intangible"]]} - CODEMETA_STRATEGY[iri["schema:Occupation"]] = { **CODEMETA_STRATEGY[iri["schema:Intangible"]], iri["schema:estimatedSalary"]: ACTIONS["MonetaryAmountOrMonetaryAmountDistribution"] } - CODEMETA_STRATEGY[iri["schema:OccupationalExperienceRequirements"]] = {**CODEMETA_STRATEGY[iri["schema:Intangible"]]} - CODEMETA_STRATEGY[iri["schema:Offer"]] = { **CODEMETA_STRATEGY[iri["schema:Intangible"]], iri["schema:acceptedPaymentMethod"]: ACTIONS["LoanOrCreditOrPaymentMethod"], @@ -452,37 +386,29 @@ **CODEMETA_STRATEGY[iri["schema:Offer"]], iri["schema:offers"]: ACTIONS["DemandOrOffer"] } - CODEMETA_STRATEGY[iri["schema:PaymentMethod"]] = {**CODEMETA_STRATEGY[iri["schema:Intangible"]]} - CODEMETA_STRATEGY[iri["schema:ProgramMembership"]] = { **CODEMETA_STRATEGY[iri["schema:Intangible"]], iri["schema:member"]: ACTIONS["OrganizationOrPerson"] } - CODEMETA_STRATEGY[iri["schema:Property"]] = { **CODEMETA_STRATEGY[iri["schema:Intangible"]], iri["schema:supersededBy"]: ACTIONS["ClassOrEnumerationOrProperty"] } - CODEMETA_STRATEGY[iri["schema:Quantity"]] = {**CODEMETA_STRATEGY[iri["schema:Intangible"]]} CODEMETA_STRATEGY[iri["schema:Duration"]] = {**CODEMETA_STRATEGY[iri["schema:Quantity"]]} CODEMETA_STRATEGY[iri["schema:Energy"]] = {**CODEMETA_STRATEGY[iri["schema:Quantity"]]} CODEMETA_STRATEGY[iri["schema:Mass"]] = {**CODEMETA_STRATEGY[iri["schema:Quantity"]]} - CODEMETA_STRATEGY[iri["schema:Rating"]] = { **CODEMETA_STRATEGY[iri["schema:Intangible"]], iri["schema:author"]: ACTIONS["OrganizationOrPerson"] } CODEMETA_STRATEGY[iri["schema:AggregateRating"]] = {**CODEMETA_STRATEGY[iri["schema:Rating"]]} - CODEMETA_STRATEGY[iri["schema:Schedule"]] = { **CODEMETA_STRATEGY[iri["schema:Intangible"]], iri["schema:duration"]: ACTIONS["DurationOrQuantitativeValue"] } - CODEMETA_STRATEGY[iri["schema:Series"]] = {**CODEMETA_STRATEGY[iri["schema:Intangible"]]} - CODEMETA_STRATEGY[iri["schema:Service"]] = { **CODEMETA_STRATEGY[iri["schema:Intangible"]], iri["schema:areaServed"]: ACTIONS["AdministrativeAreaOrGeoShapeOrPlace"], @@ -498,11 +424,8 @@ CODEMETA_STRATEGY[iri["schema:CableOrSatelliteService"]] = {**CODEMETA_STRATEGY[iri["schema:Service"]]} CODEMETA_STRATEGY[iri["schema:FinancialProduct"]] = {**CODEMETA_STRATEGY[iri["schema:Service"]]} CODEMETA_STRATEGY[iri["schema:LoanOrCredit"]] = {**CODEMETA_STRATEGY[iri["schema:FinancialProduct"]]} - CODEMETA_STRATEGY[iri["schema:ServiceChannel"]] = {**CODEMETA_STRATEGY[iri["schema:Intangible"]]} - CODEMETA_STRATEGY[iri["schema:SpeakableSpecification"]] = {**CODEMETA_STRATEGY[iri["schema:Intangible"]]} - CODEMETA_STRATEGY[iri["schema:StructuredValue"]] = {**CODEMETA_STRATEGY[iri["schema:Intangible"]]} CODEMETA_STRATEGY[iri["schema:ContactPoint"]] = { **CODEMETA_STRATEGY[iri["schema:StructuredValue"]], @@ -524,7 +447,7 @@ iri["schema:depth"]: ACTIONS["DistanceOrQuantitativeValue"], iri["schema:height"]: ACTIONS["DistanceOrQuantitativeValue"], iri["schema:shippingRate"]: ACTIONS["MonetaryAmountOrShippingRateSettings"], - iri["schema:weight"]:ACTIONS["MassOrQuantitativeValue"], + iri["schema:weight"]: ACTIONS["MassOrQuantitativeValue"], iri["schema:width"]: ACTIONS["DistanceOrQuantitativeValue"] } CODEMETA_STRATEGY[iri["schema:OpeningHoursSpecification"]] = {**CODEMETA_STRATEGY[iri["schema:StructuredValue"]]} @@ -571,7 +494,7 @@ iri["schema:height"]: ACTIONS["DistanceOrQuantitativeValue"], iri["schema:shippingRate"]: ACTIONS["MonetaryAmountOrShippingRateSettings"], iri["schema:transitTime"]: ACTIONS["QuantitativeValueOrServicePeriod"], - iri["schema:weight"]:ACTIONS["MassOrQuantitativeValue"], + iri["schema:weight"]: ACTIONS["MassOrQuantitativeValue"], iri["schema:width"]: ACTIONS["DistanceOrQuantitativeValue"] } CODEMETA_STRATEGY[iri["schema:ShippingDeliveryTime"]] = { @@ -592,27 +515,19 @@ iri["schema:typeOfGood"]: ACTIONS["ProductOrService"] } CODEMETA_STRATEGY[iri["schema:WarrantyPromise"]] = {**CODEMETA_STRATEGY[iri["schema:StructuredValue"]]} - CODEMETA_STRATEGY[iri["schema:VirtualLocation"]] = {**CODEMETA_STRATEGY[iri["schema:Intangible"]]} - CODEMETA_STRATEGY[iri["schema:MedicalEntity"]] = {**CODEMETA_STRATEGY[iri["schema:Thing"]]} - CODEMETA_STRATEGY[iri["schema:AnatomicalStructure"]] = {**CODEMETA_STRATEGY[iri["schema:MedicalEntity"]]} - CODEMETA_STRATEGY[iri["schema:AnatomicalSystem"]] = { **CODEMETA_STRATEGY[iri["schema:MedicalEntity"]], iri["schema:comprisedOf"]: ACTIONS["AnatomicalStructureOrAnatomicalSystem"] } - CODEMETA_STRATEGY[iri["schema:DrugClass"]] = {**CODEMETA_STRATEGY[iri["schema:MedicalEntity"]]} - CODEMETA_STRATEGY[iri["schema:LifestyleModification"]] = {**CODEMETA_STRATEGY[iri["schema:MedicalEntity"]]} - CODEMETA_STRATEGY[iri["schema:MedicalCause"]] = {**CODEMETA_STRATEGY[iri["schema:MedicalEntity"]]} - CODEMETA_STRATEGY[iri["schema:MedicalCondition"]] = { **CODEMETA_STRATEGY[iri["schema:MedicalEntity"]], iri["schema:associatedAnatomy"]: ACTIONS["AnatomicalStructureOrAnatomicalSystemOrSuperficialAnatomy"], @@ -624,13 +539,9 @@ iri["schema:possibleTreatment"]: ACTIONS["DrugOrDrugClassOrLifestyleModificationOrMedicalTherapy"] } CODEMETA_STRATEGY[iri["schema:MedicalSign"]] = {**CODEMETA_STRATEGY[iri["schema:MedicalSignOrSymptom"]]} - CODEMETA_STRATEGY[iri["schema:MedicalContraindication"]] = {**CODEMETA_STRATEGY[iri["schema:MedicalEntity"]]} - CODEMETA_STRATEGY[iri["schema:MedicalDevice"]] = {**CODEMETA_STRATEGY[iri["schema:MedicalEntity"]]} - CODEMETA_STRATEGY[iri["schema:MedicalGuideline"]] = {**CODEMETA_STRATEGY[iri["schema:MedicalEntity"]]} - CODEMETA_STRATEGY[iri["schema:MedicalIntangible"]] = {**CODEMETA_STRATEGY[iri["schema:MedicalEntity"]]} CODEMETA_STRATEGY[iri["schema:DDxElement"]] = {**CODEMETA_STRATEGY[iri["schema:MedicalIntangible"]]} CODEMETA_STRATEGY[iri["schema:DrugLegalStatus"]] = {**CODEMETA_STRATEGY[iri["schema:MedicalIntangible"]]} @@ -638,27 +549,21 @@ CODEMETA_STRATEGY[iri["schema:DrugStrength"]] = {**CODEMETA_STRATEGY[iri["schema:MedicalIntangible"]]} CODEMETA_STRATEGY[iri["schema:MaximumDoseSchedule"]] = {**CODEMETA_STRATEGY[iri["schema:MedicalIntangible"]]} CODEMETA_STRATEGY[iri["schema:MedicalConditionStage"]] = {**CODEMETA_STRATEGY[iri["schema:MedicalIntangible"]]} - CODEMETA_STRATEGY[iri["schema:MedicalProcedure"]] = {**CODEMETA_STRATEGY[iri["schema:MedicalEntity"]]} CODEMETA_STRATEGY[iri["schema:TherapeuticProcedure"]] = {**CODEMETA_STRATEGY[iri["schema:MedicalProcedure"]]} CODEMETA_STRATEGY[iri["schema:MedicalTherapy"]] = {**CODEMETA_STRATEGY[iri["schema:TherapeuticProcedure"]]} - CODEMETA_STRATEGY[iri["schema:MedicalRiskFactor"]] = {**CODEMETA_STRATEGY[iri["schema:MedicalEntity"]]} - CODEMETA_STRATEGY[iri["schema:MedicalStudy"]] = { **CODEMETA_STRATEGY[iri["schema:MedicalEntity"]], iri["schema:sponsor"]: ACTIONS["OrganizationOrPerson"] } - CODEMETA_STRATEGY[iri["schema:MedicalTest"]] = {**CODEMETA_STRATEGY[iri["schema:MedicalEntity"]]} - CODEMETA_STRATEGY[iri["schema:SuperficialAnatomy"]] = { **CODEMETA_STRATEGY[iri["schema:MedicalEntity"]], iri["schema:relatedAnatomy"]: ACTIONS["AnatomicalStructureOrAnatomicalSystem"] } - CODEMETA_STRATEGY[iri["schema:Organization"]] = { **CODEMETA_STRATEGY[iri["schema:Thing"]], iri["schema:acceptedPaymentMethod"]: ACTIONS["LoanOrCreditOrPaymentMethod"], @@ -676,7 +581,6 @@ iri["schema:sponsor"]: ACTIONS["OrganizationOrPerson"] } - CODEMETA_STRATEGY[iri["schema:PerformingGroup"]] = {**CODEMETA_STRATEGY[iri["schema:Organization"]]} CODEMETA_STRATEGY[iri["schema:MusicGroup"]] = { **CODEMETA_STRATEGY[iri["schema:PerformingGroup"]], @@ -685,7 +589,6 @@ } - CODEMETA_STRATEGY[iri["schema:Person"]] = { **CODEMETA_STRATEGY[iri["schema:Thing"]], iri["schema:alumniOf"]: ACTIONS["EducationalOrganizationOrOrganization"], @@ -705,12 +608,11 @@ iri["schema:sibling"]: ACTIONS["Person"], iri["schema:sponsor"]: ACTIONS["OrganizationOrPerson"], iri["schema:spouse"]: ACTIONS["Person"], - iri["schema:weight"]:ACTIONS["MassOrQuantitativeValue"], + iri["schema:weight"]: ACTIONS["MassOrQuantitativeValue"], iri["schema:workLocation"]: ACTIONS["ContactPointOrPlace"] } - CODEMETA_STRATEGY[iri["schema:Place"]] = { **CODEMETA_STRATEGY[iri["schema:Thing"]], iri["schema:geo"]: ACTIONS["GeoCoordinatesOrGeoShape"], @@ -727,14 +629,11 @@ iri["schema:photo"]: ACTIONS["ImageObjectOrPhotograph"] } - CODEMETA_STRATEGY[iri["schema:AdministrativeArea"]] = {**CODEMETA_STRATEGY[iri["schema:Place"]]} CODEMETA_STRATEGY[iri["schema:Country"]] = {**CODEMETA_STRATEGY[iri["schema:AdministrativeArea"]]} - CODEMETA_STRATEGY[iri["schema:CivicStructure"]] = {**CODEMETA_STRATEGY[iri["schema:Place"]]} - CODEMETA_STRATEGY[iri["schema:Product"]] = { **CODEMETA_STRATEGY[iri["schema:Thing"]], iri["schema:brand"]: ACTIONS["BrandOrOrganization"], @@ -748,73 +647,61 @@ iri["schema:offers"]: ACTIONS["DemandOrOffer"], iri["schema:positiveNotes"]: ACTIONS["ItemListOrListItemOrWebContent"], iri["schema:size"]: ACTIONS["DefinedTermOrQuantitativeValueOrSizeSpecification"], - iri["schema:weight"]:ACTIONS["MassOrQuantitativeValue"], + iri["schema:weight"]: ACTIONS["MassOrQuantitativeValue"], iri["schema:width"]: ACTIONS["DistanceOrQuantitativeValue"] } - CODEMETA_STRATEGY[iri["schema:ProductGroup"]] = {**CODEMETA_STRATEGY[iri["schema:Product"]]} - CODEMETA_STRATEGY[iri["schema:ProductModel"]] = { **CODEMETA_STRATEGY[iri["schema:Product"]], iri["schema:isVariantOf"]: ACTIONS["ProductGroupOrProductModel"] } - CODEMETA_STRATEGY[iri["schema:Taxon"]] = {**CODEMETA_STRATEGY[iri["schema:Thing"]]} - CODEMETA_STRATEGY[iri["schema:CreativeWorkSeries"]] = { **CODEMETA_STRATEGY[iri["schema:CreativeWork"]], **CODEMETA_STRATEGY[iri["schema:Series"]] } - CODEMETA_STRATEGY[iri["schema:DefinedRegion"]] = { **CODEMETA_STRATEGY[iri["schema:Place"]], **CODEMETA_STRATEGY[iri["schema:StructuredValue"]] } - CODEMETA_STRATEGY[iri["schema:Drug"]] = { **CODEMETA_STRATEGY[iri["schema:Product"]], **CODEMETA_STRATEGY[iri["schema:MedicalEntity"]] } - CODEMETA_STRATEGY[iri["schema:EducationalOrganization"]] = { **CODEMETA_STRATEGY[iri["schema:Organization"]], **CODEMETA_STRATEGY[iri["schema:CivicStructure"]] } - CODEMETA_STRATEGY[iri["schema:HowToSection"]] = { **CODEMETA_STRATEGY[iri["schema:CreativeWork"]], **CODEMETA_STRATEGY[iri["schema:ItemList"]], **CODEMETA_STRATEGY[iri["schema:ListItem"]] } - CODEMETA_STRATEGY[iri["schema:HowToStep"]] = { **CODEMETA_STRATEGY[iri["schema:CreativeWork"]], **CODEMETA_STRATEGY[iri["schema:ItemList"]], **CODEMETA_STRATEGY[iri["schema:ListItem"]] } - CODEMETA_STRATEGY[iri["schema:MedicalCode"]] = { **CODEMETA_STRATEGY[iri["schema:CategoryCode"]], **CODEMETA_STRATEGY[iri["schema:MedicalIntangible"]] } - CODEMETA_STRATEGY[iri["schema:PaymentCard"]] = { **CODEMETA_STRATEGY[iri["schema:FinancialProduct"]], **CODEMETA_STRATEGY[iri["schema:PaymentMethod"]] } - CODEMETA_STRATEGY[iri["schema:CreditCard"]] = { **CODEMETA_STRATEGY[iri["schema:LoanOrCredit"]], **CODEMETA_STRATEGY[iri["schema:PaymentCard"]] From 1e781cac2be59cc350335a0aadc1ad58dbc76ada Mon Sep 17 00:00:00 2001 From: Michael Fritzsche Date: Mon, 9 Mar 2026 13:10:51 +0100 Subject: [PATCH 208/247] fixed mistake in merge --- test/hermes_test/model/test_api_e2e.py | 12 ++---------- 1 file changed, 2 insertions(+), 10 deletions(-) diff --git a/test/hermes_test/model/test_api_e2e.py b/test/hermes_test/model/test_api_e2e.py index 9ca781fb..43ce0be8 100644 --- a/test/hermes_test/model/test_api_e2e.py +++ b/test/hermes_test/model/test_api_e2e.py @@ -185,13 +185,9 @@ def test_cff_harvest(tmp_path, monkeypatch, cff, res): monkeypatch.setattr(HermesContext.__init__, "__defaults__", (tmp_path.cwd(),)) cli.main() except SystemExit: -<<<<<<< feature/454-e2e-curate - manager = HermesContext() -======= print("TODO: Delete when package is working again or mock cli") finally: - manager = context_manager.HermesContext() ->>>>>>> feature/454-e2e-test-plugin-api + manager = HermesContext() manager.prepare_step("harvest") with manager["cff"] as cache: result = SoftwareMetadata(cache["codemeta"]) @@ -350,13 +346,9 @@ def test_codemeta_harvest(tmp_path, monkeypatch, codemeta, res): monkeypatch.setattr(HermesContext.__init__, "__defaults__", (tmp_path.cwd(),)) cli.main() except SystemExit: -<<<<<<< feature/454-e2e-curate - manager = HermesContext() -======= print("TODO: Delete when package is working again or mock cli") finally: - manager = context_manager.HermesContext() ->>>>>>> feature/454-e2e-test-plugin-api + manager = HermesContext() manager.prepare_step("harvest") with manager["codemeta"] as cache: result = SoftwareMetadata(cache["codemeta"]) From 0e8b49c8c3ae51e40d32020d9024a282e8bc3e96 Mon Sep 17 00:00:00 2001 From: Michael Fritzsche Date: Mon, 9 Mar 2026 14:24:15 +0100 Subject: [PATCH 209/247] fixed minor bugs --- src/hermes/model/api.py | 2 +- src/hermes/model/types/ld_container.py | 48 +++++++++++++------------- src/hermes/model/types/ld_dict.py | 4 ++- 3 files changed, 28 insertions(+), 26 deletions(-) diff --git a/src/hermes/model/api.py b/src/hermes/model/api.py index db582656..a0e71b54 100644 --- a/src/hermes/model/api.py +++ b/src/hermes/model/api.py @@ -29,7 +29,7 @@ def load_from_cache(cls, ctx: HermesContext, source: str) -> "SoftwareMetadata": data = SoftwareMetadata() data.active_ctx = data.ld_proc.initial_ctx(context, {"documentLoader": bundled_loader}) data.context = context - for key, value in cache["expanded"][0]: + for key, value in cache["expanded"][0].items(): data[key] = value return data except Exception as e: diff --git a/src/hermes/model/types/ld_container.py b/src/hermes/model/types/ld_container.py index 49343954..bd57b520 100644 --- a/src/hermes/model/types/ld_container.py +++ b/src/hermes/model/types/ld_container.py @@ -7,35 +7,35 @@ from __future__ import annotations -from .pyld_util import JsonLdProcessor, bundled_loader from datetime import date, datetime, time +from typing import Any, TypeAlias, TYPE_CHECKING, Union +from typing_extensions import Self -from typing import TYPE_CHECKING +from .pyld_util import JsonLdProcessor, bundled_loader if TYPE_CHECKING: from .ld_dict import ld_dict from .ld_list import ld_list - from typing import Any, TypeAlias, Union - from typing_extensions import Self - JSON_LD_CONTEXT_DICT: TypeAlias = dict[str, Union[str, "JSON_LD_CONTEXT_DICT"]] - BASIC_TYPE: TypeAlias = Union[str, float, int, bool] - EXPANDED_JSON_LD_VALUE: TypeAlias = list[Union[ - dict[str, Union["EXPANDED_JSON_LD_VALUE", BASIC_TYPE]], - "EXPANDED_JSON_LD_VALUE", - str - ]] - COMPACTED_JSON_LD_VALUE: TypeAlias = Union[ - list[Union[dict[str, Union["COMPACTED_JSON_LD_VALUE", BASIC_TYPE]], BASIC_TYPE]], - dict[str, Union["COMPACTED_JSON_LD_VALUE", BASIC_TYPE]], - ] - TIME_TYPE: TypeAlias = Union[datetime, date, time] - JSON_LD_VALUE: TypeAlias = Union[ - list[Union["JSON_LD_VALUE", BASIC_TYPE, TIME_TYPE, ld_dict, ld_list]], - dict[str, Union["JSON_LD_VALUE", BASIC_TYPE, TIME_TYPE, ld_dict, ld_list]], - ] - PYTHONIZED_LD_CONTAINER: TypeAlias = Union[ - list[Union["PYTHONIZED_LD_CONTAINER", BASIC_TYPE, TIME_TYPE]], - dict[str, Union["PYTHONIZED_LD_CONTAINER", BASIC_TYPE, TIME_TYPE]], - ] + +JSON_LD_CONTEXT_DICT: TypeAlias = dict[str, Union[str, "JSON_LD_CONTEXT_DICT"]] +BASIC_TYPE: TypeAlias = Union[str, float, int, bool] +EXPANDED_JSON_LD_VALUE: TypeAlias = list[Union[ + dict[str, Union["EXPANDED_JSON_LD_VALUE", BASIC_TYPE]], + "EXPANDED_JSON_LD_VALUE", + str +]] +COMPACTED_JSON_LD_VALUE: TypeAlias = Union[ + list[Union[dict[str, Union["COMPACTED_JSON_LD_VALUE", BASIC_TYPE]], BASIC_TYPE]], + dict[str, Union["COMPACTED_JSON_LD_VALUE", BASIC_TYPE]], +] +TIME_TYPE: TypeAlias = Union[datetime, date, time] +JSON_LD_VALUE: TypeAlias = Union[ + list[Union["JSON_LD_VALUE", BASIC_TYPE, TIME_TYPE, "ld_dict", "ld_list"]], + dict[str, Union["JSON_LD_VALUE", BASIC_TYPE, TIME_TYPE, "ld_dict", "ld_list"]], +] +PYTHONIZED_LD_CONTAINER: TypeAlias = Union[ + list[Union["PYTHONIZED_LD_CONTAINER", BASIC_TYPE, TIME_TYPE]], + dict[str, Union["PYTHONIZED_LD_CONTAINER", BASIC_TYPE, TIME_TYPE]], +] class ld_container: diff --git a/src/hermes/model/types/ld_dict.py b/src/hermes/model/types/ld_dict.py index 6f38041b..5a673c9a 100644 --- a/src/hermes/model/types/ld_dict.py +++ b/src/hermes/model/types/ld_dict.py @@ -107,7 +107,9 @@ def __setitem__(self: Self, key: str, value: Union[JSON_LD_VALUE, BASIC_TYPE, TI :return: :rtype: None """ - # if the value is None delete the entry instead of updating it + # if the value is None delete the entry instead of updating it, but make sure it exists before deleting + if value is None and key not in self: + return if value is None: del self[self.ld_proc.expand_iri(self.active_ctx, key)] return From bd7e732a3f94f6b7ca5f46a529514603c77b3345 Mon Sep 17 00:00:00 2001 From: notactuallyfinn Date: Tue, 10 Mar 2026 17:12:22 +0100 Subject: [PATCH 210/247] adapted postprocess and added test --- src/hermes/commands/__init__.py | 2 +- src/hermes/commands/cli.py | 9 +- src/hermes/commands/deposit/base.py | 2 +- src/hermes/commands/postprocess/base.py | 17 +++- src/hermes/commands/postprocess/invenio.py | 82 ++++++++++++------- .../commands/postprocess/invenio_rdm.py | 38 ++++++--- test/hermes_test/model/test_api_e2e.py | 59 ++++++++++++- 7 files changed, 159 insertions(+), 50 deletions(-) diff --git a/src/hermes/commands/__init__.py b/src/hermes/commands/__init__.py index 03539bd4..3a2906d4 100644 --- a/src/hermes/commands/__init__.py +++ b/src/hermes/commands/__init__.py @@ -16,4 +16,4 @@ from hermes.commands.harvest.base import HermesHarvestCommand from hermes.commands.process.base import HermesProcessCommand from hermes.commands.deposit.base import HermesDepositCommand -# from hermes.commands.postprocess.base import HermesPostprocessCommand +from hermes.commands.postprocess.base import HermesPostprocessCommand diff --git a/src/hermes/commands/cli.py b/src/hermes/commands/cli.py index d725e40a..d772d09d 100644 --- a/src/hermes/commands/cli.py +++ b/src/hermes/commands/cli.py @@ -16,9 +16,10 @@ # from hermes.commands import (HermesHelpCommand, HermesVersionCommand, HermesCleanCommand, # HermesHarvestCommand, HermesProcessCommand, HermesCurateCommand, # HermesDepositCommand, HermesPostprocessCommand, HermesInitCommand) -from hermes.commands import (HermesCurateCommand, HermesDepositCommand, - HermesHarvestCommand, HermesHelpCommand, - HermesProcessCommand, HermesVersionCommand) +from hermes.commands import ( + HermesCurateCommand, HermesDepositCommand, HermesHarvestCommand, HermesHelpCommand, HermesPostprocessCommand, + HermesProcessCommand, HermesVersionCommand +) from hermes.commands.base import HermesCommand @@ -46,7 +47,7 @@ def main() -> None: HermesHarvestCommand(parser), HermesHelpCommand(parser), # HermesInitCommand(parser), - # HermesPostprocessCommand(parser), + HermesPostprocessCommand(parser), HermesProcessCommand(parser), HermesVersionCommand(parser), ): diff --git a/src/hermes/commands/deposit/base.py b/src/hermes/commands/deposit/base.py index 6fbf3625..be84ed29 100644 --- a/src/hermes/commands/deposit/base.py +++ b/src/hermes/commands/deposit/base.py @@ -48,7 +48,7 @@ def __call__(self, command: HermesCommand) -> None: deposit = self.update_metadata() self.ctx.prepare_step("deposit") - with self.ctx["deposit"] as cache: + with self.ctx[command.settings.target] as cache: cache["result"] = deposit self.ctx.finalize_step("deposit") self.delete_artifacts() diff --git a/src/hermes/commands/postprocess/base.py b/src/hermes/commands/postprocess/base.py index fea5c036..9daa37d6 100644 --- a/src/hermes/commands/postprocess/base.py +++ b/src/hermes/commands/postprocess/base.py @@ -15,17 +15,26 @@ class HermesPostprocessPlugin(HermesPlugin): pass -class _PostprocessSettings(BaseModel): +class PostprocessSettings(BaseModel): """Generic post-processing settings.""" - execute: list = [] + run: list = [] class HermesPostprocessCommand(HermesCommand): """Post-process the published metadata after deposition.""" command_name = "postprocess" - settings_class = _PostprocessSettings + settings_class = PostprocessSettings def __call__(self, args: argparse.Namespace) -> None: - pass + self.args = args + plugin_names = self.settings.run + + for plugin_name in plugin_names: + try: + plugin_func = self.plugins[plugin_name]() + plugin_func(self) + except KeyError as e: + self.log.error("Plugin '%s' not found.", plugin_name) + self.errors.append(e) diff --git a/src/hermes/commands/postprocess/invenio.py b/src/hermes/commands/postprocess/invenio.py index a7ba6b53..16e47d9a 100644 --- a/src/hermes/commands/postprocess/invenio.py +++ b/src/hermes/commands/postprocess/invenio.py @@ -5,45 +5,71 @@ # SPDX-FileContributor: Michael Meinel # SPDX-FileContributor: Stephan Druskat -import json import logging -import toml from ruamel import yaml +import toml + +from hermes.commands.base import HermesCommand +from hermes.error import MisconfigurationError +from hermes.model.context_manager import HermesContext + +from .base import HermesPostprocessPlugin -_log = logging.getLogger('deposit.invenio') +_log = logging.getLogger('postprocess.invenio') -def config_record_id(ctx): - deposition_path = ctx.get_cache('deposit', 'deposit') - with deposition_path.open("r") as deposition_file: - deposition = json.load(deposition_file) - conf = ctx.config.hermes - try: - conf.deposit.invenio.record_id = deposition['record_id'] +class config_record_id(HermesPostprocessPlugin): + def __call__(self, command: HermesCommand): + ctx = HermesContext() + ctx.prepare_step("deposit") + with ctx["invenio"] as manager: + deposition = manager["result"] + ctx.finalize_step("deposit") + + conf = toml.load(open('hermes.toml', 'r')) + try: + old_record_id = conf["deposit"]["invenio"]["record_id"] + if old_record_id == deposition["record_id"]: + return + _log.error("hermes.toml already contains a record_id for Invenio deposit.") + raise MisconfigurationError( + "Can't overwrite record_id automatically." + f"(Tried to overwrite {old_record_id} with {deposition["record_id"]})" + ) + except KeyError: + pass + conf.setdefault("deposit", {}).setdefault("invenio", {})["record_id"] = deposition['record_id'] toml.dump(conf, open('hermes.toml', 'w')) - except KeyError: - raise RuntimeError("No deposit.invenio configuration available to store record id in") -def cff_doi(ctx): - deposition_path = ctx.get_cache('deposit', 'deposit') - with deposition_path.open("r") as deposition_file: - deposition = json.load(deposition_file) - try: - cff = yaml.load(open('CITATION.cff', 'r'), yaml.Loader) - new_identifier = { +class cff_doi(HermesPostprocessPlugin): + def __call__(self, command: HermesCommand): + ctx = HermesContext() + ctx.prepare_step("deposit") + with ctx["invenio"] as manager: + deposition = manager["result"] + ctx.finalize_step("deposit") + + yaml_io = yaml.YAML() + yaml_io.default_flow_style = False + yaml_io.allow_unicode = True + yaml_io.indent(mapping=4, sequence=2, offset=0) + yaml_io.allow_unicode = True + + try: + cff = yaml_io.load(open('CITATION.cff', 'r')) + new_identifier = { 'description': f"DOI for the published version {deposition['metadata']['version']} " - f"[generated by hermes]", + "[generated by hermes]", 'type': 'doi', 'value': deposition['doi'] } - if 'identifiers' in cff: - cff['identifiers'].append(new_identifier) - else: - cff['identifiers'] = [new_identifier] - yaml.dump(cff, open('CITATION.cff', 'w'), - indent=4, default_flow_style=False, block_seq_indent=2, allow_unicode=True) - except Exception as e: - raise RuntimeError("Update of CITATION.cff failed.") from e + if 'identifiers' in cff: + cff['identifiers'].append(new_identifier) + else: + cff['identifiers'] = [new_identifier] + yaml_io.dump(cff, open('CITATION.cff', 'w')) + except Exception as e: + raise RuntimeError("Update of CITATION.cff failed.") from e diff --git a/src/hermes/commands/postprocess/invenio_rdm.py b/src/hermes/commands/postprocess/invenio_rdm.py index 9553f47b..6b322a4e 100644 --- a/src/hermes/commands/postprocess/invenio_rdm.py +++ b/src/hermes/commands/postprocess/invenio_rdm.py @@ -5,22 +5,38 @@ # SPDX-FileContributor: Michael Meinel # SPDX-FileContributor: Stephan Druskat -import json import logging import toml +from hermes.commands.base import HermesCommand +from hermes.error import MisconfigurationError +from hermes.model.context_manager import HermesContext -_log = logging.getLogger('deposit.invenio_rdm') +from .base import HermesPostprocessPlugin +_log = logging.getLogger('postprocess.invenio_rdm') -def config_record_id(ctx): - deposition_path = ctx.get_cache('deposit', 'deposit') - with deposition_path.open("r") as deposition_file: - deposition = json.load(deposition_file) - conf = ctx.config.hermes - try: - conf['deposit']['invenio_rdm']['record_id'] = deposition['record_id'] + +class config_record_id(HermesPostprocessPlugin): + def __call__(self, command: HermesCommand): + ctx = HermesContext() + ctx.prepare_step("deposit") + with ctx["invenio_rdm"] as manager: + deposition = manager["result"] + ctx.finalize_step("deposit") + + conf = toml.load(open('hermes.toml', 'r')) + try: + old_record_id = conf["deposit"]["invenio_rdm"]["record_id"] + if old_record_id == deposition["record_id"]: + return + _log.error("hermes.toml already contains a record_id for Invenio_RDM deposit.") + raise MisconfigurationError( + "Can't overwrite record_id automatically." + f"(Tried to overwrite {old_record_id} with {deposition["record_id"]})" + ) + except KeyError: + pass + conf.setdefault("deposit", {}).setdefault("invenio_rdm", {})["record_id"] = deposition['record_id'] toml.dump(conf, open('hermes.toml', 'w')) - except KeyError: - raise RuntimeError("No deposit.invenio_rdm configuration available to store record id in") diff --git a/test/hermes_test/model/test_api_e2e.py b/test/hermes_test/model/test_api_e2e.py index b85a07a6..37e21083 100644 --- a/test/hermes_test/model/test_api_e2e.py +++ b/test/hermes_test/model/test_api_e2e.py @@ -6,11 +6,15 @@ from datetime import date import json +from pathlib import Path import pytest import sys + +from ruamel import yaml +import toml + from hermes.model import context_manager, SoftwareMetadata from hermes.commands import cli -from pathlib import Path @pytest.fixture @@ -752,3 +756,56 @@ def test_process_complex(tmp_path, monkeypatch, metadata_in, metadata_out): sys.argv = orig_argv assert result == metadata_out + + +def test_invenio_postprocess(tmp_path, monkeypatch): + monkeypatch.chdir(tmp_path) + + input_file = tmp_path / ".hermes" / "deposit" / "invenio" / "result.json" + input_file.parent.mkdir(parents=True, exist_ok=True) + input_file.write_text("""{"record_id": "foo", "doi": "my_doi", "metadata": {"version": "1.0.0"}}""") + + citation_file = tmp_path / "CITATION.cff" + citation_file.write_text("cff-version: 1.2.0\ntitle: Test") + + config_file = tmp_path / "hermes.toml" + config_file.write_text( + """[postprocess] +run = ["config_invenio_record_id", "cff_doi"] +[deposit.invenio] +site_url = "https://zenodo.org" +""" + ) + + orig_argv = sys.argv[:] + sys.argv = ["hermes", "postprocess", "--path", str(tmp_path), "--config", str(config_file)] + print(" ".join(sys.argv)) + result_cff = result_toml = {} + try: + monkeypatch.setattr(context_manager.HermesContext.__init__, "__defaults__", (tmp_path.cwd(),)) + cli.main() + except SystemExit as e: + if e.code != 0: + raise e + finally: + result_toml = toml.load(config_file) + result_cff = yaml.YAML().load(citation_file) + sys.argv = orig_argv + + assert result_toml == toml.loads( + """[postprocess] +run = ["config_invenio_record_id", "cff_doi"] +[deposit.invenio] +site_url = "https://zenodo.org" +record_id = "foo" +""" + ) + assert result_cff == yaml.YAML().load( + """cff-version: 1.2.0 +title: Test +identifiers: + - type: doi + value: my_doi + description: DOI for the published version 1.0.0 [generated by hermes] +""" + ) From a30d3855ec137015e5300169f27948585dae4ae6 Mon Sep 17 00:00:00 2001 From: notactuallyfinn Date: Wed, 11 Mar 2026 10:44:44 +0100 Subject: [PATCH 211/247] changed imports and fixed syntax error for python 3.10 --- src/hermes/commands/postprocess/base.py | 3 ++- src/hermes/commands/postprocess/invenio.py | 22 +++++++++---------- .../commands/postprocess/invenio_rdm.py | 7 +++--- 3 files changed, 17 insertions(+), 15 deletions(-) diff --git a/src/hermes/commands/postprocess/base.py b/src/hermes/commands/postprocess/base.py index 9daa37d6..e528ae65 100644 --- a/src/hermes/commands/postprocess/base.py +++ b/src/hermes/commands/postprocess/base.py @@ -3,12 +3,13 @@ # SPDX-License-Identifier: Apache-2.0 # SPDX-FileContributor: Michael Meinel +# SPDX-FileContributor: Michael Fritzsche import argparse from pydantic import BaseModel -from hermes.commands.base import HermesCommand, HermesPlugin +from ..base import HermesCommand, HermesPlugin class HermesPostprocessPlugin(HermesPlugin): diff --git a/src/hermes/commands/postprocess/invenio.py b/src/hermes/commands/postprocess/invenio.py index 16e47d9a..2fbbc713 100644 --- a/src/hermes/commands/postprocess/invenio.py +++ b/src/hermes/commands/postprocess/invenio.py @@ -3,17 +3,17 @@ # SPDX-License-Identifier: Apache-2.0 # SPDX-FileContributor: Michael Meinel +# SPDX-FileContributor: Michael Fritzsche # SPDX-FileContributor: Stephan Druskat import logging -from ruamel import yaml +from ruamel.yaml import YAML import toml -from hermes.commands.base import HermesCommand from hermes.error import MisconfigurationError from hermes.model.context_manager import HermesContext - +from ..base import HermesCommand from .base import HermesPostprocessPlugin @@ -36,7 +36,7 @@ def __call__(self, command: HermesCommand): _log.error("hermes.toml already contains a record_id for Invenio deposit.") raise MisconfigurationError( "Can't overwrite record_id automatically." - f"(Tried to overwrite {old_record_id} with {deposition["record_id"]})" + f"(Tried to overwrite {old_record_id} with {deposition['record_id']})" ) except KeyError: pass @@ -52,14 +52,14 @@ def __call__(self, command: HermesCommand): deposition = manager["result"] ctx.finalize_step("deposit") - yaml_io = yaml.YAML() - yaml_io.default_flow_style = False - yaml_io.allow_unicode = True - yaml_io.indent(mapping=4, sequence=2, offset=0) - yaml_io.allow_unicode = True + yaml = YAML() + yaml.default_flow_style = False + yaml.allow_unicode = True + yaml.indent(mapping=4, sequence=2, offset=0) + yaml.allow_unicode = True try: - cff = yaml_io.load(open('CITATION.cff', 'r')) + cff = yaml.load(open('CITATION.cff', 'r')) new_identifier = { 'description': f"DOI for the published version {deposition['metadata']['version']} " "[generated by hermes]", @@ -70,6 +70,6 @@ def __call__(self, command: HermesCommand): cff['identifiers'].append(new_identifier) else: cff['identifiers'] = [new_identifier] - yaml_io.dump(cff, open('CITATION.cff', 'w')) + yaml.dump(cff, open('CITATION.cff', 'w')) except Exception as e: raise RuntimeError("Update of CITATION.cff failed.") from e diff --git a/src/hermes/commands/postprocess/invenio_rdm.py b/src/hermes/commands/postprocess/invenio_rdm.py index 6b322a4e..3c6cb4a7 100644 --- a/src/hermes/commands/postprocess/invenio_rdm.py +++ b/src/hermes/commands/postprocess/invenio_rdm.py @@ -3,18 +3,19 @@ # SPDX-License-Identifier: Apache-2.0 # SPDX-FileContributor: Michael Meinel +# SPDX-FileContributor: Michael Fritzsche # SPDX-FileContributor: Stephan Druskat import logging import toml -from hermes.commands.base import HermesCommand from hermes.error import MisconfigurationError from hermes.model.context_manager import HermesContext - +from ..base import HermesCommand from .base import HermesPostprocessPlugin + _log = logging.getLogger('postprocess.invenio_rdm') @@ -34,7 +35,7 @@ def __call__(self, command: HermesCommand): _log.error("hermes.toml already contains a record_id for Invenio_RDM deposit.") raise MisconfigurationError( "Can't overwrite record_id automatically." - f"(Tried to overwrite {old_record_id} with {deposition["record_id"]})" + f"(Tried to overwrite {old_record_id} with {deposition['record_id']})" ) except KeyError: pass From 230ee05b125a64451759376dc3673a7e1a749b87 Mon Sep 17 00:00:00 2001 From: notactuallyfinn Date: Wed, 11 Mar 2026 11:53:02 +0100 Subject: [PATCH 212/247] fixed syntax error and added lost dependency --- poetry.lock | 41 +++++++++++++++++++++++--- pyproject.toml | 1 + test/hermes_test/model/test_api_e2e.py | 8 +++-- 3 files changed, 44 insertions(+), 6 deletions(-) diff --git a/poetry.lock b/poetry.lock index 27b40260..0eeefe31 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 2.1.3 and should not be changed by hand. +# This file is automatically @generated by Poetry 2.3.2 and should not be changed by hand. [[package]] name = "accessible-pygments" @@ -927,7 +927,7 @@ version = "3.0.2" description = "Safely add untrusted strings to HTML/XML markup." optional = false python-versions = ">=3.9" -groups = ["docs"] +groups = ["dev", "docs"] files = [ {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8"}, {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158"}, @@ -1072,7 +1072,7 @@ sphinx = ">=6,<8" [package.extras] code-style = ["pre-commit (>=3.0,<4.0)"] linkify = ["linkify-it-py (>=2.0,<3.0)"] -rtd = ["ipython", "pydata-sphinx-theme (==v0.13.0rc4)", "sphinx-autodoc2 (>=0.4.2,<0.5.0)", "sphinx-book-theme (==1.0.0rc2)", "sphinx-copybutton", "sphinx-design2", "sphinx-pyscript", "sphinx-tippy (>=0.3.1)", "sphinx-togglebutton", "sphinxext-opengraph (>=0.8.2,<0.9.0)", "sphinxext-rediraffe (>=0.2.7,<0.3.0)"] +rtd = ["ipython", "pydata-sphinx-theme (==0.13.0rc4)", "sphinx-autodoc2 (>=0.4.2,<0.5.0)", "sphinx-book-theme (==1.0.0rc2)", "sphinx-copybutton", "sphinx-design2", "sphinx-pyscript", "sphinx-tippy (>=0.3.1)", "sphinx-togglebutton", "sphinxext-opengraph (>=0.8.2,<0.9.0)", "sphinxext-rediraffe (>=0.2.7,<0.3.0)"] testing = ["beautifulsoup4", "coverage[toml]", "pytest (>=7,<8)", "pytest-cov", "pytest-param-files (>=0.3.4,<0.4.0)", "pytest-regressions", "sphinx-pytest"] testing-docutils = ["pygments", "pytest (>=7,<8)", "pytest-param-files (>=0.3.4,<0.4.0)"] @@ -1574,6 +1574,21 @@ pytest = ">=4.6" [package.extras] testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"] +[[package]] +name = "pytest-httpserver" +version = "1.1.5" +description = "pytest-httpserver is a httpserver for pytest" +optional = false +python-versions = ">=3.10" +groups = ["dev"] +files = [ + {file = "pytest_httpserver-1.1.5-py3-none-any.whl", hash = "sha256:ee83feb587ab652c0c6729598db2820e9048233bac8df756818b7845a1621d0a"}, + {file = "pytest_httpserver-1.1.5.tar.gz", hash = "sha256:dc3d82e1fe00e491829d8939c549bf4bd9b39a260f87113c619b9d517c2f8ff1"}, +] + +[package.dependencies] +Werkzeug = ">=2.0.0" + [[package]] name = "python-dateutil" version = "2.9.0.post0" @@ -2440,6 +2455,24 @@ h2 = ["h2 (>=4,<5)"] socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] zstd = ["zstandard (>=0.18.0)"] +[[package]] +name = "werkzeug" +version = "3.1.6" +description = "The comprehensive WSGI web application library." +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "werkzeug-3.1.6-py3-none-any.whl", hash = "sha256:7ddf3357bb9564e407607f988f683d72038551200c704012bb9a4c523d42f131"}, + {file = "werkzeug-3.1.6.tar.gz", hash = "sha256:210c6bede5a420a913956b4791a7f4d6843a43b6fcee4dfa08a65e93007d0d25"}, +] + +[package.dependencies] +markupsafe = ">=2.1.1" + +[package.extras] +watchdog = ["watchdog (>=2.3)"] + [[package]] name = "wheel" version = "0.45.1" @@ -2547,4 +2580,4 @@ files = [ [metadata] lock-version = "2.1" python-versions = ">=3.10, <4.0.0" -content-hash = "e76de51d1f5dd86486d4cc24a5cdf7d007b16ce5d9d0cc3f7d0f353cf0defff0" +content-hash = "b4999552687ca998bc8b7ec6f53141801b789896051ea12986ef53acda8ce589" diff --git a/pyproject.toml b/pyproject.toml index f9588a75..621c02be 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -78,6 +78,7 @@ pytest-cov = "^3.0.0" taskipy = "^1.10.3" flake8 = "^5.0.4" requests-mock = "^1.10.0" +pytest-httpserver = "^1.1.5" # Packages for developers for creating documentation [tool.poetry.group.docs] diff --git a/test/hermes_test/model/test_api_e2e.py b/test/hermes_test/model/test_api_e2e.py index 37e21083..8433bad6 100644 --- a/test/hermes_test/model/test_api_e2e.py +++ b/test/hermes_test/model/test_api_e2e.py @@ -629,7 +629,9 @@ def test_process(tmp_path, monkeypatch, metadata_in, metadata_out): manager.finalize_step("harvest") config_file = tmp_path / "hermes.toml" - config_file.write_text(f"[harvest]\nsources = [{', '.join(f'\"{harvester}\"' for harvester in metadata_in)}]") + config_file.write_text( + "[harvest]\nsources = [" + ", ".join('\"' + f'{harvester}' + '\"' for harvester in metadata_in) + "]" + ) orig_argv = sys.argv[:] sys.argv = ["hermes", "process", "--path", str(tmp_path), "--config", str(config_file)] @@ -738,7 +740,9 @@ def test_process_complex(tmp_path, monkeypatch, metadata_in, metadata_out): manager.finalize_step("harvest") config_file = tmp_path / "hermes.toml" - config_file.write_text(f"[harvest]\nsources = [{', '.join(f'\"{harvester}\"' for harvester in metadata_in)}]") + config_file.write_text( + "[harvest]\nsources = [" + ", ".join('\"' + f'{harvester}' + '\"' for harvester in metadata_in) + "]" + ) orig_argv = sys.argv[:] sys.argv = ["hermes", "process", "--path", str(tmp_path), "--config", str(config_file)] From 1fcbb4b29e12676ecf6568fa6083063c53501b70 Mon Sep 17 00:00:00 2001 From: notactuallyfinn Date: Wed, 11 Mar 2026 14:43:21 +0100 Subject: [PATCH 213/247] adjusted comments and config for doc build to try get it running --- docs/source/conf.py | 5 ++++- src/hermes/model/error.py | 1 + src/hermes/model/types/ld_container.py | 4 ++-- src/hermes/model/types/ld_context.py | 20 ++++++++++---------- 4 files changed, 17 insertions(+), 13 deletions(-) diff --git a/docs/source/conf.py b/docs/source/conf.py index 7ab7b582..e80dd460 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -102,7 +102,7 @@ def read_version_from_pyproject(): 'sphinx_togglebutton', 'sphinxcontrib.datatemplates', # Custom extensions, see `_ext` directory. - # 'plugin_markup', + 'plugin_markup', ] language = 'en' @@ -131,6 +131,9 @@ def read_version_from_pyproject(): autoapi_dirs = ["../../src"] autoapi_root = "api" autoapi_ignore = ["*__main__*"] +autoapi_options = [ + "members", "undoc-members", "private-members", "show-inheritance", "show-module-summary", "special-members" +] # -- Options for HTML output ------------------------------------------------- diff --git a/src/hermes/model/error.py b/src/hermes/model/error.py index 1da95943..ae3452ae 100644 --- a/src/hermes/model/error.py +++ b/src/hermes/model/error.py @@ -33,6 +33,7 @@ class HermesContextError(Exception): To be able to track and fix the error, you should use this in conjunction with the original exception if applicable: .. code:: python + try: context[term] except ValueError as e: diff --git a/src/hermes/model/types/ld_container.py b/src/hermes/model/types/ld_container.py index bd57b520..09d1cf52 100644 --- a/src/hermes/model/types/ld_container.py +++ b/src/hermes/model/types/ld_container.py @@ -365,8 +365,8 @@ def merge_to_list(cls: type[Self], *args: tuple[Any]) -> list[Any]: """ Returns a list that is contains all non-list items from args and all items in the lists in args. - :param *args: The items that should be put into one list. - :type *args: tuple[Any] + :param args: The items that should be put into one list. + :type args: tuple[Any] :return: A list containing all non-list items and all items from lists in args. (Same order as in args) :rytpe: list[Any] diff --git a/src/hermes/model/types/ld_context.py b/src/hermes/model/types/ld_context.py index 3d60bb41..ce2a09b1 100644 --- a/src/hermes/model/types/ld_context.py +++ b/src/hermes/model/types/ld_context.py @@ -45,19 +45,19 @@ class ContextPrefix: Represents the context of the hermes JSON-LD data model and provides two views on the model: - as a list of linked data vocabularies, where items can be vocabulary base IRI strings and/or dictionaries mapping - arbitrary strings used to prefix terms from a specific vocabulary to their respective vocabulary IRI strings.; + arbitrary strings used to prefix terms from a specific vocabulary to their respective vocabulary IRI strings.; - as a dict mapping prefixes to vocabulary IRIs, where the default vocabulary has a prefix of None. """ def __init__(self, vocabularies: list[str | dict]): """ - @param vocabularies: A list of linked data vocabularies. Items can be vocabulary base IRI strings and/or - dictionaries mapping arbitrary strings used to prefix terms from a specific vocabulary to their respective - vocabulary IRI strings. - If the list contains more than one string item, the last one will be used as the default vocabulary. If a prefix string is used more than once across all dictionaries in the list, the last item with this key will be included in the context. + + :param vocabularies: A list of linked data vocabularies. Items can be vocabulary base IRI strings and/or + dictionaries mapping arbitrary strings used to prefix terms from a specific vocabulary to their respective + vocabulary IRI strings. """ self.vocabularies = vocabularies self.context = {} @@ -89,12 +89,12 @@ def __getitem__(self, compressed_term: str | tuple) -> str: term = context["prefix1", "term"] term = context[None, "term_in_default_vocabulary"] - @param compressed_term: A term from a vocabulary in the context; terms from the default vocabulary are passed - with a prefix of None, or as an unprefixed string, terms from non-default vocabularies are prefixed with the - defined prefix for the vocabulary. The term can either be passed in as string if prefix is None, or - ":", or as a tuple. + :param compressed_term: A term from a vocabulary in the context; terms from the default vocabulary are passed + with a prefix of None, or as an unprefixed string, terms from non-default vocabularies are prefixed with the + defined prefix for the vocabulary. The term can either be passed in as string if prefix is None, or + ":", or as a tuple. - @return: The fully qualified IRI for the passed term + :return: The fully qualified IRI for the passed term """ if not isinstance(compressed_term, str): prefix, term = compressed_term From 65f3d104c778c9f775f1c57ff084e0d6b45772c2 Mon Sep 17 00:00:00 2001 From: notactuallyfinn Date: Fri, 13 Mar 2026 08:50:06 +0100 Subject: [PATCH 214/247] tweeked documentation a bit --- docs/source/_static/custom.css | 28 +++ docs/source/conf.py | 25 ++- src/hermes/model/types/ld_container.py | 260 +++++++++++-------------- 3 files changed, 160 insertions(+), 153 deletions(-) diff --git a/docs/source/_static/custom.css b/docs/source/_static/custom.css index 12ace29f..20c1f57f 100644 --- a/docs/source/_static/custom.css +++ b/docs/source/_static/custom.css @@ -6,6 +6,7 @@ /* * SPDX-FileContributor: Oliver Bertuch + * SPDX-FileContributor: Michael Fritzsche */ img.member { @@ -22,4 +23,31 @@ img.badge-icon { img.member { max-width: 23%; } +} + +img { + display: block; + margin: auto; +} + +.bd-page-width { + max-width: none !important; +} + +@media (min-width: 1200px) { + .bd-article-container { + max-width: none !important; + } +} + +.bd-article-container { + max-width: 100%; +} + +.bd-sidebar-primary.bd-sidebar { + max-width: 340px; +} + +.bd-sidebar-secondary{ + max-width: min-content; } \ No newline at end of file diff --git a/docs/source/conf.py b/docs/source/conf.py index e80dd460..caaea466 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -39,7 +39,7 @@ def read_from_pyproject(file_path="../../pyproject.toml"): data = toml.load(file_path) # Navigate to the authors metadata - metadata = data.get("tool", {}).get("poetry", {}) + metadata = data.get("project", {}) if not metadata: return "No metadata found in pyproject.toml" return metadata @@ -56,7 +56,7 @@ def read_authors_from_pyproject(): if not authors: return "No authors metadata found in pyproject.toml" # Convert the list of authors to a comma-separated string - return ", ".join([a.split(" <")[0] for a in authors]) + return ", ".join([author["name"] for author in authors]) def read_version_from_pyproject(): metadata = read_from_pyproject() @@ -73,7 +73,7 @@ def read_version_from_pyproject(): author = read_authors_from_pyproject() # The full version, including alpha/beta/rc tags -release = read_version_from_pyproject() +version = release = read_version_from_pyproject() # -- General configuration --------------------------------------------------- @@ -132,7 +132,7 @@ def read_version_from_pyproject(): autoapi_root = "api" autoapi_ignore = ["*__main__*"] autoapi_options = [ - "members", "undoc-members", "private-members", "show-inheritance", "show-module-summary", "special-members" + "members", "undoc-members", "private-members", "special-members", "show-inheritance", "show-module-summary" ] # -- Options for HTML output ------------------------------------------------- @@ -164,6 +164,7 @@ def read_version_from_pyproject(): "repository_url": "https://github.com/hermes-hmc/hermes", "use_repository_button": True, "navigation_with_keys": False, + "max_navbar_depth": -1 } html_css_files = [ @@ -185,3 +186,19 @@ def read_version_from_pyproject(): # -- Options for sphinx-togglebutton ----------------------------------------- togglebutton_hint = "Click to show screenshot" + + +# TODO: remove this workaround and remove "undoc-members" from autoapi_options once everything is documented +# This removes all generated entries for known documented classes (because autoapi will add all attributes +# it finds in the code no matter if they are described in a class doc string or not). +def autoapi_skip_member(app, obj_type, name, obj, skip, options): + if obj_type == "attribute": + if any(documented_type in obj.id for documented_type in [ + "ld_container", "ld_dict", "ld_list", "ld_merge_container", "ld_merge_dict", "ld_merge_list", "ld_context" + ]): + return True + + return skip + +def setup(app): + app.connect("autoapi-skip-member", autoapi_skip_member) diff --git a/src/hermes/model/types/ld_container.py b/src/hermes/model/types/ld_container.py index 09d1cf52..474da7bb 100644 --- a/src/hermes/model/types/ld_container.py +++ b/src/hermes/model/types/ld_container.py @@ -17,25 +17,32 @@ from .ld_list import ld_list JSON_LD_CONTEXT_DICT: TypeAlias = dict[str, Union[str, "JSON_LD_CONTEXT_DICT"]] +""" Type description for a context object in JSON_LD """ BASIC_TYPE: TypeAlias = Union[str, float, int, bool] +""" All primitive types in Python recogniced by ld_containers """ EXPANDED_JSON_LD_VALUE: TypeAlias = list[Union[ dict[str, Union["EXPANDED_JSON_LD_VALUE", BASIC_TYPE]], "EXPANDED_JSON_LD_VALUE", str ]] +""" Type description of an expanded JSON_LD object """ COMPACTED_JSON_LD_VALUE: TypeAlias = Union[ list[Union[dict[str, Union["COMPACTED_JSON_LD_VALUE", BASIC_TYPE]], BASIC_TYPE]], dict[str, Union["COMPACTED_JSON_LD_VALUE", BASIC_TYPE]], ] +""" Type description of an compacted JSON_LD object """ TIME_TYPE: TypeAlias = Union[datetime, date, time] +""" All time related types in Python recogniced by ld_Containers """ JSON_LD_VALUE: TypeAlias = Union[ list[Union["JSON_LD_VALUE", BASIC_TYPE, TIME_TYPE, "ld_dict", "ld_list"]], dict[str, Union["JSON_LD_VALUE", BASIC_TYPE, TIME_TYPE, "ld_dict", "ld_list"]], ] +""" Type description of valid JSON_LD objects that are partially represented by ld_containers """ PYTHONIZED_LD_CONTAINER: TypeAlias = Union[ list[Union["PYTHONIZED_LD_CONTAINER", BASIC_TYPE, TIME_TYPE]], dict[str, Union["PYTHONIZED_LD_CONTAINER", BASIC_TYPE, TIME_TYPE]], ] +""" Type description of the pythonized from of ld_containers (i.e. if the ld_container(s) is/ are replaced). """ class ld_container: @@ -46,25 +53,14 @@ class ld_container: It allows to easily interacts them by hinding all the nesting and automatically mapping between different forms. - :ivar active_ctx: The active context that is used by the json-ld processor. - :ivar context: The context exclusive to this ld_container and all its childs - (it can still be the same as e.g. parent.context) - :ivartype context: list[str | JSON_LD_CONTEXT_DICT] - :ivar full_context: The context of this ld_container and all its parents merged into one list. - :ivartype full_context: list[str | JSON_LD_CONTEXT_DICT] - :ivar index: The index into the parent container if it is a list. - :ivartype index: int - :ivar key: The key into the inner most parent that is a dict of this ld_container. - :ivartype key: str - :ivar ld_value: The expanded JSON-LD value this object represents. - :ivartype ld_value: EXPANDED_JSON_LD_VALUE - :ivar parent: The ld_container this one is directly contained in. - :ivartype parent: ld_container - :ivar path: The path from the outer most parent to this ld_container. - :ivartype path: list[str | int] - - :cvar ld_proc: The JSON-LD processor object for all ld_container. - :cvartype ld_proc: JsonLdProcessor + Attributes: + active_ctx: The active context that is used by the json-ld processor. + context (list[str | JSON_LD_CONTEXT_DICT]): The context exclusive to this ld_container and all its childs + (it can still be the same as e.g. parent.context) + index (int): The index into the parent container if it is a list. + key (str): The key into the inner most parent that is a dict of this ld_container. + parent (ld_container): The ld_container this one is directly contained in. + ld_proc (JsonLdProcessor): (class attribute) The JSON-LD processor object for all ld_container. """ ld_proc = JsonLdProcessor() @@ -81,21 +77,15 @@ def __init__( """ Create a new instance of an ld_container. - :param self: The instance of ld_container to be initialized. - :type self: Self - :param data: The expanded json-ld data that is mapped. - :type data: EXPANDED_JSON_LD_VALUE - :param parent: parent node of this container. - :type parent: ld_dict | ld_list | None - :param key: key into the parent container. - :type key: str | None - :param index: index into the parent container. - :type index: int | None - :param context: local context for this container. - :type context: list[str | JSON_LD_CONTEXT_DICT] | None - - :return: - :rtype: None + Args: + data (EXPANDED_JSON_LD_VALUE): The expanded json-ld data that is mapped. + parent (ld_dict | ld_list | None): parent node of this container. + key (str | None): key into the parent container. + index (int | None): index into the parent container. + context (list[str | JSON_LD_CONTEXT_DICT] | None): local context for this container. + + Returns: + None: """ # Store basic data self.parent = parent @@ -113,13 +103,11 @@ def add_context(self: Self, context: list[Union[str | JSON_LD_CONTEXT_DICT]]) -> """ Add the given context to the ld_container. - :param self: The ld_container the context should be added to. - :type self: Self - :param context: The context to be added to self. - :type context: list[str | JSON_LD_CONTEXT_DICT] + Args: + context (list[str | JSON_LD_CONTEXT_DICT]): The context to be added to self. - :return: - :rtype: None + Returns: + None: """ self.context = self.merge_to_list(self.context, context) self.active_ctx = self.ld_proc.process_context(self.active_ctx, context, {"documentLoader": bundled_loader}) @@ -127,14 +115,8 @@ def add_context(self: Self, context: list[Union[str | JSON_LD_CONTEXT_DICT]]) -> @property def full_context(self: Self) -> list[Union[str, JSON_LD_CONTEXT_DICT]]: """ - Return the context of the ld_container merged with the full_context of its parent. - - :param self: The ld_container whose full_context is returned - :type self: Self - - :return: The context of the ld_container merged with the full_context of its parent via - ld_container.merge_to_list or just the context of this ld_container if self.parent is None. - :rtype: list[str | JSON_LD_CONTEXT_DICT] + list[str | JSON_LD_CONTEXT_DICT]: The context of the ld_container merged with the full_context of its parent + via ld_container.merge_to_list or just the context of this ld_container if self.parent is None. """ if self.parent is not None: return self.merge_to_list(self.parent.full_context, self.context) @@ -144,16 +126,10 @@ def full_context(self: Self) -> list[Union[str, JSON_LD_CONTEXT_DICT]]: @property def path(self: Self) -> list[Union[str, int]]: """ - Create a path representation for this item. - - :param self: The ld_container the path leads to from its outer most parent container. - :type self: Self - - :return: The path from selfs outer most parent to it self. + list[str | int]: The path from selfs outer most parent to it self. Let parent be the outer most parent of self. Start with index = 1 and iteratively set parent to parent[path[index]] and then increment index until index == len(path) to get parent is self == true. - :rtype: list[str | int] """ if self.parent: return self.parent.path + [self.key if self.index is None else self.index] @@ -163,16 +139,10 @@ def path(self: Self) -> list[Union[str, int]]: @property def ld_value(self: Self) -> EXPANDED_JSON_LD_VALUE: """ - Return a representation that is suitable as a value in expanded JSON-LD of this ld_container. - - :param self: The ld_container whose expanded JSON-LD representation is returned. - :type self: Self - - :return: The expanded JSON-LD value of this container. + EXPANDED_JSON_LD_VALUE: The expanded JSON-LD value of this container. This value is the basis of all operations and a reference to the original is returned and not a copy. Do **not** modify unless strictly necessary and you know what you do. Otherwise unexpected behavior may occur. - :rtype: EXPANDED_JSON_LD_VALUE """ return self._data @@ -184,16 +154,13 @@ def _to_python( """ Returns a pythonized version of the given value pretending the value is in self and full_iri its key. - :param self: the ld_container ld_value is considered to be in. - :type self: Self - :param full_iri: The expanded iri of the key of ld_value / self (later if self is not a dictionary). - :type full_iri: str - :param ld_value: The value thats pythonized value is requested. ld_value has to be valid expanded JSON-LD if it - was embeded in self._data. - :type ld_value: EXPANDED_JSON_LD_VALUE | dict[str, EXPANDED_JSON_LD_VALUE] | list[str] | str + Args: + full_iri (str): The expanded iri of the key of ld_value / self (later if self is not a dictionary). + ld_value (EXPANDED_JSON_LD_VALUE | dict[str, EXPANDED_JSON_LD_VALUE] | list[str] | str): The value thats + pythonized value is requested. ld_value has to be valid expanded JSON-LD if it were inside self._data. - :return: The pythonized value of the ld_value. - :rtype: ld_dict | ld_list | BASIC_TYPE | TIME_TYPE + Returns: + ld_dict | ld_list | BASIC_TYPE | TIME_TYPE: The pythonized value of the ld_value. """ if full_iri == "@id": # values of key "@id" only have to be compacted @@ -222,19 +189,22 @@ def _to_expanded_json( The result of this function is what value has turned into. - :param self: The ld_dict or ld_list in which value gets expanded - :type self: Self - :param value: The value that is to be expanded. Different types are expected based on the type of self: -
  • If type(self) == ld_dict: value must be a dict
  • -
  • If type(self) == ld_list: value must be a list
- value will be expanded as if it was the data_dict/ the item_list of self. - :type value: JSON_LD_VALUE + Args: + value (JSON_LD_VALUE): The value that is to be expanded. + Different types are expected based on the type of self + + - If type(self) == ld_dict: value must be a dict + - If type(self) == ld_list: value must be a list + + value will be expanded as if it was the data_dict/ the item_list of self. - :return: The expanded version of value i.e. the data_dict/ item_list of self if it had been value. - The return type is based on the type of self: -
  • If type(self) == ld_dict: the returned values type is dict
  • -
  • If type(self) == ld_list: the returned values type is list
- :rtype: EXPANDED_JSON_LD_VALUE | dict[str, EXPANDED_JSON_LD_VALUE] + Returns: + EXPANDED_JSON_LD_VALUE | dict[str, EXPANDED_JSON_LD_VALUE]: + The expanded version of value i.e. the data_dict/ item_list of self if it had been value. + The return type is based on the type of self: + + - If type(self) == ld_dict: the returned values type is dict + - If type(self) == ld_list: the returned values type is list """ # search for an ld_dict that is either self or the inner most parents parent of self that is an ld_dict # while searching build a path such that it leads from the found ld_dicts ld_value to selfs data_dict/ item_list @@ -319,11 +289,8 @@ def __repr__(self: Self) -> str: """ Returns a short string representation of this object. - :param self: The object whose representation is returned. - :type self: Self - - :returns: The short representation of self. - :rtype: str + Returns: + str: The short representation of self. """ return f"{type(self).__name__}({self._data})" @@ -331,11 +298,8 @@ def __str__(self: Self) -> str: """ Returns a string representation of this object. - :param self: The object whose representation is returned. - :type self: Self - - :returns: The representation of self. - :rtype: str + Returns: + (str): The representation of self. """ return str(self.to_python()) @@ -345,13 +309,12 @@ def compact( """ Returns the compacted version of the given ld_container using its context only if none was supplied. - :param self: The ld_container that is to be compacted. - :type self: Self - :param context: The context to use for the compaction. If None the context of self is used. - :type context: list[JSON_LD_CONTEXT_DICT | str] | JSON_LD_CONTEXT_DICT | str | None + Args: + context (list[JSON_LD_CONTEXT_DICT | str] | JSON_LD_CONTEXT_DICT | str | None): + The context to use for the compaction. If None the context of self is used. - :returns: The compacted version of selfs JSON-LD representation. - :rtype: COMPACTED_JSON_LD_VALUE + Returns: + COMPACTED_JSON_LD_VALUE: The compacted version of selfs JSON-LD representation. """ return self.ld_proc.compact( self.ld_value, context or self.context, {"documentLoader": bundled_loader, "skipExpand": True} @@ -365,11 +328,11 @@ def merge_to_list(cls: type[Self], *args: tuple[Any]) -> list[Any]: """ Returns a list that is contains all non-list items from args and all items in the lists in args. - :param args: The items that should be put into one list. - :type args: tuple[Any] + Args: + args (tuple[Any]): The items that should be put into one list. - :return: A list containing all non-list items and all items from lists in args. (Same order as in args) - :rytpe: list[Any] + Returns: + list[Any]: A list containing all non-list items and all items from lists in args. (Same order as in args) """ # base case for recursion if not args: @@ -386,14 +349,14 @@ def merge_to_list(cls: type[Self], *args: tuple[Any]) -> list[Any]: @classmethod def is_ld_node(cls: type[Self], ld_value: Any) -> bool: """ - Returns wheter the given value is considered to be possible of representing an expanded JSON-LD node.
+ Returns wheter the given value is considered to be possible of representing an expanded JSON-LD node. I.e. if ld_value is of the form [{a: b, ..., y: z}]. - :param ld_value: The value that is checked. - :type ld_value: Any + Args: + ld_value (Any): The value that is checked. - :returns: Wheter or not ld_value could represent an expanded JSON-LD node. - :rtype: bool + Returns: + bool: Wheter or not ld_value could represent an expanded JSON-LD node. """ return isinstance(ld_value, list) and len(ld_value) == 1 and isinstance(ld_value[0], dict) @@ -401,28 +364,28 @@ def is_ld_node(cls: type[Self], ld_value: Any) -> bool: def is_ld_id(cls: type[Self], ld_value: Any) -> bool: """ Returns wheter the given value is considered to be possible of representing an expanded JSON-LD node - containing only an @id value.
+ containing only an @id value. I.e. if ld_value is of the form [{"@id": ...}]. - :param ld_value: The value that is checked. - :type ld_value: Any + Args: + ld_value (Any): The value that is checked. - :returns: Wheter or not ld_value could represent an expanded JSON-LD node containing only an @id value. - :rtype: bool + Returns: + bool: Wheter or not ld_value could represent an expanded JSON-LD node containing only an @id value. """ return cls.is_ld_node(ld_value) and cls.is_json_id(ld_value[0]) @classmethod def is_ld_value(cls: type[Self], ld_value: Any) -> bool: """ - Returns wheter the given value is considered to be possible of representing an expanded JSON-LD value.
+ Returns wheter the given value is considered to be possible of representing an expanded JSON-LD value. I.e. if ld_value is of the form [{"@value": a, ..., x: z}]. - :param ld_value: The value that is checked. - :type ld_value: Any + Args: + ld_value (Any): The value that is checked. - :returns: Wheter or not ld_value could represent an expanded JSON-LD value. - :rtype: bool + Returns: + bool: Wheter or not ld_value could represent an expanded JSON-LD value. """ return cls.is_ld_node(ld_value) and "@value" in ld_value[0] @@ -430,14 +393,14 @@ def is_ld_value(cls: type[Self], ld_value: Any) -> bool: def is_typed_ld_value(cls: type[Self], ld_value: Any) -> bool: """ Returns wheter the given value is considered to be possible of representing an expanded JSON-LD value - containing a value type.
+ containing a value type. I.e. if ld_value is of the form [{"@value": a, "@type": b, ..., x: z}]. - :param ld_value: The value that is checked. - :type ld_value: Any + Args: + ld_value (Any): The value that is checked. - :returns: Wheter or not ld_value could represent an expanded JSON-LD value containing a value type. - :rtype: bool + Returns + bool: Wheter or not ld_value could represent an expanded JSON-LD value containing a value type. """ return cls.is_ld_value(ld_value) and "@type" in ld_value[0] @@ -445,28 +408,28 @@ def is_typed_ld_value(cls: type[Self], ld_value: Any) -> bool: def is_json_id(cls: type[Self], ld_value: Any) -> bool: """ Returns wheter the given value is considered to be possible of representing a non-expanded JSON-LD node - containing only an @id value.
+ containing only an @id value. I.e. if ld_value is of the form {"@id": ...}. - :param ld_value: The value that is checked. - :type ld_value: Any + Args: + ld_value (Any): The value that is checked. - :returns: Wheter or not ld_value could represent a non-expanded JSON-LD node containing only an @id value. - :rtype: bool + Returns: + bool: Wheter or not ld_value could represent a non-expanded JSON-LD node containing only an @id value. """ return isinstance(ld_value, dict) and ["@id"] == [*ld_value.keys()] @classmethod def is_json_value(cls: type[Self], ld_value: Any) -> bool: """ - Returns wheter the given value is considered to be possible of representing a non-expanded JSON-LD value.
+ Returns wheter the given value is considered to be possible of representing a non-expanded JSON-LD value. I.e. if ld_value is of the form {"@value": b, ..., x: z}. - :param ld_value: The value that is checked. - :type ld_value: Any + Args: + ld_value (Any): The value that is checked. - :returns: Wheter or not ld_value could represent a non-expanded JSON-LD value. - :rtype: bool + Returns: + bool: Wheter or not ld_value could represent a non-expanded JSON-LD value. """ return isinstance(ld_value, dict) and "@value" in ld_value @@ -474,14 +437,14 @@ def is_json_value(cls: type[Self], ld_value: Any) -> bool: def is_typed_json_value(cls: type[Self], ld_value: Any) -> bool: """ Returns wheter the given value is considered to be possible of representing a non-expanded JSON-LD value - containing a value type.
+ containing a value type. I.e. if ld_value is of the form {"@value": a, "@type": b, ..., x: z}. - :param ld_value: The value that is checked. - :type ld_value: Any + Args: + ld_value (Any): The value that is checked. - :returns: Wheter or not ld_value could represent a non-expanded JSON-LD value containing a value type. - :rtype: bool + Returns: + bool: Wheter or not ld_value could represent a non-expanded JSON-LD value containing a value type. """ return cls.is_json_value(ld_value) and "@type" in ld_value @@ -489,14 +452,14 @@ def is_typed_json_value(cls: type[Self], ld_value: Any) -> bool: def typed_ld_to_py(cls: type[Self], data: list[dict[str, BASIC_TYPE]], **kwargs) -> Union[BASIC_TYPE, TIME_TYPE]: """ Returns the value of the given expanded JSON-LD value containing a value type converted into that type. - Meaning the pythonized version of the JSON-LD value data is returned.
+ Meaning the pythonized version of the JSON-LD value data is returned. ld_container.is_typed_ld_value(data) must return True. - :param data: The value that is that is converted into its pythonized from. - :type data: list[dict[str, BASIC_TYPE]] + Args: + data (list[dict[str, BASIC_TYPE]]): The value that is that is converted into its pythonized from. - :returns: The pythonized version of data. - :rtype: BASIC_TYPE | TIME_TYPE + Returns: + BASIC_TYPE | TIME_TYPE: The pythonized version of data. """ # FIXME: #434 dates are not returned as datetime/ date/ time but as string ld_value = data[0]['@value'] @@ -514,13 +477,12 @@ def are_values_equal( If the comparison is inconclusive i.e. exactly one or zero of both values have an "@id" value: Return whether or not all other keys exist in both values and all values of the keys are the same. - :param first: The first value of the comparison - :type first: dict[str, Union[BASIC_TYPE, TIME_TYPE]] - :param second: The second value of the comparison - :type second: dict[str, Union[BASIC_TYPE, TIME_TYPE]] + Args: + first (dict[str, Union[BASIC_TYPE, TIME_TYPE]]): The first value of the comparison + second (dict[str, Union[BASIC_TYPE, TIME_TYPE]]): The second value of the comparison - :return: Whether the values are considered equal or not. - :rtype: bool + Returns: + bool: Whether the values are considered equal or not. """ # compare @id's if "@id" in first and "@id" in second: From 4f3372cecc9361f7f5a5b9ba315e74ff4a8c1ef6 Mon Sep 17 00:00:00 2001 From: notactuallyfinn Date: Fri, 13 Mar 2026 09:32:30 +0100 Subject: [PATCH 215/247] tweeked documentation of ld_list --- src/hermes/model/types/ld_list.py | 375 ++++++++++++++---------------- 1 file changed, 177 insertions(+), 198 deletions(-) diff --git a/src/hermes/model/types/ld_list.py b/src/hermes/model/types/ld_list.py index 5dd0669f..d2d587b4 100644 --- a/src/hermes/model/types/ld_list.py +++ b/src/hermes/model/types/ld_list.py @@ -7,23 +7,22 @@ from __future__ import annotations -from .ld_container import ld_container from collections import deque - -from typing import TYPE_CHECKING +from collections.abc import Generator, Hashable +from typing import Any, Union, TYPE_CHECKING +from typing_extensions import Self + +from .ld_container import ( + ld_container, + JSON_LD_CONTEXT_DICT, + EXPANDED_JSON_LD_VALUE, + PYTHONIZED_LD_CONTAINER, + JSON_LD_VALUE, + TIME_TYPE, + BASIC_TYPE, +) if TYPE_CHECKING: - from collections.abc import Generator, Hashable from .ld_dict import ld_dict - from .ld_container import ( - JSON_LD_CONTEXT_DICT, - EXPANDED_JSON_LD_VALUE, - PYTHONIZED_LD_CONTAINER, - JSON_LD_VALUE, - TIME_TYPE, - BASIC_TYPE, - ) - from typing import Any, Union - from typing_extensions import Self class ld_list(ld_container): @@ -31,10 +30,10 @@ class ld_list(ld_container): An JSON-LD container resembling a list ("@set", "@list" or "@graph"). See also :class:`ld_container`. - :ivar container_type: The type of JSON-LD container the list is representing. ("@set", "@list", "graph") - :ivartype container_type: str - :ivar item_list: The list of items (in expanded JSON-LD form) that are contained in this ld_list. - :ivartype item_list: EXPANDED_JSON_LD_VALUE + Attributes: + container_type (str): The type of JSON-LD container the list is representing. ("@set", "@list", "graph") + item_list (EXPANDED_JSON_LD_VALUE): The list of items (in expanded JSON-LD form) + that are contained in this ld_list. """ def __init__( @@ -49,28 +48,24 @@ def __init__( """ Create a new instance of an ld_list. - :param self: The instance of ld_list to be initialized. - :type self: ld_list - :param data: The expanded json-ld data that is mapped (must be valid for @set, @list or @graph) - :type data: EXPANDED_JSON_LD_VALUE - :param parent: parent node of this container. - :type parent: ld_dict | ld_list | None - :param key: key into the parent container. - :type key: str | None - :param index: index into the parent container. - :type index: int | None - :param context: local context for this container. - :type context: list[str | JSON_LD_CONTEXT_DICT] | None - - :return: - :rtype: None - - :raises ValueError: If the given key is not a string or None was given. - :raises ValueError: If the given data is not a list. - :raises ValueError: If the data represents an unexpanded @set. I.e. is of the form [{"@set": [...]}] - :raises ValueError: If the given key is "@type" but the container_type not "@set" - or a value in the item_list not a string. - :raises ValueError: If the given key is not "@type" and any value in the item_list not a dict. + Args: + data (EXPANDED_JSON_LD_VALUE): The expanded json-ld data that is mapped + (must be valid for @set, @list or @graph) + parent (ld_dict | ld_list | None): parent node of this container. + key (str | None): key into the parent container. + index (int | None): index into the parent container. + context (list[str | JSON_LD_CONTEXT_DICT] | None): local context for this container. + + Returns: + None: + + Raises: + ValueError: If the given key is not a string or None was given. + ValueError: If the given data is not a list. + ValueError: If the data represents an unexpanded @set. I.e. is of the form [{"@set": [...]}] + ValueError: If the given key is "@type" but the container_type not "@set" + or a value in the item_list not a string. + ValueError: If the given key is not "@type" and any value in the item_list not a dict. """ # check for validity of data if not isinstance(key, str): @@ -105,13 +100,12 @@ def __getitem__( """ Get the item(s) at position index in a pythonized form. - :param self: The ld_list the items are taken from. - :type self: ld_list - :param index: The positon(s) from which the item(s) is/ are taken. - :type index: int | slice + Args: + index (int | slice): The positon(s) from which the item(s) is/ are taken. - :return: The pythonized item(s) at index. - :rtype: BASIC_TYPE | TIME_TYPE | ld_dict | ld_list | list[BASIC_TYPE | TIME_TYPE | ld_dict | ld_list] + Returns: + BASIC_TYPE | TIME_TYPE | ld_dict | ld_list | list[BASIC_TYPE | TIME_TYPE | ld_dict | ld_list]: + The pythonized item(s) at index. """ # handle slices by applying them to a list of indices and then getting the items at those if isinstance(index, slice): @@ -130,15 +124,12 @@ def __setitem__( Set the item(s) at position index to the given value(s). All given values are expanded. If any are assimilated by self all items that would be added by this are added. - :param self: The ld_list the items are set in. - :type self: ld_list - :param index: The positon(s) at which the item(s) is/ are set. - :type index: int | slice - :param value: The new value(s). - :type value: JSON_LD_VALUE | BASIC_TYPE | TIME_TYPE | ld_dict | ld_list + Args: + index (int | slice): The positon(s) at which the item(s) is/ are set. + value (JSON_LD_VALUE | BASIC_TYPE | TIME_TYPE | ld_dict | ld_list): The new value(s). - :return: - :rtype: None + Returns: + None: """ if not isinstance(index, slice): # expand the value @@ -166,13 +157,11 @@ def __delitem__(self: Self, index: Union[int, slice]) -> None: Note that if a deleted object is represented by an ld_container druing this process it will still exist and not be modified afterwards. - :param self: The ld_list the items are deleted from. - :type self: ld_list - :param index: The positon(s) at which the item(s) is/ are deleted. - :type index: int | slice + Args: + index (int | slice): The positon(s) at which the item(s) is/ are deleted. - :return: - :rtype: None + Returns: + None: """ del self.item_list[index] @@ -180,11 +169,8 @@ def __len__(self: Self) -> int: """ Returns the number of items in this ld_list. - :param self: The ld_list whose length is to be returned. - :type self: ld_list - - :return: The length of self. - :rtype: int + Returns: + int: The length of self. """ return len(self.item_list) @@ -192,11 +178,8 @@ def __iter__(self: Self) -> Generator[Union[BASIC_TYPE, TIME_TYPE, ld_dict, ld_l """ Returns an iterator over the pythonized values contained in self. - :param self: The ld_list over whose items is iterated. - :type self: ld_list - - :return: The Iterator over self's values. - :rtype: Generator[BASIC_TYPE | TIME_TYPE | ld_dict | ld_list, None, None] + Returns: + Generator[BASIC_TYPE | TIME_TYPE | ld_dict | ld_list, None, None]: The Iterator over self's values. """ # return an Iterator over each value in self in its pythonized from for index, value in enumerate(self.item_list): @@ -216,13 +199,11 @@ def __contains__(self: Self, value: JSON_LD_VALUE) -> bool: That means that this value is 'contained' in self.item_list if any object in self.item_list has the same @id like it or it xor the object in the item_list has an id an all other values are the same. - :param self: The ld_list that is checked if it contains value. - :type self: ld_list - :param value: The object being checked whether or not it is in self. - :type value: JSON_LD_VALUE + Args: + value (JSON_LD_VALUE): The object being checked whether or not it is in self. - :return: Whether or not value is being considered to be contained in self. - :rtype: bool + Returns: + bool: Whether or not value is being considered to be contained in self. """ # expand value expanded_value = self._to_expanded_json([value]) @@ -249,25 +230,28 @@ def __eq__( dict[str, Union[JSON_LD_VALUE, BASIC_TYPE, TIME_TYPE, ld_dict, ld_list]]] ) -> bool: """ - Returns wheter or not self is considered to be equal to other.
+ Returns wheter or not self is considered to be equal to other. + If other is not an ld_list, it is converted first. For each index it is checked if the ids of the items at index in self and other match if both have one, - if only one has or neither have an id all other values are compared.
+ if only one has or neither have an id all other values are compared. + Note that due to those circumstances equality is not transitve - meaning if a == b and b == c it is not guaranteed that a == c.
+ meaning if a == b and b == c it is not guaranteed that a == c. + If self or other is considered unordered the comparison is more difficult. All items in self are compared with all items in other. On the resulting graph given by the realtion == the Hopcroft-Karp algoritm is used to determine if there exists a bijection reordering self so that the ordered comparison of self with other returns true. - :param self: The ld_list other is compared to. - :type self: ld_list - :param other: The list/ container/ ld_list self is compared to. - :type other: ld_list | list[JSON_LD_VALUE | BASIC_TYPE | TIME_TYPE | ld_dict | ld_list] + Args: + other (ld_list | list[JSON_LD_VALUE | BASIC_TYPE | TIME_TYPE | ld_dict | ld_list]): The list/ container/ + ld_list self is compared to. - :return: Whether or not self and other are considered equal. - If other is of the wrong type return the NotImplemented singleton instead. - :rtype: bool + Returns: + bool: + Whether or not self and other are considered equal. + If other is of the wrong type return the NotImplemented singleton instead. """ # check if other has an acceptable type if not (isinstance(other, (list, ld_list)) or ld_list.is_container(other)): @@ -363,28 +347,29 @@ def _bfs_step( distances: dict[Hashable, Union[int, float]] ) -> bool: """ - Completes the BFS step of Hopcroft-Karp. I.e.:
+ Completes the BFS step of Hopcroft-Karp. I.e.: + Finds the shortest path from all unmatched verticies in verticies1 to any unmatched vertex in any value in edges - where the connecting paths are alternating between matches and its complement.
+ where the connecting paths are alternating between matches and its complement. + It also marks each vertex in verticies1 with how few verticies from verticies1 have to be passed to reach the vertex from an unmatched one in verticies1. This is stored in distances. - :param verticies1: The set of verticies in the left partition of the bipartite graph. - :type verticies1: set[Hashable] - :param edges: The edges in the bipartite graph. (As the edges are bidirectional they are expected to be given in - this format: Dictionary with keys being the vertices in the left partition and values being tuples - of verticies in the right partition.) - :type edges: dict[Hashable, tuple[Hashable]] - :param matches: The current matching of verticies in the left partition with the ones in the right partition. - :type matches: dict[Hashable, Hashable] - :param distances: The reference to the dictionary mapping verticies of the left partition to the minimal - number of verticies in the left partition that will be passed on a path from an unmatched vertex of the left - partition to the vertex that is the key. - :type distances: dict[Hashable, Union[int, float]] - - :returns: Wheter or not a alternating path from an unmatched vertex in the left partition to an unmatched vertex - in the right partition exists. - :rtype: bool + Args: + verticies1 (set[Hashable]): The set of verticies in the left partition of the bipartite graph. + edges (dict[Hashable, tuple[Hashable]]): The edges in the bipartite graph. (As the edges are bidirectional + they are expected to be given in this format: Dictionary with keys being the vertices in the left + partition and values being tuples of verticies in the right partition.) + matches (dict[Hashable, Hashable]): The current matching of verticies in the left partition with the ones in + the right partition. + distances (dict[Hashable, Union[int, float]]): The reference to the dictionary mapping verticies of the left + partition to the minimal number of verticies in the left partition that will be passed on a path from an + unmatched vertex of the left partition to the vertex that is the key. + + Returns: + bool: + Wheter or not a alternating path from an unmatched vertex in the left partition to an unmatched vertex + in the right partition exists. """ # initialize the queue and set the distances to zero for unmatched vertices and to inf for all others queue = deque() @@ -420,27 +405,27 @@ def _dfs_step( distances: dict[Hashable, Union[int, float]] ) -> bool: """ - Completes the DFS step of Hopcroft-Karp. I.e.:
+ Completes the DFS step of Hopcroft-Karp. I.e.: + Adds all edges on every path with the minimal path length to matches if they would be in the symmetric difference of matches and the set of edges on the union of the paths. - :param ver: The set of verticies in the left partition of the bipartite graph. - :type vert: Hashable - :param edges: The edges in the bipartite graph. (As the edges are bidirectional they are expected to be given in - this format: Dictionary with keys being the vertices in the left partition and values being tuples - of verticies in the right partition.) - :type edges: dict[Hashable, tuple[Hashable]] - :param matches: The current matching of verticies in the left partition with the ones in the right partition. - :type matches: dict[Hashable, Hashable] - :param distances: The reference to the dictionary mapping verticies of the left partition to the minimal - number of verticies in the left partition that will be passed on a path from an unmatched vertex of the left - partition to the vertex that is the key. The values will be replaced with float("inf") to mark already - visited vertices. - :type distances: dict[Hashable, Union[int, float]] - - :returns: Wheter or not a path from the unmatched vertex ver in the left partition to an unmatched vertex - in the right partition could still exist. - :rtype: bool + Args: + ver (Hashable): The set of verticies in the left partition of the bipartite graph. + edges (dict[Hashable, tuple[Hashable]]): The edges in the bipartite graph. (As the edges are bidirectional + they are expected to be given in this format: Dictionary with keys being the vertices in the left + partition and values being tuples of verticies in the right partition.) + matches (dict[Hashable, Hashable]): The current matching of verticies in the left partition with the ones in + the right partition. + distances (dict[Hashable, Union[int, float]]): The reference to the dictionary mapping verticies of the left + partition to the minimal number of verticies in the left partition that will be passed on a path from an + unmatched vertex of the left partition to the vertex that is the key. The values will be replaced with + float("inf") to mark already visited vertices. + + Returns: + bool: + Wheter or not a path from the unmatched vertex ver in the left partition to an unmatched vertex + in the right partition could still exist. """ # recursion base case: None always has a shortest possible path to itself if ver is None: @@ -466,22 +451,22 @@ def _hopcroft_karp( edges: dict[Hashable, tuple[Hashable]] ) -> int: """ - Implementation of Hopcroft-Karp. I.e.:
+ Implementation of Hopcroft-Karp. I.e.: + Finds how maximal number of edges with the property that no two edges share an endpoint (and startpoint) - in the given bipartite graph.
+ in the given bipartite graph. + Note that verticies1 and verticies2 have to be disjoint. - :param verticies1: The set of verticies in the left partition of the bipartite graph. - :type verticies1: set[Hashable] - :param verticies2: The set of verticies in the right partition of the bipartite graph. - :type verticies2: set[Hashable] - :param edges: The edges in the bipartite graph. (As the edges are bidirectional they are expected to be given in - this format: Dictionary with keys being the vertices in the left partition and values being tuples - of verticies in the right partition.) - :type edges: dict[Hashable, tuple[Hashable]] + Args: + verticies1 (set[Hashable]): The set of verticies in the left partition of the bipartite graph. + verticies2 (set[Hashable]): The set of verticies in the right partition of the bipartite graph. + edges (dict[Hashable, tuple[Hashable]]): The edges in the bipartite graph. (As the edges are bidirectional + they are expected to be given in this format: Dictionary with keys being the vertices in the left + partition and values being tuples of verticies in the right partition.) - :returns: The number of edges. - :rtype: int + Returns: + int: The number of edges. """ # initializes the first matching. None is a imaginary vertex to denote unmatched vertices. matches = dict() @@ -509,16 +494,16 @@ def __ne__( """ Returns whether or not self and other not considered to be equal. (Returns not self.__eq__(other) if the return type is bool. - See ld_list.__eq__ for more details on the comparison.) + See :meth:`ld_list.__eq__` for more details on the comparison.) - :param self: The ld_list other is compared to. - :type self: ld_list - :param other: The list/ container/ ld_list self is compared to. - :type other: ld_list | list[JSON_LD_VALUE | BASIC_TYPE | TIME_TYPE | ld_dict | ld_list] + Args: + other (ld_list | list[JSON_LD_VALUE | BASIC_TYPE | TIME_TYPE | ld_dict | ld_list]): The list/ container/ + ld_list self is compared to. - :return: Whether or not self and other are not considered equal. - If other is of the wrong type return the NotImplemented singleton instead. - :rtype: bool + Returns: + bool: + Whether or not self and other are not considered equal. + If other is of the wrong type return the NotImplemented singleton instead. """ # compare self and other using __eq__ x = self.__eq__(other) @@ -532,13 +517,11 @@ def append(self: Self, value: Union[JSON_LD_VALUE, BASIC_TYPE, TIME_TYPE, ld_dic Append the item to the given ld_list self. The given value is expanded. If it is assimilated by self all items that would be added by this are added. - :param self: The ld_list the item is appended to. - :type self: ld_list - :param value: The new value. - :type value: JSON_LD_VALUE | BASIC_TYPE | TIME_TYPE | ld_dict | ld_list + Args: + value (JSON_LD_VALUE | BASIC_TYPE | TIME_TYPE | ld_dict | ld_list): The new value. - :return: - :rtype: None + Returns: + None: """ self.item_list.extend(self._to_expanded_json([value])) @@ -547,13 +530,11 @@ def extend(self: Self, value: list[Union[JSON_LD_VALUE, BASIC_TYPE, TIME_TYPE, l Append the items in value to the given ld_list self. The given values are expanded. If any are assimilated by self all items that would be added by this are added. - :param self: The ld_list the items are appended to. - :type self: ld_list - :param value: The new values. - :type value: list[JSON_LD_VALUE | BASIC_TYPE | TIME_TYPE | ld_dcit | ld_list] + Args: + value (list[JSON_LD_VALUE | BASIC_TYPE | TIME_TYPE | ld_dcit | ld_list]): The new values. - :return: - :rtype: None + Returns: + None: """ for item in value: self.append(item) @@ -562,11 +543,8 @@ def to_python(self: Self) -> list[Union[BASIC_TYPE, TIME_TYPE, PYTHONIZED_LD_CON """ Return a fully pythonized version of this object where all ld_container are replaced by lists and dicts. - :param self: The ld_list whose fully pythonized version is returned. - :type self: ld_list - - :return: The fully pythonized version of self. - :rtype: list[BASIC_TYPE | TIME_TYPE | PYTHONIZED_LD_CONTAINER] + Returns: + list[BASIC_TYPE | TIME_TYPE | PYTHONIZED_LD_CONTAINER]: The fully pythonized version of self. """ return [ item.to_python() if isinstance(item, ld_container) else item @@ -576,28 +554,28 @@ def to_python(self: Self) -> list[Union[BASIC_TYPE, TIME_TYPE, PYTHONIZED_LD_CON @classmethod def is_ld_list(cls: type[Self], ld_value: Any) -> bool: """ - Returns wheter the given value is considered to be possible of representing an ld_list.
+ Returns wheter the given value is considered to be possible of representing an ld_list. I.e. if ld_value is of the form [{container_type: [...]}] where container_type is '@set', '@list' or '@graph'. - :param ld_value: The value that is checked. - :type ld_value: Any + Args: + ld_value (Any): The value that is checked. - :returns: Wheter or not ld_value could represent an ld_list. - :rtype: bool + Returns: + bool: Wheter or not ld_value could represent an ld_list. """ return cls.is_ld_node(ld_value) and cls.is_container(ld_value[0]) @classmethod def is_container(cls: type[Self], value: Any) -> bool: """ - Returns wheter the given value is considered to be possible of representing an json-ld container.
+ Returns wheter the given value is considered to be possible of representing an json-ld container. I.e. if ld_value is of the form {container_type: [...]} where container_type is '@set', '@list' or '@graph'. - :param ld_value: The value that is checked. - :type ld_value: Any + Args: + ld_value (Any): The value that is checked. - :returns: Wheter or not ld_value could represent a json-ld container. - :rtype: bool + Returns: + bool: Wheter or not ld_value could represent a json-ld container. """ return ( isinstance(value, dict) @@ -616,30 +594,30 @@ def from_list( container_type: str = "@set" ) -> ld_list: """ - Creates a ld_list from the given list with the given parent, key, context and container_type.
- Note that only container_type '@set' is valid for key '@type'.
+ Creates a ld_list from the given list with the given parent, key, context and container_type. + + Note that only container_type '@set' is valid for key '@type'. + Further more note that if parent would assimilate the values in value no new ld_list is created and the given values are appended to parent instead and parent is returned. - :param value: The list of values the ld_list should be created from. - :type value: list[JSON_LD_VALUE | BASIC_TYPE | TIME_TYPE] - :param parent: The parent container of the new ld_list.
If value is assimilated by parent druing JSON-LD - expansion parent is extended by value and parent is returned. - :type parent: ld_dict | ld_list | None - :param key: The key into the inner most parent container representing a dict of the new ld_list. - :type: key: str | None - :param context: The context for the new list (it will also inherit the context of parent).
- Note that this context won't be added to parent if value is assimilated by parent and parent is returned. - :type context: str | JSON_LD_CONTEXT_DICT | list[str | JSON_LD_CONTEXT_DICT] | None - :param container_type: The container type of the new list valid are '@set', '@list' and '@graph'.
- If value is assimilated by parent and parent is returned the given container_type won't affect - the container type of parent.
Also note that only '@set' is valid if key is '@type'. - :type container_type: str - - :return: The new ld_list build from value or if value is assimilated by parent, parent extended by value. - :rtype: ld_list - - :raises ValueError: If key is '@type' and container_type is not '@set'. + Args: + value (list[JSON_LD_VALUE | BASIC_TYPE | TIME_TYPE]): The list of values the ld_list should be created from. + parent (ld_dict | ld_list | None): The parent container of the new ld_list. If value is assimilated by + parent druing JSON-LD expansion parent is extended by value and parent is returned. + key (str | None): The key into the inner most parent container representing a dict of the new ld_list. + context (str | JSON_LD_CONTEXT_DICT | list[str | JSON_LD_CONTEXT_DICT] | None): The context for the new list + (it will also inherit the context of parent). Note that this context won't be added to parent if value + is assimilated by parent and parent is returned. + container_type (str): The container type of the new list valid are '@set', '@list' and '@graph'. + If value is assimilated by parent and parent is returned the given container_type won't affect + the container type of parent. Also note that only '@set' is valid if key is '@type'. + + Returns: + ld_list: The new ld_list build from value or if value is assimilated by parent, parent extended by value. + + Raises: + ValueError: If key is '@type' and container_type is not '@set'. """ # TODO: handle context if not of type list or None # validate container_type @@ -678,18 +656,19 @@ def from_list( @classmethod def get_item_list_from_container(cls: type[Self], ld_value: dict[str, list[Any]]) -> list[Any]: """ - Returns the item list from a container, the given ld_value, (i.e. {container_type: item_list}).
+ Returns the item list from a container, the given ld_value, (i.e. {container_type: item_list}). Only '@set', '@list' and '@graph' are valid container types. - :param ld_value: The container whose item list is to be returned. - :type ld_value: dict[str, list[Any]] + Args: + ld_value (dict[str, list[Any]]): The container whose item list is to be returned. - :returns: The list the container holds. - :rtype: list[Any] + Returns: + list[Any]: The list the container holds. - :raises ValueError: If the item_container is not a dict. - :raises ValueError: If the container_type is not exactly one of '@set', '@list' and '@graph'. - :raises ValueError: If the item_list is no list. + Raises: + ValueError: If the item_container is not a dict. + ValueError: If the container_type is not exactly one of '@set', '@list' and '@graph'. + ValueError: If the item_list is no list. """ if type(ld_value) != dict: raise ValueError(f"The given data {ld_value} is not a dictionary and therefor no container.") From 45459fc4e0d0b1d545d7035dbc47b376ef2e2cbc Mon Sep 17 00:00:00 2001 From: notactuallyfinn Date: Fri, 13 Mar 2026 10:22:34 +0100 Subject: [PATCH 216/247] improved comments for ld_dict --- src/hermes/model/types/ld_container.py | 28 ++- src/hermes/model/types/ld_context.py | 38 ++-- src/hermes/model/types/ld_dict.py | 243 +++++++++++-------------- src/hermes/model/types/ld_list.py | 52 +++--- 4 files changed, 167 insertions(+), 194 deletions(-) diff --git a/src/hermes/model/types/ld_container.py b/src/hermes/model/types/ld_container.py index 474da7bb..580d403a 100644 --- a/src/hermes/model/types/ld_container.py +++ b/src/hermes/model/types/ld_container.py @@ -47,8 +47,7 @@ class ld_container: """ - Base class for Linked Data containers. - + Base class for Linked Data containers.\n A linked data container impelements a view on the expanded form of an JSON-LD document. It allows to easily interacts them by hinding all the nesting and automatically mapping between different forms. @@ -178,15 +177,13 @@ def _to_expanded_json( self: Self, value: JSON_LD_VALUE ) -> Union[EXPANDED_JSON_LD_VALUE, dict[str, EXPANDED_JSON_LD_VALUE]]: """ - Returns an expanded version of the given value. - + Returns an expanded version of the given value.\n The item_list/ data_dict of self will be substituted with value. Value can be an ld_container or contain zero or more. Then the _data of the inner most ld_dict that contains or is self will be expanded using the JSON_LD-Processor. If self and none of self's parents is an ld_dict, use the key from outer most ld_list - to generate a minimal dict. - + to generate a minimal dict.\n The result of this function is what value has turned into. Args: @@ -199,7 +196,7 @@ def _to_expanded_json( value will be expanded as if it was the data_dict/ the item_list of self. Returns: - EXPANDED_JSON_LD_VALUE | dict[str, EXPANDED_JSON_LD_VALUE]: + EXPANDED_JSON_LD_VALUE | dict[str, EXPANDED_JSON_LD_VALUE]: The expanded version of value i.e. the data_dict/ item_list of self if it had been value. The return type is based on the type of self: @@ -349,7 +346,7 @@ def merge_to_list(cls: type[Self], *args: tuple[Any]) -> list[Any]: @classmethod def is_ld_node(cls: type[Self], ld_value: Any) -> bool: """ - Returns wheter the given value is considered to be possible of representing an expanded JSON-LD node. + Returns wheter the given value is considered to be possible of representing an expanded JSON-LD node.\n I.e. if ld_value is of the form [{a: b, ..., y: z}]. Args: @@ -364,7 +361,7 @@ def is_ld_node(cls: type[Self], ld_value: Any) -> bool: def is_ld_id(cls: type[Self], ld_value: Any) -> bool: """ Returns wheter the given value is considered to be possible of representing an expanded JSON-LD node - containing only an @id value. + containing only an @id value.\n I.e. if ld_value is of the form [{"@id": ...}]. Args: @@ -378,7 +375,7 @@ def is_ld_id(cls: type[Self], ld_value: Any) -> bool: @classmethod def is_ld_value(cls: type[Self], ld_value: Any) -> bool: """ - Returns wheter the given value is considered to be possible of representing an expanded JSON-LD value. + Returns wheter the given value is considered to be possible of representing an expanded JSON-LD value.\n I.e. if ld_value is of the form [{"@value": a, ..., x: z}]. Args: @@ -393,7 +390,7 @@ def is_ld_value(cls: type[Self], ld_value: Any) -> bool: def is_typed_ld_value(cls: type[Self], ld_value: Any) -> bool: """ Returns wheter the given value is considered to be possible of representing an expanded JSON-LD value - containing a value type. + containing a value type.\n I.e. if ld_value is of the form [{"@value": a, "@type": b, ..., x: z}]. Args: @@ -408,7 +405,7 @@ def is_typed_ld_value(cls: type[Self], ld_value: Any) -> bool: def is_json_id(cls: type[Self], ld_value: Any) -> bool: """ Returns wheter the given value is considered to be possible of representing a non-expanded JSON-LD node - containing only an @id value. + containing only an @id value.\n I.e. if ld_value is of the form {"@id": ...}. Args: @@ -422,7 +419,7 @@ def is_json_id(cls: type[Self], ld_value: Any) -> bool: @classmethod def is_json_value(cls: type[Self], ld_value: Any) -> bool: """ - Returns wheter the given value is considered to be possible of representing a non-expanded JSON-LD value. + Returns wheter the given value is considered to be possible of representing a non-expanded JSON-LD value.\n I.e. if ld_value is of the form {"@value": b, ..., x: z}. Args: @@ -437,7 +434,7 @@ def is_json_value(cls: type[Self], ld_value: Any) -> bool: def is_typed_json_value(cls: type[Self], ld_value: Any) -> bool: """ Returns wheter the given value is considered to be possible of representing a non-expanded JSON-LD value - containing a value type. + containing a value type.\n I.e. if ld_value is of the form {"@value": a, "@type": b, ..., x: z}. Args: @@ -472,8 +469,7 @@ def are_values_equal( ) -> bool: """ Returns whether or not the given expanded JSON-LD values are considered equal. - The comparison compares the "@id" values first and returns the result if it is conclusive. - + The comparison compares the "@id" values first and returns the result if it is conclusive.\n If the comparison is inconclusive i.e. exactly one or zero of both values have an "@id" value: Return whether or not all other keys exist in both values and all values of the keys are the same. diff --git a/src/hermes/model/types/ld_context.py b/src/hermes/model/types/ld_context.py index ce2a09b1..566c7129 100644 --- a/src/hermes/model/types/ld_context.py +++ b/src/hermes/model/types/ld_context.py @@ -47,6 +47,10 @@ class ContextPrefix: - as a list of linked data vocabularies, where items can be vocabulary base IRI strings and/or dictionaries mapping arbitrary strings used to prefix terms from a specific vocabulary to their respective vocabulary IRI strings.; - as a dict mapping prefixes to vocabulary IRIs, where the default vocabulary has a prefix of None. + + Attributes: + vocabularies (list[str | dict]): TODO + context: TODO """ def __init__(self, vocabularies: list[str | dict]): @@ -55,9 +59,13 @@ def __init__(self, vocabularies: list[str | dict]): string is used more than once across all dictionaries in the list, the last item with this key will be included in the context. - :param vocabularies: A list of linked data vocabularies. Items can be vocabulary base IRI strings and/or - dictionaries mapping arbitrary strings used to prefix terms from a specific vocabulary to their respective - vocabulary IRI strings. + Args: + vocabularies (list[str | dict]): A list of linked data vocabularies. Items can be vocabulary base IRI + strings and/or dictionaries mapping arbitrary strings used to prefix terms from a specific vocabulary to + their respective vocabulary IRI strings. + + Returns: + None: """ self.vocabularies = vocabularies self.context = {} @@ -81,20 +89,22 @@ def __getitem__(self, compressed_term: str | tuple) -> str: Example uses: - context = (["iri_default", {"prefix1": "iri1"}]) - # access qualified term via str - term = context["term_in_default_vocabulary"] - term = context["prefix1:term"] - # access qualified term via tuple - term = context["prefix1", "term"] + context = (["iri_default", {"prefix1": "iri1"}])\n + # access qualified term via str\n + term = context["term_in_default_vocabulary"]\n + term = context["prefix1:term"]\n + # access qualified term via tuple\n + term = context["prefix1", "term"]\n term = context[None, "term_in_default_vocabulary"] - :param compressed_term: A term from a vocabulary in the context; terms from the default vocabulary are passed - with a prefix of None, or as an unprefixed string, terms from non-default vocabularies are prefixed with the - defined prefix for the vocabulary. The term can either be passed in as string if prefix is None, or - ":", or as a tuple. + Args: + compressed_term (str | tuple): A term from a vocabulary in the context; terms from the default vocabulary + are passed with a prefix of None, or as an unprefixed string, terms from non-default vocabularies are + prefixed with the defined prefix for the vocabulary. The term can either be passed in as string + if prefix is None, or ":", or as a tuple. - :return: The fully qualified IRI for the passed term + Returns: + str: The fully qualified IRI for the passed term """ if not isinstance(compressed_term, str): prefix, term = compressed_term diff --git a/src/hermes/model/types/ld_dict.py b/src/hermes/model/types/ld_dict.py index 5a673c9a..5bdc5bba 100644 --- a/src/hermes/model/types/ld_dict.py +++ b/src/hermes/model/types/ld_dict.py @@ -7,23 +7,22 @@ from __future__ import annotations +from collections.abc import Generator, Iterator, KeysView +from typing import Any, Literal, Union, TYPE_CHECKING +from typing_extensions import Self + +from .ld_container import ( + ld_container, + JSON_LD_CONTEXT_DICT, + EXPANDED_JSON_LD_VALUE, + PYTHONIZED_LD_CONTAINER, + JSON_LD_VALUE, + TIME_TYPE, + BASIC_TYPE, +) from .pyld_util import bundled_loader -from .ld_container import ld_container - -from typing import TYPE_CHECKING if TYPE_CHECKING: - from collections.abc import Generator, Iterator, KeysView - from .ld_container import ( - JSON_LD_CONTEXT_DICT, - EXPANDED_JSON_LD_VALUE, - PYTHONIZED_LD_CONTAINER, - JSON_LD_VALUE, - TIME_TYPE, - BASIC_TYPE, - ) from .ld_list import ld_list - from typing import Any, Union, Literal - from typing_extensions import Self class ld_dict(ld_container): @@ -31,11 +30,10 @@ class ld_dict(ld_container): An JSON-LD container resembling a dict. See also :class:`ld_container` - :ivar ref: A dict used to reference this object by its id. (Its form is {"@id": ...}) - :ivartype ref: dict[Literal["@id"], str] - - :cvar container_type: A type used as a placeholder to represent "no default". - :cvartype container_type: type[str] + Attributes: + data_dict (dict[str, EXPANDED_JSON_LD_VALUE]): The dict of items (in expanded JSON-LD form) + that are contained in this ld_dict. + _NO_DEFAULT (type[str]): (class attribute) A type used as a placeholder to represent "no default". """ _NO_DEFAULT = type("NO DEFAULT") @@ -51,23 +49,18 @@ def __init__( """ Create a new instance of an ld_dict. - :param self: The instance of ld_container to be initialized. - :type self: Self - :param data: The expanded json-ld data that is mapped. - :type data: EXPANDED_JSON_LD_VALUE - :param parent: parent node of this container. - :type parent: ld_dict | ld_list | None - :param key: key into the parent container. - :type key: str | None - :param index: index into the parent container. - :type index: int | None - :param context: local context for this container. - :type context: list[str | JSON_LD_CONTEXT_DICT] | None + Args: + data (EXPANDED_JSON_LD_VALUE): The expanded json-ld data that is mapped. + parent (ld_dict | ld_list | None): parent node of this container. + key (str | None): key into the parent container. + index (int | None): index into the parent container. + context (list[str | JSON_LD_CONTEXT_DICT] | None): local context for this container. - :return: - :rtype: None + Returns: + None: - :raises ValueError: If the given data doesn't represent an ld_dict. + Raises: + ValueError: If the given data doesn't represent an ld_dict. """ # check for validity of data if not self.is_ld_dict(data): @@ -78,16 +71,14 @@ def __init__( def __getitem__(self: Self, key: str) -> ld_list: """ - Get the item with the given key in a pythonized form. + Get the item with the given key in a pythonized form.\n If self contains no key, value pair with the given key, then an empty list is added as its value and returned. - :param self: The ld_dict the item is taken from. - :type self: ld_dict - :param key: The key (compacted or expanded) to the item. - :type key: str + Args: + key (str): The key (compacted or expanded) to the item. - :return: The pythonized item at the key. - :rtype: ld_list + Returns: + ld_list: The pythonized item at the key. """ full_iri = self.ld_proc.expand_iri(self.active_ctx, key) return self._to_python(full_iri, self.data_dict[full_iri]) @@ -97,15 +88,12 @@ def __setitem__(self: Self, key: str, value: Union[JSON_LD_VALUE, BASIC_TYPE, TI Set the item at the given key to the given value or delete it if value is None. The given value is expanded. - :param self: The ld_dict the item is set in. - :type self: ld_dict - :param key: The key at which the item is set. - :type key: str - :param value: The new value. - :type value: JSON_LD_VALUE | BASIC_TYPE | TIME_TYPE | ld_dict | ld_list + Args: + key (str): The key at which the item is set. + value (JSON_LD_VALUE | BASIC_TYPE | TIME_TYPE | ld_dict | ld_list): The new value. - :return: - :rtype: None + Returns: + None: """ # if the value is None delete the entry instead of updating it, but make sure it exists before deleting if value is None and key not in self: @@ -119,17 +107,15 @@ def __setitem__(self: Self, key: str, value: Union[JSON_LD_VALUE, BASIC_TYPE, TI def __delitem__(self: Self, key: str) -> None: """ - Delete the key, value pair with the given value pair. + Delete the key, value pair with the given value pair.\n Note that if a deleted object is represented by an ld_container druing this process it will still exist and not be modified afterwards. - :param self: The ld_dict the key, value pair is deleted from. - :type self: ld_dict - :param key: The key (expanded or compacted) of the key, value pair that is deleted. - :type key: str + Args: + key (str): The key (expanded or compacted) of the key, value pair that is deleted. - :return: - :rtype: None + Returns: + None: """ # expand key and delete the key, value pair full_iri = self.ld_proc.expand_iri(self.active_ctx, key) @@ -139,13 +125,11 @@ def __contains__(self: Self, key: str) -> bool: """ Returns whether or not self contains a key, value pair with the given key. - :param self: The ld_dict that is checked if it a key, value pair with the given key. - :type self: ld_dict - :param key: The key for which it is checked if a key, value pair is contained in self. - :type key: str + Args: + key (str): The key for which it is checked if a key, value pair is contained in self. - :return: Whether or not self contains a key, value pair with the given key. - :rtype: bool + Returns: + bool: Whether or not self contains a key, value pair with the given key. """ # expand the key and check if self contains a key, value pair with it full_iri = self.ld_proc.expand_iri(self.active_ctx, key) @@ -156,21 +140,21 @@ def __eq__( self: Self, other: Union[ld_dict, dict[str, Union[JSON_LD_VALUE, BASIC_TYPE, TIME_TYPE, ld_dict, ld_list]]] ) -> bool: """ - Returns wheter or not self is considered to be equal to other.
- If other is not an ld_dict, it is converted first. - If an id check is possible return its result otherwise: + Returns wheter or not self is considered to be equal to other.\n + If other is not an ld_dict, it is converted first.\n + If an id check is possible return its result otherwise:\n For each key, value pair its value is compared to the value with the same key in other. + Note that due to those circumstances equality is not transitve - meaning if a == b and b == c it is not guaranteed that a == c.
+ meaning if a == b and b == c it is not guaranteed that a == c. - :param self: The ld_dict other is compared to. - :type self: ld_dict - :param other: The dict/ ld_dict self is compared to. - :type other: ld_dict | dict[str, JSON_LD_VALUE | BASIC_TYPE | TIME_TYPE | ld_dict | ld_list] + Args: + other (ld_dict | dict[str, JSON_LD_VALUE | BASIC_TYPE | TIME_TYPE | ld_dict | ld_list]): + The dict/ ld_dict self is compared to. - :return: Whether or not self and other are considered equal. - If other is of the wrong type return the NotImplemented singleton instead. - :rtype: bool + Returns: + bool: Whether or not self and other are considered equal. + If other is of the wrong type return the NotImplemented singleton instead. """ # check if other has an acceptable type if not isinstance(other, (dict, ld_dict)): @@ -211,18 +195,18 @@ def __ne__( self: Self, other: Union[ld_dict, dict[str, Union[JSON_LD_VALUE, BASIC_TYPE, TIME_TYPE, ld_dict, ld_list]]] ) -> bool: """ - Returns whether or not self and other not considered to be equal. + Returns whether or not self and other not considered to be equal.\n (Returns not self.__eq__(other) if the return type is bool. - See ld_list.__eq__ for more details on the comparison.) + See :meth:`ld_dict.__eq__` for more details on the comparison.) - :param self: The ld_dict other is compared to. - :type self: ld_dict - :param other: The dict/ ld_dict self is compared to. - :type other: ld_dict | dict[str, JSON_LD_VALUE | BASIC_TYPE | TIME_TYPE | ld_dict | ld_list] + Args: + other (ld_dict | dict[str, JSON_LD_VALUE | BASIC_TYPE | TIME_TYPE | ld_dict | ld_list]): + The dict/ ld_dict self is compared to. - :return: Whether or not self and other are not considered equal. - If other is of the wrong type return the NotImplemented singleton instead. - :rtype: bool + Returns: + bool: + Whether or not self and other are not considered equal. If other is of the wrong type return the + NotImplemented singleton instead. """ # compare self and other using __eq__ x = self.__eq__(other) @@ -247,18 +231,17 @@ def get( self: Self, key: str, default: Any = _NO_DEFAULT ) -> Union[ld_list, Any]: """ - Get the item with the given key in a pythonized form using the build in get. + Get the item with the given key in a pythonized form using the build in get.\n If a KeyError is raised, return the default or reraise it if no default is given. - :param self: The ld_dict the item is taken from. - :type self: ld_dict - :param key: The key (compacted or expanded) to the item. - :type key: str + Args: + key (str): The key (compacted or expanded) to the item. - :return: The pythonized item at the key. - :rtype: ld_list + Returns: + ld_list: The pythonized item at the key. - :raises KeyError: If the build in get raised a KeyError. + Raises: + KeyError: If :meth:`__getitem__(key)` raised a KeyError and default isn't set. """ try: return self[key] @@ -274,13 +257,12 @@ def update( """ Set the items at the given keys to the given values or delete it if value is None by using build in set. - :param self: The ld_dict the items are set in. - :type self: ld_dict - :param other: The key, value pairs giving the new values and their keys. - :type other: ld_dict | dict[str, JSON_LD_VALUE | BASIC_TYPE | TIME_TYPE | ld_dict | ld_list] + Args: + other (ld_dict | dict[str, JSON_LD_VALUE | BASIC_TYPE | TIME_TYPE | ld_dict | ld_list]): + The key, value pairs giving the new values and their keys. - :return: - :rtype: None + Returns: + None: """ for key, value in other.items(): self[key] = value @@ -289,8 +271,8 @@ def keys(self: Self) -> KeysView[str]: """ Return the keys of the key, value pairs of self. - :param self: The ld_dict whose keys are returned. - :type self: ld_dict + Returns: + KeysView[str]: The keys of the values in self. """ return self.data_dict.keys() @@ -298,8 +280,8 @@ def compact_keys(self: Self) -> Iterator[str]: """ Return an iterator of the compacted keys of the key, value pairs of self. - :param self: The ld_dict whose compacted keys are returned. - :type self: ld_dict + Returns: + Iterator[str]: An iterator over the compacted keys in self. """ return map( lambda k: self.ld_proc.compact_iri(self.active_ctx, k), @@ -310,8 +292,8 @@ def items(self: Self) -> Generator[tuple[str, ld_list], None, None]: """ Return an generator of tuples of keys and their values in self. - :param self: The ld_dict whose items are returned. - :type self: ld_dict + Returns: + Generator[tuple[str, ld_list], None, None]: A Generator over all key, value pairs in self. """ for k in self.data_dict.keys(): yield k, self[k] @@ -321,10 +303,11 @@ def ref(self: Self) -> dict[Literal["@id"], str]: """ Return the dict used to reference this object by its id. (Its form is {"@id": ...}) - :param self: The ld_dict whose reference is returned. - :type self: ld_dict + Returns: + dict[Literal["@id"], str]: The minimal JSON_LD object referencing self. - :raises KeyError: If self has no id. + Raises: + KeyError: If self has no value for "@id". """ return {"@id": self.data_dict['@id']} @@ -332,11 +315,8 @@ def to_python(self: Self) -> dict[str, Union[BASIC_TYPE, TIME_TYPE, PYTHONIZED_L """ Return a fully pythonized version of this object where all ld_container are replaced by lists and dicts. - :param self: The ld_dict whose fully pythonized version is returned. - :type self: ld_dict - - :return: The fully pythonized version of self. - :rtype: dict[str, BASIC_TYPE | TIME_TYPE | PYTHONIZED_LD_CONTAINER] + Returns: + dict[str, BASIC_TYPE | TIME_TYPE | PYTHONIZED_LD_CONTAINER]: The fully pythonized version of self. """ res = {} for key in self.compact_keys(): @@ -358,22 +338,19 @@ def from_dict( ld_type: Union[str, list[str], None] = None ) -> ld_dict: """ - Creates a ld_dict from the given dict with the given parent, key, context and ld_type.
+ Creates a ld_dict from the given dict with the given parent, key, context and ld_type.\n Uses the expansion of the JSON-LD Processor and not the one of ld_container. - :param value: The dict of values the ld_dict should be created from. - :type value: dict[str, PYTHONIZED_LD_CONTAINER] - :param parent: The parent container of the new ld_list. - :type parent: ld_dict | ld_list | None - :param key: The key into the inner most parent container representing a dict of the new ld_list. - :type: key: str | None - :param context: The context for the new dict (it will also inherit the context of parent). - :type context: str | JSON_LD_CONTEXT_DICT | list[str | JSON_LD_CONTEXT_DICT] | None - :param ld_type: Additional value(s) for the new dict. - :type ld_type: str | list[str] | None + Args: + value (dict[str, PYTHONIZED_LD_CONTAINER]): The dict of values the ld_dict should be created from. + parent (ld_dict | ld_list | None): The parent container of the new ld_list. + key (str | None): The key into the inner most parent container representing a dict of the new ld_list. + context (str | JSON_LD_CONTEXT_DICT | list[str | JSON_LD_CONTEXT_DICT] | None): + The context for the new dict (it will also inherit the context of parent). + ld_type (str | list[str] | None): Additional value(s) for the new dict. - :return: The new ld_dict build from value. - :rtype: ld_dict + Returns: + ld_dict: The new ld_dict build from value. """ # make a copy of value and add the new type to it. ld_data = value.copy() @@ -399,30 +376,30 @@ def from_dict( @classmethod def is_ld_dict(cls: type[Self], ld_value: Any) -> bool: """ - Returns wheter the given value is considered to be possible of representing an expanded json-ld dict.
+ Returns wheter the given value is considered to be possible of representing an expanded json-ld dict.\n I.e. if ld_value is a list containing a dict containing none of the keys "@set", "@graph", "@list" and "@value" and not only the key "@id". - :param ld_value: The value that is checked. - :type ld_value: Any + Args: + ld_value (Any): The value that is checked. - :returns: Wheter or not ld_value could represent an expanded json-ld dict. - :rtype: bool + Returns: + bool: Wheter or not ld_value could represent an expanded json-ld dict. """ return cls.is_ld_node(ld_value) and cls.is_json_dict(ld_value[0]) @classmethod def is_json_dict(cls: type[Self], ld_value: Any) -> bool: """ - Returns wheter the given value is considered to be possible of representing an expanded json-ld dict.
+ Returns wheter the given value is considered to be possible of representing an expanded json-ld dict.\n I.e. if ld_value is a dict containing none of the keys "@set", "@graph", "@list" and "@value" and not only the key "@id". - :param ld_value: The value that is checked. - :type ld_value: Any + Args: + ld_value (Any): The value that is checked. - :returns: Wheter or not ld_value could represent an expanded json-ld dict. - :rtype: bool + Returns: + bool: Wheter or not ld_value could represent an expanded json-ld dict. """ if not isinstance(ld_value, dict): return False diff --git a/src/hermes/model/types/ld_list.py b/src/hermes/model/types/ld_list.py index d2d587b4..bcc1db15 100644 --- a/src/hermes/model/types/ld_list.py +++ b/src/hermes/model/types/ld_list.py @@ -121,7 +121,7 @@ def __setitem__( self: Self, index: Union[int, slice], value: Union[JSON_LD_VALUE, BASIC_TYPE, TIME_TYPE, ld_dict, ld_list] ) -> None: """ - Set the item(s) at position index to the given value(s). + Set the item(s) at position index to the given value(s).\n All given values are expanded. If any are assimilated by self all items that would be added by this are added. Args: @@ -153,7 +153,7 @@ def __setitem__( def __delitem__(self: Self, index: Union[int, slice]) -> None: """ - Delete the item(s) at position index. + Delete the item(s) at position index.\n Note that if a deleted object is represented by an ld_container druing this process it will still exist and not be modified afterwards. @@ -191,10 +191,10 @@ def __iter__(self: Self) -> Generator[Union[BASIC_TYPE, TIME_TYPE, ld_dict, ld_l def __contains__(self: Self, value: JSON_LD_VALUE) -> bool: """ - Returns whether or not value is contained in self. - Note that it is not directly checked if value is in self.item_list. + Returns whether or not value is contained in self.\n + Note that it is not directly checked if value is in self.item_list: First value is expanded then it is checked if value is in self.item_list. - If however value is assimilated by self it is checked if all values are contained in self.item_list. + If however value is assimilated by self it is checked if all values are contained in self.item_list.\n Also note that the checks whether the expanded value is in self.item_list is based on ld_list.__eq__. That means that this value is 'contained' in self.item_list if any object in self.item_list has the same @id like it or it xor the object in the item_list has an id an all other values are the same. @@ -230,15 +230,12 @@ def __eq__( dict[str, Union[JSON_LD_VALUE, BASIC_TYPE, TIME_TYPE, ld_dict, ld_list]]] ) -> bool: """ - Returns wheter or not self is considered to be equal to other. - + Returns wheter or not self is considered to be equal to other.\n If other is not an ld_list, it is converted first. For each index it is checked if the ids of the items at index in self and other match if both have one, - if only one has or neither have an id all other values are compared. - + if only one has or neither have an id all other values are compared.\n Note that due to those circumstances equality is not transitve - meaning if a == b and b == c it is not guaranteed that a == c. - + meaning if a == b and b == c it is not guaranteed that a == c.\n If self or other is considered unordered the comparison is more difficult. All items in self are compared with all items in other. On the resulting graph given by the realtion == the Hopcroft-Karp algoritm is used to determine if there exists a bijection reordering self so that the ordered comparison of self with other @@ -250,7 +247,7 @@ def __eq__( Returns: bool: - Whether or not self and other are considered equal. + Whether or not self and other are considered equal.\n If other is of the wrong type return the NotImplemented singleton instead. """ # check if other has an acceptable type @@ -347,11 +344,9 @@ def _bfs_step( distances: dict[Hashable, Union[int, float]] ) -> bool: """ - Completes the BFS step of Hopcroft-Karp. I.e.: - + Completes the BFS step of Hopcroft-Karp. I.e.:\n Finds the shortest path from all unmatched verticies in verticies1 to any unmatched vertex in any value in edges - where the connecting paths are alternating between matches and its complement. - + where the connecting paths are alternating between matches and its complement.\n It also marks each vertex in verticies1 with how few verticies from verticies1 have to be passed to reach the vertex from an unmatched one in verticies1. This is stored in distances. @@ -405,8 +400,7 @@ def _dfs_step( distances: dict[Hashable, Union[int, float]] ) -> bool: """ - Completes the DFS step of Hopcroft-Karp. I.e.: - + Completes the DFS step of Hopcroft-Karp. I.e.:\n Adds all edges on every path with the minimal path length to matches if they would be in the symmetric difference of matches and the set of edges on the union of the paths. @@ -451,11 +445,9 @@ def _hopcroft_karp( edges: dict[Hashable, tuple[Hashable]] ) -> int: """ - Implementation of Hopcroft-Karp. I.e.: - + Implementation of Hopcroft-Karp. I.e.:\n Finds how maximal number of edges with the property that no two edges share an endpoint (and startpoint) - in the given bipartite graph. - + in the given bipartite graph.\n Note that verticies1 and verticies2 have to be disjoint. Args: @@ -492,7 +484,7 @@ def __ne__( self: Self, other: Union[ld_list, list[Union[JSON_LD_VALUE, BASIC_TYPE, TIME_TYPE, ld_dict, ld_list]]] ) -> bool: """ - Returns whether or not self and other not considered to be equal. + Returns whether or not self and other not considered to be equal.\n (Returns not self.__eq__(other) if the return type is bool. See :meth:`ld_list.__eq__` for more details on the comparison.) @@ -514,7 +506,7 @@ def __ne__( def append(self: Self, value: Union[JSON_LD_VALUE, BASIC_TYPE, TIME_TYPE, ld_dict, ld_list]) -> None: """ - Append the item to the given ld_list self. + Append the item to the given ld_list self.\n The given value is expanded. If it is assimilated by self all items that would be added by this are added. Args: @@ -527,7 +519,7 @@ def append(self: Self, value: Union[JSON_LD_VALUE, BASIC_TYPE, TIME_TYPE, ld_dic def extend(self: Self, value: list[Union[JSON_LD_VALUE, BASIC_TYPE, TIME_TYPE, ld_dict, ld_list]]) -> None: """ - Append the items in value to the given ld_list self. + Append the items in value to the given ld_list self.\n The given values are expanded. If any are assimilated by self all items that would be added by this are added. Args: @@ -554,7 +546,7 @@ def to_python(self: Self) -> list[Union[BASIC_TYPE, TIME_TYPE, PYTHONIZED_LD_CON @classmethod def is_ld_list(cls: type[Self], ld_value: Any) -> bool: """ - Returns wheter the given value is considered to be possible of representing an ld_list. + Returns wheter the given value is considered to be possible of representing an ld_list.\n I.e. if ld_value is of the form [{container_type: [...]}] where container_type is '@set', '@list' or '@graph'. Args: @@ -568,7 +560,7 @@ def is_ld_list(cls: type[Self], ld_value: Any) -> bool: @classmethod def is_container(cls: type[Self], value: Any) -> bool: """ - Returns wheter the given value is considered to be possible of representing an json-ld container. + Returns wheter the given value is considered to be possible of representing an json-ld container.\n I.e. if ld_value is of the form {container_type: [...]} where container_type is '@set', '@list' or '@graph'. Args: @@ -594,10 +586,8 @@ def from_list( container_type: str = "@set" ) -> ld_list: """ - Creates a ld_list from the given list with the given parent, key, context and container_type. - - Note that only container_type '@set' is valid for key '@type'. - + Creates a ld_list from the given list with the given parent, key, context and container_type.\n + Note that only container_type '@set' is valid for key '@type'.\n Further more note that if parent would assimilate the values in value no new ld_list is created and the given values are appended to parent instead and parent is returned. From 8afb37b6242bd740baecf7e132dcc5eb0119ca71 Mon Sep 17 00:00:00 2001 From: notactuallyfinn Date: Fri, 13 Mar 2026 10:57:04 +0100 Subject: [PATCH 217/247] split test_api_e2e.py into multiple files --- .../commands/curate/test_do_nothing_curate.py | 113 +++ .../commands/deposit/test_file_deposit.py | 52 ++ .../commands/deposit/test_invenio.py | 2 - .../commands/deposit/test_invenio_e2e.py | 97 +++ .../commands/harvest/test_harvest_cff.py | 231 +++++ .../commands/harvest/test_harvest_codemeta.py | 171 ++++ .../postprocess/test_invenio_postprocess.py | 66 ++ .../commands/process/test_process.py | 189 ++++ test/hermes_test/model/test_api_e2e.py | 815 ------------------ 9 files changed, 919 insertions(+), 817 deletions(-) create mode 100644 test/hermes_test/commands/curate/test_do_nothing_curate.py create mode 100644 test/hermes_test/commands/deposit/test_file_deposit.py create mode 100644 test/hermes_test/commands/deposit/test_invenio_e2e.py create mode 100644 test/hermes_test/commands/harvest/test_harvest_cff.py create mode 100644 test/hermes_test/commands/harvest/test_harvest_codemeta.py create mode 100644 test/hermes_test/commands/postprocess/test_invenio_postprocess.py create mode 100644 test/hermes_test/commands/process/test_process.py delete mode 100644 test/hermes_test/model/test_api_e2e.py diff --git a/test/hermes_test/commands/curate/test_do_nothing_curate.py b/test/hermes_test/commands/curate/test_do_nothing_curate.py new file mode 100644 index 00000000..3e2811c7 --- /dev/null +++ b/test/hermes_test/commands/curate/test_do_nothing_curate.py @@ -0,0 +1,113 @@ +# SPDX-FileCopyrightText: 2026 German Aerospace Center (DLR) +# +# SPDX-License-Identifier: Apache-2.0 + +# SPDX-FileContributor: Michael Fritzsche + +import sys + +import pytest + +from hermes.commands import cli +from hermes.model import context_manager, SoftwareMetadata + + +@pytest.mark.parametrize( + "process_result, res", + [ + 2 * ( + SoftwareMetadata({ + "@type": ["http://schema.org/SoftwareSourceCode"], + "http://schema.org/description": [{"@value": "for testing"}], + "http://schema.org/name": [{"@value": "Test"}] + }), + ), + 2 * ( + SoftwareMetadata({ + "@type": ["http://schema.org/SoftwareSourceCode"], + "http://schema.org/applicationCategory": [{"@id": "Testing"}], + "http://schema.org/author": [ + { + "@list": [ + { + "@id": "_:author_1", + "@type": ["http://schema.org/Person"], + "http://schema.org/email": [{"@value": "test.testi@test.testi"}], + "http://schema.org/familyName": [{"@value": "Testi"}], + "http://schema.org/givenName": [{"@value": "Test"}] + } + ] + } + ], + "http://schema.org/codeRepository": [{"@id": "https://github.com/softwarepub/hermes"}], + "http://schema.org/contributor": [ + { + "@id": "_:contributor_1", + "@type": ["http://schema.org/Person"], + "http://schema.org/email": [{"@value": "test.testi@test.testi"}], + "http://schema.org/familyName": [{"@value": "Testi"}], + "http://schema.org/givenName": [{"@value": "Test"}] + } + ], + "http://schema.org/dateCreated": [{"@type": "http://schema.org/Date", "@value": "2026-01-16"}], + "http://schema.org/dateModified": [{"@type": "http://schema.org/Date", "@value": "2026-01-16"}], + "http://schema.org/datePublished": [{"@type": "http://schema.org/Date", "@value": "2026-01-16"}], + "http://schema.org/description": [{"@value": "for testing"}], + "http://schema.org/funder": [ + { + "@type": ["http://schema.org/Organization"], + "http://schema.org/name": [{"@value": "TestsTests"}] + } + ], + "http://schema.org/keywords": [{"@value": "testing"}, {"@value": "more testing"}], + "http://schema.org/license": [ + {"@id": "https://spdx.org/licenses/Adobe-2006"}, + {"@id": "https://spdx.org/licenses/Abstyles"}, + {"@id": "https://spdx.org/licenses/AGPL-1.0-only"} + ], + "http://schema.org/name": [{"@value": "Test"}], + "http://schema.org/operatingSystem": [{"@value": "Windows"}], + "http://schema.org/programmingLanguage": [{"@value": "Python"}, {"@value": "Python 3"}], + "http://schema.org/relatedLink": [{"@id": "https://docs.software-metadata.pub/en/latest"}], + "http://schema.org/releaseNotes": [{"@value": "get it now"}], + "http://schema.org/version": [{"@value": "1.1.1"}], + "https://codemeta.github.io/terms/developmentStatus": [{"@id": "abandoned"}], + "https://codemeta.github.io/terms/funding": [{"@value": "none :("}], + "https://codemeta.github.io/terms/isSourceCodeOf": [{"@id": "HERMES"}], + "https://codemeta.github.io/terms/issueTracker": [ + {"@id": "https://github.com/softwarepub/hermes/issues"} + ], + "https://codemeta.github.io/terms/referencePublication": [{"@id": "https://arxiv.org/abs/2201.09015"}] + }), + ), + ] +) +def test_do_nothing_curate(tmp_path, monkeypatch, process_result, res): + monkeypatch.chdir(tmp_path) + + manager = context_manager.HermesContext(tmp_path) + manager.prepare_step("process") + with manager["result"] as cache: + cache["expanded"] = process_result.ld_value + cache["context"] = {"@context": process_result.full_context} + manager.finalize_step("process") + + config_file = tmp_path / "hermes.toml" + config_file.write_text("") + + orig_argv = sys.argv[:] + sys.argv = ["hermes", "curate", "--path", str(tmp_path), "--config", str(config_file)] + result = {} + try: + monkeypatch.setattr(context_manager.HermesContext.__init__, "__defaults__", (tmp_path.cwd(),)) + cli.main() + except SystemExit as e: + if e.code != 0: + raise e + finally: + manager.prepare_step("curate") + result = SoftwareMetadata.load_from_cache(manager, "result") + manager.finalize_step("curate") + sys.argv = orig_argv + + assert result.data_dict == res.data_dict diff --git a/test/hermes_test/commands/deposit/test_file_deposit.py b/test/hermes_test/commands/deposit/test_file_deposit.py new file mode 100644 index 00000000..ba801b4c --- /dev/null +++ b/test/hermes_test/commands/deposit/test_file_deposit.py @@ -0,0 +1,52 @@ +# SPDX-FileCopyrightText: 2026 German Aerospace Center (DLR) +# +# SPDX-License-Identifier: Apache-2.0 + +# SPDX-FileContributor: Michael Fritzsche + +import json +import sys + +import pytest + +from hermes.commands import cli +from hermes.model import context_manager, SoftwareMetadata + + +@pytest.mark.parametrize( + "metadata", + [ + SoftwareMetadata({ + "@type": ["http://schema.org/SoftwareSourceCode"], + "http://schema.org/description": [{"@value": "for testing"}], + "http://schema.org/name": [{"@value": "Test"}] + }), + ] +) +def test_file_deposit(tmp_path, monkeypatch, metadata): + monkeypatch.chdir(tmp_path) + + manager = context_manager.HermesContext(tmp_path) + manager.prepare_step("curate") + with manager["result"] as cache: + cache["codemeta"] = metadata.compact() + manager.finalize_step("curate") + + config_file = tmp_path / "hermes.toml" + config_file.write_text("[deposit]\ntarget = \"file\"") + + orig_argv = sys.argv[:] + sys.argv = ["hermes", "deposit", "--path", str(tmp_path), "--config", str(config_file)] + result = {} + try: + monkeypatch.setattr(context_manager.HermesContext.__init__, "__defaults__", (tmp_path.cwd(),)) + cli.main() + except SystemExit as e: + if e.code != 0: + raise e + finally: + with open("codemeta.json", "r") as cache: + result = SoftwareMetadata(json.load(cache)) + sys.argv = orig_argv + + assert result == metadata diff --git a/test/hermes_test/commands/deposit/test_invenio.py b/test/hermes_test/commands/deposit/test_invenio.py index 0ade0b82..b813e305 100644 --- a/test/hermes_test/commands/deposit/test_invenio.py +++ b/test/hermes_test/commands/deposit/test_invenio.py @@ -12,8 +12,6 @@ import click import pytest -pytest.skip("FIXME: Re-enable test after data model refactoring is done.", allow_module_level=True) - from hermes.commands.deposit import invenio from hermes.error import MisconfigurationError diff --git a/test/hermes_test/commands/deposit/test_invenio_e2e.py b/test/hermes_test/commands/deposit/test_invenio_e2e.py new file mode 100644 index 00000000..82148bee --- /dev/null +++ b/test/hermes_test/commands/deposit/test_invenio_e2e.py @@ -0,0 +1,97 @@ +# SPDX-FileCopyrightText: 2026 German Aerospace Center (DLR) +# +# SPDX-License-Identifier: Apache-2.0 + +# SPDX-FileContributor: Michael Fritzsche + +from datetime import date +from pathlib import Path +import sys + +import pytest + +from hermes.commands import cli +from hermes.model import context_manager +from hermes.model.api import SoftwareMetadata + + +@pytest.fixture +def sandbox_auth(): + path = Path("./../auth.txt") + if not path.exists(): + pytest.skip("Local auth token file does not exist.") + with path.open() as f: + yield f.read() + + +@pytest.mark.parametrize( + "metadata, invenio_metadata", + [ + ( + SoftwareMetadata({ + "@type": ["http://schema.org/SoftwareSourceCode"], + "http://schema.org/description": [{"@value": "for testing"}], + "http://schema.org/name": [{"@value": "Test"}], + "http://schema.org/author": [{ + "@type": "http://schema.org/Person", + "http://schema.org/familyName": [{"@value": "Test"}], + "http://schema.org/givenName": [{"@value": "Testi"}] + }], + "http://schema.org/license": [{"@id": "https://spdx.org/licenses/Apache-2.0"}] + }), + { + "upload_type": "software", + "publication_date": date.today().isoformat(), + "title": "Test", + "creators": [{"name": "Test, Testi"}], + "description": "for testing", + "access_right": "closed", + "license": "apache-2.0", + "prereserve_doi": True, + "related_identifiers": [ + {"identifier": "10.5281/zenodo.13311079", "relation": "isCompiledBy", "scheme": "doi"} + ] + } + ) + ] +) +def test_invenio_deposit(tmp_path, monkeypatch, sandbox_auth, metadata, invenio_metadata): + monkeypatch.chdir(tmp_path) + + manager = context_manager.HermesContext(tmp_path) + manager.prepare_step("curate") + with manager["result"] as cache: + cache["codemeta"] = metadata.compact() + manager.finalize_step("curate") + + (tmp_path / "test.txt").write_text("Test, oh wonderful test!\n") + + config_file = tmp_path / "hermes.toml" + config_file.write_text(f"""[deposit] +target = "invenio" +[deposit.invenio] +site_url = "https://sandbox.zenodo.org" +access_right = "closed" +auth_token = "{sandbox_auth}" +files = ["test.txt"] +[deposit.invenio.api_paths] +licenses = "api/vocabularies/licenses" +""") + + orig_argv = sys.argv[:] + sys.argv = ["hermes", "deposit", "--path", str(tmp_path), "--config", str(config_file), "--initial"] + result = {} + try: + monkeypatch.setattr(context_manager.HermesContext.__init__, "__defaults__", (tmp_path.cwd(),)) + cli.main() + except SystemExit as e: + if e.code != 0: + raise e + finally: + manager.prepare_step("deposit") + with manager["invenio"] as cache: + result = cache["deposit"] + manager.finalize_step("deposit") + sys.argv = orig_argv + + assert result == invenio_metadata diff --git a/test/hermes_test/commands/harvest/test_harvest_cff.py b/test/hermes_test/commands/harvest/test_harvest_cff.py new file mode 100644 index 00000000..eaac0168 --- /dev/null +++ b/test/hermes_test/commands/harvest/test_harvest_cff.py @@ -0,0 +1,231 @@ +# SPDX-FileCopyrightText: 2026 German Aerospace Center (DLR) +# +# SPDX-License-Identifier: Apache-2.0 + +# SPDX-FileContributor: Michael Fritzsche + +import sys + +import pytest + +from hermes.commands import cli +from hermes.model import context_manager, SoftwareMetadata + + +@pytest.mark.parametrize( + "cff, res", + [ + ( + """cff-version: 1.2.0 +title: Temp\nmessage: >- + If you use this software, please cite it using the + metadata from this file. +type: software +authors: + - given-names: Max + family-names: Mustermann + email: max@muster.mann""", + SoftwareMetadata({ + "@type": "SoftwareSourceCode", + "author": { + "@list": [{ + "@type": "Person", + "email": ["max@muster.mann"], + "familyName": ["Mustermann"], + "givenName": ["Max"] + }] + }, + "name": ["Temp"] + }) + ), + ( + """# SPDX-FileCopyrightText: 2022 German Aerospace Center (DLR), Helmholtz-Zentrum Dresden-Rossendorf +# +# SPDX-License-Identifier: CC0-1.0 + +# SPDX-FileContributor: Michael Meinel + +cff-version: 1.2.0 +title: hermes +message: >- + If you use this software, please cite it using the + metadata from this file. +version: 0.9.0 +license: "Apache-2.0" +abstract: "Tool to automate software publication. Not stable yet." +type: software +authors: + - given-names: Michael + family-names: Meinel + email: michael.meinel@dlr.de + affiliation: German Aerospace Center (DLR) + orcid: "https://orcid.org/0000-0001-6372-3853" + - given-names: Stephan + family-names: Druskat + email: stephan.druskat@dlr.de + affiliation: German Aerospace Center (DLR) + orcid: "https://orcid.org/0000-0003-4925-7248" +identifiers: + - type: doi + value: 10.5281/zenodo.13221384 + description: Version 0.8.1b1 +""", + SoftwareMetadata({ + "@type": ["http://schema.org/SoftwareSourceCode"], + "http://schema.org/author": [ + { + "@list": [ + { + "@id": "https://orcid.org/0000-0001-6372-3853", + "@type": ["http://schema.org/Person"], + "http://schema.org/affiliation": [ + { + "@type": ["http://schema.org/Organization"], + "http://schema.org/name": [{"@value": "German Aerospace Center (DLR)"}] + } + ], + "http://schema.org/email": [{"@value": "michael.meinel@dlr.de"}], + "http://schema.org/familyName": [{"@value": "Meinel"}], + "http://schema.org/givenName": [{"@value": "Michael"}] + }, + { + "@id": "https://orcid.org/0000-0003-4925-7248", + "@type": ["http://schema.org/Person"], + "http://schema.org/affiliation": [ + { + "@type": ["http://schema.org/Organization"], + "http://schema.org/name": [{"@value": "German Aerospace Center (DLR)"}] + } + ], + "http://schema.org/email": [{"@value": "stephan.druskat@dlr.de"}], + "http://schema.org/familyName": [{"@value": "Druskat"}], + "http://schema.org/givenName": [{"@value": "Stephan"}] + } + ] + } + ], + "http://schema.org/description": [{"@value": "Tool to automate software publication. Not stable yet."}], + "http://schema.org/identifier": [{"@id": "https://doi.org/10.5281/zenodo.13221384"}], + "http://schema.org/license": [{"@id": "https://spdx.org/licenses/Apache-2.0"}], + "http://schema.org/name": [{"@value": "hermes"}], + "http://schema.org/version": [{"@value": "0.9.0"}] + }) + ) + ] +) +def test_cff_harvest(tmp_path, monkeypatch, cff, res): + monkeypatch.chdir(tmp_path) + cff_file = tmp_path / "CITATION.cff" + cff_file.write_text(cff) + + config_file = tmp_path / "hermes.toml" + config_file.write_text("[harvest]\nsources = [ \"cff\" ]") + + orig_argv = sys.argv[:] + sys.argv = ["hermes", "harvest", "--path", str(tmp_path), "--config", str(config_file)] + result = {} + try: + monkeypatch.setattr(context_manager.HermesContext.__init__, "__defaults__", (tmp_path.cwd(),)) + cli.main() + except SystemExit as e: + if e.code != 0: + raise e + finally: + manager = context_manager.HermesContext() + manager.prepare_step("harvest") + result = SoftwareMetadata.load_from_cache(manager, "cff") + manager.finalize_step("harvest") + sys.argv = orig_argv + + assert result == res + + +@pytest.mark.xfail +@pytest.mark.parametrize( + "cff, res", + [ + ( + """cff-version: 1.2.0 +title: Test +message: None +type: software +authors: + - given-names: Test + family-names: Testi + email: test.testi@test.testi + affiliation: German Aerospace Center (DLR) +identifiers: + - type: url + value: "https://arxiv.org/abs/2201.09015" + - type: doi + value: 10.5281/zenodo.13221384 +repository-code: "https://github.com/softwarepub/hermes" +abstract: for testing +url: "https://docs.software-metadata.pub/en/latest" +keywords: + - testing + - more testing +license: Apache-2.0 +version: 9.0.1 +date-released: "2026-01-16" """, + SoftwareMetadata({ + "@type": ["http://schema.org/SoftwareSourceCode"], + "http://schema.org/author": [ + { + "@list": [ + { + "@type": ["http://schema.org/Person"], + "http://schema.org/affiliation": [ + { + "@type": ["http://schema.org/Organization"], + "http://schema.org/name": [{"@value": "German Aerospace Center (DLR)"}] + } + ], + "http://schema.org/email": [{"@value": "test.testi@test.testi"}], + "http://schema.org/familyName": [{"@value": "Testi"}], + "http://schema.org/givenName": [{"@value": "Test"}] + } + ] + } + ], + "http://schema.org/codeRepository": [{"@id": "https://github.com/softwarepub/hermes"}], + "http://schema.org/datePublished": [{"@type": "http://schema.org/Date", "@value": "2026-01-16"}], + "http://schema.org/description": [{"@value": "for testing"}], + "http://schema.org/identifier": [{"@id": "https://doi.org/10.5281/zenodo.13221384"}], + "http://schema.org/keywords": [{"@value": "testing"}, {"@value": "more testing"}], + "http://schema.org/license": [{"@id": "https://spdx.org/licenses/Apache-2.0"}], + "http://schema.org/name": [{"@value": "Test"}], + "http://schema.org/url": [ + {"@id": "https://arxiv.org/abs/2201.09015"}, + {"@id": "https://docs.software-metadata.pub/en/latest"} + ], + "http://schema.org/version": [{"@value": "9.0.1"}] + }) + ) + ] +) +def test_cff_harvest_multiple_urls(tmp_path, monkeypatch, cff, res): + monkeypatch.chdir(tmp_path) + cff_file = tmp_path / "CITATION.cff" + cff_file.write_text(cff) + + config_file = tmp_path / "hermes.toml" + config_file.write_text("[harvest]\nsources = [ \"cff\" ]") + + orig_argv = sys.argv[:] + sys.argv = ["hermes", "harvest", "--path", str(tmp_path), "--config", str(config_file)] + result = {} + try: + monkeypatch.setattr(context_manager.HermesContext.__init__, "__defaults__", (tmp_path.cwd(),)) + cli.main() + except SystemExit as e: + if e.code != 0: + raise e + finally: + manager = context_manager.HermesContext() + manager.prepare_step("harvest") + result = SoftwareMetadata.load_from_cache(manager, "cff") + manager.finalize_step("harvest") + sys.argv = orig_argv + + assert result == res diff --git a/test/hermes_test/commands/harvest/test_harvest_codemeta.py b/test/hermes_test/commands/harvest/test_harvest_codemeta.py new file mode 100644 index 00000000..5ff54af7 --- /dev/null +++ b/test/hermes_test/commands/harvest/test_harvest_codemeta.py @@ -0,0 +1,171 @@ +# SPDX-FileCopyrightText: 2026 German Aerospace Center (DLR) +# +# SPDX-License-Identifier: Apache-2.0 + +# SPDX-FileContributor: Michael Fritzsche + +import sys + +import pytest + +from hermes.commands import cli +from hermes.model import context_manager, SoftwareMetadata + + +@pytest.mark.parametrize( + "codemeta, res", + [ + ( + """{ + "@context": "https://doi.org/10.5063/schema/codemeta-2.0", + "type": "SoftwareSourceCode", + "description": "for testing", + "name": "Test" +}""", + SoftwareMetadata({ + "@type": ["http://schema.org/SoftwareSourceCode"], + "http://schema.org/description": [{"@value": "for testing"}], + "http://schema.org/name": [{"@value": "Test"}] + }) + ), + ( + """{ + "@context": "https://doi.org/10.5063/schema/codemeta-2.0", + "type": "SoftwareSourceCode", + "applicationCategory": "Testing", + "author": [ + { + "id": "_:author_1", + "type": "Person", + "email": "test.testi@test.testi", + "familyName": "Testi", + "givenName": "Test" + } + ], + "codeRepository": "https://github.com/softwarepub/hermes", + "contributor": { + "id": "_:contributor_1", + "type": "Person", + "email": "test.testi@test.testi", + "familyName": "Testi", + "givenName": "Test" + }, + "dateCreated": "2026-01-16", + "dateModified": "2026-01-16", + "datePublished": "2026-01-16", + "description": "for testing", + "funder": { + "type": "Organization", + "name": "TestsTests" + }, + "keywords": [ + "testing", + "more testing" + ], + "license": [ + "https://spdx.org/licenses/Adobe-2006", + "https://spdx.org/licenses/Abstyles", + "https://spdx.org/licenses/AGPL-1.0-only" + ], + "name": "Test", + "operatingSystem": "Windows", + "programmingLanguage": [ + "Python", + "Python 3" + ], + "relatedLink": "https://docs.software-metadata.pub/en/latest", + "schema:releaseNotes": "get it now", + "version": "1.1.1", + "developmentStatus": "abandoned", + "funding": "none :(", + "codemeta:isSourceCodeOf": { + "id": "HERMES" + }, + "issueTracker": "https://github.com/softwarepub/hermes/issues", + "referencePublication": "https://arxiv.org/abs/2201.09015" +}""", + SoftwareMetadata({ + "@type": ["http://schema.org/SoftwareSourceCode"], + "http://schema.org/applicationCategory": [{"@id": "Testing"}], + "http://schema.org/author": [ + { + "@list": [ + { + "@id": "_:author_1", + "@type": ["http://schema.org/Person"], + "http://schema.org/email": [{"@value": "test.testi@test.testi"}], + "http://schema.org/familyName": [{"@value": "Testi"}], + "http://schema.org/givenName": [{"@value": "Test"}] + } + ] + } + ], + "http://schema.org/codeRepository": [{"@id": "https://github.com/softwarepub/hermes"}], + "http://schema.org/contributor": [ + { + "@id": "_:contributor_1", + "@type": ["http://schema.org/Person"], + "http://schema.org/email": [{"@value": "test.testi@test.testi"}], + "http://schema.org/familyName": [{"@value": "Testi"}], + "http://schema.org/givenName": [{"@value": "Test"}] + } + ], + "http://schema.org/dateCreated": [{"@type": "http://schema.org/Date", "@value": "2026-01-16"}], + "http://schema.org/dateModified": [{"@type": "http://schema.org/Date", "@value": "2026-01-16"}], + "http://schema.org/datePublished": [{"@type": "http://schema.org/Date", "@value": "2026-01-16"}], + "http://schema.org/description": [{"@value": "for testing"}], + "http://schema.org/funder": [ + { + "@type": ["http://schema.org/Organization"], + "http://schema.org/name": [{"@value": "TestsTests"}] + } + ], + "http://schema.org/keywords": [{"@value": "testing"}, {"@value": "more testing"}], + "http://schema.org/license": [ + {"@id": "https://spdx.org/licenses/Adobe-2006"}, + {"@id": "https://spdx.org/licenses/Abstyles"}, + {"@id": "https://spdx.org/licenses/AGPL-1.0-only"} + ], + "http://schema.org/name": [{"@value": "Test"}], + "http://schema.org/operatingSystem": [{"@value": "Windows"}], + "http://schema.org/programmingLanguage": [{"@value": "Python"}, {"@value": "Python 3"}], + "http://schema.org/relatedLink": [{"@id": "https://docs.software-metadata.pub/en/latest"}], + "http://schema.org/releaseNotes": [{"@value": "get it now"}], + "http://schema.org/version": [{"@value": "1.1.1"}], + "https://codemeta.github.io/terms/developmentStatus": [{"@id": "abandoned"}], + "https://codemeta.github.io/terms/funding": [{"@value": "none :("}], + "https://codemeta.github.io/terms/isSourceCodeOf": [{"@id": "HERMES"}], + "https://codemeta.github.io/terms/issueTracker": [ + {"@id": "https://github.com/softwarepub/hermes/issues"} + ], + "https://codemeta.github.io/terms/referencePublication": [{"@id": "https://arxiv.org/abs/2201.09015"}] + }) + ) + ] +) +def test_codemeta_harvest(tmp_path, monkeypatch, codemeta, res): + monkeypatch.chdir(tmp_path) + + codemeta_file = tmp_path / "codemeta.json" + codemeta_file.write_text(codemeta) + + config_file = tmp_path / "hermes.toml" + config_file.write_text("[harvest]\nsources = [ \"codemeta\" ]") + + orig_argv = sys.argv[:] + sys.argv = ["hermes", "harvest", "--path", str(tmp_path), "--config", str(config_file)] + result = {} + try: + monkeypatch.setattr(context_manager.HermesContext.__init__, "__defaults__", (tmp_path.cwd(),)) + cli.main() + except SystemExit as e: + if e.code != 0: + raise e + finally: + manager = context_manager.HermesContext() + manager.prepare_step("harvest") + result = SoftwareMetadata.load_from_cache(manager, "codemeta") + manager.finalize_step("harvest") + sys.argv = orig_argv + + assert result == res diff --git a/test/hermes_test/commands/postprocess/test_invenio_postprocess.py b/test/hermes_test/commands/postprocess/test_invenio_postprocess.py new file mode 100644 index 00000000..091666f2 --- /dev/null +++ b/test/hermes_test/commands/postprocess/test_invenio_postprocess.py @@ -0,0 +1,66 @@ +# SPDX-FileCopyrightText: 2026 German Aerospace Center (DLR) +# +# SPDX-License-Identifier: Apache-2.0 + +# SPDX-FileContributor: Michael Fritzsche + +import sys + +from ruamel import yaml +import toml + +from hermes.commands import cli +from hermes.model import context_manager + + +def test_invenio_postprocess(tmp_path, monkeypatch): + monkeypatch.chdir(tmp_path) + + input_file = tmp_path / ".hermes" / "deposit" / "invenio" / "result.json" + input_file.parent.mkdir(parents=True, exist_ok=True) + input_file.write_text("""{"record_id": "foo", "doi": "my_doi", "metadata": {"version": "1.0.0"}}""") + + citation_file = tmp_path / "CITATION.cff" + citation_file.write_text("cff-version: 1.2.0\ntitle: Test") + + config_file = tmp_path / "hermes.toml" + config_file.write_text( + """[postprocess] +run = ["config_invenio_record_id", "cff_doi"] +[deposit.invenio] +site_url = "https://zenodo.org" +""" + ) + + orig_argv = sys.argv[:] + sys.argv = ["hermes", "postprocess", "--path", str(tmp_path), "--config", str(config_file)] + print(" ".join(sys.argv)) + result_cff = result_toml = {} + try: + monkeypatch.setattr(context_manager.HermesContext.__init__, "__defaults__", (tmp_path.cwd(),)) + cli.main() + except SystemExit as e: + if e.code != 0: + raise e + finally: + result_toml = toml.load(config_file) + result_cff = yaml.YAML().load(citation_file) + sys.argv = orig_argv + + assert result_toml == toml.loads( + """[postprocess] +run = ["config_invenio_record_id", "cff_doi"] +[deposit.invenio] +site_url = "https://zenodo.org" +record_id = "foo" +""" + ) + assert result_cff == yaml.YAML().load( + """cff-version: 1.2.0 +title: Test +identifiers: + - type: doi + value: my_doi + description: DOI for the published version 1.0.0 [generated by hermes] +""" + ) diff --git a/test/hermes_test/commands/process/test_process.py b/test/hermes_test/commands/process/test_process.py new file mode 100644 index 00000000..0e25f8c0 --- /dev/null +++ b/test/hermes_test/commands/process/test_process.py @@ -0,0 +1,189 @@ +# SPDX-FileCopyrightText: 2026 German Aerospace Center (DLR) +# +# SPDX-License-Identifier: Apache-2.0 + +# SPDX-FileContributor: Michael Fritzsche + +import sys + +import pytest + +from hermes.commands import cli +from hermes.model import context_manager, SoftwareMetadata + + +@pytest.mark.parametrize( + "metadata_in, metadata_out", + [ + ( + { + "cff": SoftwareMetadata({ + "@type": ["http://schema.org/SoftwareSourceCode"], + "http://schema.org/description": [{"@value": "for testing"}], + "http://schema.org/name": [{"@value": "Test"}], + "http://schema.org/author": [{ + "@type": "http://schema.org/Person", + "http://schema.org/familyName": [{"@value": "Test"}], + "http://schema.org/givenName": [{"@value": "Testi"}] + }], + "http://schema.org/license": [{"@id": "https://spdx.org/licenses/Apache-2.0"}] + }) + }, + SoftwareMetadata({ + "@type": ["http://schema.org/SoftwareSourceCode"], + "http://schema.org/description": [{"@value": "for testing"}], + "http://schema.org/name": [{"@value": "Test"}], + "http://schema.org/author": [{ + "@type": "http://schema.org/Person", + "http://schema.org/familyName": [{"@value": "Test"}], + "http://schema.org/givenName": [{"@value": "Testi"}] + }], + "http://schema.org/license": [{"@id": "https://spdx.org/licenses/Apache-2.0"}] + }) + ) + ] +) +def test_process(tmp_path, monkeypatch, metadata_in, metadata_out): + monkeypatch.chdir(tmp_path) + + manager = context_manager.HermesContext(tmp_path) + manager.prepare_step("harvest") + for harvester, result in metadata_in.items(): + with manager[harvester] as cache: + cache["codemeta"] = result.compact() + cache["context"] = {"@context": result.full_context} + cache["expanded"] = result.ld_value + manager.finalize_step("harvest") + + config_file = tmp_path / "hermes.toml" + config_file.write_text( + "[harvest]\nsources = [" + ", ".join('\"' + f'{harvester}' + '\"' for harvester in metadata_in) + "]" + ) + + orig_argv = sys.argv[:] + sys.argv = ["hermes", "process", "--path", str(tmp_path), "--config", str(config_file)] + result = {} + try: + monkeypatch.setattr(context_manager.HermesContext.__init__, "__defaults__", (tmp_path.cwd(),)) + cli.main() + except SystemExit as e: + if e.code != 0: + raise e + finally: + manager.prepare_step("process") + result = SoftwareMetadata.load_from_cache(manager, "result") + manager.finalize_step("process") + sys.argv = orig_argv + + assert result == metadata_out + + +@pytest.mark.parametrize( + "metadata_in, metadata_out", + [ + ( + { + "cff": SoftwareMetadata({ + "@type": ["http://schema.org/SoftwareSourceCode"], + "http://schema.org/name": [{"@value": "Test"}], + "http://schema.org/author": [ + { + "@type": "http://schema.org/Person", + "http://schema.org/familyName": [{"@value": "Test"}], + "http://schema.org/email": [{"@value": "test.testi@testis.tests"}] + }, + { + "@type": "http://schema.org/Person", + "http://schema.org/familyName": [{"@value": "Testers"}] + }, + { + "@type": "http://schema.org/Person", + "http://schema.org/familyName": [{"@value": "Tester"}], + "http://schema.org/email": [{"@value": "test@tester.tests"}] + } + ], + "http://schema.org/license": [{"@id": "https://spdx.org/licenses/Apache-2.0"}] + }), + "codemeta": SoftwareMetadata({ + "@type": ["http://schema.org/SoftwareSourceCode"], + "http://schema.org/description": [{"@value": "for testing"}], + "http://schema.org/name": [{"@value": "Test"}, {"@value": "Testis Test"}], + "http://schema.org/author": [ + { + "@type": "http://schema.org/Person", + "http://schema.org/familyName": [{"@value": "Test"}], + "http://schema.org/givenName": [{"@value": "Testi"}], + "http://schema.org/email": [ + {"@value": "test.testi@testis.tests"}, + {"@value": "test.testi@testis.tests2"} + ] + }, + { + "@type": "http://schema.org/Person", + "http://schema.org/familyName": [{"@value": "Testers"}] + } + ] + }) + }, + SoftwareMetadata({ + "@type": ["http://schema.org/SoftwareSourceCode"], + "http://schema.org/description": [{"@value": "for testing"}], + "http://schema.org/name": [{"@value": "Test"}, {"@value": "Testis Test"}], + "http://schema.org/author": [ + { + "@type": "http://schema.org/Person", + "http://schema.org/familyName": [{"@value": "Test"}], + "http://schema.org/givenName": [{"@value": "Testi"}], + "http://schema.org/email": [ + {"@value": "test.testi@testis.tests"}, + {"@value": "test.testi@testis.tests2"} + ] + }, + { + "@type": "http://schema.org/Person", + "http://schema.org/familyName": [{"@value": "Testers"}] + }, + { + "@type": "http://schema.org/Person", + "http://schema.org/familyName": [{"@value": "Tester"}], + "http://schema.org/email": [{"@value": "test@tester.tests"}] + } + ], + "http://schema.org/license": [{"@id": "https://spdx.org/licenses/Apache-2.0"}] + }) + ) + ] +) +def test_process_complex(tmp_path, monkeypatch, metadata_in, metadata_out): + monkeypatch.chdir(tmp_path) + + manager = context_manager.HermesContext(tmp_path) + manager.prepare_step("harvest") + for harvester, result in metadata_in.items(): + with manager[harvester] as cache: + cache["codemeta"] = result.compact() + cache["context"] = {"@context": result.full_context} + cache["expanded"] = result.ld_value + manager.finalize_step("harvest") + + config_file = tmp_path / "hermes.toml" + config_file.write_text( + "[harvest]\nsources = [" + ", ".join('\"' + f'{harvester}' + '\"' for harvester in metadata_in) + "]" + ) + + orig_argv = sys.argv[:] + sys.argv = ["hermes", "process", "--path", str(tmp_path), "--config", str(config_file)] + result = {} + try: + monkeypatch.setattr(context_manager.HermesContext.__init__, "__defaults__", (tmp_path.cwd(),)) + cli.main() + except SystemExit as e: + if e.code != 0: + raise e + finally: + manager.prepare_step("process") + result = SoftwareMetadata.load_from_cache(manager, "result") + manager.finalize_step("process") + sys.argv = orig_argv + + assert result == metadata_out diff --git a/test/hermes_test/model/test_api_e2e.py b/test/hermes_test/model/test_api_e2e.py deleted file mode 100644 index 8433bad6..00000000 --- a/test/hermes_test/model/test_api_e2e.py +++ /dev/null @@ -1,815 +0,0 @@ -# SPDX-FileCopyrightText: 2026 German Aerospace Center (DLR) -# -# SPDX-License-Identifier: Apache-2.0 - -# SPDX-FileContributor: Michael Fritzsche - -from datetime import date -import json -from pathlib import Path -import pytest -import sys - -from ruamel import yaml -import toml - -from hermes.model import context_manager, SoftwareMetadata -from hermes.commands import cli - - -@pytest.fixture -def sandbox_auth(): - path = Path("./../auth.txt") - if not path.exists(): - pytest.skip("Local auth token file does not exist.") - with path.open() as f: - yield f.read() - - -@pytest.mark.parametrize( - "cff, res", - [ - ( - """cff-version: 1.2.0 -title: Temp\nmessage: >- - If you use this software, please cite it using the - metadata from this file. -type: software -authors: - - given-names: Max - family-names: Mustermann - email: max@muster.mann""", - SoftwareMetadata({ - "@type": "SoftwareSourceCode", - "author": { - "@list": [{ - "@type": "Person", - "email": ["max@muster.mann"], - "familyName": ["Mustermann"], - "givenName": ["Max"] - }] - }, - "name": ["Temp"] - }) - ), - ( - """# SPDX-FileCopyrightText: 2022 German Aerospace Center (DLR), Helmholtz-Zentrum Dresden-Rossendorf -# -# SPDX-License-Identifier: CC0-1.0 - -# SPDX-FileContributor: Michael Meinel - -cff-version: 1.2.0 -title: hermes -message: >- - If you use this software, please cite it using the - metadata from this file. -version: 0.9.0 -license: "Apache-2.0" -abstract: "Tool to automate software publication. Not stable yet." -type: software -authors: - - given-names: Michael - family-names: Meinel - email: michael.meinel@dlr.de - affiliation: German Aerospace Center (DLR) - orcid: "https://orcid.org/0000-0001-6372-3853" - - given-names: Stephan - family-names: Druskat - email: stephan.druskat@dlr.de - affiliation: German Aerospace Center (DLR) - orcid: "https://orcid.org/0000-0003-4925-7248" -identifiers: - - type: doi - value: 10.5281/zenodo.13221384 - description: Version 0.8.1b1 -""", - SoftwareMetadata({ - "@type": ["http://schema.org/SoftwareSourceCode"], - "http://schema.org/author": [ - { - "@list": [ - { - "@id": "https://orcid.org/0000-0001-6372-3853", - "@type": ["http://schema.org/Person"], - "http://schema.org/affiliation": [ - { - "@type": ["http://schema.org/Organization"], - "http://schema.org/name": [{"@value": "German Aerospace Center (DLR)"}] - } - ], - "http://schema.org/email": [{"@value": "michael.meinel@dlr.de"}], - "http://schema.org/familyName": [{"@value": "Meinel"}], - "http://schema.org/givenName": [{"@value": "Michael"}] - }, - { - "@id": "https://orcid.org/0000-0003-4925-7248", - "@type": ["http://schema.org/Person"], - "http://schema.org/affiliation": [ - { - "@type": ["http://schema.org/Organization"], - "http://schema.org/name": [{"@value": "German Aerospace Center (DLR)"}] - } - ], - "http://schema.org/email": [{"@value": "stephan.druskat@dlr.de"}], - "http://schema.org/familyName": [{"@value": "Druskat"}], - "http://schema.org/givenName": [{"@value": "Stephan"}] - } - ] - } - ], - "http://schema.org/description": [{"@value": "Tool to automate software publication. Not stable yet."}], - "http://schema.org/identifier": [{"@id": "https://doi.org/10.5281/zenodo.13221384"}], - "http://schema.org/license": [{"@id": "https://spdx.org/licenses/Apache-2.0"}], - "http://schema.org/name": [{"@value": "hermes"}], - "http://schema.org/version": [{"@value": "0.9.0"}] - }) - ), - ( - """cff-version: 1.2.0 -title: Test -message: None -type: software -authors: - - given-names: Test - family-names: Testi - email: test.testi@test.testi - affiliation: German Aerospace Center (DLR) -identifiers: - - type: url - value: "https://arxiv.org/abs/2201.09015" - - type: doi - value: 10.5281/zenodo.13221384 -repository-code: "https://github.com/softwarepub/hermes" -abstract: for testing -url: "https://docs.software-metadata.pub/en/latest" -keywords: - - testing - - more testing -license: Apache-2.0 -version: 9.0.1 -date-released: "2026-01-16" """, - SoftwareMetadata({ - "@type": ["http://schema.org/SoftwareSourceCode"], - "http://schema.org/author": [ - { - "@list": [ - { - "@type": ["http://schema.org/Person"], - "http://schema.org/affiliation": [ - { - "@type": ["http://schema.org/Organization"], - "http://schema.org/name": [{"@value": "German Aerospace Center (DLR)"}] - } - ], - "http://schema.org/email": [{"@value": "test.testi@test.testi"}], - "http://schema.org/familyName": [{"@value": "Testi"}], - "http://schema.org/givenName": [{"@value": "Test"}] - } - ] - } - ], - "http://schema.org/codeRepository": [{"@id": "https://github.com/softwarepub/hermes"}], - "http://schema.org/datePublished": [{"@type": "http://schema.org/Date", "@value": "2026-01-16"}], - "http://schema.org/description": [{"@value": "for testing"}], - "http://schema.org/identifier": [{"@id": "https://doi.org/10.5281/zenodo.13221384"}], - "http://schema.org/keywords": [{"@value": "testing"}, {"@value": "more testing"}], - "http://schema.org/license": [{"@id": "https://spdx.org/licenses/Apache-2.0"}], - "http://schema.org/name": [{"@value": "Test"}], - "http://schema.org/url": [ - {"@id": "https://arxiv.org/abs/2201.09015"}, - {"@id": "https://docs.software-metadata.pub/en/latest"} - ], - "http://schema.org/version": [{"@value": "9.0.1"}] - }) - ) - ] -) -def test_cff_harvest(tmp_path, monkeypatch, cff, res): - monkeypatch.chdir(tmp_path) - cff_file = tmp_path / "CITATION.cff" - cff_file.write_text(cff) - - config_file = tmp_path / "hermes.toml" - config_file.write_text("[harvest]\nsources = [ \"cff\" ]") - - orig_argv = sys.argv[:] - sys.argv = ["hermes", "harvest", "--path", str(tmp_path), "--config", str(config_file)] - result = {} - try: - monkeypatch.setattr(context_manager.HermesContext.__init__, "__defaults__", (tmp_path.cwd(),)) - cli.main() - except SystemExit as e: - if e.code != 0: - raise e - finally: - manager = context_manager.HermesContext() - manager.prepare_step("harvest") - result = SoftwareMetadata.load_from_cache(manager, "cff") - manager.finalize_step("harvest") - sys.argv = orig_argv - - assert result == res - - -@pytest.mark.parametrize( - "codemeta, res", - [ - ( - """{ - "@context": "https://doi.org/10.5063/schema/codemeta-2.0", - "type": "SoftwareSourceCode", - "description": "for testing", - "name": "Test" -}""", - SoftwareMetadata({ - "@type": ["http://schema.org/SoftwareSourceCode"], - "http://schema.org/description": [{"@value": "for testing"}], - "http://schema.org/name": [{"@value": "Test"}] - }) - ), - ( - """{ - "@context": "https://doi.org/10.5063/schema/codemeta-2.0", - "type": "SoftwareSourceCode", - "applicationCategory": "Testing", - "author": [ - { - "id": "_:author_1", - "type": "Person", - "email": "test.testi@test.testi", - "familyName": "Testi", - "givenName": "Test" - } - ], - "codeRepository": "https://github.com/softwarepub/hermes", - "contributor": { - "id": "_:contributor_1", - "type": "Person", - "email": "test.testi@test.testi", - "familyName": "Testi", - "givenName": "Test" - }, - "dateCreated": "2026-01-16", - "dateModified": "2026-01-16", - "datePublished": "2026-01-16", - "description": "for testing", - "funder": { - "type": "Organization", - "name": "TestsTests" - }, - "keywords": [ - "testing", - "more testing" - ], - "license": [ - "https://spdx.org/licenses/Adobe-2006", - "https://spdx.org/licenses/Abstyles", - "https://spdx.org/licenses/AGPL-1.0-only" - ], - "name": "Test", - "operatingSystem": "Windows", - "programmingLanguage": [ - "Python", - "Python 3" - ], - "relatedLink": "https://docs.software-metadata.pub/en/latest", - "schema:releaseNotes": "get it now", - "version": "1.1.1", - "developmentStatus": "abandoned", - "funding": "none :(", - "codemeta:isSourceCodeOf": { - "id": "HERMES" - }, - "issueTracker": "https://github.com/softwarepub/hermes/issues", - "referencePublication": "https://arxiv.org/abs/2201.09015" -}""", - SoftwareMetadata({ - "@type": ["http://schema.org/SoftwareSourceCode"], - "http://schema.org/applicationCategory": [{"@id": "Testing"}], - "http://schema.org/author": [ - { - "@list": [ - { - "@id": "_:author_1", - "@type": ["http://schema.org/Person"], - "http://schema.org/email": [{"@value": "test.testi@test.testi"}], - "http://schema.org/familyName": [{"@value": "Testi"}], - "http://schema.org/givenName": [{"@value": "Test"}] - } - ] - } - ], - "http://schema.org/codeRepository": [{"@id": "https://github.com/softwarepub/hermes"}], - "http://schema.org/contributor": [ - { - "@id": "_:contributor_1", - "@type": ["http://schema.org/Person"], - "http://schema.org/email": [{"@value": "test.testi@test.testi"}], - "http://schema.org/familyName": [{"@value": "Testi"}], - "http://schema.org/givenName": [{"@value": "Test"}] - } - ], - "http://schema.org/dateCreated": [{"@type": "http://schema.org/Date", "@value": "2026-01-16"}], - "http://schema.org/dateModified": [{"@type": "http://schema.org/Date", "@value": "2026-01-16"}], - "http://schema.org/datePublished": [{"@type": "http://schema.org/Date", "@value": "2026-01-16"}], - "http://schema.org/description": [{"@value": "for testing"}], - "http://schema.org/funder": [ - { - "@type": ["http://schema.org/Organization"], - "http://schema.org/name": [{"@value": "TestsTests"}] - } - ], - "http://schema.org/keywords": [{"@value": "testing"}, {"@value": "more testing"}], - "http://schema.org/license": [ - {"@id": "https://spdx.org/licenses/Adobe-2006"}, - {"@id": "https://spdx.org/licenses/Abstyles"}, - {"@id": "https://spdx.org/licenses/AGPL-1.0-only"} - ], - "http://schema.org/name": [{"@value": "Test"}], - "http://schema.org/operatingSystem": [{"@value": "Windows"}], - "http://schema.org/programmingLanguage": [{"@value": "Python"}, {"@value": "Python 3"}], - "http://schema.org/relatedLink": [{"@id": "https://docs.software-metadata.pub/en/latest"}], - "http://schema.org/releaseNotes": [{"@value": "get it now"}], - "http://schema.org/version": [{"@value": "1.1.1"}], - "https://codemeta.github.io/terms/developmentStatus": [{"@id": "abandoned"}], - "https://codemeta.github.io/terms/funding": [{"@value": "none :("}], - "https://codemeta.github.io/terms/isSourceCodeOf": [{"@id": "HERMES"}], - "https://codemeta.github.io/terms/issueTracker": [ - {"@id": "https://github.com/softwarepub/hermes/issues"} - ], - "https://codemeta.github.io/terms/referencePublication": [{"@id": "https://arxiv.org/abs/2201.09015"}] - }) - ) - ] -) -def test_codemeta_harvest(tmp_path, monkeypatch, codemeta, res): - monkeypatch.chdir(tmp_path) - - codemeta_file = tmp_path / "codemeta.json" - codemeta_file.write_text(codemeta) - - config_file = tmp_path / "hermes.toml" - config_file.write_text("[harvest]\nsources = [ \"codemeta\" ]") - - orig_argv = sys.argv[:] - sys.argv = ["hermes", "harvest", "--path", str(tmp_path), "--config", str(config_file)] - result = {} - try: - monkeypatch.setattr(context_manager.HermesContext.__init__, "__defaults__", (tmp_path.cwd(),)) - cli.main() - except SystemExit as e: - if e.code != 0: - raise e - finally: - manager = context_manager.HermesContext() - manager.prepare_step("harvest") - result = SoftwareMetadata.load_from_cache(manager, "codemeta") - manager.finalize_step("harvest") - sys.argv = orig_argv - - assert result == res - - -@pytest.mark.parametrize( - "process_result, res", - [ - 2 * ( - SoftwareMetadata({ - "@type": ["http://schema.org/SoftwareSourceCode"], - "http://schema.org/description": [{"@value": "for testing"}], - "http://schema.org/name": [{"@value": "Test"}] - }), - ), - 2 * ( - SoftwareMetadata({ - "@type": ["http://schema.org/SoftwareSourceCode"], - "http://schema.org/applicationCategory": [{"@id": "Testing"}], - "http://schema.org/author": [ - { - "@list": [ - { - "@id": "_:author_1", - "@type": ["http://schema.org/Person"], - "http://schema.org/email": [{"@value": "test.testi@test.testi"}], - "http://schema.org/familyName": [{"@value": "Testi"}], - "http://schema.org/givenName": [{"@value": "Test"}] - } - ] - } - ], - "http://schema.org/codeRepository": [{"@id": "https://github.com/softwarepub/hermes"}], - "http://schema.org/contributor": [ - { - "@id": "_:contributor_1", - "@type": ["http://schema.org/Person"], - "http://schema.org/email": [{"@value": "test.testi@test.testi"}], - "http://schema.org/familyName": [{"@value": "Testi"}], - "http://schema.org/givenName": [{"@value": "Test"}] - } - ], - "http://schema.org/dateCreated": [{"@type": "http://schema.org/Date", "@value": "2026-01-16"}], - "http://schema.org/dateModified": [{"@type": "http://schema.org/Date", "@value": "2026-01-16"}], - "http://schema.org/datePublished": [{"@type": "http://schema.org/Date", "@value": "2026-01-16"}], - "http://schema.org/description": [{"@value": "for testing"}], - "http://schema.org/funder": [ - { - "@type": ["http://schema.org/Organization"], - "http://schema.org/name": [{"@value": "TestsTests"}] - } - ], - "http://schema.org/keywords": [{"@value": "testing"}, {"@value": "more testing"}], - "http://schema.org/license": [ - {"@id": "https://spdx.org/licenses/Adobe-2006"}, - {"@id": "https://spdx.org/licenses/Abstyles"}, - {"@id": "https://spdx.org/licenses/AGPL-1.0-only"} - ], - "http://schema.org/name": [{"@value": "Test"}], - "http://schema.org/operatingSystem": [{"@value": "Windows"}], - "http://schema.org/programmingLanguage": [{"@value": "Python"}, {"@value": "Python 3"}], - "http://schema.org/relatedLink": [{"@id": "https://docs.software-metadata.pub/en/latest"}], - "http://schema.org/releaseNotes": [{"@value": "get it now"}], - "http://schema.org/version": [{"@value": "1.1.1"}], - "https://codemeta.github.io/terms/developmentStatus": [{"@id": "abandoned"}], - "https://codemeta.github.io/terms/funding": [{"@value": "none :("}], - "https://codemeta.github.io/terms/isSourceCodeOf": [{"@id": "HERMES"}], - "https://codemeta.github.io/terms/issueTracker": [ - {"@id": "https://github.com/softwarepub/hermes/issues"} - ], - "https://codemeta.github.io/terms/referencePublication": [{"@id": "https://arxiv.org/abs/2201.09015"}] - }), - ), - ] -) -def test_do_nothing_curate(tmp_path, monkeypatch, process_result, res): - monkeypatch.chdir(tmp_path) - - manager = context_manager.HermesContext(tmp_path) - manager.prepare_step("process") - with manager["result"] as cache: - cache["expanded"] = process_result.ld_value - cache["context"] = {"@context": process_result.full_context} - manager.finalize_step("process") - - config_file = tmp_path / "hermes.toml" - config_file.write_text("") - - orig_argv = sys.argv[:] - sys.argv = ["hermes", "curate", "--path", str(tmp_path), "--config", str(config_file)] - result = {} - try: - monkeypatch.setattr(context_manager.HermesContext.__init__, "__defaults__", (tmp_path.cwd(),)) - cli.main() - except SystemExit as e: - if e.code != 0: - raise e - finally: - manager.prepare_step("curate") - result = SoftwareMetadata.load_from_cache(manager, "result") - manager.finalize_step("curate") - sys.argv = orig_argv - - assert result.data_dict == res.data_dict - - -@pytest.mark.parametrize( - "metadata", - [ - SoftwareMetadata({ - "@type": ["http://schema.org/SoftwareSourceCode"], - "http://schema.org/description": [{"@value": "for testing"}], - "http://schema.org/name": [{"@value": "Test"}] - }), - ] -) -def test_file_deposit(tmp_path, monkeypatch, metadata): - monkeypatch.chdir(tmp_path) - - manager = context_manager.HermesContext(tmp_path) - manager.prepare_step("curate") - with manager["result"] as cache: - cache["codemeta"] = metadata.compact() - manager.finalize_step("curate") - - config_file = tmp_path / "hermes.toml" - config_file.write_text("[deposit]\ntarget = \"file\"") - - orig_argv = sys.argv[:] - sys.argv = ["hermes", "deposit", "--path", str(tmp_path), "--config", str(config_file)] - result = {} - try: - monkeypatch.setattr(context_manager.HermesContext.__init__, "__defaults__", (tmp_path.cwd(),)) - cli.main() - except SystemExit as e: - if e.code != 0: - raise e - finally: - with open("codemeta.json", "r") as cache: - result = SoftwareMetadata(json.load(cache)) - sys.argv = orig_argv - - assert result == metadata - - -@pytest.mark.parametrize( - "metadata, invenio_metadata", - [ - ( - SoftwareMetadata({ - "@type": ["http://schema.org/SoftwareSourceCode"], - "http://schema.org/description": [{"@value": "for testing"}], - "http://schema.org/name": [{"@value": "Test"}], - "http://schema.org/author": [{ - "@type": "http://schema.org/Person", - "http://schema.org/familyName": [{"@value": "Test"}], - "http://schema.org/givenName": [{"@value": "Testi"}] - }], - "http://schema.org/license": [{"@id": "https://spdx.org/licenses/Apache-2.0"}] - }), - { - "upload_type": "software", - "publication_date": date.today().isoformat(), - "title": "Test", - "creators": [{"name": "Test, Testi"}], - "description": "for testing", - "access_right": "closed", - "license": "apache-2.0", - "prereserve_doi": True, - "related_identifiers": [ - {"identifier": "10.5281/zenodo.13311079", "relation": "isCompiledBy", "scheme": "doi"} - ] - } - ) - ] -) -def test_invenio_deposit(tmp_path, monkeypatch, sandbox_auth, metadata, invenio_metadata): - monkeypatch.chdir(tmp_path) - - manager = context_manager.HermesContext(tmp_path) - manager.prepare_step("curate") - with manager["result"] as cache: - cache["codemeta"] = metadata.compact() - manager.finalize_step("curate") - - (tmp_path / "test.txt").write_text("Test, oh wonderful test!\n") - - config_file = tmp_path / "hermes.toml" - config_file.write_text(f"""[deposit] -target = "invenio" -[deposit.invenio] -site_url = "https://sandbox.zenodo.org" -access_right = "closed" -auth_token = "{sandbox_auth}" -files = ["test.txt"] -[deposit.invenio.api_paths] -licenses = "api/vocabularies/licenses" -""") - - orig_argv = sys.argv[:] - sys.argv = ["hermes", "deposit", "--path", str(tmp_path), "--config", str(config_file), "--initial"] - result = {} - try: - monkeypatch.setattr(context_manager.HermesContext.__init__, "__defaults__", (tmp_path.cwd(),)) - cli.main() - except SystemExit as e: - if e.code != 0: - raise e - finally: - manager.prepare_step("deposit") - with manager["invenio"] as cache: - result = cache["deposit"] - manager.finalize_step("deposit") - sys.argv = orig_argv - - # TODO: compare to actually expected value - assert result == invenio_metadata - - -@pytest.mark.parametrize( - "metadata_in, metadata_out", - [ - ( - { - "cff": SoftwareMetadata({ - "@type": ["http://schema.org/SoftwareSourceCode"], - "http://schema.org/description": [{"@value": "for testing"}], - "http://schema.org/name": [{"@value": "Test"}], - "http://schema.org/author": [{ - "@type": "http://schema.org/Person", - "http://schema.org/familyName": [{"@value": "Test"}], - "http://schema.org/givenName": [{"@value": "Testi"}] - }], - "http://schema.org/license": [{"@id": "https://spdx.org/licenses/Apache-2.0"}] - }) - }, - SoftwareMetadata({ - "@type": ["http://schema.org/SoftwareSourceCode"], - "http://schema.org/description": [{"@value": "for testing"}], - "http://schema.org/name": [{"@value": "Test"}], - "http://schema.org/author": [{ - "@type": "http://schema.org/Person", - "http://schema.org/familyName": [{"@value": "Test"}], - "http://schema.org/givenName": [{"@value": "Testi"}] - }], - "http://schema.org/license": [{"@id": "https://spdx.org/licenses/Apache-2.0"}] - }) - ) - ] -) -def test_process(tmp_path, monkeypatch, metadata_in, metadata_out): - monkeypatch.chdir(tmp_path) - - manager = context_manager.HermesContext(tmp_path) - manager.prepare_step("harvest") - for harvester, result in metadata_in.items(): - with manager[harvester] as cache: - cache["codemeta"] = result.compact() - cache["context"] = {"@context": result.full_context} - cache["expanded"] = result.ld_value - manager.finalize_step("harvest") - - config_file = tmp_path / "hermes.toml" - config_file.write_text( - "[harvest]\nsources = [" + ", ".join('\"' + f'{harvester}' + '\"' for harvester in metadata_in) + "]" - ) - - orig_argv = sys.argv[:] - sys.argv = ["hermes", "process", "--path", str(tmp_path), "--config", str(config_file)] - result = {} - try: - monkeypatch.setattr(context_manager.HermesContext.__init__, "__defaults__", (tmp_path.cwd(),)) - cli.main() - except SystemExit as e: - if e.code != 0: - raise e - finally: - manager.prepare_step("process") - result = SoftwareMetadata.load_from_cache(manager, "result") - manager.finalize_step("process") - sys.argv = orig_argv - - assert result == metadata_out - - -@pytest.mark.parametrize( - "metadata_in, metadata_out", - [ - ( - { - "cff": SoftwareMetadata({ - "@type": ["http://schema.org/SoftwareSourceCode"], - "http://schema.org/name": [{"@value": "Test"}], - "http://schema.org/author": [ - { - "@type": "http://schema.org/Person", - "http://schema.org/familyName": [{"@value": "Test"}], - "http://schema.org/email": [{"@value": "test.testi@testis.tests"}] - }, - { - "@type": "http://schema.org/Person", - "http://schema.org/familyName": [{"@value": "Testers"}] - }, - { - "@type": "http://schema.org/Person", - "http://schema.org/familyName": [{"@value": "Tester"}], - "http://schema.org/email": [{"@value": "test@tester.tests"}] - } - ], - "http://schema.org/license": [{"@id": "https://spdx.org/licenses/Apache-2.0"}] - }), - "codemeta": SoftwareMetadata({ - "@type": ["http://schema.org/SoftwareSourceCode"], - "http://schema.org/description": [{"@value": "for testing"}], - "http://schema.org/name": [{"@value": "Test"}, {"@value": "Testis Test"}], - "http://schema.org/author": [ - { - "@type": "http://schema.org/Person", - "http://schema.org/familyName": [{"@value": "Test"}], - "http://schema.org/givenName": [{"@value": "Testi"}], - "http://schema.org/email": [ - {"@value": "test.testi@testis.tests"}, - {"@value": "test.testi@testis.tests2"} - ] - }, - { - "@type": "http://schema.org/Person", - "http://schema.org/familyName": [{"@value": "Testers"}] - } - ] - }) - }, - SoftwareMetadata({ - "@type": ["http://schema.org/SoftwareSourceCode"], - "http://schema.org/description": [{"@value": "for testing"}], - "http://schema.org/name": [{"@value": "Test"}, {"@value": "Testis Test"}], - "http://schema.org/author": [ - { - "@type": "http://schema.org/Person", - "http://schema.org/familyName": [{"@value": "Test"}], - "http://schema.org/givenName": [{"@value": "Testi"}], - "http://schema.org/email": [ - {"@value": "test.testi@testis.tests"}, - {"@value": "test.testi@testis.tests2"} - ] - }, - { - "@type": "http://schema.org/Person", - "http://schema.org/familyName": [{"@value": "Testers"}] - }, - { - "@type": "http://schema.org/Person", - "http://schema.org/familyName": [{"@value": "Tester"}], - "http://schema.org/email": [{"@value": "test@tester.tests"}] - } - ], - "http://schema.org/license": [{"@id": "https://spdx.org/licenses/Apache-2.0"}] - }) - ) - ] -) -def test_process_complex(tmp_path, monkeypatch, metadata_in, metadata_out): - monkeypatch.chdir(tmp_path) - - manager = context_manager.HermesContext(tmp_path) - manager.prepare_step("harvest") - for harvester, result in metadata_in.items(): - with manager[harvester] as cache: - cache["codemeta"] = result.compact() - cache["context"] = {"@context": result.full_context} - cache["expanded"] = result.ld_value - manager.finalize_step("harvest") - - config_file = tmp_path / "hermes.toml" - config_file.write_text( - "[harvest]\nsources = [" + ", ".join('\"' + f'{harvester}' + '\"' for harvester in metadata_in) + "]" - ) - - orig_argv = sys.argv[:] - sys.argv = ["hermes", "process", "--path", str(tmp_path), "--config", str(config_file)] - result = {} - try: - monkeypatch.setattr(context_manager.HermesContext.__init__, "__defaults__", (tmp_path.cwd(),)) - cli.main() - except SystemExit as e: - if e.code != 0: - raise e - finally: - manager.prepare_step("process") - result = SoftwareMetadata.load_from_cache(manager, "result") - manager.finalize_step("process") - sys.argv = orig_argv - - assert result == metadata_out - - -def test_invenio_postprocess(tmp_path, monkeypatch): - monkeypatch.chdir(tmp_path) - - input_file = tmp_path / ".hermes" / "deposit" / "invenio" / "result.json" - input_file.parent.mkdir(parents=True, exist_ok=True) - input_file.write_text("""{"record_id": "foo", "doi": "my_doi", "metadata": {"version": "1.0.0"}}""") - - citation_file = tmp_path / "CITATION.cff" - citation_file.write_text("cff-version: 1.2.0\ntitle: Test") - - config_file = tmp_path / "hermes.toml" - config_file.write_text( - """[postprocess] -run = ["config_invenio_record_id", "cff_doi"] -[deposit.invenio] -site_url = "https://zenodo.org" -""" - ) - - orig_argv = sys.argv[:] - sys.argv = ["hermes", "postprocess", "--path", str(tmp_path), "--config", str(config_file)] - print(" ".join(sys.argv)) - result_cff = result_toml = {} - try: - monkeypatch.setattr(context_manager.HermesContext.__init__, "__defaults__", (tmp_path.cwd(),)) - cli.main() - except SystemExit as e: - if e.code != 0: - raise e - finally: - result_toml = toml.load(config_file) - result_cff = yaml.YAML().load(citation_file) - sys.argv = orig_argv - - assert result_toml == toml.loads( - """[postprocess] -run = ["config_invenio_record_id", "cff_doi"] -[deposit.invenio] -site_url = "https://zenodo.org" -record_id = "foo" -""" - ) - assert result_cff == yaml.YAML().load( - """cff-version: 1.2.0 -title: Test -identifiers: - - type: doi - value: my_doi - description: DOI for the published version 1.0.0 [generated by hermes] -""" - ) From 4a08fbe6f6ddc6a08fe242b610323630751cd060 Mon Sep 17 00:00:00 2001 From: notactuallyfinn Date: Fri, 13 Mar 2026 16:00:24 +0100 Subject: [PATCH 218/247] added and updated comments --- docs/source/conf.py | 3 +- src/hermes/model/api.py | 54 ++++++- src/hermes/model/context_manager.py | 141 ++++++++++++++++-- src/hermes/model/error.py | 22 ++- src/hermes/model/types/__init__.py | 13 +- src/hermes/model/types/ld_context.py | 57 ++++--- src/hermes/model/types/ld_dict.py | 38 ++++- .../hermes_test/model/test_context_manager.py | 5 +- 8 files changed, 280 insertions(+), 53 deletions(-) diff --git a/docs/source/conf.py b/docs/source/conf.py index caaea466..f643abd8 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -194,7 +194,8 @@ def read_version_from_pyproject(): def autoapi_skip_member(app, obj_type, name, obj, skip, options): if obj_type == "attribute": if any(documented_type in obj.id for documented_type in [ - "ld_container", "ld_dict", "ld_list", "ld_merge_container", "ld_merge_dict", "ld_merge_list", "ld_context" + "ld_container", "ld_dict", "ld_list", "ld_merge_container", "ld_merge_dict", "ld_merge_list", "ld_context", + "HermesCache", "HermesContext", "HermesMergeError" ]): return True diff --git a/src/hermes/model/api.py b/src/hermes/model/api.py index a0e71b54..0263bfb5 100644 --- a/src/hermes/model/api.py +++ b/src/hermes/model/api.py @@ -5,32 +5,76 @@ # SPDX-FileContributor: Michael Fritzsche # SPDX-FileContributor: Stephan Druskat -from hermes.model.context_manager import HermesContext, HermesContexError +from typing import Union +from typing_extensions import Self + +from hermes.model.context_manager import HermesContext +from hermes.model.error import HermesContextError from hermes.model.types import ld_dict +from hermes.model.types.ld_container import PYTHONIZED_LD_CONTAINER from hermes.model.types.ld_context import ALL_CONTEXTS -from hermes.model.types.ld_dict import bundled_loader +from hermes.model.types.pyld_util import bundled_loader class SoftwareMetadata(ld_dict): + """ + An :class:`ld_dict` wrapper that has the standard context used by HERMES (:const:`ld_context.ALL_CONTEXTS`) + and supports loading data from the HERMES cache. + """ + + def __init__( + self: Self, + data: Union[dict[str, PYTHONIZED_LD_CONTAINER], None] = None, + extra_vocabs: Union[dict[str, str], None] = None + ) -> None: + """ + Create a new instance of an SoftwareMetadata. + + Args: + data (dict[str, PYTHONIZED_LD_CONTAINER] | None): The data the SoftwareMetadata object starts out with. + extra_vocabs (dict[str, str] | None): Extra JSON_LD context for the object. - def __init__(self, data: dict = None, extra_vocabs: dict[str, str] = None) -> None: + Returns: + None: + """ ctx = ALL_CONTEXTS + [{**extra_vocabs}] if extra_vocabs is not None else ALL_CONTEXTS super().__init__([ld_dict.from_dict(data, context=ctx).data_dict if data else {}], context=ctx) @classmethod - def load_from_cache(cls, ctx: HermesContext, source: str) -> "SoftwareMetadata": + def load_from_cache(cls: type[Self], ctx: HermesContext, source: str) -> "SoftwareMetadata": + """ + Loads the JSON_LD data from the given HermesContext object at the given source.\n + Note that only data from "codemeta.json" or ("context.json" and "expanded.json") is loaded where "codemeta.json" + is preferred. + + Args: + ctx (HermesContext): The HERMES cache the data is loaded from. + source (str): The directory the inside the cache the data is loaded from. + + Returns: + SoftwareMetadata: The SoftwareMetadata loaded from the cache. + + Raises: + HermesContextError: If neither of the listed files contains valid data for a SoftwareMetadata object. + """ + # open the directory in the context with ctx[source] as cache: + # Try loading from the "codemeta.json" file. try: return SoftwareMetadata(cache["codemeta"]) except Exception: pass + # Loading failed try from the other files. try: + # Load and set the context. context = cache["context"]["@context"] data = SoftwareMetadata() data.active_ctx = data.ld_proc.initial_ctx(context, {"documentLoader": bundled_loader}) data.context = context + # Fill the SoftwareMetadata object with data. for key, value in cache["expanded"][0].items(): data[key] = value return data except Exception as e: - raise HermesContexError("There is no (valid) data stored in the cache.") from e + # No data could be loaded, raise an error instead. + raise HermesContextError("There is no (valid) data stored in the cache.") from e diff --git a/src/hermes/model/context_manager.py b/src/hermes/model/context_manager.py index 0c641619..f92c2010 100644 --- a/src/hermes/model/context_manager.py +++ b/src/hermes/model/context_manager.py @@ -6,15 +6,42 @@ import json import os.path -import pathlib +from pathlib import Path +from types import TracebackType +from typing import Union +from typing_extensions import Self + +from hermes.model.error import HermesContextError class HermesCache: - def __init__(self, cache_dir: pathlib.Path): + """ + The HermesCache supplies the user with easy (read and write) access to the JSON files in the cache. + + Attributes: + _cache_dir (Path): The directory the cache is located at. + _cached_data (dict[str, dict]): The cache of the files in the cache. The key is the filename. + """ + def __init__(self: Self, cache_dir: Path) -> None: + """ + Creates a new HermesCache instance. + + Args: + cache_dir (Path): The directory the files are located in. + + Returns: + None: + """ self._cache_dir = cache_dir self._cached_data = {} - def __enter__(self): + def __enter__(self: Self) -> None: + """ + Caches all files in the cache_dir. + + Returns: + None: + """ if self._cache_dir.is_dir(): for filepath in self._cache_dir.glob('*'): basename, _ = os.path.splitext(filepath.name) @@ -22,7 +49,16 @@ def __enter__(self): return self - def __getitem__(self, item: str) -> dict: + def __getitem__(self: Self, item: str) -> dict: + """ + Loads a file if necessary or returns the cached value. + + Args: + item (str): The name of the file. + + Returns: + dict: The JSON value in the given file. + """ if item not in self._cached_data: filepath = self._cache_dir / f'{item}.json' if filepath.is_file(): @@ -30,10 +66,37 @@ def __getitem__(self, item: str) -> dict: return self._cached_data[item] - def __setitem__(self, key: str, value: dict): + def __setitem__(self: Self, key: str, value: dict) -> None: + """ + Writes a value into the cache.\n + Note that the files isn't immediately updated only the cache is. + + Args: + key (str): The filename the data is written too. + value (dict): The JSON value for the file. + + Returns: + None: + """ self._cached_data[key] = value - def __exit__(self, exc_type, exc_val, exc_tb): + def __exit__( + self: Self, + exc_type: Union[type[BaseException], None], + exc_val: Union[BaseException, None], + exc_tb: Union[TracebackType, None] + ) -> None: + """ + Updates the files from the cache. + + Args: + exc_type (type[BaseException] | None): The type of the exception. + exc_val: (BaseException | None): Unused + exc_tb: (TracebackType | None): Unused + + Returns: + None: + """ if exc_type is None: self._cache_dir.mkdir(exist_ok=True, parents=True) @@ -43,30 +106,78 @@ def __exit__(self, exc_type, exc_val, exc_tb): class HermesContext: + """ + The HermesContext supplies the user with easy access to the HERMES cache. + + Attributes: + project_dir (Path): The directory the project is located in. + cache_dir (Path): The cache directory inside the project_dir. + _current_step (list[str]): The list of steps (i.e. cache names). + CACHE_DIR_NAME (str): (class attribute) The relative directory all HERMES caches are located in. + """ CACHE_DIR_NAME = '.hermes' - def __init__(self, project_dir: pathlib.Path = pathlib.Path.cwd()): + def __init__(self: Self, project_dir: Path = Path.cwd()) -> None: + """ + Creates a new instance of the HermesContext. + + Args: + project_dir (Path): The directory the project is located in. + + Returns: + None: + """ self.project_dir = project_dir self.cache_dir = project_dir / self.CACHE_DIR_NAME self._current_step = [] - def prepare_step(self, step: str, *depends: str) -> None: + def prepare_step(self: Self, step: str) -> None: + """ + Add another cache dir to the list of steps. + + Args: + step (str): The new cache dir. + + Returns: + None: + """ self._current_step.append(step) - def finalize_step(self, step: str) -> None: + def finalize_step(self: Self, step: str) -> None: + """ + Remove the step from the list of steps if it is the last one. + + Args: + step (str): The cache dir that is removed. + + Returns: + None: + + Raises: + ValueError: If no step can be removed. + ValueError: If the given step is not the last one. + """ if len(self._current_step) < 1: raise ValueError("There is no step to end.") if self._current_step[-1] != step: raise ValueError(f"Cannot end step {step} while in {self._current_step[-1]}.") self._current_step.pop() - def __getitem__(self, source_name: str) -> HermesCache: + def __getitem__(self: Self, source_name: str) -> HermesCache: + """ + Return the HERMES cache at the current cache dir and the given sub dir (source_name). + + Args: + source_name (str): The name of the sub dir of the current cache dir. + + Returns: + HermesCache: The HermesCache object of the cache. + + Raises: + HermesContextError: If no step has been prepared (i.e. no current cache dir is set). + """ if len(self._current_step) < 1: - raise HermesContexError("Prepare a step first.") + raise HermesContextError("Prepare a step first.") subdir = self.cache_dir / self._current_step[-1] / source_name return HermesCache(subdir) - - -class HermesContexError(Exception): - pass diff --git a/src/hermes/model/error.py b/src/hermes/model/error.py index ae3452ae..7b480dd7 100644 --- a/src/hermes/model/error.py +++ b/src/hermes/model/error.py @@ -5,7 +5,7 @@ # SPDX-FileContributor: Michael Meinel # SPDX-FileContributor: Stephan Druskat -import typing as t +from typing import Any, Union class HermesValidationError(Exception): @@ -45,15 +45,25 @@ class HermesContextError(Exception): class HermesMergeError(Exception): """ This exception should be raised when there is an error during a merge / set operation. + + Attributes: + path (list[str | int]): The path where the merge error occured. + old_Value (Any): Old value that was stored at `path`. + new_value (Any): New value that was to be assinged. + tag: Tag data for the new value. """ - def __init__(self, path: t.List[str | int], old_value: t.Any, new_value: t.Any, **kwargs): + def __init__(self, path: list[Union[str, int]], old_value: Any, new_value: Any, **kwargs) -> None: """ Create a new merge incident. - :param path: The path where the merge error occured. - :param old_Value: Old value that was stored at `path`. - :param new_value: New value that was to be assinged. - :param kwargs: Tag data for the new value. + Args: + path (list[str | int]): The path where the merge error occured. + old_Value (Any): Old value that was stored at `path`. + new_value (Any): New value that was to be assinged. + kwargs: Tag data for the new value. + + Returns: + None: """ self.path = path self.old_value = old_value diff --git a/src/hermes/model/types/__init__.py b/src/hermes/model/types/__init__.py index 3b2089b9..1137472b 100644 --- a/src/hermes/model/types/__init__.py +++ b/src/hermes/model/types/__init__.py @@ -26,9 +26,20 @@ (ld_dict.is_json_dict, {"ld_container": lambda c, **kw: ld_dict([c], **kw)}), (lambda v: isinstance(v, str), {"python": lambda v, parent, **_: parent.ld_proc.compact_iri(parent.active_ctx, v)}), ] +""" +A list of tuples each containing a function to check if the conversion function (the second item in the tuple which +converts the given object into a JSON_LD represented by an ld_container) is applicable for a given pythonized expanded +JSON_LD value. +""" -def init_typemap(): +def init_typemap() -> None: + """ + A function registering the type conversions in _TYPEMAP with the :class:`JsonLdProcessor` class. + + Returns: + None: + """ for typecheck, conversions in _TYPEMAP: JsonLdProcessor.register_typemap(typecheck, **conversions) diff --git a/src/hermes/model/types/ld_context.py b/src/hermes/model/types/ld_context.py index 566c7129..09dd8085 100644 --- a/src/hermes/model/types/ld_context.py +++ b/src/hermes/model/types/ld_context.py @@ -5,43 +5,59 @@ # SPDX-FileContributor: Michael Meinel # SPDX-FileContributor: Stephan Druskat +from typing import Union +from typing_extensions import Self + from hermes.model.error import HermesContextError -CODEMETA_PREFIX = "https://doi.org/10.5063/schema/codemeta-2.0" -CODEMETA_CONTEXT = [CODEMETA_PREFIX] -SCHEMA_ORG_PREFIX = "http://schema.org/" -SCHEMA_ORG_CONTEXT = [{"schema": SCHEMA_ORG_PREFIX}] +CODEMETA_PREFIX: str = "https://doi.org/10.5063/schema/codemeta-2.0" +""" The prefix for codemeta terms. """ +CODEMETA_CONTEXT: list[str] = [CODEMETA_PREFIX] +""" The prefix for codemeta terms wrapped inside a list. """ + +SCHEMA_ORG_PREFIX: str = "http://schema.org/" +""" The prefix for schema.org terms. """ +SCHEMA_ORG_CONTEXT: list[dict[str, str]] = [{"schema": SCHEMA_ORG_PREFIX}] +""" The prefix for schema.org terms as value of the shortend prefix schema in a dict inside of a list. """ -PROV_PREFIX = "http://www.w3.org/ns/prov#" -PROV_CONTEXT = [{"prov": PROV_PREFIX}] +PROV_PREFIX: str = "http://www.w3.org/ns/prov#" +""" The prefix for provenance terms. """ +PROV_CONTEXT: list[dict[str, str]] = [{"prov": PROV_PREFIX}] +""" The prefix for provenance terms as value of the shortend prefix schema in a dict inside of a list. """ -HERMES_RT_PREFIX = "https://schema.software-metadata.pub/hermes-runtime/1.0/" -HERMES_RT_CONTEXT = [{"hermes-rt": HERMES_RT_PREFIX}] -HERMES_CONTENT_CONTEXT = [ +HERMES_RT_PREFIX: str = "https://schema.software-metadata.pub/hermes-runtime/1.0/" +""" The prefix for HERMES runtime terms. """ +HERMES_RT_CONTEXT: list[dict[str, str]] = [{"hermes-rt": HERMES_RT_PREFIX}] +""" The prefix for HERMES runtime terms as value of the shortend prefix schema in a dict inside of a list. """ +HERMES_CONTENT_CONTEXT: list[dict[str, str]] = [ {"hermes": "https://schema.software-metadata.pub/hermes-content/1.0/"} ] +""" The prefix for HERMES content terms as value of the shortend prefix schema in a dict inside of a list. """ -HERMES_CONTEXT = [{**HERMES_RT_CONTEXT[0], **HERMES_CONTENT_CONTEXT[0]}] +HERMES_CONTEXT: list[dict[str, str]] = [{**HERMES_RT_CONTEXT[0], **HERMES_CONTENT_CONTEXT[0]}] +""" A list containing a dict containing all key, value pairs from HERMES_RT_CONTEXT and HERMES_CONTENT_CONTEXT. """ -HERMES_BASE_CONTEXT = [ +HERMES_BASE_CONTEXT: list[dict[str, str]] = [ *CODEMETA_CONTEXT, {**SCHEMA_ORG_CONTEXT[0], **HERMES_CONTENT_CONTEXT[0]}, ] -HERMES_PROV_CONTEXT = [ +""" The JSON_LD context commonly used by HERMES excluding provenance context. """ +HERMES_PROV_CONTEXT: list[dict[str, str]] = [ {**SCHEMA_ORG_CONTEXT[0], **HERMES_RT_CONTEXT[0], **PROV_CONTEXT[0]} ] +""" The JSON_LD context commonly used by HERMES excluding codemeta context. """ -ALL_CONTEXTS = [ +ALL_CONTEXTS: list[Union[str, dict[str, str]]] = [ *CODEMETA_CONTEXT, {**SCHEMA_ORG_CONTEXT[0], **PROV_CONTEXT[0], **HERMES_CONTEXT[0]}, ] +""" list[str | dict[str, str]]: The JSON_LD context commonly used by HERMES. """ class ContextPrefix: """ - FIXME: Rename to `LDContext`, `HermesLDContext` or similar, - FIXME: as this class represents JSON-LD contexts. + FIXME: Rename to `LDContext`, `HermesLDContext` or similar, as this class represents JSON-LD contexts. Represents the context of the hermes JSON-LD data model and provides two views on the model: - as a list of linked data vocabularies, where items can be vocabulary base IRI strings and/or dictionaries mapping @@ -49,11 +65,11 @@ class ContextPrefix: - as a dict mapping prefixes to vocabulary IRIs, where the default vocabulary has a prefix of None. Attributes: - vocabularies (list[str | dict]): TODO - context: TODO + vocabularies (list[str | dict]): The list of JSON_LD context used for expansion. + context dict[str | None, str]: The mapping of prefix its expanded IRI. """ - def __init__(self, vocabularies: list[str | dict]): + def __init__(self: Self, vocabularies: list[str | dict]) -> None: """ If the list contains more than one string item, the last one will be used as the default vocabulary. If a prefix string is used more than once across all dictionaries in the list, the last item with this key will be included @@ -82,7 +98,7 @@ def __init__(self, vocabularies: list[str | dict]): } ) - def __getitem__(self, compressed_term: str | tuple) -> str: + def __getitem__(self: Self, compressed_term: str | tuple) -> str: """ Gets the fully qualified IRI for a term from a vocabulary inside the initialized context. The vocabulary must have been added to the context at initialization. @@ -125,4 +141,5 @@ def __getitem__(self, compressed_term: str | tuple) -> str: return base_iri + term -iri_map = ContextPrefix(ALL_CONTEXTS) +iri_map: ContextPrefix = ContextPrefix(ALL_CONTEXTS) +""" An object returning the fully qualified IRI for a compressed term using the contexts in ALL_CONTEXTS. """ diff --git a/src/hermes/model/types/ld_dict.py b/src/hermes/model/types/ld_dict.py index 5bdc5bba..8561887c 100644 --- a/src/hermes/model/types/ld_dict.py +++ b/src/hermes/model/types/ld_dict.py @@ -215,15 +215,47 @@ def __ne__( return NotImplemented return not x - def __bool__(self): + def __bool__(self: Self) -> bool: + """ + Returns the truth value self would have if it was a normal dict.\n + I.e. returns true if no key, value pair is in self. + + Returns: + bool: The truth value of self. + """ return bool(self.data_dict) - def setdefault(self, key, default): + def setdefault( + self: Self, + key: str, + default: Union[JSON_LD_VALUE, BASIC_TYPE, TIME_TYPE, ld_dict, ld_list] + ) -> ld_list: + """ + Get the value for the given key if self has a value for the key. Otherwise set the value for key to default and + then return the value at key in self. + + Args: + key (str): The key at which the value is returned. + default (JSON_LD_VALUE | BASIC_TYPE | TIME_TYPE | ld_dict | ld_list): The value that is set at key in self + if there is no value for key in self. + + Returns: + ld_list: The value at key in self (if no value at key in self, it is set to default first). + """ if key not in self: self[key] = default return self[key] - def emplace(self, key): + def emplace(self: Self, key: str) -> None: + """ + Emplace the value at key in self (it is set to an empty list) if there is no value yet. + + Args: + key (str): The key at which the value in self is emplaced. + + Returns: + None: + """ if key not in self: self[key] = [] diff --git a/test/hermes_test/model/test_context_manager.py b/test/hermes_test/model/test_context_manager.py index 231e4df1..010d6cc0 100644 --- a/test/hermes_test/model/test_context_manager.py +++ b/test/hermes_test/model/test_context_manager.py @@ -7,7 +7,8 @@ import pytest from pathlib import Path -from hermes.model.context_manager import HermesContext, HermesCache, HermesContexError +from hermes.model.context_manager import HermesContext, HermesCache +from hermes.model.error import HermesContextError def test_context_hermes_dir_default(): @@ -30,7 +31,7 @@ def test_context_get_error(): ctx = HermesContext() ctx.prepare_step("ham") ctx.finalize_step("ham") - with pytest.raises(HermesContexError, match="Prepare a step first."): + with pytest.raises(HermesContextError, match="Prepare a step first."): ctx["spam"]._cache_dir == Path('./.hermes/spam').absolute() From 1d1c18d156d0c3876a2b86f154ccb2e7d6ec8340 Mon Sep 17 00:00:00 2001 From: notactuallyfinn Date: Mon, 16 Mar 2026 11:34:15 +0100 Subject: [PATCH 219/247] commented the rest of the new files --- docs/source/conf.py | 4 +- src/hermes/model/api.py | 4 +- src/hermes/model/context_manager.py | 2 +- src/hermes/model/error.py | 4 +- src/hermes/model/merge/action.py | 198 +++++++++++----------- src/hermes/model/merge/container.py | 249 ++++++++++++---------------- src/hermes/model/merge/match.py | 75 +++++---- src/hermes/model/merge/strategy.py | 9 +- 8 files changed, 266 insertions(+), 279 deletions(-) diff --git a/docs/source/conf.py b/docs/source/conf.py index f643abd8..cc5b0fec 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -194,8 +194,8 @@ def read_version_from_pyproject(): def autoapi_skip_member(app, obj_type, name, obj, skip, options): if obj_type == "attribute": if any(documented_type in obj.id for documented_type in [ - "ld_container", "ld_dict", "ld_list", "ld_merge_container", "ld_merge_dict", "ld_merge_list", "ld_context", - "HermesCache", "HermesContext", "HermesMergeError" + "Collect", "HermesCache", "HermesContext", "HermesMergeError", "ld_container", "ld_context", "ld_dict", + "ld_list", "ld_merge_dict", "MergeSet" ]): return True diff --git a/src/hermes/model/api.py b/src/hermes/model/api.py index 0263bfb5..aac88b9e 100644 --- a/src/hermes/model/api.py +++ b/src/hermes/model/api.py @@ -8,12 +8,12 @@ from typing import Union from typing_extensions import Self -from hermes.model.context_manager import HermesContext -from hermes.model.error import HermesContextError from hermes.model.types import ld_dict from hermes.model.types.ld_container import PYTHONIZED_LD_CONTAINER from hermes.model.types.ld_context import ALL_CONTEXTS from hermes.model.types.pyld_util import bundled_loader +from .context_manager import HermesContext +from .error import HermesContextError class SoftwareMetadata(ld_dict): diff --git a/src/hermes/model/context_manager.py b/src/hermes/model/context_manager.py index f92c2010..837c1518 100644 --- a/src/hermes/model/context_manager.py +++ b/src/hermes/model/context_manager.py @@ -11,7 +11,7 @@ from typing import Union from typing_extensions import Self -from hermes.model.error import HermesContextError +from .error import HermesContextError class HermesCache: diff --git a/src/hermes/model/error.py b/src/hermes/model/error.py index 7b480dd7..1318420d 100644 --- a/src/hermes/model/error.py +++ b/src/hermes/model/error.py @@ -48,7 +48,7 @@ class HermesMergeError(Exception): Attributes: path (list[str | int]): The path where the merge error occured. - old_Value (Any): Old value that was stored at `path`. + old_value (Any): Old value that was stored at `path`. new_value (Any): New value that was to be assinged. tag: Tag data for the new value. """ @@ -58,7 +58,7 @@ def __init__(self, path: list[Union[str, int]], old_value: Any, new_value: Any, Args: path (list[str | int]): The path where the merge error occured. - old_Value (Any): Old value that was stored at `path`. + old_value (Any): Old value that was stored at `path`. new_value (Any): New value that was to be assinged. kwargs: Tag data for the new value. diff --git a/src/hermes/model/merge/action.py b/src/hermes/model/merge/action.py index b9b516ef..1a45d67e 100644 --- a/src/hermes/model/merge/action.py +++ b/src/hermes/model/merge/action.py @@ -10,8 +10,8 @@ from typing import TYPE_CHECKING, Any, Callable, Union from typing_extensions import Self -from ..types import ld_dict, ld_list -from ..types.ld_container import BASIC_TYPE, JSON_LD_VALUE, TIME_TYPE +from hermes.model.types import ld_dict, ld_list +from hermes.model.types.ld_container import BASIC_TYPE, JSON_LD_VALUE, TIME_TYPE if TYPE_CHECKING: from .container import ld_merge_dict, ld_merge_list @@ -35,23 +35,23 @@ def merge( An abstract method that needs to be implemented by all subclasses to have a generic way to use the merge actions. - :param target: The ld_merge_dict inside of which the items are merged. - :type target: ld_merge_dict - :param key: The "path" of keys so that parent[key[-1]] is value and - for the outermost parent of target out_parent out_parent[key[0]]...[key[-1]] results in value. - :type key: list[str | int] - :param value: The value inside target that is to be merged with update. - :type value: ld_merge_list - :param update: The value that is to be merged into target with value. - :type update: BASIC_TYPE | TIME_TYPE | ld_dict | ld_list - - :return: The merged value in an arbitrary format that is supported by :meth:`ld_dict.__setitem__`. - :rtype: JSON_LD_VALUE | BASIC_TYPE | TIME_TYPE | ld_dict | ld_list + Args: + target (ld_merge_dict): The ld_merge_dict inside of which the items are merged. + key (list[str | int]): The "path" of keys so that ``target[key[-1]]`` is ``value`` and for the outermost + parent of ``target`` out_parent ``out_parent[key[0]]...[key[-1]]`` results in ``value``. + value (ld_merge_list): The value inside ``target`` that is to be merged with ``update``. + update (BASIC_TYPE | TIME_TYPE | ld_dict | ld_list): The value that is to be merged into ``target`` + with ``value``. + + Returns: + JSON_LD_VALUE | BASIC_TYPE | TIME_TYPE | ld_dict | ld_list: + The merged value in an arbitrary format that is supported by :meth:`ld_dict.__setitem__`. """ raise NotImplementedError() class Reject(MergeAction): + """ :class:`MergeAction` providing a merge function for rejecting the incoming item. """ def merge( self: Self, target: ld_merge_dict, @@ -60,21 +60,20 @@ def merge( update: Union[BASIC_TYPE, TIME_TYPE, ld_dict, ld_list] ) -> ld_merge_list: """ - Rejects the new data ``update`` and lets target add an entry to itself documenting what data has been rejected. - - :param target: The ld_merge_dict inside of which the items are merged. - :type target: ld_merge_dict - :param key: The "path" of keys so that parent[key[-1]] is value and - for the outermost parent of target out_parent out_parent[key[0]]...[key[-1]] results in value. - :type key: list[str | int] - :param value: The value inside target that is to be merged with update.
This value won't be changed. - :type value: ld_merge_list - :param update: The value that is to be merged into target with value.
This value will be rejected. - :type update: BASIC_TYPE | TIME_TYPE | ld_dict | ld_list - - :return: The merged value.
- This value will always be value. - :rtype: ld_merge_list + Rejects the new data ``update`` and lets ``target`` add an entry to itself + documenting what data has been rejected. + + Args: + target (ld_merge_dict): The ld_merge_dict inside of which the items are merged. + key (list[str | int]): The "path" of keys so that ``target[key[-1]]`` is ``value`` and for the outermost + parent of ``target`` out_parent ``out_parent[key[0]]...[key[-1]]`` results in ``value``. + value (ld_merge_list): The value inside ``target`` that is to be merged with ``update``. + This value won't be changed. + update (BASIC_TYPE | TIME_TYPE | ld_dict | ld_list): The value that is to be merged into ``target`` with + ``value``. This value will be rejected. + + Returns: + ld_merge_list: The merged value. This value will always be ``value``. """ # Add the entry that data has been rejected. target.reject(key, update) @@ -83,6 +82,7 @@ def merge( class Replace(MergeAction): + """ :class:`MergeAction` providing a merge function for replacing the current item with the incoming one. """ def merge( self: Self, target: ld_merge_dict, @@ -92,30 +92,28 @@ def merge( ) -> Union[BASIC_TYPE, TIME_TYPE, ld_dict, ld_list]: """ Replaces the old data ``value`` with the new data ``update`` - and lets target add an entry to itself documenting what data has been replaced. - - :param target: The ld_merge_dict inside of which the items are merged. - :type target: ld_merge_dict - :param key: The "path" of keys so that parent[key[-1]] is value and - for the outermost parent of target out_parent out_parent[key[0]]...[key[-1]] results in value. - :type key: list[str | int] - :param value: The value inside target that is to be merged with update.
This value will bew replaced. - :type value: ld_merge_list - :param update: The value that is to be merged into target with value.
- This value will be used instead of value. - :type update: BASIC_TYPE | TIME_TYPE | ld_dict | ld_list - - :return: The merged value.
- This value will be update. - :rtype: BASIC_TYPE | TIME_TYPE | ld_dict | ld_list + and lets ``target`` add an entry to itself documenting what data has been replaced. + + Args: + target (ld_merge_dict): The ld_merge_dict inside of which the items are merged. + key (list[str | int]): The "path" of keys so that ``target[key[-1]]`` is ``value`` and for the outermost + parent of ``target`` out_parent ``out_parent[key[0]]...[key[-1]]`` results in ``value``. + value (ld_merge_list): The value inside ``target`` that is to be merged with ``update``. + This value will bew replaced. + update (BASIC_TYPE | TIME_TYPE | ld_dict | ld_list): The value that is to be merged into ``target`` with + ``value``. This value will be used instead of ``value``. + + Returns: + BASIC_TYPE | TIME_TYPE | ld_dict | ld_list: The merged value. This value will be ``update``. """ - # If necessary, add the entry that data has been replaced. + # Add the entry that data has been replaced. target.replace(key, value) # Return the new value. return update class Concat(MergeAction): + """ :class:`MergeAction` providing a merge function for appending the incoming items to the current items. """ def merge( self: Self, target: ld_merge_dict, @@ -126,19 +124,16 @@ def merge( """ Concatenates the new data ``update`` to the old data ``value``. - :param target: The ld_merge_dict inside of which the items are merged. - :type target: ld_merge_dict - :param key: The "path" of keys so that parent[key[-1]] is value and - for the outermost parent of target out_parent out_parent[key[0]]...[key[-1]] results in value. - :type key: list[str | int] - :param value: The value inside target that is to be merged with update. - :type value: ld_merge_list - :param update: The value that is to be merged into target with value. - :type update: BASIC_TYPE | TIME_TYPE | ld_dict | ld_list - - :return: The merged value.
- ``value`` concatenated with ``update``. - :rtype: ld_merge_list + Args: + target (ld_merge_dict): The ld_merge_dict inside of which the items are merged. + key (list[str | int]): The "path" of keys so that ``target[key[-1]]`` is ``value`` and for the outermost + parent of ``target`` out_parent ``out_parent[key[0]]...[key[-1]]`` results in ``value``. + value (ld_merge_list): The value inside ``target`` that is to be merged with ``update``. + update (BASIC_TYPE | TIME_TYPE | ld_dict | ld_list): The value that is to be merged into ``target`` + with ``value``. + + Returns: + ld_merge_list: The merged value (``value`` concatenated with ``update``). """ # Concatenate the items and return the result. if isinstance(update, (list, ld_list)): @@ -149,18 +144,27 @@ def merge( class Collect(MergeAction): + """ + :class:`MergeAction` providing a merge function for appending the incoming items to the current items. But an item + will only be appended if it has no match in the list of current items (including the already appended ones). + + Attributes: + match (Callable[[Any, Any], bool]): The function used to evaluate equality while merging. + reject_incoming (bool): Whether the incoming item in a match should get rejected (True) or replaced (False). + """ + def __init__(self: Self, match: Callable[[Any, Any], bool], reject_incoming: bool = True) -> None: """ Set the match function for this collect merge action. And the behaivior for matches. - :param match: The function used to evaluate equality while merging. - :type match: Callable[[Any, Any], bool] - :param reject_incoming: If an incoming item matches an already collected one, if ``reject_incoming`` True, - the incoming item gets rejected, if ``reject_incoming`` False, the match of the incoming item gets replaced. - :type reject_incoming: bool + Args: + match (Callable[[Any, Any], bool]): The function used to evaluate equality while merging. + reject_incoming (bool): If an incoming item matches an already collected one, if ``reject_incoming`` True, + the incoming item gets rejected, if ``reject_incoming`` False, the match of the incoming item gets + replaced. - :return: - :rtype: None + Returns: + None: """ self.match = match self.reject_incoming = reject_incoming @@ -175,18 +179,16 @@ def merge( """ Collects the unique items (according to :attr:`match`) from ``value`` and ``update``. - :param target: The ld_merge_dict inside of which the items are merged. - :type target: ld_merge_dict - :param key: The "path" of keys so that parent[key[-1]] is value and - for the outermost parent of target out_parent out_parent[key[0]]...[key[-1]] results in value. - :type key: list[str | int] - :param value: The value inside target that is to be merged with update. - :type value: ld_merge_list - :param update: The value that is to be merged into target with value. - :type update: BASIC_TYPE | TIME_TYPE | ld_dict | ld_list - - :return: The merged value. - :rtype: ld_merge_list + Args: + target (ld_merge_dict): The ld_merge_dict inside of which the items are merged. + key (list[str | int]): The "path" of keys so that ``target[key[-1]]`` is ``value`` and for the outermost + parent of ``target`` out_parent ``out_parent[key[0]]...[key[-1]]`` results in ``value``. + value (ld_merge_list): The value inside ``target`` that is to be merged with ``update``. + update (BASIC_TYPE | TIME_TYPE | ld_dict | ld_list): The value that is to be merged into ``target`` + with ``value``. + + Returns: + ld_merge_list: The merged value. """ if not isinstance(update, (list, ld_list)): update = [update] @@ -207,17 +209,27 @@ def merge( class MergeSet(MergeAction): + """ + :class:`MergeAction` providing a merge function for merging the incoming items with the current items. An item + will be appended if it has no match in the list of current items (including the already appended ones), otherwise + it will be merged with its first match. + + Attributes: + match (Callable[[Any, Any], bool]): The function used to evaluate equality while merging. + """ + def __init__(self: Self, match: Callable[[Any, Any], bool]) -> None: """ Set the match function for this collect merge action. - :param match: The function used to evaluate equality while merging. - :type match: Callable[[ANy, Any], bool] + Args: + match (Callable[[Any, Any], bool]): The function used to evaluate equality while merging. - :return: - :rtype: None + Returns: + None: """ self.match = match + """ Callable[[Any, Any], bool]: The function used to evaluate equality while merging. """ def merge( self: Self, @@ -229,18 +241,16 @@ def merge( """ Merges similar items (according to :attr:`match`) from ``value`` and ``update``. - :param target: The ld_merge_dict inside of which the items are merged. - :type target: ld_merge_dict - :param key: The "path" of keys so that parent[key[-1]] is value and - for the outermost parent of target out_parent out_parent[key[0]]...[key[-1]] results in value. - :type key: list[str | int] - :param value: The value inside target that is to be merged with update. - :type value: ld_merge_list - :param update: The value that is to be merged into target with value. - :type update: BASIC_TYPE | TIME_TYPE | ld_dict | ld_list - - :return: The merged value. - :rtype: ld_merge_list + Args: + target (ld_merge_dict): The ld_merge_dict inside of which the items are merged. + key (list[str | int]): The "path" of keys so that ``target[key[-1]]`` is ``value`` and for the outermost + parent of ``target`` out_parent out_parent[key[0]]...[key[-1]] results in ``value``. + value (ld_merge_list): The value inside ``target`` that is to be merged with ``update``. + update (BASIC_TYPE | TIME_TYPE | ld_dict | ld_list): The value that is to be merged into ``target`` + with ``value``. + + Returns: + ld_merge_list: The merged value. """ if not isinstance(update, (list, ld_list)): update = [update] diff --git a/src/hermes/model/merge/container.py b/src/hermes/model/merge/container.py index 2be14694..d2278dfd 100644 --- a/src/hermes/model/merge/container.py +++ b/src/hermes/model/merge/container.py @@ -10,11 +10,11 @@ from typing import TYPE_CHECKING, Any, Callable, Union from typing_extensions import Self -from ..types import ld_container, ld_context, ld_dict, ld_list -from ..types.ld_container import ( +from hermes.model.types import ld_container, ld_context, ld_dict, ld_list +from hermes.model.types.ld_container import ( BASIC_TYPE, EXPANDED_JSON_LD_VALUE, JSON_LD_CONTEXT_DICT, JSON_LD_VALUE, TIME_TYPE ) -from ..types.pyld_util import bundled_loader +from hermes.model.types.pyld_util import bundled_loader from .strategy import CODEMETA_STRATEGY, PROV_STRATEGY if TYPE_CHECKING: @@ -24,7 +24,7 @@ class _ld_merge_container: """ Abstract base class for ld_merge_dict and ld_merge_list, - providing the merge containers with overrides of ld_container._to_python(). + providing the merge containers with an override of :meth:`ld_container._to_python`. See also :class:`ld_dict`, :class:`ld_list` and :class:`ld_container`. """ @@ -34,18 +34,16 @@ def _to_python( ld_value: Union[EXPANDED_JSON_LD_VALUE, dict[str, EXPANDED_JSON_LD_VALUE], list[str], str] ) -> Union["ld_merge_dict", "ld_merge_list", BASIC_TYPE, TIME_TYPE]: """ - Returns a pythonized version of the given value pretending the value is in self and full_iri its key. - - :param self: the ld_container ld_value is considered to be in. - :type self: Self - :param full_iri: The expanded iri of the key of ld_value / self (later if self is not a dictionary). - :type full_iri: str - :param ld_value: The value thats pythonized value is requested. ld_value has to be valid expanded JSON-LD if it - was embeded in self._data. - :type ld_value: EXPANDED_JSON_LD_VALUE | dict[str, EXPANDED_JSON_LD_VALUE] | list[str] | str - - :return: The pythonized value of the ld_value. - :rtype: ld_merge_dict | ld_merge_list | BASIC_TYPE | TIME_TYPE + Returns a pythonized version of ``ld_value`` pretending the value is in ``self`` and ``full_iri`` its key. + + Args: + full_iri (str): The expanded iri of the key of ``ld_value`` / ``self`` (later if self is not a dictionary). + ld_value (EXPANDED_JSON_LD_VALUE | dict[str, EXPANDED_JSON_LD_VALUE] | list[str] | str): + The value thats pythonized value is requested. ``ld_value`` has to be valid expanded JSON-LD if it + was embeded in ``self._data``. + + Returns: + ld_merge_dict | ld_merge_list | BASIC_TYPE | TIME_TYPE: The pythonized value of ``ld_value``. """ value = super()._to_python(full_iri, ld_value) # replace ld_dicts with ld_merge_dicts @@ -88,21 +86,16 @@ def __init__( Create a new ld_merge_list. For further information on this function and the errors it throws see :meth:`ld_list.__init__`. - :param self: The instance of ld_merge_list to be initialized. - :type self: Self - :param data: The expanded json-ld data that is mapped (must be valid for @set, @list or @graph) - :type data: list[str] | list[dict[str, BASIC_TYPE | EXPANDED_JSON_LD_VALUE]] - :param parent: parent node of this container. - :type parent: ld_container | None - :param key: key into the parent container. - :type key: str | None - :param index: index into the parent container. - :type index: int | None - :param context: local context for this container. - :type context: list[str | JSON_LD_CONTEXT_DICT] | None - - :return: - :rtype: None + Args: + data (list[str] | list[dict[str, BASIC_TYPE | EXPANDED_JSON_LD_VALUE]]): + The expanded json-ld data that is + parent (ld_container | None): parent node of this container. + key (str | None): key into the parent container. + index (int | None): index into the parent container. + context (list[str | JSON_LD_CONTEXT_DICT] | None): local context for this container. + + Returns: + None: """ super().__init__(data, parent=parent, key=key, index=index, context=context) @@ -112,8 +105,9 @@ class ld_merge_dict(_ld_merge_container, ld_dict): ld_dict wrapper providing methods to merge an object of this class with an ld_dict object. See also :class:`ld_dict` and :class:`ld_merge_container`. - :ivar strategies: The strategies for merging different types of values in the ld_dicts. - :ivartype strategies: dict[str | None, dict[str | None, MergeAction]] + Attributes: + strategies (dict[str | None, dict[str | None, MergeAction]]): + The strategies for merging different types of values in the ld_dicts. """ def __init__( @@ -126,26 +120,20 @@ def __init__( context: Union[list[Union[str, JSON_LD_CONTEXT_DICT]], None] = None ) -> None: """ - Create a new instance of an ld_merge_dict. - See also :meth:`ld_dict.__init__`. - - :param self: The instance of ld_container to be initialized. - :type self: Self - :param data: The expanded json-ld data that is mapped. - :type data: EXPANDED_JSON_LD_VALUE - :param parent: parent node of this container. - :type parent: ld_dict | ld_list | None - :param key: key into the parent container. - :type key: str | None - :param index: index into the parent container. - :type index: int | None - :param context: local context for this container. - :type context: list[str | JSON_LD_CONTEXT_DICT] | None - - :return: - :rtype: None - - :raises ValueError: If the given data doesn't represent an ld_dict. + Create a new instance of an ld_merge_dict. See also :meth:`ld_dict.__init__`. + + Args: + data (EXPANDED_JSON_LD_VALUE): The expanded json-ld data that is mapped. + parent (ld_dict | ld_list | None): parent node of this container. + key (str | None): key into the parent container. + index (int | None): index into the parent container. + context (list[str | JSON_LD_CONTEXT_DICT] | None): local context for this container. + + Returns: + None: + + Raises: + ValueError: If ``data`` doesn't represent an ld_dict. """ super().__init__(data, parent=parent, key=key, index=index, context=context) @@ -160,16 +148,15 @@ def update_context( self: Self, other_context: Union[list[Union[str, JSON_LD_CONTEXT_DICT]], None] ) -> None: """ - Updates selfs context with other_context. + Updates ``self`` s context with ``other_context``. JSON-LD processing prioritizes the context values in order (first least important, last most important). - :param self: The instance of the ld_merge_dict context is added to. - :type self: Self - :param other_context: The context object that is added to selfs context. - :type other_context: list[str | JSON_LD_CONTEXT_DICT] | None + Args: + other_context (list[str | JSON_LD_CONTEXT_DICT] | None): + The context object that is added to ``self`` s context. - :return: - :rtype: None + Returns: + None: """ if other_context: if not isinstance(self.context, list): @@ -184,16 +171,14 @@ def update_context( def update(self: Self, other: ld_dict) -> None: """ - Updates/ Merges this ld_merge dict with the given ld_dict other. - This overwrites :meth:`ld_dict.update`, and may cause unexpected behavior if not used carefully. + Updates/ Merges ``self`` with the given ld_dict ``other``. + Note that this overwrites :meth:`ld_dict.update`, and may cause unexpected behavior if not used carefully. - :param self: The ld_merge_dict that is updated with other. - :type self: Self - :param other: The ld_container that is merged into self. - :type other: ld_dict + Args: + other (ld_dict): The ld_container that is merged into ``self``. - :return: - :rtype: None + Returns: + None: """ # update add all new context if isinstance(other, ld_dict): @@ -205,27 +190,27 @@ def update(self: Self, other: ld_dict) -> None: def add_strategy(self: Self, strategy: dict[Union[str, None], dict[Union[str, None], MergeAction]]) -> None: """ - Adds the given strategy to the self.strategies. + Adds ``strategy`` to the ``self.strategies``. + + Args: + strategy (dict[str | None, dict[str | None, MergeAction]]): The object describing how which object types are + supposed to be merged. - :param self: The ld_merge_dict the strategy is added to. - :type self: Self - :param strategy: The object describing how which object types are supposed to be merged. - :type strategy: dict[str | None, dict[str | None, MergeAction]] + Returns: + None: """ for key, value in strategy.items(): self.strategies[key] = {**value, **self.strategies.get(key, {})} - def __setitem__(self: Self, key: str, value: Union[JSON_LD_VALUE, BASIC_TYPE, TIME_TYPE, ld_dict, ld_list]): + def __setitem__(self: Self, key: str, value: Union[JSON_LD_VALUE, BASIC_TYPE, TIME_TYPE, ld_dict, ld_list]) -> None: """ - Creates the new entry for self[key] using self.strategies on the values in self[key] and value. - Wraps :meth:`ld_dict.__setitem__`, and may cause unexpected behavior if not used carefully. - - :param self: The ld_merge_dict whose value at key gets updated/ merged with value. - :type self: Self - :param key: The key at whicht the value is updated/ merged at in self. - :type key: str - :param value: The value that is merged into self[key]. - :type value: JSON_LD_VALUE | BASIC_TYPE | TIME_TYPE | ld_dict | ld_list + Creates the new entry for ``self[key]`` using ``self.strategies`` on the values in ``self[key]`` and ``value``. + Note that this overwrites :meth:`ld_dict.__setitem__` and may cause unexpected behavior if not used carefully. + + Args: + key (str): The key at which the value is updated/ merged at in ``self``. + value (JSON_LD_VALUE | BASIC_TYPE | TIME_TYPE | ld_dict | ld_list): The value that is merged into + ``self[key]``. """ # create the new item if self[key] and value have to be merged. if key in self: @@ -240,20 +225,18 @@ def match( match: Callable[[Any, Any], bool] ) -> Union[BASIC_TYPE, TIME_TYPE, "ld_merge_dict", ld_merge_list]: """ - Returns the first item in self[key] for which match(item, value) returns true. - If no such item is found None is returned instead. - - :param self: The ld_merge_dict in whose entry for key a match for value is searched. - :type self: Self - :param key: The key to the items in self in which a match for value is searched. - :type key: str - :param value: The value a match is searched for in self[key]. - :type value: Union[JSON_LD_VALUE, BASIC_TYPE, TIME_TYPE, ld_dict, ld_list] - :param match: The method defining if two objects are a match. - :type match: Callable[[Any, Any], bool] - - :return: The item in self[key] that is a match to value if one exists else None - :rtype: BASIC_TYPE | TIME_TYPE | ld_merge_dict | ld_merge_list + Returns the first item in ``self[key]`` for which ``match(item, value)`` returns ``True``. + If no such item is found ``None`` is returned instead. + + Args: + key (str): The key to the items in ``self`` from which a match for ``value`` is searched. + value (Union[JSON_LD_VALUE, BASIC_TYPE, TIME_TYPE, ld_dict, ld_list]): The value a match is searched for in + ``self[key]``. + match (Callable[[Any, Any], bool]): The method defining if two objects are a match. + + Returns: + BASIC_TYPE | TIME_TYPE | ld_merge_dict | ld_merge_list: + The item in ``self[key]`` that is a match for``value`` if one exists otherwise ``None``. """ # iterate over all items in self[key] and return the first that is a match for item in self[key]: @@ -264,17 +247,15 @@ def _merge_item( self: Self, key: str, value: Union[BASIC_TYPE, TIME_TYPE, ld_dict, ld_list] ) -> Union[BASIC_TYPE, TIME_TYPE, "ld_merge_dict", ld_merge_list]: """ - Applies the most suitable merge strategy to merge self[key] and value and then returns the result. + Applies the most suitable merge strategy to merge ``self[key]`` and value and then returns the result. - :param self: The ld_merge_dict whose entry at key is to be merged with value. - :type self: Self - :param key: The key to the entry in self that is to be merged with value. - :type key: str - :param value: The value that is to be merged with self[key]. - :type value: BASIC_TYPE | TIME_TYPE | ld_dict | ld_list + Args: + key (str): The key to the entry in ``self`` that is to be merged with ``value``. + value (BASIC_TYPE | TIME_TYPE | ld_dict | ld_list): The value that is to be merged with ``self[key]``. - :return: The result of the merge from self[key] with value. - :rtype: BASIC_TYPE | TIME_TYPE | ld_merge_dict | ld_merge_list + Returns: + BASIC_TYPE | TIME_TYPE | ld_merge_dict | ld_merge_list: + The result of the merge from ``self[key]`` with ``value``. """ # search for all applicable strategies strategy = {**self.strategies[None]} @@ -290,19 +271,15 @@ def _add_related( self: Self, rel: str, key: str, value: Union[BASIC_TYPE, TIME_TYPE, ld_dict, ld_list] ) -> None: """ - Adds an entry for rel to self containing which key and value is affected. - - :param self: The ld_merge_container the special entry is added to. - :type self: Self - :param rel: The "type" of the special entry (used as the key). - :type rel: str - :param key: The key of the affected key, value pair in self. - :type key: str - :param value: The value of the affected key, value pair in self. - :type value: BASIC_TYPE | TIME_TYPE | ld_dict | ld_list - - :return: - :rtype: None + Adds an entry for ``rel`` to ``self`` containing which key and value is affected. + + Args: + rel (str): The "type" of the special entry (used as the key). + key (str): The key of the affected key, value pair in ``self``. + value (BASIC_TYPE | TIME_TYPE | ld_dict | ld_list): The value of the affected key, value pair in ``self``. + + Returns: + None: """ # FIXME: key not only string # make sure appending is possible @@ -312,38 +289,32 @@ def _add_related( def reject(self: Self, key: str, value: Union[BASIC_TYPE, TIME_TYPE, ld_dict, ld_list]) -> None: """ - Adds an entry to self containing containing information that the key, value pair - key, value has been rejected in the merge. + Adds an entry to ``self`` containing containing information that the key, value pair + ``key``, ``value`` has been rejected in the merge. For further information see :meth:`ld_merge_dict._add_related`. - :param self: The ld_merge_container the special entry is added to. - :type self: Self - :param key: The key of the rejected key, value pair in self. - :type key: str - :param value: The value of the rejected key, value pair in self. - :type value: BASIC_TYPE | TIME_TYPE | ld_dict | ld_list + Args: + key (str): The key of the rejected key, value pair in ``self``. + value (BASIC_TYPE | TIME_TYPE | ld_dict | ld_list): The value of the rejected key, value pair in ``self``. - :return: - :rtype: None + Returns: + None: """ # FIXME: key not only string self._add_related("hermes-rt:reject", key, value) def replace(self: Self, key: str, value: Union[BASIC_TYPE, TIME_TYPE, ld_dict, ld_list]) -> None: """ - Adds an entry to self containing containing information that the key, value pair - key, value was replaced in the merge. + Adds an entry to ``self`` containing containing information that the key, value pair + ``key``, ``value`` was replaced in the merge. For further information see :meth:`ld_merge_dict._add_related`. - :param self: The ld_merge_container the special entry is added to. - :type self: Self - :param key: The key of the old key, value pair in self. - :type key: str - :param value: The value of the old key, value pair in self. - :type value: BASIC_TYPE | TIME_TYPE | ld_dict | ld_list + Args: + key (str): The key of the old key, value pair in ``self``. + value (BASIC_TYPE | TIME_TYPE | ld_dict | ld_list): The value of the old key, value pair in ``self``. - :return: - :rtype: None + Returns: + None: """ # FIXME: key not only string self._add_related("hermes-rt:replace", key, value) diff --git a/src/hermes/model/merge/match.py b/src/hermes/model/merge/match.py index cbcad94d..d2ff706e 100644 --- a/src/hermes/model/merge/match.py +++ b/src/hermes/model/merge/match.py @@ -7,40 +7,41 @@ from typing import Any, Callable -from ..types import ld_dict +from hermes.model.types import ld_dict def match_keys(*keys: list[str], fall_back_to_equals: bool = False) -> Callable[[Any, Any], bool]: """ Creates a function taking to parameters that returns true if both given parameter have at least one common key in the given list of keys - and for all common keys in the given list of keys the values of both objects are the same.
+ and for all common keys in the given list of keys the values of both objects are the same.\n If fall_back_to_equals is True, the returned function returns the value of normal == comparison if no key from keys is in both objects. - :param keys: The list of important keys for the comparison method. - :type keys: list[str] - :param fall_back_to_equals: Whether or not a fall back option should be used. - :type fall_back_to_equals: bool + Args: + keys (list[str]): The list of important keys for the comparison method. + fall_back_to_equals (bool): Whether or not a fall back option should be used. - :return: A function comparing two given objects values for the keys in keys. - :rtype: Callable[[ld_merge_dict, ld_dict], bool] + Returns: + Callable[[Any, Any], bool]: A function comparing two given objects values for the keys in keys. """ # create and return the match function using the given keys def match_func(left: Any, right: Any) -> bool: """ - Compares left to right by checking if a) they have at least one common key in a predetermined list of keys and - b) testing if both objects have equal values for all common keys in the predetermined key list.
+ Compares left to right by checking if + + - they have at least one common key in a predetermined list of keys and + - testing if both objects have equal values for all common keys in the predetermined key list. + It may fall back on == if no common key in the predetermined list of keys exists. - :param left: The first object for the comparison. - :type left: ld_merge_dict - :param right: The second object for the comparison. - :type right: ld_dict + Args: + left (Any): The first object for the comparison. + right (Any): The second object for the comparison. - :return: The result of the comparison. - :rtype: bool + Returns: + bool: The result of the comparison. """ if not (isinstance(left, ld_dict) and isinstance(right, ld_dict)): return fall_back_to_equals and (left == right) @@ -60,19 +61,18 @@ def match_func(left: Any, right: Any) -> bool: def match_person(left: Any, right: Any) -> bool: """ Compares two objects assuming they are representing schema:Person's - if they are not ld_dicts, == is used as a fallback.
- If both objects have an @id value, the truth value returned by this function is the comparison of both ids. + if they are not ld_dicts, == is used as a fallback.\n + If both objects have an @id value, the truth value returned by this function is the comparison of both ids.\n If either other has no @id value and both objects have at least one email value, - they are considered equal if they have one common email. + they are considered equal if they have one common email.\n If the equality of the objects is not yet decided, == comparison of the objects is returned. - :param left: The first object for the comparison. - :type left: ld_merge_dict - :param right: The second object for the comparison. - :type right: ld_dict + Args: + left (Any): The first object for the comparison. + right (Any): The second object for the comparison. - :return: The result of the comparison. - :rtype: bool + Returns: + bool: The result of the comparison. """ if not (isinstance(left, ld_dict) and isinstance(right, ld_dict)): return left == right @@ -92,28 +92,27 @@ def match_multiple_types( """ Returns a function that compares two objects using the given functions. - :param functions_for_types: Tuples of type and match_function. - The returned function will compare two objects of a the same, given type with the specified function. - :type functions_for_types: list[tuple[str, Callable[[Any, Any], bool]]] - :param fall_back_function: The fallback for comparison if the objects that are being compared don't have a common - type with specified compare function or at least one object is not a JSON-LD dictionary. - :type fall_back_function: Callable[[Any, Any], bool] + Args: + functions_for_types (list[tuple[str, Callable[[Any, Any], bool]]]): Tuples of type and match_function. + The returned function will compare two objects of a the same, given type with the specified function. + fall_back_function (Callable[[Any, Any], bool]): The fallback for comparison if the objects that are being + compared don't have a common type with specified compare function or at least one object + is not a JSON-LD dictionary. - :return: The function that compares the two given objects using the given functions. - :rtype: Callable[[Any, Any], bool] + Returns: + Callable[[Any, Any], bool]: The function that compares the two given objects using the given functions. """ # create and return the match function using the given keys def match_func(left: Any, right: Any) -> bool: """ Compares two objects using a predetermined function if either objects is not an ld_dict - or they don't have a common type in a predetermined list of types.
+ or they don't have a common type in a predetermined list of types.\n If the objects are ld_dicts and have the same type with a known comparison function this is used instead. - :param left: The first object for the comparison. - :type left: ld_merge_dict - :param right: The second object for the comparison. - :type right: ld_dict + Args: + left (Any): The first object for the comparison. + right (Any): The second object for the comparison. :return: The result of the comparison. :rtype: bool diff --git a/src/hermes/model/merge/strategy.py b/src/hermes/model/merge/strategy.py index ac78545c..01628866 100644 --- a/src/hermes/model/merge/strategy.py +++ b/src/hermes/model/merge/strategy.py @@ -5,14 +5,18 @@ # SPDX-FileContributor: Michael Meinel # SPDX-FileContributor: Michael Fritzsche -from ..types.ld_context import iri_map as iri +from hermes.model.types.ld_context import iri_map as iri from .action import Concat, MergeSet from .match import match_keys, match_person, match_multiple_types DEFAULT_MATCH = match_keys("@id", fall_back_to_equals=True) +""" Callable[[Any, Any], bool]: The default match function used for comparison. """ MATCH_FUNCTION_FOR_TYPE = {"schema:Person": match_person} +""" +dict[str, Callable[[Any, Any], bool]]: A dict containing for JSON_LD types the match function (not DEFAULT_MATCH). +""" ACTIONS = { "default": MergeSet(DEFAULT_MATCH), @@ -79,6 +83,7 @@ ] } } +""" dict[str, MergeAction]: A dict containing some common MergeActions. """ PROV_STRATEGY = { @@ -88,11 +93,13 @@ iri["hermes-rt:reject"]: ACTIONS["concat"] } } +""" dict[Literal[None], dict[str, MergeAction]]: MergeActions for provenance values. """ # Filled with entries for every schema-type that can be found inside an JSON-LD dict of type # SoftwareSourceCode or SoftwareApplication using schema and CodeMeta as Context. CODEMETA_STRATEGY = {None: {None: ACTIONS["default"]}} +""" dict[str | None, dict[str | None, MergeAction]]: MergeActions for the standard JSON_LD contexts objects. """ CODEMETA_STRATEGY[iri["schema:Thing"]] = {iri["schema:owner"]: ACTIONS["OrganizationOrPerson"]} From 02340e0d693e848f24fa28e3be1c58bb222d0b22 Mon Sep 17 00:00:00 2001 From: notactuallyfinn Date: Mon, 16 Mar 2026 13:52:24 +0100 Subject: [PATCH 220/247] added support for user defined merge strategies --- pyproject.toml | 2 + src/hermes/commands/process/base.py | 13 +- .../process/standard_merge.py} | 143 +++++++++++++++++- src/hermes/model/merge/container.py | 34 +++-- src/hermes/model/merge/match.py | 133 ---------------- .../commands/process/test_process.py | 2 + 6 files changed, 179 insertions(+), 148 deletions(-) rename src/hermes/{model/merge/strategy.py => commands/process/standard_merge.py} (85%) delete mode 100644 src/hermes/model/merge/match.py diff --git a/pyproject.toml b/pyproject.toml index 621c02be..489cef29 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -71,6 +71,8 @@ config_invenio_record_id = "hermes.commands.postprocess.invenio:config_record_id config_invenio_rdm_record_id = "hermes.commands.postprocess.invenio_rdm:config_record_id" cff_doi = "hermes.commands.postprocess.invenio:cff_doi" +[project.entry-points."hermes.process"] +codemeta = "hermes.commands.process.standard_merge:CodemetaProcessPlugin" [tool.poetry.group.dev.dependencies] pytest = "^7.1.1" diff --git a/src/hermes/commands/process/base.py b/src/hermes/commands/process/base.py index 1aae0dab..19f781e1 100644 --- a/src/hermes/commands/process/base.py +++ b/src/hermes/commands/process/base.py @@ -5,6 +5,7 @@ # SPDX-FileContributor: Michael Meinel import argparse +from typing import Union from pydantic import BaseModel @@ -12,18 +13,21 @@ from hermes.model.api import SoftwareMetadata from hermes.model.context_manager import HermesContext from hermes.model.error import HermesContextError +from hermes.model.merge.action import MergeAction from hermes.model.merge.container import ld_merge_dict class HermesProcessPlugin(HermesPlugin): + """ Base plugin that defines additional merge strategies.""" - pass + def __call__(self, command: HermesCommand) -> dict[Union[str, None], dict[Union[str, None], MergeAction]]: + pass class ProcessSettings(BaseModel): """Generic deposition settings.""" - pass + plugins: list = [] class HermesProcessCommand(HermesCommand): @@ -37,6 +41,11 @@ def __call__(self, args: argparse.Namespace) -> None: ctx = HermesContext() merged_doc = ld_merge_dict([{}]) + # add the strategies from the plugins + for plugin_name in reversed(self.settings.plugins): + additional_strategies = self.plugins[plugin_name]()(self) + merged_doc.add_strategy(additional_strategies) + # Get all harvesters harvester_names = self.root_settings.harvest.sources diff --git a/src/hermes/model/merge/strategy.py b/src/hermes/commands/process/standard_merge.py similarity index 85% rename from src/hermes/model/merge/strategy.py rename to src/hermes/commands/process/standard_merge.py index 01628866..463be4ff 100644 --- a/src/hermes/model/merge/strategy.py +++ b/src/hermes/commands/process/standard_merge.py @@ -1,13 +1,140 @@ -# SPDX-FileCopyrightText: 2025 German Aerospace Center (DLR) +# SPDX-FileCopyrightText: 2026 German Aerospace Center (DLR) # # SPDX-License-Identifier: Apache-2.0 -# SPDX-FileContributor: Michael Meinel # SPDX-FileContributor: Michael Fritzsche + +from typing import Any, Callable, Union + +from hermes.commands.base import HermesCommand +from hermes.model.merge.action import Concat, MergeAction, MergeSet +from hermes.model.types import ld_dict from hermes.model.types.ld_context import iri_map as iri -from .action import Concat, MergeSet -from .match import match_keys, match_person, match_multiple_types +from .base import HermesProcessPlugin + + +def match_keys(*keys: list[str], fall_back_to_equals: bool = False) -> Callable[[Any, Any], bool]: + """ + Creates a function taking to parameters that returns true + if both given parameter have at least one common key in the given list of keys + and for all common keys in the given list of keys the values of both objects are the same.\n + If fall_back_to_equals is True, the returned function returns the value of normal == comparison + if no key from keys is in both objects. + + Args: + keys (list[str]): The list of important keys for the comparison method. + fall_back_to_equals (bool): Whether or not a fall back option should be used. + + Returns: + Callable[[Any, Any], bool]: A function comparing two given objects values for the keys in keys. + """ + + # create and return the match function using the given keys + def match_func(left: Any, right: Any) -> bool: + """ + Compares left to right by checking if + + - they have at least one common key in a predetermined list of keys and + - testing if both objects have equal values for all common keys in the predetermined key list. + + It may fall back on == if no common key in the predetermined list of keys exists. + + Args: + left (Any): The first object for the comparison. + right (Any): The second object for the comparison. + + Returns: + bool: The result of the comparison. + """ + if not (isinstance(left, ld_dict) and isinstance(right, ld_dict)): + return fall_back_to_equals and (left == right) + # create a list of all common important keys + active_keys = [key for key in keys if key in left and key in right] + # fall back to == if no active keys + if fall_back_to_equals and not active_keys: + return left == right + # check if both objects have the same values for all active keys + pairs = [(left[key] == right[key]) for key in active_keys] + # return whether or not both objects had the same values for all active keys + # and there was at least one active key + return len(active_keys) > 0 and all(pairs) + return match_func + + +def match_person(left: Any, right: Any) -> bool: + """ + Compares two objects assuming they are representing schema:Person's + if they are not ld_dicts, == is used as a fallback.\n + If both objects have an @id value, the truth value returned by this function is the comparison of both ids.\n + If either other has no @id value and both objects have at least one email value, + they are considered equal if they have one common email.\n + If the equality of the objects is not yet decided, == comparison of the objects is returned. + + Args: + left (Any): The first object for the comparison. + right (Any): The second object for the comparison. + + Returns: + bool: The result of the comparison. + """ + if not (isinstance(left, ld_dict) and isinstance(right, ld_dict)): + return left == right + if "@id" in left and "@id" in right: + return left["@id"] == right["@id"] + if "schema:email" in left and "schema:email" in right: + if len(left["schema:email"]) > 0 and len(right["schema:email"]) > 0: + mails_right = right["schema:email"] + return any((mail in mails_right) for mail in left["schema:email"]) + return left == right + + +def match_multiple_types( + *functions_for_types: list[tuple[str, Callable[[Any, Any], bool]]], + fall_back_function: Callable[[Any, Any], bool] = match_keys("@id", fall_back_to_equals=True) +) -> Callable[[Any, Any], bool]: + """ + Returns a function that compares two objects using the given functions. + + Args: + functions_for_types (list[tuple[str, Callable[[Any, Any], bool]]]): Tuples of type and match_function. + The returned function will compare two objects of a the same, given type with the specified function. + fall_back_function (Callable[[Any, Any], bool]): The fallback for comparison if the objects that are being + compared don't have a common type with specified compare function or at least one object + is not a JSON-LD dictionary. + + Returns: + Callable[[Any, Any], bool]: The function that compares the two given objects using the given functions. + """ + + # create and return the match function using the given keys + def match_func(left: Any, right: Any) -> bool: + """ + Compares two objects using a predetermined function if either objects is not an ld_dict + or they don't have a common type in a predetermined list of types.\n + If the objects are ld_dicts and have the same type with a known comparison function this is used instead. + + Args: + left (Any): The first object for the comparison. + right (Any): The second object for the comparison. + + :return: The result of the comparison. + :rtype: bool + """ + # If at least one of the objects is not an ld_dict or contains no value for the key "@type", use the fallback. + if not (isinstance(left, ld_dict) and isinstance(right, ld_dict) and "@type" in left and "@type" in right): + return fall_back_function(left, right) + # Extract the list of types + types_left = left["@type"] + types_right = right["@type"] + # Iterate over all known type, match_function pairs. + # If one type is in both objects return the result of the comparison with the match_function. + for ld_type, func in functions_for_types: + if ld_type in types_left and ld_type in types_right: + return func(left, right) + # No common type with known match_function: Fallback + return fall_back_function(left, right) + return match_func DEFAULT_MATCH = match_keys("@id", fall_back_to_equals=True) @@ -713,3 +840,11 @@ **CODEMETA_STRATEGY[iri["schema:LoanOrCredit"]], **CODEMETA_STRATEGY[iri["schema:PaymentCard"]] } + + +class CodemetaProcessPlugin(HermesProcessPlugin): + def __call__(self, command: HermesCommand) -> dict[Union[str, None], dict[Union[str, None], MergeAction]]: + strats = {**CODEMETA_STRATEGY} + for key, value in PROV_STRATEGY.items(): + strats[key] = {**value, **strats.get(key, {})} + return strats diff --git a/src/hermes/model/merge/container.py b/src/hermes/model/merge/container.py index d2278dfd..a4ae1e2c 100644 --- a/src/hermes/model/merge/container.py +++ b/src/hermes/model/merge/container.py @@ -15,7 +15,7 @@ BASIC_TYPE, EXPANDED_JSON_LD_VALUE, JSON_LD_CONTEXT_DICT, JSON_LD_VALUE, TIME_TYPE ) from hermes.model.types.pyld_util import bundled_loader -from .strategy import CODEMETA_STRATEGY, PROV_STRATEGY +from .action import MergeError if TYPE_CHECKING: from .action import MergeAction @@ -53,7 +53,8 @@ def _to_python( parent=value.parent, key=value.key, index=value.index, - context=value.context + context=value.context, + strategies=self.strategies ) # replace ld_lists with ld_merge_lists if isinstance(value, ld_list) and not isinstance(value, ld_merge_list): @@ -62,7 +63,8 @@ def _to_python( parent=value.parent, key=value.key, index=value.index, - context=value.context + context=value.context, + strategies=self.strategies ) return value @@ -71,6 +73,10 @@ class ld_merge_list(_ld_merge_container, ld_list): """ ld_list wrapper to ensure the 'merge_container'-property does not get lost, while merging. See also :class:`ld_list` and :class:`ld_merge_container`. + + Attributes: + strategies (dict[str | None, dict[str | None, MergeAction]]): The strategies used inside the child + ld_merge_dicts. """ def __init__( @@ -80,7 +86,8 @@ def __init__( parent: Union[ld_container, None] = None, key: Union[str, None] = None, index: Union[int, None] = None, - context: Union[list[Union[str, JSON_LD_CONTEXT_DICT]], None] = None + context: Union[list[Union[str, JSON_LD_CONTEXT_DICT]], None] = None, + strategies: dict[Union[str, None], dict[Union[str, None], MergeAction]] = {} ) -> None: """ Create a new ld_merge_list. @@ -93,12 +100,15 @@ def __init__( key (str | None): key into the parent container. index (int | None): index into the parent container. context (list[str | JSON_LD_CONTEXT_DICT] | None): local context for this container. + strategies (dict[str | None, dict[str | None, MergeAction]]): The strategies for merging in the childs. Returns: None: """ super().__init__(data, parent=parent, key=key, index=index, context=context) + self.strategies = strategies + class ld_merge_dict(_ld_merge_container, ld_dict): """ @@ -117,7 +127,8 @@ def __init__( parent: Union[ld_dict, ld_list, None] = None, key: Union[str, None] = None, index: Union[int, None] = None, - context: Union[list[Union[str, JSON_LD_CONTEXT_DICT]], None] = None + context: Union[list[Union[str, JSON_LD_CONTEXT_DICT]], None] = None, + strategies: dict[Union[str, None], dict[Union[str, None], MergeAction]] = {} ) -> None: """ Create a new instance of an ld_merge_dict. See also :meth:`ld_dict.__init__`. @@ -128,6 +139,7 @@ def __init__( key (str | None): key into the parent container. index (int | None): index into the parent container. context (list[str | JSON_LD_CONTEXT_DICT] | None): local context for this container. + strategies (dict[str | None, dict[str | None, MergeAction]]): The initial strategies. Returns: None: @@ -141,8 +153,7 @@ def __init__( self.update_context(ld_context.HERMES_PROV_CONTEXT) # add strategies - self.strategies = {**CODEMETA_STRATEGY} - self.add_strategy(PROV_STRATEGY) + self.strategies = strategies def update_context( self: Self, other_context: Union[list[Union[str, JSON_LD_CONTEXT_DICT]], None] @@ -256,15 +267,20 @@ def _merge_item( Returns: BASIC_TYPE | TIME_TYPE | ld_merge_dict | ld_merge_list: The result of the merge from ``self[key]`` with ``value``. + + Raises: + MergeError: If there is no strategy for this key. """ # search for all applicable strategies - strategy = {**self.strategies[None]} + strategy = {**self.strategies.get(None, {})} ld_types = self.data_dict.get('@type', []) for ld_type in ld_types: strategy.update(self.strategies.get(ld_type, {})) # choose one merge strategy and return the item returned by following the merge startegy - merger = strategy.get(key, strategy[None]) + merger = strategy.get(key, strategy.get(None, None)) + if merger is None: + raise MergeError(f"Can't merge, no strategy found for key '{key}'.") return merger.merge(self, [*self.path, key], self[key], value) def _add_related( diff --git a/src/hermes/model/merge/match.py b/src/hermes/model/merge/match.py deleted file mode 100644 index d2ff706e..00000000 --- a/src/hermes/model/merge/match.py +++ /dev/null @@ -1,133 +0,0 @@ -# SPDX-FileCopyrightText: 2025 German Aerospace Center (DLR) -# -# SPDX-License-Identifier: Apache-2.0 - -# SPDX-FileContributor: Michael Meinel -# SPDX-FileContributor: Michael Fritzsche - -from typing import Any, Callable - -from hermes.model.types import ld_dict - - -def match_keys(*keys: list[str], fall_back_to_equals: bool = False) -> Callable[[Any, Any], bool]: - """ - Creates a function taking to parameters that returns true - if both given parameter have at least one common key in the given list of keys - and for all common keys in the given list of keys the values of both objects are the same.\n - If fall_back_to_equals is True, the returned function returns the value of normal == comparison - if no key from keys is in both objects. - - Args: - keys (list[str]): The list of important keys for the comparison method. - fall_back_to_equals (bool): Whether or not a fall back option should be used. - - Returns: - Callable[[Any, Any], bool]: A function comparing two given objects values for the keys in keys. - """ - - # create and return the match function using the given keys - def match_func(left: Any, right: Any) -> bool: - """ - Compares left to right by checking if - - - they have at least one common key in a predetermined list of keys and - - testing if both objects have equal values for all common keys in the predetermined key list. - - It may fall back on == if no common key in the predetermined list of keys exists. - - Args: - left (Any): The first object for the comparison. - right (Any): The second object for the comparison. - - Returns: - bool: The result of the comparison. - """ - if not (isinstance(left, ld_dict) and isinstance(right, ld_dict)): - return fall_back_to_equals and (left == right) - # create a list of all common important keys - active_keys = [key for key in keys if key in left and key in right] - # fall back to == if no active keys - if fall_back_to_equals and not active_keys: - return left == right - # check if both objects have the same values for all active keys - pairs = [(left[key] == right[key]) for key in active_keys] - # return whether or not both objects had the same values for all active keys - # and there was at least one active key - return len(active_keys) > 0 and all(pairs) - return match_func - - -def match_person(left: Any, right: Any) -> bool: - """ - Compares two objects assuming they are representing schema:Person's - if they are not ld_dicts, == is used as a fallback.\n - If both objects have an @id value, the truth value returned by this function is the comparison of both ids.\n - If either other has no @id value and both objects have at least one email value, - they are considered equal if they have one common email.\n - If the equality of the objects is not yet decided, == comparison of the objects is returned. - - Args: - left (Any): The first object for the comparison. - right (Any): The second object for the comparison. - - Returns: - bool: The result of the comparison. - """ - if not (isinstance(left, ld_dict) and isinstance(right, ld_dict)): - return left == right - if "@id" in left and "@id" in right: - return left["@id"] == right["@id"] - if "schema:email" in left and "schema:email" in right: - if len(left["schema:email"]) > 0 and len(right["schema:email"]) > 0: - mails_right = right["schema:email"] - return any((mail in mails_right) for mail in left["schema:email"]) - return left == right - - -def match_multiple_types( - *functions_for_types: list[tuple[str, Callable[[Any, Any], bool]]], - fall_back_function: Callable[[Any, Any], bool] = match_keys("@id", fall_back_to_equals=True) -) -> Callable[[Any, Any], bool]: - """ - Returns a function that compares two objects using the given functions. - - Args: - functions_for_types (list[tuple[str, Callable[[Any, Any], bool]]]): Tuples of type and match_function. - The returned function will compare two objects of a the same, given type with the specified function. - fall_back_function (Callable[[Any, Any], bool]): The fallback for comparison if the objects that are being - compared don't have a common type with specified compare function or at least one object - is not a JSON-LD dictionary. - - Returns: - Callable[[Any, Any], bool]: The function that compares the two given objects using the given functions. - """ - - # create and return the match function using the given keys - def match_func(left: Any, right: Any) -> bool: - """ - Compares two objects using a predetermined function if either objects is not an ld_dict - or they don't have a common type in a predetermined list of types.\n - If the objects are ld_dicts and have the same type with a known comparison function this is used instead. - - Args: - left (Any): The first object for the comparison. - right (Any): The second object for the comparison. - - :return: The result of the comparison. - :rtype: bool - """ - # If at least one of the objects is not an ld_dict or contains no value for the key "@type", use the fallback. - if not (isinstance(left, ld_dict) and isinstance(right, ld_dict) and "@type" in left and "@type" in right): - return fall_back_function(left, right) - # Extract the list of types - types_left = left["@type"] - types_right = right["@type"] - # Iterate over all known type, match_function pairs. - # If one type is in both objects return the result of the comparison with the match_function. - for ld_type, func in functions_for_types: - if ld_type in types_left and ld_type in types_right: - return func(left, right) - # No common type with known match_function: Fallback - return fall_back_function(left, right) - return match_func diff --git a/test/hermes_test/commands/process/test_process.py b/test/hermes_test/commands/process/test_process.py index 0e25f8c0..24fe6d4c 100644 --- a/test/hermes_test/commands/process/test_process.py +++ b/test/hermes_test/commands/process/test_process.py @@ -57,6 +57,7 @@ def test_process(tmp_path, monkeypatch, metadata_in, metadata_out): config_file = tmp_path / "hermes.toml" config_file.write_text( + "[process]\nplugins=[\"codemeta\"]\n" "[harvest]\nsources = [" + ", ".join('\"' + f'{harvester}' + '\"' for harvester in metadata_in) + "]" ) @@ -168,6 +169,7 @@ def test_process_complex(tmp_path, monkeypatch, metadata_in, metadata_out): config_file = tmp_path / "hermes.toml" config_file.write_text( + "[process]\nplugins=[\"codemeta\"]\n" "[harvest]\nsources = [" + ", ".join('\"' + f'{harvester}' + '\"' for harvester in metadata_in) + "]" ) From 5e296cbc0c3914913bbbefbdd64739b89913200a Mon Sep 17 00:00:00 2001 From: notactuallyfinn Date: Wed, 18 Mar 2026 11:48:34 +0100 Subject: [PATCH 221/247] added class to ignore list for autoapi --- docs/source/conf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/source/conf.py b/docs/source/conf.py index cc5b0fec..53c86957 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -195,7 +195,7 @@ def autoapi_skip_member(app, obj_type, name, obj, skip, options): if obj_type == "attribute": if any(documented_type in obj.id for documented_type in [ "Collect", "HermesCache", "HermesContext", "HermesMergeError", "ld_container", "ld_context", "ld_dict", - "ld_list", "ld_merge_dict", "MergeSet" + "ld_list", "ld_merge_dict", "ld_merge_list", "MergeSet" ]): return True From 630dd00d4a1c38387076a733b0514f025e6cd754 Mon Sep 17 00:00:00 2001 From: notactuallyfinn Date: Thu, 19 Mar 2026 17:43:51 +0100 Subject: [PATCH 222/247] updated documentation for plugin development, made curate pluginizable and changed the way commands handle errors in plugin runs --- docs/source/_static/custom.css | 9 +- .../automated-publication-with-ci.md | 2 +- .../tutorials/writing-a-plugin-for-hermes.md | 341 ++++++++++++++---- hermes.toml | 3 + pyproject.toml | 3 + src/hermes/commands/cli.py | 12 +- src/hermes/commands/curate/base.py | 47 ++- src/hermes/commands/curate/pass_curate.py | 15 + src/hermes/commands/deposit/base.py | 26 +- src/hermes/commands/harvest/base.py | 36 +- src/hermes/commands/harvest/cff.py | 2 +- src/hermes/commands/harvest/codemeta.py | 2 +- src/hermes/commands/postprocess/base.py | 27 +- src/hermes/commands/process/base.py | 51 ++- src/hermes/error.py | 3 + src/hermes/model/api.py | 18 + .../commands/curate/test_do_nothing_curate.py | 2 +- 17 files changed, 475 insertions(+), 124 deletions(-) create mode 100644 src/hermes/commands/curate/pass_curate.py diff --git a/docs/source/_static/custom.css b/docs/source/_static/custom.css index 20c1f57f..875358c8 100644 --- a/docs/source/_static/custom.css +++ b/docs/source/_static/custom.css @@ -45,9 +45,14 @@ img { } .bd-sidebar-primary.bd-sidebar { - max-width: 340px; + max-width: min-content; +} + +.bd-docs-nav { + min-width: max-content; } .bd-sidebar-secondary{ - max-width: min-content; + max-width: 15%; + width: max-content; } \ No newline at end of file diff --git a/docs/source/tutorials/automated-publication-with-ci.md b/docs/source/tutorials/automated-publication-with-ci.md index 172cb7df..00518615 100644 --- a/docs/source/tutorials/automated-publication-with-ci.md +++ b/docs/source/tutorials/automated-publication-with-ci.md @@ -110,7 +110,7 @@ Each step in the publication workflow has its own section. Configure HERMES to: -- harvest metadata from Git and `CITATION.cff` +- harvest metadata from `CITATION.cff` - deposit on Zenodo Sandbox (which is built on the InvenioRDM) - use Zenodo Sandbox as the target publication repository diff --git a/docs/source/tutorials/writing-a-plugin-for-hermes.md b/docs/source/tutorials/writing-a-plugin-for-hermes.md index 424596c1..9c88f0d7 100644 --- a/docs/source/tutorials/writing-a-plugin-for-hermes.md +++ b/docs/source/tutorials/writing-a-plugin-for-hermes.md @@ -14,26 +14,35 @@ SPDX-FileContributor: Oliver Bertuch # Write a plugin for HERMES -This tutorial will present the basic steps for writing an additional harvester. -At the moment only the architecture for harvester plugins is stable. -The full code and structure is available at [hermes-plugin-git](https://github.com/softwarepub/hermes-plugin-git). +This tutorial will present the basic steps for writing additional plugins. + +The full code and structure of a harvest plugin is available at [hermes-plugin-git](https://github.com/softwarepub/hermes-plugin-git). This plugin extracts information from the local git history. The hermes-plugin-git will help to gather contributing and branch metadata. + ```{note} For this tutorial you should be familiar with HERMES. -If you never used HERMES before, you might want to check the tutorial: [Automated Publication with HERMES](https://docs.software-metadata.pub/en/latest/tutorials/automated-publication-with-ci.html). +If you never used HERMES before, you might want to check the tutorial: [Automated Publication with HERMES](./automated-publication-with-ci). + +Also all metadata directly handled by HERMES is [JSON-LD](https://json-ld.org/) so you should be familiar with that when writing a plugin. +And uses the [schmea.org](https://schema.org/) (with prefix "schema") and the [CodeMeta](https://codemeta.github.io/) (without prefix) context. ``` ## Plugin Architecture HERMES uses a plugin architecture. Therefore, users are invited to contribute own features. + The structure for every plugin follows the same schema. -There is a top-level base class for every plugin. In this `HermesPlugin` class there is one abstract method `__call__` which needs to be overwritten. -Furthermore, the `HermesCommand` class provides all needs for writing a plugin used in a HERMES command. -So the `HermesPlugin`s call method gets an instance of the `HermesCommand` that triggered this plugin to run. -In our case this will be the `HermesHarvestCommand` which calls all harvest plugins. +Every plugin is a sub class of a sub class of the `HermesPlugin` class. +This class implements one abstract method, `__call__`, which needs to be overwritten by every plugin. +In between the `HermesPlugin` class and the class of a specific plugin there is another class which follows the naming scheme `Hermes{Step}Plugin` where `{Step}` is the step the plugin is for. +These base classes may implement additional (abstract) methods that may have to be implemented by the plugins class. + +The first positional attribute of the `__call__` method is an object of class `Hermes{Step}Command` (where `{Step}` is the step the plugin is for), which is a sub class of `HermesCommand`, which triggered this plugin to run. +An exception to this are the deposit plugins. Those don't implement the `__call__` method and instead can implement (and have to implement some) other functions. + The plugin class also uses a derivative of `HermesSettings` to add parameters that can be adapted by the configuration file. -`HermesSettings` are the base class for command specific settings. +`HermesSettings` is the base class for command specific settings. It uses [pydantic](https://docs.pydantic.dev/latest/) [settings](https://docs.pydantic.dev/latest/api/pydantic_settings/) to specify and validate the parameters. The user can either set the parameters in the `hermes.toml` or overwrite them in the command line. To overwrite a parameter from command line, use the `-O` command line option followed by the dotted parameter name and the value. @@ -42,42 +51,233 @@ E.g., you can set your authentication token for InvenioRDM by adding the followi hermes deposit -O invenio_rdm.auth_token YourSecretAuthToken ``` -## Set Up Plugin +## Implement plugin class To write a new plugin, it is important to follow the given structure. -This means your plugins source code has a pydantic class with Settings and the plugin class which inherits from one base class. -For our specific case, we want to write a git harvest plugin. -Our class Structure should look like this: +This means your plugins source code has a pydantic class with Settings and the plugin class which inherits from the plugins steps base class. +### Harvest plugin +The class structure of a harvest plugin should look like this: ```{code-block} python -from hermes.commands.harvest.base import HermesHarvestPlugin +from hermes.commands.harvest.base import HermesHarvestCommand, HermesHarvestPlugin +from hermes.model import SoftwareMetadata from pydantic import BaseModel -class GitHarvestSettings(BaseModel): - from_branch: str = 'main' +class YourHarvestSettings(BaseModel): + # TODO: add your settings + pass + +class YourHarvestPlugin(HermesHarvestPlugin): + settings_class = YourHarvestSettings -class GitHarvestPlugin(HermesHarvestPlugin): - settings_class = GitHarvestSettings + def __call__(self, command: HermesHarvestCommand) -> SoftwareMetadata: + data = SoftwareMetadata() - def __call__(self, command): - print("Hello World!") + # TODO: collect the metadata and write it into data - return {}, {} + return data ``` - -The code uses the `HermesHarvestPlugin` as base class and pydantic's base model for the settings. -In the `GitHarvestSettings` you can see that an additional parameter is defined. -The Parameter `from_branch` is specific for this plugin and can be accessed inside the plugin using `self.settings.harvest.git.from_branch` as long as our plugin will be named `git`. -In the `hermes.toml` this would be achieved by [harvest.{plugin_name}]. -The `GitHarvestSettings` are associated with the `GitHarvestPlugin`. -In the plugin you need to overwrite the `__call__` method. -For now a simple "Hello World" will do. The method returns two dictionaries. -These will contain the harvested data in CodeMeta (JSON-LD) and additional information, e.g., to provide provenance information. -That is the basic structure for the plugins source code. - -To integrate this code, you have to register it as a plugin in the `pyproject.toml`. + +The `__call__` method of harest plugins needs to return a SoftwareMetadata object containing the harvested metadata. +For more information on how to use this object see [here](../dev/data_model.md). + +### Process plugin +The class structure of a process plugin should look like this: + +```{code-block} python +from typing import Union + +from hermes.commands.process.base import HermesProcessCommand, HermesProcessPlugin +from hermes.model.merge.action import MergeAction +from pydantic import BaseModel + + +class YourProcessSettings(BaseModel): + # TODO: add your settings + pass + + +class YourProcessPlugin(HermesProcessPlugin): + settings_class = YourProcessSettings + + def __call__(self, command: HermesProcessCommand) -> dict[Union[str, None], dict[Union[str, None], MergeAction]]: + strategies = {} + + # TODO: define the merge strategies that will be used by HERMES + + return strategies +``` + +The `__call__` method of process plugins needs to return a dictionary mappings strings and/ or `None` to dictionaries mapping strings or `None` to {py:class}`hermes.model.merge.action.MergeAction`. +If `strategies` looked like this (where `Reject` is imported from `hermes.model.merge.action`) +```{code-block} python +strategies = { + full_type_iri: { + full_property_iri: Reject(), + ... + }, + ... +} +``` + +HERMES would use the `Reject` strategy for merging values of the key `full_property_iri` in objects of type `full_type_iri`. (A key in strategies being `None` instead of a string indicates to HERMES that its value is to be used as a default [i.e. if no more specific entry exists].) + +HERMES will prioritize strategies from other plugins depending on the order of the plugins in the `hermes.toml`. Generally the hierarchy is as follows (first most important): +1. strategies with `full_property_iri` and `full_type_iri` not `None`. +2. strategies with `full_property_iri` not `None` and `full_type_iri` `None`. +3. strategies with `full_property_iri` `None` and `full_type_iri` not `None`. +4. strategies with `full_property_iri` and `full_type_iri` `None`. + +But if multiple plugins specify overlapping strategies on the same hierarchy level the strategy of the plugin listed first in the `hermes.toml` is used. + +### Curate plugin +The class structure of a curate plugin should look like this: + +```{code-block} python +from hermes.commands.curate.base import HermesCurateCommand, HermesCuratePlugin +from hermes.model import SoftwareMetadata +from pydantic import BaseModel + + +class YourCurateSettings(BaseModel): + # TODO: add your settings + pass + + +class YourCuratePlugin(HermesCuratePlugin): + settings_class = YourCurateSettings + + def __call__(self, command: HermesCurateCommand, metadata: SoftwareMetadata) -> SoftwareMetadata: + data = SoftwareMetadata() + + # TODO: curate the metadata and write it into data + + return data +``` + +The `__call__` method of harest plugins needs to return a SoftwareMetadata object containing the curated metadata. +For more information on how to use this object see [here](../dev/data_model.md). +The returned object may be the object `metadata` passed to `__call__`. + +### Deposit plugin +The class structure of a deposit plugin should look like this: + +```{code-block} python +from hermes.commands.deposit.base import HermesDepositPlugin +from hermes.model import SoftwareMetadata +from pydantic import BaseModel + + +class YourDepositSettings(BaseModel): + # TODO: add your settings + pass + + +class YourDepositPlugin(HermesDepositPlugin): + settings_class = YourDepositSettings + + def prepare(self) -> None: + """ not neccessary """ + pass + + def map_metadata(self) -> dict: + """ neccessary """ + mapped_metadata = {} + # TODO: implement + return mapped_metadata + + def is_initial_publication(self) -> bool: + """ neccessary """ + is_initial = True + # TODO: implement logic + return is_initial + + def create_initial_version(self) -> None: + """ necessary if is_initial_publication can return True """ + pass + + def create_new_version(self) -> None: + """ necessary if is_initial_publication can return False """ + pass + + def update_metadata(self) -> dict: + """ necessary """ + mapped_metadata = {} + # TODO: implement + return mapped_metadata + + def delete_artifacts(self) -> None: + """ not necessary """ + pass + + def upload_artifacts(self) -> None: + """ not necessary """ + pass + + def publish(self) -> None: + """ necessary """ + # TODO: implement logic + pass +``` + +A deposit plugin doesn't implement a `__call__` method like plugins for other steps. +Instead it can (and in some cases has to) implement methods, which will be called in a predefined order. + +The plugin still has access to the command (via self.command) and the metadata for the software (via self.metadata). + +### Postprocess plugin +The class structure of a postprocess plugin should look like this: + +```{code-block} python +from hermes.commands.postprocess.base import HermesPostprocessCommand, HermesPostprocessPlugin +from hermes.model import SoftwareMetadata +from pydantic import BaseModel + + +class YourPostprocessSettings(BaseModel): + # TODO: add your settings + pass + + +class YourPostprocessPlugin(HermesPostprocessPlugin): + settings_class = YourPostprocessSettings + + def __call__(self, command: HermesPostprocessCommand) -> None: + # TODO: implement logic + pass +``` + +The metadata from a deposit plugin can be loaded via + +```python +ctx = HermesContext() +ctx.prepare_step("deposit") +with ctx[deposit_plugin_name] as manager: + deposition = manager["result"] +ctx.finalize_step("deposit") +``` + +where `deposit_plugin_name` is the name of the deposit plugin the data is loaded from and HermesContext is {py:class}`hermes.model.context_manager.HermesContext`. +The loaded data is some valid JSON data and has no fixed format. + +## Implement and use plugin specific settings +The class set in the `settings_class` attribute of your plugin class is your plugins settings class. +All attributes in it can be set in the `hermes.toml` of your project or passed via the command line. +If not set, they will be set to the (in the class) specified default value. +Pydantic will also validate the attributes value against the type hint of the attribute. + +The settings of your plugin can be accessed via `self.settings.{plugin_step}.{plugin_name}.{attribute_name}`. +And setting it in the `hermes.toml` works like this: +```shell +[{plugin_step}.{plugin_name}] +{attribute_name} = value +``` + +## Configure HERMES to use your plugin + +To integrate your plugin, you have to register it as a plugin in the `pyproject.toml`. To learn more about the `pyproject.toml` check https://python-poetry.org/docs/pyproject/ or refer to [PEP621](https://peps.python.org/pep-0621/). We will just look at the important places for this plugin. There are two ways to integrate this plugin. @@ -90,19 +290,19 @@ The idea is that your project is the main part. You create the `pyproject.toml` In the dependencies block you need to include `hermes`. Then you just have to declare your plugin. The HERMES software will look for installed plugins and use them. In the code below you can see the parts of the `pyproject.toml` that are important. -```{code-block} toml +```{code-block} ... [tool.poetry.dependencies] python = "^3.10" hermes = "^0.8.0" ... ... -[tool.poetry.plugins."hermes.harvest"] -git = "hermes_plugin_git.harvest:GitHarvestPlugin" +[tool.poetry.plugins."hermes.{plugin_step}"] +{plugin_name} = "{plugin_package}.{plugin_module}:{plugin_class}" ... ``` -As you can see the plugin class from `hermes_plugin_git` is declared as `git` for the `hermes.harvest` entrypoint. -To use the plugin you have to adapt the harvest settings in the `hermes.toml`. +As you can see the plugin class from `plugin_package` is declared as `plugin_name` for the `hermes.{plugin_step}` entrypoint. +To use the plugin you have to adapt the settings for `plugin_step` in the `hermes.toml`. We will discuss the exact step after showing the other `pyproject.toml` configuration. ```{note} You have to run poetry install to add and install all entrypoints declared in the pyproject.toml. @@ -113,18 +313,16 @@ This variant is used to contribute to the HERMES community or adapt the HERMES w If you want to contribute, see the [Contribution Guidelines](https://docs.software-metadata.pub/en/latest/dev/contribute.html). After cloning the HERMES workflow repository you can adapt the pyproject.toml. In the code below you see the parts with the important lines. -```{code-block} toml +```{code-block} ... [tool.poetry.dependencies] ... pydantic-settings = "^2.1.0" -hermes-plugin-git = { git = "https://github.com/softwarepub/hermes-plugin-git.git", branch = "main" } +{plugin_package} = { {plugin_name} = "{link_to_your_repo}", branch = "main" } ... ... -[tool.poetry.plugins."hermes.harvest"] -cff = "hermes.commands.harvest.cff:CffHarvestPlugin" -codemeta = "hermes.commands.harvest.codemeta:CodeMetaHarvestPlugin" -git = "hermes_plugin_git.harvest:GitHarvestPlugin" +[tool.poetry.plugins."hermes.{plugin_step}"] +{plugin_name} = "{plugin_package}.{plugin_module}:{plugin_class}" ... ``` In the dependencies you have to install your plugin. If your Plugin is pip installable than you can just give the name and the version. @@ -132,8 +330,8 @@ If your plugin is in a buildable git repository, you can install it with the giv Note that this differs with the accessibility and your wishes, check [Explicit Package Sources](https://python-poetry.org/docs/repositories/#explicit-package-sources). The second thing to adapt is to declare the access point for the plugin. -You can do that with `git = "hermes_plugin_git.harvest:GitHarvestPlugin"`. -This expression makes the `GitHarvestPlugin` from the `hermes_plugin_git` package, a `hermes.harvest` plugin named `git`. +You can do that with `{plugin_name} = "{plugin_package}.{plugin_module}:{plugin_class}"`. +This expression makes the `plugin_class` from the `plugin_package` package, a `hermes.{plugin_step}` plugin named `plugin_name`. So you need to configure this line with your plugin properties. Now you just need to add the plugin to the `hermes.toml` and reinstall the adapted poetry package. @@ -141,27 +339,48 @@ Now you just need to add the plugin to the `hermes.toml` and reinstall the adapt ### Configure hermes.toml To use the plugin, you have to activate it in the `hermes.toml`. The settings for the plugins are also set there. -For the harvest plugin the `hermes.toml` could look like this: -```{code-block} toml -[harvest] -sources = [ "cff", "git" ] # ordered priority (first one is most important) -[harvest.cff] -enable_validation = false +Here are some examples how to integrate your plugin... -[harvest.git] -from_branch = "develop" +#### ... for a harvest plugin. +```{code-block} +... +[harvest] +sources = [ ..., "{plugin_name}", ... ] # ordered priority (first one is most important) ... ``` -In the `[harvest]` section you define that this plugin is used with less priority than the built-in `cff` plugin. -in the `[harvest.git]` section you set the configuration for the plugin. -In the beginning of this tutorial we set the parameter `from_branch` in the git settings. Now we change the default `from_branch` to `develop`. -With this configuration the plugin will be used. If you run `hermes harvest`, you should see the "Hello World" message. - +#### ... for a process plugin. +```{code-block} +... +[process] +plugins = [ ..., "{plugin_name}", ... ] # ordered priority (first one is most important) +... +``` +#### ... for a curate plugin. +```{code-block} +... +[curate] +plugin = "{plugin_name}" +... +``` +#### ... for a deposit plugin. +```{code-block} +... +[deposit] +target = "{plugin_name}" +... +``` +#### ... for a postprocess plugin. +```{code-block} +... +[postprocess] +run = [ ..., "{plugin_name}", ... ] +... +``` +

```{admonition} Congratulations! You can now write plugins for HERMES. +Consider publishing it for others to use following this guide. TODO: add link ``` -To fill the plugin with code, you can check our [hermes-plugin-git](https://github.com/softwarepub/hermes-plugin-git) repository. -There is the code to check the local git history and extract contributors of the given branch. If you have any questions, wishes or requests, feel free to contact us. diff --git a/hermes.toml b/hermes.toml index 3aa44a8f..a42a9406 100644 --- a/hermes.toml +++ b/hermes.toml @@ -5,6 +5,9 @@ [harvest] sources = [ "cff", "toml" ] # ordered priority (first one is most important) +[curate] +plugin = "pass_curate" + [deposit] target = "invenio_rdm" diff --git a/pyproject.toml b/pyproject.toml index 489cef29..fa2885ae 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -74,6 +74,9 @@ cff_doi = "hermes.commands.postprocess.invenio:cff_doi" [project.entry-points."hermes.process"] codemeta = "hermes.commands.process.standard_merge:CodemetaProcessPlugin" +[project.entry-points."hermes.curate"] +pass_curate = "hermes.commands.curate.pass_curate:DoNothingCuratePlugin" + [tool.poetry.group.dev.dependencies] pytest = "^7.1.1" pytest-cov = "^3.0.0" diff --git a/src/hermes/commands/cli.py b/src/hermes/commands/cli.py index d772d09d..079a073f 100644 --- a/src/hermes/commands/cli.py +++ b/src/hermes/commands/cli.py @@ -21,6 +21,7 @@ HermesProcessCommand, HermesVersionCommand ) from hermes.commands.base import HermesCommand +from hermes.error import HermesPluginRunError def main() -> None: @@ -79,16 +80,15 @@ def main() -> None: log.info("Run subcommand %s", args.command.command_name) args.command(args) + except HermesPluginRunError as e: + log.error("An error occurred during the execution of a plugin %s (Find details in './hermes.log')", + args.command.command_name) + log.debug("Original exception was: %s", e) + sys.exit(2) except Exception as e: log.error("An error occurred during execution of %s (Find details in './hermes.log')", args.command.command_name) log.debug("Original exception was: %s", e) - - sys.exit(2) - - if args.command.errors: - for e in args.command.errors: - log.error(e) sys.exit(1) sys.exit(0) diff --git a/src/hermes/commands/curate/base.py b/src/hermes/commands/curate/base.py index 15d7c8db..8aedd956 100644 --- a/src/hermes/commands/curate/base.py +++ b/src/hermes/commands/curate/base.py @@ -8,16 +8,24 @@ from pydantic import BaseModel -from hermes.commands.base import HermesCommand +from hermes.commands.base import HermesCommand, HermesPlugin +from hermes.error import HermesPluginRunError, MisconfigurationError from hermes.model import SoftwareMetadata from hermes.model.context_manager import HermesContext from hermes.model.error import HermesValidationError +class HermesCuratePlugin(HermesPlugin): + """ Base plugin for curate plugins. """ + + def __call__(self, command: HermesCommand, metadata: SoftwareMetadata) -> SoftwareMetadata: + pass + + class CurateSettings(BaseModel): """Generic deposition settings.""" - pass + plugin: str = "" class HermesCurateCommand(HermesCommand): @@ -26,28 +34,41 @@ class HermesCurateCommand(HermesCommand): command_name = "curate" settings_class = CurateSettings - def init_command_parser(self, command_parser: argparse.ArgumentParser) -> None: - pass - def __call__(self, args: argparse.Namespace) -> None: self.log.info("# Metadata curation") + plugin_name = self.settings.plugin ctx = HermesContext() ctx.prepare_step("curate") + self.log.info("## Load processed metadata") + # load processed data ctx.prepare_step("process") - with ctx["result"] as process_ctx: - expanded_data = process_ctx["expanded"] - context_data = process_ctx["context"] + try: + metadata = SoftwareMetadata.load_from_cache(ctx, "result") + except Exception as e: + self.log.error("The data from the process step could not be loaded or is invalid for some reason.") + raise HermesValidationError("The results of the process step are invalid.") from e ctx.finalize_step("process") + self.log.info("## Load curation plugin") + # load plugin try: - data = SoftwareMetadata(expanded_data[0], context_data["@context"][1]) + plugin_func = self.plugins[plugin_name]() + except KeyError as e: + self.log.error(f"Plugin {plugin_name} not found.") + raise MisconfigurationError(f"Curate plugin {plugin_name} not found.") + + self.log.info("## Run curation plugin") + # run plugin + try: + curated_metadata = plugin_func(self, metadata) except Exception as e: - raise HermesValidationError("The results of the process step are invalid.") from e + self.log.error(f"Unknown error while executing the {plugin_name} plugin.") + raise HermesPluginRunError(f"Something went wrong while running the curate plugin {plugin_name}") from e - with ctx["result"] as curate_ctx: - curate_ctx["expanded"] = data.ld_value - curate_ctx["context"] = {"@context": data.full_context} + self.log.info("## Store curated data") + # store metadata + curated_metadata.write_to_cache(ctx, "result") ctx.finalize_step("curate") diff --git a/src/hermes/commands/curate/pass_curate.py b/src/hermes/commands/curate/pass_curate.py new file mode 100644 index 00000000..a8cacb91 --- /dev/null +++ b/src/hermes/commands/curate/pass_curate.py @@ -0,0 +1,15 @@ +from pydantic import BaseModel + +from hermes.model import SoftwareMetadata +from .base import HermesCurateCommand, HermesCuratePlugin + + +class DoNothingCurateSettings(BaseModel): + pass + + +class DoNothingCuratePlugin(HermesCuratePlugin): + settings_class = DoNothingCurateSettings + + def __call__(self, command: HermesCurateCommand, metadata: SoftwareMetadata) -> SoftwareMetadata: + return metadata diff --git a/src/hermes/commands/deposit/base.py b/src/hermes/commands/deposit/base.py index be84ed29..6b6f2358 100644 --- a/src/hermes/commands/deposit/base.py +++ b/src/hermes/commands/deposit/base.py @@ -11,6 +11,7 @@ from pydantic import BaseModel from hermes.commands.base import HermesCommand, HermesPlugin +from hermes.error import HermesPluginRunError, MisconfigurationError from hermes.model.context_manager import HermesContext from hermes.model import SoftwareMetadata from hermes.model.error import HermesValidationError @@ -29,17 +30,19 @@ def __call__(self, command: HermesCommand) -> None: """ self.command = command self.ctx = HermesContext() + self.ctx.prepare_step("deposit") self.ctx.prepare_step("curate") - self.metadata = SoftwareMetadata.load_from_cache(self.ctx, "result") + try: + self.metadata = SoftwareMetadata.load_from_cache(self.ctx, "result") + except Exception as e: + raise HermesValidationError("The results of the curate step are invalid.") from e self.ctx.finalize_step("curate") self.prepare() deposit = self.map_metadata() - self.ctx.prepare_step("deposit") with self.ctx[command.settings.target] as cache: cache["deposit"] = deposit - self.ctx.finalize_step("deposit") if self.is_initial_publication(): self.create_initial_version() @@ -47,7 +50,6 @@ def __call__(self, command: HermesCommand) -> None: self.create_new_version() deposit = self.update_metadata() - self.ctx.prepare_step("deposit") with self.ctx[command.settings.target] as cache: cache["result"] = deposit self.ctx.finalize_step("deposit") @@ -133,16 +135,24 @@ def init_command_parser(self, command_parser: argparse.ArgumentParser) -> None: help="Allow initial deposition (i.e., minting a new PID).") def __call__(self, args: argparse.Namespace) -> None: + self.log.info("# Metadata deposition") self.args = args plugin_name = self.settings.target + self.log.info("## Load deposit plugin") + # load plugin try: plugin_func = self.plugins[plugin_name]() except KeyError as e: - self.log.error("Plugin '%s' not found.", plugin_name) - self.errors.append(e) + self.log.error(f"Plugin {plugin_name} not found.") + raise MisconfigurationError(f"Deposit plugin {self.settings.plugin} not found.") + + self.log.info("## Run deposit plugin") + # run plugin try: plugin_func(self) except HermesValidationError as e: - self.log.error("Error while executing %s: %s", plugin_name, e) - self.errors.append(e) + self.log.error(f"Error while executing {plugin_name}: {e}") + raise HermesPluginRunError( + f"Something went wrong while running the curate plugin {self.settings.plugin}" + ) from e diff --git a/src/hermes/commands/harvest/base.py b/src/hermes/commands/harvest/base.py index 19ccc623..2873a778 100644 --- a/src/hermes/commands/harvest/base.py +++ b/src/hermes/commands/harvest/base.py @@ -9,6 +9,7 @@ from pydantic import BaseModel from hermes.commands.base import HermesCommand, HermesPlugin +from hermes.error import HermesPluginRunError, MisconfigurationError from hermes.model.context_manager import HermesContext from hermes.model.error import HermesValidationError from hermes.model import SoftwareMetadata @@ -20,7 +21,7 @@ class HermesHarvestPlugin(HermesPlugin): TODO: describe the harvesting process and how this is mapped to this plugin. """ - def __call__(self, command: HermesCommand) -> tuple[SoftwareMetadata, dict]: + def __call__(self, command: HermesCommand) -> SoftwareMetadata: pass @@ -37,28 +38,35 @@ class HermesHarvestCommand(HermesCommand): settings_class = HarvestSettings def __call__(self, args: argparse.Namespace) -> None: + self.log.info("# Metadata harvesting") self.args = args # Initialize the harvest cache directory here to indicate the step ran ctx = HermesContext() ctx.prepare_step('harvest') + self.log.info("## Load and run the plugins") for plugin_name in self.settings.sources: - plugin_cls = self.plugins[plugin_name] + self.log.info(f"### Load {plugin_name} plugin") + # load plugin + try: + plugin_func = self.plugins[plugin_name]() + except KeyError as e: + self.log.error(f"Plugin {plugin_name} not found.") + raise MisconfigurationError(f"Harvest plugin {plugin_name} not found.") + self.log.info(f"### Run {plugin_name} plugin") + # run plugin try: - # Load plugin and run the harvester - plugin_func = plugin_cls() harvested_data = plugin_func(self) - - with ctx[plugin_name] as plugin_ctx: - plugin_ctx["codemeta"] = harvested_data[0].compact() - plugin_ctx["context"] = {"@context": harvested_data[0].full_context} - - plugin_ctx["expanded"] = harvested_data[0].ld_value - - except HermesValidationError as e: - self.log.error("Error while executing %s: %s", plugin_name, e) - self.errors.append(e) + except Exception as e: + self.log.error(f"Unknown error while executing the {plugin_name} plugin.") + raise HermesPluginRunError( + f"Something went wrong while running the harvest plugin {plugin_name}" + ) from e + + self.log.info(f"### Store metadata harvested by {plugin_name} plugin") + # store harvested data + harvested_data.write_to_cache(ctx, plugin_name) ctx.finalize_step('harvest') diff --git a/src/hermes/commands/harvest/cff.py b/src/hermes/commands/harvest/cff.py index 6c2b6594..5a2d16c1 100644 --- a/src/hermes/commands/harvest/cff.py +++ b/src/hermes/commands/harvest/cff.py @@ -59,7 +59,7 @@ def __call__(self, command: HermesHarvestCommand) -> tuple[SoftwareMetadata, dic # TODO Replace the following temp patch for #112 once there is a new cffconvert version with cffconvert#309 codemeta_dict = self._patch_author_emails(cff_dict, codemeta_dict) ld_codemeta = SoftwareMetadata(codemeta_dict, extra_vocabs={'legalName': {'@id': "http://schema.org/name"}}) - return ld_codemeta, {} + return ld_codemeta def _load_cff_from_file(self, cff_data: str) -> Any: yaml = YAML(typ='safe') diff --git a/src/hermes/commands/harvest/codemeta.py b/src/hermes/commands/harvest/codemeta.py index 5f211222..3dc84296 100644 --- a/src/hermes/commands/harvest/codemeta.py +++ b/src/hermes/commands/harvest/codemeta.py @@ -40,7 +40,7 @@ def __call__(self, command: HermesHarvestCommand) -> tuple[SoftwareMetadata, dic raise HermesValidationError(codemeta_file) codemeta = json.loads(codemeta_str) - return SoftwareMetadata(codemeta), {'local_path': str(codemeta_file)} + return SoftwareMetadata(codemeta) # , {'local_path': str(codemeta_file)} def _validate(self, codemeta_file: pathlib.Path) -> bool: with open(codemeta_file, "r") as fi: diff --git a/src/hermes/commands/postprocess/base.py b/src/hermes/commands/postprocess/base.py index e528ae65..c36d7b8a 100644 --- a/src/hermes/commands/postprocess/base.py +++ b/src/hermes/commands/postprocess/base.py @@ -9,11 +9,15 @@ from pydantic import BaseModel -from ..base import HermesCommand, HermesPlugin +from hermes.commands.base import HermesCommand, HermesPlugin +from hermes.error import HermesPluginRunError, MisconfigurationError class HermesPostprocessPlugin(HermesPlugin): - pass + """ Base plugin for postprocess plugins. """ + + def __call__(self, command: HermesCommand) -> None: + pass class PostprocessSettings(BaseModel): @@ -29,13 +33,26 @@ class HermesPostprocessCommand(HermesCommand): settings_class = PostprocessSettings def __call__(self, args: argparse.Namespace) -> None: + self.log.info("# Postprocessing") self.args = args plugin_names = self.settings.run + self.log.info("## Load and run the plugins") for plugin_name in plugin_names: + self.log.info(f"### Load {plugin_name} plugin") + # load plugin try: plugin_func = self.plugins[plugin_name]() - plugin_func(self) except KeyError as e: - self.log.error("Plugin '%s' not found.", plugin_name) - self.errors.append(e) + self.log.error(f"Plugin {plugin_name} not found.") + raise MisconfigurationError(f"Postprocess plugin {plugin_name} not found.") + + self.log.info(f"### Run {plugin_name} plugin") + # run plugin + try: + plugin_func(self) + except Exception as e: + self.log.error(f"Unknown error while executing the {plugin_name} plugin.") + raise HermesPluginRunError( + f"Something went wrong while running the postprocess plugin {plugin_name}" + ) from e diff --git a/src/hermes/commands/process/base.py b/src/hermes/commands/process/base.py index 19f781e1..fb26fd68 100644 --- a/src/hermes/commands/process/base.py +++ b/src/hermes/commands/process/base.py @@ -10,9 +10,10 @@ from pydantic import BaseModel from hermes.commands.base import HermesCommand, HermesPlugin +from hermes.error import HermesPluginRunError, MisconfigurationError from hermes.model.api import SoftwareMetadata from hermes.model.context_manager import HermesContext -from hermes.model.error import HermesContextError +from hermes.model.error import HermesValidationError from hermes.model.merge.action import MergeAction from hermes.model.merge.container import ld_merge_dict @@ -37,33 +38,61 @@ class HermesProcessCommand(HermesCommand): settings_class = ProcessSettings def __call__(self, args: argparse.Namespace) -> None: + self.log.info("# Metadata processing") self.args = args - ctx = HermesContext() merged_doc = ld_merge_dict([{}]) + self.log.info("## Load and run the plugins") # add the strategies from the plugins for plugin_name in reversed(self.settings.plugins): - additional_strategies = self.plugins[plugin_name]()(self) + self.log.info(f"### Load {plugin_name} plugin") + # load plugin + try: + plugin_func = self.plugins[plugin_name]() + except KeyError as e: + self.log.error(f"Plugin {plugin_name} not found.") + raise MisconfigurationError(f"Postprocess plugin {plugin_name} not found.") + + self.log.info(f"### Run {plugin_name} plugin") + # run plugin + try: + additional_strategies = plugin_func(self) + except Exception as e: + self.log.error(f"Unknown error while executing the {plugin_name} plugin.") + raise HermesPluginRunError( + f"Something went wrong while running the postprocess plugin {plugin_name}" + ) from e + + self.log.info(f"### Add the strategies to the merge document {plugin_name} plugin") + # add strategies to the merge document merged_doc.add_strategy(additional_strategies) + ctx = HermesContext() + ctx.prepare_step('harvest') + + self.log.info("## Merge the metadata of the harvesters") # Get all harvesters harvester_names = self.root_settings.harvest.sources - - ctx.prepare_step('harvest') for harvester in harvester_names: - self.log.info("## Process data from %s", harvester) + self.log.info(f"## Load data from {harvester} plugin") + # load data from harvester try: metadata = SoftwareMetadata.load_from_cache(ctx, harvester) - except HermesContextError as e: - self.log.error("Error while trying to load data from harvest plugin '%s': %s", harvester, e) - self.errors.append(e) - continue + except Exception as e: + self.log.error(f"The data from the harvester {harvester} could not be loaded or is invalid.") + raise HermesValidationError(f"The results of the harvest plugin {harvester} is invalid.") from e + + self.log.info(f"## Merge data from {harvester} plugin") + # merge data into the merge dict merged_doc.update(metadata) - ctx.finalize_step("harvest") + self.log.info("## Store processed metadata") + # store processed data ctx.prepare_step("process") with ctx["result"] as result_ctx: result_ctx["codemeta"] = merged_doc.compact() result_ctx["context"] = {"@context": merged_doc.full_context} result_ctx["expanded"] = merged_doc.ld_value ctx.finalize_step("process") + + ctx.finalize_step("harvest") diff --git a/src/hermes/error.py b/src/hermes/error.py index 1669ed39..50007133 100644 --- a/src/hermes/error.py +++ b/src/hermes/error.py @@ -6,3 +6,6 @@ class MisconfigurationError(Exception): pass + +class HermesPluginRunError(Exception): + pass \ No newline at end of file diff --git a/src/hermes/model/api.py b/src/hermes/model/api.py index aac88b9e..2b467636 100644 --- a/src/hermes/model/api.py +++ b/src/hermes/model/api.py @@ -78,3 +78,21 @@ def load_from_cache(cls: type[Self], ctx: HermesContext, source: str) -> "Softwa except Exception as e: # No data could be loaded, raise an error instead. raise HermesContextError("There is no (valid) data stored in the cache.") from e + + def write_to_cache(self: Self, ctx: HermesContext, target_dir: str) -> None: + """ + Writes the JSON_LD data of `self` to the given HermesContext object at the given target.\n + Note that data is written into "codemeta.json" (compacted value), "context.json" (context value) and + "expanded.json" (expanded value). + + Args: + ctx (HermesContext): The HERMES cache the data is written to. + target_dir (str): The directory the inside the cache the data is written to. + + Returns: + None: + """ + with ctx[target_dir] as cache: + cache["codemeta"] = self.compact() + cache["context"] = {"@context": self.full_context} + cache["expanded"] = self.ld_value diff --git a/test/hermes_test/commands/curate/test_do_nothing_curate.py b/test/hermes_test/commands/curate/test_do_nothing_curate.py index 3e2811c7..df8fe118 100644 --- a/test/hermes_test/commands/curate/test_do_nothing_curate.py +++ b/test/hermes_test/commands/curate/test_do_nothing_curate.py @@ -93,7 +93,7 @@ def test_do_nothing_curate(tmp_path, monkeypatch, process_result, res): manager.finalize_step("process") config_file = tmp_path / "hermes.toml" - config_file.write_text("") + config_file.write_text("[curate]\nplugin = \"pass_curate\"") orig_argv = sys.argv[:] sys.argv = ["hermes", "curate", "--path", str(tmp_path), "--config", str(config_file)] From 296fbb7e71217a3b0fd214fb2d4bf78b754c79af Mon Sep 17 00:00:00 2001 From: notactuallyfinn Date: Thu, 19 Mar 2026 17:47:21 +0100 Subject: [PATCH 223/247] fixed formatting errors --- src/hermes/commands/curate/base.py | 2 +- src/hermes/commands/deposit/base.py | 2 +- src/hermes/commands/harvest/base.py | 3 +-- src/hermes/commands/postprocess/base.py | 2 +- src/hermes/commands/process/base.py | 2 +- src/hermes/error.py | 3 ++- test/hermes_test/model/types/test_pyld_util.py | 2 -- 7 files changed, 7 insertions(+), 9 deletions(-) diff --git a/src/hermes/commands/curate/base.py b/src/hermes/commands/curate/base.py index 8aedd956..cf3c3457 100644 --- a/src/hermes/commands/curate/base.py +++ b/src/hermes/commands/curate/base.py @@ -55,7 +55,7 @@ def __call__(self, args: argparse.Namespace) -> None: # load plugin try: plugin_func = self.plugins[plugin_name]() - except KeyError as e: + except KeyError: self.log.error(f"Plugin {plugin_name} not found.") raise MisconfigurationError(f"Curate plugin {plugin_name} not found.") diff --git a/src/hermes/commands/deposit/base.py b/src/hermes/commands/deposit/base.py index 6b6f2358..f6b911c7 100644 --- a/src/hermes/commands/deposit/base.py +++ b/src/hermes/commands/deposit/base.py @@ -143,7 +143,7 @@ def __call__(self, args: argparse.Namespace) -> None: # load plugin try: plugin_func = self.plugins[plugin_name]() - except KeyError as e: + except KeyError: self.log.error(f"Plugin {plugin_name} not found.") raise MisconfigurationError(f"Deposit plugin {self.settings.plugin} not found.") diff --git a/src/hermes/commands/harvest/base.py b/src/hermes/commands/harvest/base.py index 2873a778..6a7a8d82 100644 --- a/src/hermes/commands/harvest/base.py +++ b/src/hermes/commands/harvest/base.py @@ -11,7 +11,6 @@ from hermes.commands.base import HermesCommand, HermesPlugin from hermes.error import HermesPluginRunError, MisconfigurationError from hermes.model.context_manager import HermesContext -from hermes.model.error import HermesValidationError from hermes.model import SoftwareMetadata @@ -51,7 +50,7 @@ def __call__(self, args: argparse.Namespace) -> None: # load plugin try: plugin_func = self.plugins[plugin_name]() - except KeyError as e: + except KeyError: self.log.error(f"Plugin {plugin_name} not found.") raise MisconfigurationError(f"Harvest plugin {plugin_name} not found.") diff --git a/src/hermes/commands/postprocess/base.py b/src/hermes/commands/postprocess/base.py index c36d7b8a..776576b1 100644 --- a/src/hermes/commands/postprocess/base.py +++ b/src/hermes/commands/postprocess/base.py @@ -43,7 +43,7 @@ def __call__(self, args: argparse.Namespace) -> None: # load plugin try: plugin_func = self.plugins[plugin_name]() - except KeyError as e: + except KeyError: self.log.error(f"Plugin {plugin_name} not found.") raise MisconfigurationError(f"Postprocess plugin {plugin_name} not found.") diff --git a/src/hermes/commands/process/base.py b/src/hermes/commands/process/base.py index fb26fd68..9662cde0 100644 --- a/src/hermes/commands/process/base.py +++ b/src/hermes/commands/process/base.py @@ -49,7 +49,7 @@ def __call__(self, args: argparse.Namespace) -> None: # load plugin try: plugin_func = self.plugins[plugin_name]() - except KeyError as e: + except KeyError: self.log.error(f"Plugin {plugin_name} not found.") raise MisconfigurationError(f"Postprocess plugin {plugin_name} not found.") diff --git a/src/hermes/error.py b/src/hermes/error.py index 50007133..697bfe92 100644 --- a/src/hermes/error.py +++ b/src/hermes/error.py @@ -7,5 +7,6 @@ class MisconfigurationError(Exception): pass + class HermesPluginRunError(Exception): - pass \ No newline at end of file + pass diff --git a/test/hermes_test/model/types/test_pyld_util.py b/test/hermes_test/model/types/test_pyld_util.py index 46e3eab1..a206e4b2 100644 --- a/test/hermes_test/model/types/test_pyld_util.py +++ b/test/hermes_test/model/types/test_pyld_util.py @@ -6,8 +6,6 @@ import pytest -from unittest import mock - from pyld import jsonld from hermes.model.types import pyld_util From 758dbde5377bc7e69a38c2dd2331d6fb5cde5342 Mon Sep 17 00:00:00 2001 From: notactuallyfinn Date: Fri, 20 Mar 2026 13:00:04 +0100 Subject: [PATCH 224/247] reactivated tests and added afew log calls --- src/hermes/commands/__init__.py | 2 +- src/hermes/commands/cli.py | 12 ++++-------- src/hermes/commands/curate/base.py | 6 +++--- src/hermes/commands/deposit/base.py | 4 ++-- src/hermes/commands/harvest/base.py | 3 +++ .../hermes_test/commands/deposit/test_invenio_e2e.py | 2 +- test/hermes_test/commands/init/test_init.py | 2 -- test/hermes_test/conftest.py | 3 ++- test/hermes_test/test_cli.py | 3 --- test/hermes_test/test_main.py | 4 ---- test/hermes_test/test_marketplace.py | 4 ---- 11 files changed, 16 insertions(+), 29 deletions(-) diff --git a/src/hermes/commands/__init__.py b/src/hermes/commands/__init__.py index 3a2906d4..5203ac18 100644 --- a/src/hermes/commands/__init__.py +++ b/src/hermes/commands/__init__.py @@ -11,7 +11,7 @@ from hermes.commands.base import HermesHelpCommand from hermes.commands.base import HermesVersionCommand from hermes.commands.clean.base import HermesCleanCommand -# from hermes.commands.init.base import HermesInitCommand +from hermes.commands.init.base import HermesInitCommand from hermes.commands.curate.base import HermesCurateCommand from hermes.commands.harvest.base import HermesHarvestCommand from hermes.commands.process.base import HermesProcessCommand diff --git a/src/hermes/commands/cli.py b/src/hermes/commands/cli.py index 079a073f..23daae3e 100644 --- a/src/hermes/commands/cli.py +++ b/src/hermes/commands/cli.py @@ -12,13 +12,9 @@ import sys from hermes import logger -# FIXME: remove comments after new implementation of modules is available -# from hermes.commands import (HermesHelpCommand, HermesVersionCommand, HermesCleanCommand, -# HermesHarvestCommand, HermesProcessCommand, HermesCurateCommand, -# HermesDepositCommand, HermesPostprocessCommand, HermesInitCommand) from hermes.commands import ( - HermesCurateCommand, HermesDepositCommand, HermesHarvestCommand, HermesHelpCommand, HermesPostprocessCommand, - HermesProcessCommand, HermesVersionCommand + HermesCurateCommand, HermesCleanCommand, HermesDepositCommand, HermesHarvestCommand, HermesHelpCommand, + HermesInitCommand, HermesPostprocessCommand, HermesProcessCommand, HermesVersionCommand ) from hermes.commands.base import HermesCommand from hermes.error import HermesPluginRunError @@ -42,12 +38,12 @@ def main() -> None: setting_types = {} for command in ( - # HermesCleanCommand(parser), + HermesCleanCommand(parser), HermesCurateCommand(parser), HermesDepositCommand(parser), HermesHarvestCommand(parser), HermesHelpCommand(parser), - # HermesInitCommand(parser), + HermesInitCommand(parser), HermesPostprocessCommand(parser), HermesProcessCommand(parser), HermesVersionCommand(parser), diff --git a/src/hermes/commands/curate/base.py b/src/hermes/commands/curate/base.py index cf3c3457..8983f8d6 100644 --- a/src/hermes/commands/curate/base.py +++ b/src/hermes/commands/curate/base.py @@ -25,7 +25,7 @@ def __call__(self, command: HermesCommand, metadata: SoftwareMetadata) -> Softwa class CurateSettings(BaseModel): """Generic deposition settings.""" - plugin: str = "" + plugin: str = "pass_curate" class HermesCurateCommand(HermesCommand): @@ -51,7 +51,7 @@ def __call__(self, args: argparse.Namespace) -> None: raise HermesValidationError("The results of the process step are invalid.") from e ctx.finalize_step("process") - self.log.info("## Load curation plugin") + self.log.info(f"## Load curation plugin {plugin_name}") # load plugin try: plugin_func = self.plugins[plugin_name]() @@ -59,7 +59,7 @@ def __call__(self, args: argparse.Namespace) -> None: self.log.error(f"Plugin {plugin_name} not found.") raise MisconfigurationError(f"Curate plugin {plugin_name} not found.") - self.log.info("## Run curation plugin") + self.log.info(f"## Run curation plugin {plugin_name}") # run plugin try: curated_metadata = plugin_func(self, metadata) diff --git a/src/hermes/commands/deposit/base.py b/src/hermes/commands/deposit/base.py index f6b911c7..2d26df51 100644 --- a/src/hermes/commands/deposit/base.py +++ b/src/hermes/commands/deposit/base.py @@ -139,7 +139,7 @@ def __call__(self, args: argparse.Namespace) -> None: self.args = args plugin_name = self.settings.target - self.log.info("## Load deposit plugin") + self.log.info(f"## Load deposit plugin {plugin_name}") # load plugin try: plugin_func = self.plugins[plugin_name]() @@ -147,7 +147,7 @@ def __call__(self, args: argparse.Namespace) -> None: self.log.error(f"Plugin {plugin_name} not found.") raise MisconfigurationError(f"Deposit plugin {self.settings.plugin} not found.") - self.log.info("## Run deposit plugin") + self.log.info(f"## Run deposit plugin {plugin_name}") # run plugin try: plugin_func(self) diff --git a/src/hermes/commands/harvest/base.py b/src/hermes/commands/harvest/base.py index 6a7a8d82..b9fcd573 100644 --- a/src/hermes/commands/harvest/base.py +++ b/src/hermes/commands/harvest/base.py @@ -40,6 +40,9 @@ def __call__(self, args: argparse.Namespace) -> None: self.log.info("# Metadata harvesting") self.args = args + if len(self.settings.sources) == 0: + self.log.info("# No plugin was configured to be run and loaded.") + # Initialize the harvest cache directory here to indicate the step ran ctx = HermesContext() ctx.prepare_step('harvest') diff --git a/test/hermes_test/commands/deposit/test_invenio_e2e.py b/test/hermes_test/commands/deposit/test_invenio_e2e.py index 82148bee..115042a6 100644 --- a/test/hermes_test/commands/deposit/test_invenio_e2e.py +++ b/test/hermes_test/commands/deposit/test_invenio_e2e.py @@ -37,7 +37,7 @@ def sandbox_auth(): "http://schema.org/familyName": [{"@value": "Test"}], "http://schema.org/givenName": [{"@value": "Testi"}] }], - "http://schema.org/license": [{"@id": "https://spdx.org/licenses/Apache-2.0"}] + "http://schema.org/license": ["https://spdx.org/licenses/Apache-2.0"] }), { "upload_type": "software", diff --git a/test/hermes_test/commands/init/test_init.py b/test/hermes_test/commands/init/test_init.py index 98653dda..c77b705f 100644 --- a/test/hermes_test/commands/init/test_init.py +++ b/test/hermes_test/commands/init/test_init.py @@ -7,8 +7,6 @@ import json import pytest -pytest.skip("FIXME: Re-enable test after data model refactoring is done.", allow_module_level=True) - from hermes.commands.init.base import string_in_file, download_file_from_url from unittest.mock import patch, MagicMock import hermes.commands.init.util.oauth_process as oauth_process diff --git a/test/hermes_test/conftest.py b/test/hermes_test/conftest.py index 2d3e52b2..c0f5a4d5 100644 --- a/test/hermes_test/conftest.py +++ b/test/hermes_test/conftest.py @@ -7,6 +7,7 @@ import os import shutil import subprocess +import sys import pytest @@ -33,7 +34,7 @@ def __enter__(self): def run(self, *args): proc = subprocess.Popen( - [self.hermes_exe, *args], stdout=subprocess.PIPE, stderr=subprocess.PIPE + [sys.executable, "-m", self.hermes_exe, *args], stdout=subprocess.PIPE, stderr=subprocess.PIPE ) proc.wait() return proc diff --git a/test/hermes_test/test_cli.py b/test/hermes_test/test_cli.py index 26d8c7ef..4a747851 100644 --- a/test/hermes_test/test_cli.py +++ b/test/hermes_test/test_cli.py @@ -8,8 +8,6 @@ import pytest -pytest.skip("FIXME: Re-enable test after data model refactoring is done.", allow_module_level=True) - from hermes.commands import cli @@ -34,6 +32,5 @@ def test_hermes_process(hermes_env): with hermes_env: result = hermes_env.run("process") - print(result.stdout.read()) assert result.returncode == 0 diff --git a/test/hermes_test/test_main.py b/test/hermes_test/test_main.py index 74023020..52780024 100644 --- a/test/hermes_test/test_main.py +++ b/test/hermes_test/test_main.py @@ -6,10 +6,6 @@ # flake8: noqa -import pytest - -pytest.skip("FIXME: Re-enable test after data model refactoring is done.", allow_module_level=True) - import subprocess import sys diff --git a/test/hermes_test/test_marketplace.py b/test/hermes_test/test_marketplace.py index ec76f240..489d5f65 100644 --- a/test/hermes_test/test_marketplace.py +++ b/test/hermes_test/test_marketplace.py @@ -4,10 +4,6 @@ # flake8: noqa -import pytest - -pytest.skip("FIXME: Re-enable test after data model refactoring is done.", allow_module_level=True) - import requests_mock from hermes.commands.marketplace import ( From 7de8c7d22490676aef92edee40260411b83dc09c Mon Sep 17 00:00:00 2001 From: notactuallyfinn Date: Fri, 20 Mar 2026 13:25:40 +0100 Subject: [PATCH 225/247] applied simple suggestions --- .../tutorials/writing-a-plugin-for-hermes.md | 30 ++++++++++--------- src/hermes/commands/deposit/file.py | 2 +- .../commands/deposit/test_file_deposit.py | 2 +- 3 files changed, 18 insertions(+), 16 deletions(-) diff --git a/docs/source/tutorials/writing-a-plugin-for-hermes.md b/docs/source/tutorials/writing-a-plugin-for-hermes.md index 9c88f0d7..8607f31e 100644 --- a/docs/source/tutorials/writing-a-plugin-for-hermes.md +++ b/docs/source/tutorials/writing-a-plugin-for-hermes.md @@ -33,16 +33,16 @@ And uses the [schmea.org](https://schema.org/) (with prefix "schema") and the [C HERMES uses a plugin architecture. Therefore, users are invited to contribute own features. The structure for every plugin follows the same schema. -Every plugin is a sub class of a sub class of the `HermesPlugin` class. -This class implements one abstract method, `__call__`, which needs to be overwritten by every plugin. -In between the `HermesPlugin` class and the class of a specific plugin there is another class which follows the naming scheme `Hermes{Step}Plugin` where `{Step}` is the step the plugin is for. +Every plugin is a sub class of a sub class of the {py:class}`~hermes.commands.base.HermesPlugin` class. +This class implements one abstract method, {py:meth}`~hermes.commands.base.HermesPlugin.__call__`, which needs to be overwritten by every plugin. +In between the {py:class}`~hermes.commands.base.HermesPlugin` class and the class of a specific plugin there is another class which follows the naming scheme `Hermes{Step}Plugin` where `{Step}` is the step the plugin is for. These base classes may implement additional (abstract) methods that may have to be implemented by the plugins class. -The first positional attribute of the `__call__` method is an object of class `Hermes{Step}Command` (where `{Step}` is the step the plugin is for), which is a sub class of `HermesCommand`, which triggered this plugin to run. +The first positional attribute of the `__call__` method is an object of class `Hermes{Step}Command` (where `{Step}` is the step the plugin is for), which is a sub class of {py:class}`~hermes.commands.base.HermesCommand`, which triggered this plugin to run. An exception to this are the deposit plugins. Those don't implement the `__call__` method and instead can implement (and have to implement some) other functions. -The plugin class also uses a derivative of `HermesSettings` to add parameters that can be adapted by the configuration file. -`HermesSettings` is the base class for command specific settings. +The plugin class also uses a derivative of {py:class}`~hermes.commands.base.HermesSettings` to add parameters that can be adapted by the configuration file. +{py:class}`~hermes.commands.base.HermesSettings` is the base class for command specific settings. It uses [pydantic](https://docs.pydantic.dev/latest/) [settings](https://docs.pydantic.dev/latest/api/pydantic_settings/) to specify and validate the parameters. The user can either set the parameters in the `hermes.toml` or overwrite them in the command line. To overwrite a parameter from command line, use the `-O` command line option followed by the dotted parameter name and the value. @@ -80,7 +80,7 @@ class YourHarvestPlugin(HermesHarvestPlugin): return data ``` -The `__call__` method of harest plugins needs to return a SoftwareMetadata object containing the harvested metadata. +The {py:meth}`~hermes.commands.harvest.base.HermesHarvestPlugin.__call__` method of harest plugins needs to return a {py:class}`~hermes.model.api.SoftwareMetadata` object containing the harvested metadata. For more information on how to use this object see [here](../dev/data_model.md). ### Process plugin @@ -110,8 +110,8 @@ class YourProcessPlugin(HermesProcessPlugin): return strategies ``` -The `__call__` method of process plugins needs to return a dictionary mappings strings and/ or `None` to dictionaries mapping strings or `None` to {py:class}`hermes.model.merge.action.MergeAction`. -If `strategies` looked like this (where `Reject` is imported from `hermes.model.merge.action`) +The {py:meth}`~hermes.commands.process.base.HermesProcessPlugin.__call__` method of process plugins needs to return a dictionary mappings strings and/ or `None` to dictionaries mapping strings or `None` to {py:class}`~hermes.model.merge.action.MergeAction`. +If `strategies` looked like this (where {py:class}`~hermes.model.merge.action.Reject` is imported from {py:mod}`hermes.model.merge.action`) ```{code-block} python strategies = { full_type_iri: { @@ -122,7 +122,7 @@ strategies = { } ``` -HERMES would use the `Reject` strategy for merging values of the key `full_property_iri` in objects of type `full_type_iri`. (A key in strategies being `None` instead of a string indicates to HERMES that its value is to be used as a default [i.e. if no more specific entry exists].) +HERMES would use the {py:class}`~hermes.model.merge.action.Reject` strategy for merging values of the key `full_property_iri` in objects of type `full_type_iri`. (A key in strategies being `None` instead of a string indicates to HERMES that its value is to be used as a default [i.e. if no more specific entry exists].) HERMES will prioritize strategies from other plugins depending on the order of the plugins in the `hermes.toml`. Generally the hierarchy is as follows (first most important): 1. strategies with `full_property_iri` and `full_type_iri` not `None`. @@ -157,7 +157,7 @@ class YourCuratePlugin(HermesCuratePlugin): return data ``` -The `__call__` method of harest plugins needs to return a SoftwareMetadata object containing the curated metadata. +The {py:meth}`~hermes.commands.curate.base.HermesCuratePlugin.__call__` method of curate plugins needs to return a {py:class}`~hermes.model.api.SoftwareMetadata` object containing the curated metadata. For more information on how to use this object see [here](../dev/data_model.md). The returned object may be the object `metadata` passed to `__call__`. @@ -225,7 +225,7 @@ class YourDepositPlugin(HermesDepositPlugin): A deposit plugin doesn't implement a `__call__` method like plugins for other steps. Instead it can (and in some cases has to) implement methods, which will be called in a predefined order. -The plugin still has access to the command (via self.command) and the metadata for the software (via self.metadata). +The plugin still has access to the command (via `self.command`) and the metadata for the software (via `self.metadata`). ### Postprocess plugin The class structure of a postprocess plugin should look like this: @@ -259,7 +259,7 @@ with ctx[deposit_plugin_name] as manager: ctx.finalize_step("deposit") ``` -where `deposit_plugin_name` is the name of the deposit plugin the data is loaded from and HermesContext is {py:class}`hermes.model.context_manager.HermesContext`. +where `deposit_plugin_name` is the name of the deposit plugin the data is loaded from and {py:class}`~hermes.model.context_manager.HermesContext` is imported from {py:mod}`hermes.model.context_manager`. The loaded data is some valid JSON data and has no fixed format. ## Implement and use plugin specific settings @@ -376,11 +376,13 @@ target = "{plugin_name}" [postprocess] run = [ ..., "{plugin_name}", ... ] ... + ```

```{admonition} Congratulations! You can now write plugins for HERMES. -Consider publishing it for others to use following this guide. TODO: add link ``` +Consider publishing it to the [HERMES plugin marketplace](../index.md#plugins) for others to use following this guide. TODO: add link + If you have any questions, wishes or requests, feel free to contact us. diff --git a/src/hermes/commands/deposit/file.py b/src/hermes/commands/deposit/file.py index ed6bd570..e1211885 100644 --- a/src/hermes/commands/deposit/file.py +++ b/src/hermes/commands/deposit/file.py @@ -14,7 +14,7 @@ class FileDepositSettings(BaseModel): - filename: str = 'codemeta.json' + filename: str = 'hermes.json' class FileDepositPlugin(BaseDepositPlugin): diff --git a/test/hermes_test/commands/deposit/test_file_deposit.py b/test/hermes_test/commands/deposit/test_file_deposit.py index ba801b4c..a8d4058b 100644 --- a/test/hermes_test/commands/deposit/test_file_deposit.py +++ b/test/hermes_test/commands/deposit/test_file_deposit.py @@ -45,7 +45,7 @@ def test_file_deposit(tmp_path, monkeypatch, metadata): if e.code != 0: raise e finally: - with open("codemeta.json", "r") as cache: + with open("hermes.json", "r") as cache: result = SoftwareMetadata(json.load(cache)) sys.argv = orig_argv From ad63e5a66ade2f1fe988053d74618ad45d43129a Mon Sep 17 00:00:00 2001 From: notactuallyfinn Date: Mon, 23 Mar 2026 09:34:27 +0100 Subject: [PATCH 226/247] fixed misspelled class name --- pyproject.toml | 2 +- src/hermes/commands/deposit/invenio_rdm.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index fa2885ae..bcc4dc3c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -63,7 +63,7 @@ codemeta = "hermes.commands.harvest.codemeta:CodeMetaHarvestPlugin" [project.entry-points."hermes.deposit"] file = "hermes.commands.deposit.file:FileDepositPlugin" invenio = "hermes.commands.deposit.invenio:InvenioDepositPlugin" -invenio_rdm = "hermes.commands.deposit.invenio_rdm:IvenioRDMDepositPlugin" +invenio_rdm = "hermes.commands.deposit.invenio_rdm:InvenioRDMDepositPlugin" rodare = "hermes.commands.deposit.rodare:RodareDepositPlugin" [project.entry-points."hermes.postprocess"] diff --git a/src/hermes/commands/deposit/invenio_rdm.py b/src/hermes/commands/deposit/invenio_rdm.py index 01e08371..2faeb29f 100644 --- a/src/hermes/commands/deposit/invenio_rdm.py +++ b/src/hermes/commands/deposit/invenio_rdm.py @@ -90,7 +90,7 @@ def _search_license_info(self, _url: str, valid_licenses: dict) -> Union[dict, N return None -class IvenioRDMDepositPlugin(InvenioDepositPlugin): +class InvenioRDMDepositPlugin(InvenioDepositPlugin): platform_name = "invenio_rdm" invenio_client_class = InvenioRDMClient invenio_resolver_class = InvenioRDMResolver From 7a8e8aeea654a9147f5a364de663cfef2c3d24ff Mon Sep 17 00:00:00 2001 From: notactuallyfinn Date: Mon, 23 Mar 2026 10:00:21 +0100 Subject: [PATCH 227/247] implemented suggestions and fixed bug --- src/hermes/commands/deposit/file.py | 6 ++++++ src/hermes/commands/deposit/invenio.py | 24 +++++++++++++++++------- src/hermes/commands/process/base.py | 23 ++++++++++++++++++----- 3 files changed, 41 insertions(+), 12 deletions(-) diff --git a/src/hermes/commands/deposit/file.py b/src/hermes/commands/deposit/file.py index e1211885..24bea5e8 100644 --- a/src/hermes/commands/deposit/file.py +++ b/src/hermes/commands/deposit/file.py @@ -7,12 +7,17 @@ # SPDX-FileContributor: Stephan Druskat import json +import logging +import os from pydantic import BaseModel from hermes.commands.deposit.base import BaseDepositPlugin +_log = logging.getLogger("cli.deposit.file") + + class FileDepositSettings(BaseModel): filename: str = 'hermes.json' @@ -31,3 +36,4 @@ def publish(self) -> None: with open(file_config.filename, 'w') as deposition_file: json.dump(self.metadata.compact(), deposition_file, indent=2) + _log.info(f"The deposited metadata can be found in {os.path.abspath(file_config.filename)}.") diff --git a/src/hermes/commands/deposit/invenio.py b/src/hermes/commands/deposit/invenio.py index ba45c146..fb4e05c1 100644 --- a/src/hermes/commands/deposit/invenio.py +++ b/src/hermes/commands/deposit/invenio.py @@ -311,13 +311,23 @@ def prepare(self) -> None: - update ``self.metadata`` with metadata collected during the checks """ - rec_id = self.config.record_id - doi = self.config.doi - - codemeta_identifier = self.metadata.get("identifier", None) - rec_id, rec_meta = self.resolver.resolve_latest_id( - record_id=rec_id, doi=doi, codemeta_identifier=codemeta_identifier - ) + conf_rec_id = self.config.record_id + conf_doi = self.config.doi + + codemeta_identifiers = self.metadata.get("identifier", [None]) + rec_id, rec_meta = None, {} + for codemeta_identifier in codemeta_identifiers: + if not isinstance(codemeta_identifier, str): + # FIXME: Can also be PropertyValue (i.e. ld_dict), that case has to be handled. + codemeta_identifier = None + tmp_rec_id, tmp_rec_meta = self.resolver.resolve_latest_id( + record_id=conf_rec_id, doi=conf_doi, codemeta_identifier=codemeta_identifier + ) + if tmp_rec_id is not None or tmp_rec_meta != {}: + if rec_id != tmp_rec_id or rec_meta != tmp_rec_meta: + # FIXME: Maybe finding different record ids is not fatal? + raise HermesValidationError("Found two different record ids or conflicting metadata.") + rec_id, rec_meta = tmp_rec_id, tmp_rec_meta if len(self.metadata.get("version", [])) > 1: raise HermesValidationError("Too many licenses for invenio deposit.") diff --git a/src/hermes/commands/process/base.py b/src/hermes/commands/process/base.py index 9662cde0..a1b1504f 100644 --- a/src/hermes/commands/process/base.py +++ b/src/hermes/commands/process/base.py @@ -13,7 +13,6 @@ from hermes.error import HermesPluginRunError, MisconfigurationError from hermes.model.api import SoftwareMetadata from hermes.model.context_manager import HermesContext -from hermes.model.error import HermesValidationError from hermes.model.merge.action import MergeAction from hermes.model.merge.container import ld_merge_dict @@ -28,6 +27,7 @@ def __call__(self, command: HermesCommand) -> dict[Union[str, None], dict[Union[ class ProcessSettings(BaseModel): """Generic deposition settings.""" + sources: list = [] plugins: list = [] @@ -72,19 +72,32 @@ def __call__(self, args: argparse.Namespace) -> None: self.log.info("## Merge the metadata of the harvesters") # Get all harvesters - harvester_names = self.root_settings.harvest.sources + harvester_names = self.settings.sources if self.settings.sources else self.root_settings.harvest.sources + merged_any = False for harvester in harvester_names: self.log.info(f"## Load data from {harvester} plugin") # load data from harvester try: metadata = SoftwareMetadata.load_from_cache(ctx, harvester) - except Exception as e: - self.log.error(f"The data from the harvester {harvester} could not be loaded or is invalid.") - raise HermesValidationError(f"The results of the harvest plugin {harvester} is invalid.") from e + except Exception: + # skip this harvester when the data is invalid + self.log.warning(f"The data from the harvester {harvester} could not be loaded or is invalid.") + self.log.info(f"## Aborting merge for {harvester}") + continue self.log.info(f"## Merge data from {harvester} plugin") # merge data into the merge dict merged_doc.update(metadata) + merged_any = True + + # error if nothing was merged + if not merged_any: + self.log.error( + f"""No metadata has been merged. { + "No harvesters to merge from were supplied" if not harvester_names else + "The merging failed for all harvesters." + }""" + ) self.log.info("## Store processed metadata") # store processed data From 9d3dc7f683009e9c8db8b2229aaa850b31d8f485 Mon Sep 17 00:00:00 2001 From: notactuallyfinn Date: Wed, 25 Mar 2026 11:05:13 +0100 Subject: [PATCH 228/247] added test_case and generate strategies automatically --- src/hermes/commands/process/standard_merge.py | 96 ++++++- .../commands/process/test_process.py | 255 +++++++++++++----- 2 files changed, 275 insertions(+), 76 deletions(-) diff --git a/src/hermes/commands/process/standard_merge.py b/src/hermes/commands/process/standard_merge.py index 463be4ff..97e94404 100644 --- a/src/hermes/commands/process/standard_merge.py +++ b/src/hermes/commands/process/standard_merge.py @@ -5,8 +5,11 @@ # SPDX-FileContributor: Michael Fritzsche +import csv from typing import Any, Callable, Union +import requests + from hermes.commands.base import HermesCommand from hermes.model.merge.action import Concat, MergeAction, MergeSet from hermes.model.types import ld_dict @@ -14,6 +17,20 @@ from .base import HermesProcessPlugin +def match_equals(left: Any, right: Any) -> bool: + """ + Compares two objects with ==. + + Args: + left (Any): The first object for the comparison. + right (Any): The second object for the comparison. + + Returns: + bool: The result of the comparison. + """ + return left == right + + def match_keys(*keys: list[str], fall_back_to_equals: bool = False) -> Callable[[Any, Any], bool]: """ Creates a function taking to parameters that returns true @@ -140,7 +157,7 @@ def match_func(left: Any, right: Any) -> bool: DEFAULT_MATCH = match_keys("@id", fall_back_to_equals=True) """ Callable[[Any, Any], bool]: The default match function used for comparison. """ -MATCH_FUNCTION_FOR_TYPE = {"schema:Person": match_person} +MATCH_FUNCTION_FOR_TYPE = {iri["schema:Person"]: match_person} """ dict[str, Callable[[Any, Any], bool]]: A dict containing for JSON_LD types the match function (not DEFAULT_MATCH). """ @@ -148,10 +165,10 @@ def match_func(left: Any, right: Any) -> bool: ACTIONS = { "default": MergeSet(DEFAULT_MATCH), "concat": Concat(), - "Person": MergeSet(MATCH_FUNCTION_FOR_TYPE["schema:Person"]), + "Person": MergeSet(MATCH_FUNCTION_FOR_TYPE[iri["schema:Person"]]), **{ "Or".join(types): MergeSet(match_multiple_types( - *(("schema:" + type, MATCH_FUNCTION_FOR_TYPE.get("schema:" + type, DEFAULT_MATCH)) for type in types) + *(("schema:" + type, MATCH_FUNCTION_FOR_TYPE.get(iri["schema:" + type], DEFAULT_MATCH)) for type in types) )) for types in [ ("AboutPage", "CreativeWork"), @@ -844,7 +861,78 @@ def match_func(left: Any, right: Any) -> bool: class CodemetaProcessPlugin(HermesProcessPlugin): def __call__(self, command: HermesCommand) -> dict[Union[str, None], dict[Union[str, None], MergeAction]]: - strats = {**CODEMETA_STRATEGY} + try: + strats = CodemetaProcessPlugin.get_schema_strategies() + strats.update(CodemetaProcessPlugin.get_codemeta_strategies()) + strats[None] = {None: MergeSet(DEFAULT_MATCH)} + except Exception: + strats = {**CODEMETA_STRATEGY} for key, value in PROV_STRATEGY.items(): strats[key] = {**value, **strats.get(key, {})} return strats + + @classmethod + def get_schema_strategies(cls): + # get a set of all types that have to be handled separately + special_types = set(MATCH_FUNCTION_FOR_TYPE.keys()) + + # get and read csv file containing information on schema.org types + # switch to schemaorg-current-https-types.csv on change of standard context in HERMES + download = requests.get("https://schema.org/version/latest/schemaorg-current-http-types.csv") + decoded_content = download.content.decode('utf-8') + cr = csv.reader(decoded_content.splitlines(), delimiter=',') + # remove the first line (headers) + type_table = list(cr)[1:] + # build list of all subtypes for every type + subtypes_for_types = {} + for type_row in type_table: + if len(type_row[7]) == 0: + # no (direct) subtype + subtypes_for_types[type_row[0]] = set() + else: + # add direct subtypes + subtypes_for_types[type_row[0]] = set(type_row[7].split(", ")) + # only immediate subtypes have been recorded now, add sub...subtypes too + for super_type in subtypes_for_types: + for other_type in subtypes_for_types: + if super_type in subtypes_for_types[other_type]: + subtypes_for_types[other_type].update(subtypes_for_types[super_type]) + + # get and read csv file containing information on schema.org properties + # switch to schemaorg-current-https-properties.csv on change of standard context in HERMES + download = requests.get("https://schema.org/version/latest/schemaorg-current-http-properties.csv") + decoded_content = download.content.decode('utf-8') + cr = csv.reader(decoded_content.splitlines(), delimiter=',') + # remove the first line (headers) + property_table = list(cr)[1:] + strategies = {} + # add the strategies for all properties to all types they can occur in + for property_row in property_table: + # generate a set of all types this property can have values of + shallow_range_types = set(property_row[7].split(", ")) if property_row[7] != "" else set() + range_types = shallow_range_types.union( + *(subtypes_for_types.get(range_type, set()) for range_type in shallow_range_types) + ) + # get all special types this property can have values of + special_range_types = special_types.intersection(range_types) + # if there is a special range type this property needs a special match function + if len(special_range_types) != 0: + # construct the match function + match_function = MergeSet(match_multiple_types( + *((range_type, MATCH_FUNCTION_FOR_TYPE[range_type]) for range_type in special_range_types), + fall_back_function=DEFAULT_MATCH + )) + # iterate over a set of all types this property can occur in + shallow_domain_types = set(property_row[6].split(", ")) if property_row[6] != "" else set() + for domain_type in shallow_domain_types.union( + *(subtypes_for_types.get(domain_type, set()) for domain_type in shallow_domain_types) + ): + # add the match function to the types match functions + strategies.setdefault(domain_type, {})[property_row[0]] = match_function + # return the strategies + return strategies + + @classmethod + def get_codemeta_strategies(cls): + # FIXME: implement + return {} diff --git a/test/hermes_test/commands/process/test_process.py b/test/hermes_test/commands/process/test_process.py index 24fe6d4c..92a8b35b 100644 --- a/test/hermes_test/commands/process/test_process.py +++ b/test/hermes_test/commands/process/test_process.py @@ -17,31 +17,39 @@ [ ( { - "cff": SoftwareMetadata({ + "cff": SoftwareMetadata( + { + "@type": ["http://schema.org/SoftwareSourceCode"], + "http://schema.org/description": [{"@value": "for testing"}], + "http://schema.org/name": [{"@value": "Test"}], + "http://schema.org/author": [ + { + "@type": "http://schema.org/Person", + "http://schema.org/familyName": [{"@value": "Test"}], + "http://schema.org/givenName": [{"@value": "Testi"}], + } + ], + "http://schema.org/license": [{"@id": "https://spdx.org/licenses/Apache-2.0"}], + } + ) + }, + SoftwareMetadata( + { "@type": ["http://schema.org/SoftwareSourceCode"], "http://schema.org/description": [{"@value": "for testing"}], "http://schema.org/name": [{"@value": "Test"}], - "http://schema.org/author": [{ - "@type": "http://schema.org/Person", - "http://schema.org/familyName": [{"@value": "Test"}], - "http://schema.org/givenName": [{"@value": "Testi"}] - }], - "http://schema.org/license": [{"@id": "https://spdx.org/licenses/Apache-2.0"}] - }) - }, - SoftwareMetadata({ - "@type": ["http://schema.org/SoftwareSourceCode"], - "http://schema.org/description": [{"@value": "for testing"}], - "http://schema.org/name": [{"@value": "Test"}], - "http://schema.org/author": [{ - "@type": "http://schema.org/Person", - "http://schema.org/familyName": [{"@value": "Test"}], - "http://schema.org/givenName": [{"@value": "Testi"}] - }], - "http://schema.org/license": [{"@id": "https://spdx.org/licenses/Apache-2.0"}] - }) + "http://schema.org/author": [ + { + "@type": "http://schema.org/Person", + "http://schema.org/familyName": [{"@value": "Test"}], + "http://schema.org/givenName": [{"@value": "Testi"}], + } + ], + "http://schema.org/license": [{"@id": "https://spdx.org/licenses/Apache-2.0"}], + } + ), ) - ] + ], ) def test_process(tmp_path, monkeypatch, metadata_in, metadata_out): monkeypatch.chdir(tmp_path) @@ -57,8 +65,8 @@ def test_process(tmp_path, monkeypatch, metadata_in, metadata_out): config_file = tmp_path / "hermes.toml" config_file.write_text( - "[process]\nplugins=[\"codemeta\"]\n" - "[harvest]\nsources = [" + ", ".join('\"' + f'{harvester}' + '\"' for harvester in metadata_in) + "]" + '[process]\nplugins=["codemeta"]\n' + "[harvest]\nsources = [" + ", ".join('"' + f"{harvester}" + '"' for harvester in metadata_in) + "]" ) orig_argv = sys.argv[:] @@ -84,28 +92,149 @@ def test_process(tmp_path, monkeypatch, metadata_in, metadata_out): [ ( { - "cff": SoftwareMetadata({ + "cff": SoftwareMetadata( + { + "@type": ["http://schema.org/SoftwareSourceCode"], + "http://schema.org/name": [{"@value": "Test"}], + "http://schema.org/author": [ + { + "@type": "http://schema.org/Person", + "http://schema.org/familyName": [{"@value": "Test"}], + "http://schema.org/email": [{"@value": "test.testi@testis.tests"}], + }, + { + "@type": "http://schema.org/Person", + "http://schema.org/familyName": [{"@value": "Testers"}], + }, + { + "@type": "http://schema.org/Person", + "http://schema.org/familyName": [{"@value": "Tester"}], + "http://schema.org/email": [{"@value": "test@tester.tests"}], + }, + ], + "http://schema.org/license": [{"@id": "https://spdx.org/licenses/Apache-2.0"}], + } + ), + "codemeta": SoftwareMetadata( + { + "@type": ["http://schema.org/SoftwareSourceCode"], + "http://schema.org/description": [{"@value": "for testing"}], + "http://schema.org/name": [{"@value": "Test"}, {"@value": "Testis Test"}], + "http://schema.org/author": [ + { + "@type": "http://schema.org/Person", + "http://schema.org/familyName": [{"@value": "Test"}], + "http://schema.org/givenName": [{"@value": "Testi"}], + "http://schema.org/email": [ + {"@value": "test.testi@testis.tests"}, + {"@value": "test.testi@testis.tests2"}, + ], + }, + { + "@type": "http://schema.org/Person", + "http://schema.org/familyName": [{"@value": "Testers"}], + }, + ], + } + ), + }, + SoftwareMetadata( + { "@type": ["http://schema.org/SoftwareSourceCode"], - "http://schema.org/name": [{"@value": "Test"}], + "http://schema.org/description": [{"@value": "for testing"}], + "http://schema.org/name": [{"@value": "Test"}, {"@value": "Testis Test"}], "http://schema.org/author": [ { "@type": "http://schema.org/Person", "http://schema.org/familyName": [{"@value": "Test"}], - "http://schema.org/email": [{"@value": "test.testi@testis.tests"}] - }, - { - "@type": "http://schema.org/Person", - "http://schema.org/familyName": [{"@value": "Testers"}] + "http://schema.org/givenName": [{"@value": "Testi"}], + "http://schema.org/email": [ + {"@value": "test.testi@testis.tests"}, + {"@value": "test.testi@testis.tests2"}, + ], }, + {"@type": "http://schema.org/Person", "http://schema.org/familyName": [{"@value": "Testers"}]}, { "@type": "http://schema.org/Person", "http://schema.org/familyName": [{"@value": "Tester"}], - "http://schema.org/email": [{"@value": "test@tester.tests"}] - } + "http://schema.org/email": [{"@value": "test@tester.tests"}], + }, ], - "http://schema.org/license": [{"@id": "https://spdx.org/licenses/Apache-2.0"}] - }), - "codemeta": SoftwareMetadata({ + "http://schema.org/license": [{"@id": "https://spdx.org/licenses/Apache-2.0"}], + } + ), + ), + ( + { + "python": SoftwareMetadata( + { + "@type": ["http://schema.org/SoftwareSourceCode"], + "http://schema.org/author": [ + { + "@type": "http://schema.org/Person", + "http://schema.org/familyName": [{"@value": "Testers"}], + }, + { + "@type": "http://schema.org/Person", + "http://schema.org/familyName": [{"@value": "Tester"}], + "http://schema.org/email": [{"@value": "test@tester.tests"}], + }, + { + "@type": "http://schema.org/Person", + "http://schema.org/familyName": [{"@value": "Testis"}], + "http://schema.org/email": [{"@value": "testis.testis@tester.tests"}], + }, + ], + } + ), + "cff": SoftwareMetadata( + { + "@type": ["http://schema.org/SoftwareSourceCode"], + "http://schema.org/name": [{"@value": "Test"}], + "http://schema.org/author": [ + { + "@type": "http://schema.org/Person", + "http://schema.org/familyName": [{"@value": "Test"}], + "http://schema.org/email": [{"@value": "test.testi@testis.tests"}], + }, + { + "@type": "http://schema.org/Person", + "http://schema.org/familyName": [{"@value": "Testers"}], + }, + { + "@type": "http://schema.org/Person", + "http://schema.org/familyName": [{"@value": "Tester"}], + "http://schema.org/email": [{"@value": "test@tester.tests"}], + }, + ], + "http://schema.org/license": [{"@id": "https://spdx.org/licenses/Apache-2.0"}], + } + ), + "codemeta": SoftwareMetadata( + { + "@type": ["http://schema.org/SoftwareSourceCode"], + "http://schema.org/description": [{"@value": "for testing"}], + "http://schema.org/name": [{"@value": "Test"}, {"@value": "Testis Test"}], + "http://schema.org/author": [ + { + "@type": "http://schema.org/Person", + "http://schema.org/familyName": [{"@value": "Test"}], + "http://schema.org/givenName": [{"@value": "Testi"}], + "http://schema.org/email": [ + {"@value": "test.testi@testis.tests"}, + {"@value": "test.testi@testis.tests2"}, + ], + }, + { + "@type": "http://schema.org/Person", + "http://schema.org/familyName": [{"@value": "Testers"}], + }, + ], + } + ), + }, + SoftwareMetadata( + { "@type": ["http://schema.org/SoftwareSourceCode"], "http://schema.org/description": [{"@value": "for testing"}], "http://schema.org/name": [{"@value": "Test"}, {"@value": "Testis Test"}], @@ -116,44 +245,26 @@ def test_process(tmp_path, monkeypatch, metadata_in, metadata_out): "http://schema.org/givenName": [{"@value": "Testi"}], "http://schema.org/email": [ {"@value": "test.testi@testis.tests"}, - {"@value": "test.testi@testis.tests2"} - ] + {"@value": "test.testi@testis.tests2"}, + ], }, + {"@type": "http://schema.org/Person", "http://schema.org/familyName": [{"@value": "Testers"}]}, { "@type": "http://schema.org/Person", - "http://schema.org/familyName": [{"@value": "Testers"}] - } - ] - }) - }, - SoftwareMetadata({ - "@type": ["http://schema.org/SoftwareSourceCode"], - "http://schema.org/description": [{"@value": "for testing"}], - "http://schema.org/name": [{"@value": "Test"}, {"@value": "Testis Test"}], - "http://schema.org/author": [ - { - "@type": "http://schema.org/Person", - "http://schema.org/familyName": [{"@value": "Test"}], - "http://schema.org/givenName": [{"@value": "Testi"}], - "http://schema.org/email": [ - {"@value": "test.testi@testis.tests"}, - {"@value": "test.testi@testis.tests2"} - ] - }, - { - "@type": "http://schema.org/Person", - "http://schema.org/familyName": [{"@value": "Testers"}] - }, - { - "@type": "http://schema.org/Person", - "http://schema.org/familyName": [{"@value": "Tester"}], - "http://schema.org/email": [{"@value": "test@tester.tests"}] - } - ], - "http://schema.org/license": [{"@id": "https://spdx.org/licenses/Apache-2.0"}] - }) - ) - ] + "http://schema.org/familyName": [{"@value": "Tester"}], + "http://schema.org/email": [{"@value": "test@tester.tests"}], + }, + { + "@type": "http://schema.org/Person", + "http://schema.org/familyName": [{"@value": "Testis"}], + "http://schema.org/email": [{"@value": "testis.testis@tester.tests"}], + }, + ], + "http://schema.org/license": [{"@id": "https://spdx.org/licenses/Apache-2.0"}], + } + ), + ), + ], ) def test_process_complex(tmp_path, monkeypatch, metadata_in, metadata_out): monkeypatch.chdir(tmp_path) @@ -169,8 +280,8 @@ def test_process_complex(tmp_path, monkeypatch, metadata_in, metadata_out): config_file = tmp_path / "hermes.toml" config_file.write_text( - "[process]\nplugins=[\"codemeta\"]\n" - "[harvest]\nsources = [" + ", ".join('\"' + f'{harvester}' + '\"' for harvester in metadata_in) + "]" + '[process]\nplugins=["codemeta"]\n' + "[harvest]\nsources = [" + ", ".join('"' + f"{harvester}" + '"' for harvester in metadata_in) + "]" ) orig_argv = sys.argv[:] From c699ae212bb70d1a64f88480d66cb6928beee102 Mon Sep 17 00:00:00 2001 From: notactuallyfinn Date: Thu, 26 Mar 2026 11:02:43 +0100 Subject: [PATCH 229/247] finished implementation of CodemetaProcessPlugin --- src/hermes/commands/process/standard_merge.py | 55 ++++++++++++++++--- 1 file changed, 46 insertions(+), 9 deletions(-) diff --git a/src/hermes/commands/process/standard_merge.py b/src/hermes/commands/process/standard_merge.py index 97e94404..eacc077a 100644 --- a/src/hermes/commands/process/standard_merge.py +++ b/src/hermes/commands/process/standard_merge.py @@ -862,8 +862,9 @@ def match_func(left: Any, right: Any) -> bool: class CodemetaProcessPlugin(HermesProcessPlugin): def __call__(self, command: HermesCommand) -> dict[Union[str, None], dict[Union[str, None], MergeAction]]: try: - strats = CodemetaProcessPlugin.get_schema_strategies() - strats.update(CodemetaProcessPlugin.get_codemeta_strategies()) + subtypes_for_types = CodemetaProcessPlugin.get_schema_type_hierarchy() + strats = CodemetaProcessPlugin.get_schema_strategies(subtypes_for_types) + strats.update(CodemetaProcessPlugin.get_codemeta_strategies(subtypes_for_types)) strats[None] = {None: MergeSet(DEFAULT_MATCH)} except Exception: strats = {**CODEMETA_STRATEGY} @@ -872,10 +873,7 @@ def __call__(self, command: HermesCommand) -> dict[Union[str, None], dict[Union[ return strats @classmethod - def get_schema_strategies(cls): - # get a set of all types that have to be handled separately - special_types = set(MATCH_FUNCTION_FOR_TYPE.keys()) - + def get_schema_type_hierarchy(cls): # get and read csv file containing information on schema.org types # switch to schemaorg-current-https-types.csv on change of standard context in HERMES download = requests.get("https://schema.org/version/latest/schemaorg-current-http-types.csv") @@ -897,6 +895,12 @@ def get_schema_strategies(cls): for other_type in subtypes_for_types: if super_type in subtypes_for_types[other_type]: subtypes_for_types[other_type].update(subtypes_for_types[super_type]) + return subtypes_for_types + + @classmethod + def get_schema_strategies(cls, subtypes_for_types): + # get a set of all types that have to be handled separately + special_types = set(MATCH_FUNCTION_FOR_TYPE.keys()) # get and read csv file containing information on schema.org properties # switch to schemaorg-current-https-properties.csv on change of standard context in HERMES @@ -933,6 +937,39 @@ def get_schema_strategies(cls): return strategies @classmethod - def get_codemeta_strategies(cls): - # FIXME: implement - return {} + def get_codemeta_strategies(cls, subtypes_for_types): + # get a set of all types that have to be handled separately + special_types = set(MATCH_FUNCTION_FOR_TYPE.keys()) + + # FIXME: change URL on change of context to codemeta 3.0 + download = requests.get("https://github.com/codemeta/codemeta/blob/2.0/crosswalk.csv") + decoded_content = download.content.decode('utf-8') + cr = csv.reader(decoded_content.splitlines(), delimiter=',') + # remove the first line (headers) + property_table = list(cr)[1:] + strategies = {} + for property_row in property_table: + if property_row[0] == "schema" or len(property_row[0]) == 0: + # skip empty rows + continue + # generate a set of all types this property can have values of + shallow_range_types = set(iri["schema:" + range_type] for range_type in property_row[2].split(" or ")) + range_types = shallow_range_types.union( + *(subtypes_for_types.get(range_type, set()) for range_type in shallow_range_types) + ) + # get all special types this property can have values of + special_range_types = special_types.intersection(range_types) + # if there is a special range type this property needs a special match function + if len(special_range_types) != 0: + # construct the match function + match_function = MergeSet(match_multiple_types( + *((range_type, MATCH_FUNCTION_FOR_TYPE[range_type]) for range_type in special_range_types), + fall_back_function=DEFAULT_MATCH + )) + # iterate over a set of all types this property can occur in + shallow_domain_type = {iri[property_row[0]]} + for domain_type in shallow_domain_type.union(subtypes_for_types.get(shallow_domain_type, set())): + # add the match function to the types match functions + strategies.setdefault(domain_type, {})[iri[property_row[1]]] = match_function + # return the strategies + return strategies From d514c9f278ad0cb77c56bf53bac26173d789f1c7 Mon Sep 17 00:00:00 2001 From: notactuallyfinn Date: Thu, 26 Mar 2026 11:32:45 +0100 Subject: [PATCH 230/247] implement more comments --- src/hermes/commands/deposit/base.py | 2 +- src/hermes/commands/deposit/invenio.py | 4 ++++ test/hermes_test/commands/deposit/test_invenio_e2e.py | 2 +- 3 files changed, 6 insertions(+), 2 deletions(-) diff --git a/src/hermes/commands/deposit/base.py b/src/hermes/commands/deposit/base.py index 2d26df51..0ae39536 100644 --- a/src/hermes/commands/deposit/base.py +++ b/src/hermes/commands/deposit/base.py @@ -154,5 +154,5 @@ def __call__(self, args: argparse.Namespace) -> None: except HermesValidationError as e: self.log.error(f"Error while executing {plugin_name}: {e}") raise HermesPluginRunError( - f"Something went wrong while running the curate plugin {self.settings.plugin}" + f"Something went wrong while running the deposit plugin {self.settings.plugin}" ) from e diff --git a/src/hermes/commands/deposit/invenio.py b/src/hermes/commands/deposit/invenio.py index fb4e05c1..a93c2d85 100644 --- a/src/hermes/commands/deposit/invenio.py +++ b/src/hermes/commands/deposit/invenio.py @@ -20,6 +20,7 @@ from hermes.commands.deposit.error import DepositionUnauthorizedError from hermes.error import MisconfigurationError from hermes.model.error import HermesValidationError +from hermes.model.types import ld_dict from hermes.utils import hermes_doi, hermes_user_agent @@ -209,6 +210,9 @@ def resolve_license_id(self, license_url: Union[str, None]) -> Union[str, None]: if license_url is None: return None + if isinstance(license_url, (dict, ld_dict)) and [*license_url.keys()] == ["@id"]: + license_url = license_url["@id"] + if not isinstance(license_url, str): raise RuntimeError( "The given license in CodeMeta must be of type str. " diff --git a/test/hermes_test/commands/deposit/test_invenio_e2e.py b/test/hermes_test/commands/deposit/test_invenio_e2e.py index 115042a6..82148bee 100644 --- a/test/hermes_test/commands/deposit/test_invenio_e2e.py +++ b/test/hermes_test/commands/deposit/test_invenio_e2e.py @@ -37,7 +37,7 @@ def sandbox_auth(): "http://schema.org/familyName": [{"@value": "Test"}], "http://schema.org/givenName": [{"@value": "Testi"}] }], - "http://schema.org/license": ["https://spdx.org/licenses/Apache-2.0"] + "http://schema.org/license": [{"@id": "https://spdx.org/licenses/Apache-2.0"}] }), { "upload_type": "software", From ba8b5496d0e25015d9908c2ab2d09debeda3f76b Mon Sep 17 00:00:00 2001 From: notactuallyfinn Date: Thu, 26 Mar 2026 12:08:42 +0100 Subject: [PATCH 231/247] add codemeta_doi postprocess plugin --- pyproject.toml | 1 + src/hermes/commands/postprocess/invenio.py | 26 +++++++++++++++++++ .../postprocess/test_invenio_postprocess.py | 26 +++++++++++++++++-- 3 files changed, 51 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index bcc4dc3c..fba0299a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -70,6 +70,7 @@ rodare = "hermes.commands.deposit.rodare:RodareDepositPlugin" config_invenio_record_id = "hermes.commands.postprocess.invenio:config_record_id" config_invenio_rdm_record_id = "hermes.commands.postprocess.invenio_rdm:config_record_id" cff_doi = "hermes.commands.postprocess.invenio:cff_doi" +codemeta_doi = "hermes.commands.postprocess.invenio:codemeta_doi" [project.entry-points."hermes.process"] codemeta = "hermes.commands.process.standard_merge:CodemetaProcessPlugin" diff --git a/src/hermes/commands/postprocess/invenio.py b/src/hermes/commands/postprocess/invenio.py index 2fbbc713..f90d9714 100644 --- a/src/hermes/commands/postprocess/invenio.py +++ b/src/hermes/commands/postprocess/invenio.py @@ -6,6 +6,7 @@ # SPDX-FileContributor: Michael Fritzsche # SPDX-FileContributor: Stephan Druskat +import json import logging from ruamel.yaml import YAML @@ -73,3 +74,28 @@ def __call__(self, command: HermesCommand): yaml.dump(cff, open('CITATION.cff', 'w')) except Exception as e: raise RuntimeError("Update of CITATION.cff failed.") from e + + +class codemeta_doi(HermesPostprocessPlugin): + def __call__(self, command: HermesCommand): + ctx = HermesContext() + ctx.prepare_step("deposit") + with ctx["invenio"] as manager: + deposition = manager["result"] + ctx.finalize_step("deposit") + + try: + with open("codemeta.json", "r") as file: + codemeta = json.load(file) + if "@id" not in codemeta: + codemeta["@id"] = deposition['doi'] + if "referencePublication" not in codemeta: + codemeta["referencePublication"] = deposition['doi'] + elif isinstance(codemeta["referencePublication"], list): + codemeta["referencePublication"].append(deposition['doi']) + else: + codemeta["referencePublication"] = [codemeta["referencePublication"], deposition['doi']] + with open("codemeta.json", "w") as file: + json.dump(codemeta, file) + except Exception as e: + raise RuntimeError("Update of CITATION.cff failed.") from e diff --git a/test/hermes_test/commands/postprocess/test_invenio_postprocess.py b/test/hermes_test/commands/postprocess/test_invenio_postprocess.py index 091666f2..00688dcb 100644 --- a/test/hermes_test/commands/postprocess/test_invenio_postprocess.py +++ b/test/hermes_test/commands/postprocess/test_invenio_postprocess.py @@ -4,6 +4,7 @@ # SPDX-FileContributor: Michael Fritzsche +import json import sys from ruamel import yaml @@ -23,10 +24,20 @@ def test_invenio_postprocess(tmp_path, monkeypatch): citation_file = tmp_path / "CITATION.cff" citation_file.write_text("cff-version: 1.2.0\ntitle: Test") + codemeta_file = tmp_path / "codemeta.json" + codemeta_file.write_text( + """{ + "@context": "https://doi.org/10.5063/schema/codemeta-2.0", + "type": "SoftwareSourceCode", + "name": "Test" +} +""" + ) + config_file = tmp_path / "hermes.toml" config_file.write_text( """[postprocess] -run = ["config_invenio_record_id", "cff_doi"] +run = ["config_invenio_record_id", "cff_doi", "codemeta_doi"] [deposit.invenio] site_url = "https://zenodo.org" """ @@ -45,11 +56,12 @@ def test_invenio_postprocess(tmp_path, monkeypatch): finally: result_toml = toml.load(config_file) result_cff = yaml.YAML().load(citation_file) + result_codemeta = json.loads(codemeta_file.read_text()) sys.argv = orig_argv assert result_toml == toml.loads( """[postprocess] -run = ["config_invenio_record_id", "cff_doi"] +run = ["config_invenio_record_id", "cff_doi", "codemeta_doi"] [deposit.invenio] site_url = "https://zenodo.org" record_id = "foo" @@ -64,3 +76,13 @@ def test_invenio_postprocess(tmp_path, monkeypatch): description: DOI for the published version 1.0.0 [generated by hermes] """ ) + assert result_codemeta == json.loads( + """{ + "@context": "https://doi.org/10.5063/schema/codemeta-2.0", + "type": "SoftwareSourceCode", + "@id": "my_doi", + "name": "Test", + "referencePublication": "my_doi" +} +""" + ) \ No newline at end of file From 9b1c48a31e6338dfd2a3fa8e4b1646e7755838ce Mon Sep 17 00:00:00 2001 From: notactuallyfinn Date: Thu, 26 Mar 2026 13:29:12 +0100 Subject: [PATCH 232/247] reworked zenodo_sandbox_auth for cli testing --- conftest.py | 3 +++ test/hermes_test/commands/deposit/test_invenio_e2e.py | 10 ++++------ .../commands/postprocess/test_invenio_postprocess.py | 2 +- 3 files changed, 8 insertions(+), 7 deletions(-) create mode 100644 conftest.py diff --git a/conftest.py b/conftest.py new file mode 100644 index 00000000..71115aea --- /dev/null +++ b/conftest.py @@ -0,0 +1,3 @@ +# add option to pass zenodo sandbox auth token to pytest to run ./test/hermes_test/commands/deposit/test_invenio_e2e.py +def pytest_addoption(parser): + parser.addoption("--sandbox_auth", action="store", default=None) diff --git a/test/hermes_test/commands/deposit/test_invenio_e2e.py b/test/hermes_test/commands/deposit/test_invenio_e2e.py index 82148bee..52d33ba6 100644 --- a/test/hermes_test/commands/deposit/test_invenio_e2e.py +++ b/test/hermes_test/commands/deposit/test_invenio_e2e.py @@ -5,7 +5,6 @@ # SPDX-FileContributor: Michael Fritzsche from datetime import date -from pathlib import Path import sys import pytest @@ -16,12 +15,11 @@ @pytest.fixture -def sandbox_auth(): - path = Path("./../auth.txt") - if not path.exists(): +def sandbox_auth(pytestconfig): + if pytestconfig.getoption("sandbox_auth"): + yield pytestconfig.getoption("sandbox_auth") + else: pytest.skip("Local auth token file does not exist.") - with path.open() as f: - yield f.read() @pytest.mark.parametrize( diff --git a/test/hermes_test/commands/postprocess/test_invenio_postprocess.py b/test/hermes_test/commands/postprocess/test_invenio_postprocess.py index 00688dcb..93c64536 100644 --- a/test/hermes_test/commands/postprocess/test_invenio_postprocess.py +++ b/test/hermes_test/commands/postprocess/test_invenio_postprocess.py @@ -85,4 +85,4 @@ def test_invenio_postprocess(tmp_path, monkeypatch): "referencePublication": "my_doi" } """ - ) \ No newline at end of file + ) From 55e086d52adb80a7bd8ef35378072eeddced98ca Mon Sep 17 00:00:00 2001 From: notactuallyfinn Date: Thu, 26 Mar 2026 14:23:54 +0100 Subject: [PATCH 233/247] fix post process and add license header to conftest.py --- conftest.py | 6 +++ src/hermes/commands/postprocess/invenio.py | 6 +-- .../commands/postprocess/invenio_rdm.py | 6 +-- .../postprocess/test_invenio_postprocess.py | 51 ++++++++++++++++--- 4 files changed, 56 insertions(+), 13 deletions(-) diff --git a/conftest.py b/conftest.py index 71115aea..293f0afd 100644 --- a/conftest.py +++ b/conftest.py @@ -1,3 +1,9 @@ +# SPDX-FileCopyrightText: 2026 German Aerospace Center (DLR) +# +# SPDX-License-Identifier: Apache-2.0 + +# SPDX-FileContributor: Michael Fritzsche + # add option to pass zenodo sandbox auth token to pytest to run ./test/hermes_test/commands/deposit/test_invenio_e2e.py def pytest_addoption(parser): parser.addoption("--sandbox_auth", action="store", default=None) diff --git a/src/hermes/commands/postprocess/invenio.py b/src/hermes/commands/postprocess/invenio.py index f90d9714..5c0de3e6 100644 --- a/src/hermes/commands/postprocess/invenio.py +++ b/src/hermes/commands/postprocess/invenio.py @@ -10,7 +10,7 @@ import logging from ruamel.yaml import YAML -import toml +import tomlkit from hermes.error import MisconfigurationError from hermes.model.context_manager import HermesContext @@ -29,7 +29,7 @@ def __call__(self, command: HermesCommand): deposition = manager["result"] ctx.finalize_step("deposit") - conf = toml.load(open('hermes.toml', 'r')) + conf = tomlkit.load(open('hermes.toml', 'r')) try: old_record_id = conf["deposit"]["invenio"]["record_id"] if old_record_id == deposition["record_id"]: @@ -42,7 +42,7 @@ def __call__(self, command: HermesCommand): except KeyError: pass conf.setdefault("deposit", {}).setdefault("invenio", {})["record_id"] = deposition['record_id'] - toml.dump(conf, open('hermes.toml', 'w')) + tomlkit.dump(conf, open('hermes.toml', 'w')) class cff_doi(HermesPostprocessPlugin): diff --git a/src/hermes/commands/postprocess/invenio_rdm.py b/src/hermes/commands/postprocess/invenio_rdm.py index 3c6cb4a7..afee8dd2 100644 --- a/src/hermes/commands/postprocess/invenio_rdm.py +++ b/src/hermes/commands/postprocess/invenio_rdm.py @@ -8,7 +8,7 @@ import logging -import toml +import tomlkit from hermes.error import MisconfigurationError from hermes.model.context_manager import HermesContext @@ -27,7 +27,7 @@ def __call__(self, command: HermesCommand): deposition = manager["result"] ctx.finalize_step("deposit") - conf = toml.load(open('hermes.toml', 'r')) + conf = tomlkit.load(open('hermes.toml', 'r')) try: old_record_id = conf["deposit"]["invenio_rdm"]["record_id"] if old_record_id == deposition["record_id"]: @@ -40,4 +40,4 @@ def __call__(self, command: HermesCommand): except KeyError: pass conf.setdefault("deposit", {}).setdefault("invenio_rdm", {})["record_id"] = deposition['record_id'] - toml.dump(conf, open('hermes.toml', 'w')) + tomlkit.dump(conf, open('hermes.toml', 'w')) diff --git a/test/hermes_test/commands/postprocess/test_invenio_postprocess.py b/test/hermes_test/commands/postprocess/test_invenio_postprocess.py index 93c64536..8ba4efc2 100644 --- a/test/hermes_test/commands/postprocess/test_invenio_postprocess.py +++ b/test/hermes_test/commands/postprocess/test_invenio_postprocess.py @@ -36,16 +36,34 @@ def test_invenio_postprocess(tmp_path, monkeypatch): config_file = tmp_path / "hermes.toml" config_file.write_text( - """[postprocess] -run = ["config_invenio_record_id", "cff_doi", "codemeta_doi"] + """# SPDX-FileCopyrightText: 2023 German Aerospace Center (DLR) +# +# SPDX-License-Identifier: CC0-1.0 + +[harvest] +sources = [ "cff", "toml" ] # ordered priority (first one is most important) + +[curate] +plugin = "pass_curate" + +[deposit] +target = "invenio" + [deposit.invenio] -site_url = "https://zenodo.org" +site_url = "https://sandbox.zenodo.org" + +[deposit.invenio.api_paths] +depositions = "api/deposit/depositions" +licenses = "api/vocabularies/licenses" +communities = "api/communities" + +[postprocess] +run = ["config_invenio_record_id", "cff_doi", "codemeta_doi"] """ ) orig_argv = sys.argv[:] sys.argv = ["hermes", "postprocess", "--path", str(tmp_path), "--config", str(config_file)] - print(" ".join(sys.argv)) result_cff = result_toml = {} try: monkeypatch.setattr(context_manager.HermesContext.__init__, "__defaults__", (tmp_path.cwd(),)) @@ -60,11 +78,30 @@ def test_invenio_postprocess(tmp_path, monkeypatch): sys.argv = orig_argv assert result_toml == toml.loads( - """[postprocess] -run = ["config_invenio_record_id", "cff_doi", "codemeta_doi"] + """# SPDX-FileCopyrightText: 2023 German Aerospace Center (DLR) +# +# SPDX-License-Identifier: CC0-1.0 + +[harvest] +sources = [ "cff", "toml" ] # ordered priority (first one is most important) + +[curate] +plugin = "pass_curate" + +[deposit] +target = "invenio" + [deposit.invenio] -site_url = "https://zenodo.org" +site_url = "https://sandbox.zenodo.org" record_id = "foo" + +[deposit.invenio.api_paths] +depositions = "api/deposit/depositions" +licenses = "api/vocabularies/licenses" +communities = "api/communities" + +[postprocess] +run = ["config_invenio_record_id", "cff_doi", "codemeta_doi"] """ ) assert result_cff == yaml.YAML().load( From 9d770a19cae01394b178e439128ce820227278e8 Mon Sep 17 00:00:00 2001 From: notactuallyfinn Date: Thu, 26 Mar 2026 15:31:51 +0100 Subject: [PATCH 234/247] improve error handling of commands --- src/hermes/commands/harvest/base.py | 19 ++++++++------- src/hermes/commands/postprocess/base.py | 16 ++++++++----- src/hermes/commands/process/base.py | 24 +++++++++++-------- .../commands/deposit/test_invenio_e2e.py | 2 +- 4 files changed, 36 insertions(+), 25 deletions(-) diff --git a/src/hermes/commands/harvest/base.py b/src/hermes/commands/harvest/base.py index b9fcd573..c526b330 100644 --- a/src/hermes/commands/harvest/base.py +++ b/src/hermes/commands/harvest/base.py @@ -9,7 +9,6 @@ from pydantic import BaseModel from hermes.commands.base import HermesCommand, HermesPlugin -from hermes.error import HermesPluginRunError, MisconfigurationError from hermes.model.context_manager import HermesContext from hermes.model import SoftwareMetadata @@ -42,33 +41,37 @@ def __call__(self, args: argparse.Namespace) -> None: if len(self.settings.sources) == 0: self.log.info("# No plugin was configured to be run and loaded.") + return # Initialize the harvest cache directory here to indicate the step ran ctx = HermesContext() ctx.prepare_step('harvest') self.log.info("## Load and run the plugins") + harvested_any = False for plugin_name in self.settings.sources: self.log.info(f"### Load {plugin_name} plugin") # load plugin try: plugin_func = self.plugins[plugin_name]() except KeyError: - self.log.error(f"Plugin {plugin_name} not found.") - raise MisconfigurationError(f"Harvest plugin {plugin_name} not found.") + self.log.warning(f"Plugin {plugin_name} not found, skipping it now.") + continue self.log.info(f"### Run {plugin_name} plugin") # run plugin try: harvested_data = plugin_func(self) - except Exception as e: - self.log.error(f"Unknown error while executing the {plugin_name} plugin.") - raise HermesPluginRunError( - f"Something went wrong while running the harvest plugin {plugin_name}" - ) from e + except Exception: + self.log.warning(f"Unknown error while executing the {plugin_name} plugin, skipping it now.") + continue self.log.info(f"### Store metadata harvested by {plugin_name} plugin") # store harvested data harvested_data.write_to_cache(ctx, plugin_name) + harvested_any = True ctx.finalize_step('harvest') + if not harvested_any: + self.log.error("No harvest plugin ran successfully.") + raise RuntimeError("No harvest plugin ran successfully.") diff --git a/src/hermes/commands/postprocess/base.py b/src/hermes/commands/postprocess/base.py index 776576b1..becda233 100644 --- a/src/hermes/commands/postprocess/base.py +++ b/src/hermes/commands/postprocess/base.py @@ -10,7 +10,6 @@ from pydantic import BaseModel from hermes.commands.base import HermesCommand, HermesPlugin -from hermes.error import HermesPluginRunError, MisconfigurationError class HermesPostprocessPlugin(HermesPlugin): @@ -38,6 +37,7 @@ def __call__(self, args: argparse.Namespace) -> None: plugin_names = self.settings.run self.log.info("## Load and run the plugins") + ran_any = False for plugin_name in plugin_names: self.log.info(f"### Load {plugin_name} plugin") # load plugin @@ -45,14 +45,18 @@ def __call__(self, args: argparse.Namespace) -> None: plugin_func = self.plugins[plugin_name]() except KeyError: self.log.error(f"Plugin {plugin_name} not found.") - raise MisconfigurationError(f"Postprocess plugin {plugin_name} not found.") + continue self.log.info(f"### Run {plugin_name} plugin") # run plugin try: plugin_func(self) - except Exception as e: + except Exception: self.log.error(f"Unknown error while executing the {plugin_name} plugin.") - raise HermesPluginRunError( - f"Something went wrong while running the postprocess plugin {plugin_name}" - ) from e + continue + + ran_any = True + + if not ran_any: + self.log.error("No postprocess plugin ran successfully.") + raise RuntimeError("No postprocess plugin ran successfully.") diff --git a/src/hermes/commands/process/base.py b/src/hermes/commands/process/base.py index a1b1504f..9a7e196a 100644 --- a/src/hermes/commands/process/base.py +++ b/src/hermes/commands/process/base.py @@ -10,7 +10,6 @@ from pydantic import BaseModel from hermes.commands.base import HermesCommand, HermesPlugin -from hermes.error import HermesPluginRunError, MisconfigurationError from hermes.model.api import SoftwareMetadata from hermes.model.context_manager import HermesContext from hermes.model.merge.action import MergeAction @@ -28,7 +27,7 @@ class ProcessSettings(BaseModel): """Generic deposition settings.""" sources: list = [] - plugins: list = [] + plugins: list = ["codemeta"] class HermesProcessCommand(HermesCommand): @@ -43,6 +42,7 @@ def __call__(self, args: argparse.Namespace) -> None: merged_doc = ld_merge_dict([{}]) self.log.info("## Load and run the plugins") + any_strategies_loaded = False # add the strategies from the plugins for plugin_name in reversed(self.settings.plugins): self.log.info(f"### Load {plugin_name} plugin") @@ -50,22 +50,25 @@ def __call__(self, args: argparse.Namespace) -> None: try: plugin_func = self.plugins[plugin_name]() except KeyError: - self.log.error(f"Plugin {plugin_name} not found.") - raise MisconfigurationError(f"Postprocess plugin {plugin_name} not found.") + self.log.warning(f"Plugin {plugin_name} not found, skipping it now.") + continue self.log.info(f"### Run {plugin_name} plugin") # run plugin try: additional_strategies = plugin_func(self) - except Exception as e: - self.log.error(f"Unknown error while executing the {plugin_name} plugin.") - raise HermesPluginRunError( - f"Something went wrong while running the postprocess plugin {plugin_name}" - ) from e + except Exception: + self.log.warning(f"Unknown error while executing the {plugin_name} plugin, skipping it now.") + continue self.log.info(f"### Add the strategies to the merge document {plugin_name} plugin") # add strategies to the merge document merged_doc.add_strategy(additional_strategies) + any_strategies_loaded = True + + if not any_strategies_loaded: + self.log.error("No process plugin was ran successfully.") + raise RuntimeError("No process plugin was ran successfully.") ctx = HermesContext() ctx.prepare_step('harvest') @@ -91,13 +94,14 @@ def __call__(self, args: argparse.Namespace) -> None: merged_any = True # error if nothing was merged - if not merged_any: + if harvester_names and not merged_any: self.log.error( f"""No metadata has been merged. { "No harvesters to merge from were supplied" if not harvester_names else "The merging failed for all harvesters." }""" ) + raise RuntimeError("No metadata has been merged.") self.log.info("## Store processed metadata") # store processed data diff --git a/test/hermes_test/commands/deposit/test_invenio_e2e.py b/test/hermes_test/commands/deposit/test_invenio_e2e.py index 52d33ba6..f28ad862 100644 --- a/test/hermes_test/commands/deposit/test_invenio_e2e.py +++ b/test/hermes_test/commands/deposit/test_invenio_e2e.py @@ -19,7 +19,7 @@ def sandbox_auth(pytestconfig): if pytestconfig.getoption("sandbox_auth"): yield pytestconfig.getoption("sandbox_auth") else: - pytest.skip("Local auth token file does not exist.") + pytest.skip("No auth token was supplied. Hint: Supply it with --sandbox_auth your_token") @pytest.mark.parametrize( From 180cc10e217fb761e4977cd0f55c6fb6455ba36b Mon Sep 17 00:00:00 2001 From: notactuallyfinn Date: Thu, 26 Mar 2026 15:37:54 +0100 Subject: [PATCH 235/247] make process more verbose for errors while merging --- src/hermes/commands/process/base.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/src/hermes/commands/process/base.py b/src/hermes/commands/process/base.py index 9a7e196a..7bfba796 100644 --- a/src/hermes/commands/process/base.py +++ b/src/hermes/commands/process/base.py @@ -10,6 +10,7 @@ from pydantic import BaseModel from hermes.commands.base import HermesCommand, HermesPlugin +from hermes.error import HermesPluginRunError from hermes.model.api import SoftwareMetadata from hermes.model.context_manager import HermesContext from hermes.model.merge.action import MergeAction @@ -90,7 +91,11 @@ def __call__(self, args: argparse.Namespace) -> None: self.log.info(f"## Merge data from {harvester} plugin") # merge data into the merge dict - merged_doc.update(metadata) + try: + merged_doc.update(metadata) + except Exception as e: + self.log.error(f"Merging the data from {harvester} plugin resulted in an error.") + raise HermesPluginRunError(f"Merging the data from {harvester} plugin failed.") from e merged_any = True # error if nothing was merged From 0251cd1a7c8e017ad99bda4f6a13f6b2e1029a44 Mon Sep 17 00:00:00 2001 From: notactuallyfinn Date: Thu, 26 Mar 2026 15:45:46 +0100 Subject: [PATCH 236/247] potentially fixed error where multiple record ids are halucinated. --- src/hermes/commands/deposit/invenio.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/hermes/commands/deposit/invenio.py b/src/hermes/commands/deposit/invenio.py index a93c2d85..a6a74f14 100644 --- a/src/hermes/commands/deposit/invenio.py +++ b/src/hermes/commands/deposit/invenio.py @@ -327,11 +327,11 @@ def prepare(self) -> None: tmp_rec_id, tmp_rec_meta = self.resolver.resolve_latest_id( record_id=conf_rec_id, doi=conf_doi, codemeta_identifier=codemeta_identifier ) - if tmp_rec_id is not None or tmp_rec_meta != {}: - if rec_id != tmp_rec_id or rec_meta != tmp_rec_meta: - # FIXME: Maybe finding different record ids is not fatal? - raise HermesValidationError("Found two different record ids or conflicting metadata.") + if rec_id is None and rec_meta == {}: rec_id, rec_meta = tmp_rec_id, tmp_rec_meta + elif (tmp_rec_id is not None or tmp_rec_meta != {}) and(rec_id != tmp_rec_id or rec_meta != tmp_rec_meta): + # FIXME: Maybe finding different record ids is not fatal? + raise HermesValidationError("Found two different record ids or conflicting metadata.") if len(self.metadata.get("version", [])) > 1: raise HermesValidationError("Too many licenses for invenio deposit.") From 73467f588bdd4c87fb2e8585b51c057b053e6ee1 Mon Sep 17 00:00:00 2001 From: notactuallyfinn Date: Thu, 26 Mar 2026 15:46:52 +0100 Subject: [PATCH 237/247] flake8 --- src/hermes/commands/deposit/invenio.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/hermes/commands/deposit/invenio.py b/src/hermes/commands/deposit/invenio.py index a6a74f14..79ae672f 100644 --- a/src/hermes/commands/deposit/invenio.py +++ b/src/hermes/commands/deposit/invenio.py @@ -329,7 +329,7 @@ def prepare(self) -> None: ) if rec_id is None and rec_meta == {}: rec_id, rec_meta = tmp_rec_id, tmp_rec_meta - elif (tmp_rec_id is not None or tmp_rec_meta != {}) and(rec_id != tmp_rec_id or rec_meta != tmp_rec_meta): + elif (tmp_rec_id is not None or tmp_rec_meta != {}) and (rec_id != tmp_rec_id or rec_meta != tmp_rec_meta): # FIXME: Maybe finding different record ids is not fatal? raise HermesValidationError("Found two different record ids or conflicting metadata.") From 34877b956cef710f3d993e2064e1ef4b41b47cc7 Mon Sep 17 00:00:00 2001 From: notactuallyfinn Date: Fri, 27 Mar 2026 11:57:05 +0100 Subject: [PATCH 238/247] adjusted logging a bit --- src/hermes/commands/cli.py | 21 +++++++---- src/hermes/commands/curate/base.py | 9 +++-- src/hermes/commands/deposit/base.py | 4 +- src/hermes/commands/harvest/base.py | 13 ++++--- src/hermes/commands/postprocess/base.py | 13 +++++-- src/hermes/commands/process/base.py | 49 ++++++++++++++----------- src/hermes/logger.py | 2 +- 7 files changed, 66 insertions(+), 45 deletions(-) diff --git a/src/hermes/commands/cli.py b/src/hermes/commands/cli.py index 23daae3e..68cc23e1 100644 --- a/src/hermes/commands/cli.py +++ b/src/hermes/commands/cli.py @@ -76,15 +76,20 @@ def main() -> None: log.info("Run subcommand %s", args.command.command_name) args.command(args) - except HermesPluginRunError as e: - log.error("An error occurred during the execution of a plugin %s (Find details in './hermes.log')", - args.command.command_name) - log.debug("Original exception was: %s", e) + except HermesPluginRunError: + log.critical( + "An error occurred during the execution of the %s command (Find details in './hermes.log')", + args.command.command_name, + exc_info=1 + ) sys.exit(2) - except Exception as e: - log.error("An error occurred during execution of %s (Find details in './hermes.log')", - args.command.command_name) - log.debug("Original exception was: %s", e) + except Exception: + log.critical( + "An error occurred during execution of the %s command (Find details in './hermes.log')", + args.command.command_name, + exc_info=1 + ) sys.exit(1) + log.info("Finished run of %s command successfully.", args.command.command_name) sys.exit(0) diff --git a/src/hermes/commands/curate/base.py b/src/hermes/commands/curate/base.py index 8983f8d6..51f2da08 100644 --- a/src/hermes/commands/curate/base.py +++ b/src/hermes/commands/curate/base.py @@ -47,7 +47,10 @@ def __call__(self, args: argparse.Namespace) -> None: try: metadata = SoftwareMetadata.load_from_cache(ctx, "result") except Exception as e: - self.log.error("The data from the process step could not be loaded or is invalid for some reason.") + self.log.critical( + "## The data from the process step could not be loaded or is invalid for some reason.", + exc_info=1 + ) raise HermesValidationError("The results of the process step are invalid.") from e ctx.finalize_step("process") @@ -56,7 +59,7 @@ def __call__(self, args: argparse.Namespace) -> None: try: plugin_func = self.plugins[plugin_name]() except KeyError: - self.log.error(f"Plugin {plugin_name} not found.") + self.log.error(f"## Curate plugin {plugin_name} not found.") raise MisconfigurationError(f"Curate plugin {plugin_name} not found.") self.log.info(f"## Run curation plugin {plugin_name}") @@ -64,7 +67,7 @@ def __call__(self, args: argparse.Namespace) -> None: try: curated_metadata = plugin_func(self, metadata) except Exception as e: - self.log.error(f"Unknown error while executing the {plugin_name} plugin.") + self.log.critical(f"## Unknown error while executing the {plugin_name} plugin.", exc_info=1) raise HermesPluginRunError(f"Something went wrong while running the curate plugin {plugin_name}") from e self.log.info("## Store curated data") diff --git a/src/hermes/commands/deposit/base.py b/src/hermes/commands/deposit/base.py index 0ae39536..57bed627 100644 --- a/src/hermes/commands/deposit/base.py +++ b/src/hermes/commands/deposit/base.py @@ -144,7 +144,7 @@ def __call__(self, args: argparse.Namespace) -> None: try: plugin_func = self.plugins[plugin_name]() except KeyError: - self.log.error(f"Plugin {plugin_name} not found.") + self.log.critical(f"## Deposit plugin {plugin_name} not found.") raise MisconfigurationError(f"Deposit plugin {self.settings.plugin} not found.") self.log.info(f"## Run deposit plugin {plugin_name}") @@ -152,7 +152,7 @@ def __call__(self, args: argparse.Namespace) -> None: try: plugin_func(self) except HermesValidationError as e: - self.log.error(f"Error while executing {plugin_name}: {e}") + self.log.critical(f"## Error while executing {plugin_name} plugin.", exc_info=1) raise HermesPluginRunError( f"Something went wrong while running the deposit plugin {self.settings.plugin}" ) from e diff --git a/src/hermes/commands/harvest/base.py b/src/hermes/commands/harvest/base.py index c526b330..0d3d9e5f 100644 --- a/src/hermes/commands/harvest/base.py +++ b/src/hermes/commands/harvest/base.py @@ -9,6 +9,7 @@ from pydantic import BaseModel from hermes.commands.base import HermesCommand, HermesPlugin +from hermes.error import HermesPluginRunError, MisconfigurationError from hermes.model.context_manager import HermesContext from hermes.model import SoftwareMetadata @@ -40,8 +41,8 @@ def __call__(self, args: argparse.Namespace) -> None: self.args = args if len(self.settings.sources) == 0: - self.log.info("# No plugin was configured to be run and loaded.") - return + self.log.critical("# No harvest plugin was configured to be run and loaded.") + raise MisconfigurationError("No harvest plugin was configured to be run and loaded.") # Initialize the harvest cache directory here to indicate the step ran ctx = HermesContext() @@ -55,7 +56,7 @@ def __call__(self, args: argparse.Namespace) -> None: try: plugin_func = self.plugins[plugin_name]() except KeyError: - self.log.warning(f"Plugin {plugin_name} not found, skipping it now.") + self.log.error(f"### Plugin {plugin_name} not found, skipping it now.") continue self.log.info(f"### Run {plugin_name} plugin") @@ -63,7 +64,7 @@ def __call__(self, args: argparse.Namespace) -> None: try: harvested_data = plugin_func(self) except Exception: - self.log.warning(f"Unknown error while executing the {plugin_name} plugin, skipping it now.") + self.log.exception(f"### Unknown error while executing the {plugin_name} plugin, skipping it now.") continue self.log.info(f"### Store metadata harvested by {plugin_name} plugin") @@ -73,5 +74,5 @@ def __call__(self, args: argparse.Namespace) -> None: ctx.finalize_step('harvest') if not harvested_any: - self.log.error("No harvest plugin ran successfully.") - raise RuntimeError("No harvest plugin ran successfully.") + self.log.critical("No harvest plugin ran successfully.") + raise HermesPluginRunError("No harvest plugin ran successfully.") diff --git a/src/hermes/commands/postprocess/base.py b/src/hermes/commands/postprocess/base.py index becda233..99a26d73 100644 --- a/src/hermes/commands/postprocess/base.py +++ b/src/hermes/commands/postprocess/base.py @@ -10,6 +10,7 @@ from pydantic import BaseModel from hermes.commands.base import HermesCommand, HermesPlugin +from hermes.error import HermesPluginRunError class HermesPostprocessPlugin(HermesPlugin): @@ -36,6 +37,10 @@ def __call__(self, args: argparse.Namespace) -> None: self.args = args plugin_names = self.settings.run + if not plugin_names: + self.log.warning("# No plugin was configured to be run yet the postprocess command was executed.") + return + self.log.info("## Load and run the plugins") ran_any = False for plugin_name in plugin_names: @@ -44,7 +49,7 @@ def __call__(self, args: argparse.Namespace) -> None: try: plugin_func = self.plugins[plugin_name]() except KeyError: - self.log.error(f"Plugin {plugin_name} not found.") + self.log.error(f"### Plugin {plugin_name} not found.") continue self.log.info(f"### Run {plugin_name} plugin") @@ -52,11 +57,11 @@ def __call__(self, args: argparse.Namespace) -> None: try: plugin_func(self) except Exception: - self.log.error(f"Unknown error while executing the {plugin_name} plugin.") + self.log.exception(f"### Unknown error while executing the {plugin_name} plugin.") continue ran_any = True if not ran_any: - self.log.error("No postprocess plugin ran successfully.") - raise RuntimeError("No postprocess plugin ran successfully.") + self.log.critical("## No postprocess plugin ran successfully.") + raise HermesPluginRunError("No postprocess plugin ran successfully.") diff --git a/src/hermes/commands/process/base.py b/src/hermes/commands/process/base.py index 7bfba796..725f6487 100644 --- a/src/hermes/commands/process/base.py +++ b/src/hermes/commands/process/base.py @@ -10,7 +10,7 @@ from pydantic import BaseModel from hermes.commands.base import HermesCommand, HermesPlugin -from hermes.error import HermesPluginRunError +from hermes.error import HermesPluginRunError, MisconfigurationError from hermes.model.api import SoftwareMetadata from hermes.model.context_manager import HermesContext from hermes.model.merge.action import MergeAction @@ -39,9 +39,21 @@ class HermesProcessCommand(HermesCommand): def __call__(self, args: argparse.Namespace) -> None: self.log.info("# Metadata processing") - self.args = args merged_doc = ld_merge_dict([{}]) + if not self.settings.plugins: + self.log.critical( + "# It was explicitly configured that no process plugin should be used." + " Hint: Do not configure anything to use standard 'codemeta' plugin." + ) + raise MisconfigurationError("Explicit configuration to use no process plugin.") + + # Get all harvesters + harvester_names = self.settings.sources if self.settings.sources else self.root_settings.harvest.sources + if not harvester_names: + self.log.critical("# No harvesters to merge from were configured.") + raise MisconfigurationError("No harvesters to merge from were configured.") + self.log.info("## Load and run the plugins") any_strategies_loaded = False # add the strategies from the plugins @@ -51,7 +63,7 @@ def __call__(self, args: argparse.Namespace) -> None: try: plugin_func = self.plugins[plugin_name]() except KeyError: - self.log.warning(f"Plugin {plugin_name} not found, skipping it now.") + self.log.error(f"### Plugin {plugin_name} not found, skipping it now.") continue self.log.info(f"### Run {plugin_name} plugin") @@ -59,7 +71,7 @@ def __call__(self, args: argparse.Namespace) -> None: try: additional_strategies = plugin_func(self) except Exception: - self.log.warning(f"Unknown error while executing the {plugin_name} plugin, skipping it now.") + self.log.exception(f"### Unknown error while executing the {plugin_name} plugin, skipping it now.") continue self.log.info(f"### Add the strategies to the merge document {plugin_name} plugin") @@ -68,44 +80,39 @@ def __call__(self, args: argparse.Namespace) -> None: any_strategies_loaded = True if not any_strategies_loaded: - self.log.error("No process plugin was ran successfully.") - raise RuntimeError("No process plugin was ran successfully.") + self.log.critical("## No process plugin was ran successfully.") + raise HermesPluginRunError("No process plugin was ran successfully.") ctx = HermesContext() ctx.prepare_step('harvest') + # merge data from harvesters self.log.info("## Merge the metadata of the harvesters") - # Get all harvesters - harvester_names = self.settings.sources if self.settings.sources else self.root_settings.harvest.sources merged_any = False for harvester in harvester_names: - self.log.info(f"## Load data from {harvester} plugin") + self.log.info(f"### Load data from {harvester} plugin") # load data from harvester try: metadata = SoftwareMetadata.load_from_cache(ctx, harvester) except Exception: # skip this harvester when the data is invalid - self.log.warning(f"The data from the harvester {harvester} could not be loaded or is invalid.") - self.log.info(f"## Aborting merge for {harvester}") + self.log.exception( + f"### The data from the harvester {harvester} could not be loaded or is invalid, skipping it now." + ) continue - self.log.info(f"## Merge data from {harvester} plugin") + self.log.info(f"### Merge data from {harvester} plugin") # merge data into the merge dict try: merged_doc.update(metadata) except Exception as e: - self.log.error(f"Merging the data from {harvester} plugin resulted in an error.") - raise HermesPluginRunError(f"Merging the data from {harvester} plugin failed.") from e + self.log.critical(f"### Merging the data from {harvester} plugin resulted in an error.", exc_info=True) + raise RuntimeError(f"Merging the data from {harvester} plugin failed.") from e merged_any = True # error if nothing was merged - if harvester_names and not merged_any: - self.log.error( - f"""No metadata has been merged. { - "No harvesters to merge from were supplied" if not harvester_names else - "The merging failed for all harvesters." - }""" - ) + if not merged_any: + self.log.critical("No metadata has been merged, the loading of the data failed for all harvesters.") raise RuntimeError("No metadata has been merged.") self.log.info("## Store processed metadata") diff --git a/src/hermes/logger.py b/src/hermes/logger.py index 7b6dd981..2c184f79 100644 --- a/src/hermes/logger.py +++ b/src/hermes/logger.py @@ -69,7 +69,7 @@ def init_logging(): _loggers[log_name] = logging.getLogger(log_name) -def getLogger(log_name): +def getLogger(log_name) -> logging.Logger: init_logging() if log_name not in _loggers: _loggers[log_name] = logging.getLogger(log_name) From a0c000542b3d9041c3f22b32d7fdbd6c2b3624b8 Mon Sep 17 00:00:00 2001 From: notactuallyfinn Date: Fri, 27 Mar 2026 12:38:56 +0100 Subject: [PATCH 239/247] fix tests that are affected by error handling update --- test/hermes_test/test_cli.py | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/test/hermes_test/test_cli.py b/test/hermes_test/test_cli.py index 4a747851..d5a382b2 100644 --- a/test/hermes_test/test_cli.py +++ b/test/hermes_test/test_cli.py @@ -18,7 +18,14 @@ def test_hermes_full(): def test_hermes_harvest(hermes_env): - hermes_env['hermes.toml'] = "" + hermes_env['hermes.toml'] = "[harvest]\nsources = [\"cff\"]\n" + hermes_env['CITATION.cff'] = """cff-version: 1.2.0 +title: Test +message: >- + test tests +type: software +authors: + - given-names: Testi""" with hermes_env: result = hermes_env.run("harvest") @@ -27,8 +34,8 @@ def test_hermes_harvest(hermes_env): def test_hermes_process(hermes_env): - hermes_env['hermes.toml'] = "" - hermes_env['.hermes/harvest/test.json'] = "" + hermes_env['hermes.toml'] = "[process]\nsources = [\"cff\"]" + hermes_env['.hermes/harvest/cff/codemeta.json'] = "{}" with hermes_env: result = hermes_env.run("process") From 279e67201a92b46e7a70d4e486ff5c1394768d84 Mon Sep 17 00:00:00 2001 From: notactuallyfinn Date: Fri, 27 Mar 2026 14:18:35 +0100 Subject: [PATCH 240/247] added another process test and fixed small bug --- src/hermes/commands/process/standard_merge.py | 10 +- src/hermes/model/merge/action.py | 71 ++++++--- .../commands/process/test_process.py | 150 ++++++++++++++++++ 3 files changed, 206 insertions(+), 25 deletions(-) diff --git a/src/hermes/commands/process/standard_merge.py b/src/hermes/commands/process/standard_merge.py index eacc077a..b18df309 100644 --- a/src/hermes/commands/process/standard_merge.py +++ b/src/hermes/commands/process/standard_merge.py @@ -11,7 +11,7 @@ import requests from hermes.commands.base import HermesCommand -from hermes.model.merge.action import Concat, MergeAction, MergeSet +from hermes.model.merge.action import Concat, IdMerge, MergeAction, MergeSet from hermes.model.types import ld_dict from hermes.model.types.ld_context import iri_map as iri from .base import HermesProcessPlugin @@ -242,7 +242,7 @@ def match_func(left: Any, right: Any) -> bool: # Filled with entries for every schema-type that can be found inside an JSON-LD dict of type # SoftwareSourceCode or SoftwareApplication using schema and CodeMeta as Context. -CODEMETA_STRATEGY = {None: {None: ACTIONS["default"]}} +CODEMETA_STRATEGY = {None: {None: ACTIONS["default"], "@id": IdMerge()}} """ dict[str | None, dict[str | None, MergeAction]]: MergeActions for the standard JSON_LD contexts objects. """ CODEMETA_STRATEGY[iri["schema:Thing"]] = {iri["schema:owner"]: ACTIONS["OrganizationOrPerson"]} @@ -865,7 +865,7 @@ def __call__(self, command: HermesCommand) -> dict[Union[str, None], dict[Union[ subtypes_for_types = CodemetaProcessPlugin.get_schema_type_hierarchy() strats = CodemetaProcessPlugin.get_schema_strategies(subtypes_for_types) strats.update(CodemetaProcessPlugin.get_codemeta_strategies(subtypes_for_types)) - strats[None] = {None: MergeSet(DEFAULT_MATCH)} + strats[None] = {None: MergeSet(DEFAULT_MATCH), "@id": IdMerge()} except Exception: strats = {**CODEMETA_STRATEGY} for key, value in PROV_STRATEGY.items(): @@ -942,14 +942,14 @@ def get_codemeta_strategies(cls, subtypes_for_types): special_types = set(MATCH_FUNCTION_FOR_TYPE.keys()) # FIXME: change URL on change of context to codemeta 3.0 - download = requests.get("https://github.com/codemeta/codemeta/blob/2.0/crosswalk.csv") + download = requests.get("https://raw.githubusercontent.com/codemeta/codemeta/blob/2.0/crosswalk.csv") decoded_content = download.content.decode('utf-8') cr = csv.reader(decoded_content.splitlines(), delimiter=',') # remove the first line (headers) property_table = list(cr)[1:] strategies = {} for property_row in property_table: - if property_row[0] == "schema" or len(property_row[0]) == 0: + if property_row[0] in ("schema", ""): # skip empty rows continue # generate a set of all types this property can have values of diff --git a/src/hermes/model/merge/action.py b/src/hermes/model/merge/action.py index 1a45d67e..7d5714b4 100644 --- a/src/hermes/model/merge/action.py +++ b/src/hermes/model/merge/action.py @@ -28,7 +28,7 @@ def merge( self: Self, target: ld_merge_dict, key: list[Union[str, int]], - value: ld_merge_list, + value: Union[ld_merge_list, str], update: Union[BASIC_TYPE, TIME_TYPE, ld_dict, ld_list] ) -> Union[JSON_LD_VALUE, BASIC_TYPE, TIME_TYPE, ld_dict, ld_list]: """ @@ -39,7 +39,7 @@ def merge( target (ld_merge_dict): The ld_merge_dict inside of which the items are merged. key (list[str | int]): The "path" of keys so that ``target[key[-1]]`` is ``value`` and for the outermost parent of ``target`` out_parent ``out_parent[key[0]]...[key[-1]]`` results in ``value``. - value (ld_merge_list): The value inside ``target`` that is to be merged with ``update``. + value (ld_merge_list | str): The value inside ``target`` that is to be merged with ``update``. update (BASIC_TYPE | TIME_TYPE | ld_dict | ld_list): The value that is to be merged into ``target`` with ``value``. @@ -56,7 +56,7 @@ def merge( self: Self, target: ld_merge_dict, key: list[Union[str, int]], - value: ld_merge_list, + value: Union[ld_merge_list, str], update: Union[BASIC_TYPE, TIME_TYPE, ld_dict, ld_list] ) -> ld_merge_list: """ @@ -67,16 +67,17 @@ def merge( target (ld_merge_dict): The ld_merge_dict inside of which the items are merged. key (list[str | int]): The "path" of keys so that ``target[key[-1]]`` is ``value`` and for the outermost parent of ``target`` out_parent ``out_parent[key[0]]...[key[-1]]`` results in ``value``. - value (ld_merge_list): The value inside ``target`` that is to be merged with ``update``. + value (ld_merge_list | str): The value inside ``target`` that is to be merged with ``update``. This value won't be changed. update (BASIC_TYPE | TIME_TYPE | ld_dict | ld_list): The value that is to be merged into ``target`` with ``value``. This value will be rejected. Returns: - ld_merge_list: The merged value. This value will always be ``value``. + ld_merge_list | str: The merged value. This value will always be ``value``. """ - # Add the entry that data has been rejected. - target.reject(key, update) + if value != update: + # Add the entry that data has been rejected. + target.reject(key, update) # Return value unchanged. return value @@ -87,7 +88,7 @@ def merge( self: Self, target: ld_merge_dict, key: list[Union[str, int]], - value: ld_merge_list, + value: Union[ld_merge_list, str], update: Union[BASIC_TYPE, TIME_TYPE, ld_dict, ld_list] ) -> Union[BASIC_TYPE, TIME_TYPE, ld_dict, ld_list]: """ @@ -98,7 +99,7 @@ def merge( target (ld_merge_dict): The ld_merge_dict inside of which the items are merged. key (list[str | int]): The "path" of keys so that ``target[key[-1]]`` is ``value`` and for the outermost parent of ``target`` out_parent ``out_parent[key[0]]...[key[-1]]`` results in ``value``. - value (ld_merge_list): The value inside ``target`` that is to be merged with ``update``. + value (ld_merge_list | str): The value inside ``target`` that is to be merged with ``update``. This value will bew replaced. update (BASIC_TYPE | TIME_TYPE | ld_dict | ld_list): The value that is to be merged into ``target`` with ``value``. This value will be used instead of ``value``. @@ -106,8 +107,9 @@ def merge( Returns: BASIC_TYPE | TIME_TYPE | ld_dict | ld_list: The merged value. This value will be ``update``. """ - # Add the entry that data has been replaced. - target.replace(key, value) + if value != update: + # Add the entry that data has been replaced. + target.replace(key, value) # Return the new value. return update @@ -118,7 +120,7 @@ def merge( self: Self, target: ld_merge_dict, key: list[Union[str, int]], - value: ld_merge_list, + value: Union[ld_merge_list, str], update: Union[BASIC_TYPE, TIME_TYPE, ld_dict, ld_list] ) -> ld_merge_list: """ @@ -128,12 +130,12 @@ def merge( target (ld_merge_dict): The ld_merge_dict inside of which the items are merged. key (list[str | int]): The "path" of keys so that ``target[key[-1]]`` is ``value`` and for the outermost parent of ``target`` out_parent ``out_parent[key[0]]...[key[-1]]`` results in ``value``. - value (ld_merge_list): The value inside ``target`` that is to be merged with ``update``. + value (ld_merge_list | str): The value inside ``target`` that is to be merged with ``update``. update (BASIC_TYPE | TIME_TYPE | ld_dict | ld_list): The value that is to be merged into ``target`` with ``value``. Returns: - ld_merge_list: The merged value (``value`` concatenated with ``update``). + ld_merge_list | str: The merged value (``value`` concatenated with ``update``). """ # Concatenate the items and return the result. if isinstance(update, (list, ld_list)): @@ -173,7 +175,7 @@ def merge( self: Self, target: ld_merge_dict, key: list[Union[str, int]], - value: ld_merge_list, + value: Union[ld_merge_list, str], update: Union[BASIC_TYPE, TIME_TYPE, ld_dict, ld_list] ) -> ld_merge_list: """ @@ -183,12 +185,12 @@ def merge( target (ld_merge_dict): The ld_merge_dict inside of which the items are merged. key (list[str | int]): The "path" of keys so that ``target[key[-1]]`` is ``value`` and for the outermost parent of ``target`` out_parent ``out_parent[key[0]]...[key[-1]]`` results in ``value``. - value (ld_merge_list): The value inside ``target`` that is to be merged with ``update``. + value (ld_merge_list | str): The value inside ``target`` that is to be merged with ``update``. update (BASIC_TYPE | TIME_TYPE | ld_dict | ld_list): The value that is to be merged into ``target`` with ``value``. Returns: - ld_merge_list: The merged value. + ld_merge_list | str: The merged value. """ if not isinstance(update, (list, ld_list)): update = [update] @@ -235,7 +237,7 @@ def merge( self: Self, target: ld_merge_dict, key: list[Union[str, int]], - value: ld_merge_list, + value: Union[ld_merge_list, str], update: Union[BASIC_TYPE, TIME_TYPE, ld_dict, ld_list] ) -> ld_merge_list: """ @@ -245,12 +247,12 @@ def merge( target (ld_merge_dict): The ld_merge_dict inside of which the items are merged. key (list[str | int]): The "path" of keys so that ``target[key[-1]]`` is ``value`` and for the outermost parent of ``target`` out_parent out_parent[key[0]]...[key[-1]] results in ``value``. - value (ld_merge_list): The value inside ``target`` that is to be merged with ``update``. + value (ld_merge_list | str): The value inside ``target`` that is to be merged with ``update``. update (BASIC_TYPE | TIME_TYPE | ld_dict | ld_list): The value that is to be merged into ``target`` with ``value``. Returns: - ld_merge_list: The merged value. + ld_merge_list | str: The merged value. """ if not isinstance(update, (list, ld_list)): update = [update] @@ -272,3 +274,32 @@ def merge( value.append(update_item) # Return the merged values. return value + +class IdMerge(MergeAction): + """ :class:`MergeAction` providing a merge function for merging ids, i.e. error if not equals else do nothing. """ + def merge( + self: Self, + target: ld_merge_dict, + key: list[Union[str, int]], + value: Union[ld_merge_list, str], + update: Union[BASIC_TYPE, TIME_TYPE, ld_dict, ld_list] + ) -> ld_merge_list: + """ + Error if value != update or key != "@id". Else do nothing. + + Args: + target (ld_merge_dict): The ld_merge_dict inside of which the items are merged. + key (list[str | int]): The "path" of keys so that ``target[key[-1]]`` is ``value`` and for the outermost + parent of ``target`` out_parent out_parent[key[0]]...[key[-1]] results in ``value``. + value (ld_merge_list | str): The value inside ``target`` that is to be merged with ``update``. + update (BASIC_TYPE | TIME_TYPE | ld_dict | ld_list): The value that is to be merged into ``target`` + with ``value``. + + Returns: + ld_merge_list | str: The merged value. + """ + if key[-1] != "@id": + raise MergeError("Can't merge non-'@id' values.") + if value != update: + raise MergeError("Two different '@id' values are merged into the same object.") + return value \ No newline at end of file diff --git a/test/hermes_test/commands/process/test_process.py b/test/hermes_test/commands/process/test_process.py index 92a8b35b..f55ed22e 100644 --- a/test/hermes_test/commands/process/test_process.py +++ b/test/hermes_test/commands/process/test_process.py @@ -48,6 +48,156 @@ "http://schema.org/license": [{"@id": "https://spdx.org/licenses/Apache-2.0"}], } ), + ), + ( + { + "cff": SoftwareMetadata( + { + "type": "SoftwareSourceCode", + "author": [ + { + "id": "https://orcid.org/0000-0003-4925-7248", + "type": "Person", + "affiliation": { + "type": "Organization", + "name": "German Aerospace Center (DLR)" + }, + "email": "stephan.druskat@dlr.de" + }, + { + "type": "Person", + "affiliation": { + "type": "Organization", + "name": "Forschungszentrum J\u00c3\u00bclich" + }, + "email": "o.bertuch@fz-juelich.de", + "givenName": "Oliver" + }, + { + "id": "https://orcid.org/0000-0001-8174-7795", + "type": "Person", + "email": "o.knodel@hzdr.de", + "familyName": "Knodel", + "givenName": "Oliver" + } + ], + "description": "Tool to automate software publication. Not stable yet.", + "identifier": "https://doi.org/10.5281/zenodo.13221384", + "license": "https://spdx.org/licenses/Apache-2.0" + } + ), + "codemeta": SoftwareMetadata( + { + "type": "SoftwareSourceCode", + "author": [ + { + "id": "https://orcid.org/0000-0001-6372-3853", + "type": "Person", + "affiliation": { + "type": "Organization", + "legalName": "German Aerospace Center (DLR)" + }, + "email": "michael.meinel@dlr.de", + "familyName": "Meinel", + "givenName": "Michael" + }, + { + "id": "https://orcid.org/0000-0003-4925-7248", + "type": "Person", + "affiliation": { + "type": "Organization", + "legalName": "German Aerospace Center (DLR)" + }, + "email": "stephan.druskat@dlr.de", + "familyName": "Druskat", + "givenName": "Stephan" + }, + { + "id": "https://orcid.org/0000-0002-2702-3419", + "type": "Person", + "affiliation": { + "type": "Organization", + "legalName": "Forschungszentrum J\u00c3\u00bclich" + }, + "email": "o.bertuch@fz-juelich.de", + "familyName": "Bertuch" + }, + { + "id": "https://orcid.org/0000-0001-8174-7795", + "type": "Person", + "affiliation": { + "type": "Organization", + "legalName": "Helmholtz-Zentrum Dresden-Rossendorf (HZDR)" + }, + "familyName": "Knodel", + "givenName": "Oliver" + } + ], + "identifier": "https://doi.org/10.5281/zenodo.13221384", + "license": "https://spdx.org/licenses/Apache-2.0", + "legalName": "hermes", + "version": "0.9.0" + }, + extra_vocabs = {"legalName": {"@id": "http://schema.org/name"}} + ) + }, + SoftwareMetadata( + { + "type": "SoftwareSourceCode", + "schema:author": [ + { + "id": "https://orcid.org/0000-0001-6372-3853", + "type": "Person", + "affiliation": { + "type": "Organization", + "legalName": "German Aerospace Center (DLR)" + }, + "email": "michael.meinel@dlr.de", + "familyName": "Meinel", + "givenName": "Michael" + }, + { + "id": "https://orcid.org/0000-0003-4925-7248", + "type": "Person", + "affiliation": { + "type": "Organization", + "legalName": "German Aerospace Center (DLR)" + }, + "email": "stephan.druskat@dlr.de", + "familyName": "Druskat", + "givenName": "Stephan" + }, + { + "id": "https://orcid.org/0000-0002-2702-3419", + "type": "Person", + "affiliation": { + "type": "Organization", + "legalName": "Forschungszentrum J\u00c3\u00bclich" + }, + "email": "o.bertuch@fz-juelich.de", + "familyName": "Bertuch", + "givenName": "Oliver" + }, + { + "id": "https://orcid.org/0000-0001-8174-7795", + "type": "Person", + "affiliation": { + "type": "Organization", + "legalName": "Helmholtz-Zentrum Dresden-Rossendorf (HZDR)" + }, + "email": "o.knodel@hzdr.de", + "familyName": "Knodel", + "givenName": "Oliver" + } + ], + "description": "Tool to automate software publication. Not stable yet.", + "identifier": "https://doi.org/10.5281/zenodo.13221384", + "license": "https://spdx.org/licenses/Apache-2.0", + "legalName": "hermes", + "version": "0.9.0" + }, + extra_vocabs = {"legalName": {"@id": "http://schema.org/name"}} + ), ) ], ) From 65399be42939e5a03b827ac5576947d46e6f78ce Mon Sep 17 00:00:00 2001 From: notactuallyfinn Date: Fri, 27 Mar 2026 14:20:57 +0100 Subject: [PATCH 241/247] flake8 --- src/hermes/model/merge/action.py | 3 ++- test/hermes_test/commands/process/test_process.py | 4 ++-- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/src/hermes/model/merge/action.py b/src/hermes/model/merge/action.py index 7d5714b4..f2cfc7b3 100644 --- a/src/hermes/model/merge/action.py +++ b/src/hermes/model/merge/action.py @@ -275,6 +275,7 @@ def merge( # Return the merged values. return value + class IdMerge(MergeAction): """ :class:`MergeAction` providing a merge function for merging ids, i.e. error if not equals else do nothing. """ def merge( @@ -302,4 +303,4 @@ def merge( raise MergeError("Can't merge non-'@id' values.") if value != update: raise MergeError("Two different '@id' values are merged into the same object.") - return value \ No newline at end of file + return value diff --git a/test/hermes_test/commands/process/test_process.py b/test/hermes_test/commands/process/test_process.py index f55ed22e..ca43b225 100644 --- a/test/hermes_test/commands/process/test_process.py +++ b/test/hermes_test/commands/process/test_process.py @@ -138,7 +138,7 @@ "legalName": "hermes", "version": "0.9.0" }, - extra_vocabs = {"legalName": {"@id": "http://schema.org/name"}} + extra_vocabs={"legalName": {"@id": "http://schema.org/name"}} ) }, SoftwareMetadata( @@ -196,7 +196,7 @@ "legalName": "hermes", "version": "0.9.0" }, - extra_vocabs = {"legalName": {"@id": "http://schema.org/name"}} + extra_vocabs={"legalName": {"@id": "http://schema.org/name"}} ), ) ], From 89191eca8f96dfda245b6923a562013e03dd90ad Mon Sep 17 00:00:00 2001 From: "Kernchen, Sophie" Date: Mon, 13 Apr 2026 16:30:30 +0200 Subject: [PATCH 242/247] Imporve first cli harvest tests --- test/hermes_test/conftest.py | 2 +- test/hermes_test/test_cli.py | 28 +++++++++++++++++++++++++++- 2 files changed, 28 insertions(+), 2 deletions(-) diff --git a/test/hermes_test/conftest.py b/test/hermes_test/conftest.py index c0f5a4d5..9cc0bc70 100644 --- a/test/hermes_test/conftest.py +++ b/test/hermes_test/conftest.py @@ -34,7 +34,7 @@ def __enter__(self): def run(self, *args): proc = subprocess.Popen( - [sys.executable, "-m", self.hermes_exe, *args], stdout=subprocess.PIPE, stderr=subprocess.PIPE + [sys.executable, "-m", self.hermes_exe, *args], bufsize=-1, stdout=subprocess.PIPE, stderr=subprocess.PIPE ) proc.wait() return proc diff --git a/test/hermes_test/test_cli.py b/test/hermes_test/test_cli.py index d5a382b2..9b6fd08e 100644 --- a/test/hermes_test/test_cli.py +++ b/test/hermes_test/test_cli.py @@ -7,8 +7,10 @@ # flake8: noqa import pytest +import os from hermes.commands import cli +from hermes import error def test_hermes_full(): @@ -18,7 +20,7 @@ def test_hermes_full(): def test_hermes_harvest(hermes_env): - hermes_env['hermes.toml'] = "[harvest]\nsources = [\"cff\"]\n" + hermes_env['hermes.toml'] = "[harvest]\nsources = [\"cff\", \"foo\"]\n" hermes_env['CITATION.cff'] = """cff-version: 1.2.0 title: Test message: >- @@ -29,8 +31,32 @@ def test_hermes_harvest(hermes_env): with hermes_env: result = hermes_env.run("harvest") + stdout_res = result.stdout.read().decode() + test_dir = hermes_env.test_path + assert len(os.listdir(test_dir)) == 4 + log = test_dir / "hermes.log" + #print(log.read_text()) + assert log.exists() + assert len(os.listdir(test_dir/ ".hermes/harvest/cff/")) == 3 + output_file = test_dir / ".hermes/harvest/cff/codemeta.json" + assert output_file.exists() assert result.returncode == 0 + assert "Run cff plugin" in stdout_res + assert "Plugin foo not found" in stdout_res + + +@pytest.mark.dev +def test_hermes_harvest_no_plugin(hermes_env): + hermes_env['hermes.toml'] = "[harvest]\nsources = []\n" + with hermes_env: + result = hermes_env.run("harvest") + stdout_res = result.stdout.read().decode() + test_dir = hermes_env.test_path + log = test_dir / "hermes.log" + + assert result.returncode == 1 + assert "hermes.error.MisconfigurationError: No harvest plugin was configured to be run and loaded." in stdout_res def test_hermes_process(hermes_env): From a42c3151ae9614cb6af4a497f029596ae81e79c1 Mon Sep 17 00:00:00 2001 From: notactuallyfinn Date: Mon, 13 Apr 2026 17:02:48 +0200 Subject: [PATCH 243/247] add reject strategies for special properties --- src/hermes/commands/process/standard_merge.py | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/src/hermes/commands/process/standard_merge.py b/src/hermes/commands/process/standard_merge.py index b18df309..93b1105b 100644 --- a/src/hermes/commands/process/standard_merge.py +++ b/src/hermes/commands/process/standard_merge.py @@ -11,7 +11,7 @@ import requests from hermes.commands.base import HermesCommand -from hermes.model.merge.action import Concat, IdMerge, MergeAction, MergeSet +from hermes.model.merge.action import Concat, IdMerge, MergeAction, MergeSet, Reject from hermes.model.types import ld_dict from hermes.model.types.ld_context import iri_map as iri from .base import HermesProcessPlugin @@ -865,7 +865,15 @@ def __call__(self, command: HermesCommand) -> dict[Union[str, None], dict[Union[ subtypes_for_types = CodemetaProcessPlugin.get_schema_type_hierarchy() strats = CodemetaProcessPlugin.get_schema_strategies(subtypes_for_types) strats.update(CodemetaProcessPlugin.get_codemeta_strategies(subtypes_for_types)) - strats[None] = {None: MergeSet(DEFAULT_MATCH), "@id": IdMerge()} + strats[None] = { + None: MergeSet(DEFAULT_MATCH), + "@id": IdMerge(), + **{ + iri[f"schema:{term}"]: Reject() for term in ( + "version", "name", "givenName", "familyName", "description", "license" + ) + } + } except Exception: strats = {**CODEMETA_STRATEGY} for key, value in PROV_STRATEGY.items(): @@ -942,7 +950,7 @@ def get_codemeta_strategies(cls, subtypes_for_types): special_types = set(MATCH_FUNCTION_FOR_TYPE.keys()) # FIXME: change URL on change of context to codemeta 3.0 - download = requests.get("https://raw.githubusercontent.com/codemeta/codemeta/blob/2.0/crosswalk.csv") + download = requests.get("https://raw.githubusercontent.com/codemeta/codemeta/2.0/crosswalk.csv") decoded_content = download.content.decode('utf-8') cr = csv.reader(decoded_content.splitlines(), delimiter=',') # remove the first line (headers) From 9294d0a7c3df33cb28a5ae54c62b55adba0b398b Mon Sep 17 00:00:00 2001 From: "Kernchen, Sophie" Date: Wed, 22 Apr 2026 13:11:12 +0200 Subject: [PATCH 244/247] Add good case tests for cli --- test/hermes_test/test_cli.py | 56 +++++++++++++++++++++++++++++++++++- 1 file changed, 55 insertions(+), 1 deletion(-) diff --git a/test/hermes_test/test_cli.py b/test/hermes_test/test_cli.py index 9b6fd08e..52584e73 100644 --- a/test/hermes_test/test_cli.py +++ b/test/hermes_test/test_cli.py @@ -46,7 +46,6 @@ def test_hermes_harvest(hermes_env): assert "Plugin foo not found" in stdout_res -@pytest.mark.dev def test_hermes_harvest_no_plugin(hermes_env): hermes_env['hermes.toml'] = "[harvest]\nsources = []\n" with hermes_env: @@ -65,5 +64,60 @@ def test_hermes_process(hermes_env): with hermes_env: result = hermes_env.run("process") + test_dir = hermes_env.test_path + assert len(os.listdir(test_dir / ".hermes/process/result/")) == 3 + output_file = test_dir / ".hermes/process/result/codemeta.json" + assert output_file.exists() + + assert result.returncode == 0 + + +def test_hermes_curate(hermes_env, tmpdir): + hermes_env['hermes.toml'] = "[curate]\nplugin = \"pass_curate\"" + hermes_env['.hermes/process/result/codemeta.json'] = "{\"@context\": [\"https://doi.org/10.5063/schema/codemeta-2.0\"], \"name\": \"hermes\", \"version\": \"0.9.0\"}" + + with hermes_env: + result = hermes_env.run("curate") + test_dir = hermes_env.test_path + assert len(os.listdir(test_dir / ".hermes/curate/result/")) == 3 + output_file = test_dir / ".hermes/curate/result/codemeta.json" + + # One small change and it breaks, so maybe adapt it to a simpler includes test + content = "{\"@context\": [\"https://doi.org/10.5063/schema/codemeta-2.0\", {\"schema\": " \ + "\"http://schema.org/\", \"prov\": \"http://www.w3.org/ns/prov#\", \"hermes-rt\": " \ + "\"https://schema.software-metadata.pub/hermes-runtime/1.0/\", \"hermes\": " \ + "\"https://schema.software-metadata.pub/hermes-content/1.0/\"}], \"name\": \"hermes\"," \ + " \"version\": \"0.9.0\"}" + assert output_file.read_text() == content + + assert result.returncode == 0 + + +def test_hermes_deposit(hermes_env): + hermes_env['hermes.toml'] = "[deposit]\ntarget = \"file\"" + hermes_env['.hermes/curate/result/codemeta.json'] = "{}" + + with hermes_env: + result = hermes_env.run("deposit") + test_dir = hermes_env.test_path + assert len(os.listdir(test_dir / ".hermes/deposit/file/")) == 2 + output_file = test_dir / "hermes.json" + assert "@context" in output_file.read_text() + + assert result.returncode == 0 + + +def test_hermes_postprocess(hermes_env): + hermes_env['hermes.toml'] = "[postprocess]\nrun = [ \"config_invenio_rdm_record_id\"] \n" \ + "[deposit.invenio_rdm]\ncommunities = []\n" + hermes_env['.hermes/deposit/invenio_rdm/result.json'] = "{\"record_id\": 1234}" + + with hermes_env: + result = hermes_env.run("postprocess") + #log = hermes_env.test_path / "hermes.log" + #print(log.read_text()) + output_file = hermes_env.test_path / "hermes.toml" + assert output_file.read_text() == "[postprocess]\nrun = [ \"config_invenio_rdm_record_id\"] \n" \ + "[deposit.invenio_rdm]\ncommunities = []\nrecord_id = 1234\n" assert result.returncode == 0 From 51086961ea5356e417a7fa9b50088cad08cb5bd2 Mon Sep 17 00:00:00 2001 From: "Kernchen, Sophie" Date: Wed, 22 Apr 2026 13:21:10 +0200 Subject: [PATCH 245/247] Ignore flake8 C901 (too complex) --- src/hermes/model/types/ld_container.py | 2 ++ src/hermes/model/types/ld_list.py | 2 ++ 2 files changed, 4 insertions(+) diff --git a/src/hermes/model/types/ld_container.py b/src/hermes/model/types/ld_container.py index 580d403a..3e888688 100644 --- a/src/hermes/model/types/ld_container.py +++ b/src/hermes/model/types/ld_container.py @@ -5,6 +5,8 @@ # SPDX-FileContributor: Michael Meinel # SPDX-FileContributor: Michael Fritzsche +# flake8: noqa: C901 + from __future__ import annotations from datetime import date, datetime, time diff --git a/src/hermes/model/types/ld_list.py b/src/hermes/model/types/ld_list.py index bcc1db15..3aa34c5d 100644 --- a/src/hermes/model/types/ld_list.py +++ b/src/hermes/model/types/ld_list.py @@ -5,6 +5,8 @@ # SPDX-FileContributor: Michael Meinel # SPDX-FileContributor: Michael Fritzsche +# flake8: noqa: C901 + from __future__ import annotations from collections import deque From 3e7c83d44f3e35ad70790792811402291d8139cc Mon Sep 17 00:00:00 2001 From: notactuallyfinn Date: Thu, 23 Apr 2026 12:36:09 +0200 Subject: [PATCH 246/247] update dependencies --- poetry.lock | 2213 ++++++++++++++++++++++++++++-------------------- pyproject.toml | 1 + 2 files changed, 1280 insertions(+), 934 deletions(-) diff --git a/poetry.lock b/poetry.lock index 0eeefe31..95732ef1 100644 --- a/poetry.lock +++ b/poetry.lock @@ -45,14 +45,14 @@ files = [ [[package]] name = "astroid" -version = "3.3.11" +version = "4.1.2" description = "An abstract syntax tree for Python with inference support." optional = false -python-versions = ">=3.9.0" +python-versions = ">=3.10.0" groups = ["docs"] files = [ - {file = "astroid-3.3.11-py3-none-any.whl", hash = "sha256:54c760ae8322ece1abd213057c4b5bba7c49818853fc901ef09719a60dbf9dec"}, - {file = "astroid-3.3.11.tar.gz", hash = "sha256:1e5a5011af2920c7c67a53f65d536d65bfa7116feeaf2354d8b94f29573bb0ce"}, + {file = "astroid-4.1.2-py3-none-any.whl", hash = "sha256:21312e682c0866dc5a309ee57e4b88ea92751b9955a58b1c31371cbbeb088707"}, + {file = "astroid-4.1.2.tar.gz", hash = "sha256:d6c4a52bfcda4bbeb7359dead642b0248b90f7d9a07e690230bd86fefd6d37f1"}, ] [package.dependencies] @@ -60,34 +60,26 @@ typing-extensions = {version = ">=4", markers = "python_version < \"3.11\""} [[package]] name = "attrs" -version = "25.3.0" +version = "26.1.0" description = "Classes Without Boilerplate" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" groups = ["main"] files = [ - {file = "attrs-25.3.0-py3-none-any.whl", hash = "sha256:427318ce031701fea540783410126f03899a97ffc6f61596ad581ac2e40e3bc3"}, - {file = "attrs-25.3.0.tar.gz", hash = "sha256:75d7cefc7fb576747b2c81b4442d4d4a1ce0900973527c011d1030fd3bf4af1b"}, + {file = "attrs-26.1.0-py3-none-any.whl", hash = "sha256:c647aa4a12dfbad9333ca4e71fe62ddc36f4e63b2d260a37a8b83d2f043ac309"}, + {file = "attrs-26.1.0.tar.gz", hash = "sha256:d03ceb89cb322a8fd706d4fb91940737b6642aa36998fe130a9bc96c985eff32"}, ] -[package.extras] -benchmark = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] -cov = ["cloudpickle ; platform_python_implementation == \"CPython\"", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] -dev = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pre-commit-uv", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] -docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier"] -tests = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] -tests-mypy = ["mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\""] - [[package]] name = "babel" -version = "2.17.0" +version = "2.18.0" description = "Internationalization utilities" optional = false python-versions = ">=3.8" groups = ["docs"] files = [ - {file = "babel-2.17.0-py3-none-any.whl", hash = "sha256:4d0b53093fdfb4b21c92b5213dba5a1b23885afa8383709427046b21c366e5f2"}, - {file = "babel-2.17.0.tar.gz", hash = "sha256:0c54cffb19f690cdcc52a3b50bcbf71e07a808d1c80d549f2459b9d2cf0afb9d"}, + {file = "babel-2.18.0-py3-none-any.whl", hash = "sha256:e2b422b277c2b9a9630c1d7903c2a00d0830c409c59ac8cae9081c92f1aeba35"}, + {file = "babel-2.18.0.tar.gz", hash = "sha256:b80b99a14bd085fcacfa15c9165f651fbb3406e66cc603abf11c5750937c992d"}, ] [package.extras] @@ -95,18 +87,18 @@ dev = ["backports.zoneinfo ; python_version < \"3.9\"", "freezegun (>=1.0,<2.0)" [[package]] name = "beautifulsoup4" -version = "4.13.4" +version = "4.14.3" description = "Screen-scraping library" optional = false python-versions = ">=3.7.0" groups = ["docs"] files = [ - {file = "beautifulsoup4-4.13.4-py3-none-any.whl", hash = "sha256:9bbbb14bfde9d79f38b8cd5f8c7c85f4b8f2523190ebed90e950a8dea4cb1c4b"}, - {file = "beautifulsoup4-4.13.4.tar.gz", hash = "sha256:dbb3c4e1ceae6aefebdaf2423247260cd062430a410e38c66f2baa50a8437195"}, + {file = "beautifulsoup4-4.14.3-py3-none-any.whl", hash = "sha256:0918bfe44902e6ad8d57732ba310582e98da931428d231a5ecb9e7c703a735bb"}, + {file = "beautifulsoup4-4.14.3.tar.gz", hash = "sha256:6292b1c5186d356bba669ef9f7f051757099565ad9ada5dd630bd9de5fa7fb86"}, ] [package.dependencies] -soupsieve = ">1.2" +soupsieve = ">=1.6.1" typing-extensions = ">=4.0.0" [package.extras] @@ -151,26 +143,26 @@ testing = ["pytest (>=6,!=7.0.0)", "pytest-xdist (>=2)"] [[package]] name = "cachetools" -version = "6.1.0" +version = "7.0.6" description = "Extensible memoizing collections and decorators" optional = false -python-versions = ">=3.9" +python-versions = ">=3.10" groups = ["main"] files = [ - {file = "cachetools-6.1.0-py3-none-any.whl", hash = "sha256:1c7bb3cf9193deaf3508b7c5f2a79986c13ea38965c5adcff1f84519cf39163e"}, - {file = "cachetools-6.1.0.tar.gz", hash = "sha256:b4c4f404392848db3ce7aac34950d17be4d864da4b8b66911008e430bc544587"}, + {file = "cachetools-7.0.6-py3-none-any.whl", hash = "sha256:4e94956cfdd3086f12042cdd29318f5ced3893014f7d0d059bf3ead3f85b7f8b"}, + {file = "cachetools-7.0.6.tar.gz", hash = "sha256:e5d524d36d65703a87243a26ff08ad84f73352adbeafb1cde81e207b456aaf24"}, ] [[package]] name = "certifi" -version = "2025.7.14" +version = "2026.4.22" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.7" groups = ["main", "dev", "docs"] files = [ - {file = "certifi-2025.7.14-py3-none-any.whl", hash = "sha256:6b31f564a415d79ee77df69d757bb49a5bb53bd9f756cbbe24394ffd6fc1f4b2"}, - {file = "certifi-2025.7.14.tar.gz", hash = "sha256:8ea99dbdfaaf2ba2f9bac77b9249ef62ec5218e7c2b2e903378ed5fccf765995"}, + {file = "certifi-2026.4.22-py3-none-any.whl", hash = "sha256:3cb2210c8f88ba2318d29b0388d1023c8492ff72ecdde4ebdaddbb13a31b1c4a"}, + {file = "certifi-2026.4.22.tar.gz", hash = "sha256:8d455352a37b71bf76a79caa83a3d6c25afee4a385d632127b6afb3963f1c580"}, ] [[package]] @@ -199,208 +191,297 @@ publishing = ["twine", "wheel"] [[package]] name = "cffi" -version = "1.17.1" +version = "2.0.0" description = "Foreign Function Interface for Python calling C code." optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" groups = ["main"] -files = [ - {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"}, - {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"}, - {file = "cffi-1.17.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382"}, - {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702"}, - {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3"}, - {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6"}, - {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17"}, - {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8"}, - {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e"}, - {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be"}, - {file = "cffi-1.17.1-cp310-cp310-win32.whl", hash = "sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c"}, - {file = "cffi-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15"}, - {file = "cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401"}, - {file = "cffi-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf"}, - {file = "cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4"}, - {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41"}, - {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1"}, - {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6"}, - {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d"}, - {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6"}, - {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f"}, - {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b"}, - {file = "cffi-1.17.1-cp311-cp311-win32.whl", hash = "sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655"}, - {file = "cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0"}, - {file = "cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4"}, - {file = "cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c"}, - {file = "cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36"}, - {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5"}, - {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff"}, - {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99"}, - {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93"}, - {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3"}, - {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8"}, - {file = "cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65"}, - {file = "cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903"}, - {file = "cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e"}, - {file = "cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2"}, - {file = "cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3"}, - {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683"}, - {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5"}, - {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4"}, - {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd"}, - {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed"}, - {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9"}, - {file = "cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d"}, - {file = "cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a"}, - {file = "cffi-1.17.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:636062ea65bd0195bc012fea9321aca499c0504409f413dc88af450b57ffd03b"}, - {file = "cffi-1.17.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7eac2ef9b63c79431bc4b25f1cd649d7f061a28808cbc6c47b534bd789ef964"}, - {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e221cf152cff04059d011ee126477f0d9588303eb57e88923578ace7baad17f9"}, - {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:31000ec67d4221a71bd3f67df918b1f88f676f1c3b535a7eb473255fdc0b83fc"}, - {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f17be4345073b0a7b8ea599688f692ac3ef23ce28e5df79c04de519dbc4912c"}, - {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2b1fac190ae3ebfe37b979cc1ce69c81f4e4fe5746bb401dca63a9062cdaf1"}, - {file = "cffi-1.17.1-cp38-cp38-win32.whl", hash = "sha256:7596d6620d3fa590f677e9ee430df2958d2d6d6de2feeae5b20e82c00b76fbf8"}, - {file = "cffi-1.17.1-cp38-cp38-win_amd64.whl", hash = "sha256:78122be759c3f8a014ce010908ae03364d00a1f81ab5c7f4a7a5120607ea56e1"}, - {file = "cffi-1.17.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16"}, - {file = "cffi-1.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36"}, - {file = "cffi-1.17.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8"}, - {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576"}, - {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87"}, - {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0"}, - {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3"}, - {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595"}, - {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a"}, - {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e"}, - {file = "cffi-1.17.1-cp39-cp39-win32.whl", hash = "sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7"}, - {file = "cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662"}, - {file = "cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824"}, +markers = "platform_python_implementation != \"PyPy\"" +files = [ + {file = "cffi-2.0.0-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:0cf2d91ecc3fcc0625c2c530fe004f82c110405f101548512cce44322fa8ac44"}, + {file = "cffi-2.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f73b96c41e3b2adedc34a7356e64c8eb96e03a3782b535e043a986276ce12a49"}, + {file = "cffi-2.0.0-cp310-cp310-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:53f77cbe57044e88bbd5ed26ac1d0514d2acf0591dd6bb02a3ae37f76811b80c"}, + {file = "cffi-2.0.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:3e837e369566884707ddaf85fc1744b47575005c0a229de3327f8f9a20f4efeb"}, + {file = "cffi-2.0.0-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:5eda85d6d1879e692d546a078b44251cdd08dd1cfb98dfb77b670c97cee49ea0"}, + {file = "cffi-2.0.0-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:9332088d75dc3241c702d852d4671613136d90fa6881da7d770a483fd05248b4"}, + {file = "cffi-2.0.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fc7de24befaeae77ba923797c7c87834c73648a05a4bde34b3b7e5588973a453"}, + {file = "cffi-2.0.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:cf364028c016c03078a23b503f02058f1814320a56ad535686f90565636a9495"}, + {file = "cffi-2.0.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e11e82b744887154b182fd3e7e8512418446501191994dbf9c9fc1f32cc8efd5"}, + {file = "cffi-2.0.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:8ea985900c5c95ce9db1745f7933eeef5d314f0565b27625d9a10ec9881e1bfb"}, + {file = "cffi-2.0.0-cp310-cp310-win32.whl", hash = "sha256:1f72fb8906754ac8a2cc3f9f5aaa298070652a0ffae577e0ea9bd480dc3c931a"}, + {file = "cffi-2.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:b18a3ed7d5b3bd8d9ef7a8cb226502c6bf8308df1525e1cc676c3680e7176739"}, + {file = "cffi-2.0.0-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:b4c854ef3adc177950a8dfc81a86f5115d2abd545751a304c5bcf2c2c7283cfe"}, + {file = "cffi-2.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2de9a304e27f7596cd03d16f1b7c72219bd944e99cc52b84d0145aefb07cbd3c"}, + {file = "cffi-2.0.0-cp311-cp311-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:baf5215e0ab74c16e2dd324e8ec067ef59e41125d3eade2b863d294fd5035c92"}, + {file = "cffi-2.0.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:730cacb21e1bdff3ce90babf007d0a0917cc3e6492f336c2f0134101e0944f93"}, + {file = "cffi-2.0.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:6824f87845e3396029f3820c206e459ccc91760e8fa24422f8b0c3d1731cbec5"}, + {file = "cffi-2.0.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:9de40a7b0323d889cf8d23d1ef214f565ab154443c42737dfe52ff82cf857664"}, + {file = "cffi-2.0.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:8941aaadaf67246224cee8c3803777eed332a19d909b47e29c9842ef1e79ac26"}, + {file = "cffi-2.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a05d0c237b3349096d3981b727493e22147f934b20f6f125a3eba8f994bec4a9"}, + {file = "cffi-2.0.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:94698a9c5f91f9d138526b48fe26a199609544591f859c870d477351dc7b2414"}, + {file = "cffi-2.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:5fed36fccc0612a53f1d4d9a816b50a36702c28a2aa880cb8a122b3466638743"}, + {file = "cffi-2.0.0-cp311-cp311-win32.whl", hash = "sha256:c649e3a33450ec82378822b3dad03cc228b8f5963c0c12fc3b1e0ab940f768a5"}, + {file = "cffi-2.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:66f011380d0e49ed280c789fbd08ff0d40968ee7b665575489afa95c98196ab5"}, + {file = "cffi-2.0.0-cp311-cp311-win_arm64.whl", hash = "sha256:c6638687455baf640e37344fe26d37c404db8b80d037c3d29f58fe8d1c3b194d"}, + {file = "cffi-2.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6d02d6655b0e54f54c4ef0b94eb6be0607b70853c45ce98bd278dc7de718be5d"}, + {file = "cffi-2.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8eca2a813c1cb7ad4fb74d368c2ffbbb4789d377ee5bb8df98373c2cc0dee76c"}, + {file = "cffi-2.0.0-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:21d1152871b019407d8ac3985f6775c079416c282e431a4da6afe7aefd2bccbe"}, + {file = "cffi-2.0.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b21e08af67b8a103c71a250401c78d5e0893beff75e28c53c98f4de42f774062"}, + {file = "cffi-2.0.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:1e3a615586f05fc4065a8b22b8152f0c1b00cdbc60596d187c2a74f9e3036e4e"}, + {file = "cffi-2.0.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:81afed14892743bbe14dacb9e36d9e0e504cd204e0b165062c488942b9718037"}, + {file = "cffi-2.0.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3e17ed538242334bf70832644a32a7aae3d83b57567f9fd60a26257e992b79ba"}, + {file = "cffi-2.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3925dd22fa2b7699ed2617149842d2e6adde22b262fcbfada50e3d195e4b3a94"}, + {file = "cffi-2.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2c8f814d84194c9ea681642fd164267891702542f028a15fc97d4674b6206187"}, + {file = "cffi-2.0.0-cp312-cp312-win32.whl", hash = "sha256:da902562c3e9c550df360bfa53c035b2f241fed6d9aef119048073680ace4a18"}, + {file = "cffi-2.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:da68248800ad6320861f129cd9c1bf96ca849a2771a59e0344e88681905916f5"}, + {file = "cffi-2.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:4671d9dd5ec934cb9a73e7ee9676f9362aba54f7f34910956b84d727b0d73fb6"}, + {file = "cffi-2.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:00bdf7acc5f795150faa6957054fbbca2439db2f775ce831222b66f192f03beb"}, + {file = "cffi-2.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:45d5e886156860dc35862657e1494b9bae8dfa63bf56796f2fb56e1679fc0bca"}, + {file = "cffi-2.0.0-cp313-cp313-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:07b271772c100085dd28b74fa0cd81c8fb1a3ba18b21e03d7c27f3436a10606b"}, + {file = "cffi-2.0.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d48a880098c96020b02d5a1f7d9251308510ce8858940e6fa99ece33f610838b"}, + {file = "cffi-2.0.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:f93fd8e5c8c0a4aa1f424d6173f14a892044054871c771f8566e4008eaa359d2"}, + {file = "cffi-2.0.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:dd4f05f54a52fb558f1ba9f528228066954fee3ebe629fc1660d874d040ae5a3"}, + {file = "cffi-2.0.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c8d3b5532fc71b7a77c09192b4a5a200ea992702734a2e9279a37f2478236f26"}, + {file = "cffi-2.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d9b29c1f0ae438d5ee9acb31cadee00a58c46cc9c0b2f9038c6b0b3470877a8c"}, + {file = "cffi-2.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6d50360be4546678fc1b79ffe7a66265e28667840010348dd69a314145807a1b"}, + {file = "cffi-2.0.0-cp313-cp313-win32.whl", hash = "sha256:74a03b9698e198d47562765773b4a8309919089150a0bb17d829ad7b44b60d27"}, + {file = "cffi-2.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:19f705ada2530c1167abacb171925dd886168931e0a7b78f5bffcae5c6b5be75"}, + {file = "cffi-2.0.0-cp313-cp313-win_arm64.whl", hash = "sha256:256f80b80ca3853f90c21b23ee78cd008713787b1b1e93eae9f3d6a7134abd91"}, + {file = "cffi-2.0.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:fc33c5141b55ed366cfaad382df24fe7dcbc686de5be719b207bb248e3053dc5"}, + {file = "cffi-2.0.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c654de545946e0db659b3400168c9ad31b5d29593291482c43e3564effbcee13"}, + {file = "cffi-2.0.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:24b6f81f1983e6df8db3adc38562c83f7d4a0c36162885ec7f7b77c7dcbec97b"}, + {file = "cffi-2.0.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:12873ca6cb9b0f0d3a0da705d6086fe911591737a59f28b7936bdfed27c0d47c"}, + {file = "cffi-2.0.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:d9b97165e8aed9272a6bb17c01e3cc5871a594a446ebedc996e2397a1c1ea8ef"}, + {file = "cffi-2.0.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:afb8db5439b81cf9c9d0c80404b60c3cc9c3add93e114dcae767f1477cb53775"}, + {file = "cffi-2.0.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:737fe7d37e1a1bffe70bd5754ea763a62a066dc5913ca57e957824b72a85e205"}, + {file = "cffi-2.0.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:38100abb9d1b1435bc4cc340bb4489635dc2f0da7456590877030c9b3d40b0c1"}, + {file = "cffi-2.0.0-cp314-cp314-win32.whl", hash = "sha256:087067fa8953339c723661eda6b54bc98c5625757ea62e95eb4898ad5e776e9f"}, + {file = "cffi-2.0.0-cp314-cp314-win_amd64.whl", hash = "sha256:203a48d1fb583fc7d78a4c6655692963b860a417c0528492a6bc21f1aaefab25"}, + {file = "cffi-2.0.0-cp314-cp314-win_arm64.whl", hash = "sha256:dbd5c7a25a7cb98f5ca55d258b103a2054f859a46ae11aaf23134f9cc0d356ad"}, + {file = "cffi-2.0.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:9a67fc9e8eb39039280526379fb3a70023d77caec1852002b4da7e8b270c4dd9"}, + {file = "cffi-2.0.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:7a66c7204d8869299919db4d5069a82f1561581af12b11b3c9f48c584eb8743d"}, + {file = "cffi-2.0.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7cc09976e8b56f8cebd752f7113ad07752461f48a58cbba644139015ac24954c"}, + {file = "cffi-2.0.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:92b68146a71df78564e4ef48af17551a5ddd142e5190cdf2c5624d0c3ff5b2e8"}, + {file = "cffi-2.0.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:b1e74d11748e7e98e2f426ab176d4ed720a64412b6a15054378afdb71e0f37dc"}, + {file = "cffi-2.0.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:28a3a209b96630bca57cce802da70c266eb08c6e97e5afd61a75611ee6c64592"}, + {file = "cffi-2.0.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:7553fb2090d71822f02c629afe6042c299edf91ba1bf94951165613553984512"}, + {file = "cffi-2.0.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:6c6c373cfc5c83a975506110d17457138c8c63016b563cc9ed6e056a82f13ce4"}, + {file = "cffi-2.0.0-cp314-cp314t-win32.whl", hash = "sha256:1fc9ea04857caf665289b7a75923f2c6ed559b8298a1b8c49e59f7dd95c8481e"}, + {file = "cffi-2.0.0-cp314-cp314t-win_amd64.whl", hash = "sha256:d68b6cef7827e8641e8ef16f4494edda8b36104d79773a334beaa1e3521430f6"}, + {file = "cffi-2.0.0-cp314-cp314t-win_arm64.whl", hash = "sha256:0a1527a803f0a659de1af2e1fd700213caba79377e27e4693648c2923da066f9"}, + {file = "cffi-2.0.0-cp39-cp39-macosx_10_13_x86_64.whl", hash = "sha256:fe562eb1a64e67dd297ccc4f5addea2501664954f2692b69a76449ec7913ecbf"}, + {file = "cffi-2.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:de8dad4425a6ca6e4e5e297b27b5c824ecc7581910bf9aee86cb6835e6812aa7"}, + {file = "cffi-2.0.0-cp39-cp39-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:4647afc2f90d1ddd33441e5b0e85b16b12ddec4fca55f0d9671fef036ecca27c"}, + {file = "cffi-2.0.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:3f4d46d8b35698056ec29bca21546e1551a205058ae1a181d871e278b0b28165"}, + {file = "cffi-2.0.0-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:e6e73b9e02893c764e7e8d5bb5ce277f1a009cd5243f8228f75f842bf937c534"}, + {file = "cffi-2.0.0-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:cb527a79772e5ef98fb1d700678fe031e353e765d1ca2d409c92263c6d43e09f"}, + {file = "cffi-2.0.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:61d028e90346df14fedc3d1e5441df818d095f3b87d286825dfcbd6459b7ef63"}, + {file = "cffi-2.0.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:0f6084a0ea23d05d20c3edcda20c3d006f9b6f3fefeac38f59262e10cef47ee2"}, + {file = "cffi-2.0.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:1cd13c99ce269b3ed80b417dcd591415d3372bcac067009b6e0f59c7d4015e65"}, + {file = "cffi-2.0.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:89472c9762729b5ae1ad974b777416bfda4ac5642423fa93bd57a09204712322"}, + {file = "cffi-2.0.0-cp39-cp39-win32.whl", hash = "sha256:2081580ebb843f759b9f617314a24ed5738c51d2aee65d31e02f6f7a2b97707a"}, + {file = "cffi-2.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:b882b3df248017dba09d6b16defe9b5c407fe32fc7c65a9c69798e6175601be9"}, + {file = "cffi-2.0.0.tar.gz", hash = "sha256:44d1b5909021139fe36001ae048dbdde8214afa20200eda0f64c068cac5d5529"}, ] [package.dependencies] -pycparser = "*" +pycparser = {version = "*", markers = "implementation_name != \"PyPy\""} [[package]] name = "chardet" -version = "5.2.0" -description = "Universal encoding detector for Python 3" +version = "7.4.3" +description = "Universal character encoding detector" optional = false -python-versions = ">=3.7" +python-versions = ">=3.10" groups = ["docs"] files = [ - {file = "chardet-5.2.0-py3-none-any.whl", hash = "sha256:e1cf59446890a00105fe7b7912492ea04b6e6f06d4b742b2c788469e34c82970"}, - {file = "chardet-5.2.0.tar.gz", hash = "sha256:1b3b6ff479a8c414bc3fa2c0852995695c4a026dcd6d0633b2dd092ca39c1cf7"}, + {file = "chardet-7.4.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c0c79b13c9908ac7dfe0a74116ebc9a0f28b2319d23c32f3dfcdfbe1279c7eaf"}, + {file = "chardet-7.4.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:bba8bea1b28d927b3e99e47deafe53658d34497c0a891d95ff1ba8ff6663f01c"}, + {file = "chardet-7.4.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:23163921dccf3103ce59540b0443c106d2c0a0ff2e0503e05196f5e6fdea453f"}, + {file = "chardet-7.4.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cfb54563fe5f130da17c44c6a4e2e8052ba628e5ab4eab7ef8190f736f0f8f72"}, + {file = "chardet-7.4.3-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:3990fffcc6a6045f2234ab72752ad037e3b2d48c72037f244d42738db397eb75"}, + {file = "chardet-7.4.3-cp310-cp310-win_amd64.whl", hash = "sha256:c7116b0452994734ccff35e154b44240090eb0f4f74b9106292668133557c175"}, + {file = "chardet-7.4.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:25a862cddc6a9ac07023e808aedd297115345fbaabc2690479481ddc0f980e09"}, + {file = "chardet-7.4.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7005c88da26fd95d8abb8acbe6281d833e9a9181b03cf49b4546c4555389bd97"}, + {file = "chardet-7.4.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:dc50f28bad067393cce0af9091052c3b8df7a23115afd8ba7b2e0947f0cef1f8"}, + {file = "chardet-7.4.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4c3da294de1a681097848ab58bd3f2771a674f8039d2d87a5538b28856b815e9"}, + {file = "chardet-7.4.3-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:93c45e116dd51b66226a53ade3f9f635e870de5399b90e00ce45dcc311093bf4"}, + {file = "chardet-7.4.3-cp311-cp311-win_amd64.whl", hash = "sha256:ccc1f83ab4bcfb901cf39e0c4ba6bc6e726fc6264735f10e24ceb5cb47387578"}, + {file = "chardet-7.4.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:75d3c65cc16bddf40b8da1fd25ba84fca5f8070f2b14e86083653c1c85aee971"}, + {file = "chardet-7.4.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:29af5999f654e8729d251f1724a62b538b1262d9292cccaefddf8a02aae1ef6a"}, + {file = "chardet-7.4.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:626f00299ad62dfe937058a09572beed442ccc7b58f87aa667949b20fd3db235"}, + {file = "chardet-7.4.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9a4904dd5f071b7a7d7f50b4a67a86db3c902d243bf31708f1d5cde2f68239cb"}, + {file = "chardet-7.4.3-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:5d2879598bc220689e8ce509fe9c3f37ad2fca53a36be9c9bd91abdd91dd364f"}, + {file = "chardet-7.4.3-cp312-cp312-win_amd64.whl", hash = "sha256:4b2799bd58e7245cfa8d4ab2e8ad1d76a5c3a5b1f32318eb6acca4c69a3e7101"}, + {file = "chardet-7.4.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a9e4486df251b8962e86ea9f139ca235aa6e0542a00f7844c9a04160afb99aa9"}, + {file = "chardet-7.4.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4fbff1907925b0c5a1064cffb5e040cd5e338585c9c552625f30de6bc2f3107a"}, + {file = "chardet-7.4.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:365135eaf37ba65a828f8e668eb0a8c38c479dcbec724dc25f4dfd781049c357"}, + {file = "chardet-7.4.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bfc134b70c846c21ead8e43ada3ae1a805fff732f6922f8abcf2ff27b8f6493d"}, + {file = "chardet-7.4.3-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:9acd9988a93e09390f3cd231201ea7166c415eb8da1b735928990ffc05cb9fbb"}, + {file = "chardet-7.4.3-cp313-cp313-win_amd64.whl", hash = "sha256:e1b98790c284ff813f18f7cf7de5f05ea2435a080030c7f1a8318f3a4f80b131"}, + {file = "chardet-7.4.3-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:d892d3dcd652fdef53e3d6327d39b17c0df40a899dfc919abaeb64c974497531"}, + {file = "chardet-7.4.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:acc46d1b8b7d5783216afe15db56d1c179b9a40e5a1558bc13164c4fd20674c4"}, + {file = "chardet-7.4.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0ac3bf11c645734a1701a3804e43eabd98851838192267d08c353a834ab79fea"}, + {file = "chardet-7.4.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6e3bd9f936e04bae89c254262af08d9e5b98f805175ba1e29d454e6cba3107b7"}, + {file = "chardet-7.4.3-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:27cc23da03630cdecc9aa81a895aa86629c211f995cd57651f0fbc280717bf93"}, + {file = "chardet-7.4.3-cp314-cp314-win_amd64.whl", hash = "sha256:b95c934b9ad59e2ba8abb9be49df70d3ad1b0d95d864b9fdb7588d4fa8bd921c"}, + {file = "chardet-7.4.3-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:c77867f0c1cb8bd819502249fcdc500364aedb07881e11b743726fa2148e7b6e"}, + {file = "chardet-7.4.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:cf1efeaf65a6ef2f5b9cc3a1df6f08ba2831b369ccaa4c7018eaf90aa757bb11"}, + {file = "chardet-7.4.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9f3504c139a2ad544077dd2d9e412cd08b01786843d76997cd43bb6de311723c"}, + {file = "chardet-7.4.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:457f619882ba66327d4d8d14c6c342269bdb1e4e1c38e8117df941d14d351b04"}, + {file = "chardet-7.4.3-py3-none-any.whl", hash = "sha256:1173b74051570cf08099d7429d92e4882d375ad4217f92a6e5240ccfb26f231e"}, + {file = "chardet-7.4.3.tar.gz", hash = "sha256:cc1d4eb92a4ec1c2df3b490836ffa46922e599d34ce0bb75cf41fd2bf6303d56"}, ] [[package]] name = "charset-normalizer" -version = "3.4.2" +version = "3.4.7" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." optional = false python-versions = ">=3.7" groups = ["main", "dev", "docs"] files = [ - {file = "charset_normalizer-3.4.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7c48ed483eb946e6c04ccbe02c6b4d1d48e51944b6db70f697e089c193404941"}, - {file = "charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b2d318c11350e10662026ad0eb71bb51c7812fc8590825304ae0bdd4ac283acd"}, - {file = "charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9cbfacf36cb0ec2897ce0ebc5d08ca44213af24265bd56eca54bee7923c48fd6"}, - {file = "charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18dd2e350387c87dabe711b86f83c9c78af772c748904d372ade190b5c7c9d4d"}, - {file = "charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8075c35cd58273fee266c58c0c9b670947c19df5fb98e7b66710e04ad4e9ff86"}, - {file = "charset_normalizer-3.4.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5bf4545e3b962767e5c06fe1738f951f77d27967cb2caa64c28be7c4563e162c"}, - {file = "charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:7a6ab32f7210554a96cd9e33abe3ddd86732beeafc7a28e9955cdf22ffadbab0"}, - {file = "charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:b33de11b92e9f75a2b545d6e9b6f37e398d86c3e9e9653c4864eb7e89c5773ef"}, - {file = "charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:8755483f3c00d6c9a77f490c17e6ab0c8729e39e6390328e42521ef175380ae6"}, - {file = "charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:68a328e5f55ec37c57f19ebb1fdc56a248db2e3e9ad769919a58672958e8f366"}, - {file = "charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:21b2899062867b0e1fde9b724f8aecb1af14f2778d69aacd1a5a1853a597a5db"}, - {file = "charset_normalizer-3.4.2-cp310-cp310-win32.whl", hash = "sha256:e8082b26888e2f8b36a042a58307d5b917ef2b1cacab921ad3323ef91901c71a"}, - {file = "charset_normalizer-3.4.2-cp310-cp310-win_amd64.whl", hash = "sha256:f69a27e45c43520f5487f27627059b64aaf160415589230992cec34c5e18a509"}, - {file = "charset_normalizer-3.4.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:be1e352acbe3c78727a16a455126d9ff83ea2dfdcbc83148d2982305a04714c2"}, - {file = "charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa88ca0b1932e93f2d961bf3addbb2db902198dca337d88c89e1559e066e7645"}, - {file = "charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d524ba3f1581b35c03cb42beebab4a13e6cdad7b36246bd22541fa585a56cccd"}, - {file = "charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28a1005facc94196e1fb3e82a3d442a9d9110b8434fc1ded7a24a2983c9888d8"}, - {file = "charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fdb20a30fe1175ecabed17cbf7812f7b804b8a315a25f24678bcdf120a90077f"}, - {file = "charset_normalizer-3.4.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0f5d9ed7f254402c9e7d35d2f5972c9bbea9040e99cd2861bd77dc68263277c7"}, - {file = "charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:efd387a49825780ff861998cd959767800d54f8308936b21025326de4b5a42b9"}, - {file = "charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:f0aa37f3c979cf2546b73e8222bbfa3dc07a641585340179d768068e3455e544"}, - {file = "charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:e70e990b2137b29dc5564715de1e12701815dacc1d056308e2b17e9095372a82"}, - {file = "charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:0c8c57f84ccfc871a48a47321cfa49ae1df56cd1d965a09abe84066f6853b9c0"}, - {file = "charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6b66f92b17849b85cad91259efc341dce9c1af48e2173bf38a85c6329f1033e5"}, - {file = "charset_normalizer-3.4.2-cp311-cp311-win32.whl", hash = "sha256:daac4765328a919a805fa5e2720f3e94767abd632ae410a9062dff5412bae65a"}, - {file = "charset_normalizer-3.4.2-cp311-cp311-win_amd64.whl", hash = "sha256:e53efc7c7cee4c1e70661e2e112ca46a575f90ed9ae3fef200f2a25e954f4b28"}, - {file = "charset_normalizer-3.4.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0c29de6a1a95f24b9a1aa7aefd27d2487263f00dfd55a77719b530788f75cff7"}, - {file = "charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cddf7bd982eaa998934a91f69d182aec997c6c468898efe6679af88283b498d3"}, - {file = "charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcbe676a55d7445b22c10967bceaaf0ee69407fbe0ece4d032b6eb8d4565982a"}, - {file = "charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d41c4d287cfc69060fa91cae9683eacffad989f1a10811995fa309df656ec214"}, - {file = "charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e594135de17ab3866138f496755f302b72157d115086d100c3f19370839dd3a"}, - {file = "charset_normalizer-3.4.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf713fe9a71ef6fd5adf7a79670135081cd4431c2943864757f0fa3a65b1fafd"}, - {file = "charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a370b3e078e418187da8c3674eddb9d983ec09445c99a3a263c2011993522981"}, - {file = "charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a955b438e62efdf7e0b7b52a64dc5c3396e2634baa62471768a64bc2adb73d5c"}, - {file = "charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:7222ffd5e4de8e57e03ce2cef95a4c43c98fcb72ad86909abdfc2c17d227fc1b"}, - {file = "charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:bee093bf902e1d8fc0ac143c88902c3dfc8941f7ea1d6a8dd2bcb786d33db03d"}, - {file = "charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:dedb8adb91d11846ee08bec4c8236c8549ac721c245678282dcb06b221aab59f"}, - {file = "charset_normalizer-3.4.2-cp312-cp312-win32.whl", hash = "sha256:db4c7bf0e07fc3b7d89ac2a5880a6a8062056801b83ff56d8464b70f65482b6c"}, - {file = "charset_normalizer-3.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:5a9979887252a82fefd3d3ed2a8e3b937a7a809f65dcb1e068b090e165bbe99e"}, - {file = "charset_normalizer-3.4.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:926ca93accd5d36ccdabd803392ddc3e03e6d4cd1cf17deff3b989ab8e9dbcf0"}, - {file = "charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eba9904b0f38a143592d9fc0e19e2df0fa2e41c3c3745554761c5f6447eedabf"}, - {file = "charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3fddb7e2c84ac87ac3a947cb4e66d143ca5863ef48e4a5ecb83bd48619e4634e"}, - {file = "charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98f862da73774290f251b9df8d11161b6cf25b599a66baf087c1ffe340e9bfd1"}, - {file = "charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c9379d65defcab82d07b2a9dfbfc2e95bc8fe0ebb1b176a3190230a3ef0e07c"}, - {file = "charset_normalizer-3.4.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e635b87f01ebc977342e2697d05b56632f5f879a4f15955dfe8cef2448b51691"}, - {file = "charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:1c95a1e2902a8b722868587c0e1184ad5c55631de5afc0eb96bc4b0d738092c0"}, - {file = "charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ef8de666d6179b009dce7bcb2ad4c4a779f113f12caf8dc77f0162c29d20490b"}, - {file = "charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:32fc0341d72e0f73f80acb0a2c94216bd704f4f0bce10aedea38f30502b271ff"}, - {file = "charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:289200a18fa698949d2b39c671c2cc7a24d44096784e76614899a7ccf2574b7b"}, - {file = "charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4a476b06fbcf359ad25d34a057b7219281286ae2477cc5ff5e3f70a246971148"}, - {file = "charset_normalizer-3.4.2-cp313-cp313-win32.whl", hash = "sha256:aaeeb6a479c7667fbe1099af9617c83aaca22182d6cf8c53966491a0f1b7ffb7"}, - {file = "charset_normalizer-3.4.2-cp313-cp313-win_amd64.whl", hash = "sha256:aa6af9e7d59f9c12b33ae4e9450619cf2488e2bbe9b44030905877f0b2324980"}, - {file = "charset_normalizer-3.4.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1cad5f45b3146325bb38d6855642f6fd609c3f7cad4dbaf75549bf3b904d3184"}, - {file = "charset_normalizer-3.4.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b2680962a4848b3c4f155dc2ee64505a9c57186d0d56b43123b17ca3de18f0fa"}, - {file = "charset_normalizer-3.4.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:36b31da18b8890a76ec181c3cf44326bf2c48e36d393ca1b72b3f484113ea344"}, - {file = "charset_normalizer-3.4.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f4074c5a429281bf056ddd4c5d3b740ebca4d43ffffe2ef4bf4d2d05114299da"}, - {file = "charset_normalizer-3.4.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c9e36a97bee9b86ef9a1cf7bb96747eb7a15c2f22bdb5b516434b00f2a599f02"}, - {file = "charset_normalizer-3.4.2-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:1b1bde144d98e446b056ef98e59c256e9294f6b74d7af6846bf5ffdafd687a7d"}, - {file = "charset_normalizer-3.4.2-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:915f3849a011c1f593ab99092f3cecfcb4d65d8feb4a64cf1bf2d22074dc0ec4"}, - {file = "charset_normalizer-3.4.2-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:fb707f3e15060adf5b7ada797624a6c6e0138e2a26baa089df64c68ee98e040f"}, - {file = "charset_normalizer-3.4.2-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:25a23ea5c7edc53e0f29bae2c44fcb5a1aa10591aae107f2a2b2583a9c5cbc64"}, - {file = "charset_normalizer-3.4.2-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:770cab594ecf99ae64c236bc9ee3439c3f46be49796e265ce0cc8bc17b10294f"}, - {file = "charset_normalizer-3.4.2-cp37-cp37m-win32.whl", hash = "sha256:6a0289e4589e8bdfef02a80478f1dfcb14f0ab696b5a00e1f4b8a14a307a3c58"}, - {file = "charset_normalizer-3.4.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6fc1f5b51fa4cecaa18f2bd7a003f3dd039dd615cd69a2afd6d3b19aed6775f2"}, - {file = "charset_normalizer-3.4.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:76af085e67e56c8816c3ccf256ebd136def2ed9654525348cfa744b6802b69eb"}, - {file = "charset_normalizer-3.4.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e45ba65510e2647721e35323d6ef54c7974959f6081b58d4ef5d87c60c84919a"}, - {file = "charset_normalizer-3.4.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:046595208aae0120559a67693ecc65dd75d46f7bf687f159127046628178dc45"}, - {file = "charset_normalizer-3.4.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75d10d37a47afee94919c4fab4c22b9bc2a8bf7d4f46f87363bcf0573f3ff4f5"}, - {file = "charset_normalizer-3.4.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6333b3aa5a12c26b2a4d4e7335a28f1475e0e5e17d69d55141ee3cab736f66d1"}, - {file = "charset_normalizer-3.4.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e8323a9b031aa0393768b87f04b4164a40037fb2a3c11ac06a03ffecd3618027"}, - {file = "charset_normalizer-3.4.2-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:24498ba8ed6c2e0b56d4acbf83f2d989720a93b41d712ebd4f4979660db4417b"}, - {file = "charset_normalizer-3.4.2-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:844da2b5728b5ce0e32d863af26f32b5ce61bc4273a9c720a9f3aa9df73b1455"}, - {file = "charset_normalizer-3.4.2-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:65c981bdbd3f57670af8b59777cbfae75364b483fa8a9f420f08094531d54a01"}, - {file = "charset_normalizer-3.4.2-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:3c21d4fca343c805a52c0c78edc01e3477f6dd1ad7c47653241cf2a206d4fc58"}, - {file = "charset_normalizer-3.4.2-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:dc7039885fa1baf9be153a0626e337aa7ec8bf96b0128605fb0d77788ddc1681"}, - {file = "charset_normalizer-3.4.2-cp38-cp38-win32.whl", hash = "sha256:8272b73e1c5603666618805fe821edba66892e2870058c94c53147602eab29c7"}, - {file = "charset_normalizer-3.4.2-cp38-cp38-win_amd64.whl", hash = "sha256:70f7172939fdf8790425ba31915bfbe8335030f05b9913d7ae00a87d4395620a"}, - {file = "charset_normalizer-3.4.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:005fa3432484527f9732ebd315da8da8001593e2cf46a3d817669f062c3d9ed4"}, - {file = "charset_normalizer-3.4.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e92fca20c46e9f5e1bb485887d074918b13543b1c2a1185e69bb8d17ab6236a7"}, - {file = "charset_normalizer-3.4.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:50bf98d5e563b83cc29471fa114366e6806bc06bc7a25fd59641e41445327836"}, - {file = "charset_normalizer-3.4.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:721c76e84fe669be19c5791da68232ca2e05ba5185575086e384352e2c309597"}, - {file = "charset_normalizer-3.4.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82d8fd25b7f4675d0c47cf95b594d4e7b158aca33b76aa63d07186e13c0e0ab7"}, - {file = "charset_normalizer-3.4.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3daeac64d5b371dea99714f08ffc2c208522ec6b06fbc7866a450dd446f5c0f"}, - {file = "charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:dccab8d5fa1ef9bfba0590ecf4d46df048d18ffe3eec01eeb73a42e0d9e7a8ba"}, - {file = "charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:aaf27faa992bfee0264dc1f03f4c75e9fcdda66a519db6b957a3f826e285cf12"}, - {file = "charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:eb30abc20df9ab0814b5a2524f23d75dcf83cde762c161917a2b4b7b55b1e518"}, - {file = "charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:c72fbbe68c6f32f251bdc08b8611c7b3060612236e960ef848e0a517ddbe76c5"}, - {file = "charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:982bb1e8b4ffda883b3d0a521e23abcd6fd17418f6d2c4118d257a10199c0ce3"}, - {file = "charset_normalizer-3.4.2-cp39-cp39-win32.whl", hash = "sha256:43e0933a0eff183ee85833f341ec567c0980dae57c464d8a508e1b2ceb336471"}, - {file = "charset_normalizer-3.4.2-cp39-cp39-win_amd64.whl", hash = "sha256:d11b54acf878eef558599658b0ffca78138c8c3655cf4f3a4a673c437e67732e"}, - {file = "charset_normalizer-3.4.2-py3-none-any.whl", hash = "sha256:7f56930ab0abd1c45cd15be65cc741c28b1c9a34876ce8c17a2fa107810c0af0"}, - {file = "charset_normalizer-3.4.2.tar.gz", hash = "sha256:5baececa9ecba31eff645232d59845c07aa030f0c81ee70184a90d35099a0e63"}, + {file = "charset_normalizer-3.4.7-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cdd68a1fb318e290a2077696b7eb7a21a49163c455979c639bf5a5dcdc46617d"}, + {file = "charset_normalizer-3.4.7-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e17b8d5d6a8c47c85e68ca8379def1303fd360c3e22093a807cd34a71cd082b8"}, + {file = "charset_normalizer-3.4.7-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:511ef87c8aec0783e08ac18565a16d435372bc1ac25a91e6ac7f5ef2b0bff790"}, + {file = "charset_normalizer-3.4.7-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:007d05ec7321d12a40227aae9e2bc6dca73f3cb21058999a1df9e193555a9dcc"}, + {file = "charset_normalizer-3.4.7-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cf29836da5119f3c8a8a70667b0ef5fdca3bb12f80fd06487cfa575b3909b393"}, + {file = "charset_normalizer-3.4.7-cp310-cp310-manylinux_2_31_armv7l.whl", hash = "sha256:12d8baf840cc7889b37c7c770f478adea7adce3dcb3944d02ec87508e2dcf153"}, + {file = "charset_normalizer-3.4.7-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:d560742f3c0d62afaccf9f41fe485ed69bd7661a241f86a3ef0f0fb8b1a397af"}, + {file = "charset_normalizer-3.4.7-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:b14b2d9dac08e28bb8046a1a0434b1750eb221c8f5b87a68f4fa11a6f97b5e34"}, + {file = "charset_normalizer-3.4.7-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:bc17a677b21b3502a21f66a8cc64f5bfad4df8a0b8434d661666f8ce90ac3af1"}, + {file = "charset_normalizer-3.4.7-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:750e02e074872a3fad7f233b47734166440af3cdea0add3e95163110816d6752"}, + {file = "charset_normalizer-3.4.7-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:4e5163c14bffd570ef2affbfdd77bba66383890797df43dc8b4cc7d6f500bf53"}, + {file = "charset_normalizer-3.4.7-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:6ed74185b2db44f41ef35fd1617c5888e59792da9bbc9190d6c7300617182616"}, + {file = "charset_normalizer-3.4.7-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:94e1885b270625a9a828c9793b4d52a64445299baa1fea5a173bf1d3dd9a1a5a"}, + {file = "charset_normalizer-3.4.7-cp310-cp310-win32.whl", hash = "sha256:6785f414ae0f3c733c437e0f3929197934f526d19dfaa75e18fdb4f94c6fb374"}, + {file = "charset_normalizer-3.4.7-cp310-cp310-win_amd64.whl", hash = "sha256:6696b7688f54f5af4462118f0bfa7c1621eeb87154f77fa04b9295ce7a8f2943"}, + {file = "charset_normalizer-3.4.7-cp310-cp310-win_arm64.whl", hash = "sha256:66671f93accb62ed07da56613636f3641f1a12c13046ce91ffc923721f23c008"}, + {file = "charset_normalizer-3.4.7-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:7641bb8895e77f921102f72833904dcd9901df5d6d72a2ab8f31d04b7e51e4e7"}, + {file = "charset_normalizer-3.4.7-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:202389074300232baeb53ae2569a60901f7efadd4245cf3a3bf0617d60b439d7"}, + {file = "charset_normalizer-3.4.7-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:30b8d1d8c52a48c2c5690e152c169b673487a2a58de1ec7393196753063fcd5e"}, + {file = "charset_normalizer-3.4.7-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:532bc9bf33a68613fd7d65e4b1c71a6a38d7d42604ecf239c77392e9b4e8998c"}, + {file = "charset_normalizer-3.4.7-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2fe249cb4651fd12605b7288b24751d8bfd46d35f12a20b1ba33dea122e690df"}, + {file = "charset_normalizer-3.4.7-cp311-cp311-manylinux_2_31_armv7l.whl", hash = "sha256:65bcd23054beab4d166035cabbc868a09c1a49d1efe458fe8e4361215df40265"}, + {file = "charset_normalizer-3.4.7-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:08e721811161356f97b4059a9ba7bafb23ea5ee2255402c42881c214e173c6b4"}, + {file = "charset_normalizer-3.4.7-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:e060d01aec0a910bdccb8be71faf34e7799ce36950f8294c8bf612cba65a2c9e"}, + {file = "charset_normalizer-3.4.7-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:38c0109396c4cfc574d502df99742a45c72c08eff0a36158b6f04000043dbf38"}, + {file = "charset_normalizer-3.4.7-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:1c2a768fdd44ee4a9339a9b0b130049139b8ce3c01d2ce09f67f5a68048d477c"}, + {file = "charset_normalizer-3.4.7-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:1a87ca9d5df6fe460483d9a5bbf2b18f620cbed41b432e2bddb686228282d10b"}, + {file = "charset_normalizer-3.4.7-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:d635aab80466bc95771bb78d5370e74d36d1fe31467b6b29b8b57b2a3cd7d22c"}, + {file = "charset_normalizer-3.4.7-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ae196f021b5e7c78e918242d217db021ed2a6ace2bc6ae94c0fc596221c7f58d"}, + {file = "charset_normalizer-3.4.7-cp311-cp311-win32.whl", hash = "sha256:adb2597b428735679446b46c8badf467b4ca5f5056aae4d51a19f9570301b1ad"}, + {file = "charset_normalizer-3.4.7-cp311-cp311-win_amd64.whl", hash = "sha256:8e385e4267ab76874ae30db04c627faaaf0b509e1ccc11a95b3fc3e83f855c00"}, + {file = "charset_normalizer-3.4.7-cp311-cp311-win_arm64.whl", hash = "sha256:d4a48e5b3c2a489fae013b7589308a40146ee081f6f509e047e0e096084ceca1"}, + {file = "charset_normalizer-3.4.7-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:eca9705049ad3c7345d574e3510665cb2cf844c2f2dcfe675332677f081cbd46"}, + {file = "charset_normalizer-3.4.7-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6178f72c5508bfc5fd446a5905e698c6212932f25bcdd4b47a757a50605a90e2"}, + {file = "charset_normalizer-3.4.7-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:e1421b502d83040e6d7fb2fb18dff63957f720da3d77b2fbd3187ceb63755d7b"}, + {file = "charset_normalizer-3.4.7-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:edac0f1ab77644605be2cbba52e6b7f630731fc42b34cb0f634be1a6eface56a"}, + {file = "charset_normalizer-3.4.7-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5649fd1c7bade02f320a462fdefd0b4bd3ce036065836d4f42e0de958038e116"}, + {file = "charset_normalizer-3.4.7-cp312-cp312-manylinux_2_31_armv7l.whl", hash = "sha256:203104ed3e428044fd943bc4bf45fa73c0730391f9621e37fe39ecf477b128cb"}, + {file = "charset_normalizer-3.4.7-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:298930cec56029e05497a76988377cbd7457ba864beeea92ad7e844fe74cd1f1"}, + {file = "charset_normalizer-3.4.7-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:708838739abf24b2ceb208d0e22403dd018faeef86ddac04319a62ae884c4f15"}, + {file = "charset_normalizer-3.4.7-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:0f7eb884681e3938906ed0434f20c63046eacd0111c4ba96f27b76084cd679f5"}, + {file = "charset_normalizer-3.4.7-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:4dc1e73c36828f982bfe79fadf5919923f8a6f4df2860804db9a98c48824ce8d"}, + {file = "charset_normalizer-3.4.7-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:aed52fea0513bac0ccde438c188c8a471c4e0f457c2dd20cdbf6ea7a450046c7"}, + {file = "charset_normalizer-3.4.7-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:fea24543955a6a729c45a73fe90e08c743f0b3334bbf3201e6c4bc1b0c7fa464"}, + {file = "charset_normalizer-3.4.7-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:bb6d88045545b26da47aa879dd4a89a71d1dce0f0e549b1abcb31dfe4a8eac49"}, + {file = "charset_normalizer-3.4.7-cp312-cp312-win32.whl", hash = "sha256:2257141f39fe65a3fdf38aeccae4b953e5f3b3324f4ff0daf9f15b8518666a2c"}, + {file = "charset_normalizer-3.4.7-cp312-cp312-win_amd64.whl", hash = "sha256:5ed6ab538499c8644b8a3e18debabcd7ce684f3fa91cf867521a7a0279cab2d6"}, + {file = "charset_normalizer-3.4.7-cp312-cp312-win_arm64.whl", hash = "sha256:56be790f86bfb2c98fb742ce566dfb4816e5a83384616ab59c49e0604d49c51d"}, + {file = "charset_normalizer-3.4.7-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:f496c9c3cc02230093d8330875c4c3cdfc3b73612a5fd921c65d39cbcef08063"}, + {file = "charset_normalizer-3.4.7-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0ea948db76d31190bf08bd371623927ee1339d5f2a0b4b1b4a4439a65298703c"}, + {file = "charset_normalizer-3.4.7-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a277ab8928b9f299723bc1a2dabb1265911b1a76341f90a510368ca44ad9ab66"}, + {file = "charset_normalizer-3.4.7-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:3bec022aec2c514d9cf199522a802bd007cd588ab17ab2525f20f9c34d067c18"}, + {file = "charset_normalizer-3.4.7-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e044c39e41b92c845bc815e5ae4230804e8e7bc29e399b0437d64222d92809dd"}, + {file = "charset_normalizer-3.4.7-cp313-cp313-manylinux_2_31_armv7l.whl", hash = "sha256:f495a1652cf3fbab2eb0639776dad966c2fb874d79d87ca07f9d5f059b8bd215"}, + {file = "charset_normalizer-3.4.7-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e712b419df8ba5e42b226c510472b37bd57b38e897d3eca5e8cfd410a29fa859"}, + {file = "charset_normalizer-3.4.7-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:7804338df6fcc08105c7745f1502ba68d900f45fd770d5bdd5288ddccb8a42d8"}, + {file = "charset_normalizer-3.4.7-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:481551899c856c704d58119b5025793fa6730adda3571971af568f66d2424bb5"}, + {file = "charset_normalizer-3.4.7-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:f59099f9b66f0d7145115e6f80dd8b1d847176df89b234a5a6b3f00437aa0832"}, + {file = "charset_normalizer-3.4.7-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:f59ad4c0e8f6bba240a9bb85504faa1ab438237199d4cce5f622761507b8f6a6"}, + {file = "charset_normalizer-3.4.7-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:3dedcc22d73ec993f42055eff4fcfed9318d1eeb9a6606c55892a26964964e48"}, + {file = "charset_normalizer-3.4.7-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:64f02c6841d7d83f832cd97ccf8eb8a906d06eb95d5276069175c696b024b60a"}, + {file = "charset_normalizer-3.4.7-cp313-cp313-win32.whl", hash = "sha256:4042d5c8f957e15221d423ba781e85d553722fc4113f523f2feb7b188cc34c5e"}, + {file = "charset_normalizer-3.4.7-cp313-cp313-win_amd64.whl", hash = "sha256:3946fa46a0cf3e4c8cb1cc52f56bb536310d34f25f01ca9b6c16afa767dab110"}, + {file = "charset_normalizer-3.4.7-cp313-cp313-win_arm64.whl", hash = "sha256:80d04837f55fc81da168b98de4f4b797ef007fc8a79ab71c6ec9bc4dd662b15b"}, + {file = "charset_normalizer-3.4.7-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:c36c333c39be2dbca264d7803333c896ab8fa7d4d6f0ab7edb7dfd7aea6e98c0"}, + {file = "charset_normalizer-3.4.7-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1c2aed2e5e41f24ea8ef1590b8e848a79b56f3a5564a65ceec43c9d692dc7d8a"}, + {file = "charset_normalizer-3.4.7-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:54523e136b8948060c0fa0bc7b1b50c32c186f2fceee897a495406bb6e311d2b"}, + {file = "charset_normalizer-3.4.7-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:715479b9a2802ecac752a3b0efa2b0b60285cf962ee38414211abdfccc233b41"}, + {file = "charset_normalizer-3.4.7-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bd6c2a1c7573c64738d716488d2cdd3c00e340e4835707d8fdb8dc1a66ef164e"}, + {file = "charset_normalizer-3.4.7-cp314-cp314-manylinux_2_31_armv7l.whl", hash = "sha256:c45e9440fb78f8ddabcf714b68f936737a121355bf59f3907f4e17721b9d1aae"}, + {file = "charset_normalizer-3.4.7-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:3534e7dcbdcf757da6b85a0bbf5b6868786d5982dd959b065e65481644817a18"}, + {file = "charset_normalizer-3.4.7-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:e8ac484bf18ce6975760921bb6148041faa8fef0547200386ea0b52b5d27bf7b"}, + {file = "charset_normalizer-3.4.7-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:a5fe03b42827c13cdccd08e6c0247b6a6d4b5e3cdc53fd1749f5896adcdc2356"}, + {file = "charset_normalizer-3.4.7-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:2d6eb928e13016cea4f1f21d1e10c1cebd5a421bc57ddf5b1142ae3f86824fab"}, + {file = "charset_normalizer-3.4.7-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:e74327fb75de8986940def6e8dee4f127cc9752bee7355bb323cc5b2659b6d46"}, + {file = "charset_normalizer-3.4.7-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:d6038d37043bced98a66e68d3aa2b6a35505dc01328cd65217cefe82f25def44"}, + {file = "charset_normalizer-3.4.7-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:7579e913a5339fb8fa133f6bbcfd8e6749696206cf05acdbdca71a1b436d8e72"}, + {file = "charset_normalizer-3.4.7-cp314-cp314-win32.whl", hash = "sha256:5b77459df20e08151cd6f8b9ef8ef1f961ef73d85c21a555c7eed5b79410ec10"}, + {file = "charset_normalizer-3.4.7-cp314-cp314-win_amd64.whl", hash = "sha256:92a0a01ead5e668468e952e4238cccd7c537364eb7d851ab144ab6627dbbe12f"}, + {file = "charset_normalizer-3.4.7-cp314-cp314-win_arm64.whl", hash = "sha256:67f6279d125ca0046a7fd386d01b311c6363844deac3e5b069b514ba3e63c246"}, + {file = "charset_normalizer-3.4.7-cp314-cp314t-macosx_10_15_universal2.whl", hash = "sha256:effc3f449787117233702311a1b7d8f59cba9ced946ba727bdc329ec69028e24"}, + {file = "charset_normalizer-3.4.7-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fbccdc05410c9ee21bbf16a35f4c1d16123dcdeb8a1d38f33654fa21d0234f79"}, + {file = "charset_normalizer-3.4.7-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:733784b6d6def852c814bce5f318d25da2ee65dd4839a0718641c696e09a2960"}, + {file = "charset_normalizer-3.4.7-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a89c23ef8d2c6b27fd200a42aa4ac72786e7c60d40efdc76e6011260b6e949c4"}, + {file = "charset_normalizer-3.4.7-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6c114670c45346afedc0d947faf3c7f701051d2518b943679c8ff88befe14f8e"}, + {file = "charset_normalizer-3.4.7-cp314-cp314t-manylinux_2_31_armv7l.whl", hash = "sha256:a180c5e59792af262bf263b21a3c49353f25945d8d9f70628e73de370d55e1e1"}, + {file = "charset_normalizer-3.4.7-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:3c9a494bc5ec77d43cea229c4f6db1e4d8fe7e1bbffa8b6f0f0032430ff8ab44"}, + {file = "charset_normalizer-3.4.7-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:8d828b6667a32a728a1ad1d93957cdf37489c57b97ae6c4de2860fa749b8fc1e"}, + {file = "charset_normalizer-3.4.7-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:cf1493cd8607bec4d8a7b9b004e699fcf8f9103a9284cc94962cb73d20f9d4a3"}, + {file = "charset_normalizer-3.4.7-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:0c96c3b819b5c3e9e165495db84d41914d6894d55181d2d108cc1a69bfc9cce0"}, + {file = "charset_normalizer-3.4.7-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:752a45dc4a6934060b3b0dab47e04edc3326575f82be64bc4fc293914566503e"}, + {file = "charset_normalizer-3.4.7-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:8778f0c7a52e56f75d12dae53ae320fae900a8b9b4164b981b9c5ce059cd1fcb"}, + {file = "charset_normalizer-3.4.7-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:ce3412fbe1e31eb81ea42f4169ed94861c56e643189e1e75f0041f3fe7020abe"}, + {file = "charset_normalizer-3.4.7-cp314-cp314t-win32.whl", hash = "sha256:c03a41a8784091e67a39648f70c5f97b5b6a37f216896d44d2cdcb82615339a0"}, + {file = "charset_normalizer-3.4.7-cp314-cp314t-win_amd64.whl", hash = "sha256:03853ed82eeebbce3c2abfdbc98c96dc205f32a79627688ac9a27370ea61a49c"}, + {file = "charset_normalizer-3.4.7-cp314-cp314t-win_arm64.whl", hash = "sha256:c35abb8bfff0185efac5878da64c45dafd2b37fb0383add1be155a763c1f083d"}, + {file = "charset_normalizer-3.4.7-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e5f4d355f0a2b1a31bc3edec6795b46324349c9cb25eed068049e4f472fb4259"}, + {file = "charset_normalizer-3.4.7-cp38-cp38-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:16d971e29578a5e97d7117866d15889a4a07befe0e87e703ed63cd90cb348c01"}, + {file = "charset_normalizer-3.4.7-cp38-cp38-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:dca4bbc466a95ba9c0234ef56d7dd9509f63da22274589ebd4ed7f1f4d4c54e3"}, + {file = "charset_normalizer-3.4.7-cp38-cp38-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:e80c8378d8f3d83cd3164da1ad2df9e37a666cdde7b1cb2298ed0b558064be30"}, + {file = "charset_normalizer-3.4.7-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:36836d6ff945a00b88ba1e4572d721e60b5b8c98c155d465f56ad19d68f23734"}, + {file = "charset_normalizer-3.4.7-cp38-cp38-manylinux_2_31_armv7l.whl", hash = "sha256:bd9b23791fe793e4968dba0c447e12f78e425c59fc0e3b97f6450f4781f3ee60"}, + {file = "charset_normalizer-3.4.7-cp38-cp38-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:aef65cd602a6d0e0ff6f9930fcb1c8fec60dd2cfcb6facaf4bdb0e5873042db0"}, + {file = "charset_normalizer-3.4.7-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:82b271f5137d07749f7bf32f70b17ab6eaabedd297e75dce75081a24f76eb545"}, + {file = "charset_normalizer-3.4.7-cp38-cp38-musllinux_1_2_armv7l.whl", hash = "sha256:1efde3cae86c8c273f1eb3b287be7d8499420cf2fe7585c41d370d3e790054a5"}, + {file = "charset_normalizer-3.4.7-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:c593052c465475e64bbfe5dbd81680f64a67fdc752c56d7a0ae205dc8aeefe0f"}, + {file = "charset_normalizer-3.4.7-cp38-cp38-musllinux_1_2_riscv64.whl", hash = "sha256:af21eb4409a119e365397b2adbaca4c9ccab56543a65d5dbd9f920d6ac29f686"}, + {file = "charset_normalizer-3.4.7-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:84c018e49c3bf790f9c2771c45e9313a08c2c2a6342b162cd650258b57817706"}, + {file = "charset_normalizer-3.4.7-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:dd915403e231e6b1809fe9b6d9fc55cf8fb5e02765ac625d9cd623342a7905d7"}, + {file = "charset_normalizer-3.4.7-cp38-cp38-win32.whl", hash = "sha256:320ade88cfb846b8cd6b4ddf5ee9e80ee0c1f52401f2456b84ae1ae6a1a5f207"}, + {file = "charset_normalizer-3.4.7-cp38-cp38-win_amd64.whl", hash = "sha256:1dc8b0ea451d6e69735094606991f32867807881400f808a106ee1d963c46a83"}, + {file = "charset_normalizer-3.4.7-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:177a0ba5f0211d488e295aaf82707237e331c24788d8d76c96c5a41594723217"}, + {file = "charset_normalizer-3.4.7-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6e0d51f618228538a3e8f46bd246f87a6cd030565e015803691603f55e12afb5"}, + {file = "charset_normalizer-3.4.7-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:14265bfe1f09498b9d8ec91e9ec9fa52775edf90fcbde092b25f4a33d444fea9"}, + {file = "charset_normalizer-3.4.7-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:87fad7d9ba98c86bcb41b2dc8dbb326619be2562af1f8ff50776a39e55721c5a"}, + {file = "charset_normalizer-3.4.7-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f22dec1690b584cea26fade98b2435c132c1b5f68e39f5a0b7627cd7ae31f1dc"}, + {file = "charset_normalizer-3.4.7-cp39-cp39-manylinux_2_31_armv7l.whl", hash = "sha256:d61f00a0869d77422d9b2aba989e2d24afa6ffd552af442e0e58de4f35ea6d00"}, + {file = "charset_normalizer-3.4.7-cp39-cp39-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:6370e8686f662e6a3941ee48ed4742317cafbe5707e36406e9df792cdb535776"}, + {file = "charset_normalizer-3.4.7-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:a6c5863edfbe888d9eff9c8b8087354e27618d9da76425c119293f11712a6319"}, + {file = "charset_normalizer-3.4.7-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:ed065083d0898c9d5b4bbec7b026fd755ff7454e6e8b73a67f8c744b13986e24"}, + {file = "charset_normalizer-3.4.7-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:2cd4a60d0e2fb04537162c62bbbb4182f53541fe0ede35cdf270a1c1e723cc42"}, + {file = "charset_normalizer-3.4.7-cp39-cp39-musllinux_1_2_riscv64.whl", hash = "sha256:813c0e0132266c08eb87469a642cb30aaff57c5f426255419572aaeceeaa7bf4"}, + {file = "charset_normalizer-3.4.7-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:07d9e39b01743c3717745f4c530a6349eadbfa043c7577eef86c502c15df2c67"}, + {file = "charset_normalizer-3.4.7-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:c0f081d69a6e58272819b70288d3221a6ee64b98df852631c80f293514d3b274"}, + {file = "charset_normalizer-3.4.7-cp39-cp39-win32.whl", hash = "sha256:8751d2787c9131302398b11e6c8068053dcb55d5a8964e114b6e196cf16cb366"}, + {file = "charset_normalizer-3.4.7-cp39-cp39-win_amd64.whl", hash = "sha256:12a6fff75f6bc66711b73a2f0addfc4c8c15a20e805146a02d147a318962c444"}, + {file = "charset_normalizer-3.4.7-cp39-cp39-win_arm64.whl", hash = "sha256:bb8cc7534f51d9a017b93e3e85b260924f909601c3df002bcdb58ddb4dc41a5c"}, + {file = "charset_normalizer-3.4.7-py3-none-any.whl", hash = "sha256:3dce51d0f5e7951f8bb4900c257dad282f49190fdbebecd4ba99bcc41fef404d"}, + {file = "charset_normalizer-3.4.7.tar.gz", hash = "sha256:ae89db9e5f98a11a4bf50407d4363e7b09b31e55bc117b4f7d80aab97ba009e5"}, ] [[package]] name = "click" -version = "8.2.1" +version = "8.3.3" description = "Composable command line interface toolkit" optional = false python-versions = ">=3.10" groups = ["main"] files = [ - {file = "click-8.2.1-py3-none-any.whl", hash = "sha256:61a3265b914e850b85317d0b3109c7f8cd35a670f963866005d6ef1d5175a12b"}, - {file = "click-8.2.1.tar.gz", hash = "sha256:27c491cc05d968d271d5a1db13e3b5a184636d9d930f148c50b038f0d0646202"}, + {file = "click-8.3.3-py3-none-any.whl", hash = "sha256:a2bf429bb3033c89fa4936ffb35d5cb471e3719e1f3c8a7c3fff0b8314305613"}, + {file = "click-8.3.3.tar.gz", hash = "sha256:398329ad4837b2ff7cbe1dd166a4c0f8900c3ca3a218de04466f38f6497f18a2"}, ] [package.dependencies] @@ -421,100 +502,118 @@ markers = {main = "platform_system == \"Windows\""} [[package]] name = "coverage" -version = "7.10.1" +version = "7.13.5" description = "Code coverage measurement for Python" optional = false -python-versions = ">=3.9" +python-versions = ">=3.10" groups = ["dev"] files = [ - {file = "coverage-7.10.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1c86eb388bbd609d15560e7cc0eb936c102b6f43f31cf3e58b4fd9afe28e1372"}, - {file = "coverage-7.10.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6b4ba0f488c1bdb6bd9ba81da50715a372119785458831c73428a8566253b86b"}, - {file = "coverage-7.10.1-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:083442ecf97d434f0cb3b3e3676584443182653da08b42e965326ba12d6b5f2a"}, - {file = "coverage-7.10.1-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:c1a40c486041006b135759f59189385da7c66d239bad897c994e18fd1d0c128f"}, - {file = "coverage-7.10.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3beb76e20b28046989300c4ea81bf690df84ee98ade4dc0bbbf774a28eb98440"}, - {file = "coverage-7.10.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:bc265a7945e8d08da28999ad02b544963f813a00f3ed0a7a0ce4165fd77629f8"}, - {file = "coverage-7.10.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:47c91f32ba4ac46f1e224a7ebf3f98b4b24335bad16137737fe71a5961a0665c"}, - {file = "coverage-7.10.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:1a108dd78ed185020f66f131c60078f3fae3f61646c28c8bb4edd3fa121fc7fc"}, - {file = "coverage-7.10.1-cp310-cp310-win32.whl", hash = "sha256:7092cc82382e634075cc0255b0b69cb7cada7c1f249070ace6a95cb0f13548ef"}, - {file = "coverage-7.10.1-cp310-cp310-win_amd64.whl", hash = "sha256:ac0c5bba938879c2fc0bc6c1b47311b5ad1212a9dcb8b40fe2c8110239b7faed"}, - {file = "coverage-7.10.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b45e2f9d5b0b5c1977cb4feb5f594be60eb121106f8900348e29331f553a726f"}, - {file = "coverage-7.10.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3a7a4d74cb0f5e3334f9aa26af7016ddb94fb4bfa11b4a573d8e98ecba8c34f1"}, - {file = "coverage-7.10.1-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:d4b0aab55ad60ead26159ff12b538c85fbab731a5e3411c642b46c3525863437"}, - {file = "coverage-7.10.1-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:dcc93488c9ebd229be6ee1f0d9aad90da97b33ad7e2912f5495804d78a3cd6b7"}, - {file = "coverage-7.10.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:aa309df995d020f3438407081b51ff527171cca6772b33cf8f85344b8b4b8770"}, - {file = "coverage-7.10.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:cfb8b9d8855c8608f9747602a48ab525b1d320ecf0113994f6df23160af68262"}, - {file = "coverage-7.10.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:320d86da829b012982b414c7cdda65f5d358d63f764e0e4e54b33097646f39a3"}, - {file = "coverage-7.10.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:dc60ddd483c556590da1d9482a4518292eec36dd0e1e8496966759a1f282bcd0"}, - {file = "coverage-7.10.1-cp311-cp311-win32.whl", hash = "sha256:4fcfe294f95b44e4754da5b58be750396f2b1caca8f9a0e78588e3ef85f8b8be"}, - {file = "coverage-7.10.1-cp311-cp311-win_amd64.whl", hash = "sha256:efa23166da3fe2915f8ab452dde40319ac84dc357f635737174a08dbd912980c"}, - {file = "coverage-7.10.1-cp311-cp311-win_arm64.whl", hash = "sha256:d12b15a8c3759e2bb580ffa423ae54be4f184cf23beffcbd641f4fe6e1584293"}, - {file = "coverage-7.10.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6b7dc7f0a75a7eaa4584e5843c873c561b12602439d2351ee28c7478186c4da4"}, - {file = "coverage-7.10.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:607f82389f0ecafc565813aa201a5cade04f897603750028dd660fb01797265e"}, - {file = "coverage-7.10.1-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:f7da31a1ba31f1c1d4d5044b7c5813878adae1f3af8f4052d679cc493c7328f4"}, - {file = "coverage-7.10.1-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:51fe93f3fe4f5d8483d51072fddc65e717a175490804e1942c975a68e04bf97a"}, - {file = "coverage-7.10.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3e59d00830da411a1feef6ac828b90bbf74c9b6a8e87b8ca37964925bba76dbe"}, - {file = "coverage-7.10.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:924563481c27941229cb4e16eefacc35da28563e80791b3ddc5597b062a5c386"}, - {file = "coverage-7.10.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:ca79146ee421b259f8131f153102220b84d1a5e6fb9c8aed13b3badfd1796de6"}, - {file = "coverage-7.10.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2b225a06d227f23f386fdc0eab471506d9e644be699424814acc7d114595495f"}, - {file = "coverage-7.10.1-cp312-cp312-win32.whl", hash = "sha256:5ba9a8770effec5baaaab1567be916c87d8eea0c9ad11253722d86874d885eca"}, - {file = "coverage-7.10.1-cp312-cp312-win_amd64.whl", hash = "sha256:9eb245a8d8dd0ad73b4062135a251ec55086fbc2c42e0eb9725a9b553fba18a3"}, - {file = "coverage-7.10.1-cp312-cp312-win_arm64.whl", hash = "sha256:7718060dd4434cc719803a5e526838a5d66e4efa5dc46d2b25c21965a9c6fcc4"}, - {file = "coverage-7.10.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ebb08d0867c5a25dffa4823377292a0ffd7aaafb218b5d4e2e106378b1061e39"}, - {file = "coverage-7.10.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f32a95a83c2e17422f67af922a89422cd24c6fa94041f083dd0bb4f6057d0bc7"}, - {file = "coverage-7.10.1-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:c4c746d11c8aba4b9f58ca8bfc6fbfd0da4efe7960ae5540d1a1b13655ee8892"}, - {file = "coverage-7.10.1-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:7f39edd52c23e5c7ed94e0e4bf088928029edf86ef10b95413e5ea670c5e92d7"}, - {file = "coverage-7.10.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ab6e19b684981d0cd968906e293d5628e89faacb27977c92f3600b201926b994"}, - {file = "coverage-7.10.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:5121d8cf0eacb16133501455d216bb5f99899ae2f52d394fe45d59229e6611d0"}, - {file = "coverage-7.10.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:df1c742ca6f46a6f6cbcaef9ac694dc2cb1260d30a6a2f5c68c5f5bcfee1cfd7"}, - {file = "coverage-7.10.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:40f9a38676f9c073bf4b9194707aa1eb97dca0e22cc3766d83879d72500132c7"}, - {file = "coverage-7.10.1-cp313-cp313-win32.whl", hash = "sha256:2348631f049e884839553b9974f0821d39241c6ffb01a418efce434f7eba0fe7"}, - {file = "coverage-7.10.1-cp313-cp313-win_amd64.whl", hash = "sha256:4072b31361b0d6d23f750c524f694e1a417c1220a30d3ef02741eed28520c48e"}, - {file = "coverage-7.10.1-cp313-cp313-win_arm64.whl", hash = "sha256:3e31dfb8271937cab9425f19259b1b1d1f556790e98eb266009e7a61d337b6d4"}, - {file = "coverage-7.10.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:1c4f679c6b573a5257af6012f167a45be4c749c9925fd44d5178fd641ad8bf72"}, - {file = "coverage-7.10.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:871ebe8143da284bd77b84a9136200bd638be253618765d21a1fce71006d94af"}, - {file = "coverage-7.10.1-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:998c4751dabf7d29b30594af416e4bf5091f11f92a8d88eb1512c7ba136d1ed7"}, - {file = "coverage-7.10.1-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:780f750a25e7749d0af6b3631759c2c14f45de209f3faaa2398312d1c7a22759"}, - {file = "coverage-7.10.1-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:590bdba9445df4763bdbebc928d8182f094c1f3947a8dc0fc82ef014dbdd8324"}, - {file = "coverage-7.10.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9b2df80cb6a2af86d300e70acb82e9b79dab2c1e6971e44b78dbfc1a1e736b53"}, - {file = "coverage-7.10.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:d6a558c2725bfb6337bf57c1cd366c13798bfd3bfc9e3dd1f4a6f6fc95a4605f"}, - {file = "coverage-7.10.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:e6150d167f32f2a54690e572e0a4c90296fb000a18e9b26ab81a6489e24e78dd"}, - {file = "coverage-7.10.1-cp313-cp313t-win32.whl", hash = "sha256:d946a0c067aa88be4a593aad1236493313bafaa27e2a2080bfe88db827972f3c"}, - {file = "coverage-7.10.1-cp313-cp313t-win_amd64.whl", hash = "sha256:e37c72eaccdd5ed1130c67a92ad38f5b2af66eeff7b0abe29534225db2ef7b18"}, - {file = "coverage-7.10.1-cp313-cp313t-win_arm64.whl", hash = "sha256:89ec0ffc215c590c732918c95cd02b55c7d0f569d76b90bb1a5e78aa340618e4"}, - {file = "coverage-7.10.1-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:166d89c57e877e93d8827dac32cedae6b0277ca684c6511497311249f35a280c"}, - {file = "coverage-7.10.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:bed4a2341b33cd1a7d9ffc47df4a78ee61d3416d43b4adc9e18b7d266650b83e"}, - {file = "coverage-7.10.1-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:ddca1e4f5f4c67980533df01430184c19b5359900e080248bbf4ed6789584d8b"}, - {file = "coverage-7.10.1-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:37b69226001d8b7de7126cad7366b0778d36777e4d788c66991455ba817c5b41"}, - {file = "coverage-7.10.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b2f22102197bcb1722691296f9e589f02b616f874e54a209284dd7b9294b0b7f"}, - {file = "coverage-7.10.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:1e0c768b0f9ac5839dac5cf88992a4bb459e488ee8a1f8489af4cb33b1af00f1"}, - {file = "coverage-7.10.1-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:991196702d5e0b120a8fef2664e1b9c333a81d36d5f6bcf6b225c0cf8b0451a2"}, - {file = "coverage-7.10.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:ae8e59e5f4fd85d6ad34c2bb9d74037b5b11be072b8b7e9986beb11f957573d4"}, - {file = "coverage-7.10.1-cp314-cp314-win32.whl", hash = "sha256:042125c89cf74a074984002e165d61fe0e31c7bd40ebb4bbebf07939b5924613"}, - {file = "coverage-7.10.1-cp314-cp314-win_amd64.whl", hash = "sha256:a22c3bfe09f7a530e2c94c87ff7af867259c91bef87ed2089cd69b783af7b84e"}, - {file = "coverage-7.10.1-cp314-cp314-win_arm64.whl", hash = "sha256:ee6be07af68d9c4fca4027c70cea0c31a0f1bc9cb464ff3c84a1f916bf82e652"}, - {file = "coverage-7.10.1-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:d24fb3c0c8ff0d517c5ca5de7cf3994a4cd559cde0315201511dbfa7ab528894"}, - {file = "coverage-7.10.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:1217a54cfd79be20512a67ca81c7da3f2163f51bbfd188aab91054df012154f5"}, - {file = "coverage-7.10.1-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:51f30da7a52c009667e02f125737229d7d8044ad84b79db454308033a7808ab2"}, - {file = "coverage-7.10.1-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:ed3718c757c82d920f1c94089066225ca2ad7f00bb904cb72b1c39ebdd906ccb"}, - {file = "coverage-7.10.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cc452481e124a819ced0c25412ea2e144269ef2f2534b862d9f6a9dae4bda17b"}, - {file = "coverage-7.10.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:9d6f494c307e5cb9b1e052ec1a471060f1dea092c8116e642e7a23e79d9388ea"}, - {file = "coverage-7.10.1-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:fc0e46d86905ddd16b85991f1f4919028092b4e511689bbdaff0876bd8aab3dd"}, - {file = "coverage-7.10.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:80b9ccd82e30038b61fc9a692a8dc4801504689651b281ed9109f10cc9fe8b4d"}, - {file = "coverage-7.10.1-cp314-cp314t-win32.whl", hash = "sha256:e58991a2b213417285ec866d3cd32db17a6a88061a985dbb7e8e8f13af429c47"}, - {file = "coverage-7.10.1-cp314-cp314t-win_amd64.whl", hash = "sha256:e88dd71e4ecbc49d9d57d064117462c43f40a21a1383507811cf834a4a620651"}, - {file = "coverage-7.10.1-cp314-cp314t-win_arm64.whl", hash = "sha256:1aadfb06a30c62c2eb82322171fe1f7c288c80ca4156d46af0ca039052814bab"}, - {file = "coverage-7.10.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:57b6e8789cbefdef0667e4a94f8ffa40f9402cee5fc3b8e4274c894737890145"}, - {file = "coverage-7.10.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:85b22a9cce00cb03156334da67eb86e29f22b5e93876d0dd6a98646bb8a74e53"}, - {file = "coverage-7.10.1-cp39-cp39-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:97b6983a2f9c76d345ca395e843a049390b39652984e4a3b45b2442fa733992d"}, - {file = "coverage-7.10.1-cp39-cp39-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:ddf2a63b91399a1c2f88f40bc1705d5a7777e31c7e9eb27c602280f477b582ba"}, - {file = "coverage-7.10.1-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:47ab6dbbc31a14c5486420c2c1077fcae692097f673cf5be9ddbec8cdaa4cdbc"}, - {file = "coverage-7.10.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:21eb7d8b45d3700e7c2936a736f732794c47615a20f739f4133d5230a6512a88"}, - {file = "coverage-7.10.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:283005bb4d98ae33e45f2861cd2cde6a21878661c9ad49697f6951b358a0379b"}, - {file = "coverage-7.10.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:fefe31d61d02a8b2c419700b1fade9784a43d726de26495f243b663cd9fe1513"}, - {file = "coverage-7.10.1-cp39-cp39-win32.whl", hash = "sha256:e8ab8e4c7ec7f8a55ac05b5b715a051d74eac62511c6d96d5bb79aaafa3b04cf"}, - {file = "coverage-7.10.1-cp39-cp39-win_amd64.whl", hash = "sha256:c36baa0ecde742784aa76c2b816466d3ea888d5297fda0edbac1bf48fa94688a"}, - {file = "coverage-7.10.1-py3-none-any.whl", hash = "sha256:fa2a258aa6bf188eb9a8948f7102a83da7c430a0dce918dbd8b60ef8fcb772d7"}, - {file = "coverage-7.10.1.tar.gz", hash = "sha256:ae2b4856f29ddfe827106794f3589949a57da6f0d38ab01e24ec35107979ba57"}, + {file = "coverage-7.13.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e0723d2c96324561b9aa76fb982406e11d93cdb388a7a7da2b16e04719cf7ca5"}, + {file = "coverage-7.13.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:52f444e86475992506b32d4e5ca55c24fc88d73bcbda0e9745095b28ef4dc0cf"}, + {file = "coverage-7.13.5-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:704de6328e3d612a8f6c07000a878ff38181ec3263d5a11da1db294fa6a9bdf8"}, + {file = "coverage-7.13.5-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:a1a6d79a14e1ec1832cabc833898636ad5f3754a678ef8bb4908515208bf84f4"}, + {file = "coverage-7.13.5-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:79060214983769c7ba3f0cee10b54c97609dca4d478fa1aa32b914480fd5738d"}, + {file = "coverage-7.13.5-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:356e76b46783a98c2a2fe81ec79df4883a1e62895ea952968fb253c114e7f930"}, + {file = "coverage-7.13.5-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:0cef0cdec915d11254a7f549c1170afecce708d30610c6abdded1f74e581666d"}, + {file = "coverage-7.13.5-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:dc022073d063b25a402454e5712ef9e007113e3a676b96c5f29b2bda29352f40"}, + {file = "coverage-7.13.5-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:9b74db26dfea4f4e50d48a4602207cd1e78be33182bc9cbf22da94f332f99878"}, + {file = "coverage-7.13.5-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:ad146744ca4fd09b50c482650e3c1b1f4dfa1d4792e0a04a369c7f23336f0400"}, + {file = "coverage-7.13.5-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:c555b48be1853fe3997c11c4bd521cdd9a9612352de01fa4508f16ec341e6fe0"}, + {file = "coverage-7.13.5-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:7034b5c56a58ae5e85f23949d52c14aca2cfc6848a31764995b7de88f13a1ea0"}, + {file = "coverage-7.13.5-cp310-cp310-win32.whl", hash = "sha256:eb7fdf1ef130660e7415e0253a01a7d5a88c9c4d158bcf75cbbd922fd65a5b58"}, + {file = "coverage-7.13.5-cp310-cp310-win_amd64.whl", hash = "sha256:3e1bb5f6c78feeb1be3475789b14a0f0a5b47d505bfc7267126ccbd50289999e"}, + {file = "coverage-7.13.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:66a80c616f80181f4d643b0f9e709d97bcea413ecd9631e1dedc7401c8e6695d"}, + {file = "coverage-7.13.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:145ede53ccbafb297c1c9287f788d1bc3efd6c900da23bf6931b09eafc931587"}, + {file = "coverage-7.13.5-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:0672854dc733c342fa3e957e0605256d2bf5934feeac328da9e0b5449634a642"}, + {file = "coverage-7.13.5-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:ec10e2a42b41c923c2209b846126c6582db5e43a33157e9870ba9fb70dc7854b"}, + {file = "coverage-7.13.5-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:be3d4bbad9d4b037791794ddeedd7d64a56f5933a2c1373e18e9e568b9141686"}, + {file = "coverage-7.13.5-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:4d2afbc5cc54d286bfb54541aa50b64cdb07a718227168c87b9e2fb8f25e1743"}, + {file = "coverage-7.13.5-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:3ad050321264c49c2fa67bb599100456fc51d004b82534f379d16445da40fb75"}, + {file = "coverage-7.13.5-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7300c8a6d13335b29bb76d7651c66af6bd8658517c43499f110ddc6717bfc209"}, + {file = "coverage-7.13.5-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:eb07647a5738b89baab047f14edd18ded523de60f3b30e75c2acc826f79c839a"}, + {file = "coverage-7.13.5-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:9adb6688e3b53adffefd4a52d72cbd8b02602bfb8f74dcd862337182fd4d1a4e"}, + {file = "coverage-7.13.5-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:7c8d4bc913dd70b93488d6c496c77f3aff5ea99a07e36a18f865bca55adef8bd"}, + {file = "coverage-7.13.5-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0e3c426ffc4cd952f54ee9ffbdd10345709ecc78a3ecfd796a57236bfad0b9b8"}, + {file = "coverage-7.13.5-cp311-cp311-win32.whl", hash = "sha256:259b69bb83ad9894c4b25be2528139eecba9a82646ebdda2d9db1ba28424a6bf"}, + {file = "coverage-7.13.5-cp311-cp311-win_amd64.whl", hash = "sha256:258354455f4e86e3e9d0d17571d522e13b4e1e19bf0f8596bcf9476d61e7d8a9"}, + {file = "coverage-7.13.5-cp311-cp311-win_arm64.whl", hash = "sha256:bff95879c33ec8da99fc9b6fe345ddb5be6414b41d6d1ad1c8f188d26f36e028"}, + {file = "coverage-7.13.5-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:460cf0114c5016fa841214ff5564aa4864f11948da9440bc97e21ad1f4ba1e01"}, + {file = "coverage-7.13.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0e223ce4b4ed47f065bfb123687686512e37629be25cc63728557ae7db261422"}, + {file = "coverage-7.13.5-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:6e3370441f4513c6252bf042b9c36d22491142385049243253c7e48398a15a9f"}, + {file = "coverage-7.13.5-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:03ccc709a17a1de074fb1d11f217342fb0d2b1582ed544f554fc9fc3f07e95f5"}, + {file = "coverage-7.13.5-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3f4818d065964db3c1c66dc0fbdac5ac692ecbc875555e13374fdbe7eedb4376"}, + {file = "coverage-7.13.5-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:012d5319e66e9d5a218834642d6c35d265515a62f01157a45bcc036ecf947256"}, + {file = "coverage-7.13.5-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:8dd02af98971bdb956363e4827d34425cb3df19ee550ef92855b0acb9c7ce51c"}, + {file = "coverage-7.13.5-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:f08fd75c50a760c7eb068ae823777268daaf16a80b918fa58eea888f8e3919f5"}, + {file = "coverage-7.13.5-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:843ea8643cf967d1ac7e8ecd4bb00c99135adf4816c0c0593fdcc47b597fcf09"}, + {file = "coverage-7.13.5-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:9d44d7aa963820b1b971dbecd90bfe5fe8f81cff79787eb6cca15750bd2f79b9"}, + {file = "coverage-7.13.5-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:7132bed4bd7b836200c591410ae7d97bf7ae8be6fc87d160b2bd881df929e7bf"}, + {file = "coverage-7.13.5-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a698e363641b98843c517817db75373c83254781426e94ada3197cabbc2c919c"}, + {file = "coverage-7.13.5-cp312-cp312-win32.whl", hash = "sha256:bdba0a6b8812e8c7df002d908a9a2ea3c36e92611b5708633c50869e6d922fdf"}, + {file = "coverage-7.13.5-cp312-cp312-win_amd64.whl", hash = "sha256:d2c87e0c473a10bffe991502eac389220533024c8082ec1ce849f4218dded810"}, + {file = "coverage-7.13.5-cp312-cp312-win_arm64.whl", hash = "sha256:bf69236a9a81bdca3bff53796237aab096cdbf8d78a66ad61e992d9dac7eb2de"}, + {file = "coverage-7.13.5-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5ec4af212df513e399cf11610cc27063f1586419e814755ab362e50a85ea69c1"}, + {file = "coverage-7.13.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:941617e518602e2d64942c88ec8499f7fbd49d3f6c4327d3a71d43a1973032f3"}, + {file = "coverage-7.13.5-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:da305e9937617ee95c2e39d8ff9f040e0487cbf1ac174f777ed5eddd7a7c1f26"}, + {file = "coverage-7.13.5-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:78e696e1cc714e57e8b25760b33a8b1026b7048d270140d25dafe1b0a1ee05a3"}, + {file = "coverage-7.13.5-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:02ca0eed225b2ff301c474aeeeae27d26e2537942aa0f87491d3e147e784a82b"}, + {file = "coverage-7.13.5-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:04690832cbea4e4663d9149e05dba142546ca05cb1848816760e7f58285c970a"}, + {file = "coverage-7.13.5-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:0590e44dd2745c696a778f7bab6aa95256de2cbc8b8cff4f7db8ff09813d6969"}, + {file = "coverage-7.13.5-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d7cfad2d6d81dd298ab6b89fe72c3b7b05ec7544bdda3b707ddaecff8d25c161"}, + {file = "coverage-7.13.5-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:e092b9499de38ae0fbfbc603a74660eb6ff3e869e507b50d85a13b6db9863e15"}, + {file = "coverage-7.13.5-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:48c39bc4a04d983a54a705a6389512883d4a3b9862991b3617d547940e9f52b1"}, + {file = "coverage-7.13.5-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:2d3807015f138ffea1ed9afeeb8624fd781703f2858b62a8dd8da5a0994c57b6"}, + {file = "coverage-7.13.5-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ee2aa19e03161671ec964004fb74b2257805d9710bf14a5c704558b9d8dbaf17"}, + {file = "coverage-7.13.5-cp313-cp313-win32.whl", hash = "sha256:ce1998c0483007608c8382f4ff50164bfc5bd07a2246dd272aa4043b75e61e85"}, + {file = "coverage-7.13.5-cp313-cp313-win_amd64.whl", hash = "sha256:631efb83f01569670a5e866ceb80fe483e7c159fac6f167e6571522636104a0b"}, + {file = "coverage-7.13.5-cp313-cp313-win_arm64.whl", hash = "sha256:f4cd16206ad171cbc2470dbea9103cf9a7607d5fe8c242fdf1edf36174020664"}, + {file = "coverage-7.13.5-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0428cbef5783ad91fe240f673cc1f76b25e74bbfe1a13115e4aa30d3f538162d"}, + {file = "coverage-7.13.5-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:e0b216a19534b2427cc201a26c25da4a48633f29a487c61258643e89d28200c0"}, + {file = "coverage-7.13.5-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:972a9cd27894afe4bc2b1480107054e062df08e671df7c2f18c205e805ccd806"}, + {file = "coverage-7.13.5-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:4b59148601efcd2bac8c4dbf1f0ad6391693ccf7a74b8205781751637076aee3"}, + {file = "coverage-7.13.5-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:505d7083c8b0c87a8fa8c07370c285847c1f77739b22e299ad75a6af6c32c5c9"}, + {file = "coverage-7.13.5-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:60365289c3741e4db327e7baff2a4aaacf22f788e80fa4683393891b70a89fbd"}, + {file = "coverage-7.13.5-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:1b88c69c8ef5d4b6fe7dea66d6636056a0f6a7527c440e890cf9259011f5e606"}, + {file = "coverage-7.13.5-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:5b13955d31d1633cf9376908089b7cebe7d15ddad7aeaabcbe969a595a97e95e"}, + {file = "coverage-7.13.5-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:f70c9ab2595c56f81a89620e22899eea8b212a4041bd728ac6f4a28bf5d3ddd0"}, + {file = "coverage-7.13.5-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:084b84a8c63e8d6fc7e3931b316a9bcafca1458d753c539db82d31ed20091a87"}, + {file = "coverage-7.13.5-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:ad14385487393e386e2ea988b09d62dd42c397662ac2dabc3832d71253eee479"}, + {file = "coverage-7.13.5-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:7f2c47b36fe7709a6e83bfadf4eefb90bd25fbe4014d715224c4316f808e59a2"}, + {file = "coverage-7.13.5-cp313-cp313t-win32.whl", hash = "sha256:67e9bc5449801fad0e5dff329499fb090ba4c5800b86805c80617b4e29809b2a"}, + {file = "coverage-7.13.5-cp313-cp313t-win_amd64.whl", hash = "sha256:da86cdcf10d2519e10cabb8ac2de03da1bcb6e4853790b7fbd48523332e3a819"}, + {file = "coverage-7.13.5-cp313-cp313t-win_arm64.whl", hash = "sha256:0ecf12ecb326fe2c339d93fc131816f3a7367d223db37817208905c89bded911"}, + {file = "coverage-7.13.5-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:fbabfaceaeb587e16f7008f7795cd80d20ec548dc7f94fbb0d4ec2e038ce563f"}, + {file = "coverage-7.13.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:9bb2a28101a443669a423b665939381084412b81c3f8c0fcfbac57f4e30b5b8e"}, + {file = "coverage-7.13.5-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:bd3a2fbc1c6cccb3c5106140d87cc6a8715110373ef42b63cf5aea29df8c217a"}, + {file = "coverage-7.13.5-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:6c36ddb64ed9d7e496028d1d00dfec3e428e0aabf4006583bb1839958d280510"}, + {file = "coverage-7.13.5-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:380e8e9084d8eb38db3a9176a1a4f3c0082c3806fa0dc882d1d87abc3c789247"}, + {file = "coverage-7.13.5-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:e808af52a0513762df4d945ea164a24b37f2f518cbe97e03deaa0ee66139b4d6"}, + {file = "coverage-7.13.5-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e301d30dd7e95ae068671d746ba8c34e945a82682e62918e41b2679acd2051a0"}, + {file = "coverage-7.13.5-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:800bc829053c80d240a687ceeb927a94fd108bbdc68dfbe505d0d75ab578a882"}, + {file = "coverage-7.13.5-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:0b67af5492adb31940ee418a5a655c28e48165da5afab8c7fa6fd72a142f8740"}, + {file = "coverage-7.13.5-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:c9136ff29c3a91e25b1d1552b5308e53a1e0653a23e53b6366d7c2dcbbaf8a16"}, + {file = "coverage-7.13.5-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:cff784eef7f0b8f6cb28804fbddcfa99f89efe4cc35fb5627e3ac58f91ed3ac0"}, + {file = "coverage-7.13.5-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:68a4953be99b17ac3c23b6efbc8a38330d99680c9458927491d18700ef23ded0"}, + {file = "coverage-7.13.5-cp314-cp314-win32.whl", hash = "sha256:35a31f2b1578185fbe6aa2e74cea1b1d0bbf4c552774247d9160d29b80ed56cc"}, + {file = "coverage-7.13.5-cp314-cp314-win_amd64.whl", hash = "sha256:2aa055ae1857258f9e0045be26a6d62bdb47a72448b62d7b55f4820f361a2633"}, + {file = "coverage-7.13.5-cp314-cp314-win_arm64.whl", hash = "sha256:1b11eef33edeae9d142f9b4358edb76273b3bfd30bc3df9a4f95d0e49caf94e8"}, + {file = "coverage-7.13.5-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:10a0c37f0b646eaff7cce1874c31d1f1ccb297688d4c747291f4f4c70741cc8b"}, + {file = "coverage-7.13.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:b5db73ba3c41c7008037fa731ad5459fc3944cb7452fc0aa9f822ad3533c583c"}, + {file = "coverage-7.13.5-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:750db93a81e3e5a9831b534be7b1229df848b2e125a604fe6651e48aa070e5f9"}, + {file = "coverage-7.13.5-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:9ddb4f4a5479f2539644be484da179b653273bca1a323947d48ab107b3ed1f29"}, + {file = "coverage-7.13.5-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d8a7a2049c14f413163e2bdabd37e41179b1d1ccb10ffc6ccc4b7a718429c607"}, + {file = "coverage-7.13.5-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:e1c85e0b6c05c592ea6d8768a66a254bfb3874b53774b12d4c89c481eb78cb90"}, + {file = "coverage-7.13.5-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:777c4d1eff1b67876139d24288aaf1817f6c03d6bae9c5cc8d27b83bcfe38fe3"}, + {file = "coverage-7.13.5-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:6697e29b93707167687543480a40f0db8f356e86d9f67ddf2e37e2dfd91a9dab"}, + {file = "coverage-7.13.5-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:8fdf453a942c3e4d99bd80088141c4c6960bb232c409d9c3558e2dbaa3998562"}, + {file = "coverage-7.13.5-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:32ca0c0114c9834a43f045a87dcebd69d108d8ffb666957ea65aa132f50332e2"}, + {file = "coverage-7.13.5-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:8769751c10f339021e2638cd354e13adeac54004d1941119b2c96fe5276d45ea"}, + {file = "coverage-7.13.5-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:cec2d83125531bd153175354055cdb7a09987af08a9430bd173c937c6d0fba2a"}, + {file = "coverage-7.13.5-cp314-cp314t-win32.whl", hash = "sha256:0cd9ed7a8b181775459296e402ca4fb27db1279740a24e93b3b41942ebe4b215"}, + {file = "coverage-7.13.5-cp314-cp314t-win_amd64.whl", hash = "sha256:301e3b7dfefecaca37c9f1aa6f0049b7d4ab8dd933742b607765d757aca77d43"}, + {file = "coverage-7.13.5-cp314-cp314t-win_arm64.whl", hash = "sha256:9dacc2ad679b292709e0f5fc1ac74a6d4d5562e424058962c7bb0c658ad25e45"}, + {file = "coverage-7.13.5-py3-none-any.whl", hash = "sha256:34b02417cf070e173989b3db962f7ed56d2f644307b2cf9d5a0f258e13084a61"}, + {file = "coverage-7.13.5.tar.gz", hash = "sha256:c81f6515c4c40141f83f502b07bbfa5c240ba25bbe73da7b33f1e5b6120ff179"}, ] [package.dependencies] @@ -537,18 +636,18 @@ files = [ [[package]] name = "deprecated" -version = "1.2.18" +version = "1.3.1" description = "Python @deprecated decorator to deprecate old python classes, functions or methods." optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" groups = ["docs"] files = [ - {file = "Deprecated-1.2.18-py2.py3-none-any.whl", hash = "sha256:bd5011788200372a32418f888e326a09ff80d0214bd961147cfed01b5c018eec"}, - {file = "deprecated-1.2.18.tar.gz", hash = "sha256:422b6f6d859da6f2ef57857761bfb392480502a64c3028ca9bbe86085d72115d"}, + {file = "deprecated-1.3.1-py2.py3-none-any.whl", hash = "sha256:597bfef186b6f60181535a29fbe44865ce137a5079f295b479886c82729d5f3f"}, + {file = "deprecated-1.3.1.tar.gz", hash = "sha256:b1b50e0ff0c1fddaa5708a2c6b0a6588bb09b892825ab2b214ac9ea9d92a5223"}, ] [package.dependencies] -wrapt = ">=1.10,<2" +wrapt = ">=1.10,<3" [package.extras] dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "setuptools ; python_version >= \"3.12\"", "tox"] @@ -578,15 +677,15 @@ files = [ [[package]] name = "exceptiongroup" -version = "1.3.0" +version = "1.3.1" description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" groups = ["dev"] markers = "python_version == \"3.10\"" files = [ - {file = "exceptiongroup-1.3.0-py3-none-any.whl", hash = "sha256:4d111e6e0c13d0644cad6ddaa7ed0261a0b36971f6d23e7ec9b4b9097da78a10"}, - {file = "exceptiongroup-1.3.0.tar.gz", hash = "sha256:b241f5885f560bc56a59ee63ca4c6a8bfa46ae4ad651af316d4e81817bb9fd88"}, + {file = "exceptiongroup-1.3.1-py3-none-any.whl", hash = "sha256:a7a39a3bd276781e98394987d3a5701d0c4edffb633bb7a5144577f82c773598"}, + {file = "exceptiongroup-1.3.1.tar.gz", hash = "sha256:8b412432c6055b0b7d14c310000ae93352ed6754f70fa8f7c34141f91c4e3219"}, ] [package.dependencies] @@ -614,90 +713,156 @@ pyflakes = ">=2.5.0,<2.6.0" [[package]] name = "frozendict" -version = "2.4.6" +version = "2.4.7" description = "A simple immutable dictionary" optional = false python-versions = ">=3.6" groups = ["main"] files = [ - {file = "frozendict-2.4.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c3a05c0a50cab96b4bb0ea25aa752efbfceed5ccb24c007612bc63e51299336f"}, - {file = "frozendict-2.4.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f5b94d5b07c00986f9e37a38dd83c13f5fe3bf3f1ccc8e88edea8fe15d6cd88c"}, - {file = "frozendict-2.4.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4c789fd70879ccb6289a603cdebdc4953e7e5dea047d30c1b180529b28257b5"}, - {file = "frozendict-2.4.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da6a10164c8a50b34b9ab508a9420df38f4edf286b9ca7b7df8a91767baecb34"}, - {file = "frozendict-2.4.6-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:9a8a43036754a941601635ea9c788ebd7a7efbed2becba01b54a887b41b175b9"}, - {file = "frozendict-2.4.6-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:c9905dcf7aa659e6a11b8051114c9fa76dfde3a6e50e6dc129d5aece75b449a2"}, - {file = "frozendict-2.4.6-cp310-cp310-win_amd64.whl", hash = "sha256:323f1b674a2cc18f86ab81698e22aba8145d7a755e0ac2cccf142ee2db58620d"}, - {file = "frozendict-2.4.6-cp310-cp310-win_arm64.whl", hash = "sha256:eabd21d8e5db0c58b60d26b4bb9839cac13132e88277e1376970172a85ee04b3"}, - {file = "frozendict-2.4.6-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:eddabeb769fab1e122d3a6872982c78179b5bcc909fdc769f3cf1964f55a6d20"}, - {file = "frozendict-2.4.6-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:377a65be0a700188fc21e669c07de60f4f6d35fae8071c292b7df04776a1c27b"}, - {file = "frozendict-2.4.6-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce1e9217b85eec6ba9560d520d5089c82dbb15f977906eb345d81459723dd7e3"}, - {file = "frozendict-2.4.6-cp36-cp36m-musllinux_1_2_aarch64.whl", hash = "sha256:7291abacf51798d5ffe632771a69c14fb423ab98d63c4ccd1aa382619afe2f89"}, - {file = "frozendict-2.4.6-cp36-cp36m-musllinux_1_2_x86_64.whl", hash = "sha256:e72fb86e48811957d66ffb3e95580af7b1af1e6fbd760ad63d7bd79b2c9a07f8"}, - {file = "frozendict-2.4.6-cp36-cp36m-win_amd64.whl", hash = "sha256:622301b1c29c4f9bba633667d592a3a2b093cb408ba3ce578b8901ace3931ef3"}, - {file = "frozendict-2.4.6-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a4e3737cb99ed03200cd303bdcd5514c9f34b29ee48f405c1184141bd68611c9"}, - {file = "frozendict-2.4.6-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:49ffaf09241bc1417daa19362a2241a4aa435f758fd4375c39ce9790443a39cd"}, - {file = "frozendict-2.4.6-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2d69418479bfb834ba75b0e764f058af46ceee3d655deb6a0dd0c0c1a5e82f09"}, - {file = "frozendict-2.4.6-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:c131f10c4d3906866454c4e89b87a7e0027d533cce8f4652aa5255112c4d6677"}, - {file = "frozendict-2.4.6-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:fc67cbb3c96af7a798fab53d52589752c1673027e516b702ab355510ddf6bdff"}, - {file = "frozendict-2.4.6-cp37-cp37m-win_amd64.whl", hash = "sha256:7730f8ebe791d147a1586cbf6a42629351d4597773317002181b66a2da0d509e"}, - {file = "frozendict-2.4.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:807862e14b0e9665042458fde692c4431d660c4219b9bb240817f5b918182222"}, - {file = "frozendict-2.4.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9647c74efe3d845faa666d4853cfeabbaee403b53270cabfc635b321f770e6b8"}, - {file = "frozendict-2.4.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:665fad3f0f815aa41294e561d98dbedba4b483b3968e7e8cab7d728d64b96e33"}, - {file = "frozendict-2.4.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f42e6b75254ea2afe428ad6d095b62f95a7ae6d4f8272f0bd44a25dddd20f67"}, - {file = "frozendict-2.4.6-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:02331541611f3897f260900a1815b63389654951126e6e65545e529b63c08361"}, - {file = "frozendict-2.4.6-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:18d50a2598350b89189da9150058191f55057581e40533e470db46c942373acf"}, - {file = "frozendict-2.4.6-cp38-cp38-win_amd64.whl", hash = "sha256:1b4a3f8f6dd51bee74a50995c39b5a606b612847862203dd5483b9cd91b0d36a"}, - {file = "frozendict-2.4.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a76cee5c4be2a5d1ff063188232fffcce05dde6fd5edd6afe7b75b247526490e"}, - {file = "frozendict-2.4.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ba5ef7328706db857a2bdb2c2a17b4cd37c32a19c017cff1bb7eeebc86b0f411"}, - {file = "frozendict-2.4.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:669237c571856be575eca28a69e92a3d18f8490511eff184937283dc6093bd67"}, - {file = "frozendict-2.4.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0aaa11e7c472150efe65adbcd6c17ac0f586896096ab3963775e1c5c58ac0098"}, - {file = "frozendict-2.4.6-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:b8f2829048f29fe115da4a60409be2130e69402e29029339663fac39c90e6e2b"}, - {file = "frozendict-2.4.6-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:94321e646cc39bebc66954a31edd1847d3a2a3483cf52ff051cd0996e7db07db"}, - {file = "frozendict-2.4.6-cp39-cp39-win_amd64.whl", hash = "sha256:74b6b26c15dddfefddeb89813e455b00ebf78d0a3662b89506b4d55c6445a9f4"}, - {file = "frozendict-2.4.6-cp39-cp39-win_arm64.whl", hash = "sha256:7088102345d1606450bd1801a61139bbaa2cb0d805b9b692f8d81918ea835da6"}, - {file = "frozendict-2.4.6-py311-none-any.whl", hash = "sha256:d065db6a44db2e2375c23eac816f1a022feb2fa98cbb50df44a9e83700accbea"}, - {file = "frozendict-2.4.6-py312-none-any.whl", hash = "sha256:49344abe90fb75f0f9fdefe6d4ef6d4894e640fadab71f11009d52ad97f370b9"}, - {file = "frozendict-2.4.6-py313-none-any.whl", hash = "sha256:7134a2bb95d4a16556bb5f2b9736dceb6ea848fa5b6f3f6c2d6dba93b44b4757"}, - {file = "frozendict-2.4.6.tar.gz", hash = "sha256:df7cd16470fbd26fc4969a208efadc46319334eb97def1ddf48919b351192b8e"}, + {file = "frozendict-2.4.7-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:bd37c087a538944652363cfd77fb7abe8100cc1f48afea0b88b38bf0f469c3d2"}, + {file = "frozendict-2.4.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2b96f224a5431889f04b2bc99c0e9abe285679464273ead83d7d7f2a15907d35"}, + {file = "frozendict-2.4.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5c1781f28c4bbb177644b3cb6d5cf7da59be374b02d91cdde68d1d5ef32e046b"}, + {file = "frozendict-2.4.7-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:8a06f6c3d3b8d487226fdde93f621e04a54faecc5bf5d9b16497b8f9ead0ac3e"}, + {file = "frozendict-2.4.7-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b809d1c861436a75b2b015dbfd94f6154fa4e7cb0a70e389df1d5f6246b21d1e"}, + {file = "frozendict-2.4.7-cp310-cp310-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:75eefdf257a84ea73d553eb80d0abbff0af4c9df62529e4600fd3f96ff17eeb3"}, + {file = "frozendict-2.4.7-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a4d2b27d8156922c9739dd2ff4f3934716e17cfd1cf6fb61aa17af7d378555e9"}, + {file = "frozendict-2.4.7-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2ebd953c41408acfb8041ff9e6c3519c09988fb7e007df7ab6b56e229029d788"}, + {file = "frozendict-2.4.7-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4c64d34b802912ee6d107936e970b90750385a1fdfd38d310098b2918ba4cbf2"}, + {file = "frozendict-2.4.7-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:294a7d7d51dd979021a8691b46aedf9bd4a594ce3ed33a4bdf0a712d6929d712"}, + {file = "frozendict-2.4.7-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f65d1b90e9ddc791ea82ef91a9ae0ab27ef6c0cfa88fadfa0e5ca5a22f8fa22f"}, + {file = "frozendict-2.4.7-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:82d5272d08451bcef6fb6235a0a04cf1816b6b6815cec76be5ace1de17e0c1a4"}, + {file = "frozendict-2.4.7-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:5943c3f683d3f32036f6ca975e920e383d85add1857eee547742de9c1f283716"}, + {file = "frozendict-2.4.7-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:88c6bea948da03087035bb9ca9625305d70e084aa33f11e17048cb7dda4ca293"}, + {file = "frozendict-2.4.7-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:ffd1a9f9babec9119712e76a39397d8aa0d72ef8c4ccad917c6175d7e7f81b74"}, + {file = "frozendict-2.4.7-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:0ff6f57854cc8aa8b30947ec005f9246d96e795a78b21441614e85d39b708822"}, + {file = "frozendict-2.4.7-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:d774df483c12d6cba896eb9a1337bbc5ad3f564eb18cfaaee3e95fb4402f2a86"}, + {file = "frozendict-2.4.7-cp310-cp310-win32.whl", hash = "sha256:a10d38fa300f6bef230fae1fdb4bc98706b78c8a3a2f3140fde748469ef3cfe8"}, + {file = "frozendict-2.4.7-cp310-cp310-win_amd64.whl", hash = "sha256:dd518f300e5eb6a8827bee380f2e1a31c01dc0af069b13abdecd4e5769bd8a97"}, + {file = "frozendict-2.4.7-cp310-cp310-win_arm64.whl", hash = "sha256:3842cfc2d69df5b9978f2e881b7678a282dbdd6846b11b5159f910bc633cbe4f"}, + {file = "frozendict-2.4.7-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:735be62d757e1e7e496ccb6401efe82b473faa653e95eec0826cd7819a29a34c"}, + {file = "frozendict-2.4.7-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fff8584e3bbdc5c1713cd016fbf4b88babfffd4e5e89b39020f2a208dd24c900"}, + {file = "frozendict-2.4.7-cp36-cp36m-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:91a06ee46b3e3ef3b237046b914c0c905eab9fdfeac677e9b51473b482e24c28"}, + {file = "frozendict-2.4.7-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fd7ba56cf6340c732ecb78787c4e9600c4bd01372af7313ded21037126d33ec6"}, + {file = "frozendict-2.4.7-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d1b4426457757c30ad86b57cdbcc0adaa328399f1ec3d231a0a2ce7447248987"}, + {file = "frozendict-2.4.7-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b22d337c76b765cb7961d4ee47fe29f89e30921eb47bf856b14dc7641f4df3e5"}, + {file = "frozendict-2.4.7-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57134ef5df1dd32229c148c75a7b89245dbdb89966a155d6dfd4bda653e8c7af"}, + {file = "frozendict-2.4.7-cp36-cp36m-musllinux_1_2_aarch64.whl", hash = "sha256:c89617a784e1c24a31f5aa4809402f8072a26b64ddbc437897f6391ff69b0ee9"}, + {file = "frozendict-2.4.7-cp36-cp36m-musllinux_1_2_armv7l.whl", hash = "sha256:176dd384dfe1d0d79449e05f67764c57c6f0f3095378bf00deb33165d5d2df5b"}, + {file = "frozendict-2.4.7-cp36-cp36m-musllinux_1_2_i686.whl", hash = "sha256:b1a94e8935c69ae30043b465af496f447950f2c03660aee8657074084faae0b3"}, + {file = "frozendict-2.4.7-cp36-cp36m-musllinux_1_2_ppc64le.whl", hash = "sha256:c570649ceccfa5e11ad9351e9009dc484c315a51a56aa02ced07ae97644bb7aa"}, + {file = "frozendict-2.4.7-cp36-cp36m-musllinux_1_2_s390x.whl", hash = "sha256:e0d450c9d444befe2668bf9386ac2945a2f38152248d58f6b3feea63db59ba08"}, + {file = "frozendict-2.4.7-cp36-cp36m-musllinux_1_2_x86_64.whl", hash = "sha256:7469912c1a04102457871ff675aebe600dbb7e79a6450a166cc8079b88f6ca79"}, + {file = "frozendict-2.4.7-cp36-cp36m-win32.whl", hash = "sha256:2808bab8e21887a8c106cca5f6f0ab5bda7ee81e159409a10f53d57542ccd99c"}, + {file = "frozendict-2.4.7-cp36-cp36m-win_amd64.whl", hash = "sha256:ca17ac727ffeeba6c46f5a88e0284a7cb1520fb03127645fcdd7041080adf849"}, + {file = "frozendict-2.4.7-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8ef11dd996208c5a96eab0683f7a17cb4b992948464d2498520efd75a10a2aac"}, + {file = "frozendict-2.4.7-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b960e700dc95faca7dd6919d0dce183ef89bfe01554d323cf5de7331a2e80f83"}, + {file = "frozendict-2.4.7-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:fc43257a06e6117da6a8a0779243b974cdb9205fed82e32eb669f6746c75d27d"}, + {file = "frozendict-2.4.7-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0ece525da7d0aa3eb56c3e479f30612028d545081c15450d67d771a303ee7d4c"}, + {file = "frozendict-2.4.7-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7ddffe7c0b3be414f88185e212758989c65b497315781290eb029e2c1e1fd64e"}, + {file = "frozendict-2.4.7-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05dd27415f913cd11649009f53d97eb565ce7b76787d7869c4733738c10e8d27"}, + {file = "frozendict-2.4.7-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0664092614d2b9d0aa404731f33ad5459a54fe8dab9d1fd45aa714fa6de4d0ef"}, + {file = "frozendict-2.4.7-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:830d181781bb263c9fa430b81f82c867546f5dcb368e73931c8591f533a04afb"}, + {file = "frozendict-2.4.7-cp37-cp37m-musllinux_1_2_armv7l.whl", hash = "sha256:c93827e0854393cd904b927ceb529afc17776706f5b9e45c7eaf6a40b3fc7b25"}, + {file = "frozendict-2.4.7-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:6d30dbba6eb1497c695f3108c2c292807e7a237c67a1b9ff92c04e89969d22d1"}, + {file = "frozendict-2.4.7-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:ec846bde66b75d68518c7b24a0a46d09db0aee5a6aefd2209d9901faf6e9df21"}, + {file = "frozendict-2.4.7-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:1df8e22f7d24172c08434b10911f3971434bb5a59b4d1b0078ae33a623625294"}, + {file = "frozendict-2.4.7-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:39abe54264ae69a0b2e00fabdb5118604f36a5b927d33e7532cd594c5142ebf4"}, + {file = "frozendict-2.4.7-cp37-cp37m-win32.whl", hash = "sha256:d10c2ea7c90ba204cd053167ba214d0cdd00f3184c7b8d117a56d7fd2b0c6553"}, + {file = "frozendict-2.4.7-cp37-cp37m-win_amd64.whl", hash = "sha256:346a53640f15c1640a3503f60ba99df39e4ab174979f10db4304bbb378df5cbd"}, + {file = "frozendict-2.4.7-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:cc520f3f4af14f456143a534d554175dbc0f0636ffd653e63675cd591862a9d9"}, + {file = "frozendict-2.4.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7fd0d0bd3a79e009dddbf5fedfd927ad495c218cd7b13a112d28a37e2079725c"}, + {file = "frozendict-2.4.7-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a404857e48d85a517bb5b974d740f8c4fccb25d8df98885f3a2a4d950870b845"}, + {file = "frozendict-2.4.7-cp38-cp38-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:f42e2c25d3eee4ea3da88466f38ed0dce8c622a1a9d92572e5ee53b7a6bb9ef1"}, + {file = "frozendict-2.4.7-cp38-cp38-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a1a083e9ee7a1904e545a6307c7db1dd76200077520fcbf7a98d886f81b57dd7"}, + {file = "frozendict-2.4.7-cp38-cp38-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:f556ea05d9c5f6dae50d57ce6234e4ab1fbf4551dd0d52b4fed6ef537d9f3d3c"}, + {file = "frozendict-2.4.7-cp38-cp38-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:739ee81e574f33b46f1e6d9312f3ec2c549bdd574a4ebb6bf106775c9d85ca7b"}, + {file = "frozendict-2.4.7-cp38-cp38-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:48ab42b01952bc11543577de9fe5d9ca7c41b35dda36326a07fb47d84b3d5f22"}, + {file = "frozendict-2.4.7-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:34233deb8d09e798e874a6ac00b054d2e842164d982ebd43eb91b9f0a6a34876"}, + {file = "frozendict-2.4.7-cp38-cp38-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:76bd99f3508cb2ec87976f2e3fe7d92fb373a661cacffb863013d15e4cfaf0eb"}, + {file = "frozendict-2.4.7-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:a265e95e7087f44b88a6d78a63ea95a2ca0eb0a21ab4f76047f4c164a8beb413"}, + {file = "frozendict-2.4.7-cp38-cp38-musllinux_1_2_armv7l.whl", hash = "sha256:1662f1b72b4f4a2ffdfdc4981ece275ca11f90244208ac1f1fc2c17fc9c9437a"}, + {file = "frozendict-2.4.7-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:2e5d2c30f4a3fea83a14b0a5722f21c10de5c755ab5637c70de5eb60886d58cd"}, + {file = "frozendict-2.4.7-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:2cf0a665bf2f1ce69d3cd8b6d3574b1d32ae00981a16fa1d255d2da8a2e44b7c"}, + {file = "frozendict-2.4.7-cp38-cp38-musllinux_1_2_riscv64.whl", hash = "sha256:708382875c3cfe91be625dddcba03dee2dfdadbad2c431568a8c7f2f2af0bbee"}, + {file = "frozendict-2.4.7-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:7fe194f37052a8f45a1a8507e36229e28b79f3d21542ae55ea6a18c6a444f625"}, + {file = "frozendict-2.4.7-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:d8930877a2dd40461968d9238d95c754e51b33ce7d2a45500f88ffeed5cb7202"}, + {file = "frozendict-2.4.7-cp38-cp38-win32.whl", hash = "sha256:6991469a889ee8a108fe5ed1b044447c7b7a07da9067e93c59cbfac8c1d625cf"}, + {file = "frozendict-2.4.7-cp38-cp38-win_amd64.whl", hash = "sha256:ebae8f4a07372acfc3963fc8d68070cdaab70272c3dd836f057ebbe9b7d38643"}, + {file = "frozendict-2.4.7-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:1c521ad3d747aa475e9040e231f5f1847c04423bae5571c010a9d969e6983c40"}, + {file = "frozendict-2.4.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:70e655c3aa5f893807830f549a7275031a181dbebeaf74c461b51adc755d9335"}, + {file = "frozendict-2.4.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:11d35075f979c96f528d74ccbf89322a7ef8211977dd566bc384985ebce689be"}, + {file = "frozendict-2.4.7-cp39-cp39-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:d4d7ec24d3bfcfac3baf4dffd7fcea3fa8474b087ce32696232132064aa062cf"}, + {file = "frozendict-2.4.7-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5694417864875ca959932e3b98e2b7d5d27c75177bf510939d0da583712ddf58"}, + {file = "frozendict-2.4.7-cp39-cp39-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:57a754671c5746e11140363aa2f4e7a75c8607de6e85a2bf89dcd1daf51885a7"}, + {file = "frozendict-2.4.7-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:313e0e1d8b22b317aa1f7dd48aec8cbb0416ddd625addf7648a69148fcb9ccff"}, + {file = "frozendict-2.4.7-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:176a66094428b9fd66270927b9787e3b8b1c9505ef92723c7b0ef1923dbe3c4a"}, + {file = "frozendict-2.4.7-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:de1fff2683d8af01299ec01eb21a24b6097ce92015fc1fbefa977cecf076a3fc"}, + {file = "frozendict-2.4.7-cp39-cp39-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:115a822ecd754574e11205e0880e9d61258d960863d6fd1b90883aa800f6d3b3"}, + {file = "frozendict-2.4.7-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:de8d2c98777ba266f5466e211778d4e3bd00635a207c54f6f7511d8613b86dd3"}, + {file = "frozendict-2.4.7-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:1e307be0e1f26cbc9593f6bdad5238a1408a50f39f63c9c39eb93c7de5926767"}, + {file = "frozendict-2.4.7-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:78a55f320ca924545494ce153df02d4349156cd95dc4603c1f0e80c42c889249"}, + {file = "frozendict-2.4.7-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:e89492dfcc4c27a718f8b5a4c8df1a2dec6c689718cccd70cb2ceba69ab8c642"}, + {file = "frozendict-2.4.7-cp39-cp39-musllinux_1_2_riscv64.whl", hash = "sha256:1e801d62e35df24be2c6f7f43c114058712efa79a8549c289437754dad0207a3"}, + {file = "frozendict-2.4.7-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:3ed9e2f3547a59f4ef5c233614c6faa6221d33004cb615ae1c07ffc551cfe178"}, + {file = "frozendict-2.4.7-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:ad0448ed5569f0a9b9b010af9fb5b6d9bdc0b4b877a3ddb188396c4742e62284"}, + {file = "frozendict-2.4.7-cp39-cp39-win32.whl", hash = "sha256:eab9ef8a9268042e819de03079b984eb0894f05a7b63c4e5319b1cf1ef362ba7"}, + {file = "frozendict-2.4.7-cp39-cp39-win_amd64.whl", hash = "sha256:8dfe2f4840b043436ee5bdd07b0fa5daecedf086e6957e7df050a56ab6db078d"}, + {file = "frozendict-2.4.7-cp39-cp39-win_arm64.whl", hash = "sha256:cc2085926872a1b26deda4b81b2254d2e5d2cb2c4d7b327abe4c820b7c93f40b"}, + {file = "frozendict-2.4.7-py3-none-any.whl", hash = "sha256:972af65924ea25cf5b4d9326d549e69a9a4918d8a76a9d3a7cd174d98b237550"}, + {file = "frozendict-2.4.7.tar.gz", hash = "sha256:e478fb2a1391a56c8a6e10cc97c4a9002b410ecd1ac28c18d780661762e271bd"}, ] [[package]] name = "idna" -version = "3.10" +version = "3.13" description = "Internationalized Domain Names in Applications (IDNA)" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" groups = ["main", "dev", "docs"] files = [ - {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, - {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, + {file = "idna-3.13-py3-none-any.whl", hash = "sha256:892ea0cde124a99ce773decba204c5552b69c3c67ffd5f232eb7696135bc8bb3"}, + {file = "idna-3.13.tar.gz", hash = "sha256:585ea8fe5d69b9181ec1afba340451fba6ba764af97026f92a91d4eef164a242"}, ] [package.extras] -all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] +all = ["mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] [[package]] name = "imagesize" -version = "1.4.1" +version = "1.5.0" description = "Getting image size from png/jpeg/jpeg2000/gif file" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" +groups = ["docs"] +markers = "python_version >= \"3.12\"" +files = [ + {file = "imagesize-1.5.0-py2.py3-none-any.whl", hash = "sha256:32677681b3f434c2cb496f00e89c5a291247b35b1f527589909e008057da5899"}, + {file = "imagesize-1.5.0.tar.gz", hash = "sha256:8bfc5363a7f2133a89f0098451e0bcb1cd71aba4dc02bbcecb39d99d40e1b94f"}, +] + +[[package]] +name = "imagesize" +version = "2.0.0" +description = "Get image size from headers (BMP/PNG/JPEG/JPEG2000/GIF/TIFF/SVG/Netpbm/WebP/AVIF/HEIC/HEIF)" +optional = false +python-versions = "<3.15,>=3.10" groups = ["docs"] +markers = "python_version < \"3.12\"" files = [ - {file = "imagesize-1.4.1-py2.py3-none-any.whl", hash = "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b"}, - {file = "imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a"}, + {file = "imagesize-2.0.0-py2.py3-none-any.whl", hash = "sha256:5667c5bbb57ab3f1fa4bc366f4fbc971db3d5ed011fd2715fd8001f782718d96"}, + {file = "imagesize-2.0.0.tar.gz", hash = "sha256:8e8358c4a05c304f1fccf7ff96f036e7243a189e9e42e90851993c558cfe9ee3"}, ] [[package]] name = "iniconfig" -version = "2.1.0" +version = "2.3.0" description = "brain-dead simple config-ini parsing" optional = false -python-versions = ">=3.8" +python-versions = ">=3.10" groups = ["dev"] files = [ - {file = "iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760"}, - {file = "iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7"}, + {file = "iniconfig-2.3.0-py3-none-any.whl", hash = "sha256:f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12"}, + {file = "iniconfig-2.3.0.tar.gz", hash = "sha256:c76315c77db068650d49c5b56314774a7804df16fee4402c1f19d6d15d8c4730"}, ] [[package]] @@ -788,106 +953,146 @@ tornado = "*" [[package]] name = "lxml" -version = "6.0.0" +version = "6.1.0" description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." optional = false python-versions = ">=3.8" groups = ["main"] files = [ - {file = "lxml-6.0.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:35bc626eec405f745199200ccb5c6b36f202675d204aa29bb52e27ba2b71dea8"}, - {file = "lxml-6.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:246b40f8a4aec341cbbf52617cad8ab7c888d944bfe12a6abd2b1f6cfb6f6082"}, - {file = "lxml-6.0.0-cp310-cp310-manylinux2010_i686.manylinux2014_i686.manylinux_2_12_i686.manylinux_2_17_i686.whl", hash = "sha256:2793a627e95d119e9f1e19720730472f5543a6d84c50ea33313ce328d870f2dd"}, - {file = "lxml-6.0.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:46b9ed911f36bfeb6338e0b482e7fe7c27d362c52fde29f221fddbc9ee2227e7"}, - {file = "lxml-6.0.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:2b4790b558bee331a933e08883c423f65bbcd07e278f91b2272489e31ab1e2b4"}, - {file = "lxml-6.0.0-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e2030956cf4886b10be9a0285c6802e078ec2391e1dd7ff3eb509c2c95a69b76"}, - {file = "lxml-6.0.0-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4d23854ecf381ab1facc8f353dcd9adeddef3652268ee75297c1164c987c11dc"}, - {file = "lxml-6.0.0-cp310-cp310-manylinux_2_31_armv7l.whl", hash = "sha256:43fe5af2d590bf4691531b1d9a2495d7aab2090547eaacd224a3afec95706d76"}, - {file = "lxml-6.0.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:74e748012f8c19b47f7d6321ac929a9a94ee92ef12bc4298c47e8b7219b26541"}, - {file = "lxml-6.0.0-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:43cfbb7db02b30ad3926e8fceaef260ba2fb7df787e38fa2df890c1ca7966c3b"}, - {file = "lxml-6.0.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:34190a1ec4f1e84af256495436b2d196529c3f2094f0af80202947567fdbf2e7"}, - {file = "lxml-6.0.0-cp310-cp310-win32.whl", hash = "sha256:5967fe415b1920a3877a4195e9a2b779249630ee49ece22021c690320ff07452"}, - {file = "lxml-6.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:f3389924581d9a770c6caa4df4e74b606180869043b9073e2cec324bad6e306e"}, - {file = "lxml-6.0.0-cp310-cp310-win_arm64.whl", hash = "sha256:522fe7abb41309e9543b0d9b8b434f2b630c5fdaf6482bee642b34c8c70079c8"}, - {file = "lxml-6.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4ee56288d0df919e4aac43b539dd0e34bb55d6a12a6562038e8d6f3ed07f9e36"}, - {file = "lxml-6.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b8dd6dd0e9c1992613ccda2bcb74fc9d49159dbe0f0ca4753f37527749885c25"}, - {file = "lxml-6.0.0-cp311-cp311-manylinux2010_i686.manylinux2014_i686.manylinux_2_12_i686.manylinux_2_17_i686.whl", hash = "sha256:d7ae472f74afcc47320238b5dbfd363aba111a525943c8a34a1b657c6be934c3"}, - {file = "lxml-6.0.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5592401cdf3dc682194727c1ddaa8aa0f3ddc57ca64fd03226a430b955eab6f6"}, - {file = "lxml-6.0.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:58ffd35bd5425c3c3b9692d078bf7ab851441434531a7e517c4984d5634cd65b"}, - {file = "lxml-6.0.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f720a14aa102a38907c6d5030e3d66b3b680c3e6f6bc95473931ea3c00c59967"}, - {file = "lxml-6.0.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c2a5e8d207311a0170aca0eb6b160af91adc29ec121832e4ac151a57743a1e1e"}, - {file = "lxml-6.0.0-cp311-cp311-manylinux_2_31_armv7l.whl", hash = "sha256:2dd1cc3ea7e60bfb31ff32cafe07e24839df573a5e7c2d33304082a5019bcd58"}, - {file = "lxml-6.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2cfcf84f1defed7e5798ef4f88aa25fcc52d279be731ce904789aa7ccfb7e8d2"}, - {file = "lxml-6.0.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:a52a4704811e2623b0324a18d41ad4b9fabf43ce5ff99b14e40a520e2190c851"}, - {file = "lxml-6.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c16304bba98f48a28ae10e32a8e75c349dd742c45156f297e16eeb1ba9287a1f"}, - {file = "lxml-6.0.0-cp311-cp311-win32.whl", hash = "sha256:f8d19565ae3eb956d84da3ef367aa7def14a2735d05bd275cd54c0301f0d0d6c"}, - {file = "lxml-6.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:b2d71cdefda9424adff9a3607ba5bbfc60ee972d73c21c7e3c19e71037574816"}, - {file = "lxml-6.0.0-cp311-cp311-win_arm64.whl", hash = "sha256:8a2e76efbf8772add72d002d67a4c3d0958638696f541734304c7f28217a9cab"}, - {file = "lxml-6.0.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:78718d8454a6e928470d511bf8ac93f469283a45c354995f7d19e77292f26108"}, - {file = "lxml-6.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:84ef591495ffd3f9dcabffd6391db7bb70d7230b5c35ef5148354a134f56f2be"}, - {file = "lxml-6.0.0-cp312-cp312-manylinux2010_i686.manylinux2014_i686.manylinux_2_12_i686.manylinux_2_17_i686.whl", hash = "sha256:2930aa001a3776c3e2601cb8e0a15d21b8270528d89cc308be4843ade546b9ab"}, - {file = "lxml-6.0.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:219e0431ea8006e15005767f0351e3f7f9143e793e58519dc97fe9e07fae5563"}, - {file = "lxml-6.0.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:bd5913b4972681ffc9718bc2d4c53cde39ef81415e1671ff93e9aa30b46595e7"}, - {file = "lxml-6.0.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:390240baeb9f415a82eefc2e13285016f9c8b5ad71ec80574ae8fa9605093cd7"}, - {file = "lxml-6.0.0-cp312-cp312-manylinux_2_27_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:d6e200909a119626744dd81bae409fc44134389e03fbf1d68ed2a55a2fb10991"}, - {file = "lxml-6.0.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ca50bd612438258a91b5b3788c6621c1f05c8c478e7951899f492be42defc0da"}, - {file = "lxml-6.0.0-cp312-cp312-manylinux_2_31_armv7l.whl", hash = "sha256:c24b8efd9c0f62bad0439283c2c795ef916c5a6b75f03c17799775c7ae3c0c9e"}, - {file = "lxml-6.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:afd27d8629ae94c5d863e32ab0e1d5590371d296b87dae0a751fb22bf3685741"}, - {file = "lxml-6.0.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:54c4855eabd9fc29707d30141be99e5cd1102e7d2258d2892314cf4c110726c3"}, - {file = "lxml-6.0.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c907516d49f77f6cd8ead1322198bdfd902003c3c330c77a1c5f3cc32a0e4d16"}, - {file = "lxml-6.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:36531f81c8214e293097cd2b7873f178997dae33d3667caaae8bdfb9666b76c0"}, - {file = "lxml-6.0.0-cp312-cp312-win32.whl", hash = "sha256:690b20e3388a7ec98e899fd54c924e50ba6693874aa65ef9cb53de7f7de9d64a"}, - {file = "lxml-6.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:310b719b695b3dd442cdfbbe64936b2f2e231bb91d998e99e6f0daf991a3eba3"}, - {file = "lxml-6.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:8cb26f51c82d77483cdcd2b4a53cda55bbee29b3c2f3ddeb47182a2a9064e4eb"}, - {file = "lxml-6.0.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:6da7cd4f405fd7db56e51e96bff0865b9853ae70df0e6720624049da76bde2da"}, - {file = "lxml-6.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b34339898bb556a2351a1830f88f751679f343eabf9cf05841c95b165152c9e7"}, - {file = "lxml-6.0.0-cp313-cp313-manylinux2010_i686.manylinux2014_i686.manylinux_2_12_i686.manylinux_2_17_i686.whl", hash = "sha256:51a5e4c61a4541bd1cd3ba74766d0c9b6c12d6a1a4964ef60026832aac8e79b3"}, - {file = "lxml-6.0.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d18a25b19ca7307045581b18b3ec9ead2b1db5ccd8719c291f0cd0a5cec6cb81"}, - {file = "lxml-6.0.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d4f0c66df4386b75d2ab1e20a489f30dc7fd9a06a896d64980541506086be1f1"}, - {file = "lxml-6.0.0-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9f4b481b6cc3a897adb4279216695150bbe7a44c03daba3c894f49d2037e0a24"}, - {file = "lxml-6.0.0-cp313-cp313-manylinux_2_27_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:8a78d6c9168f5bcb20971bf3329c2b83078611fbe1f807baadc64afc70523b3a"}, - {file = "lxml-6.0.0-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2ae06fbab4f1bb7db4f7c8ca9897dc8db4447d1a2b9bee78474ad403437bcc29"}, - {file = "lxml-6.0.0-cp313-cp313-manylinux_2_31_armv7l.whl", hash = "sha256:1fa377b827ca2023244a06554c6e7dc6828a10aaf74ca41965c5d8a4925aebb4"}, - {file = "lxml-6.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:1676b56d48048a62ef77a250428d1f31f610763636e0784ba67a9740823988ca"}, - {file = "lxml-6.0.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:0e32698462aacc5c1cf6bdfebc9c781821b7e74c79f13e5ffc8bfe27c42b1abf"}, - {file = "lxml-6.0.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4d6036c3a296707357efb375cfc24bb64cd955b9ec731abf11ebb1e40063949f"}, - {file = "lxml-6.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:7488a43033c958637b1a08cddc9188eb06d3ad36582cebc7d4815980b47e27ef"}, - {file = "lxml-6.0.0-cp313-cp313-win32.whl", hash = "sha256:5fcd7d3b1d8ecb91445bd71b9c88bdbeae528fefee4f379895becfc72298d181"}, - {file = "lxml-6.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:2f34687222b78fff795feeb799a7d44eca2477c3d9d3a46ce17d51a4f383e32e"}, - {file = "lxml-6.0.0-cp313-cp313-win_arm64.whl", hash = "sha256:21db1ec5525780fd07251636eb5f7acb84003e9382c72c18c542a87c416ade03"}, - {file = "lxml-6.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4eb114a0754fd00075c12648d991ec7a4357f9cb873042cc9a77bf3a7e30c9db"}, - {file = "lxml-6.0.0-cp38-cp38-manylinux2010_i686.manylinux2014_i686.manylinux_2_12_i686.manylinux_2_17_i686.whl", hash = "sha256:7da298e1659e45d151b4028ad5c7974917e108afb48731f4ed785d02b6818994"}, - {file = "lxml-6.0.0-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:7bf61bc4345c1895221357af8f3e89f8c103d93156ef326532d35c707e2fb19d"}, - {file = "lxml-6.0.0-cp38-cp38-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:63b634facdfbad421d4b61c90735688465d4ab3a8853ac22c76ccac2baf98d97"}, - {file = "lxml-6.0.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:e380e85b93f148ad28ac15f8117e2fd8e5437aa7732d65e260134f83ce67911b"}, - {file = "lxml-6.0.0-cp38-cp38-win32.whl", hash = "sha256:185efc2fed89cdd97552585c624d3c908f0464090f4b91f7d92f8ed2f3b18f54"}, - {file = "lxml-6.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:f97487996a39cb18278ca33f7be98198f278d0bc3c5d0fd4d7b3d63646ca3c8a"}, - {file = "lxml-6.0.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:85b14a4689d5cff426c12eefe750738648706ea2753b20c2f973b2a000d3d261"}, - {file = "lxml-6.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f64ccf593916e93b8d36ed55401bb7fe9c7d5de3180ce2e10b08f82a8f397316"}, - {file = "lxml-6.0.0-cp39-cp39-manylinux2010_i686.manylinux2014_i686.manylinux_2_12_i686.manylinux_2_17_i686.whl", hash = "sha256:b372d10d17a701b0945f67be58fae4664fd056b85e0ff0fbc1e6c951cdbc0512"}, - {file = "lxml-6.0.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:a674c0948789e9136d69065cc28009c1b1874c6ea340253db58be7622ce6398f"}, - {file = "lxml-6.0.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:edf6e4c8fe14dfe316939711e3ece3f9a20760aabf686051b537a7562f4da91a"}, - {file = "lxml-6.0.0-cp39-cp39-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:048a930eb4572829604982e39a0c7289ab5dc8abc7fc9f5aabd6fbc08c154e93"}, - {file = "lxml-6.0.0-cp39-cp39-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c0b5fa5eda84057a4f1bbb4bb77a8c28ff20ae7ce211588d698ae453e13c6281"}, - {file = "lxml-6.0.0-cp39-cp39-manylinux_2_31_armv7l.whl", hash = "sha256:c352fc8f36f7e9727db17adbf93f82499457b3d7e5511368569b4c5bd155a922"}, - {file = "lxml-6.0.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:8db5dc617cb937ae17ff3403c3a70a7de9df4852a046f93e71edaec678f721d0"}, - {file = "lxml-6.0.0-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:2181e4b1d07dde53986023482673c0f1fba5178ef800f9ab95ad791e8bdded6a"}, - {file = "lxml-6.0.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:b3c98d5b24c6095e89e03d65d5c574705be3d49c0d8ca10c17a8a4b5201b72f5"}, - {file = "lxml-6.0.0-cp39-cp39-win32.whl", hash = "sha256:04d67ceee6db4bcb92987ccb16e53bef6b42ced872509f333c04fb58a3315256"}, - {file = "lxml-6.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:e0b1520ef900e9ef62e392dd3d7ae4f5fa224d1dd62897a792cf353eb20b6cae"}, - {file = "lxml-6.0.0-cp39-cp39-win_arm64.whl", hash = "sha256:e35e8aaaf3981489f42884b59726693de32dabfc438ac10ef4eb3409961fd402"}, - {file = "lxml-6.0.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:dbdd7679a6f4f08152818043dbb39491d1af3332128b3752c3ec5cebc0011a72"}, - {file = "lxml-6.0.0-pp310-pypy310_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:40442e2a4456e9910875ac12951476d36c0870dcb38a68719f8c4686609897c4"}, - {file = "lxml-6.0.0-pp310-pypy310_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:db0efd6bae1c4730b9c863fc4f5f3c0fa3e8f05cae2c44ae141cb9dfc7d091dc"}, - {file = "lxml-6.0.0-pp310-pypy310_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9ab542c91f5a47aaa58abdd8ea84b498e8e49fe4b883d67800017757a3eb78e8"}, - {file = "lxml-6.0.0-pp310-pypy310_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:013090383863b72c62a702d07678b658fa2567aa58d373d963cca245b017e065"}, - {file = "lxml-6.0.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:c86df1c9af35d903d2b52d22ea3e66db8058d21dc0f59842ca5deb0595921141"}, - {file = "lxml-6.0.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:4337e4aec93b7c011f7ee2e357b0d30562edd1955620fdd4aeab6aacd90d43c5"}, - {file = "lxml-6.0.0-pp39-pypy39_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ae74f7c762270196d2dda56f8dd7309411f08a4084ff2dfcc0b095a218df2e06"}, - {file = "lxml-6.0.0-pp39-pypy39_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:059c4cbf3973a621b62ea3132934ae737da2c132a788e6cfb9b08d63a0ef73f9"}, - {file = "lxml-6.0.0-pp39-pypy39_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:17f090a9bc0ce8da51a5632092f98a7e7f84bca26f33d161a98b57f7fb0004ca"}, - {file = "lxml-6.0.0-pp39-pypy39_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9da022c14baeec36edfcc8daf0e281e2f55b950249a455776f0d1adeeada4734"}, - {file = "lxml-6.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a55da151d0b0c6ab176b4e761670ac0e2667817a1e0dadd04a01d0561a219349"}, - {file = "lxml-6.0.0.tar.gz", hash = "sha256:032e65120339d44cdc3efc326c9f660f5f7205f3a535c1fdbf898b29ea01fb72"}, + {file = "lxml-6.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:41dcc4c7b10484257cbd6c37b83ddb26df2b0e5aff5ac00d095689015af868ec"}, + {file = "lxml-6.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a31286dbb5e74c8e9a5344465b77ab4c5bd511a253b355b5ca2fae7e579fafec"}, + {file = "lxml-6.1.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:1bc4cc83fb7f66ffb16f74d6dd0162e144333fc36ebcce32246f80c8735b2551"}, + {file = "lxml-6.1.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:20cf4d0651987c906a2f5cba4e3a8d6ba4bfdf973cfe2a96c0d6053888ea2ecd"}, + {file = "lxml-6.1.0-cp310-cp310-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ffb34ea45a82dd637c2c97ae1bbb920850c1e59bcae79ce1c15af531d83e7215"}, + {file = "lxml-6.1.0-cp310-cp310-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a1d9b99e5b2597e4f5aed2484fef835256fa1b68a19e4265c97628ef4bf8bcf4"}, + {file = "lxml-6.1.0-cp310-cp310-manylinux_2_28_i686.whl", hash = "sha256:d43aa26dcda363f21e79afa0668f5029ed7394b3bb8c92a6927a3d34e8b610ea"}, + {file = "lxml-6.1.0-cp310-cp310-manylinux_2_31_armv7l.whl", hash = "sha256:6262b87f9e5c1e5fe501d6c153247289af42eb44ad7660b9b3de17baaf92d6f6"}, + {file = "lxml-6.1.0-cp310-cp310-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:d1392c569c032f78a11a25d1de1c43fff13294c793b39e19d84fade3045cbbc3"}, + {file = "lxml-6.1.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:045e387d1f4f42a418380930fa3f45c73c9b392faf67e495e58902e68e8f44a7"}, + {file = "lxml-6.1.0-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:9f93d5b8b07f73e8c77e3c6556a3db269918390c804b5e5fcdd4858232cc8f16"}, + {file = "lxml-6.1.0-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:de550d129f18d8ab819651ffe4f38b1b713c7e116707de3c0c6400d0ef34fbc1"}, + {file = "lxml-6.1.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:c08da09dc003c9e8c70e06b53a11db6fb3b250c21c4236b03c7d7b443c318e7a"}, + {file = "lxml-6.1.0-cp310-cp310-win32.whl", hash = "sha256:37448bf9c7d7adfc5254763901e2bbd6bb876228dfc1fc7f66e58c06368a7544"}, + {file = "lxml-6.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:2593a0a6621545b9095b71ad74ed4226eba438a7d9fc3712a99bdb15508cf93a"}, + {file = "lxml-6.1.0-cp310-cp310-win_arm64.whl", hash = "sha256:e80807d72f96b96ad5588cb85c75616e4f2795a7737d4630784c51497beb7776"}, + {file = "lxml-6.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:cec05be8c876f92a5aa07b01d60bbb4d11cfbdd654cad0561c0d7b5c043a61b9"}, + {file = "lxml-6.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9c03e048b6ce8e77b09c734e931584894ecd58d08296804ca2d0b184c933ce50"}, + {file = "lxml-6.1.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:942454ff253da14218f972b23dc72fa4edf6c943f37edd19cd697618b626fac5"}, + {file = "lxml-6.1.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d036ee7b99d5148072ac7c9b847193decdfeac633db350363f7bce4fff108f0e"}, + {file = "lxml-6.1.0-cp311-cp311-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3ae5d8d5427f3cc317e7950f2da7ad276df0cfa37b8de2f5658959e618ea8512"}, + {file = "lxml-6.1.0-cp311-cp311-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:363e47283bde87051b821826e71dde47f107e08614e1aa312ba0c5711e77738c"}, + {file = "lxml-6.1.0-cp311-cp311-manylinux_2_28_i686.whl", hash = "sha256:f504d861d9f2a8f94020130adac88d66de93841707a23a86244263d1e54682f5"}, + {file = "lxml-6.1.0-cp311-cp311-manylinux_2_31_armv7l.whl", hash = "sha256:23a5dc68e08ed13331d61815c08f260f46b4a60fdd1640bbeb82cf89a9d90289"}, + {file = "lxml-6.1.0-cp311-cp311-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:f15401d8d3dbf239e23c818afc10c7207f7b95f9a307e092122b6f86dd43209a"}, + {file = "lxml-6.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:fcf3da95e93349e0647d48d4b36a12783105bcc74cb0c416952f9988410846a3"}, + {file = "lxml-6.1.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:0d082495c5fcf426e425a6e28daaba1fcb6d8f854a4ff01effb1f1f381203eb9"}, + {file = "lxml-6.1.0-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:e3c4f84b24a1fcba435157d111c4b755099c6ff00a3daee1ad281817de75ed11"}, + {file = "lxml-6.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:976a6b39b1b13e8c354ad8d3f261f3a4ac6609518af91bdb5094760a08f132c4"}, + {file = "lxml-6.1.0-cp311-cp311-win32.whl", hash = "sha256:857efde87d365706590847b916baff69c0bc9252dc5af030e378c9800c0b10e3"}, + {file = "lxml-6.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:183bfb45a493081943be7ea2b5adfc2b611e1cf377cefa8b8a8be404f45ef9a7"}, + {file = "lxml-6.1.0-cp311-cp311-win_arm64.whl", hash = "sha256:19f4164243fc206d12ed3d866e80e74f5bc3627966520da1a5f97e42c32a3f39"}, + {file = "lxml-6.1.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:d2f17a16cd8751e8eb233a7e41aecdf8e511712e00088bf9be455f604cd0d28d"}, + {file = "lxml-6.1.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f0cea5b1d3e6e77d71bd2b9972eb2446221a69dc52bb0b9c3c6f6e5700592d93"}, + {file = "lxml-6.1.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:fc46da94826188ed45cb53bd8e3fc076ae22675aea2087843d4735627f867c6d"}, + {file = "lxml-6.1.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:9147d8e386ec3b82c3b15d88927f734f565b0aaadef7def562b853adca45784a"}, + {file = "lxml-6.1.0-cp312-cp312-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5715e0e28736a070f3f34a7ccc09e2fdcba0e3060abbcf61a1a5718ff6d6b105"}, + {file = "lxml-6.1.0-cp312-cp312-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:4937460dc5df0cdd2f06a86c285c28afda06aefa3af949f9477d3e8df430c485"}, + {file = "lxml-6.1.0-cp312-cp312-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bc783ee3147e60a25aa0445ea82b3e8aabb83b240f2b95d32cb75587ff781814"}, + {file = "lxml-6.1.0-cp312-cp312-manylinux_2_28_i686.whl", hash = "sha256:40d9189f80075f2e1f88db21ef815a2b17b28adf8e50aaf5c789bfe737027f32"}, + {file = "lxml-6.1.0-cp312-cp312-manylinux_2_31_armv7l.whl", hash = "sha256:05b9b8787e35bec69e68daf4952b2e6dfcfb0db7ecf1a06f8cdfbbac4eb71aad"}, + {file = "lxml-6.1.0-cp312-cp312-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:0f0f08beb0182e3e9a86fae124b3c47a7b41b7b69b225e1377db983802404e54"}, + {file = "lxml-6.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:73becf6d8c81d4c76b1014dbd3584cb26d904492dcf73ca85dc8bff08dcd6d2d"}, + {file = "lxml-6.1.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:1ae225f66e5938f4fa29d37e009a3bb3b13032ac57eb4eb42afa44f6e4054e69"}, + {file = "lxml-6.1.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:690022c7fae793b0489aa68a658822cea83e0d5933781811cabbf5ea3bcfe73d"}, + {file = "lxml-6.1.0-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:63aeafc26aac0be8aff14af7871249e87ea1319be92090bfd632ec68e03b16a5"}, + {file = "lxml-6.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:264c605ab9c0e4aa1a679636f4582c4d3313700009fac3ec9c3412ed0d8f3e1d"}, + {file = "lxml-6.1.0-cp312-cp312-win32.whl", hash = "sha256:56971379bc5ee8037c5a0f09fa88f66cdb7d37c3e38af3e45cf539f41131ac1f"}, + {file = "lxml-6.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:bba078de0031c219e5dd06cf3e6bf8fb8e6e64a77819b358f53bb132e3e03366"}, + {file = "lxml-6.1.0-cp312-cp312-win_arm64.whl", hash = "sha256:c3592631e652afa34999a088f98ba7dfc7d6aff0d535c410bea77a71743f3819"}, + {file = "lxml-6.1.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:a0092f2b107b69601adf562a57c956fbb596e05e3e6651cabd3054113b007e45"}, + {file = "lxml-6.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:fc7140d7a7386e6b545d41b7358f4d02b656d4053f5fa6859f92f4b9c2572c4d"}, + {file = "lxml-6.1.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:419c58fc92cc3a2c3fa5f78c63dbf5da70c1fa9c1b25f25727ecee89a96c7de2"}, + {file = "lxml-6.1.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:37fabd1452852636cf38ecdcc9dd5ca4bba7a35d6c53fa09725deeb894a87491"}, + {file = "lxml-6.1.0-cp313-cp313-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a2853c8b2170cc6cd54a6b4d50d2c1a8a7aeca201f23804b4898525c7a152cfc"}, + {file = "lxml-6.1.0-cp313-cp313-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:8e369cbd690e788c8d15e56222d91a09c6a417f49cbc543040cba0fe2e25a79e"}, + {file = "lxml-6.1.0-cp313-cp313-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e69aa6805905807186eb00e66c6d97a935c928275182eb02ee40ba00da9623b2"}, + {file = "lxml-6.1.0-cp313-cp313-manylinux_2_28_i686.whl", hash = "sha256:4bd1bdb8a9e0e2dd229de19b5f8aebac80e916921b4b2c6ef8a52bc131d0c1f9"}, + {file = "lxml-6.1.0-cp313-cp313-manylinux_2_31_armv7l.whl", hash = "sha256:cbd7b79cdcb4986ad78a2662625882747f09db5e4cd7b2ae178a88c9c51b3dfe"}, + {file = "lxml-6.1.0-cp313-cp313-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:43e4d297f11080ec9d64a4b1ad7ac02b4484c9f0e2179d9c4ef78e886e747b88"}, + {file = "lxml-6.1.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cc16682cc987a3da00aa56a3aa3075b08edb10d9b1e476938cfdbee8f3b67181"}, + {file = "lxml-6.1.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:d6d8efe71429635f0559579092bb5e60560d7b9115ee38c4adbea35632e7fa24"}, + {file = "lxml-6.1.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:7e39ab3a28af7784e206d8606ec0e4bcad0190f63a492bca95e94e5a4aef7f6e"}, + {file = "lxml-6.1.0-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:9eb667bf50856c4a58145f8ca2d5e5be160191e79eb9e30855a476191b3c3495"}, + {file = "lxml-6.1.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:7f4a77d6f7edf9230cee3e1f7f6764722a41604ee5681844f18db9a81ea0ec33"}, + {file = "lxml-6.1.0-cp313-cp313-win32.whl", hash = "sha256:28902146ffbe5222df411c5d19e5352490122e14447e98cd118907ee3fd6ee62"}, + {file = "lxml-6.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:4a1503c56e4e2b38dc76f2f2da7bae69670c0f1933e27cfa34b2fa5876410b16"}, + {file = "lxml-6.1.0-cp313-cp313-win_arm64.whl", hash = "sha256:e0af85773850417d994d019741239b901b22c6680206f46a34766926e466141d"}, + {file = "lxml-6.1.0-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:ab863fd37458fed6456525f297d21239d987800c46e67da5ef04fc6b3dd93ac8"}, + {file = "lxml-6.1.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:6fd8b1df8254ff4fd93fd31da1fc15770bde23ac045be9bb1f87425702f61cc9"}, + {file = "lxml-6.1.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:47024feaae386a92a146af0d2aeed65229bf6fff738e6a11dda6b0015fb8fd03"}, + {file = "lxml-6.1.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3f00972f84450204cd5d93a5395965e348956aaceaadec693a22ec743f8ae3eb"}, + {file = "lxml-6.1.0-cp314-cp314-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:97faa0860e13b05b15a51fb4986421ef7a30f0b3334061c416e0981e9450ca4c"}, + {file = "lxml-6.1.0-cp314-cp314-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:972a6451204798675407beaad97b868d0c733d9a74dafefc63120b81b8c2de28"}, + {file = "lxml-6.1.0-cp314-cp314-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fe022f20bc4569ec66b63b3fb275a3d628d9d32da6326b2982584104db6d3086"}, + {file = "lxml-6.1.0-cp314-cp314-manylinux_2_28_i686.whl", hash = "sha256:75c4c7c619a744f972f4451bf5adf6d0fb00992a1ffc9fd78e13b0bc817cc99f"}, + {file = "lxml-6.1.0-cp314-cp314-manylinux_2_31_armv7l.whl", hash = "sha256:3648f20d25102a22b6061c688beb3a805099ea4beb0a01ce62975d926944d292"}, + {file = "lxml-6.1.0-cp314-cp314-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:77b9f99b17cbf14026d1e618035077060fc7195dd940d025149f3e2e830fbfcb"}, + {file = "lxml-6.1.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:32662519149fd7a9db354175aa5e417d83485a8039b8aaa62f873ceee7ea4cad"}, + {file = "lxml-6.1.0-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:73d658216fc173cf2c939e90e07b941c5e12736b0bf6a99e7af95459cfe8eabb"}, + {file = "lxml-6.1.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:ac4db068889f8772a4a698c5980ec302771bb545e10c4b095d4c8be26749616f"}, + {file = "lxml-6.1.0-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:45e9dfbd1b661eb64ba0d4dbe762bd210c42d86dd1e5bd2bdf89d634231beb43"}, + {file = "lxml-6.1.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:89e8d73d09ac696a5ba42ec69787913d53284f12092f651506779314f10ba585"}, + {file = "lxml-6.1.0-cp314-cp314-win32.whl", hash = "sha256:ebe33f4ec1b2de38ceb225a1749a2965855bffeef435ba93cd2d5d540783bf2f"}, + {file = "lxml-6.1.0-cp314-cp314-win_amd64.whl", hash = "sha256:398443df51c538bd578529aa7e5f7afc6c292644174b47961f3bf87fe5741120"}, + {file = "lxml-6.1.0-cp314-cp314-win_arm64.whl", hash = "sha256:8c8984e1d8c4b3949e419158fda14d921ff703a9ed8a47236c6eb7a2b6cb4946"}, + {file = "lxml-6.1.0-cp314-cp314t-macosx_10_15_universal2.whl", hash = "sha256:1081dd10bc6fa437db2500e13993abf7cc30716d0a2f40e65abb935f02ec559c"}, + {file = "lxml-6.1.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:dabecc48db5f42ba348d1f5d5afdc54c6c4cc758e676926c7cd327045749517d"}, + {file = "lxml-6.1.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:e3dd5fe19c9e0ac818a9c7f132a5e43c1339ec1cbbfecb1a938bd3a47875b7c9"}, + {file = "lxml-6.1.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:9e7b0a4ca6dcc007a4cef00a761bba2dea959de4bd2df98f926b33c92ca5dfb9"}, + {file = "lxml-6.1.0-cp314-cp314t-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5d27bbe326c6b539c64b42638b18bc6003a8d88f76213a97ac9ed4f885efeab7"}, + {file = "lxml-6.1.0-cp314-cp314t-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c4e425db0c5445ef0ad56b0eec54f89b88b2d884656e536a90b2f52aecb4ca86"}, + {file = "lxml-6.1.0-cp314-cp314t-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4b89b098105b8599dc57adac95d1813409ac476d3c948a498775d3d0c6124bfb"}, + {file = "lxml-6.1.0-cp314-cp314t-manylinux_2_28_i686.whl", hash = "sha256:c4a699432846df86cc3de502ee85f445ebad748a1c6021d445f3e514d2cd4b1c"}, + {file = "lxml-6.1.0-cp314-cp314t-manylinux_2_31_armv7l.whl", hash = "sha256:30e7b2ed63b6c8e97cca8af048589a788ab5c9c905f36d9cf1c2bb549f450d2f"}, + {file = "lxml-6.1.0-cp314-cp314t-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:022981127642fe19866d2907d76241bb07ed21749601f727d5d5dd1ce5d1b773"}, + {file = "lxml-6.1.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:23cad0cc86046d4222f7f418910e46b89971c5a45d3c8abfad0f64b7b05e4a9b"}, + {file = "lxml-6.1.0-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:21c3302068f50d1e8728c67c87ba92aa87043abee517aa2576cca1855326b405"}, + {file = "lxml-6.1.0-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:be10838781cb3be19251e276910cd508fe127e27c3242e50521521a0f3781690"}, + {file = "lxml-6.1.0-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:2173a7bffe97667bbf0767f8a99e587740a8c56fdf3befac4b09cb29a80276fd"}, + {file = "lxml-6.1.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:c6854e9cf99c84beb004eecd7d3a3868ef1109bf2b1df92d7bc11e96a36c2180"}, + {file = "lxml-6.1.0-cp314-cp314t-win32.whl", hash = "sha256:00750d63ef0031a05331b9223463b1c7c02b9004cef2346a5b2877f0f9494dd2"}, + {file = "lxml-6.1.0-cp314-cp314t-win_amd64.whl", hash = "sha256:80410c3a7e3c617af04de17caa9f9f20adaa817093293d69eae7d7d0522836f5"}, + {file = "lxml-6.1.0-cp314-cp314t-win_arm64.whl", hash = "sha256:26dd9f57ee3bd41e7d35b4c98a2ffd89ed11591649f421f0ec19f67d50ec67ac"}, + {file = "lxml-6.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b6c2f225662bc5ad416bdd06f72ca301b31b39ce4261f0e0097017fc2891b940"}, + {file = "lxml-6.1.0-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:a86f06f059e22a0d574990ee2df24ede03f7f3c68c1336293eee9536c4c776cd"}, + {file = "lxml-6.1.0-cp38-cp38-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:468479e52ecf3ec23799c863336d02c05fc2f7ffd1a1424eeeb9a28d4eb69d13"}, + {file = "lxml-6.1.0-cp38-cp38-manylinux_2_28_i686.whl", hash = "sha256:a02ca8fe48815bddcfca3248efe54451abb9dbf2f7d1c5744c8aa4142d476919"}, + {file = "lxml-6.1.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:bb40648d96157f9081886defe13eac99253e663be969ff938a9289eff6e47b72"}, + {file = "lxml-6.1.0-cp38-cp38-win32.whl", hash = "sha256:1dd6a1c3ad4cb674f44525d9957f3e9c209bb6dd9213245195167a281fcc2bdc"}, + {file = "lxml-6.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:4e2c54d6b47361d0f1d3bc8d4e082ad87201e56ccdcca4d3b9ee3644ff595ec8"}, + {file = "lxml-6.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:920354904d1cb86577d4b3cfe2830c2dbe81d6f4449e57ada428f1609b5985f7"}, + {file = "lxml-6.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c871299c595ee004d186f61840f0bfc4941aa3f17c8ba4a565ead7e4f4f820ee"}, + {file = "lxml-6.1.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d0d799ff958655781296ec870d5e2448e75150da2b3d07f13ff5b0c2c35beefd"}, + {file = "lxml-6.1.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:7ba11752e346bd804ea312ec2eea2532dfa8b8d3261d81a32ef9e6ab16256280"}, + {file = "lxml-6.1.0-cp39-cp39-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:26c5272c6a4bf4cf32d3f5a7890c942b0e04438691157d341616d02cca74d4bd"}, + {file = "lxml-6.1.0-cp39-cp39-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c53fa3a5a52122d590e847a57ccf955557b9634a7f99ff5a35131321b0a85317"}, + {file = "lxml-6.1.0-cp39-cp39-manylinux_2_28_i686.whl", hash = "sha256:76b958b4ea3104483c20f74866d55aa056546e15ebe83dd7aecd63698f43b755"}, + {file = "lxml-6.1.0-cp39-cp39-manylinux_2_31_armv7l.whl", hash = "sha256:8c11b984b5ce6add4dccc7144c7be5d364d298f15b0c6a57da1991baedc750ce"}, + {file = "lxml-6.1.0-cp39-cp39-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:d3829a6e6fd550a219564912d4002c537f65da4c6ae4e093cc34462f4fa027ad"}, + {file = "lxml-6.1.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:52b0ac6903cf74ebf997eb8c682d2fbac7d1ab7e4c552413eec55868a9b73f39"}, + {file = "lxml-6.1.0-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:29f5c00cb7d752bce2c70ebd2d31b0a42f9499ffdd3ecb2f31a5b73ee43031ad"}, + {file = "lxml-6.1.0-cp39-cp39-musllinux_1_2_riscv64.whl", hash = "sha256:c748ebcb6877de89f48ab90ca96642ac458fff5dec291a2b9337cd4d0934e383"}, + {file = "lxml-6.1.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:08950a23f296b3f83521577274e3d3b0f3d739bf2e68d01a752e4288bc50d286"}, + {file = "lxml-6.1.0-cp39-cp39-win32.whl", hash = "sha256:11a873c77a181b4fef9c2e357d08ed399542c2af1390101da66720a19c7c9618"}, + {file = "lxml-6.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:81ff55c70b67d19d52b6fd118a114c0a4c97d799cd3089ff9bd9e2ff4b414ee2"}, + {file = "lxml-6.1.0-cp39-cp39-win_arm64.whl", hash = "sha256:481d6e2104285d9add34f41b42b247b76b61c5b5c26c303c2e9707bbf8bd9a64"}, + {file = "lxml-6.1.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:546b66c0dd1bb8d9fa89d7123e5fa19a8aff3a1f2141eb22df96112afb17b842"}, + {file = "lxml-6.1.0-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5cfa1a34df366d9dc0d5eaf420f4cf2bb1e1bebe1066d1c2fc28c179f8a4004c"}, + {file = "lxml-6.1.0-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:db88156fcf544cdbf0d95588051515cfdfd4c876fc66444eb98bceb5d6db76de"}, + {file = "lxml-6.1.0-pp311-pypy311_pp73-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:07f98f5496f96bf724b1e3c933c107f0cbf2745db18c03d2e13a291c3afd2635"}, + {file = "lxml-6.1.0-pp311-pypy311_pp73-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4642e04449a1e164b5ff71ffd901ddb772dfabf5c9adf1b7be5dffe1212bc037"}, + {file = "lxml-6.1.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:7da13bb6fbadfafb474e0226a30570a3445cfd47c86296f2446dafbd77079ace"}, + {file = "lxml-6.1.0.tar.gz", hash = "sha256:bfd57d8008c4965709a919c3e9a98f76c2c7cb319086b3d26858250620023b13"}, ] [package.extras] @@ -923,73 +1128,101 @@ testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] [[package]] name = "markupsafe" -version = "3.0.2" +version = "3.0.3" description = "Safely add untrusted strings to HTML/XML markup." optional = false python-versions = ">=3.9" groups = ["dev", "docs"] files = [ - {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8"}, - {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158"}, - {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38a9ef736c01fccdd6600705b09dc574584b89bea478200c5fbf112a6b0d5579"}, - {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbcb445fa71794da8f178f0f6d66789a28d7319071af7a496d4d507ed566270d"}, - {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57cb5a3cf367aeb1d316576250f65edec5bb3be939e9247ae594b4bcbc317dfb"}, - {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3809ede931876f5b2ec92eef964286840ed3540dadf803dd570c3b7e13141a3b"}, - {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e07c3764494e3776c602c1e78e298937c3315ccc9043ead7e685b7f2b8d47b3c"}, - {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b424c77b206d63d500bcb69fa55ed8d0e6a3774056bdc4839fc9298a7edca171"}, - {file = "MarkupSafe-3.0.2-cp310-cp310-win32.whl", hash = "sha256:fcabf5ff6eea076f859677f5f0b6b5c1a51e70a376b0579e0eadef8db48c6b50"}, - {file = "MarkupSafe-3.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:6af100e168aa82a50e186c82875a5893c5597a0c1ccdb0d8b40240b1f28b969a"}, - {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d"}, - {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93"}, - {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832"}, - {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84"}, - {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca"}, - {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798"}, - {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e"}, - {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4"}, - {file = "MarkupSafe-3.0.2-cp311-cp311-win32.whl", hash = "sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d"}, - {file = "MarkupSafe-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b"}, - {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf"}, - {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225"}, - {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028"}, - {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8"}, - {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c"}, - {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557"}, - {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22"}, - {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48"}, - {file = "MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = "sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30"}, - {file = "MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f"}, - {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c"}, - {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb"}, - {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c"}, - {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d"}, - {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe"}, - {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5"}, - {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a"}, - {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9"}, - {file = "MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6"}, - {file = "MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:eaa0a10b7f72326f1372a713e73c3f739b524b3af41feb43e4921cb529f5929a"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:48032821bbdf20f5799ff537c7ac3d1fba0ba032cfc06194faffa8cda8b560ff"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a9d3f5f0901fdec14d8d2f66ef7d035f2157240a433441719ac9a3fba440b13"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88b49a3b9ff31e19998750c38e030fc7bb937398b1f78cfa599aaef92d693144"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cfad01eed2c2e0c01fd0ecd2ef42c492f7f93902e39a42fc9ee1692961443a29"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1225beacc926f536dc82e45f8a4d68502949dc67eea90eab715dea3a21c1b5f0"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:3169b1eefae027567d1ce6ee7cae382c57fe26e82775f460f0b2778beaad66c0"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:eb7972a85c54febfb25b5c4b4f3af4dcc731994c7da0d8a0b4a6eb0640e1d178"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-win32.whl", hash = "sha256:8c4e8c3ce11e1f92f6536ff07154f9d49677ebaaafc32db9db4620bc11ed480f"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:6e296a513ca3d94054c2c881cc913116e90fd030ad1c656b3869762b754f5f8a"}, - {file = "markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0"}, + {file = "markupsafe-3.0.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2f981d352f04553a7171b8e44369f2af4055f888dfb147d55e42d29e29e74559"}, + {file = "markupsafe-3.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e1c1493fb6e50ab01d20a22826e57520f1284df32f2d8601fdd90b6304601419"}, + {file = "markupsafe-3.0.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1ba88449deb3de88bd40044603fafffb7bc2b055d626a330323a9ed736661695"}, + {file = "markupsafe-3.0.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f42d0984e947b8adf7dd6dde396e720934d12c506ce84eea8476409563607591"}, + {file = "markupsafe-3.0.3-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:c0c0b3ade1c0b13b936d7970b1d37a57acde9199dc2aecc4c336773e1d86049c"}, + {file = "markupsafe-3.0.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:0303439a41979d9e74d18ff5e2dd8c43ed6c6001fd40e5bf2e43f7bd9bbc523f"}, + {file = "markupsafe-3.0.3-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:d2ee202e79d8ed691ceebae8e0486bd9a2cd4794cec4824e1c99b6f5009502f6"}, + {file = "markupsafe-3.0.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:177b5253b2834fe3678cb4a5f0059808258584c559193998be2601324fdeafb1"}, + {file = "markupsafe-3.0.3-cp310-cp310-win32.whl", hash = "sha256:2a15a08b17dd94c53a1da0438822d70ebcd13f8c3a95abe3a9ef9f11a94830aa"}, + {file = "markupsafe-3.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:c4ffb7ebf07cfe8931028e3e4c85f0357459a3f9f9490886198848f4fa002ec8"}, + {file = "markupsafe-3.0.3-cp310-cp310-win_arm64.whl", hash = "sha256:e2103a929dfa2fcaf9bb4e7c091983a49c9ac3b19c9061b6d5427dd7d14d81a1"}, + {file = "markupsafe-3.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1cc7ea17a6824959616c525620e387f6dd30fec8cb44f649e31712db02123dad"}, + {file = "markupsafe-3.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4bd4cd07944443f5a265608cc6aab442e4f74dff8088b0dfc8238647b8f6ae9a"}, + {file = "markupsafe-3.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b5420a1d9450023228968e7e6a9ce57f65d148ab56d2313fcd589eee96a7a50"}, + {file = "markupsafe-3.0.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0bf2a864d67e76e5c9a34dc26ec616a66b9888e25e7b9460e1c76d3293bd9dbf"}, + {file = "markupsafe-3.0.3-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc51efed119bc9cfdf792cdeaa4d67e8f6fcccab66ed4bfdd6bde3e59bfcbb2f"}, + {file = "markupsafe-3.0.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:068f375c472b3e7acbe2d5318dea141359e6900156b5b2ba06a30b169086b91a"}, + {file = "markupsafe-3.0.3-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:7be7b61bb172e1ed687f1754f8e7484f1c8019780f6f6b0786e76bb01c2ae115"}, + {file = "markupsafe-3.0.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f9e130248f4462aaa8e2552d547f36ddadbeaa573879158d721bbd33dfe4743a"}, + {file = "markupsafe-3.0.3-cp311-cp311-win32.whl", hash = "sha256:0db14f5dafddbb6d9208827849fad01f1a2609380add406671a26386cdf15a19"}, + {file = "markupsafe-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:de8a88e63464af587c950061a5e6a67d3632e36df62b986892331d4620a35c01"}, + {file = "markupsafe-3.0.3-cp311-cp311-win_arm64.whl", hash = "sha256:3b562dd9e9ea93f13d53989d23a7e775fdfd1066c33494ff43f5418bc8c58a5c"}, + {file = "markupsafe-3.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d53197da72cc091b024dd97249dfc7794d6a56530370992a5e1a08983ad9230e"}, + {file = "markupsafe-3.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1872df69a4de6aead3491198eaf13810b565bdbeec3ae2dc8780f14458ec73ce"}, + {file = "markupsafe-3.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3a7e8ae81ae39e62a41ec302f972ba6ae23a5c5396c8e60113e9066ef893da0d"}, + {file = "markupsafe-3.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d6dd0be5b5b189d31db7cda48b91d7e0a9795f31430b7f271219ab30f1d3ac9d"}, + {file = "markupsafe-3.0.3-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:94c6f0bb423f739146aec64595853541634bde58b2135f27f61c1ffd1cd4d16a"}, + {file = "markupsafe-3.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:be8813b57049a7dc738189df53d69395eba14fb99345e0a5994914a3864c8a4b"}, + {file = "markupsafe-3.0.3-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:83891d0e9fb81a825d9a6d61e3f07550ca70a076484292a70fde82c4b807286f"}, + {file = "markupsafe-3.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:77f0643abe7495da77fb436f50f8dab76dbc6e5fd25d39589a0f1fe6548bfa2b"}, + {file = "markupsafe-3.0.3-cp312-cp312-win32.whl", hash = "sha256:d88b440e37a16e651bda4c7c2b930eb586fd15ca7406cb39e211fcff3bf3017d"}, + {file = "markupsafe-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:26a5784ded40c9e318cfc2bdb30fe164bdb8665ded9cd64d500a34fb42067b1c"}, + {file = "markupsafe-3.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:35add3b638a5d900e807944a078b51922212fb3dedb01633a8defc4b01a3c85f"}, + {file = "markupsafe-3.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e1cf1972137e83c5d4c136c43ced9ac51d0e124706ee1c8aa8532c1287fa8795"}, + {file = "markupsafe-3.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:116bb52f642a37c115f517494ea5feb03889e04df47eeff5b130b1808ce7c219"}, + {file = "markupsafe-3.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:133a43e73a802c5562be9bbcd03d090aa5a1fe899db609c29e8c8d815c5f6de6"}, + {file = "markupsafe-3.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ccfcd093f13f0f0b7fdd0f198b90053bf7b2f02a3927a30e63f3ccc9df56b676"}, + {file = "markupsafe-3.0.3-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:509fa21c6deb7a7a273d629cf5ec029bc209d1a51178615ddf718f5918992ab9"}, + {file = "markupsafe-3.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a4afe79fb3de0b7097d81da19090f4df4f8d3a2b3adaa8764138aac2e44f3af1"}, + {file = "markupsafe-3.0.3-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:795e7751525cae078558e679d646ae45574b47ed6e7771863fcc079a6171a0fc"}, + {file = "markupsafe-3.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8485f406a96febb5140bfeca44a73e3ce5116b2501ac54fe953e488fb1d03b12"}, + {file = "markupsafe-3.0.3-cp313-cp313-win32.whl", hash = "sha256:bdd37121970bfd8be76c5fb069c7751683bdf373db1ed6c010162b2a130248ed"}, + {file = "markupsafe-3.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:9a1abfdc021a164803f4d485104931fb8f8c1efd55bc6b748d2f5774e78b62c5"}, + {file = "markupsafe-3.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:7e68f88e5b8799aa49c85cd116c932a1ac15caaa3f5db09087854d218359e485"}, + {file = "markupsafe-3.0.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:218551f6df4868a8d527e3062d0fb968682fe92054e89978594c28e642c43a73"}, + {file = "markupsafe-3.0.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3524b778fe5cfb3452a09d31e7b5adefeea8c5be1d43c4f810ba09f2ceb29d37"}, + {file = "markupsafe-3.0.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4e885a3d1efa2eadc93c894a21770e4bc67899e3543680313b09f139e149ab19"}, + {file = "markupsafe-3.0.3-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8709b08f4a89aa7586de0aadc8da56180242ee0ada3999749b183aa23df95025"}, + {file = "markupsafe-3.0.3-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:b8512a91625c9b3da6f127803b166b629725e68af71f8184ae7e7d54686a56d6"}, + {file = "markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9b79b7a16f7fedff2495d684f2b59b0457c3b493778c9eed31111be64d58279f"}, + {file = "markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:12c63dfb4a98206f045aa9563db46507995f7ef6d83b2f68eda65c307c6829eb"}, + {file = "markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:8f71bc33915be5186016f675cd83a1e08523649b0e33efdb898db577ef5bb009"}, + {file = "markupsafe-3.0.3-cp313-cp313t-win32.whl", hash = "sha256:69c0b73548bc525c8cb9a251cddf1931d1db4d2258e9599c28c07ef3580ef354"}, + {file = "markupsafe-3.0.3-cp313-cp313t-win_amd64.whl", hash = "sha256:1b4b79e8ebf6b55351f0d91fe80f893b4743f104bff22e90697db1590e47a218"}, + {file = "markupsafe-3.0.3-cp313-cp313t-win_arm64.whl", hash = "sha256:ad2cf8aa28b8c020ab2fc8287b0f823d0a7d8630784c31e9ee5edea20f406287"}, + {file = "markupsafe-3.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:eaa9599de571d72e2daf60164784109f19978b327a3910d3e9de8c97b5b70cfe"}, + {file = "markupsafe-3.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c47a551199eb8eb2121d4f0f15ae0f923d31350ab9280078d1e5f12b249e0026"}, + {file = "markupsafe-3.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f34c41761022dd093b4b6896d4810782ffbabe30f2d443ff5f083e0cbbb8c737"}, + {file = "markupsafe-3.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:457a69a9577064c05a97c41f4e65148652db078a3a509039e64d3467b9e7ef97"}, + {file = "markupsafe-3.0.3-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e8afc3f2ccfa24215f8cb28dcf43f0113ac3c37c2f0f0806d8c70e4228c5cf4d"}, + {file = "markupsafe-3.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ec15a59cf5af7be74194f7ab02d0f59a62bdcf1a537677ce67a2537c9b87fcda"}, + {file = "markupsafe-3.0.3-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:0eb9ff8191e8498cca014656ae6b8d61f39da5f95b488805da4bb029cccbfbaf"}, + {file = "markupsafe-3.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:2713baf880df847f2bece4230d4d094280f4e67b1e813eec43b4c0e144a34ffe"}, + {file = "markupsafe-3.0.3-cp314-cp314-win32.whl", hash = "sha256:729586769a26dbceff69f7a7dbbf59ab6572b99d94576a5592625d5b411576b9"}, + {file = "markupsafe-3.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:bdc919ead48f234740ad807933cdf545180bfbe9342c2bb451556db2ed958581"}, + {file = "markupsafe-3.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:5a7d5dc5140555cf21a6fefbdbf8723f06fcd2f63ef108f2854de715e4422cb4"}, + {file = "markupsafe-3.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:1353ef0c1b138e1907ae78e2f6c63ff67501122006b0f9abad68fda5f4ffc6ab"}, + {file = "markupsafe-3.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:1085e7fbddd3be5f89cc898938f42c0b3c711fdcb37d75221de2666af647c175"}, + {file = "markupsafe-3.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1b52b4fb9df4eb9ae465f8d0c228a00624de2334f216f178a995ccdcf82c4634"}, + {file = "markupsafe-3.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fed51ac40f757d41b7c48425901843666a6677e3e8eb0abcff09e4ba6e664f50"}, + {file = "markupsafe-3.0.3-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:f190daf01f13c72eac4efd5c430a8de82489d9cff23c364c3ea822545032993e"}, + {file = "markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:e56b7d45a839a697b5eb268c82a71bd8c7f6c94d6fd50c3d577fa39a9f1409f5"}, + {file = "markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:f3e98bb3798ead92273dc0e5fd0f31ade220f59a266ffd8a4f6065e0a3ce0523"}, + {file = "markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:5678211cb9333a6468fb8d8be0305520aa073f50d17f089b5b4b477ea6e67fdc"}, + {file = "markupsafe-3.0.3-cp314-cp314t-win32.whl", hash = "sha256:915c04ba3851909ce68ccc2b8e2cd691618c4dc4c4232fb7982bca3f41fd8c3d"}, + {file = "markupsafe-3.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4faffd047e07c38848ce017e8725090413cd80cbc23d86e55c587bf979e579c9"}, + {file = "markupsafe-3.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:32001d6a8fc98c8cb5c947787c5d08b0a50663d139f1305bac5885d98d9b40fa"}, + {file = "markupsafe-3.0.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:15d939a21d546304880945ca1ecb8a039db6b4dc49b2c5a400387cdae6a62e26"}, + {file = "markupsafe-3.0.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f71a396b3bf33ecaa1626c255855702aca4d3d9fea5e051b41ac59a9c1c41edc"}, + {file = "markupsafe-3.0.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0f4b68347f8c5eab4a13419215bdfd7f8c9b19f2b25520968adfad23eb0ce60c"}, + {file = "markupsafe-3.0.3-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e8fc20152abba6b83724d7ff268c249fa196d8259ff481f3b1476383f8f24e42"}, + {file = "markupsafe-3.0.3-cp39-cp39-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:949b8d66bc381ee8b007cd945914c721d9aba8e27f71959d750a46f7c282b20b"}, + {file = "markupsafe-3.0.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:3537e01efc9d4dccdf77221fb1cb3b8e1a38d5428920e0657ce299b20324d758"}, + {file = "markupsafe-3.0.3-cp39-cp39-musllinux_1_2_riscv64.whl", hash = "sha256:591ae9f2a647529ca990bc681daebdd52c8791ff06c2bfa05b65163e28102ef2"}, + {file = "markupsafe-3.0.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:a320721ab5a1aba0a233739394eb907f8c8da5c98c9181d1161e77a0c8e36f2d"}, + {file = "markupsafe-3.0.3-cp39-cp39-win32.whl", hash = "sha256:df2449253ef108a379b8b5d6b43f4b1a8e81a061d6537becd5582fba5f9196d7"}, + {file = "markupsafe-3.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:7c3fb7d25180895632e5d3148dbdc29ea38ccb7fd210aa27acbd1201a1902c6e"}, + {file = "markupsafe-3.0.3-cp39-cp39-win_arm64.whl", hash = "sha256:38664109c14ffc9e7437e86b4dceb442b0096dfe3541d7864d9cbe1da4cf36c8"}, + {file = "markupsafe-3.0.3.tar.gz", hash = "sha256:722695808f4b6457b320fdc131280796bdceb04ab50fe1795cd540799ebe1698"}, ] [[package]] @@ -1006,18 +1239,18 @@ files = [ [[package]] name = "mdit-py-plugins" -version = "0.4.2" +version = "0.5.0" description = "Collection of plugins for markdown-it-py" optional = false -python-versions = ">=3.8" +python-versions = ">=3.10" groups = ["docs"] files = [ - {file = "mdit_py_plugins-0.4.2-py3-none-any.whl", hash = "sha256:0c673c3f889399a33b95e88d2f0d111b4447bdfea7f237dab2d488f459835636"}, - {file = "mdit_py_plugins-0.4.2.tar.gz", hash = "sha256:5f2cd1fdb606ddf152d37ec30e46101a60512bc0e5fa1a7002c36647b09e26b5"}, + {file = "mdit_py_plugins-0.5.0-py3-none-any.whl", hash = "sha256:07a08422fc1936a5d26d146759e9155ea466e842f5ab2f7d2266dd084c8dab1f"}, + {file = "mdit_py_plugins-0.5.0.tar.gz", hash = "sha256:f4918cb50119f50446560513a8e311d574ff6aaed72606ddae6d35716fe809c6"}, ] [package.dependencies] -markdown-it-py = ">=1.0.0,<4.0.0" +markdown-it-py = ">=2.0.0,<5.0.0" [package.extras] code-style = ["pre-commit"] @@ -1095,14 +1328,14 @@ signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"] [[package]] name = "packaging" -version = "25.0" +version = "26.1" description = "Core utilities for Python packages" optional = false python-versions = ">=3.8" groups = ["dev", "docs"] files = [ - {file = "packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484"}, - {file = "packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f"}, + {file = "packaging-26.1-py3-none-any.whl", hash = "sha256:5d9c0669c6285e491e0ced2eee587eaf67b670d94a19e94e3984a481aba6802f"}, + {file = "packaging-26.1.tar.gz", hash = "sha256:f042152b681c4bfac5cae2742a55e103d27ab2ec0f3d88037136b6bfe7c9c5de"}, ] [[package]] @@ -1154,14 +1387,14 @@ test = ["pytest", "pytest-xdist", "setuptools"] [[package]] name = "pyaml" -version = "25.7.0" +version = "26.2.1" description = "PyYAML-based module to produce a bit more pretty and readable YAML-serialized data" optional = false python-versions = ">=3.8" groups = ["main"] files = [ - {file = "pyaml-25.7.0-py3-none-any.whl", hash = "sha256:ce5d7867cc2b455efdb9b0448324ff7b9f74d99f64650f12ca570102db6b985f"}, - {file = "pyaml-25.7.0.tar.gz", hash = "sha256:e113a64ec16881bf2b092e2beb84b7dcf1bd98096ad17f5f14e8fb782a75d99b"}, + {file = "pyaml-26.2.1-py3-none-any.whl", hash = "sha256:6261c2f0a2f33245286c794ad6ec234be33a73d2b05427079fd343e2812a87cf"}, + {file = "pyaml-26.2.1.tar.gz", hash = "sha256:489dd82997235d4cfcf76a6287fce2f075487d77a6567c271e8d790583690c68"}, ] [package.dependencies] @@ -1184,33 +1417,34 @@ files = [ [[package]] name = "pycparser" -version = "2.22" +version = "3.0" description = "C parser in Python" optional = false -python-versions = ">=3.8" +python-versions = ">=3.10" groups = ["main"] +markers = "platform_python_implementation != \"PyPy\" and implementation_name != \"PyPy\"" files = [ - {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, - {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, + {file = "pycparser-3.0-py3-none-any.whl", hash = "sha256:b727414169a36b7d524c1c3e31839a521725078d7b2ff038656844266160a992"}, + {file = "pycparser-3.0.tar.gz", hash = "sha256:600f49d217304a5902ac3c37e1281c9fe94e4d0489de643a9504c5cdfdfc6b29"}, ] [[package]] name = "pydantic" -version = "2.11.7" +version = "2.13.3" description = "Data validation using Python type hints" optional = false python-versions = ">=3.9" groups = ["main"] files = [ - {file = "pydantic-2.11.7-py3-none-any.whl", hash = "sha256:dde5df002701f6de26248661f6835bbe296a47bf73990135c7d07ce741b9623b"}, - {file = "pydantic-2.11.7.tar.gz", hash = "sha256:d989c3c6cb79469287b1569f7447a17848c998458d49ebe294e975b9baf0f0db"}, + {file = "pydantic-2.13.3-py3-none-any.whl", hash = "sha256:6db14ac8dfc9a1e57f87ea2c0de670c251240f43cb0c30a5130e9720dc612927"}, + {file = "pydantic-2.13.3.tar.gz", hash = "sha256:af09e9d1d09f4e7fe37145c1f577e1d61ceb9a41924bf0094a36506285d0a84d"}, ] [package.dependencies] annotated-types = ">=0.6.0" -pydantic-core = "2.33.2" -typing-extensions = ">=4.12.2" -typing-inspection = ">=0.4.0" +pydantic-core = "2.46.3" +typing-extensions = ">=4.14.1" +typing-inspection = ">=0.4.2" [package.extras] email = ["email-validator (>=2.0.0)"] @@ -1218,126 +1452,147 @@ timezone = ["tzdata ; python_version >= \"3.9\" and platform_system == \"Windows [[package]] name = "pydantic-core" -version = "2.33.2" +version = "2.46.3" description = "Core functionality for Pydantic validation and serialization" optional = false python-versions = ">=3.9" groups = ["main"] files = [ - {file = "pydantic_core-2.33.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2b3d326aaef0c0399d9afffeb6367d5e26ddc24d351dbc9c636840ac355dc5d8"}, - {file = "pydantic_core-2.33.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0e5b2671f05ba48b94cb90ce55d8bdcaaedb8ba00cc5359f6810fc918713983d"}, - {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0069c9acc3f3981b9ff4cdfaf088e98d83440a4c7ea1bc07460af3d4dc22e72d"}, - {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d53b22f2032c42eaaf025f7c40c2e3b94568ae077a606f006d206a463bc69572"}, - {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0405262705a123b7ce9f0b92f123334d67b70fd1f20a9372b907ce1080c7ba02"}, - {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4b25d91e288e2c4e0662b8038a28c6a07eaac3e196cfc4ff69de4ea3db992a1b"}, - {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bdfe4b3789761f3bcb4b1ddf33355a71079858958e3a552f16d5af19768fef2"}, - {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:efec8db3266b76ef9607c2c4c419bdb06bf335ae433b80816089ea7585816f6a"}, - {file = "pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:031c57d67ca86902726e0fae2214ce6770bbe2f710dc33063187a68744a5ecac"}, - {file = "pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:f8de619080e944347f5f20de29a975c2d815d9ddd8be9b9b7268e2e3ef68605a"}, - {file = "pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:73662edf539e72a9440129f231ed3757faab89630d291b784ca99237fb94db2b"}, - {file = "pydantic_core-2.33.2-cp310-cp310-win32.whl", hash = "sha256:0a39979dcbb70998b0e505fb1556a1d550a0781463ce84ebf915ba293ccb7e22"}, - {file = "pydantic_core-2.33.2-cp310-cp310-win_amd64.whl", hash = "sha256:b0379a2b24882fef529ec3b4987cb5d003b9cda32256024e6fe1586ac45fc640"}, - {file = "pydantic_core-2.33.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:4c5b0a576fb381edd6d27f0a85915c6daf2f8138dc5c267a57c08a62900758c7"}, - {file = "pydantic_core-2.33.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e799c050df38a639db758c617ec771fd8fb7a5f8eaaa4b27b101f266b216a246"}, - {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dc46a01bf8d62f227d5ecee74178ffc448ff4e5197c756331f71efcc66dc980f"}, - {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a144d4f717285c6d9234a66778059f33a89096dfb9b39117663fd8413d582dcc"}, - {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:73cf6373c21bc80b2e0dc88444f41ae60b2f070ed02095754eb5a01df12256de"}, - {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3dc625f4aa79713512d1976fe9f0bc99f706a9dee21dfd1810b4bbbf228d0e8a"}, - {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b21b5549499972441da4758d662aeea93f1923f953e9cbaff14b8b9565aef"}, - {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bdc25f3681f7b78572699569514036afe3c243bc3059d3942624e936ec93450e"}, - {file = "pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:fe5b32187cbc0c862ee201ad66c30cf218e5ed468ec8dc1cf49dec66e160cc4d"}, - {file = "pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:bc7aee6f634a6f4a95676fcb5d6559a2c2a390330098dba5e5a5f28a2e4ada30"}, - {file = "pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:235f45e5dbcccf6bd99f9f472858849f73d11120d76ea8707115415f8e5ebebf"}, - {file = "pydantic_core-2.33.2-cp311-cp311-win32.whl", hash = "sha256:6368900c2d3ef09b69cb0b913f9f8263b03786e5b2a387706c5afb66800efd51"}, - {file = "pydantic_core-2.33.2-cp311-cp311-win_amd64.whl", hash = "sha256:1e063337ef9e9820c77acc768546325ebe04ee38b08703244c1309cccc4f1bab"}, - {file = "pydantic_core-2.33.2-cp311-cp311-win_arm64.whl", hash = "sha256:6b99022f1d19bc32a4c2a0d544fc9a76e3be90f0b3f4af413f87d38749300e65"}, - {file = "pydantic_core-2.33.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a7ec89dc587667f22b6a0b6579c249fca9026ce7c333fc142ba42411fa243cdc"}, - {file = "pydantic_core-2.33.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3c6db6e52c6d70aa0d00d45cdb9b40f0433b96380071ea80b09277dba021ddf7"}, - {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e61206137cbc65e6d5256e1166f88331d3b6238e082d9f74613b9b765fb9025"}, - {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb8c529b2819c37140eb51b914153063d27ed88e3bdc31b71198a198e921e011"}, - {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c52b02ad8b4e2cf14ca7b3d918f3eb0ee91e63b3167c32591e57c4317e134f8f"}, - {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:96081f1605125ba0855dfda83f6f3df5ec90c61195421ba72223de35ccfb2f88"}, - {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f57a69461af2a5fa6e6bbd7a5f60d3b7e6cebb687f55106933188e79ad155c1"}, - {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:572c7e6c8bb4774d2ac88929e3d1f12bc45714ae5ee6d9a788a9fb35e60bb04b"}, - {file = "pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:db4b41f9bd95fbe5acd76d89920336ba96f03e149097365afe1cb092fceb89a1"}, - {file = "pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:fa854f5cf7e33842a892e5c73f45327760bc7bc516339fda888c75ae60edaeb6"}, - {file = "pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5f483cfb75ff703095c59e365360cb73e00185e01aaea067cd19acffd2ab20ea"}, - {file = "pydantic_core-2.33.2-cp312-cp312-win32.whl", hash = "sha256:9cb1da0f5a471435a7bc7e439b8a728e8b61e59784b2af70d7c169f8dd8ae290"}, - {file = "pydantic_core-2.33.2-cp312-cp312-win_amd64.whl", hash = "sha256:f941635f2a3d96b2973e867144fde513665c87f13fe0e193c158ac51bfaaa7b2"}, - {file = "pydantic_core-2.33.2-cp312-cp312-win_arm64.whl", hash = "sha256:cca3868ddfaccfbc4bfb1d608e2ccaaebe0ae628e1416aeb9c4d88c001bb45ab"}, - {file = "pydantic_core-2.33.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:1082dd3e2d7109ad8b7da48e1d4710c8d06c253cbc4a27c1cff4fbcaa97a9e3f"}, - {file = "pydantic_core-2.33.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f517ca031dfc037a9c07e748cefd8d96235088b83b4f4ba8939105d20fa1dcd6"}, - {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a9f2c9dd19656823cb8250b0724ee9c60a82f3cdf68a080979d13092a3b0fef"}, - {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2b0a451c263b01acebe51895bfb0e1cc842a5c666efe06cdf13846c7418caa9a"}, - {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ea40a64d23faa25e62a70ad163571c0b342b8bf66d5fa612ac0dec4f069d916"}, - {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fb2d542b4d66f9470e8065c5469ec676978d625a8b7a363f07d9a501a9cb36a"}, - {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdac5d6ffa1b5a83bca06ffe7583f5576555e6c8b3a91fbd25ea7780f825f7d"}, - {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04a1a413977ab517154eebb2d326da71638271477d6ad87a769102f7c2488c56"}, - {file = "pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c8e7af2f4e0194c22b5b37205bfb293d166a7344a5b0d0eaccebc376546d77d5"}, - {file = "pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:5c92edd15cd58b3c2d34873597a1e20f13094f59cf88068adb18947df5455b4e"}, - {file = "pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:65132b7b4a1c0beded5e057324b7e16e10910c106d43675d9bd87d4f38dde162"}, - {file = "pydantic_core-2.33.2-cp313-cp313-win32.whl", hash = "sha256:52fb90784e0a242bb96ec53f42196a17278855b0f31ac7c3cc6f5c1ec4811849"}, - {file = "pydantic_core-2.33.2-cp313-cp313-win_amd64.whl", hash = "sha256:c083a3bdd5a93dfe480f1125926afcdbf2917ae714bdb80b36d34318b2bec5d9"}, - {file = "pydantic_core-2.33.2-cp313-cp313-win_arm64.whl", hash = "sha256:e80b087132752f6b3d714f041ccf74403799d3b23a72722ea2e6ba2e892555b9"}, - {file = "pydantic_core-2.33.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:61c18fba8e5e9db3ab908620af374db0ac1baa69f0f32df4f61ae23f15e586ac"}, - {file = "pydantic_core-2.33.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95237e53bb015f67b63c91af7518a62a8660376a6a0db19b89acc77a4d6199f5"}, - {file = "pydantic_core-2.33.2-cp313-cp313t-win_amd64.whl", hash = "sha256:c2fc0a768ef76c15ab9238afa6da7f69895bb5d1ee83aeea2e3509af4472d0b9"}, - {file = "pydantic_core-2.33.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:a2b911a5b90e0374d03813674bf0a5fbbb7741570dcd4b4e85a2e48d17def29d"}, - {file = "pydantic_core-2.33.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6fa6dfc3e4d1f734a34710f391ae822e0a8eb8559a85c6979e14e65ee6ba2954"}, - {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c54c939ee22dc8e2d545da79fc5381f1c020d6d3141d3bd747eab59164dc89fb"}, - {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:53a57d2ed685940a504248187d5685e49eb5eef0f696853647bf37c418c538f7"}, - {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09fb9dd6571aacd023fe6aaca316bd01cf60ab27240d7eb39ebd66a3a15293b4"}, - {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0e6116757f7959a712db11f3e9c0a99ade00a5bbedae83cb801985aa154f071b"}, - {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d55ab81c57b8ff8548c3e4947f119551253f4e3787a7bbc0b6b3ca47498a9d3"}, - {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c20c462aa4434b33a2661701b861604913f912254e441ab8d78d30485736115a"}, - {file = "pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:44857c3227d3fb5e753d5fe4a3420d6376fa594b07b621e220cd93703fe21782"}, - {file = "pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:eb9b459ca4df0e5c87deb59d37377461a538852765293f9e6ee834f0435a93b9"}, - {file = "pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9fcd347d2cc5c23b06de6d3b7b8275be558a0c90549495c699e379a80bf8379e"}, - {file = "pydantic_core-2.33.2-cp39-cp39-win32.whl", hash = "sha256:83aa99b1285bc8f038941ddf598501a86f1536789740991d7d8756e34f1e74d9"}, - {file = "pydantic_core-2.33.2-cp39-cp39-win_amd64.whl", hash = "sha256:f481959862f57f29601ccced557cc2e817bce7533ab8e01a797a48b49c9692b3"}, - {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5c4aa4e82353f65e548c476b37e64189783aa5384903bfea4f41580f255fddfa"}, - {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d946c8bf0d5c24bf4fe333af284c59a19358aa3ec18cb3dc4370080da1e8ad29"}, - {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87b31b6846e361ef83fedb187bb5b4372d0da3f7e28d85415efa92d6125d6e6d"}, - {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa9d91b338f2df0508606f7009fde642391425189bba6d8c653afd80fd6bb64e"}, - {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2058a32994f1fde4ca0480ab9d1e75a0e8c87c22b53a3ae66554f9af78f2fe8c"}, - {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:0e03262ab796d986f978f79c943fc5f620381be7287148b8010b4097f79a39ec"}, - {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:1a8695a8d00c73e50bff9dfda4d540b7dee29ff9b8053e38380426a85ef10052"}, - {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:fa754d1850735a0b0e03bcffd9d4b4343eb417e47196e4485d9cca326073a42c"}, - {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:a11c8d26a50bfab49002947d3d237abe4d9e4b5bdc8846a63537b6488e197808"}, - {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:dd14041875d09cc0f9308e37a6f8b65f5585cf2598a53aa0123df8b129d481f8"}, - {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:d87c561733f66531dced0da6e864f44ebf89a8fba55f31407b00c2f7f9449593"}, - {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f82865531efd18d6e07a04a17331af02cb7a651583c418df8266f17a63c6612"}, - {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bfb5112df54209d820d7bf9317c7a6c9025ea52e49f46b6a2060104bba37de7"}, - {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:64632ff9d614e5eecfb495796ad51b0ed98c453e447a76bcbeeb69615079fc7e"}, - {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:f889f7a40498cc077332c7ab6b4608d296d852182211787d4f3ee377aaae66e8"}, - {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:de4b83bb311557e439b9e186f733f6c645b9417c84e2eb8203f3f820a4b988bf"}, - {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:82f68293f055f51b51ea42fafc74b6aad03e70e191799430b90c13d643059ebb"}, - {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:329467cecfb529c925cf2bbd4d60d2c509bc2fb52a20c1045bf09bb70971a9c1"}, - {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:87acbfcf8e90ca885206e98359d7dca4bcbb35abdc0ff66672a293e1d7a19101"}, - {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:7f92c15cd1e97d4b12acd1cc9004fa092578acfa57b67ad5e43a197175d01a64"}, - {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3f26877a748dc4251cfcfda9dfb5f13fcb034f5308388066bcfe9031b63ae7d"}, - {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dac89aea9af8cd672fa7b510e7b8c33b0bba9a43186680550ccf23020f32d535"}, - {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:970919794d126ba8645f3837ab6046fb4e72bbc057b3709144066204c19a455d"}, - {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:3eb3fe62804e8f859c49ed20a8451342de53ed764150cb14ca71357c765dc2a6"}, - {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:3abcd9392a36025e3bd55f9bd38d908bd17962cc49bc6da8e7e96285336e2bca"}, - {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:3a1c81334778f9e3af2f8aeb7a960736e5cab1dfebfb26aabca09afd2906c039"}, - {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2807668ba86cb38c6817ad9bc66215ab8584d1d304030ce4f0887336f28a5e27"}, - {file = "pydantic_core-2.33.2.tar.gz", hash = "sha256:7cb8bc3605c29176e1b105350d2e6474142d7c1bd1d9327c4a9bdb46bf827acc"}, + {file = "pydantic_core-2.46.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:1da3786b8018e60349680720158cc19161cc3b4bdd815beb0a321cd5ce1ad5b1"}, + {file = "pydantic_core-2.46.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cc0988cb29d21bf4a9d5cf2ef970b5c0e38d8d8e107a493278c05dc6c1dda69f"}, + {file = "pydantic_core-2.46.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:27f9067c3bfadd04c55484b89c0d267981b2f3512850f6f66e1e74204a4e4ce3"}, + {file = "pydantic_core-2.46.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a642ac886ecf6402d9882d10c405dcf4b902abeb2972cd5fb4a48c83cd59279a"}, + {file = "pydantic_core-2.46.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:79f561438481f28681584b89e2effb22855e2179880314bcddbf5968e935e807"}, + {file = "pydantic_core-2.46.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:57a973eae4665352a47cf1a99b4ee864620f2fe663a217d7a8da68a1f3a5bfda"}, + {file = "pydantic_core-2.46.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:83d002b97072a53ea150d63e0a3adfae5670cef5aa8a6e490240e482d3b22e57"}, + {file = "pydantic_core-2.46.3-cp310-cp310-manylinux_2_31_riscv64.whl", hash = "sha256:b40ddd51e7c44b28cfaef746c9d3c506d658885e0a46f9eeef2ee815cbf8e045"}, + {file = "pydantic_core-2.46.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ac5ec7fb9b87f04ee839af2d53bcadea57ded7d229719f56c0ed895bff987943"}, + {file = "pydantic_core-2.46.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:a3b11c812f61b3129c4905781a2601dfdfdea5fe1e6c1cfb696b55d14e9c054f"}, + {file = "pydantic_core-2.46.3-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:1108da631e602e5b3c38d6d04fe5bb3bfa54349e6918e3ca6cf570b2e2b2f9d4"}, + {file = "pydantic_core-2.46.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:de885175515bcfa98ae618c1df7a072f13d179f81376c8007112af20567fd08a"}, + {file = "pydantic_core-2.46.3-cp310-cp310-win32.whl", hash = "sha256:d11058e3201527d41bc6b545c79187c9e4bf85e15a236a6007f0e991518882b7"}, + {file = "pydantic_core-2.46.3-cp310-cp310-win_amd64.whl", hash = "sha256:3612edf65c8ea67ac13616c4d23af12faef1ae435a8a93e5934c2a0cbbdd1fd6"}, + {file = "pydantic_core-2.46.3-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:ab124d49d0459b2373ecf54118a45c28a1e6d4192a533fbc915e70f556feb8e5"}, + {file = "pydantic_core-2.46.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:cca67d52a5c7a16aed2b3999e719c4bcf644074eac304a5d3d62dd70ae7d4b2c"}, + {file = "pydantic_core-2.46.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c024e08c0ba23e6fd68c771a521e9d6a792f2ebb0fa734296b36394dc30390e"}, + {file = "pydantic_core-2.46.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6645ce7eec4928e29a1e3b3d5c946621d105d3e79f0c9cddf07c2a9770949287"}, + {file = "pydantic_core-2.46.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a712c7118e6c5ea96562f7b488435172abb94a3c53c22c9efc1412264a45cbbe"}, + {file = "pydantic_core-2.46.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:69a868ef3ff206343579021c40faf3b1edc64b1cc508ff243a28b0a514ccb050"}, + {file = "pydantic_core-2.46.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc7e8c32db809aa0f6ea1d6869ebc8518a65d5150fdfad8bcae6a49ae32a22e2"}, + {file = "pydantic_core-2.46.3-cp311-cp311-manylinux_2_31_riscv64.whl", hash = "sha256:3481bd1341dc85779ee506bc8e1196a277ace359d89d28588a9468c3ecbe63fa"}, + {file = "pydantic_core-2.46.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8690eba565c6d68ffd3a8655525cbdd5246510b44a637ee2c6c03a7ebfe64d3c"}, + {file = "pydantic_core-2.46.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:4de88889d7e88d50d40ee5b39d5dac0bcaef9ba91f7e536ac064e6b2834ecccf"}, + {file = "pydantic_core-2.46.3-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:e480080975c1ef7f780b8f99ed72337e7cc5efea2e518a20a692e8e7b278eb8b"}, + {file = "pydantic_core-2.46.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:de3a5c376f8cd94da9a1b8fd3dd1c16c7a7b216ed31dc8ce9fd7a22bf13b836e"}, + {file = "pydantic_core-2.46.3-cp311-cp311-win32.whl", hash = "sha256:fc331a5314ffddd5385b9ee9d0d2fee0b13c27e0e02dad71b1ae5d6561f51eeb"}, + {file = "pydantic_core-2.46.3-cp311-cp311-win_amd64.whl", hash = "sha256:b5b9c6cf08a8a5e502698f5e153056d12c34b8fb30317e0c5fd06f45162a6346"}, + {file = "pydantic_core-2.46.3-cp311-cp311-win_arm64.whl", hash = "sha256:5dfd51cf457482f04ec49491811a2b8fd5b843b64b11eecd2d7a1ee596ea78a6"}, + {file = "pydantic_core-2.46.3-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:b11b59b3eee90a80a36701ddb4576d9ae31f93f05cb9e277ceaa09e6bf074a67"}, + {file = "pydantic_core-2.46.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:af8653713055ea18a3abc1537fe2ebc42f5b0bbb768d1eb79fd74eb47c0ac089"}, + {file = "pydantic_core-2.46.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:75a519dab6d63c514f3a81053e5266c549679e4aa88f6ec57f2b7b854aceb1b0"}, + {file = "pydantic_core-2.46.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a6cd87cb1575b1ad05ba98894c5b5c96411ef678fa2f6ed2576607095b8d9789"}, + {file = "pydantic_core-2.46.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f80a55484b8d843c8ada81ebf70a682f3f00a3d40e378c06cf17ecb44d280d7d"}, + {file = "pydantic_core-2.46.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3861f1731b90c50a3266316b9044f5c9b405eecb8e299b0a7120596334e4fe9c"}, + {file = "pydantic_core-2.46.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb528e295ed31570ac3dcc9bfdd6e0150bc11ce6168ac87a8082055cf1a67395"}, + {file = "pydantic_core-2.46.3-cp312-cp312-manylinux_2_31_riscv64.whl", hash = "sha256:367508faa4973b992b271ba1494acaab36eb7e8739d1e47be5035fb1ea225396"}, + {file = "pydantic_core-2.46.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5ad3c826fe523e4becf4fe39baa44286cff85ef137c729a2c5e269afbfd0905d"}, + {file = "pydantic_core-2.46.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ec638c5d194ef8af27db69f16c954a09797c0dc25015ad6123eb2c73a4d271ca"}, + {file = "pydantic_core-2.46.3-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:28ed528c45446062ee66edb1d33df5d88828ae167de76e773a3c7f64bd14e976"}, + {file = "pydantic_core-2.46.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:aed19d0c783886d5bd86d80ae5030006b45e28464218747dcf83dabfdd092c7b"}, + {file = "pydantic_core-2.46.3-cp312-cp312-win32.whl", hash = "sha256:06d5d8820cbbdb4147578c1fe7ffcd5b83f34508cb9f9ab76e807be7db6ff0a4"}, + {file = "pydantic_core-2.46.3-cp312-cp312-win_amd64.whl", hash = "sha256:c3212fda0ee959c1dd04c60b601ec31097aaa893573a3a1abd0a47bcac2968c1"}, + {file = "pydantic_core-2.46.3-cp312-cp312-win_arm64.whl", hash = "sha256:f1f8338dd7a7f31761f1f1a3c47503a9a3b34eea3c8b01fa6ee96408affb5e72"}, + {file = "pydantic_core-2.46.3-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:12bc98de041458b80c86c56b24df1d23832f3e166cbaff011f25d187f5c62c37"}, + {file = "pydantic_core-2.46.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:85348b8f89d2c3508b65b16c3c33a4da22b8215138d8b996912bb1532868885f"}, + {file = "pydantic_core-2.46.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1105677a6df914b1fb71a81b96c8cce7726857e1717d86001f29be06a25ee6f8"}, + {file = "pydantic_core-2.46.3-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:87082cd65669a33adeba5470769e9704c7cf026cc30afb9cc77fd865578ebaad"}, + {file = "pydantic_core-2.46.3-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:60e5f66e12c4f5212d08522963380eaaeac5ebd795826cfd19b2dfb0c7a52b9c"}, + {file = "pydantic_core-2.46.3-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b6cdf19bf84128d5e7c37e8a73a0c5c10d51103a650ac585d42dd6ae233f2b7f"}, + {file = "pydantic_core-2.46.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:031bb17f4885a43773c8c763089499f242aee2ea85cf17154168775dccdecf35"}, + {file = "pydantic_core-2.46.3-cp313-cp313-manylinux_2_31_riscv64.whl", hash = "sha256:bcf2a8b2982a6673693eae7348ef3d8cf3979c1d63b54fca7c397a635cc68687"}, + {file = "pydantic_core-2.46.3-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:28e8cf2f52d72ced402a137145923a762cbb5081e48b34312f7a0c8f55928ec3"}, + {file = "pydantic_core-2.46.3-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:17eaface65d9fc5abb940003020309c1bf7a211f5f608d7870297c367e6f9022"}, + {file = "pydantic_core-2.46.3-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:93fd339f23408a07e98950a89644f92c54d8729719a40b30c0a30bb9ebc55d23"}, + {file = "pydantic_core-2.46.3-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:23cbdb3aaa74dfe0837975dbf69b469753bbde8eacace524519ffdb6b6e89eb7"}, + {file = "pydantic_core-2.46.3-cp313-cp313-win32.whl", hash = "sha256:610eda2e3838f401105e6326ca304f5da1e15393ae25dacae5c5c63f2c275b13"}, + {file = "pydantic_core-2.46.3-cp313-cp313-win_amd64.whl", hash = "sha256:68cc7866ed863db34351294187f9b729964c371ba33e31c26f478471c52e1ed0"}, + {file = "pydantic_core-2.46.3-cp313-cp313-win_arm64.whl", hash = "sha256:f64b5537ac62b231572879cd08ec05600308636a5d63bcbdb15063a466977bec"}, + {file = "pydantic_core-2.46.3-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:afa3aa644f74e290cdede48a7b0bee37d1c35e71b05105f6b340d484af536d9b"}, + {file = "pydantic_core-2.46.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:ced3310e51aa425f7f77da8bbbb5212616655bedbe82c70944320bc1dbe5e018"}, + {file = "pydantic_core-2.46.3-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e29908922ce9da1a30b4da490bd1d3d82c01dcfdf864d2a74aacee674d0bfa34"}, + {file = "pydantic_core-2.46.3-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0c9ff69140423eea8ed2d5477df3ba037f671f5e897d206d921bc9fdc39613e7"}, + {file = "pydantic_core-2.46.3-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b675ab0a0d5b1c8fdb81195dc5bcefea3f3c240871cdd7ff9a2de8aa50772eb2"}, + {file = "pydantic_core-2.46.3-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0087084960f209a9a4af50ecd1fb063d9ad3658c07bb81a7a53f452dacbfb2ba"}, + {file = "pydantic_core-2.46.3-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ed42e6cc8e1b0e2b9b96e2276bad70ae625d10d6d524aed0c93de974ae029f9f"}, + {file = "pydantic_core-2.46.3-cp314-cp314-manylinux_2_31_riscv64.whl", hash = "sha256:f1771ce258afb3e4201e67d154edbbae712a76a6081079fe247c2f53c6322c22"}, + {file = "pydantic_core-2.46.3-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a7610b6a5242a6c736d8ad47fd5fff87fcfe8f833b281b1c409c3d6835d9227f"}, + {file = "pydantic_core-2.46.3-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:ff5e7783bcc5476e1db448bf268f11cb257b1c276d3e89f00b5727be86dd0127"}, + {file = "pydantic_core-2.46.3-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:9d2e32edcc143bc01e95300671915d9ca052d4f745aa0a49c48d4803f8a85f2c"}, + {file = "pydantic_core-2.46.3-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:6e42d83d1c6b87fa56b521479cff237e626a292f3b31b6345c15a99121b454c1"}, + {file = "pydantic_core-2.46.3-cp314-cp314-win32.whl", hash = "sha256:07bc6d2a28c3adb4f7c6ae46aa4f2d2929af127f587ed44057af50bf1ce0f505"}, + {file = "pydantic_core-2.46.3-cp314-cp314-win_amd64.whl", hash = "sha256:8940562319bc621da30714617e6a7eaa6b98c84e8c685bcdc02d7ed5e7c7c44e"}, + {file = "pydantic_core-2.46.3-cp314-cp314-win_arm64.whl", hash = "sha256:5dcbbcf4d22210ced8f837c96db941bdb078f419543472aca5d9a0bb7cddc7df"}, + {file = "pydantic_core-2.46.3-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:d0fe3dce1e836e418f912c1ad91c73357d03e556a4d286f441bf34fed2dbeecf"}, + {file = "pydantic_core-2.46.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:9ce92e58abc722dac1bf835a6798a60b294e48eb0e625ec9fd994b932ac5feee"}, + {file = "pydantic_core-2.46.3-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a03e6467f0f5ab796a486146d1b887b2dc5e5f9b3288898c1b1c3ad974e53e4a"}, + {file = "pydantic_core-2.46.3-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2798b6ba041b9d70acfb9071a2ea13c8456dd1e6a5555798e41ba7b0790e329c"}, + {file = "pydantic_core-2.46.3-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9be3e221bdc6d69abf294dcf7aff6af19c31a5cdcc8f0aa3b14be29df4bd03b1"}, + {file = "pydantic_core-2.46.3-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f13936129ce841f2a5ddf6f126fea3c43cd128807b5a59588c37cf10178c2e64"}, + {file = "pydantic_core-2.46.3-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28b5f2ef03416facccb1c6ef744c69793175fd27e44ef15669201601cf423acb"}, + {file = "pydantic_core-2.46.3-cp314-cp314t-manylinux_2_31_riscv64.whl", hash = "sha256:830d1247d77ad23852314f069e9d7ddafeec5f684baf9d7e7065ed46a049c4e6"}, + {file = "pydantic_core-2.46.3-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d0793c90c1a3c74966e7975eaef3ed30ebdff3260a0f815a62a22adc17e4c01c"}, + {file = "pydantic_core-2.46.3-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:d2d0aead851b66f5245ec0c4fb2612ef457f8bbafefdf65a2bf9d6bac6140f47"}, + {file = "pydantic_core-2.46.3-cp314-cp314t-musllinux_1_1_armv7l.whl", hash = "sha256:2f40e4246676beb31c5ce77c38a55ca4e465c6b38d11ea1bd935420568e0b1ab"}, + {file = "pydantic_core-2.46.3-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:cf489cf8986c543939aeee17a09c04d6ffb43bfef8ca16fcbcc5cfdcbed24dba"}, + {file = "pydantic_core-2.46.3-cp314-cp314t-win32.whl", hash = "sha256:ffe0883b56cfc05798bf994164d2b2ff03efe2d22022a2bb080f3b626176dd56"}, + {file = "pydantic_core-2.46.3-cp314-cp314t-win_amd64.whl", hash = "sha256:706d9d0ce9cf4593d07270d8e9f53b161f90c57d315aeec4fb4fd7a8b10240d8"}, + {file = "pydantic_core-2.46.3-cp314-cp314t-win_arm64.whl", hash = "sha256:77706aeb41df6a76568434701e0917da10692da28cb69d5fb6919ce5fdb07374"}, + {file = "pydantic_core-2.46.3-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:fa3eb7c2995aa443687a825bc30395c8521b7c6ec201966e55debfd1128bcceb"}, + {file = "pydantic_core-2.46.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3d08782c4045f90724b44c95d35ebec0d67edb8a957a2ac81d5a8e4b8a200495"}, + {file = "pydantic_core-2.46.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:831eb19aa789a97356979e94c981e5667759301fb708d1c0d5adf1bc0098b873"}, + {file = "pydantic_core-2.46.3-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4335e87c7afa436a0dfa899e138d57a72f8aad542e2cf19c36fb428461caabd0"}, + {file = "pydantic_core-2.46.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99421e7684a60f7f3550a1d159ade5fdff1954baedb6bdd407cba6a307c9f27d"}, + {file = "pydantic_core-2.46.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dd81f6907932ebac3abbe41378dac64b2380db1287e2aa64d8d88f78d170f51a"}, + {file = "pydantic_core-2.46.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f247596366f4221af52beddd65af1218797771d6989bc891a0b86ccaa019168"}, + {file = "pydantic_core-2.46.3-cp39-cp39-manylinux_2_31_riscv64.whl", hash = "sha256:6dff8cc884679df229ebc6d8eb2321ea6f8e091bc7d4886d4dc2e0e71452843c"}, + {file = "pydantic_core-2.46.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:68ef2f623dda6d5a9067ac014e406c020c780b2a358930a7e5c1b73702900720"}, + {file = "pydantic_core-2.46.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d56bdb4af1767cc15b0386b3c581fdfe659bb9ee4a4f776e92c1cd9d074000d6"}, + {file = "pydantic_core-2.46.3-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:91249bcb7c165c2fb2a2f852dbc5c91636e2e218e75d96dfdd517e4078e173dd"}, + {file = "pydantic_core-2.46.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4b068543bdb707f5d935dab765d99227aa2545ef2820935f2e5dd801795c7dbd"}, + {file = "pydantic_core-2.46.3-cp39-cp39-win32.whl", hash = "sha256:dcda6583921c05a40533f982321532f2d8db29326c7b95c4026941fa5074bd79"}, + {file = "pydantic_core-2.46.3-cp39-cp39-win_amd64.whl", hash = "sha256:a35cc284c8dd7edae8a31533713b4d2467dfe7c4f1b5587dd4031f28f90d1d13"}, + {file = "pydantic_core-2.46.3-graalpy311-graalpy242_311_native-macosx_10_12_x86_64.whl", hash = "sha256:9715525891ed524a0a1eb6d053c74d4d4ad5017677fb00af0b7c2644a31bae46"}, + {file = "pydantic_core-2.46.3-graalpy311-graalpy242_311_native-macosx_11_0_arm64.whl", hash = "sha256:9d2f400712a99a013aff420ef1eb9be077f8189a36c1e3ef87660b4e1088a874"}, + {file = "pydantic_core-2.46.3-graalpy311-graalpy242_311_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd2aab0e2e9dc2daf36bd2686c982535d5e7b1d930a1344a7bb6e82baab42a76"}, + {file = "pydantic_core-2.46.3-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e9d76736da5f362fabfeea6a69b13b7f2be405c6d6966f06b2f6bfff7e64531"}, + {file = "pydantic_core-2.46.3-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:b12dd51f1187c2eb489af8e20f880362db98e954b54ab792fa5d92e8bcc6b803"}, + {file = "pydantic_core-2.46.3-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:f00a0961b125f1a47af7bcc17f00782e12f4cd056f83416006b30111d941dfa3"}, + {file = "pydantic_core-2.46.3-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:57697d7c056aca4bbb680200f96563e841a6386ac1129370a0102592f4dddff5"}, + {file = "pydantic_core-2.46.3-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd35aa21299def8db7ef4fe5c4ff862941a9a158ca7b63d61e66fe67d30416b4"}, + {file = "pydantic_core-2.46.3-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:13afdd885f3d71280cf286b13b310ee0f7ccfefd1dbbb661514a474b726e2f25"}, + {file = "pydantic_core-2.46.3-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:f91c0aff3e3ee0928edd1232c57f643a7a003e6edf1860bc3afcdc749cb513f3"}, + {file = "pydantic_core-2.46.3-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6529d1d128321a58d30afcc97b49e98836542f68dd41b33c2e972bb9e5290536"}, + {file = "pydantic_core-2.46.3-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:975c267cff4f7e7272eacbe50f6cc03ca9a3da4c4fbd66fffd89c94c1e311aa1"}, + {file = "pydantic_core-2.46.3-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:2b8e4f2bbdf71415c544b4b1138b8060db7b6611bc927e8064c769f64bed651c"}, + {file = "pydantic_core-2.46.3-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:e61ea8e9fff9606d09178f577ff8ccdd7206ff73d6552bcec18e1033c4254b85"}, + {file = "pydantic_core-2.46.3-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:b504bda01bafc69b6d3c7a0c7f039dcf60f47fab70e06fe23f57b5c75bdc82b8"}, + {file = "pydantic_core-2.46.3-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:b00b76f7142fc60c762ce579bd29c8fa44aaa56592dd3c54fab3928d0d4ca6ff"}, + {file = "pydantic_core-2.46.3.tar.gz", hash = "sha256:41c178f65b8c29807239d47e6050262eb6bf84eb695e41101e62e38df4a5bc2c"}, ] [package.dependencies] -typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" +typing-extensions = ">=4.14.1" [[package]] name = "pydantic-settings" -version = "2.10.1" +version = "2.14.0" description = "Settings management using Pydantic" optional = false -python-versions = ">=3.9" +python-versions = ">=3.10" groups = ["main"] files = [ - {file = "pydantic_settings-2.10.1-py3-none-any.whl", hash = "sha256:a60952460b99cf661dc25c29c0ef171721f98bfcb52ef8d9ea4c943d7c8cc796"}, - {file = "pydantic_settings-2.10.1.tar.gz", hash = "sha256:06f0062169818d0f5524420a360d632d5857b83cffd4d42fe29597807a1614ee"}, + {file = "pydantic_settings-2.14.0-py3-none-any.whl", hash = "sha256:fc8d5d692eb7092e43c8647c1c35a3ecd00e040fcf02ed86f4cb5458ca62182e"}, + {file = "pydantic_settings-2.14.0.tar.gz", hash = "sha256:24285fd4b0e0c06507dd9fdfd331ee23794305352aaec8fc4eb92d4047aeb67d"}, ] [package.dependencies] @@ -1346,7 +1601,7 @@ python-dotenv = ">=0.21.0" typing-inspection = ">=0.4.0" [package.extras] -aws-secrets-manager = ["boto3 (>=1.35.0)", "boto3-stubs[secretsmanager]"] +aws-secrets-manager = ["boto3 (>=1.35.0)", "types-boto3[secretsmanager]"] azure-key-vault = ["azure-identity (>=1.16.0)", "azure-keyvault-secrets (>=4.8.0)"] gcp-secret-manager = ["google-cloud-secret-manager (>=2.23.1)"] toml = ["tomli (>=2.0.1)"] @@ -1395,14 +1650,14 @@ files = [ [[package]] name = "pygments" -version = "2.19.2" +version = "2.20.0" description = "Pygments is a syntax highlighting package written in Python." optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" groups = ["docs"] files = [ - {file = "pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b"}, - {file = "pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887"}, + {file = "pygments-2.20.0-py3-none-any.whl", hash = "sha256:81a9e26dd42fd28a23a2d169d86d7ac03b46e2f8b59ed4698fb4785f946d0176"}, + {file = "pygments-2.20.0.tar.gz", hash = "sha256:6757cd03768053ff99f3039c1a36d6c0aa0b263438fcab17520b30a303a82b5f"}, ] [package.extras] @@ -1450,41 +1705,56 @@ requests = ["requests"] [[package]] name = "pynacl" -version = "1.5.0" +version = "1.6.2" description = "Python binding to the Networking and Cryptography (NaCl) library" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" groups = ["main"] files = [ - {file = "PyNaCl-1.5.0-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:401002a4aaa07c9414132aaed7f6836ff98f59277a234704ff66878c2ee4a0d1"}, - {file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:52cb72a79269189d4e0dc537556f4740f7f0a9ec41c1322598799b0bdad4ef92"}, - {file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a36d4a9dda1f19ce6e03c9a784a2921a4b726b02e1c736600ca9c22029474394"}, - {file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:0c84947a22519e013607c9be43706dd42513f9e6ae5d39d3613ca1e142fba44d"}, - {file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06b8f6fa7f5de8d5d2f7573fe8c863c051225a27b61e6860fd047b1775807858"}, - {file = "PyNaCl-1.5.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:a422368fc821589c228f4c49438a368831cb5bbc0eab5ebe1d7fac9dded6567b"}, - {file = "PyNaCl-1.5.0-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:61f642bf2378713e2c2e1de73444a3778e5f0a38be6fee0fe532fe30060282ff"}, - {file = "PyNaCl-1.5.0-cp36-abi3-win32.whl", hash = "sha256:e46dae94e34b085175f8abb3b0aaa7da40767865ac82c928eeb9e57e1ea8a543"}, - {file = "PyNaCl-1.5.0-cp36-abi3-win_amd64.whl", hash = "sha256:20f42270d27e1b6a29f54032090b972d97f0a1b0948cc52392041ef7831fee93"}, - {file = "PyNaCl-1.5.0.tar.gz", hash = "sha256:8ac7448f09ab85811607bdd21ec2464495ac8b7c66d146bf545b0f08fb9220ba"}, + {file = "pynacl-1.6.2-cp314-cp314t-macosx_10_10_universal2.whl", hash = "sha256:622d7b07cc5c02c666795792931b50c91f3ce3c2649762efb1ef0d5684c81594"}, + {file = "pynacl-1.6.2-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d071c6a9a4c94d79eb665db4ce5cedc537faf74f2355e4d502591d850d3913c0"}, + {file = "pynacl-1.6.2-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fe9847ca47d287af41e82be1dd5e23023d3c31a951da134121ab02e42ac218c9"}, + {file = "pynacl-1.6.2-cp314-cp314t-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:04316d1fc625d860b6c162fff704eb8426b1a8bcd3abacea11142cbd99a6b574"}, + {file = "pynacl-1.6.2-cp314-cp314t-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:44081faff368d6c5553ccf55322ef2819abb40e25afaec7e740f159f74813634"}, + {file = "pynacl-1.6.2-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:a9f9932d8d2811ce1a8ffa79dcbdf3970e7355b5c8eb0c1a881a57e7f7d96e88"}, + {file = "pynacl-1.6.2-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:bc4a36b28dd72fb4845e5d8f9760610588a96d5a51f01d84d8c6ff9849968c14"}, + {file = "pynacl-1.6.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:3bffb6d0f6becacb6526f8f42adfb5efb26337056ee0831fb9a7044d1a964444"}, + {file = "pynacl-1.6.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:2fef529ef3ee487ad8113d287a593fa26f48ee3620d92ecc6f1d09ea38e0709b"}, + {file = "pynacl-1.6.2-cp314-cp314t-win32.whl", hash = "sha256:a84bf1c20339d06dc0c85d9aea9637a24f718f375d861b2668b2f9f96fa51145"}, + {file = "pynacl-1.6.2-cp314-cp314t-win_amd64.whl", hash = "sha256:320ef68a41c87547c91a8b58903c9caa641ab01e8512ce291085b5fe2fcb7590"}, + {file = "pynacl-1.6.2-cp314-cp314t-win_arm64.whl", hash = "sha256:d29bfe37e20e015a7d8b23cfc8bd6aa7909c92a1b8f41ee416bbb3e79ef182b2"}, + {file = "pynacl-1.6.2-cp38-abi3-macosx_10_10_universal2.whl", hash = "sha256:c949ea47e4206af7c8f604b8278093b674f7c79ed0d4719cc836902bf4517465"}, + {file = "pynacl-1.6.2-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8845c0631c0be43abdd865511c41eab235e0be69c81dc66a50911594198679b0"}, + {file = "pynacl-1.6.2-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:22de65bb9010a725b0dac248f353bb072969c94fa8d6b1f34b87d7953cf7bbe4"}, + {file = "pynacl-1.6.2-cp38-abi3-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:46065496ab748469cdd999246d17e301b2c24ae2fdf739132e580a0e94c94a87"}, + {file = "pynacl-1.6.2-cp38-abi3-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8a66d6fb6ae7661c58995f9c6435bda2b1e68b54b598a6a10247bfcdadac996c"}, + {file = "pynacl-1.6.2-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:26bfcd00dcf2cf160f122186af731ae30ab120c18e8375684ec2670dccd28130"}, + {file = "pynacl-1.6.2-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:c8a231e36ec2cab018c4ad4358c386e36eede0319a0c41fed24f840b1dac59f6"}, + {file = "pynacl-1.6.2-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:68be3a09455743ff9505491220b64440ced8973fe930f270c8e07ccfa25b1f9e"}, + {file = "pynacl-1.6.2-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:8b097553b380236d51ed11356c953bf8ce36a29a3e596e934ecabe76c985a577"}, + {file = "pynacl-1.6.2-cp38-abi3-win32.whl", hash = "sha256:5811c72b473b2f38f7e2a3dc4f8642e3a3e9b5e7317266e4ced1fba85cae41aa"}, + {file = "pynacl-1.6.2-cp38-abi3-win_amd64.whl", hash = "sha256:62985f233210dee6548c223301b6c25440852e13d59a8b81490203c3227c5ba0"}, + {file = "pynacl-1.6.2-cp38-abi3-win_arm64.whl", hash = "sha256:834a43af110f743a754448463e8fd61259cd4ab5bbedcf70f9dabad1d28a394c"}, + {file = "pynacl-1.6.2.tar.gz", hash = "sha256:018494d6d696ae03c7e656e5e74cdfd8ea1326962cc401bcf018f1ed8436811c"}, ] [package.dependencies] -cffi = ">=1.4.1" +cffi = {version = ">=2.0.0", markers = "platform_python_implementation != \"PyPy\" and python_version >= \"3.9\""} [package.extras] -docs = ["sphinx (>=1.6.5)", "sphinx-rtd-theme"] -tests = ["hypothesis (>=3.27.0)", "pytest (>=3.2.1,!=3.3.0)"] +docs = ["sphinx (<7)", "sphinx_rtd_theme"] +tests = ["hypothesis (>=3.27.0)", "pytest (>=7.4.0)", "pytest-cov (>=2.10.1)", "pytest-xdist (>=3.5.0)"] [[package]] name = "pyparsing" -version = "3.2.3" -description = "pyparsing module - Classes and methods to define and execute parsing grammars" +version = "3.3.2" +description = "pyparsing - Classes and methods to define and execute parsing grammars" optional = false python-versions = ">=3.9" groups = ["main"] files = [ - {file = "pyparsing-3.2.3-py3-none-any.whl", hash = "sha256:a749938e02d6fd0b59b356ca504a24982314bb090c383e3cf201c95ef7e2bfcf"}, - {file = "pyparsing-3.2.3.tar.gz", hash = "sha256:b9c13f1ab8b3b542f72e28f634bad4de758ab3ce4546e4301970ad6fa77c38be"}, + {file = "pyparsing-3.3.2-py3-none-any.whl", hash = "sha256:850ba148bd908d7e2411587e247a1e4f0327839c40e2e5e6d05a007ecc69911d"}, + {file = "pyparsing-3.3.2.tar.gz", hash = "sha256:c777f4d763f140633dcb6d8a3eda953bf7a214dc4eff598413c070bcdc117cbc"}, ] [package.extras] @@ -1621,14 +1891,14 @@ chardet = "*" [[package]] name = "python-dotenv" -version = "1.1.1" +version = "1.2.2" description = "Read key-value pairs from a .env file and set them as environment variables" optional = false -python-versions = ">=3.9" +python-versions = ">=3.10" groups = ["main"] files = [ - {file = "python_dotenv-1.1.1-py3-none-any.whl", hash = "sha256:31f23644fe2602f88ff55e1f5c79ba497e01224ee7737937930c448e4d0e24dc"}, - {file = "python_dotenv-1.1.1.tar.gz", hash = "sha256:a8a6399716257f45be6a007360200409fce5cda2661e3dec71d23dc15f6189ab"}, + {file = "python_dotenv-1.2.2-py3-none-any.whl", hash = "sha256:1d8214789a24de455a8b8bd8ae6fe3c6b69a5e3d64aa8a8e5d68e694bbcb285a"}, + {file = "python_dotenv-1.2.2.tar.gz", hash = "sha256:2c371a91fbd7ba082c2c1dc1f8bf89ca22564a087c2c287cd9b662adde799cf3"}, ] [package.extras] @@ -1636,77 +1906,97 @@ cli = ["click (>=5.0)"] [[package]] name = "pyyaml" -version = "6.0.2" +version = "6.0.3" description = "YAML parser and emitter for Python" optional = false python-versions = ">=3.8" groups = ["main", "docs"] files = [ - {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, - {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, - {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"}, - {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"}, - {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"}, - {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"}, - {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"}, - {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"}, - {file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"}, - {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"}, - {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"}, - {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"}, - {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"}, - {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"}, - {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"}, - {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"}, - {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"}, - {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"}, - {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"}, - {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"}, - {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"}, - {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"}, - {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"}, - {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"}, - {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"}, - {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"}, - {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"}, - {file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"}, - {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"}, - {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"}, - {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"}, - {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"}, - {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"}, - {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"}, - {file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"}, - {file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"}, - {file = "PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a"}, - {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5"}, - {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d"}, - {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083"}, - {file = "PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706"}, - {file = "PyYAML-6.0.2-cp38-cp38-win32.whl", hash = "sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a"}, - {file = "PyYAML-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff"}, - {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"}, - {file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"}, - {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"}, - {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"}, - {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"}, - {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"}, - {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"}, - {file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"}, - {file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"}, - {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, + {file = "PyYAML-6.0.3-cp38-cp38-macosx_10_13_x86_64.whl", hash = "sha256:c2514fceb77bc5e7a2f7adfaa1feb2fb311607c9cb518dbc378688ec73d8292f"}, + {file = "PyYAML-6.0.3-cp38-cp38-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9c57bb8c96f6d1808c030b1687b9b5fb476abaa47f0db9c0101f5e9f394e97f4"}, + {file = "PyYAML-6.0.3-cp38-cp38-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:efd7b85f94a6f21e4932043973a7ba2613b059c4a000551892ac9f1d11f5baf3"}, + {file = "PyYAML-6.0.3-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:22ba7cfcad58ef3ecddc7ed1db3409af68d023b7f940da23c6c2a1890976eda6"}, + {file = "PyYAML-6.0.3-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:6344df0d5755a2c9a276d4473ae6b90647e216ab4757f8426893b5dd2ac3f369"}, + {file = "PyYAML-6.0.3-cp38-cp38-win32.whl", hash = "sha256:3ff07ec89bae51176c0549bc4c63aa6202991da2d9a6129d7aef7f1407d3f295"}, + {file = "PyYAML-6.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:5cf4e27da7e3fbed4d6c3d8e797387aaad68102272f8f9752883bc32d61cb87b"}, + {file = "pyyaml-6.0.3-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:214ed4befebe12df36bcc8bc2b64b396ca31be9304b8f59e25c11cf94a4c033b"}, + {file = "pyyaml-6.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:02ea2dfa234451bbb8772601d7b8e426c2bfa197136796224e50e35a78777956"}, + {file = "pyyaml-6.0.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b30236e45cf30d2b8e7b3e85881719e98507abed1011bf463a8fa23e9c3e98a8"}, + {file = "pyyaml-6.0.3-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:66291b10affd76d76f54fad28e22e51719ef9ba22b29e1d7d03d6777a9174198"}, + {file = "pyyaml-6.0.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9c7708761fccb9397fe64bbc0395abcae8c4bf7b0eac081e12b809bf47700d0b"}, + {file = "pyyaml-6.0.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:418cf3f2111bc80e0933b2cd8cd04f286338bb88bdc7bc8e6dd775ebde60b5e0"}, + {file = "pyyaml-6.0.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:5e0b74767e5f8c593e8c9b5912019159ed0533c70051e9cce3e8b6aa699fcd69"}, + {file = "pyyaml-6.0.3-cp310-cp310-win32.whl", hash = "sha256:28c8d926f98f432f88adc23edf2e6d4921ac26fb084b028c733d01868d19007e"}, + {file = "pyyaml-6.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:bdb2c67c6c1390b63c6ff89f210c8fd09d9a1217a465701eac7316313c915e4c"}, + {file = "pyyaml-6.0.3-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:44edc647873928551a01e7a563d7452ccdebee747728c1080d881d68af7b997e"}, + {file = "pyyaml-6.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:652cb6edd41e718550aad172851962662ff2681490a8a711af6a4d288dd96824"}, + {file = "pyyaml-6.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:10892704fc220243f5305762e276552a0395f7beb4dbf9b14ec8fd43b57f126c"}, + {file = "pyyaml-6.0.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:850774a7879607d3a6f50d36d04f00ee69e7fc816450e5f7e58d7f17f1ae5c00"}, + {file = "pyyaml-6.0.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b8bb0864c5a28024fac8a632c443c87c5aa6f215c0b126c449ae1a150412f31d"}, + {file = "pyyaml-6.0.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1d37d57ad971609cf3c53ba6a7e365e40660e3be0e5175fa9f2365a379d6095a"}, + {file = "pyyaml-6.0.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:37503bfbfc9d2c40b344d06b2199cf0e96e97957ab1c1b546fd4f87e53e5d3e4"}, + {file = "pyyaml-6.0.3-cp311-cp311-win32.whl", hash = "sha256:8098f252adfa6c80ab48096053f512f2321f0b998f98150cea9bd23d83e1467b"}, + {file = "pyyaml-6.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:9f3bfb4965eb874431221a3ff3fdcddc7e74e3b07799e0e84ca4a0f867d449bf"}, + {file = "pyyaml-6.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7f047e29dcae44602496db43be01ad42fc6f1cc0d8cd6c83d342306c32270196"}, + {file = "pyyaml-6.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fc09d0aa354569bc501d4e787133afc08552722d3ab34836a80547331bb5d4a0"}, + {file = "pyyaml-6.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9149cad251584d5fb4981be1ecde53a1ca46c891a79788c0df828d2f166bda28"}, + {file = "pyyaml-6.0.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5fdec68f91a0c6739b380c83b951e2c72ac0197ace422360e6d5a959d8d97b2c"}, + {file = "pyyaml-6.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ba1cc08a7ccde2d2ec775841541641e4548226580ab850948cbfda66a1befcdc"}, + {file = "pyyaml-6.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8dc52c23056b9ddd46818a57b78404882310fb473d63f17b07d5c40421e47f8e"}, + {file = "pyyaml-6.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:41715c910c881bc081f1e8872880d3c650acf13dfa8214bad49ed4cede7c34ea"}, + {file = "pyyaml-6.0.3-cp312-cp312-win32.whl", hash = "sha256:96b533f0e99f6579b3d4d4995707cf36df9100d67e0c8303a0c55b27b5f99bc5"}, + {file = "pyyaml-6.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:5fcd34e47f6e0b794d17de1b4ff496c00986e1c83f7ab2fb8fcfe9616ff7477b"}, + {file = "pyyaml-6.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:64386e5e707d03a7e172c0701abfb7e10f0fb753ee1d773128192742712a98fd"}, + {file = "pyyaml-6.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8da9669d359f02c0b91ccc01cac4a67f16afec0dac22c2ad09f46bee0697eba8"}, + {file = "pyyaml-6.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2283a07e2c21a2aa78d9c4442724ec1eb15f5e42a723b99cb3d822d48f5f7ad1"}, + {file = "pyyaml-6.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ee2922902c45ae8ccada2c5b501ab86c36525b883eff4255313a253a3160861c"}, + {file = "pyyaml-6.0.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a33284e20b78bd4a18c8c2282d549d10bc8408a2a7ff57653c0cf0b9be0afce5"}, + {file = "pyyaml-6.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0f29edc409a6392443abf94b9cf89ce99889a1dd5376d94316ae5145dfedd5d6"}, + {file = "pyyaml-6.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f7057c9a337546edc7973c0d3ba84ddcdf0daa14533c2065749c9075001090e6"}, + {file = "pyyaml-6.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:eda16858a3cab07b80edaf74336ece1f986ba330fdb8ee0d6c0d68fe82bc96be"}, + {file = "pyyaml-6.0.3-cp313-cp313-win32.whl", hash = "sha256:d0eae10f8159e8fdad514efdc92d74fd8d682c933a6dd088030f3834bc8e6b26"}, + {file = "pyyaml-6.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:79005a0d97d5ddabfeeea4cf676af11e647e41d81c9a7722a193022accdb6b7c"}, + {file = "pyyaml-6.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:5498cd1645aa724a7c71c8f378eb29ebe23da2fc0d7a08071d89469bf1d2defb"}, + {file = "pyyaml-6.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:8d1fab6bb153a416f9aeb4b8763bc0f22a5586065f86f7664fc23339fc1c1fac"}, + {file = "pyyaml-6.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:34d5fcd24b8445fadc33f9cf348c1047101756fd760b4dacb5c3e99755703310"}, + {file = "pyyaml-6.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:501a031947e3a9025ed4405a168e6ef5ae3126c59f90ce0cd6f2bfc477be31b7"}, + {file = "pyyaml-6.0.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:b3bc83488de33889877a0f2543ade9f70c67d66d9ebb4ac959502e12de895788"}, + {file = "pyyaml-6.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c458b6d084f9b935061bc36216e8a69a7e293a2f1e68bf956dcd9e6cbcd143f5"}, + {file = "pyyaml-6.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7c6610def4f163542a622a73fb39f534f8c101d690126992300bf3207eab9764"}, + {file = "pyyaml-6.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:5190d403f121660ce8d1d2c1bb2ef1bd05b5f68533fc5c2ea899bd15f4399b35"}, + {file = "pyyaml-6.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:4a2e8cebe2ff6ab7d1050ecd59c25d4c8bd7e6f400f5f82b96557ac0abafd0ac"}, + {file = "pyyaml-6.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:93dda82c9c22deb0a405ea4dc5f2d0cda384168e466364dec6255b293923b2f3"}, + {file = "pyyaml-6.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:02893d100e99e03eda1c8fd5c441d8c60103fd175728e23e431db1b589cf5ab3"}, + {file = "pyyaml-6.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c1ff362665ae507275af2853520967820d9124984e0f7466736aea23d8611fba"}, + {file = "pyyaml-6.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6adc77889b628398debc7b65c073bcb99c4a0237b248cacaf3fe8a557563ef6c"}, + {file = "pyyaml-6.0.3-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a80cb027f6b349846a3bf6d73b5e95e782175e52f22108cfa17876aaeff93702"}, + {file = "pyyaml-6.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:00c4bdeba853cc34e7dd471f16b4114f4162dc03e6b7afcc2128711f0eca823c"}, + {file = "pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:66e1674c3ef6f541c35191caae2d429b967b99e02040f5ba928632d9a7f0f065"}, + {file = "pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:16249ee61e95f858e83976573de0f5b2893b3677ba71c9dd36b9cf8be9ac6d65"}, + {file = "pyyaml-6.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4ad1906908f2f5ae4e5a8ddfce73c320c2a1429ec52eafd27138b7f1cbe341c9"}, + {file = "pyyaml-6.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:ebc55a14a21cb14062aa4162f906cd962b28e2e9ea38f9b4391244cd8de4ae0b"}, + {file = "pyyaml-6.0.3-cp39-cp39-macosx_10_13_x86_64.whl", hash = "sha256:b865addae83924361678b652338317d1bd7e79b1f4596f96b96c77a5a34b34da"}, + {file = "pyyaml-6.0.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c3355370a2c156cffb25e876646f149d5d68f5e0a3ce86a5084dd0b64a994917"}, + {file = "pyyaml-6.0.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3c5677e12444c15717b902a5798264fa7909e41153cdf9ef7ad571b704a63dd9"}, + {file = "pyyaml-6.0.3-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5ed875a24292240029e4483f9d4a4b8a1ae08843b9c54f43fcc11e404532a8a5"}, + {file = "pyyaml-6.0.3-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0150219816b6a1fa26fb4699fb7daa9caf09eb1999f3b70fb6e786805e80375a"}, + {file = "pyyaml-6.0.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:fa160448684b4e94d80416c0fa4aac48967a969efe22931448d853ada8baf926"}, + {file = "pyyaml-6.0.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:27c0abcb4a5dac13684a37f76e701e054692a9b2d3064b70f5e4eb54810553d7"}, + {file = "pyyaml-6.0.3-cp39-cp39-win32.whl", hash = "sha256:1ebe39cb5fc479422b83de611d14e2c0d3bb2a18bbcb01f229ab3cfbd8fee7a0"}, + {file = "pyyaml-6.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:2e71d11abed7344e42a8849600193d15b6def118602c4c176f748e4583246007"}, + {file = "pyyaml-6.0.3.tar.gz", hash = "sha256:d76623373421df22fb4cf8817020cbb7ef15c725b9d5e45f17e189bfc384190f"}, ] [[package]] name = "rdflib" -version = "7.5.0" +version = "7.6.0" description = "RDFLib is a Python library for working with RDF, a simple yet powerful language for representing information." optional = false python-versions = ">=3.8.1" groups = ["main"] files = [ - {file = "rdflib-7.5.0-py3-none-any.whl", hash = "sha256:b011dfc40d0fc8a44252e906dcd8fc806a7859bc231be190c37e9568a31ac572"}, - {file = "rdflib-7.5.0.tar.gz", hash = "sha256:663083443908b1830e567350d72e74d9948b310f827966358d76eebdc92bf592"}, + {file = "rdflib-7.6.0-py3-none-any.whl", hash = "sha256:30c0a3ebf4c0e09215f066be7246794b6492e054e782d7ac2a34c9f70a15e0dd"}, + {file = "rdflib-7.6.0.tar.gz", hash = "sha256:6c831288d5e4a5a7ece85d0ccde9877d512a3d0f02d7c06455d00d6d0ea379df"}, ] [package.dependencies] @@ -1715,6 +2005,7 @@ pyparsing = ">=2.1.0,<4" [package.extras] berkeleydb = ["berkeleydb (>=18.1.0,<19.0.0)"] +graphdb = ["httpx (>=0.28.1,<0.29.0)"] html = ["html5rdf (>=1.2,<2)"] lxml = ["lxml (>=4.3,<6.0)"] networkx = ["networkx (>=2,<4)"] @@ -1723,25 +2014,25 @@ rdf4j = ["httpx (>=0.28.1,<0.29.0)"] [[package]] name = "requests" -version = "2.32.4" +version = "2.33.1" description = "Python HTTP for Humans." optional = false -python-versions = ">=3.8" +python-versions = ">=3.10" groups = ["main", "dev", "docs"] files = [ - {file = "requests-2.32.4-py3-none-any.whl", hash = "sha256:27babd3cda2a6d50b30443204ee89830707d396671944c998b5975b031ac2b2c"}, - {file = "requests-2.32.4.tar.gz", hash = "sha256:27d0316682c8a29834d3264820024b62a36942083d52caf2f14c0591336d3422"}, + {file = "requests-2.33.1-py3-none-any.whl", hash = "sha256:4e6d1ef462f3626a1f0a0a9c42dd93c63bad33f9f1c1937509b8c5c8718ab56a"}, + {file = "requests-2.33.1.tar.gz", hash = "sha256:18817f8c57c6263968bc123d237e3b8b08ac046f5456bd1e307ee8f4250d3517"}, ] [package.dependencies] -certifi = ">=2017.4.17" +certifi = ">=2023.5.7" charset_normalizer = ">=2,<4" idna = ">=2.5,<4" -urllib3 = ">=1.21.1,<3" +urllib3 = ">=1.26,<3" [package.extras] socks = ["PySocks (>=1.5.6,!=1.5.7)"] -use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<8)"] [[package]] name = "requests-mock" @@ -1821,59 +2112,74 @@ jinja2 = ["ruamel.yaml.jinja2 (>=0.2)"] [[package]] name = "ruamel-yaml-clib" -version = "0.2.12" +version = "0.2.15" description = "C version of reader, parser and emitter for ruamel.yaml derived from libyaml" optional = false python-versions = ">=3.9" groups = ["main"] markers = "platform_python_implementation == \"CPython\" and python_version < \"3.13\"" files = [ - {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-macosx_13_0_arm64.whl", hash = "sha256:11f891336688faf5156a36293a9c362bdc7c88f03a8a027c2c1d8e0bcde998e5"}, - {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:a606ef75a60ecf3d924613892cc603b154178ee25abb3055db5062da811fd969"}, - {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd5415dded15c3822597455bc02bcd66e81ef8b7a48cb71a33628fc9fdde39df"}, - {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f66efbc1caa63c088dead1c4170d148eabc9b80d95fb75b6c92ac0aad2437d76"}, - {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:22353049ba4181685023b25b5b51a574bce33e7f51c759371a7422dcae5402a6"}, - {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:932205970b9f9991b34f55136be327501903f7c66830e9760a8ffb15b07f05cd"}, - {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a52d48f4e7bf9005e8f0a89209bf9a73f7190ddf0489eee5eb51377385f59f2a"}, - {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-win32.whl", hash = "sha256:3eac5a91891ceb88138c113f9db04f3cebdae277f5d44eaa3651a4f573e6a5da"}, - {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-win_amd64.whl", hash = "sha256:ab007f2f5a87bd08ab1499bdf96f3d5c6ad4dcfa364884cb4549aa0154b13a28"}, - {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:4a6679521a58256a90b0d89e03992c15144c5f3858f40d7c18886023d7943db6"}, - {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:d84318609196d6bd6da0edfa25cedfbabd8dbde5140a0a23af29ad4b8f91fb1e"}, - {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bb43a269eb827806502c7c8efb7ae7e9e9d0573257a46e8e952f4d4caba4f31e"}, - {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:811ea1594b8a0fb466172c384267a4e5e367298af6b228931f273b111f17ef52"}, - {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:cf12567a7b565cbf65d438dec6cfbe2917d3c1bdddfce84a9930b7d35ea59642"}, - {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7dd5adc8b930b12c8fc5b99e2d535a09889941aa0d0bd06f4749e9a9397c71d2"}, - {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1492a6051dab8d912fc2adeef0e8c72216b24d57bd896ea607cb90bb0c4981d3"}, - {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-win32.whl", hash = "sha256:bd0a08f0bab19093c54e18a14a10b4322e1eacc5217056f3c063bd2f59853ce4"}, - {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-win_amd64.whl", hash = "sha256:a274fb2cb086c7a3dea4322ec27f4cb5cc4b6298adb583ab0e211a4682f241eb"}, - {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:20b0f8dc160ba83b6dcc0e256846e1a02d044e13f7ea74a3d1d56ede4e48c632"}, - {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-manylinux2014_aarch64.whl", hash = "sha256:943f32bc9dedb3abff9879edc134901df92cfce2c3d5c9348f172f62eb2d771d"}, - {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95c3829bb364fdb8e0332c9931ecf57d9be3519241323c5274bd82f709cebc0c"}, - {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:749c16fcc4a2b09f28843cda5a193e0283e47454b63ec4b81eaa2242f50e4ccd"}, - {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bf165fef1f223beae7333275156ab2022cffe255dcc51c27f066b4370da81e31"}, - {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:32621c177bbf782ca5a18ba4d7af0f1082a3f6e517ac2a18b3974d4edf349680"}, - {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b82a7c94a498853aa0b272fd5bc67f29008da798d4f93a2f9f289feb8426a58d"}, - {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-win32.whl", hash = "sha256:e8c4ebfcfd57177b572e2040777b8abc537cdef58a2120e830124946aa9b42c5"}, - {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-win_amd64.whl", hash = "sha256:0467c5965282c62203273b838ae77c0d29d7638c8a4e3a1c8bdd3602c10904e4"}, - {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:4c8c5d82f50bb53986a5e02d1b3092b03622c02c2eb78e29bec33fd9593bae1a"}, - {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-manylinux2014_aarch64.whl", hash = "sha256:e7e3736715fbf53e9be2a79eb4db68e4ed857017344d697e8b9749444ae57475"}, - {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b7e75b4965e1d4690e93021adfcecccbca7d61c7bddd8e22406ef2ff20d74ef"}, - {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:96777d473c05ee3e5e3c3e999f5d23c6f4ec5b0c38c098b3a5229085f74236c6"}, - {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:3bc2a80e6420ca8b7d3590791e2dfc709c88ab9152c00eeb511c9875ce5778bf"}, - {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:e188d2699864c11c36cdfdada94d781fd5d6b0071cd9c427bceb08ad3d7c70e1"}, - {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4f6f3eac23941b32afccc23081e1f50612bdbe4e982012ef4f5797986828cd01"}, - {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-win32.whl", hash = "sha256:6442cb36270b3afb1b4951f060eccca1ce49f3d087ca1ca4563a6eb479cb3de6"}, - {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-win_amd64.whl", hash = "sha256:e5b8daf27af0b90da7bb903a876477a9e6d7270be6146906b276605997c7e9a3"}, - {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:fc4b630cd3fa2cf7fce38afa91d7cfe844a9f75d7f0f36393fa98815e911d987"}, - {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:bc5f1e1c28e966d61d2519f2a3d451ba989f9ea0f2307de7bc45baa526de9e45"}, - {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a0e060aace4c24dcaf71023bbd7d42674e3b230f7e7b97317baf1e953e5b519"}, - {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e2f1c3765db32be59d18ab3953f43ab62a761327aafc1594a2a1fbe038b8b8a7"}, - {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:d85252669dc32f98ebcd5d36768f5d4faeaeaa2d655ac0473be490ecdae3c285"}, - {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e143ada795c341b56de9418c58d028989093ee611aa27ffb9b7f609c00d813ed"}, - {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2c59aa6170b990d8d2719323e628aaf36f3bfbc1c26279c0eeeb24d05d2d11c7"}, - {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-win32.whl", hash = "sha256:beffaed67936fbbeffd10966a4eb53c402fafd3d6833770516bf7314bc6ffa12"}, - {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-win_amd64.whl", hash = "sha256:040ae85536960525ea62868b642bdb0c2cc6021c9f9d507810c0c604e66f5a7b"}, - {file = "ruamel.yaml.clib-0.2.12.tar.gz", hash = "sha256:6c8fbb13ec503f99a91901ab46e0b07ae7941cd527393187039aec586fdfd36f"}, + {file = "ruamel_yaml_clib-0.2.15-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:88eea8baf72f0ccf232c22124d122a7f26e8a24110a0273d9bcddcb0f7e1fa03"}, + {file = "ruamel_yaml_clib-0.2.15-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9b6f7d74d094d1f3a4e157278da97752f16ee230080ae331fcc219056ca54f77"}, + {file = "ruamel_yaml_clib-0.2.15-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:4be366220090d7c3424ac2b71c90d1044ea34fca8c0b88f250064fd06087e614"}, + {file = "ruamel_yaml_clib-0.2.15-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1f66f600833af58bea694d5892453f2270695b92200280ee8c625ec5a477eed3"}, + {file = "ruamel_yaml_clib-0.2.15-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:da3d6adadcf55a93c214d23941aef4abfd45652110aed6580e814152f385b862"}, + {file = "ruamel_yaml_clib-0.2.15-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e9fde97ecb7bb9c41261c2ce0da10323e9227555c674989f8d9eb7572fc2098d"}, + {file = "ruamel_yaml_clib-0.2.15-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:05c70f7f86be6f7bee53794d80050a28ae7e13e4a0087c1839dcdefd68eb36b6"}, + {file = "ruamel_yaml_clib-0.2.15-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:6f1d38cbe622039d111b69e9ca945e7e3efebb30ba998867908773183357f3ed"}, + {file = "ruamel_yaml_clib-0.2.15-cp310-cp310-win32.whl", hash = "sha256:fe239bdfdae2302e93bd6e8264bd9b71290218fff7084a9db250b55caaccf43f"}, + {file = "ruamel_yaml_clib-0.2.15-cp310-cp310-win_amd64.whl", hash = "sha256:468858e5cbde0198337e6a2a78eda8c3fb148bdf4c6498eaf4bc9ba3f8e780bd"}, + {file = "ruamel_yaml_clib-0.2.15-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c583229f336682b7212a43d2fa32c30e643d3076178fb9f7a6a14dde85a2d8bd"}, + {file = "ruamel_yaml_clib-0.2.15-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:56ea19c157ed8c74b6be51b5fa1c3aff6e289a041575f0556f66e5fb848bb137"}, + {file = "ruamel_yaml_clib-0.2.15-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:5fea0932358e18293407feb921d4f4457db837b67ec1837f87074667449f9401"}, + {file = "ruamel_yaml_clib-0.2.15-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ef71831bd61fbdb7aa0399d5c4da06bea37107ab5c79ff884cc07f2450910262"}, + {file = "ruamel_yaml_clib-0.2.15-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:617d35dc765715fa86f8c3ccdae1e4229055832c452d4ec20856136acc75053f"}, + {file = "ruamel_yaml_clib-0.2.15-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1b45498cc81a4724a2d42273d6cfc243c0547ad7c6b87b4f774cb7bcc131c98d"}, + {file = "ruamel_yaml_clib-0.2.15-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:def5663361f6771b18646620fca12968aae730132e104688766cf8a3b1d65922"}, + {file = "ruamel_yaml_clib-0.2.15-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:014181cdec565c8745b7cbc4de3bf2cc8ced05183d986e6d1200168e5bb59490"}, + {file = "ruamel_yaml_clib-0.2.15-cp311-cp311-win32.whl", hash = "sha256:d290eda8f6ada19e1771b54e5706b8f9807e6bb08e873900d5ba114ced13e02c"}, + {file = "ruamel_yaml_clib-0.2.15-cp311-cp311-win_amd64.whl", hash = "sha256:bdc06ad71173b915167702f55d0f3f027fc61abd975bd308a0968c02db4a4c3e"}, + {file = "ruamel_yaml_clib-0.2.15-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:cb15a2e2a90c8475df45c0949793af1ff413acfb0a716b8b94e488ea95ce7cff"}, + {file = "ruamel_yaml_clib-0.2.15-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:64da03cbe93c1e91af133f5bec37fd24d0d4ba2418eaf970d7166b0a26a148a2"}, + {file = "ruamel_yaml_clib-0.2.15-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:f6d3655e95a80325b84c4e14c080b2470fe4f33b6846f288379ce36154993fb1"}, + {file = "ruamel_yaml_clib-0.2.15-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:71845d377c7a47afc6592aacfea738cc8a7e876d586dfba814501d8c53c1ba60"}, + {file = "ruamel_yaml_clib-0.2.15-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:11e5499db1ccbc7f4b41f0565e4f799d863ea720e01d3e99fa0b7b5fcd7802c9"}, + {file = "ruamel_yaml_clib-0.2.15-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:4b293a37dc97e2b1e8a1aec62792d1e52027087c8eea4fc7b5abd2bdafdd6642"}, + {file = "ruamel_yaml_clib-0.2.15-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:512571ad41bba04eac7268fe33f7f4742210ca26a81fe0c75357fa682636c690"}, + {file = "ruamel_yaml_clib-0.2.15-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e5e9f630c73a490b758bf14d859a39f375e6999aea5ddd2e2e9da89b9953486a"}, + {file = "ruamel_yaml_clib-0.2.15-cp312-cp312-win32.whl", hash = "sha256:f4421ab780c37210a07d138e56dd4b51f8642187cdfb433eb687fe8c11de0144"}, + {file = "ruamel_yaml_clib-0.2.15-cp312-cp312-win_amd64.whl", hash = "sha256:2b216904750889133d9222b7b873c199d48ecbb12912aca78970f84a5aa1a4bc"}, + {file = "ruamel_yaml_clib-0.2.15-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:4dcec721fddbb62e60c2801ba08c87010bd6b700054a09998c4d09c08147b8fb"}, + {file = "ruamel_yaml_clib-0.2.15-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:65f48245279f9bb301d1276f9679b82e4c080a1ae25e679f682ac62446fac471"}, + {file = "ruamel_yaml_clib-0.2.15-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:46895c17ead5e22bea5e576f1db7e41cb273e8d062c04a6a49013d9f60996c25"}, + {file = "ruamel_yaml_clib-0.2.15-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3eb199178b08956e5be6288ee0b05b2fb0b5c1f309725ad25d9c6ea7e27f962a"}, + {file = "ruamel_yaml_clib-0.2.15-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4d1032919280ebc04a80e4fb1e93f7a738129857eaec9448310e638c8bccefcf"}, + {file = "ruamel_yaml_clib-0.2.15-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ab0df0648d86a7ecbd9c632e8f8d6b21bb21b5fc9d9e095c796cacf32a728d2d"}, + {file = "ruamel_yaml_clib-0.2.15-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:331fb180858dd8534f0e61aa243b944f25e73a4dae9962bd44c46d1761126bbf"}, + {file = "ruamel_yaml_clib-0.2.15-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fd4c928ddf6bce586285daa6d90680b9c291cfd045fc40aad34e445d57b1bf51"}, + {file = "ruamel_yaml_clib-0.2.15-cp313-cp313-win32.whl", hash = "sha256:bf0846d629e160223805db9fe8cc7aec16aaa11a07310c50c8c7164efa440aec"}, + {file = "ruamel_yaml_clib-0.2.15-cp313-cp313-win_amd64.whl", hash = "sha256:45702dfbea1420ba3450bb3dd9a80b33f0badd57539c6aac09f42584303e0db6"}, + {file = "ruamel_yaml_clib-0.2.15-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:753faf20b3a5906faf1fc50e4ddb8c074cb9b251e00b14c18b28492f933ac8ef"}, + {file = "ruamel_yaml_clib-0.2.15-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:480894aee0b29752560a9de46c0e5f84a82602f2bc5c6cde8db9a345319acfdf"}, + {file = "ruamel_yaml_clib-0.2.15-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:4d3b58ab2454b4747442ac76fab66739c72b1e2bb9bd173d7694b9f9dbc9c000"}, + {file = "ruamel_yaml_clib-0.2.15-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:bfd309b316228acecfa30670c3887dcedf9b7a44ea39e2101e75d2654522acd4"}, + {file = "ruamel_yaml_clib-0.2.15-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2812ff359ec1f30129b62372e5f22a52936fac13d5d21e70373dbca5d64bb97c"}, + {file = "ruamel_yaml_clib-0.2.15-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7e74ea87307303ba91073b63e67f2c667e93f05a8c63079ee5b7a5c8d0d7b043"}, + {file = "ruamel_yaml_clib-0.2.15-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:713cd68af9dfbe0bb588e144a61aad8dcc00ef92a82d2e87183ca662d242f524"}, + {file = "ruamel_yaml_clib-0.2.15-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:542d77b72786a35563f97069b9379ce762944e67055bea293480f7734b2c7e5e"}, + {file = "ruamel_yaml_clib-0.2.15-cp314-cp314-win32.whl", hash = "sha256:424ead8cef3939d690c4b5c85ef5b52155a231ff8b252961b6516ed7cf05f6aa"}, + {file = "ruamel_yaml_clib-0.2.15-cp314-cp314-win_amd64.whl", hash = "sha256:ac9b8d5fa4bb7fd2917ab5027f60d4234345fd366fe39aa711d5dca090aa1467"}, + {file = "ruamel_yaml_clib-0.2.15-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:923816815974425fbb1f1bf57e85eca6e14d8adc313c66db21c094927ad01815"}, + {file = "ruamel_yaml_clib-0.2.15-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:dcc7f3162d3711fd5d52e2267e44636e3e566d1e5675a5f0b30e98f2c4af7974"}, + {file = "ruamel_yaml_clib-0.2.15-cp39-cp39-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:5d3c9210219cbc0f22706f19b154c9a798ff65a6beeafbf77fc9c057ec806f7d"}, + {file = "ruamel_yaml_clib-0.2.15-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1bb7b728fd9f405aa00b4a0b17ba3f3b810d0ccc5f77f7373162e9b5f0ff75d5"}, + {file = "ruamel_yaml_clib-0.2.15-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3cb75a3c14f1d6c3c2a94631e362802f70e83e20d1f2b2ef3026c05b415c4900"}, + {file = "ruamel_yaml_clib-0.2.15-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:badd1d7283f3e5894779a6ea8944cc765138b96804496c91812b2829f70e18a7"}, + {file = "ruamel_yaml_clib-0.2.15-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:0ba6604bbc3dfcef844631932d06a1a4dcac3fee904efccf582261948431628a"}, + {file = "ruamel_yaml_clib-0.2.15-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:a8220fd4c6f98485e97aea65e1df76d4fed1678ede1fe1d0eed2957230d287c4"}, + {file = "ruamel_yaml_clib-0.2.15-cp39-cp39-win32.whl", hash = "sha256:04d21dc9c57d9608225da28285900762befbb0165ae48482c15d8d4989d4af14"}, + {file = "ruamel_yaml_clib-0.2.15-cp39-cp39-win_amd64.whl", hash = "sha256:27dc656e84396e6d687f97c6e65fb284d100483628f02d95464fd731743a4afe"}, + {file = "ruamel_yaml_clib-0.2.15.tar.gz", hash = "sha256:46e4cc8c43ef6a94885f72512094e482114a8a706d3c555a34ed4b0d20200600"}, ] [[package]] @@ -1893,24 +2199,24 @@ pyaml = ">=17.12.1" [[package]] name = "setuptools" -version = "80.9.0" -description = "Easily download, build, install, upgrade, and uninstall Python packages" +version = "82.0.1" +description = "Most extensible Python build backend with support for C/C++ extension modules" optional = false python-versions = ">=3.9" groups = ["main", "docs"] files = [ - {file = "setuptools-80.9.0-py3-none-any.whl", hash = "sha256:062d34222ad13e0cc312a4c02d73f059e86a4acbfbdea8f8f76b28c99f306922"}, - {file = "setuptools-80.9.0.tar.gz", hash = "sha256:f36b47402ecde768dbfafc46e8e4207b4360c654f1f3bb84475f0a28628fb19c"}, + {file = "setuptools-82.0.1-py3-none-any.whl", hash = "sha256:a59e362652f08dcd477c78bb6e7bd9d80a7995bc73ce773050228a348ce2e5bb"}, + {file = "setuptools-82.0.1.tar.gz", hash = "sha256:7d872682c5d01cfde07da7bccc7b65469d3dca203318515ada1de5eda35efbf9"}, ] [package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\"", "ruff (>=0.8.0) ; sys_platform != \"cygwin\""] -core = ["importlib_metadata (>=6) ; python_version < \"3.10\"", "jaraco.functools (>=4)", "jaraco.text (>=3.7)", "more_itertools", "more_itertools (>=8.8)", "packaging (>=24.2)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1) ; python_version < \"3.11\"", "wheel (>=0.43.0)"] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\"", "ruff (>=0.13.0) ; sys_platform != \"cygwin\""] +core = ["importlib_metadata (>=6) ; python_version < \"3.10\"", "jaraco.functools (>=4)", "jaraco.text (>=3.7)", "more_itertools", "more_itertools (>=8.8)", "packaging (>=24.2)", "tomli (>=2.0.1) ; python_version < \"3.11\"", "wheel (>=0.43.0)"] cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] enabler = ["pytest-enabler (>=2.2)"] test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21) ; python_version >= \"3.9\" and sys_platform != \"cygwin\"", "jaraco.envs (>=2.2)", "jaraco.path (>=3.7.2)", "jaraco.test (>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf ; sys_platform != \"cygwin\"", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] -type = ["importlib_metadata (>=7.0.2) ; python_version < \"3.10\"", "jaraco.develop (>=7.21) ; sys_platform != \"cygwin\"", "mypy (==1.14.*)", "pytest-mypy"] +type = ["importlib_metadata (>=7.0.2) ; python_version < \"3.10\"", "jaraco.develop (>=7.21) ; sys_platform != \"cygwin\"", "mypy (==1.18.*)", "pytest-mypy"] [[package]] name = "six" @@ -1938,14 +2244,14 @@ files = [ [[package]] name = "soupsieve" -version = "2.7" +version = "2.8.3" description = "A modern CSS selector implementation for Beautiful Soup." optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" groups = ["docs"] files = [ - {file = "soupsieve-2.7-py3-none-any.whl", hash = "sha256:6e60cc5c1ffaf1cebcc12e8188320b72071e922c2e897f737cadce79ad5d30c4"}, - {file = "soupsieve-2.7.tar.gz", hash = "sha256:ad282f9b6926286d2ead4750552c8a6142bc4c783fd66b0293547c8fe6ae126a"}, + {file = "soupsieve-2.8.3-py3-none-any.whl", hash = "sha256:ed64f2ba4eebeab06cc4962affce381647455978ffc1e36bb79a545b91f45a95"}, + {file = "soupsieve-2.8.3.tar.gz", hash = "sha256:3267f1eeea4251fb42728b6dfb746edc9acaffc4a45b27e19450b676586e8349"}, ] [[package]] @@ -2347,91 +2653,116 @@ files = [ [[package]] name = "tomli" -version = "2.2.1" +version = "2.4.1" description = "A lil' TOML parser" optional = false python-versions = ">=3.8" groups = ["dev"] files = [ - {file = "tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249"}, - {file = "tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6"}, - {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a"}, - {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee"}, - {file = "tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e"}, - {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4"}, - {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106"}, - {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8"}, - {file = "tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff"}, - {file = "tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b"}, - {file = "tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea"}, - {file = "tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8"}, - {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192"}, - {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222"}, - {file = "tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77"}, - {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6"}, - {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd"}, - {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e"}, - {file = "tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98"}, - {file = "tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4"}, - {file = "tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7"}, - {file = "tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c"}, - {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13"}, - {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281"}, - {file = "tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272"}, - {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140"}, - {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2"}, - {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744"}, - {file = "tomli-2.2.1-cp313-cp313-win32.whl", hash = "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec"}, - {file = "tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69"}, - {file = "tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc"}, - {file = "tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff"}, + {file = "tomli-2.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f8f0fc26ec2cc2b965b7a3b87cd19c5c6b8c5e5f436b984e85f486d652285c30"}, + {file = "tomli-2.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4ab97e64ccda8756376892c53a72bd1f964e519c77236368527f758fbc36a53a"}, + {file = "tomli-2.4.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:96481a5786729fd470164b47cdb3e0e58062a496f455ee41b4403be77cb5a076"}, + {file = "tomli-2.4.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5a881ab208c0baf688221f8cecc5401bd291d67e38a1ac884d6736cbcd8247e9"}, + {file = "tomli-2.4.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:47149d5bd38761ac8be13a84864bf0b7b70bc051806bc3669ab1cbc56216b23c"}, + {file = "tomli-2.4.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ec9bfaf3ad2df51ace80688143a6a4ebc09a248f6ff781a9945e51937008fcbc"}, + {file = "tomli-2.4.1-cp311-cp311-win32.whl", hash = "sha256:ff2983983d34813c1aeb0fa89091e76c3a22889ee83ab27c5eeb45100560c049"}, + {file = "tomli-2.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:5ee18d9ebdb417e384b58fe414e8d6af9f4e7a0ae761519fb50f721de398dd4e"}, + {file = "tomli-2.4.1-cp311-cp311-win_arm64.whl", hash = "sha256:c2541745709bad0264b7d4705ad453b76ccd191e64aa6f0fc66b69a293a45ece"}, + {file = "tomli-2.4.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:c742f741d58a28940ce01d58f0ab2ea3ced8b12402f162f4d534dfe18ba1cd6a"}, + {file = "tomli-2.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7f86fd587c4ed9dd76f318225e7d9b29cfc5a9d43de44e5754db8d1128487085"}, + {file = "tomli-2.4.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ff18e6a727ee0ab0388507b89d1bc6a22b138d1e2fa56d1ad494586d61d2eae9"}, + {file = "tomli-2.4.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:136443dbd7e1dee43c68ac2694fde36b2849865fa258d39bf822c10e8068eac5"}, + {file = "tomli-2.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:5e262d41726bc187e69af7825504c933b6794dc3fbd5945e41a79bb14c31f585"}, + {file = "tomli-2.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:5cb41aa38891e073ee49d55fbc7839cfdb2bc0e600add13874d048c94aadddd1"}, + {file = "tomli-2.4.1-cp312-cp312-win32.whl", hash = "sha256:da25dc3563bff5965356133435b757a795a17b17d01dbc0f42fb32447ddfd917"}, + {file = "tomli-2.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:52c8ef851d9a240f11a88c003eacb03c31fc1c9c4ec64a99a0f922b93874fda9"}, + {file = "tomli-2.4.1-cp312-cp312-win_arm64.whl", hash = "sha256:f758f1b9299d059cc3f6546ae2af89670cb1c4d48ea29c3cacc4fe7de3058257"}, + {file = "tomli-2.4.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:36d2bd2ad5fb9eaddba5226aa02c8ec3fa4f192631e347b3ed28186d43be6b54"}, + {file = "tomli-2.4.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:eb0dc4e38e6a1fd579e5d50369aa2e10acfc9cace504579b2faabb478e76941a"}, + {file = "tomli-2.4.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c7f2c7f2b9ca6bdeef8f0fa897f8e05085923eb091721675170254cbc5b02897"}, + {file = "tomli-2.4.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f3c6818a1a86dd6dca7ddcaaf76947d5ba31aecc28cb1b67009a5877c9a64f3f"}, + {file = "tomli-2.4.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d312ef37c91508b0ab2cee7da26ec0b3ed2f03ce12bd87a588d771ae15dcf82d"}, + {file = "tomli-2.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:51529d40e3ca50046d7606fa99ce3956a617f9b36380da3b7f0dd3dd28e68cb5"}, + {file = "tomli-2.4.1-cp313-cp313-win32.whl", hash = "sha256:2190f2e9dd7508d2a90ded5ed369255980a1bcdd58e52f7fe24b8162bf9fedbd"}, + {file = "tomli-2.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:8d65a2fbf9d2f8352685bc1364177ee3923d6baf5e7f43ea4959d7d8bc326a36"}, + {file = "tomli-2.4.1-cp313-cp313-win_arm64.whl", hash = "sha256:4b605484e43cdc43f0954ddae319fb75f04cc10dd80d830540060ee7cd0243cd"}, + {file = "tomli-2.4.1-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:fd0409a3653af6c147209d267a0e4243f0ae46b011aa978b1080359fddc9b6cf"}, + {file = "tomli-2.4.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:a120733b01c45e9a0c34aeef92bf0cf1d56cfe81ed9d47d562f9ed591a9828ac"}, + {file = "tomli-2.4.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:559db847dc486944896521f68d8190be1c9e719fced785720d2216fe7022b662"}, + {file = "tomli-2.4.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:01f520d4f53ef97964a240a035ec2a869fe1a37dde002b57ebc4417a27ccd853"}, + {file = "tomli-2.4.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7f94b27a62cfad8496c8d2513e1a222dd446f095fca8987fceef261225538a15"}, + {file = "tomli-2.4.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:ede3e6487c5ef5d28634ba3f31f989030ad6af71edfb0055cbbd14189ff240ba"}, + {file = "tomli-2.4.1-cp314-cp314-win32.whl", hash = "sha256:3d48a93ee1c9b79c04bb38772ee1b64dcf18ff43085896ea460ca8dec96f35f6"}, + {file = "tomli-2.4.1-cp314-cp314-win_amd64.whl", hash = "sha256:88dceee75c2c63af144e456745e10101eb67361050196b0b6af5d717254dddf7"}, + {file = "tomli-2.4.1-cp314-cp314-win_arm64.whl", hash = "sha256:b8c198f8c1805dc42708689ed6864951fd2494f924149d3e4bce7710f8eb5232"}, + {file = "tomli-2.4.1-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:d4d8fe59808a54658fcc0160ecfb1b30f9089906c50b23bcb4c69eddc19ec2b4"}, + {file = "tomli-2.4.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:7008df2e7655c495dd12d2a4ad038ff878d4ca4b81fccaf82b714e07eae4402c"}, + {file = "tomli-2.4.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1d8591993e228b0c930c4bb0db464bdad97b3289fb981255d6c9a41aedc84b2d"}, + {file = "tomli-2.4.1-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:734e20b57ba95624ecf1841e72b53f6e186355e216e5412de414e3c51e5e3c41"}, + {file = "tomli-2.4.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:8a650c2dbafa08d42e51ba0b62740dae4ecb9338eefa093aa5c78ceb546fcd5c"}, + {file = "tomli-2.4.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:504aa796fe0569bb43171066009ead363de03675276d2d121ac1a4572397870f"}, + {file = "tomli-2.4.1-cp314-cp314t-win32.whl", hash = "sha256:b1d22e6e9387bf4739fbe23bfa80e93f6b0373a7f1b96c6227c32bef95a4d7a8"}, + {file = "tomli-2.4.1-cp314-cp314t-win_amd64.whl", hash = "sha256:2c1c351919aca02858f740c6d33adea0c5deea37f9ecca1cc1ef9e884a619d26"}, + {file = "tomli-2.4.1-cp314-cp314t-win_arm64.whl", hash = "sha256:eab21f45c7f66c13f2a9e0e1535309cee140182a9cdae1e041d02e47291e8396"}, + {file = "tomli-2.4.1-py3-none-any.whl", hash = "sha256:0d85819802132122da43cb86656f8d1f8c6587d54ae7dcaf30e90533028b49fe"}, + {file = "tomli-2.4.1.tar.gz", hash = "sha256:7c7e1a961a0b2f2472c1ac5b69affa0ae1132c39adcb67aba98568702b9cc23f"}, +] + +[[package]] +name = "tomlkit" +version = "0.14.0" +description = "Style preserving TOML library" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "tomlkit-0.14.0-py3-none-any.whl", hash = "sha256:592064ed85b40fa213469f81ac584f67a4f2992509a7c3ea2d632208623a3680"}, + {file = "tomlkit-0.14.0.tar.gz", hash = "sha256:cf00efca415dbd57575befb1f6634c4f42d2d87dbba376128adb42c121b87064"}, ] [[package]] name = "tornado" -version = "6.5.1" +version = "6.5.5" description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." optional = false python-versions = ">=3.9" groups = ["docs"] files = [ - {file = "tornado-6.5.1-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:d50065ba7fd11d3bd41bcad0825227cc9a95154bad83239357094c36708001f7"}, - {file = "tornado-6.5.1-cp39-abi3-macosx_10_9_x86_64.whl", hash = "sha256:9e9ca370f717997cb85606d074b0e5b247282cf5e2e1611568b8821afe0342d6"}, - {file = "tornado-6.5.1-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b77e9dfa7ed69754a54c89d82ef746398be82f749df69c4d3abe75c4d1ff4888"}, - {file = "tornado-6.5.1-cp39-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:253b76040ee3bab8bcf7ba9feb136436a3787208717a1fb9f2c16b744fba7331"}, - {file = "tornado-6.5.1-cp39-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:308473f4cc5a76227157cdf904de33ac268af770b2c5f05ca6c1161d82fdd95e"}, - {file = "tornado-6.5.1-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:caec6314ce8a81cf69bd89909f4b633b9f523834dc1a352021775d45e51d9401"}, - {file = "tornado-6.5.1-cp39-abi3-musllinux_1_2_i686.whl", hash = "sha256:13ce6e3396c24e2808774741331638ee6c2f50b114b97a55c5b442df65fd9692"}, - {file = "tornado-6.5.1-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:5cae6145f4cdf5ab24744526cc0f55a17d76f02c98f4cff9daa08ae9a217448a"}, - {file = "tornado-6.5.1-cp39-abi3-win32.whl", hash = "sha256:e0a36e1bc684dca10b1aa75a31df8bdfed656831489bc1e6a6ebed05dc1ec365"}, - {file = "tornado-6.5.1-cp39-abi3-win_amd64.whl", hash = "sha256:908e7d64567cecd4c2b458075589a775063453aeb1d2a1853eedb806922f568b"}, - {file = "tornado-6.5.1-cp39-abi3-win_arm64.whl", hash = "sha256:02420a0eb7bf617257b9935e2b754d1b63897525d8a289c9d65690d580b4dcf7"}, - {file = "tornado-6.5.1.tar.gz", hash = "sha256:84ceece391e8eb9b2b95578db65e920d2a61070260594819589609ba9bc6308c"}, + {file = "tornado-6.5.5-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:487dc9cc380e29f58c7ab88f9e27cdeef04b2140862e5076a66fb6bb68bb1bfa"}, + {file = "tornado-6.5.5-cp39-abi3-macosx_10_9_x86_64.whl", hash = "sha256:65a7f1d46d4bb41df1ac99f5fcb685fb25c7e61613742d5108b010975a9a6521"}, + {file = "tornado-6.5.5-cp39-abi3-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:e74c92e8e65086b338fd56333fb9a68b9f6f2fe7ad532645a290a464bcf46be5"}, + {file = "tornado-6.5.5-cp39-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:435319e9e340276428bbdb4e7fa732c2d399386d1de5686cb331ec8eee754f07"}, + {file = "tornado-6.5.5-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:3f54aa540bdbfee7b9eb268ead60e7d199de5021facd276819c193c0fb28ea4e"}, + {file = "tornado-6.5.5-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:36abed1754faeb80fbd6e64db2758091e1320f6bba74a4cf8c09cd18ccce8aca"}, + {file = "tornado-6.5.5-cp39-abi3-win32.whl", hash = "sha256:dd3eafaaeec1c7f2f8fdcd5f964e8907ad788fe8a5a32c4426fbbdda621223b7"}, + {file = "tornado-6.5.5-cp39-abi3-win_amd64.whl", hash = "sha256:6443a794ba961a9f619b1ae926a2e900ac20c34483eea67be4ed8f1e58d3ef7b"}, + {file = "tornado-6.5.5-cp39-abi3-win_arm64.whl", hash = "sha256:2c9a876e094109333f888539ddb2de4361743e5d21eece20688e3e351e4990a6"}, + {file = "tornado-6.5.5.tar.gz", hash = "sha256:192b8f3ea91bd7f1f50c06955416ed76c6b72f96779b962f07f911b91e8d30e9"}, ] [[package]] name = "typing-extensions" -version = "4.14.1" +version = "4.15.0" description = "Backported and Experimental Type Hints for Python 3.9+" optional = false python-versions = ">=3.9" groups = ["main", "dev", "docs"] files = [ - {file = "typing_extensions-4.14.1-py3-none-any.whl", hash = "sha256:d1e1e3b58374dc93031d6eda2420a48ea44a36c2b4766a4fdeb3710755731d76"}, - {file = "typing_extensions-4.14.1.tar.gz", hash = "sha256:38b39f4aeeab64884ce9f74c94263ef78f3c22467c8724005483154c26648d36"}, + {file = "typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548"}, + {file = "typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466"}, ] markers = {dev = "python_version == \"3.10\""} [[package]] name = "typing-inspection" -version = "0.4.1" +version = "0.4.2" description = "Runtime typing introspection tools" optional = false python-versions = ">=3.9" groups = ["main"] files = [ - {file = "typing_inspection-0.4.1-py3-none-any.whl", hash = "sha256:389055682238f53b04f7badcb49b989835495a96700ced5dab2d8feae4b26f51"}, - {file = "typing_inspection-0.4.1.tar.gz", hash = "sha256:6ae134cc0203c33377d43188d4064e9b357dba58cff3185f22924610e70a9d28"}, + {file = "typing_inspection-0.4.2-py3-none-any.whl", hash = "sha256:4ed1cacbdc298c220f1bd249ed5287caa16f34d44ef4e9c3d0cbad5b521545e7"}, + {file = "typing_inspection-0.4.2.tar.gz", hash = "sha256:ba561c48a67c5958007083d386c3295464928b01faa735ab8547c5692e87f464"}, ] [package.dependencies] @@ -2439,32 +2770,32 @@ typing-extensions = ">=4.12.0" [[package]] name = "urllib3" -version = "2.5.0" +version = "2.6.3" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.9" groups = ["main", "dev", "docs"] files = [ - {file = "urllib3-2.5.0-py3-none-any.whl", hash = "sha256:e6b01673c0fa6a13e374b50871808eb3bf7046c4b125b216f6bf1cc604cff0dc"}, - {file = "urllib3-2.5.0.tar.gz", hash = "sha256:3fc47733c7e419d4bc3f6b3dc2b4f890bb743906a30d56ba4a5bfa4bbff92760"}, + {file = "urllib3-2.6.3-py3-none-any.whl", hash = "sha256:bf272323e553dfb2e87d9bfd225ca7b0f467b919d7bbd355436d3fd37cb0acd4"}, + {file = "urllib3-2.6.3.tar.gz", hash = "sha256:1b62b6884944a57dbe321509ab94fd4d3b307075e0c2eae991ac71ee15ad38ed"}, ] [package.extras] -brotli = ["brotli (>=1.0.9) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\""] +brotli = ["brotli (>=1.2.0) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=1.2.0.0) ; platform_python_implementation != \"CPython\""] h2 = ["h2 (>=4,<5)"] socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] -zstd = ["zstandard (>=0.18.0)"] +zstd = ["backports-zstd (>=1.0.0) ; python_version < \"3.14\""] [[package]] name = "werkzeug" -version = "3.1.6" +version = "3.1.8" description = "The comprehensive WSGI web application library." optional = false python-versions = ">=3.9" groups = ["dev"] files = [ - {file = "werkzeug-3.1.6-py3-none-any.whl", hash = "sha256:7ddf3357bb9564e407607f988f683d72038551200c704012bb9a4c523d42f131"}, - {file = "werkzeug-3.1.6.tar.gz", hash = "sha256:210c6bede5a420a913956b4791a7f4d6843a43b6fcee4dfa08a65e93007d0d25"}, + {file = "werkzeug-3.1.8-py3-none-any.whl", hash = "sha256:63a77fb8892bf28ebc3178683445222aa500e48ebad5ec77b0ad80f8726b1f50"}, + {file = "werkzeug-3.1.8.tar.gz", hash = "sha256:9bad61a4268dac112f1c5cd4630a56ede601b6ed420300677a869083d70a4c44"}, ] [package.dependencies] @@ -2475,109 +2806,123 @@ watchdog = ["watchdog (>=2.3)"] [[package]] name = "wheel" -version = "0.45.1" -description = "A built-package format for Python" +version = "0.47.0" +description = "Command line tool for manipulating wheel files" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" groups = ["docs"] files = [ - {file = "wheel-0.45.1-py3-none-any.whl", hash = "sha256:708e7481cc80179af0e556bbf0cc00b8444c7321e2700b8d8580231d13017248"}, - {file = "wheel-0.45.1.tar.gz", hash = "sha256:661e1abd9198507b1409a20c02106d9670b2576e916d58f520316666abca6729"}, + {file = "wheel-0.47.0-py3-none-any.whl", hash = "sha256:212281cab4dff978f6cedd499cd893e1f620791ca6ff7107cf270781e587eced"}, + {file = "wheel-0.47.0.tar.gz", hash = "sha256:cc72bd1009ba0cf63922e28f94d9d83b920aa2bb28f798a31d0691b02fa3c9b3"}, ] -[package.extras] -test = ["pytest (>=6.0.0)", "setuptools (>=65)"] +[package.dependencies] +packaging = ">=24.0" [[package]] name = "wrapt" -version = "1.17.2" +version = "2.1.2" description = "Module for decorators, wrappers and monkey patching." optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" groups = ["docs"] files = [ - {file = "wrapt-1.17.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3d57c572081fed831ad2d26fd430d565b76aa277ed1d30ff4d40670b1c0dd984"}, - {file = "wrapt-1.17.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b5e251054542ae57ac7f3fba5d10bfff615b6c2fb09abeb37d2f1463f841ae22"}, - {file = "wrapt-1.17.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:80dd7db6a7cb57ffbc279c4394246414ec99537ae81ffd702443335a61dbf3a7"}, - {file = "wrapt-1.17.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a6e821770cf99cc586d33833b2ff32faebdbe886bd6322395606cf55153246c"}, - {file = "wrapt-1.17.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b60fb58b90c6d63779cb0c0c54eeb38941bae3ecf7a73c764c52c88c2dcb9d72"}, - {file = "wrapt-1.17.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b870b5df5b71d8c3359d21be8f0d6c485fa0ebdb6477dda51a1ea54a9b558061"}, - {file = "wrapt-1.17.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:4011d137b9955791f9084749cba9a367c68d50ab8d11d64c50ba1688c9b457f2"}, - {file = "wrapt-1.17.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:1473400e5b2733e58b396a04eb7f35f541e1fb976d0c0724d0223dd607e0f74c"}, - {file = "wrapt-1.17.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3cedbfa9c940fdad3e6e941db7138e26ce8aad38ab5fe9dcfadfed9db7a54e62"}, - {file = "wrapt-1.17.2-cp310-cp310-win32.whl", hash = "sha256:582530701bff1dec6779efa00c516496968edd851fba224fbd86e46cc6b73563"}, - {file = "wrapt-1.17.2-cp310-cp310-win_amd64.whl", hash = "sha256:58705da316756681ad3c9c73fd15499aa4d8c69f9fd38dc8a35e06c12468582f"}, - {file = "wrapt-1.17.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ff04ef6eec3eee8a5efef2401495967a916feaa353643defcc03fc74fe213b58"}, - {file = "wrapt-1.17.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4db983e7bca53819efdbd64590ee96c9213894272c776966ca6306b73e4affda"}, - {file = "wrapt-1.17.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9abc77a4ce4c6f2a3168ff34b1da9b0f311a8f1cfd694ec96b0603dff1c79438"}, - {file = "wrapt-1.17.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b929ac182f5ace000d459c59c2c9c33047e20e935f8e39371fa6e3b85d56f4a"}, - {file = "wrapt-1.17.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f09b286faeff3c750a879d336fb6d8713206fc97af3adc14def0cdd349df6000"}, - {file = "wrapt-1.17.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a7ed2d9d039bd41e889f6fb9364554052ca21ce823580f6a07c4ec245c1f5d6"}, - {file = "wrapt-1.17.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:129a150f5c445165ff941fc02ee27df65940fcb8a22a61828b1853c98763a64b"}, - {file = "wrapt-1.17.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:1fb5699e4464afe5c7e65fa51d4f99e0b2eadcc176e4aa33600a3df7801d6662"}, - {file = "wrapt-1.17.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9a2bce789a5ea90e51a02dfcc39e31b7f1e662bc3317979aa7e5538e3a034f72"}, - {file = "wrapt-1.17.2-cp311-cp311-win32.whl", hash = "sha256:4afd5814270fdf6380616b321fd31435a462019d834f83c8611a0ce7484c7317"}, - {file = "wrapt-1.17.2-cp311-cp311-win_amd64.whl", hash = "sha256:acc130bc0375999da18e3d19e5a86403667ac0c4042a094fefb7eec8ebac7cf3"}, - {file = "wrapt-1.17.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:d5e2439eecc762cd85e7bd37161d4714aa03a33c5ba884e26c81559817ca0925"}, - {file = "wrapt-1.17.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:3fc7cb4c1c744f8c05cd5f9438a3caa6ab94ce8344e952d7c45a8ed59dd88392"}, - {file = "wrapt-1.17.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8fdbdb757d5390f7c675e558fd3186d590973244fab0c5fe63d373ade3e99d40"}, - {file = "wrapt-1.17.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5bb1d0dbf99411f3d871deb6faa9aabb9d4e744d67dcaaa05399af89d847a91d"}, - {file = "wrapt-1.17.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d18a4865f46b8579d44e4fe1e2bcbc6472ad83d98e22a26c963d46e4c125ef0b"}, - {file = "wrapt-1.17.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc570b5f14a79734437cb7b0500376b6b791153314986074486e0b0fa8d71d98"}, - {file = "wrapt-1.17.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6d9187b01bebc3875bac9b087948a2bccefe464a7d8f627cf6e48b1bbae30f82"}, - {file = "wrapt-1.17.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:9e8659775f1adf02eb1e6f109751268e493c73716ca5761f8acb695e52a756ae"}, - {file = "wrapt-1.17.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e8b2816ebef96d83657b56306152a93909a83f23994f4b30ad4573b00bd11bb9"}, - {file = "wrapt-1.17.2-cp312-cp312-win32.whl", hash = "sha256:468090021f391fe0056ad3e807e3d9034e0fd01adcd3bdfba977b6fdf4213ea9"}, - {file = "wrapt-1.17.2-cp312-cp312-win_amd64.whl", hash = "sha256:ec89ed91f2fa8e3f52ae53cd3cf640d6feff92ba90d62236a81e4e563ac0e991"}, - {file = "wrapt-1.17.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:6ed6ffac43aecfe6d86ec5b74b06a5be33d5bb9243d055141e8cabb12aa08125"}, - {file = "wrapt-1.17.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:35621ae4c00e056adb0009f8e86e28eb4a41a4bfa8f9bfa9fca7d343fe94f998"}, - {file = "wrapt-1.17.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a604bf7a053f8362d27eb9fefd2097f82600b856d5abe996d623babd067b1ab5"}, - {file = "wrapt-1.17.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5cbabee4f083b6b4cd282f5b817a867cf0b1028c54d445b7ec7cfe6505057cf8"}, - {file = "wrapt-1.17.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:49703ce2ddc220df165bd2962f8e03b84c89fee2d65e1c24a7defff6f988f4d6"}, - {file = "wrapt-1.17.2-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8112e52c5822fc4253f3901b676c55ddf288614dc7011634e2719718eaa187dc"}, - {file = "wrapt-1.17.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:9fee687dce376205d9a494e9c121e27183b2a3df18037f89d69bd7b35bcf59e2"}, - {file = "wrapt-1.17.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:18983c537e04d11cf027fbb60a1e8dfd5190e2b60cc27bc0808e653e7b218d1b"}, - {file = "wrapt-1.17.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:703919b1633412ab54bcf920ab388735832fdcb9f9a00ae49387f0fe67dad504"}, - {file = "wrapt-1.17.2-cp313-cp313-win32.whl", hash = "sha256:abbb9e76177c35d4e8568e58650aa6926040d6a9f6f03435b7a522bf1c487f9a"}, - {file = "wrapt-1.17.2-cp313-cp313-win_amd64.whl", hash = "sha256:69606d7bb691b50a4240ce6b22ebb319c1cfb164e5f6569835058196e0f3a845"}, - {file = "wrapt-1.17.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:4a721d3c943dae44f8e243b380cb645a709ba5bd35d3ad27bc2ed947e9c68192"}, - {file = "wrapt-1.17.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:766d8bbefcb9e00c3ac3b000d9acc51f1b399513f44d77dfe0eb026ad7c9a19b"}, - {file = "wrapt-1.17.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:e496a8ce2c256da1eb98bd15803a79bee00fc351f5dfb9ea82594a3f058309e0"}, - {file = "wrapt-1.17.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40d615e4fe22f4ad3528448c193b218e077656ca9ccb22ce2cb20db730f8d306"}, - {file = "wrapt-1.17.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a5aaeff38654462bc4b09023918b7f21790efb807f54c000a39d41d69cf552cb"}, - {file = "wrapt-1.17.2-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a7d15bbd2bc99e92e39f49a04653062ee6085c0e18b3b7512a4f2fe91f2d681"}, - {file = "wrapt-1.17.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:e3890b508a23299083e065f435a492b5435eba6e304a7114d2f919d400888cc6"}, - {file = "wrapt-1.17.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:8c8b293cd65ad716d13d8dd3624e42e5a19cc2a2f1acc74b30c2c13f15cb61a6"}, - {file = "wrapt-1.17.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:4c82b8785d98cdd9fed4cac84d765d234ed3251bd6afe34cb7ac523cb93e8b4f"}, - {file = "wrapt-1.17.2-cp313-cp313t-win32.whl", hash = "sha256:13e6afb7fe71fe7485a4550a8844cc9ffbe263c0f1a1eea569bc7091d4898555"}, - {file = "wrapt-1.17.2-cp313-cp313t-win_amd64.whl", hash = "sha256:eaf675418ed6b3b31c7a989fd007fa7c3be66ce14e5c3b27336383604c9da85c"}, - {file = "wrapt-1.17.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5c803c401ea1c1c18de70a06a6f79fcc9c5acfc79133e9869e730ad7f8ad8ef9"}, - {file = "wrapt-1.17.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f917c1180fdb8623c2b75a99192f4025e412597c50b2ac870f156de8fb101119"}, - {file = "wrapt-1.17.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ecc840861360ba9d176d413a5489b9a0aff6d6303d7e733e2c4623cfa26904a6"}, - {file = "wrapt-1.17.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb87745b2e6dc56361bfde481d5a378dc314b252a98d7dd19a651a3fa58f24a9"}, - {file = "wrapt-1.17.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:58455b79ec2661c3600e65c0a716955adc2410f7383755d537584b0de41b1d8a"}, - {file = "wrapt-1.17.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4e42a40a5e164cbfdb7b386c966a588b1047558a990981ace551ed7e12ca9c2"}, - {file = "wrapt-1.17.2-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:91bd7d1773e64019f9288b7a5101f3ae50d3d8e6b1de7edee9c2ccc1d32f0c0a"}, - {file = "wrapt-1.17.2-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:bb90fb8bda722a1b9d48ac1e6c38f923ea757b3baf8ebd0c82e09c5c1a0e7a04"}, - {file = "wrapt-1.17.2-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:08e7ce672e35efa54c5024936e559469436f8b8096253404faeb54d2a878416f"}, - {file = "wrapt-1.17.2-cp38-cp38-win32.whl", hash = "sha256:410a92fefd2e0e10d26210e1dfb4a876ddaf8439ef60d6434f21ef8d87efc5b7"}, - {file = "wrapt-1.17.2-cp38-cp38-win_amd64.whl", hash = "sha256:95c658736ec15602da0ed73f312d410117723914a5c91a14ee4cdd72f1d790b3"}, - {file = "wrapt-1.17.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:99039fa9e6306880572915728d7f6c24a86ec57b0a83f6b2491e1d8ab0235b9a"}, - {file = "wrapt-1.17.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2696993ee1eebd20b8e4ee4356483c4cb696066ddc24bd70bcbb80fa56ff9061"}, - {file = "wrapt-1.17.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:612dff5db80beef9e649c6d803a8d50c409082f1fedc9dbcdfde2983b2025b82"}, - {file = "wrapt-1.17.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:62c2caa1585c82b3f7a7ab56afef7b3602021d6da34fbc1cf234ff139fed3cd9"}, - {file = "wrapt-1.17.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c958bcfd59bacc2d0249dcfe575e71da54f9dcf4a8bdf89c4cb9a68a1170d73f"}, - {file = "wrapt-1.17.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc78a84e2dfbc27afe4b2bd7c80c8db9bca75cc5b85df52bfe634596a1da846b"}, - {file = "wrapt-1.17.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:ba0f0eb61ef00ea10e00eb53a9129501f52385c44853dbd6c4ad3f403603083f"}, - {file = "wrapt-1.17.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:1e1fe0e6ab7775fd842bc39e86f6dcfc4507ab0ffe206093e76d61cde37225c8"}, - {file = "wrapt-1.17.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:c86563182421896d73858e08e1db93afdd2b947a70064b813d515d66549e15f9"}, - {file = "wrapt-1.17.2-cp39-cp39-win32.whl", hash = "sha256:f393cda562f79828f38a819f4788641ac7c4085f30f1ce1a68672baa686482bb"}, - {file = "wrapt-1.17.2-cp39-cp39-win_amd64.whl", hash = "sha256:36ccae62f64235cf8ddb682073a60519426fdd4725524ae38874adf72b5f2aeb"}, - {file = "wrapt-1.17.2-py3-none-any.whl", hash = "sha256:b18f2d1533a71f069c7f82d524a52599053d4c7166e9dd374ae2136b7f40f7c8"}, - {file = "wrapt-1.17.2.tar.gz", hash = "sha256:41388e9d4d1522446fe79d3213196bd9e3b301a336965b9e27ca2788ebd122f3"}, + {file = "wrapt-2.1.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4b7a86d99a14f76facb269dc148590c01aaf47584071809a70da30555228158c"}, + {file = "wrapt-2.1.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a819e39017f95bf7aede768f75915635aa8f671f2993c036991b8d3bfe8dbb6f"}, + {file = "wrapt-2.1.2-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:5681123e60aed0e64c7d44f72bbf8b4ce45f79d81467e2c4c728629f5baf06eb"}, + {file = "wrapt-2.1.2-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2b8b28e97a44d21836259739ae76284e180b18abbb4dcfdff07a415cf1016c3e"}, + {file = "wrapt-2.1.2-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:cef91c95a50596fcdc31397eb6955476f82ae8a3f5a8eabdc13611b60ee380ba"}, + {file = "wrapt-2.1.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:dad63212b168de8569b1c512f4eac4b57f2c6934b30df32d6ee9534a79f1493f"}, + {file = "wrapt-2.1.2-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:d307aa6888d5efab2c1cde09843d48c843990be13069003184b67d426d145394"}, + {file = "wrapt-2.1.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:c87cf3f0c85e27b3ac7d9ad95da166bf8739ca215a8b171e8404a2d739897a45"}, + {file = "wrapt-2.1.2-cp310-cp310-win32.whl", hash = "sha256:d1c5fea4f9fe3762e2b905fdd67df51e4be7a73b7674957af2d2ade71a5c075d"}, + {file = "wrapt-2.1.2-cp310-cp310-win_amd64.whl", hash = "sha256:d8f7740e1af13dff2684e4d56fe604a7e04d6c94e737a60568d8d4238b9a0c71"}, + {file = "wrapt-2.1.2-cp310-cp310-win_arm64.whl", hash = "sha256:1c6cc827c00dc839350155f316f1f8b4b0c370f52b6a19e782e2bda89600c7dc"}, + {file = "wrapt-2.1.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:96159a0ee2b0277d44201c3b5be479a9979cf154e8c82fa5df49586a8e7679bb"}, + {file = "wrapt-2.1.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:98ba61833a77b747901e9012072f038795de7fc77849f1faa965464f3f87ff2d"}, + {file = "wrapt-2.1.2-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:767c0dbbe76cae2a60dd2b235ac0c87c9cccf4898aef8062e57bead46b5f6894"}, + {file = "wrapt-2.1.2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9c691a6bc752c0cc4711cc0c00896fcd0f116abc253609ef64ef930032821842"}, + {file = "wrapt-2.1.2-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:f3b7d73012ea75aee5844de58c88f44cf62d0d62711e39da5a82824a7c4626a8"}, + {file = "wrapt-2.1.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:577dff354e7acd9d411eaf4bfe76b724c89c89c8fc9b7e127ee28c5f7bcb25b6"}, + {file = "wrapt-2.1.2-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:3d7b6fd105f8b24e5bd23ccf41cb1d1099796524bcc6f7fbb8fe576c44befbc9"}, + {file = "wrapt-2.1.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:866abdbf4612e0b34764922ef8b1c5668867610a718d3053d59e24a5e5fcfc15"}, + {file = "wrapt-2.1.2-cp311-cp311-win32.whl", hash = "sha256:5a0a0a3a882393095573344075189eb2d566e0fd205a2b6414e9997b1b800a8b"}, + {file = "wrapt-2.1.2-cp311-cp311-win_amd64.whl", hash = "sha256:64a07a71d2730ba56f11d1a4b91f7817dc79bc134c11516b75d1921a7c6fcda1"}, + {file = "wrapt-2.1.2-cp311-cp311-win_arm64.whl", hash = "sha256:b89f095fe98bc12107f82a9f7d570dc83a0870291aeb6b1d7a7d35575f55d98a"}, + {file = "wrapt-2.1.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:ff2aad9c4cda28a8f0653fc2d487596458c2a3f475e56ba02909e950a9efa6a9"}, + {file = "wrapt-2.1.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6433ea84e1cfacf32021d2a4ee909554ade7fd392caa6f7c13f1f4bf7b8e8748"}, + {file = "wrapt-2.1.2-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:c20b757c268d30d6215916a5fa8461048d023865d888e437fab451139cad6c8e"}, + {file = "wrapt-2.1.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:79847b83eb38e70d93dc392c7c5b587efe65b3e7afcc167aa8abd5d60e8761c8"}, + {file = "wrapt-2.1.2-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:f8fba1bae256186a83d1875b2b1f4e2d1242e8fac0f58ec0d7e41b26967b965c"}, + {file = "wrapt-2.1.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e3d3b35eedcf5f7d022291ecd7533321c4775f7b9cd0050a31a68499ba45757c"}, + {file = "wrapt-2.1.2-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:6f2c5390460de57fa9582bc8a1b7a6c86e1a41dfad74c5225fc07044c15cc8d1"}, + {file = "wrapt-2.1.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7dfa9f2cf65d027b951d05c662cc99ee3bd01f6e4691ed39848a7a5fffc902b2"}, + {file = "wrapt-2.1.2-cp312-cp312-win32.whl", hash = "sha256:eba8155747eb2cae4a0b913d9ebd12a1db4d860fc4c829d7578c7b989bd3f2f0"}, + {file = "wrapt-2.1.2-cp312-cp312-win_amd64.whl", hash = "sha256:1c51c738d7d9faa0b3601708e7e2eda9bf779e1b601dce6c77411f2a1b324a63"}, + {file = "wrapt-2.1.2-cp312-cp312-win_arm64.whl", hash = "sha256:c8e46ae8e4032792eb2f677dbd0d557170a8e5524d22acc55199f43efedd39bf"}, + {file = "wrapt-2.1.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:787fd6f4d67befa6fe2abdffcbd3de2d82dfc6fb8a6d850407c53332709d030b"}, + {file = "wrapt-2.1.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4bdf26e03e6d0da3f0e9422fd36bcebf7bc0eeb55fdf9c727a09abc6b9fe472e"}, + {file = "wrapt-2.1.2-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:bbac24d879aa22998e87f6b3f481a5216311e7d53c7db87f189a7a0266dafffb"}, + {file = "wrapt-2.1.2-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:16997dfb9d67addc2e3f41b62a104341e80cac52f91110dece393923c0ebd5ca"}, + {file = "wrapt-2.1.2-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:162e4e2ba7542da9027821cb6e7c5e068d64f9a10b5f15512ea28e954893a267"}, + {file = "wrapt-2.1.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f29c827a8d9936ac320746747a016c4bc66ef639f5cd0d32df24f5eacbf9c69f"}, + {file = "wrapt-2.1.2-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:a9dd9813825f7ecb018c17fd147a01845eb330254dff86d3b5816f20f4d6aaf8"}, + {file = "wrapt-2.1.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6f8dbdd3719e534860d6a78526aafc220e0241f981367018c2875178cf83a413"}, + {file = "wrapt-2.1.2-cp313-cp313-win32.whl", hash = "sha256:5c35b5d82b16a3bc6e0a04349b606a0582bc29f573786aebe98e0c159bc48db6"}, + {file = "wrapt-2.1.2-cp313-cp313-win_amd64.whl", hash = "sha256:f8bc1c264d8d1cf5b3560a87bbdd31131573eb25f9f9447bb6252b8d4c44a3a1"}, + {file = "wrapt-2.1.2-cp313-cp313-win_arm64.whl", hash = "sha256:3beb22f674550d5634642c645aba4c72a2c66fb185ae1aebe1e955fae5a13baf"}, + {file = "wrapt-2.1.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0fc04bc8664a8bc4c8e00b37b5355cffca2535209fba1abb09ae2b7c76ddf82b"}, + {file = "wrapt-2.1.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a9b9d50c9af998875a1482a038eb05755dfd6fe303a313f6a940bb53a83c3f18"}, + {file = "wrapt-2.1.2-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:2d3ff4f0024dd224290c0eabf0240f1bfc1f26363431505fb1b0283d3b08f11d"}, + {file = "wrapt-2.1.2-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3278c471f4468ad544a691b31bb856374fbdefb7fee1a152153e64019379f015"}, + {file = "wrapt-2.1.2-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a8914c754d3134a3032601c6984db1c576e6abaf3fc68094bb8ab1379d75ff92"}, + {file = "wrapt-2.1.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:ff95d4264e55839be37bafe1536db2ab2de19da6b65f9244f01f332b5286cfbf"}, + {file = "wrapt-2.1.2-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:76405518ca4e1b76fbb1b9f686cff93aebae03920cc55ceeec48ff9f719c5f67"}, + {file = "wrapt-2.1.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:c0be8b5a74c5824e9359b53e7e58bef71a729bacc82e16587db1c4ebc91f7c5a"}, + {file = "wrapt-2.1.2-cp313-cp313t-win32.whl", hash = "sha256:f01277d9a5fc1862f26f7626da9cf443bebc0abd2f303f41c5e995b15887dabd"}, + {file = "wrapt-2.1.2-cp313-cp313t-win_amd64.whl", hash = "sha256:84ce8f1c2104d2f6daa912b1b5b039f331febfeee74f8042ad4e04992bd95c8f"}, + {file = "wrapt-2.1.2-cp313-cp313t-win_arm64.whl", hash = "sha256:a93cd767e37faeddbe07d8fc4212d5cba660af59bdb0f6372c93faaa13e6e679"}, + {file = "wrapt-2.1.2-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:1370e516598854e5b4366e09ce81e08bfe94d42b0fd569b88ec46cc56d9164a9"}, + {file = "wrapt-2.1.2-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:6de1a3851c27e0bd6a04ca993ea6f80fc53e6c742ee1601f486c08e9f9b900a9"}, + {file = "wrapt-2.1.2-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:de9f1a2bbc5ac7f6012ec24525bdd444765a2ff64b5985ac6e0692144838542e"}, + {file = "wrapt-2.1.2-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:970d57ed83fa040d8b20c52fe74a6ae7e3775ae8cff5efd6a81e06b19078484c"}, + {file = "wrapt-2.1.2-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:3969c56e4563c375861c8df14fa55146e81ac11c8db49ea6fb7f2ba58bc1ff9a"}, + {file = "wrapt-2.1.2-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:57d7c0c980abdc5f1d98b11a2aa3bb159790add80258c717fa49a99921456d90"}, + {file = "wrapt-2.1.2-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:776867878e83130c7a04237010463372e877c1c994d449ca6aaafeab6aab2586"}, + {file = "wrapt-2.1.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:fab036efe5464ec3291411fabb80a7a39e2dd80bae9bcbeeca5087fdfa891e19"}, + {file = "wrapt-2.1.2-cp314-cp314-win32.whl", hash = "sha256:e6ed62c82ddf58d001096ae84ce7f833db97ae2263bff31c9b336ba8cfe3f508"}, + {file = "wrapt-2.1.2-cp314-cp314-win_amd64.whl", hash = "sha256:467e7c76315390331c67073073d00662015bb730c566820c9ca9b54e4d67fd04"}, + {file = "wrapt-2.1.2-cp314-cp314-win_arm64.whl", hash = "sha256:da1f00a557c66225d53b095a97eace0fc5349e3bfda28fa34ffae238978ee575"}, + {file = "wrapt-2.1.2-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:62503ffbc2d3a69891cf29beeaccdb4d5e0a126e2b6a851688d4777e01428dbb"}, + {file = "wrapt-2.1.2-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c7e6cd120ef837d5b6f860a6ea3745f8763805c418bb2f12eeb1fa6e25f22d22"}, + {file = "wrapt-2.1.2-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:3769a77df8e756d65fbc050333f423c01ae012b4f6731aaf70cf2bef61b34596"}, + {file = "wrapt-2.1.2-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a76d61a2e851996150ba0f80582dd92a870643fa481f3b3846f229de88caf044"}, + {file = "wrapt-2.1.2-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:6f97edc9842cf215312b75fe737ee7c8adda75a89979f8e11558dfff6343cc4b"}, + {file = "wrapt-2.1.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:4006c351de6d5007aa33a551f600404ba44228a89e833d2fadc5caa5de8edfbf"}, + {file = "wrapt-2.1.2-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:a9372fc3639a878c8e7d87e1556fa209091b0a66e912c611e3f833e2c4202be2"}, + {file = "wrapt-2.1.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:3144b027ff30cbd2fca07c0a87e67011adb717eb5f5bd8496325c17e454257a3"}, + {file = "wrapt-2.1.2-cp314-cp314t-win32.whl", hash = "sha256:3b8d15e52e195813efe5db8cec156eebe339aaf84222f4f4f051a6c01f237ed7"}, + {file = "wrapt-2.1.2-cp314-cp314t-win_amd64.whl", hash = "sha256:08ffa54146a7559f5b8df4b289b46d963a8e74ed16ba3687f99896101a3990c5"}, + {file = "wrapt-2.1.2-cp314-cp314t-win_arm64.whl", hash = "sha256:72aaa9d0d8e4ed0e2e98019cea47a21f823c9dd4b43c7b77bba6679ffcca6a00"}, + {file = "wrapt-2.1.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5e0fa9cc32300daf9eb09a1f5bdc6deb9a79defd70d5356ba453bcd50aef3742"}, + {file = "wrapt-2.1.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:710f6e5dfaf6a5d5c397d2d6758a78fecd9649deb21f1b645f5b57a328d63050"}, + {file = "wrapt-2.1.2-cp39-cp39-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:305d8a1755116bfdad5dda9e771dcb2138990a1d66e9edd81658816edf51aed1"}, + {file = "wrapt-2.1.2-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f0d8fc30a43b5fe191cf2b1a0c82bab2571dadd38e7c0062ee87d6df858dd06e"}, + {file = "wrapt-2.1.2-cp39-cp39-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a5d516e22aedb7c9c1d47cba1c63160b1a6f61ec2f3948d127cd38d5cfbb556f"}, + {file = "wrapt-2.1.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:45914e8efbe4b9d5102fcf0e8e2e3258b83a5d5fba9f8f7b6d15681e9d29ffe0"}, + {file = "wrapt-2.1.2-cp39-cp39-musllinux_1_2_riscv64.whl", hash = "sha256:478282ebd3795a089154fb16d3db360e103aa13d3b2ad30f8f6aac0d2207de0e"}, + {file = "wrapt-2.1.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:3756219045f73fb28c5d7662778e4156fbd06cf823c4d2d4b19f97305e52819c"}, + {file = "wrapt-2.1.2-cp39-cp39-win32.whl", hash = "sha256:b8aefb4dbb18d904b96827435a763fa42fc1f08ea096a391710407a60983ced8"}, + {file = "wrapt-2.1.2-cp39-cp39-win_amd64.whl", hash = "sha256:e5aeab8fe15c3dff75cfee94260dcd9cded012d4ff06add036c28fae7718593b"}, + {file = "wrapt-2.1.2-cp39-cp39-win_arm64.whl", hash = "sha256:f069e113743a21a3defac6677f000068ebb931639f789b5b226598e247a4c89e"}, + {file = "wrapt-2.1.2-py3-none-any.whl", hash = "sha256:b8fd6fa2b2c4e7621808f8c62e8317f4aae56e59721ad933bac5239d913cf0e8"}, + {file = "wrapt-2.1.2.tar.gz", hash = "sha256:3996a67eecc2c68fd47b4e3c564405a5777367adfd9b8abb58387b63ee83b21e"}, ] +[package.extras] +dev = ["pytest", "setuptools"] + [metadata] lock-version = "2.1" python-versions = ">=3.10, <4.0.0" -content-hash = "b4999552687ca998bc8b7ec6f53141801b789896051ea12986ef53acda8ce589" +content-hash = "f1cd5ad45b0c3344e42e7c0086497a62441b37fd366d8c09503b32c2abb3674d" diff --git a/pyproject.toml b/pyproject.toml index fba0299a..17ea5087 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -41,6 +41,7 @@ dependencies = [ "pynacl>=1.5.0, <2.0.0", "rdflib (>=7.1.4,<8.0.0)", "schemaorg (>=0.1.1,<0.2.0)", + "tomlkit (>=0.14.0,<0.15.0)", ] requires-python = ">=3.10, <4.0.0" From 23a11b3c0099dcfa4e46e9a86258d222cb4aab1c Mon Sep 17 00:00:00 2001 From: notactuallyfinn Date: Thu, 23 Apr 2026 16:10:16 +0200 Subject: [PATCH 247/247] add/ update comments and doc strings --- src/hermes/model/api.py | 5 +++++ src/hermes/model/context_manager.py | 23 +++++++++++++++++++---- src/hermes/model/merge/container.py | 26 +++++++++++++------------- src/hermes/model/types/ld_container.py | 12 ++++++------ src/hermes/model/types/ld_context.py | 13 +++++++++++-- src/hermes/model/types/ld_dict.py | 19 +++++++++---------- src/hermes/model/types/ld_list.py | 19 +++++++++++-------- 7 files changed, 74 insertions(+), 43 deletions(-) diff --git a/src/hermes/model/api.py b/src/hermes/model/api.py index 2b467636..2e654196 100644 --- a/src/hermes/model/api.py +++ b/src/hermes/model/api.py @@ -12,6 +12,7 @@ from hermes.model.types.ld_container import PYTHONIZED_LD_CONTAINER from hermes.model.types.ld_context import ALL_CONTEXTS from hermes.model.types.pyld_util import bundled_loader + from .context_manager import HermesContext from .error import HermesContextError @@ -37,7 +38,9 @@ def __init__( Returns: None: """ + # create context object ctx = ALL_CONTEXTS + [{**extra_vocabs}] if extra_vocabs is not None else ALL_CONTEXTS + # initialize underlying ld_dict super().__init__([ld_dict.from_dict(data, context=ctx).data_dict if data else {}], context=ctx) @classmethod @@ -92,7 +95,9 @@ def write_to_cache(self: Self, ctx: HermesContext, target_dir: str) -> None: Returns: None: """ + # open cache dir with ctx[target_dir] as cache: + # write expanded, context and compact version of self cache["codemeta"] = self.compact() cache["context"] = {"@context": self.full_context} cache["expanded"] = self.ld_value diff --git a/src/hermes/model/context_manager.py b/src/hermes/model/context_manager.py index 837c1518..8e61fa03 100644 --- a/src/hermes/model/context_manager.py +++ b/src/hermes/model/context_manager.py @@ -3,12 +3,13 @@ # SPDX-License-Identifier: Apache-2.0 # SPDX-FileContributor: Michael Meinel +# SPDX-FileContributor: Michael Fritzsche import json import os.path from pathlib import Path from types import TracebackType -from typing import Union +from typing import Optional from typing_extensions import Self from .error import HermesContextError @@ -42,11 +43,14 @@ def __enter__(self: Self) -> None: Returns: None: """ + # check if the cache_dir exists if self._cache_dir.is_dir(): + # load all files from the cache dir and cache the contents for filepath in self._cache_dir.glob('*'): basename, _ = os.path.splitext(filepath.name) self._cached_data[basename] = json.load(filepath.open('r')) + # return the cache object return self def __getitem__(self: Self, item: str) -> dict: @@ -59,11 +63,14 @@ def __getitem__(self: Self, item: str) -> dict: Returns: dict: The JSON value in the given file. """ + # check whether or not the given file was already loaded if item not in self._cached_data: + # construct the file path as well as load and cache the file filepath = self._cache_dir / f'{item}.json' if filepath.is_file(): self._cached_data[item] = json.load(filepath.open('r')) + # return the loaded json return self._cached_data[item] def __setitem__(self: Self, key: str, value: dict) -> None: @@ -78,13 +85,14 @@ def __setitem__(self: Self, key: str, value: dict) -> None: Returns: None: """ + # update the value of the cache self._cached_data[key] = value def __exit__( self: Self, - exc_type: Union[type[BaseException], None], - exc_val: Union[BaseException, None], - exc_tb: Union[TracebackType, None] + exc_type: Optional[type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType] ) -> None: """ Updates the files from the cache. @@ -98,9 +106,12 @@ def __exit__( None: """ if exc_type is None: + # If the exit did not happen because of an exception: + # create the cache dir (if necessary) and write the cached json data self._cache_dir.mkdir(exist_ok=True, parents=True) for basename, data in self._cached_data.items(): + # create complete file path and write the data cachefile = self._cache_dir / f'{basename}.json' json.dump(data, cachefile.open('w')) @@ -158,10 +169,12 @@ def finalize_step(self: Self, step: str) -> None: ValueError: If no step can be removed. ValueError: If the given step is not the last one. """ + # check if the given step was prepared last if len(self._current_step) < 1: raise ValueError("There is no step to end.") if self._current_step[-1] != step: raise ValueError(f"Cannot end step {step} while in {self._current_step[-1]}.") + # remove the last step (i.e. the given one) self._current_step.pop() def __getitem__(self: Self, source_name: str) -> HermesCache: @@ -177,7 +190,9 @@ def __getitem__(self: Self, source_name: str) -> HermesCache: Raises: HermesContextError: If no step has been prepared (i.e. no current cache dir is set). """ + # check if a step is prepared if len(self._current_step) < 1: raise HermesContextError("Prepare a step first.") + # build the dir of the cache and return the HermesCache for it subdir = self.cache_dir / self._current_step[-1] / source_name return HermesCache(subdir) diff --git a/src/hermes/model/merge/container.py b/src/hermes/model/merge/container.py index a4ae1e2c..e9cc03a2 100644 --- a/src/hermes/model/merge/container.py +++ b/src/hermes/model/merge/container.py @@ -7,7 +7,7 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Any, Callable, Union +from typing import TYPE_CHECKING, Any, Callable, Optional, Union from typing_extensions import Self from hermes.model.types import ld_container, ld_context, ld_dict, ld_list @@ -15,8 +15,8 @@ BASIC_TYPE, EXPANDED_JSON_LD_VALUE, JSON_LD_CONTEXT_DICT, JSON_LD_VALUE, TIME_TYPE ) from hermes.model.types.pyld_util import bundled_loader -from .action import MergeError +from .action import MergeError if TYPE_CHECKING: from .action import MergeAction @@ -83,11 +83,11 @@ def __init__( self: "ld_merge_list", data: Union[list[str], list[dict[str, EXPANDED_JSON_LD_VALUE]]], *, - parent: Union[ld_container, None] = None, - key: Union[str, None] = None, - index: Union[int, None] = None, - context: Union[list[Union[str, JSON_LD_CONTEXT_DICT]], None] = None, - strategies: dict[Union[str, None], dict[Union[str, None], MergeAction]] = {} + parent: Optional[ld_container] = None, + key: Optional[str] = None, + index: Optional[int] = None, + context: Optional[list[Union[str, JSON_LD_CONTEXT_DICT]]] = None, + strategies: dict[Optional[str], dict[Optional[str], MergeAction]] = {} ) -> None: """ Create a new ld_merge_list. @@ -124,11 +124,11 @@ def __init__( self: Self, data: list[dict[str, EXPANDED_JSON_LD_VALUE]], *, - parent: Union[ld_dict, ld_list, None] = None, - key: Union[str, None] = None, - index: Union[int, None] = None, - context: Union[list[Union[str, JSON_LD_CONTEXT_DICT]], None] = None, - strategies: dict[Union[str, None], dict[Union[str, None], MergeAction]] = {} + parent: Optional[Union[ld_dict, ld_list]] = None, + key: Optional[str] = None, + index: Optional[int] = None, + context: Optional[list[Union[str, JSON_LD_CONTEXT_DICT]]] = None, + strategies: dict[Optional[str], dict[Optional[str], MergeAction]] = {} ) -> None: """ Create a new instance of an ld_merge_dict. See also :meth:`ld_dict.__init__`. @@ -199,7 +199,7 @@ def update(self: Self, other: ld_dict) -> None: # this works implicitly because ld_dict.update invokes self.__setitem__ which is overwritten by ld_merge_dict super().update(other) - def add_strategy(self: Self, strategy: dict[Union[str, None], dict[Union[str, None], MergeAction]]) -> None: + def add_strategy(self: Self, strategy: dict[Optional[str], dict[Optional[str], MergeAction]]) -> None: """ Adds ``strategy`` to the ``self.strategies``. diff --git a/src/hermes/model/types/ld_container.py b/src/hermes/model/types/ld_container.py index 3e888688..abc37fa9 100644 --- a/src/hermes/model/types/ld_container.py +++ b/src/hermes/model/types/ld_container.py @@ -10,7 +10,7 @@ from __future__ import annotations from datetime import date, datetime, time -from typing import Any, TypeAlias, TYPE_CHECKING, Union +from typing import Any, Optional, TypeAlias, TYPE_CHECKING, Union from typing_extensions import Self from .pyld_util import JsonLdProcessor, bundled_loader @@ -70,10 +70,10 @@ def __init__( self: Self, data: EXPANDED_JSON_LD_VALUE, *, - parent: Union[ld_dict, ld_list, None] = None, - key: Union[str, None] = None, - index: Union[int, None] = None, - context: Union[list[Union[str, JSON_LD_CONTEXT_DICT]], None] = None, + parent: Optional[Union[ld_dict, ld_list]] = None, + key: Optional[str] = None, + index: Optional[int] = None, + context: Optional[list[Union[str, JSON_LD_CONTEXT_DICT]]] = None, ) -> None: """ Create a new instance of an ld_container. @@ -303,7 +303,7 @@ def __str__(self: Self) -> str: return str(self.to_python()) def compact( - self: Self, context: Union[list[Union[JSON_LD_CONTEXT_DICT, str]], JSON_LD_CONTEXT_DICT, str, None] = None + self: Self, context: Optional[Union[list[Union[JSON_LD_CONTEXT_DICT, str]], JSON_LD_CONTEXT_DICT, str]] = None ) -> COMPACTED_JSON_LD_VALUE: """ Returns the compacted version of the given ld_container using its context only if none was supplied. diff --git a/src/hermes/model/types/ld_context.py b/src/hermes/model/types/ld_context.py index 09dd8085..681f4792 100644 --- a/src/hermes/model/types/ld_context.py +++ b/src/hermes/model/types/ld_context.py @@ -4,6 +4,7 @@ # SPDX-FileContributor: Michael Meinel # SPDX-FileContributor: Stephan Druskat +# SPDX-FileContributor: Michael Fritzsche from typing import Union from typing_extensions import Self @@ -69,7 +70,7 @@ class ContextPrefix: context dict[str | None, str]: The mapping of prefix its expanded IRI. """ - def __init__(self: Self, vocabularies: list[str | dict]) -> None: + def __init__(self: Self, vocabularies: list[Union[str, dict]]) -> None: """ If the list contains more than one string item, the last one will be used as the default vocabulary. If a prefix string is used more than once across all dictionaries in the list, the last item with this key will be included @@ -86,10 +87,12 @@ def __init__(self: Self, vocabularies: list[str | dict]) -> None: self.vocabularies = vocabularies self.context = {} + # add every entry in the vocabulary to the context for vocab in self.vocabularies: if isinstance(vocab, str): vocab = {None: vocab} + # add all prefix, base_iri pairs from vocab to context self.context.update( { prefix: base_iri @@ -98,7 +101,7 @@ def __init__(self: Self, vocabularies: list[str | dict]) -> None: } ) - def __getitem__(self: Self, compressed_term: str | tuple) -> str: + def __getitem__(self: Self, compressed_term: Union[str, tuple]) -> str: """ Gets the fully qualified IRI for a term from a vocabulary inside the initialized context. The vocabulary must have been added to the context at initialization. @@ -121,7 +124,11 @@ def __getitem__(self: Self, compressed_term: str | tuple) -> str: Returns: str: The fully qualified IRI for the passed term + + Raises: + HermesContextError: If the compressed term is '' or its prefix can't be expanded. """ + # seperate the prefix from the term if not isinstance(compressed_term, str): prefix, term = compressed_term elif ":" in compressed_term: @@ -133,11 +140,13 @@ def __getitem__(self: Self, compressed_term: str | tuple) -> str: else: raise HermesContextError(compressed_term) + # expand the prefix try: base_iri = self.context[prefix] except KeyError as ke: raise HermesContextError(prefix) from ke + # return the expanded term return base_iri + term diff --git a/src/hermes/model/types/ld_dict.py b/src/hermes/model/types/ld_dict.py index 8561887c..4d419ad7 100644 --- a/src/hermes/model/types/ld_dict.py +++ b/src/hermes/model/types/ld_dict.py @@ -8,7 +8,7 @@ from __future__ import annotations from collections.abc import Generator, Iterator, KeysView -from typing import Any, Literal, Union, TYPE_CHECKING +from typing import Any, Literal, Optional, Union, TYPE_CHECKING from typing_extensions import Self from .ld_container import ( @@ -41,10 +41,10 @@ def __init__( self: Self, data: list[dict[str, EXPANDED_JSON_LD_VALUE]], *, - parent: Union[ld_dict, ld_list, None] = None, - key: Union[str, None] = None, - index: Union[int, None] = None, - context: Union[list[Union[str, JSON_LD_CONTEXT_DICT]], None] = None + parent: Optional[Union[ld_dict, ld_list]] = None, + key: Optional[str] = None, + index: Optional[int] = None, + context: Optional[list[Union[str, JSON_LD_CONTEXT_DICT]]] = None ) -> None: """ Create a new instance of an ld_dict. @@ -133,7 +133,6 @@ def __contains__(self: Self, key: str) -> bool: """ # expand the key and check if self contains a key, value pair with it full_iri = self.ld_proc.expand_iri(self.active_ctx, key) - # FIXME: is that good? return full_iri in self.data_dict def __eq__( @@ -364,10 +363,10 @@ def from_dict( cls: type[Self], value: dict[str, PYTHONIZED_LD_CONTAINER], *, - parent: Union[ld_dict, ld_list, None] = None, - key: Union[str, None] = None, - context: Union[str, JSON_LD_CONTEXT_DICT, list[Union[str, JSON_LD_CONTEXT_DICT]], None] = None, - ld_type: Union[str, list[str], None] = None + parent: Optional[Union[ld_dict, ld_list]] = None, + key: Optional[str] = None, + context: Optional[Union[str, JSON_LD_CONTEXT_DICT, list[Union[str, JSON_LD_CONTEXT_DICT]]]] = None, + ld_type: Optional[Union[str, list[str]]] = None ) -> ld_dict: """ Creates a ld_dict from the given dict with the given parent, key, context and ld_type.\n diff --git a/src/hermes/model/types/ld_list.py b/src/hermes/model/types/ld_list.py index 3aa34c5d..14331472 100644 --- a/src/hermes/model/types/ld_list.py +++ b/src/hermes/model/types/ld_list.py @@ -11,7 +11,7 @@ from collections import deque from collections.abc import Generator, Hashable -from typing import Any, Union, TYPE_CHECKING +from typing import Any, Optional, Union, TYPE_CHECKING from typing_extensions import Self from .ld_container import ( @@ -42,10 +42,10 @@ def __init__( self: Self, data: EXPANDED_JSON_LD_VALUE, *, - parent: Union[ld_dict, ld_list, None] = None, - key: Union[str, None] = None, - index: Union[int, None] = None, - context: Union[list[Union[str, JSON_LD_CONTEXT_DICT]], None] = None, + parent: Optional[Union[ld_dict, ld_list]] = None, + key: Optional[str] = None, + index: Optional[int] = None, + context: Optional[list[Union[str, JSON_LD_CONTEXT_DICT]]] = None, ) -> None: """ Create a new instance of an ld_list. @@ -132,6 +132,9 @@ def __setitem__( Returns: None: + + Raises: + TypeError: If a slice is not assigned an iterable. """ if not isinstance(index, slice): # expand the value @@ -582,9 +585,9 @@ def from_list( cls: type[Self], value: list[Union[JSON_LD_VALUE, BASIC_TYPE, TIME_TYPE]], *, - parent: Union[ld_dict, ld_list, None] = None, - key: Union[str, None] = None, - context: Union[str, JSON_LD_CONTEXT_DICT, list[Union[str, JSON_LD_CONTEXT_DICT]], None] = None, + parent: Optional[Union[ld_dict, ld_list]] = None, + key: Optional[str] = None, + context: Optional[Union[str, JSON_LD_CONTEXT_DICT, list[Union[str, JSON_LD_CONTEXT_DICT]]]] = None, container_type: str = "@set" ) -> ld_list: """