From 032f7ef4d4c0d54d84f85f7a209d690dbe38f8a2 Mon Sep 17 00:00:00 2001 From: ArnauBN <117755502+ArnauBN@users.noreply.github.com> Date: Sun, 20 Jul 2025 19:27:16 +0200 Subject: [PATCH 1/2] to_dict, from_dict and test from json file --- src/cpp_fqn_parser/fqn.py | 22 +++++++++++++++++++- src/cpp_fqn_parser/scope.py | 17 +++++++++++++++- src/cpp_fqn_parser/token.py | 16 +++++++++++++++ src/cpp_fqn_parser/utils.py | 33 ++++++++++++++++++++++++++++++ test_data/fqns.json | 27 +++++++++++++++++++++++++ tests/conftest.py | 40 ++++++++----------------------------- tests/test_parser.py | 7 ++++--- tests/test_tokenizer.py | 8 +++++--- 8 files changed, 130 insertions(+), 40 deletions(-) create mode 100644 src/cpp_fqn_parser/utils.py create mode 100644 test_data/fqns.json diff --git a/src/cpp_fqn_parser/fqn.py b/src/cpp_fqn_parser/fqn.py index f7abba0..6710c35 100644 --- a/src/cpp_fqn_parser/fqn.py +++ b/src/cpp_fqn_parser/fqn.py @@ -1,7 +1,8 @@ from dataclasses import dataclass -from typing import Optional, List +from typing import Optional, List, Dict, Any from .scope import Scope +from .utils import to_dict @dataclass @@ -47,3 +48,22 @@ def __eq__(self, other: object) -> bool: self.template == other.template and self.constant == other.constant and self.volatile == other.volatile) + + def to_dict(self) -> Dict[str, Any]: + return to_dict(self) + + @staticmethod + def from_dict(data: Dict[str, Any]) -> 'FQN': + attrs = ["name", "full_name", "return_type", "args", "scopes", "template", "constant", "volatile"] + for attr in attrs: + if attr not in data: + raise KeyError(f"Missing key '{attr}'") + + return FQN(name=data["name"], + full_name=data["full_name"], + return_type=data["return_type"], + args=data["args"], + scopes=[Scope.from_dict(scope) for scope in data["scopes"]], + template=data["template"], + constant=data["constant"], + volatile=data["volatile"]) diff --git a/src/cpp_fqn_parser/scope.py b/src/cpp_fqn_parser/scope.py index 49feadf..a3475a3 100644 --- a/src/cpp_fqn_parser/scope.py +++ b/src/cpp_fqn_parser/scope.py @@ -1,5 +1,7 @@ from dataclasses import dataclass -from typing import Optional +from typing import Optional, Dict, Any + +from .utils import to_dict @dataclass @@ -16,3 +18,16 @@ class Scope: def __eq__(self, other: object) -> bool: return isinstance(other, Scope) and self.name == other.name and self.template == other.template + + def to_dict(self) -> Dict[str, Any]: + return to_dict(self) + + @staticmethod + def from_dict(data: Dict[str, Any]) -> 'Scope': + attrs = ["name", "template"] + for attr in attrs: + if attr not in data: + raise KeyError(f"Missing key '{attr}'") + + return Scope(name=data["name"], + template=data["template"]) diff --git a/src/cpp_fqn_parser/token.py b/src/cpp_fqn_parser/token.py index 4540ce4..ec33d47 100644 --- a/src/cpp_fqn_parser/token.py +++ b/src/cpp_fqn_parser/token.py @@ -1,5 +1,8 @@ +from typing import Dict, Any from dataclasses import dataclass +from .utils import to_dict + @dataclass class Token: @@ -24,3 +27,16 @@ def __eq__(self, other: object) -> bool: bool: True if `other` is a Token with the same type and value, False otherwise. 
""" return isinstance(other, Token) and self.type_ == other.type_ and self.value == other.value + + def to_dict(self) -> Dict[str, Any]: + return to_dict(self) + + @staticmethod + def from_dict(data: Dict[str, Any]) -> 'Token': + attrs = ["type_", "value"] + for attr in attrs: + if attr not in data: + raise KeyError(f"Missing key '{attr}'") + + return Token(type_=data["type_"], + value=data["value"]) diff --git a/src/cpp_fqn_parser/utils.py b/src/cpp_fqn_parser/utils.py new file mode 100644 index 0000000..f2a978f --- /dev/null +++ b/src/cpp_fqn_parser/utils.py @@ -0,0 +1,33 @@ +from typing import Dict, Any + + +def to_dict(obj: Any) -> Dict[str, Any]: + """ + Recursively converts an object and its attributes into a dictionary. + + This function inspects the given object and returns a dictionary of its attributes. + If any attribute is: + - An object with a `to_dict()` method, it calls that method. + - A list, it recursively serializes each item. + - A dict, it recursively serializes each value. + - A primitive type (str, int, float, etc.), it leaves it unchanged. + + This is useful for converting nested objects into JSON-serializable dictionaries. + + Args: + obj (Any): The object to convert. + + Returns: + Dict[str, Any]: A dictionary representing the object's attributes and their serialized values. + """ + def serialize(inner_obj): + if hasattr(inner_obj, "to_dict"): + return inner_obj.to_dict() + elif isinstance(inner_obj, list): + return [serialize(item) for item in inner_obj] + elif isinstance(inner_obj, dict): + return {k: serialize(v) for k, v in inner_obj.items()} + else: + return inner_obj + + return {k: serialize(v) for k, v in vars(obj).items()} diff --git a/test_data/fqns.json b/test_data/fqns.json new file mode 100644 index 0000000..e3ec927 --- /dev/null +++ b/test_data/fqns.json @@ -0,0 +1,27 @@ +[ + { + "fqn": "one::two::three()", + "tokens": [ + {"type_": "MEMBER", "value": "one"}, + {"type_": "SCOPE", "value": "::"}, + {"type_": "MEMBER", "value": "two"}, + {"type_": "SCOPE", "value": "::"}, + {"type_": "MEMBER", "value": "three"}, + {"type_": "PARENTHESIS_START", "value": "("}, + {"type_": "PARENTHESIS_END", "value": ")"} + ], + "parser": { + "name": "three", + "full_name": "one::two::three()", + "return_type": null, + "args": null, + "scopes": [ + {"name": "one", "template": null}, + {"name": "two", "template": null} + ], + "template": null, + "constant": false, + "volatile": false + } + } +] \ No newline at end of file diff --git a/tests/conftest.py b/tests/conftest.py index aa56fdb..5d22d22 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,3 +1,5 @@ +from pathlib import Path +import json from typing import List import pytest @@ -5,38 +7,12 @@ from src.cpp_fqn_parser import Token, FQN, Scope -@pytest.fixture -def simple_fqn_input() -> str: - return "one::two::three()" - - -@pytest.fixture -def simple_tokenizer_expected() -> List[Token]: - return [ - Token('MEMBER', 'one'), - Token('SCOPE', '::'), - Token('MEMBER', 'two'), - Token('SCOPE', '::'), - Token('MEMBER', 'three'), - Token('PARENTHESIS_START', '('), - Token('PARENTHESIS_END', ')'), - ] - - -@pytest.fixture -def simple_parser_expected(): - scopes = [ - Scope(name="one", template=None), - Scope(name="two", template=None) - ] - return FQN(name="three", - full_name="one::two::three()", - return_type=None, - args=None, - scopes=scopes, - template=None, - constant=False, - volatile=False) +def pytest_generate_tests(metafunc): + if "fqn_dict" in metafunc.fixturenames: + path = Path(__file__).parent.parent / 
"test_data" / "fqns.json" + with path.open() as f: + fqns = json.load(f) + metafunc.parametrize("fqn_dict", fqns) @pytest.fixture diff --git a/tests/test_parser.py b/tests/test_parser.py index b10bd50..9458634 100644 --- a/tests/test_parser.py +++ b/tests/test_parser.py @@ -1,10 +1,11 @@ from src.cpp_fqn_parser import Parser, FQN -def test_simple_fqn(simple_fqn_input: str, simple_parser_expected: FQN) -> None: - parser: Parser = Parser(simple_fqn_input) +def test_tokenizer_fqn(fqn_dict: dict): + parser: Parser = Parser(fqn_dict["fqn"]) result: FQN = parser.parse() - assert result == simple_parser_expected + expected = FQN.from_dict(fqn_dict["parser"]) + assert result == expected def test_fqn(fqn_input: str, parser_expected: FQN) -> None: diff --git a/tests/test_tokenizer.py b/tests/test_tokenizer.py index 431df04..3b7f7e7 100644 --- a/tests/test_tokenizer.py +++ b/tests/test_tokenizer.py @@ -3,10 +3,12 @@ from src.cpp_fqn_parser import Tokenizer, Token -def test_simple_fqn(simple_fqn_input: str, simple_tokenizer_expected: List[Token]) -> None: - tokenizer: Tokenizer = Tokenizer(simple_fqn_input) +def test_tokenizer_fqn(fqn_dict: dict): + tokenizer: Tokenizer = Tokenizer(fqn_dict["fqn"]) result: List[Token] = list(tokenizer.get_all_tokens()) - assert result == simple_tokenizer_expected + expected = [Token.from_dict(token) + for token in fqn_dict["tokens"]] + assert result == expected def test_fqn(fqn_input: str, tokenizer_expected: List[Token]) -> None: From 6fc92199ed8cdeaead07c30c26bdadda166d1e8a Mon Sep 17 00:00:00 2001 From: ArnauBN <117755502+ArnauBN@users.noreply.github.com> Date: Sun, 20 Jul 2025 19:32:14 +0200 Subject: [PATCH 2/2] add check_keys function --- src/cpp_fqn_parser/fqn.py | 9 +++------ src/cpp_fqn_parser/scope.py | 11 ++++------- src/cpp_fqn_parser/token.py | 11 ++++------- src/cpp_fqn_parser/utils.py | 18 +++++++++++++++++- 4 files changed, 28 insertions(+), 21 deletions(-) diff --git a/src/cpp_fqn_parser/fqn.py b/src/cpp_fqn_parser/fqn.py index 6710c35..952e065 100644 --- a/src/cpp_fqn_parser/fqn.py +++ b/src/cpp_fqn_parser/fqn.py @@ -2,7 +2,7 @@ from typing import Optional, List, Dict, Any from .scope import Scope -from .utils import to_dict +from .utils import to_dict, check_keys @dataclass @@ -54,11 +54,8 @@ def to_dict(self) -> Dict[str, Any]: @staticmethod def from_dict(data: Dict[str, Any]) -> 'FQN': - attrs = ["name", "full_name", "return_type", "args", "scopes", "template", "constant", "volatile"] - for attr in attrs: - if attr not in data: - raise KeyError(f"Missing key '{attr}'") - + attrs: List[str] = ["name", "full_name", "return_type", "args", "scopes", "template", "constant", "volatile"] + check_keys(attrs, data) return FQN(name=data["name"], full_name=data["full_name"], return_type=data["return_type"], diff --git a/src/cpp_fqn_parser/scope.py b/src/cpp_fqn_parser/scope.py index a3475a3..d464ba2 100644 --- a/src/cpp_fqn_parser/scope.py +++ b/src/cpp_fqn_parser/scope.py @@ -1,7 +1,7 @@ from dataclasses import dataclass -from typing import Optional, Dict, Any +from typing import Optional, Dict, Any, List -from .utils import to_dict +from .utils import to_dict, check_keys @dataclass @@ -24,10 +24,7 @@ def to_dict(self) -> Dict[str, Any]: @staticmethod def from_dict(data: Dict[str, Any]) -> 'Scope': - attrs = ["name", "template"] - for attr in attrs: - if attr not in data: - raise KeyError(f"Missing key '{attr}'") - + attrs: List[str] = ["name", "template"] + check_keys(attrs, data) return Scope(name=data["name"], template=data["template"]) diff --git 
a/src/cpp_fqn_parser/token.py b/src/cpp_fqn_parser/token.py index ec33d47..56884a5 100644 --- a/src/cpp_fqn_parser/token.py +++ b/src/cpp_fqn_parser/token.py @@ -1,7 +1,7 @@ -from typing import Dict, Any +from typing import Dict, Any, List from dataclasses import dataclass -from .utils import to_dict +from .utils import to_dict, check_keys @dataclass @@ -33,10 +33,7 @@ def to_dict(self) -> Dict[str, Any]: @staticmethod def from_dict(data: Dict[str, Any]) -> 'Token': - attrs = ["type_", "value"] - for attr in attrs: - if attr not in data: - raise KeyError(f"Missing key '{attr}'") - + attrs: List[str] = ["type_", "value"] + check_keys(attrs, data) return Token(type_=data["type_"], value=data["value"]) diff --git a/src/cpp_fqn_parser/utils.py b/src/cpp_fqn_parser/utils.py index f2a978f..795a907 100644 --- a/src/cpp_fqn_parser/utils.py +++ b/src/cpp_fqn_parser/utils.py @@ -1,4 +1,4 @@ -from typing import Dict, Any +from typing import Dict, Any, List, Mapping def to_dict(obj: Any) -> Dict[str, Any]: @@ -31,3 +31,19 @@ def serialize(inner_obj): return inner_obj return {k: serialize(v) for k, v in vars(obj).items()} + + +def check_keys(keys: List[str], data: Mapping[str, Any]) -> None: + """ + Checks that all specified keys are present in the given mapping. + + Args: + keys (List[str]): A list of keys to check for. + data (Mapping[str, Any]): The dictionary-like object to check. + + Raises: + KeyError: If any key in `keys` is missing from `data`. + """ + for key in keys: + if key not in data: + raise KeyError(f"Missing key '{key}'")
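
A quick usage sketch of the round trip introduced in PATCH 1/2 (not part of the
diff; it assumes the `src.cpp_fqn_parser` import path used by the tests and
mirrors the first entry of test_data/fqns.json):

    import json

    from src.cpp_fqn_parser import FQN, Scope

    fqn = FQN(name="three",
              full_name="one::two::three()",
              return_type=None,
              args=None,
              scopes=[Scope(name="one", template=None),
                      Scope(name="two", template=None)],
              template=None,
              constant=False,
              volatile=False)

    # to_dict() recursively serializes nested Scope objects, so the result
    # can be dumped to JSON directly.
    payload = json.dumps(fqn.to_dict(), indent=2)

    # from_dict() rebuilds an equivalent FQN, which is how entries from
    # test_data/fqns.json drive the parametrized tests.
    restored = FQN.from_dict(json.loads(payload))
    assert restored == fqn

Keeping the expected tokens and parse results in fqns.json means new cases can
be added without touching the Python fixtures.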
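
Similarly, the check_keys() helper added in PATCH 2/2 only validates key
presence (it does not check value types), so a malformed JSON entry surfaces as
a KeyError from the corresponding from_dict(). A rough sketch, under the same
import assumptions as above:

    from src.cpp_fqn_parser import Token
    from src.cpp_fqn_parser.utils import check_keys

    # Passes silently: every required key is present (extra keys are ignored).
    check_keys(["type_", "value"], {"type_": "MEMBER", "value": "one"})

    try:
        Token.from_dict({"type_": "MEMBER"})  # "value" is missing
    except KeyError as exc:
        print(exc)  # "Missing key 'value'"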