diff --git a/src/cpp_fqn_parser/fqn.py b/src/cpp_fqn_parser/fqn.py
index f7abba0..952e065 100644
--- a/src/cpp_fqn_parser/fqn.py
+++ b/src/cpp_fqn_parser/fqn.py
@@ -1,7 +1,8 @@
 from dataclasses import dataclass
-from typing import Optional, List
+from typing import Optional, List, Dict, Any
 
 from .scope import Scope
+from .utils import to_dict, check_keys
 
 
 @dataclass
@@ -47,3 +48,19 @@ def __eq__(self, other: object) -> bool:
                 self.template == other.template and
                 self.constant == other.constant and
                 self.volatile == other.volatile)
+
+    def to_dict(self) -> Dict[str, Any]:
+        return to_dict(self)
+
+    @staticmethod
+    def from_dict(data: Dict[str, Any]) -> 'FQN':
+        attrs: List[str] = ["name", "full_name", "return_type", "args", "scopes", "template", "constant", "volatile"]
+        check_keys(attrs, data)
+        return FQN(name=data["name"],
+                   full_name=data["full_name"],
+                   return_type=data["return_type"],
+                   args=data["args"],
+                   scopes=[Scope.from_dict(scope) for scope in data["scopes"]],
+                   template=data["template"],
+                   constant=data["constant"],
+                   volatile=data["volatile"])
diff --git a/src/cpp_fqn_parser/scope.py b/src/cpp_fqn_parser/scope.py
index 49feadf..d464ba2 100644
--- a/src/cpp_fqn_parser/scope.py
+++ b/src/cpp_fqn_parser/scope.py
@@ -1,5 +1,7 @@
 from dataclasses import dataclass
-from typing import Optional
+from typing import Optional, Dict, Any, List
+
+from .utils import to_dict, check_keys
 
 
 @dataclass
@@ -16,3 +18,13 @@ class Scope:
 
     def __eq__(self, other: object) -> bool:
         return isinstance(other, Scope) and self.name == other.name and self.template == other.template
+
+    def to_dict(self) -> Dict[str, Any]:
+        return to_dict(self)
+
+    @staticmethod
+    def from_dict(data: Dict[str, Any]) -> 'Scope':
+        attrs: List[str] = ["name", "template"]
+        check_keys(attrs, data)
+        return Scope(name=data["name"],
+                     template=data["template"])
diff --git a/src/cpp_fqn_parser/token.py b/src/cpp_fqn_parser/token.py
index 4540ce4..56884a5 100644
--- a/src/cpp_fqn_parser/token.py
+++ b/src/cpp_fqn_parser/token.py
@@ -1,5 +1,8 @@
+from typing import Dict, Any, List
 from dataclasses import dataclass
+
+from .utils import to_dict, check_keys
 
 
 @dataclass
 class Token:
@@ -24,3 +27,13 @@ def __eq__(self, other: object) -> bool:
             bool: True if `other` is a Token with the same type and value, False otherwise.
         """
         return isinstance(other, Token) and self.type_ == other.type_ and self.value == other.value
+
+    def to_dict(self) -> Dict[str, Any]:
+        return to_dict(self)
+
+    @staticmethod
+    def from_dict(data: Dict[str, Any]) -> 'Token':
+        attrs: List[str] = ["type_", "value"]
+        check_keys(attrs, data)
+        return Token(type_=data["type_"],
+                     value=data["value"])
diff --git a/src/cpp_fqn_parser/utils.py b/src/cpp_fqn_parser/utils.py
new file mode 100644
index 0000000..795a907
--- /dev/null
+++ b/src/cpp_fqn_parser/utils.py
@@ -0,0 +1,49 @@
+from typing import Dict, Any, List, Mapping
+
+
+def to_dict(obj: Any) -> Dict[str, Any]:
+    """
+    Recursively converts an object and its attributes into a dictionary.
+
+    This function inspects the given object and returns a dictionary of its attributes.
+    If any attribute is:
+    - An object with a `to_dict()` method, it calls that method.
+    - A list, it recursively serializes each item.
+    - A dict, it recursively serializes each value.
+    - A primitive type (str, int, float, etc.), it leaves it unchanged.
+
+    This is useful for converting nested objects into JSON-serializable dictionaries.
+
+    Args:
+        obj (Any): The object to convert.
+
+    Returns:
+        Dict[str, Any]: A dictionary representing the object's attributes and their serialized values.
+    """
+    def serialize(inner_obj):
+        if hasattr(inner_obj, "to_dict"):
+            return inner_obj.to_dict()
+        elif isinstance(inner_obj, list):
+            return [serialize(item) for item in inner_obj]
+        elif isinstance(inner_obj, dict):
+            return {k: serialize(v) for k, v in inner_obj.items()}
+        else:
+            return inner_obj
+
+    return {k: serialize(v) for k, v in vars(obj).items()}
+
+
+def check_keys(keys: List[str], data: Mapping[str, Any]) -> None:
+    """
+    Checks that all specified keys are present in the given mapping.
+
+    Args:
+        keys (List[str]): A list of keys to check for.
+        data (Mapping[str, Any]): The dictionary-like object to check.
+
+    Raises:
+        KeyError: If any key in `keys` is missing from `data`.
+    """
+    for key in keys:
+        if key not in data:
+            raise KeyError(f"Missing key '{key}'")
diff --git a/test_data/fqns.json b/test_data/fqns.json
new file mode 100644
index 0000000..e3ec927
--- /dev/null
+++ b/test_data/fqns.json
@@ -0,0 +1,27 @@
+[
+    {
+        "fqn": "one::two::three()",
+        "tokens": [
+            {"type_": "MEMBER", "value": "one"},
+            {"type_": "SCOPE", "value": "::"},
+            {"type_": "MEMBER", "value": "two"},
+            {"type_": "SCOPE", "value": "::"},
+            {"type_": "MEMBER", "value": "three"},
+            {"type_": "PARENTHESIS_START", "value": "("},
+            {"type_": "PARENTHESIS_END", "value": ")"}
+        ],
+        "parser": {
+            "name": "three",
+            "full_name": "one::two::three()",
+            "return_type": null,
+            "args": null,
+            "scopes": [
+                {"name": "one", "template": null},
+                {"name": "two", "template": null}
+            ],
+            "template": null,
+            "constant": false,
+            "volatile": false
+        }
+    }
+]
\ No newline at end of file
diff --git a/tests/conftest.py b/tests/conftest.py
index aa56fdb..5d22d22 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -1,3 +1,5 @@
+from pathlib import Path
+import json
 from typing import List
 
 import pytest
@@ -5,38 +7,12 @@
 from src.cpp_fqn_parser import Token, FQN, Scope
 
 
-@pytest.fixture
-def simple_fqn_input() -> str:
-    return "one::two::three()"
-
-
-@pytest.fixture
-def simple_tokenizer_expected() -> List[Token]:
-    return [
-        Token('MEMBER', 'one'),
-        Token('SCOPE', '::'),
-        Token('MEMBER', 'two'),
-        Token('SCOPE', '::'),
-        Token('MEMBER', 'three'),
-        Token('PARENTHESIS_START', '('),
-        Token('PARENTHESIS_END', ')'),
-    ]
-
-
-@pytest.fixture
-def simple_parser_expected():
-    scopes = [
-        Scope(name="one", template=None),
-        Scope(name="two", template=None)
-    ]
-    return FQN(name="three",
-               full_name="one::two::three()",
-               return_type=None,
-               args=None,
-               scopes=scopes,
-               template=None,
-               constant=False,
-               volatile=False)
+def pytest_generate_tests(metafunc):
+    if "fqn_dict" in metafunc.fixturenames:
+        path = Path(__file__).parent.parent / "test_data" / "fqns.json"
+        with path.open() as f:
+            fqns = json.load(f)
+        metafunc.parametrize("fqn_dict", fqns)
 
 
 @pytest.fixture
diff --git a/tests/test_parser.py b/tests/test_parser.py
index b10bd50..9458634 100644
--- a/tests/test_parser.py
+++ b/tests/test_parser.py
@@ -1,10 +1,11 @@
 from src.cpp_fqn_parser import Parser, FQN
 
 
-def test_simple_fqn(simple_fqn_input: str, simple_parser_expected: FQN) -> None:
-    parser: Parser = Parser(simple_fqn_input)
+def test_parser_fqn(fqn_dict: dict) -> None:
+    parser: Parser = Parser(fqn_dict["fqn"])
     result: FQN = parser.parse()
-    assert result == simple_parser_expected
+    expected: FQN = FQN.from_dict(fqn_dict["parser"])
+    assert result == expected
 
 
 def test_fqn(fqn_input: str, parser_expected: FQN) -> None:
diff --git a/tests/test_tokenizer.py b/tests/test_tokenizer.py
index 431df04..3b7f7e7 100644
--- a/tests/test_tokenizer.py
+++ b/tests/test_tokenizer.py
@@ -3,10 +3,12 @@
 from src.cpp_fqn_parser import Tokenizer, Token
 
 
-def test_simple_fqn(simple_fqn_input: str, simple_tokenizer_expected: List[Token]) -> None:
-    tokenizer: Tokenizer = Tokenizer(simple_fqn_input)
+def test_tokenizer_fqn(fqn_dict: dict) -> None:
+    tokenizer: Tokenizer = Tokenizer(fqn_dict["fqn"])
     result: List[Token] = list(tokenizer.get_all_tokens())
-    assert result == simple_tokenizer_expected
+    expected: List[Token] = [Token.from_dict(token)
+                             for token in fqn_dict["tokens"]]
+    assert result == expected
 
 
 def test_fqn(fqn_input: str, tokenizer_expected: List[Token]) -> None:
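
A quick round-trip sketch of the new serialization helpers (hypothetical usage, not part of the patch; it assumes the package is importable as src.cpp_fqn_parser, as in the tests):

from src.cpp_fqn_parser import FQN, Scope

# Build the same FQN the removed simple_parser_expected fixture produced.
fqn = FQN(name="three",
          full_name="one::two::three()",
          return_type=None,
          args=None,
          scopes=[Scope(name="one", template=None),
                  Scope(name="two", template=None)],
          template=None,
          constant=False,
          volatile=False)

# utils.to_dict recurses: each Scope exposes to_dict(), so the scopes list
# becomes a list of plain dicts, ready for json.dumps.
data = fqn.to_dict()
assert data["scopes"][0] == {"name": "one", "template": None}

# from_dict validates the keys via check_keys (KeyError on a missing field),
# then rebuilds the nested objects; the dataclass __eq__ confirms the round trip.
assert FQN.from_dict(data) == fqn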
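
And a sketch of how the conftest hook feeds the tests (hypothetical, run from the repo root; it mirrors pytest_generate_tests rather than replacing it):

import json
from pathlib import Path

from src.cpp_fqn_parser import Token

# Each JSON object in test_data/fqns.json becomes one fqn_dict parameter
# for every test that declares an fqn_dict argument.
fqns = json.loads(Path("test_data/fqns.json").read_text())
for fqn_dict in fqns:
    # Token.from_dict checks for "type_" and "value" before constructing.
    tokens = [Token.from_dict(t) for t in fqn_dict["tokens"]]
    print(fqn_dict["fqn"], "->", len(tokens), "tokens")

Adding a test case therefore means appending one object to fqns.json; no new fixtures or test code are required.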