Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
19 changes: 18 additions & 1 deletion src/cpp_fqn_parser/fqn.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,8 @@
from dataclasses import dataclass
from typing import Optional, List
from typing import Optional, List, Dict, Any

from .scope import Scope
from .utils import to_dict, check_keys


@dataclass
Expand Down Expand Up @@ -47,3 +48,19 @@ def __eq__(self, other: object) -> bool:
self.template == other.template and
self.constant == other.constant and
self.volatile == other.volatile)

def to_dict(self) -> Dict[str, Any]:
    """Serialize this FQN (and its nested scopes) into a JSON-compatible dict."""
    return to_dict(self)

@staticmethod
def from_dict(data: Dict[str, Any]) -> 'FQN':
    """Reconstruct an FQN from a dictionary produced by ``to_dict``.

    Raises:
        KeyError: If any required key is absent from ``data``.
    """
    required: List[str] = ["name", "full_name", "return_type", "args",
                           "scopes", "template", "constant", "volatile"]
    check_keys(required, data)
    # Plain fields pass through unchanged; scopes are rebuilt recursively.
    kwargs = {key: data[key] for key in required if key != "scopes"}
    kwargs["scopes"] = [Scope.from_dict(entry) for entry in data["scopes"]]
    return FQN(**kwargs)
14 changes: 13 additions & 1 deletion src/cpp_fqn_parser/scope.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,7 @@
from dataclasses import dataclass
from typing import Optional
from typing import Optional, Dict, Any, List

from .utils import to_dict, check_keys


@dataclass
Expand All @@ -16,3 +18,13 @@ class Scope:

def __eq__(self, other: object) -> bool:
    """Two Scopes are equal when both their name and template match."""
    if not isinstance(other, Scope):
        return False
    return (self.name, self.template) == (other.name, other.template)

def to_dict(self) -> Dict[str, Any]:
    """Serialize this Scope into a JSON-compatible dict."""
    return to_dict(self)

@staticmethod
def from_dict(data: Dict[str, Any]) -> 'Scope':
    """Build a Scope from a mapping holding ``name`` and ``template``.

    Raises:
        KeyError: If either required key is absent from ``data``.
    """
    required: List[str] = ["name", "template"]
    check_keys(required, data)
    return Scope(**{key: data[key] for key in required})
13 changes: 13 additions & 0 deletions src/cpp_fqn_parser/token.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,8 @@
from typing import Dict, Any, List
from dataclasses import dataclass

from .utils import to_dict, check_keys


@dataclass
class Token:
Expand All @@ -24,3 +27,13 @@ def __eq__(self, other: object) -> bool:
bool: True if `other` is a Token with the same type and value, False otherwise.
"""
return isinstance(other, Token) and self.type_ == other.type_ and self.value == other.value

def to_dict(self) -> Dict[str, Any]:
    """Serialize this Token into a JSON-compatible dict."""
    return to_dict(self)

@staticmethod
def from_dict(data: Dict[str, Any]) -> 'Token':
    """Build a Token from a mapping holding ``type_`` and ``value``.

    Raises:
        KeyError: If either required key is absent from ``data``.
    """
    required: List[str] = ["type_", "value"]
    check_keys(required, data)
    return Token(**{key: data[key] for key in required})
49 changes: 49 additions & 0 deletions src/cpp_fqn_parser/utils.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,49 @@
from typing import Dict, Any, List, Mapping


def to_dict(obj: Any) -> Dict[str, Any]:
    """Recursively serialize an object's attributes into a plain dictionary.

    Each attribute value is converted as follows:
    - objects exposing a ``to_dict()`` method delegate to that method,
    - dicts have their values converted recursively (keys kept as-is),
    - lists have each element converted recursively,
    - anything else is passed through unchanged.

    Handy for turning nested dataclass-style objects into
    JSON-serializable structures.

    Args:
        obj (Any): Any object whose ``vars()`` view should be serialized.

    Returns:
        Dict[str, Any]: Attribute names mapped to their converted values.
    """
    def convert(value: Any) -> Any:
        # Delegation first, so custom serialization always wins.
        if hasattr(value, "to_dict"):
            return value.to_dict()
        if isinstance(value, dict):
            return {key: convert(item) for key, item in value.items()}
        if isinstance(value, list):
            return [convert(item) for item in value]
        return value

    return {name: convert(value) for name, value in vars(obj).items()}


def check_keys(keys: List[str], data: Mapping[str, Any]) -> None:
    """Verify that every key in *keys* is present in *data*.

    Args:
        keys (List[str]): Required keys.
        data (Mapping[str, Any]): The mapping to validate.

    Raises:
        KeyError: Naming the first required key missing from *data*.
    """
    missing = next((key for key in keys if key not in data), None)
    if missing is not None:
        raise KeyError(f"Missing key '{missing}'")
27 changes: 27 additions & 0 deletions test_data/fqns.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,27 @@
[
{
"fqn": "one::two::three()",
"tokens": [
{"type_": "MEMBER", "value": "one"},
{"type_": "SCOPE", "value": "::"},
{"type_": "MEMBER", "value": "two"},
{"type_": "SCOPE", "value": "::"},
{"type_": "MEMBER", "value": "three"},
{"type_": "PARENTHESIS_START", "value": "("},
{"type_": "PARENTHESIS_END", "value": ")"}
],
"parser": {
"name": "three",
"full_name": "one::two::three()",
"return_type": null,
"args": null,
"scopes": [
{"name": "one", "template": null},
{"name": "two", "template": null}
],
"template": null,
"constant": false,
"volatile": false
}
}
]
40 changes: 8 additions & 32 deletions tests/conftest.py
Original file line number Diff line number Diff line change
@@ -1,42 +1,18 @@
from pathlib import Path
import json
from typing import List

import pytest

from src.cpp_fqn_parser import Token, FQN, Scope


@pytest.fixture
def simple_fqn_input() -> str:
return "one::two::three()"


@pytest.fixture
def simple_tokenizer_expected() -> List[Token]:
return [
Token('MEMBER', 'one'),
Token('SCOPE', '::'),
Token('MEMBER', 'two'),
Token('SCOPE', '::'),
Token('MEMBER', 'three'),
Token('PARENTHESIS_START', '('),
Token('PARENTHESIS_END', ')'),
]


@pytest.fixture
def simple_parser_expected():
scopes = [
Scope(name="one", template=None),
Scope(name="two", template=None)
]
return FQN(name="three",
full_name="one::two::three()",
return_type=None,
args=None,
scopes=scopes,
template=None,
constant=False,
volatile=False)
def pytest_generate_tests(metafunc):
    """Parametrize any test requesting ``fqn_dict`` with entries from
    ``test_data/fqns.json`` (one test case per JSON entry)."""
    if "fqn_dict" not in metafunc.fixturenames:
        return
    data_file = Path(__file__).parent.parent / "test_data" / "fqns.json"
    entries = json.loads(data_file.read_text())
    metafunc.parametrize("fqn_dict", entries)


@pytest.fixture
Expand Down
7 changes: 4 additions & 3 deletions tests/test_parser.py
Original file line number Diff line number Diff line change
@@ -1,10 +1,11 @@
from src.cpp_fqn_parser import Parser, FQN


def test_simple_fqn(simple_fqn_input: str, simple_parser_expected: FQN) -> None:
parser: Parser = Parser(simple_fqn_input)
def test_tokenizer_fqn(fqn_dict: dict):
    """Parse each fixture FQN and compare against the expected parse dict."""
    # NOTE(review): the name says "tokenizer" but this exercises Parser —
    # consider renaming to test_parser_fqn.
    expected: FQN = FQN.from_dict(fqn_dict["parser"])
    parsed: FQN = Parser(fqn_dict["fqn"]).parse()
    assert parsed == expected


def test_fqn(fqn_input: str, parser_expected: FQN) -> None:
Expand Down
8 changes: 5 additions & 3 deletions tests/test_tokenizer.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,10 +3,12 @@
from src.cpp_fqn_parser import Tokenizer, Token


def test_simple_fqn(simple_fqn_input: str, simple_tokenizer_expected: List[Token]) -> None:
tokenizer: Tokenizer = Tokenizer(simple_fqn_input)
def test_tokenizer_fqn(fqn_dict: dict):
    """Tokenize each fixture FQN and compare against the expected tokens."""
    expected: List[Token] = [Token.from_dict(entry)
                             for entry in fqn_dict["tokens"]]
    actual: List[Token] = list(Tokenizer(fqn_dict["fqn"]).get_all_tokens())
    assert actual == expected


def test_fqn(fqn_input: str, tokenizer_expected: List[Token]) -> None:
Expand Down