From 8c1afe74c2c1841a7f038eab4e90dacc77b062ef Mon Sep 17 00:00:00 2001
From: ArnauBN <117755502+ArnauBN@users.noreply.github.com>
Date: Sun, 27 Jul 2025 09:11:41 +0200
Subject: [PATCH] Move test data to JSON

---
 test_data/fqns.json     |  87 ++++++++++++++++++++++++++++
 tests/conftest.py       | 123 +---------------------------------------
 tests/test_parser.py    |  18 ------
 tests/test_tokenizer.py |  18 ------
 4 files changed, 90 insertions(+), 156 deletions(-)

diff --git a/test_data/fqns.json b/test_data/fqns.json
index e3ec927..9708cdb 100644
--- a/test_data/fqns.json
+++ b/test_data/fqns.json
@@ -23,5 +23,92 @@
       "constant": false,
       "volatile": false
     }
+  },
+  {
+    "fqn": "int one_3hello0::tconstwo<mytemplate>::three(const four &) volatile",
+    "tokens": [
+      {"type_": "MEMBER", "value": "int"},
+      {"type_": "WHITESPACE", "value": " "},
+      {"type_": "MEMBER", "value": "one_3hello0"},
+      {"type_": "SCOPE", "value": "::"},
+      {"type_": "MEMBER", "value": "tconstwo"},
+      {"type_": "TEMPLATE_START", "value": "<"},
+      {"type_": "MEMBER", "value": "mytemplate"},
+      {"type_": "TEMPLATE_END", "value": ">"},
+      {"type_": "SCOPE", "value": "::"},
+      {"type_": "MEMBER", "value": "three"},
+      {"type_": "PARENTHESIS_START", "value": "("},
+      {"type_": "MEMBER", "value": "const"},
+      {"type_": "WHITESPACE", "value": " "},
+      {"type_": "MEMBER", "value": "four"},
+      {"type_": "WHITESPACE", "value": " "},
+      {"type_": "REFERENCE", "value": "&"},
+      {"type_": "PARENTHESIS_END", "value": ")"},
+      {"type_": "WHITESPACE", "value": " "},
+      {"type_": "MEMBER", "value": "volatile"}
+    ],
+    "parser": {
+      "name": "three",
+      "full_name": "int one_3hello0::tconstwo<mytemplate>::three(const four &) volatile",
+      "return_type": "int",
+      "args": ["const four &"],
+      "scopes": [
+        {"name": "one_3hello0", "template": null},
+        {"name": "tconstwo", "template": "<mytemplate>"}
+      ],
+      "template": null,
+      "constant": false,
+      "volatile": true
+    }
+  },
+  {
+    "fqn": "one::two::operator*()",
+    "tokens": [
+      {"type_": "MEMBER", "value": "one"},
+      {"type_": "SCOPE", "value": "::"},
+      {"type_": "MEMBER", "value": "two"},
+      {"type_": "SCOPE", "value": "::"},
+      {"type_": "OPERATOR", "value": "operator*"},
+      {"type_": "PARENTHESIS_START", "value": "("},
+      {"type_": "PARENTHESIS_END", "value": ")"}
+    ],
+    "parser": {
+      "name": "operator*",
+      "full_name": "one::two::operator*()",
+      "return_type": null,
+      "args": null,
+      "scopes": [
+        {"name": "one", "template": null},
+        {"name": "two", "template": null}
+      ],
+      "template": null,
+      "constant": false,
+      "volatile": false
+    }
+  },
+  {
+    "fqn": "one::two::operator []()",
+    "tokens": [
+      {"type_": "MEMBER", "value": "one"},
+      {"type_": "SCOPE", "value": "::"},
+      {"type_": "MEMBER", "value": "two"},
+      {"type_": "SCOPE", "value": "::"},
+      {"type_": "OPERATOR", "value": "operator[]"},
+      {"type_": "PARENTHESIS_START", "value": "("},
+      {"type_": "PARENTHESIS_END", "value": ")"}
+    ],
+    "parser": {
+      "name": "operator[]",
+      "full_name": "one::two::operator []()",
+      "return_type": null,
+      "args": null,
+      "scopes": [
+        {"name": "one", "template": null},
+        {"name": "two", "template": null}
+      ],
+      "template": null,
+      "constant": false,
+      "volatile": false
+    }
   }
 ]
\ No newline at end of file
diff --git a/tests/conftest.py b/tests/conftest.py
index 5d22d22..86bc634 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -1,129 +1,12 @@
 from pathlib import Path
 import json
-from typing import List
 
-import pytest
+from _pytest.python import Metafunc
 
-from src.cpp_fqn_parser import Token, FQN, Scope
-
 
-def pytest_generate_tests(metafunc):
+def pytest_generate_tests(metafunc: Metafunc) -> None:
     if "fqn_dict" in metafunc.fixturenames:
-        path = Path(__file__).parent.parent / "test_data" / "fqns.json"
+        path: Path = Path(__file__).parent.parent / "test_data" / "fqns.json"
         with path.open() as f:
             fqns = json.load(f)
         metafunc.parametrize("fqn_dict", fqns)
-
-
-@pytest.fixture
-def fqn_input() -> str:
-    return "int one_3hello0::tconstwo<mytemplate>::three(const four &) volatile"
-
-
-@pytest.fixture
-def tokenizer_expected() -> List[Token]:
-    return [
-        Token('MEMBER', 'int'),
-        Token('WHITESPACE', ' '),
-        Token('MEMBER', 'one_3hello0'),
-        Token('SCOPE', '::'),
-        Token('MEMBER', 'tconstwo'),
-        Token('TEMPLATE_START', '<'),
-        Token('MEMBER', 'mytemplate'),
-        Token('TEMPLATE_END', '>'),
-        Token('SCOPE', '::'),
-        Token('MEMBER', 'three'),
-        Token('PARENTHESIS_START', '('),
-        Token('MEMBER', 'const'),
-        Token('WHITESPACE', ' '),
-        Token('MEMBER', 'four'),
-        Token('WHITESPACE', ' '),
-        Token('REFERENCE', '&'),
-        Token('PARENTHESIS_END', ')'),
-        Token('WHITESPACE', ' '),
-        Token('MEMBER', 'volatile')
-    ]
-
-
-@pytest.fixture
-def parser_expected():
-    scopes = [
-        Scope(name="one_3hello0", template=None),
-        Scope(name="tconstwo", template="<mytemplate>")
-    ]
-    return FQN(name="three",
-               full_name="int one_3hello0::tconstwo<mytemplate>::three(const four &) volatile",
-               return_type="int",
-               args=["const four &"],
-               scopes=scopes,
-               template=None,
-               constant=False,
-               volatile=True)
-
-
-@pytest.fixture
-def operator_fqn_input() -> str:
-    return "one::two::operator*()"
-
-
-@pytest.fixture
-def operator_tokenizer_expected() -> List[Token]:
-    return [
-        Token('MEMBER', 'one'),
-        Token('SCOPE', '::'),
-        Token('MEMBER', 'two'),
-        Token('SCOPE', '::'),
-        Token('OPERATOR', 'operator*'),
-        Token('PARENTHESIS_START', '('),
-        Token('PARENTHESIS_END', ')'),
-    ]
-
-
-@pytest.fixture
-def operator_fqn_input_2() -> str:
-    return "one::two::operator []()"
-
-
-@pytest.fixture
-def operator_tokenizer_expected_2() -> List[Token]:
-    return [
-        Token('MEMBER', 'one'),
-        Token('SCOPE', '::'),
-        Token('MEMBER', 'two'),
-        Token('SCOPE', '::'),
-        Token('OPERATOR', 'operator[]'),
-        Token('PARENTHESIS_START', '('),
-        Token('PARENTHESIS_END', ')'),
-    ]
-
-
-@pytest.fixture
-def parser_operator_expected():
-    scopes = [
-        Scope(name="one", template=None),
-        Scope(name="two", template=None)
-    ]
-    return FQN(name="operator*",
-               full_name="one::two::operator*()",
-               return_type=None,
-               args=None,
-               scopes=scopes,
-               template=None,
-               constant=False,
-               volatile=False)
-
-
-@pytest.fixture
-def parser_operator_expected_2():
-    scopes = [
-        Scope(name="one", template=None),
-        Scope(name="two", template=None)
-    ]
-    return FQN(name="operator[]",
-               full_name="one::two::operator []()",
-               return_type=None,
-               args=None,
-               scopes=scopes,
-               template=None,
-               constant=False,
-               volatile=False)
diff --git a/tests/test_parser.py b/tests/test_parser.py
index 9458634..1a1fdf2 100644
--- a/tests/test_parser.py
+++ b/tests/test_parser.py
@@ -6,21 +6,3 @@ def test_tokenizer_fqn(fqn_dict: dict):
     result: FQN = parser.parse()
     expected = FQN.from_dict(fqn_dict["parser"])
     assert result == expected
-
-
-def test_fqn(fqn_input: str, parser_expected: FQN) -> None:
-    parser: Parser = Parser(fqn_input)
-    result: FQN = parser.parse()
-    assert result == parser_expected
-
-
-def test_operator_fqn(operator_fqn_input: str, parser_operator_expected: FQN) -> None:
-    parser: Parser = Parser(operator_fqn_input)
-    result: FQN = parser.parse()
-    assert result == parser_operator_expected
-
-
-def test_operator_fqn_2(operator_fqn_input_2: str, parser_operator_expected_2: FQN) -> None:
-    parser: Parser = Parser(operator_fqn_input_2)
-    result: FQN = parser.parse()
-    assert result == parser_operator_expected_2
diff --git a/tests/test_tokenizer.py b/tests/test_tokenizer.py
index 3b7f7e7..9eef783 100644
--- a/tests/test_tokenizer.py
+++ b/tests/test_tokenizer.py
@@ -9,21 +9,3 @@ def test_tokenizer_fqn(fqn_dict: dict):
     result: List[Token] = list(tokenizer.get_all_tokens())
     expected = [Token.from_dict(token) for token in fqn_dict["tokens"]]
     assert result == expected
-
-
-def test_fqn(fqn_input: str, tokenizer_expected: List[Token]) -> None:
-    tokenizer: Tokenizer = Tokenizer(fqn_input)
-    result: List[Token] = list(tokenizer.get_all_tokens())
-    assert result == tokenizer_expected
-
-
-def test_operator_fqn(operator_fqn_input: str, operator_tokenizer_expected: List[Token]) -> None:
-    tokenizer: Tokenizer = Tokenizer(operator_fqn_input)
-    result: List[Token] = list(tokenizer.get_all_tokens())
-    assert result == operator_tokenizer_expected
-
-
-def test_operator_fqn_2(operator_fqn_input_2: str, operator_tokenizer_expected_2: List[Token]) -> None:
-    tokenizer: Tokenizer = Tokenizer(operator_fqn_input_2)
-    result: List[Token] = list(tokenizer.get_all_tokens())
-    assert result == operator_tokenizer_expected_2
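
Note on the from_dict hooks the parametrized tests now lean on: Token.from_dict
and FQN.from_dict rebuild the expected objects from the JSON records in
fqns.json. Their real definitions live in src/cpp_fqn_parser and are not part
of this patch; the sketch below only illustrates the shape the tests assume,
with the dataclass layout and field names inferred from fqns.json rather than
taken from the actual module.

    from dataclasses import dataclass
    from typing import List, Optional


    @dataclass
    class Token:
        # Field name mirrors the JSON key "type_" (trailing underscore
        # avoids shadowing the built-in name).
        type_: str
        value: str

        @classmethod
        def from_dict(cls, d: dict) -> "Token":
            # One object in the "tokens" array maps onto one Token.
            return cls(type_=d["type_"], value=d["value"])


    @dataclass
    class Scope:
        name: str
        template: Optional[str] = None


    @dataclass
    class FQN:
        name: str
        full_name: str
        return_type: Optional[str]
        args: Optional[List[str]]
        scopes: List[Scope]
        template: Optional[str]
        constant: bool
        volatile: bool

        @classmethod
        def from_dict(cls, d: dict) -> "FQN":
            # "scopes" is the only nested field; everything else in the
            # "parser" record passes straight through to the constructor.
            return cls(**{**d, "scopes": [Scope(**s) for s in d["scopes"]]})

Because dataclasses generate __eq__, the test assertions reduce to plain
equality, e.g. Token.from_dict({"type_": "SCOPE", "value": "::"}) ==
Token("SCOPE", "::").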