87 changes: 87 additions & 0 deletions test_data/fqns.json
@@ -23,5 +23,92 @@
"constant": false,
"volatile": false
}
},
{
"fqn": "int one_3hello0::tconstwo<mytemplate>::three(const four &) volatile",
"tokens": [
{"type_": "MEMBER", "value": "int"},
{"type_": "WHITESPACE", "value": " "},
{"type_": "MEMBER", "value": "one_3hello0"},
{"type_": "SCOPE", "value": "::"},
{"type_": "MEMBER", "value": "tconstwo"},
{"type_": "TEMPLATE_START", "value": "<"},
{"type_": "MEMBER", "value": "mytemplate"},
{"type_": "TEMPLATE_END", "value": ">"},
{"type_": "SCOPE", "value": "::"},
{"type_": "MEMBER", "value": "three"},
{"type_": "PARENTHESIS_START", "value": "("},
{"type_": "MEMBER", "value": "const"},
{"type_": "WHITESPACE", "value": " "},
{"type_": "MEMBER", "value": "four"},
{"type_": "WHITESPACE", "value": " "},
{"type_": "REFERENCE", "value": "&"},
{"type_": "PARENTHESIS_END", "value": ")"},
{"type_": "WHITESPACE", "value": " "},
{"type_": "MEMBER", "value": "volatile"}
],
"parser": {
"name": "three",
"full_name": "int one_3hello0::tconstwo<mytemplate>::three(const four &) volatile",
"return_type": "int",
"args": ["const four &"],
"scopes": [
{"name": "one_3hello0", "template": null},
{"name": "tconstwo", "template": "<mytemplate>"}
],
"template": null,
"constant": false,
"volatile": true
}
},
{
"fqn": "one::two::operator*()",
"tokens": [
{"type_": "MEMBER", "value": "one"},
{"type_": "SCOPE", "value": "::"},
{"type_": "MEMBER", "value": "two"},
{"type_": "SCOPE", "value": "::"},
{"type_": "OPERATOR", "value": "operator*"},
{"type_": "PARENTHESIS_START", "value": "("},
{"type_": "PARENTHESIS_END", "value": ")"}
],
"parser": {
"name": "operator*",
"full_name": "one::two::operator*()",
"return_type": null,
"args": null,
"scopes": [
{"name": "one", "template": null},
{"name": "two", "template": null}
],
"template": null,
"constant": false,
"volatile": false
}
},
{
"fqn": "one::two::operator []()",
"tokens": [
{"type_": "MEMBER", "value": "one"},
{"type_": "SCOPE", "value": "::"},
{"type_": "MEMBER", "value": "two"},
{"type_": "SCOPE", "value": "::"},
{"type_": "OPERATOR", "value": "operator[]"},
{"type_": "PARENTHESIS_START", "value": "("},
{"type_": "PARENTHESIS_END", "value": ")"}
],
"parser": {
"name": "operator[]",
"full_name": "one::two::operator []()",
"return_type": null,
"args": null,
"scopes": [
{"name": "one", "template": null},
{"name": "two", "template": null}
],
"template": null,
"constant": false,
"volatile": false
}
}
]
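
Each entry in fqns.json now pairs an input "fqn" string with both the expected token stream and the expected parse result, so the tokenizer and parser suites can be driven from one file. A minimal sketch of how a single entry is consumed, assuming the from_dict constructors simply mirror the JSON keys (their implementations live in src.cpp_fqn_parser and are not shown in this diff):

import json
from pathlib import Path

from src.cpp_fqn_parser import FQN, Token

entries = json.loads(Path("test_data/fqns.json").read_text())
entry = entries[-1]  # the new "one::two::operator []()" case

# "tokens" holds type_/value pairs, e.g. Token('OPERATOR', 'operator[]')
tokens = [Token.from_dict(t) for t in entry["tokens"]]

# "parser" holds the FQN fields; "scopes" is a list of Scope-shaped dicts
expected = FQN.from_dict(entry["parser"])
assert expected.name == "operator[]" and expected.volatile is False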
123 changes: 3 additions & 120 deletions tests/conftest.py
@@ -1,129 +1,12 @@
 from pathlib import Path
 import json
-from typing import List
 
-import pytest
+from _pytest.python import Metafunc
-
-from src.cpp_fqn_parser import Token, FQN, Scope
 
 
-def pytest_generate_tests(metafunc):
+def pytest_generate_tests(metafunc: Metafunc) -> None:
     if "fqn_dict" in metafunc.fixturenames:
-        path = Path(__file__).parent.parent / "test_data" / "fqns.json"
+        path: Path = Path(__file__).parent.parent / "test_data" / "fqns.json"
         with path.open() as f:
             fqns = json.load(f)
         metafunc.parametrize("fqn_dict", fqns)
-
-
-@pytest.fixture
-def fqn_input() -> str:
-    return "int one_3hello0::tconstwo<mytemplate>::three(const four &) volatile"
-
-
-@pytest.fixture
-def tokenizer_expected() -> List[Token]:
-    return [
-        Token('MEMBER', 'int'),
-        Token('WHITESPACE', ' '),
-        Token('MEMBER', 'one_3hello0'),
-        Token('SCOPE', '::'),
-        Token('MEMBER', 'tconstwo'),
-        Token('TEMPLATE_START', '<'),
-        Token('MEMBER', 'mytemplate'),
-        Token('TEMPLATE_END', '>'),
-        Token('SCOPE', '::'),
-        Token('MEMBER', 'three'),
-        Token('PARENTHESIS_START', '('),
-        Token('MEMBER', 'const'),
-        Token('WHITESPACE', ' '),
-        Token('MEMBER', 'four'),
-        Token('WHITESPACE', ' '),
-        Token('REFERENCE', '&'),
-        Token('PARENTHESIS_END', ')'),
-        Token('WHITESPACE', ' '),
-        Token('MEMBER', 'volatile')
-    ]
-
-
-@pytest.fixture
-def parser_expected():
-    scopes = [
-        Scope(name="one_3hello0", template=None),
-        Scope(name="tconstwo", template="<mytemplate>")
-    ]
-    return FQN(name="three",
-               full_name="int one_3hello0::tconstwo<mytemplate>::three(const four &) volatile",
-               return_type="int",
-               args=["const four &"],
-               scopes=scopes,
-               template=None,
-               constant=False,
-               volatile=True)
-
-
-@pytest.fixture
-def operator_fqn_input() -> str:
-    return "one::two::operator*()"
-
-
-@pytest.fixture
-def operator_tokenizer_expected() -> List[Token]:
-    return [
-        Token('MEMBER', 'one'),
-        Token('SCOPE', '::'),
-        Token('MEMBER', 'two'),
-        Token('SCOPE', '::'),
-        Token('OPERATOR', 'operator*'),
-        Token('PARENTHESIS_START', '('),
-        Token('PARENTHESIS_END', ')'),
-    ]
-
-
-@pytest.fixture
-def operator_fqn_input_2() -> str:
-    return "one::two::operator []()"
-
-
-@pytest.fixture
-def operator_tokenizer_expected_2() -> List[Token]:
-    return [
-        Token('MEMBER', 'one'),
-        Token('SCOPE', '::'),
-        Token('MEMBER', 'two'),
-        Token('SCOPE', '::'),
-        Token('OPERATOR', 'operator[]'),
-        Token('PARENTHESIS_START', '('),
-        Token('PARENTHESIS_END', ')'),
-    ]
-
-
-@pytest.fixture
-def parser_operator_expected():
-    scopes = [
-        Scope(name="one", template=None),
-        Scope(name="two", template=None)
-    ]
-    return FQN(name="operator*",
-               full_name="one::two::operator*()",
-               return_type=None,
-               args=None,
-               scopes=scopes,
-               template=None,
-               constant=False,
-               volatile=False)
-
-
-@pytest.fixture
-def parser_operator_expected_2():
-    scopes = [
-        Scope(name="one", template=None),
-        Scope(name="two", template=None)
-    ]
-    return FQN(name="operator[]",
-               full_name="one::two::operator []()",
-               return_type=None,
-               args=None,
-               scopes=scopes,
-               template=None,
-               constant=False,
-               volatile=False)
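
The deleted fixtures each hard-coded one FQN case; the surviving hook replaces them with data-driven generation, so adding a case to fqns.json now yields new tokenizer and parser tests without touching any Python code. For comparison, a sketch of the same effect written as an explicit decorator (the inline case is abridged from fqns.json; pytest reports such dict-valued cases as test_...[fqn_dict0], [fqn_dict1], and so on):

import pytest

# Hypothetical inline equivalent of what pytest_generate_tests produces:
# one parametrized case per JSON entry.
CASES = [
    {"fqn": "one::two::operator*()"},
]

@pytest.mark.parametrize("fqn_dict", CASES)
def test_fqn_dict_shape(fqn_dict: dict) -> None:
    assert fqn_dict["fqn"]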
18 changes: 0 additions & 18 deletions tests/test_parser.py
@@ -6,21 +6,3 @@ def test_tokenizer_fqn(fqn_dict: dict):
     result: FQN = parser.parse()
     expected = FQN.from_dict(fqn_dict["parser"])
     assert result == expected
-
-
-def test_fqn(fqn_input: str, parser_expected: FQN) -> None:
-    parser: Parser = Parser(fqn_input)
-    result: FQN = parser.parse()
-    assert result == parser_expected
-
-
-def test_operator_fqn(operator_fqn_input: str, parser_operator_expected: FQN) -> None:
-    parser: Parser = Parser(operator_fqn_input)
-    result: FQN = parser.parse()
-    assert result == parser_operator_expected
-
-
-def test_operator_fqn_2(operator_fqn_input_2: str, parser_operator_expected_2: FQN) -> None:
-    parser: Parser = Parser(operator_fqn_input_2)
-    result: FQN = parser.parse()
-    assert result == parser_operator_expected_2
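
With the fixture-based tests gone, this module reduces to the single parametrized test (which kept the test_tokenizer_fqn name from the tokenizer module). A sketch of the plausible remaining file; the import lines and the Parser construction sit above the shown hunk, so they are assumed here:

from src.cpp_fqn_parser import FQN, Parser


def test_tokenizer_fqn(fqn_dict: dict):
    parser: Parser = Parser(fqn_dict["fqn"])  # assumed: parse the raw "fqn" string
    result: FQN = parser.parse()
    expected = FQN.from_dict(fqn_dict["parser"])
    assert result == expected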
18 changes: 0 additions & 18 deletions tests/test_tokenizer.py
@@ -9,21 +9,3 @@ def test_tokenizer_fqn(fqn_dict: dict):
     expected = [Token.from_dict(token)
                 for token in fqn_dict["tokens"]]
     assert result == expected
-
-
-def test_fqn(fqn_input: str, tokenizer_expected: List[Token]) -> None:
-    tokenizer: Tokenizer = Tokenizer(fqn_input)
-    result: List[Token] = list(tokenizer.get_all_tokens())
-    assert result == tokenizer_expected
-
-
-def test_operator_fqn(operator_fqn_input: str, operator_tokenizer_expected: List[Token]) -> None:
-    tokenizer: Tokenizer = Tokenizer(operator_fqn_input)
-    result: List[Token] = list(tokenizer.get_all_tokens())
-    assert result == operator_tokenizer_expected
-
-
-def test_operator_fqn_2(operator_fqn_input_2: str, operator_tokenizer_expected_2: List[Token]) -> None:
-    tokenizer: Tokenizer = Tokenizer(operator_fqn_input_2)
-    result: List[Token] = list(tokenizer.get_all_tokens())
-    assert result == operator_tokenizer_expected_2
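
The tokenizer module shrinks the same way. A sketch of the plausible remainder; the imports and the result line fall above the shown hunk, so they are assumed here from the deleted tests:

from typing import List

from src.cpp_fqn_parser import Token, Tokenizer


def test_tokenizer_fqn(fqn_dict: dict):
    tokenizer: Tokenizer = Tokenizer(fqn_dict["fqn"])  # assumed input key
    result: List[Token] = list(tokenizer.get_all_tokens())
    expected = [Token.from_dict(token)
                for token in fqn_dict["tokens"]]
    assert result == expected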