Skip to content

Commit fb73ac2

Browse files
authored
Merge pull request #196 from Pennycook/ci/type-hints
Fix broken type hints
2 parents 679051b + 81bae3c commit fb73ac2

9 files changed

Lines changed: 77 additions & 49 deletions

File tree

.pre-commit-config.yaml

Lines changed: 15 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -64,3 +64,18 @@ repos:
6464
- id: bandit
6565
name: bandit
6666
args: ["-c", ".bandit"]
67+
68+
- repo: https://github.com/pre-commit/mirrors-mypy
69+
rev: v1.17.1
70+
hooks:
71+
- id: mypy
72+
name: mypy
73+
args: []
74+
additional_dependencies:
75+
[
76+
"types-jsonschema",
77+
"types-tqdm",
78+
"types-tabulate",
79+
"scipy-stubs",
80+
"matplotlib", # There are no official stubs for matplotlib
81+
]

codebasin/__init__.py

Lines changed: 10 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,11 @@
11
# Copyright (C) 2019-2024 Intel Corporation
22
# SPDX-License-Identifier: BSD-3-Clause
3+
from __future__ import annotations
4+
35
import importlib.metadata
46
import os
57
import shlex
8+
import typing
69
import warnings
710
from collections.abc import Iterable
811
from pathlib import Path
@@ -152,7 +155,7 @@ def __iter__(self):
152155
yield from self.commands
153156

154157
@classmethod
155-
def from_json(cls, instance: list):
158+
def from_json(cls, instance: list) -> CompilationDatabase:
156159
"""
157160
Parameters
158161
----------
@@ -174,7 +177,10 @@ def from_json(cls, instance: list):
174177
return cls(commands)
175178

176179
@classmethod
177-
def from_file(cls, filename: str | os.PathLike[str]):
180+
def from_file(
181+
cls,
182+
filename: str | os.PathLike[str],
183+
) -> CompilationDatabase:
178184
"""
179185
Parameters
180186
----------
@@ -194,8 +200,8 @@ def from_file(cls, filename: str | os.PathLike[str]):
194200
A CompilationDatabase corresponding to the provided JSON file.
195201
"""
196202
with open(filename) as f:
197-
db = codebasin.util._load_json(f, schema_name="compiledb")
198-
return CompilationDatabase.from_json(db)
203+
db: object = codebasin.util._load_json(f, schema_name="compiledb")
204+
return CompilationDatabase.from_json(typing.cast(list, db))
199205

200206

201207
class CodeBase:

codebasin/__main__.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -44,10 +44,10 @@ def _help_string(*lines: str, is_long=False, is_last=False):
4444

4545
# argparse.HelpFormatter indents by 24 characters.
4646
# We cannot override this directly, but can delete them with backspaces.
47-
lines = ["\b" * 20 + x for x in lines]
47+
modified_lines = ["\b" * 20 + x for x in lines]
4848

4949
# The additional space is required for argparse to respect newlines.
50-
result += "\n".join(lines)
50+
result += "\n".join(modified_lines)
5151

5252
if not is_last:
5353
result += "\n "

codebasin/config.py

Lines changed: 13 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -4,6 +4,7 @@
44
Contains functions to build up a configuration dictionary,
55
defining a specific code base configuration.
66
"""
7+
from __future__ import annotations
78

89
import argparse
910
import logging
@@ -12,16 +13,17 @@
1213
import re
1314
import string
1415
import tomllib
16+
from collections.abc import Sequence
1517
from dataclasses import asdict, dataclass, field
1618
from itertools import chain
1719
from pathlib import Path
18-
from typing import Self
20+
from typing import Any
1921

2022
from codebasin import CompilationDatabase, util
2123

2224
log = logging.getLogger(__name__)
2325

24-
_compilers = None
26+
_compilers = {}
2527

2628

2729
class _StoreSplitAction(argparse.Action):
@@ -45,9 +47,9 @@ def __call__(
4547
self,
4648
parser: argparse.ArgumentParser,
4749
namespace: argparse.Namespace,
48-
values: str,
49-
option_string: str,
50-
):
50+
values: str | Sequence[Any] | None,
51+
option_string: str | None = None,
52+
) -> None:
5153
if not isinstance(values, str):
5254
raise TypeError("store_split expects string values")
5355
split_values = values.split(self.sep)
@@ -84,9 +86,9 @@ def __call__(
8486
self,
8587
parser: argparse.ArgumentParser,
8688
namespace: argparse.Namespace,
87-
value: str,
88-
option_string: str,
89-
):
89+
value: str | Sequence[Any] | None,
90+
option_string: str | None = None,
91+
) -> None:
9092
if not isinstance(value, str):
9193
raise TypeError("extend_match expects string value")
9294
matches = re.findall(self.pattern, value)
@@ -118,7 +120,7 @@ class _CompilerMode:
118120
include_files: list[str] = field(default_factory=list)
119121

120122
@classmethod
121-
def from_toml(cls, toml: object) -> Self:
123+
def from_toml(cls, toml: dict[str, Any]) -> _CompilerMode:
122124
return _CompilerMode(**toml)
123125

124126

@@ -131,7 +133,7 @@ class _CompilerPass:
131133
modes: list[str] = field(default_factory=list)
132134

133135
@classmethod
134-
def from_toml(cls, toml: object) -> Self:
136+
def from_toml(cls, toml: dict[str, Any]) -> _CompilerPass:
135137
return _CompilerPass(**toml)
136138

137139

@@ -144,7 +146,7 @@ class _Compiler:
144146
passes: dict[str, _CompilerPass] = field(default_factory=dict)
145147

146148
@classmethod
147-
def from_toml(cls, toml: object) -> Self:
149+
def from_toml(cls, toml: dict[str, Any]) -> _Compiler:
148150
kwargs = toml.copy()
149151
if "parser" in kwargs:
150152
for option in kwargs["parser"]:

codebasin/coverage/__main__.py

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -128,11 +128,13 @@ def _compute(args: argparse.Namespace):
128128
with open(filename, "rb") as f:
129129
digest = hashlib.file_digest(f, "sha512")
130130

131-
used_lines = []
132-
unused_lines = []
131+
used_lines: list[int] = []
132+
unused_lines: list[int] = []
133133
tree = state.get_tree(filename)
134134
association = state.get_map(filename)
135135
for node in [n for n in tree.walk() if isinstance(n, CodeNode)]:
136+
if not node.lines:
137+
continue
136138
if association[node] == frozenset([]):
137139
unused_lines.extend(node.lines)
138140
else:

codebasin/finder.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -96,7 +96,7 @@ def get_setmap(self, codebase: CodeBase) -> dict[frozenset, int]:
9696
dict[frozenset, int]
9797
The number of lines associated with each platform set.
9898
"""
99-
setmap = collections.defaultdict(int)
99+
setmap: dict[frozenset, int] = collections.defaultdict(int)
100100
for fn in codebase:
101101
# Don't count symlinks if their target is in the code base.
102102
# The target will be counted separately.

codebasin/preprocessor.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -620,7 +620,7 @@ class CodeNode(Node):
620620
end_line: int = field(default=-1, init=False)
621621
num_lines: int = field(default=0, init=False)
622622
source: str | None = field(default=None, init=False, repr=False)
623-
lines: list[str] | None = field(
623+
lines: list[int] | None = field(
624624
default_factory=list,
625625
init=False,
626626
repr=False,

codebasin/report.py

Lines changed: 20 additions & 20 deletions
Original file line numberDiff line numberDiff line change
@@ -15,7 +15,7 @@
1515
import sys
1616
import warnings
1717
from collections import defaultdict
18-
from collections.abc import Iterable
18+
from collections.abc import Sequence
1919
from pathlib import Path
2020
from typing import Self, TextIO
2121

@@ -129,11 +129,11 @@ def average_coverage(
129129
if len(platforms) == 0:
130130
return float("nan")
131131

132-
total = sum([coverage(setmap, [p]) for p in platforms])
132+
total = sum([coverage(setmap, {p}) for p in platforms])
133133
return total / len(platforms)
134134

135135

136-
def distance(setmap, p1, p2):
136+
def distance(setmap, p1, p2) -> float:
137137
"""
138138
Compute distance between two platforms
139139
"""
@@ -148,14 +148,14 @@ def distance(setmap, p1, p2):
148148
return d
149149

150150

151-
def divergence(setmap):
151+
def divergence(setmap) -> float:
152152
"""
153153
Compute code divergence as defined by Harrell and Kitson
154154
i.e. average of pair-wise distances between platform sets
155155
"""
156156
platforms = extract_platforms(setmap)
157157

158-
d = 0
158+
d = 0.0
159159
npairs = 0
160160
for p1, p2 in it.combinations(platforms, 2):
161161
d += distance(setmap, p1, p2)
@@ -166,14 +166,14 @@ def divergence(setmap):
166166
return d / float(npairs)
167167

168168

169-
def summary(setmap: defaultdict[str, int], stream: TextIO = sys.stdout):
169+
def summary(setmap: dict[frozenset[str], int], stream: TextIO = sys.stdout):
170170
"""
171171
Produce a summary report for the platform set, including
172172
a breakdown of SLOC per platform subset, code divergence, etc.
173173
174174
Parameters
175175
----------
176-
setmap: defaultdict[str, int]
176+
setmap: dict[frozenset[str], int]
177177
The setmap used to compute the summary report.
178178
179179
stream: TextIO, default: sys.stdout
@@ -214,7 +214,7 @@ def summary(setmap: defaultdict[str, int], stream: TextIO = sys.stdout):
214214

215215
def clustering(
216216
output_name: str,
217-
setmap: defaultdict[str, int],
217+
setmap: dict[frozenset[str], int],
218218
stream: TextIO = sys.stdout,
219219
):
220220
"""
@@ -225,7 +225,7 @@ def clustering(
225225
output_name: str
226226
The filename for the dendrogram.
227227
228-
setmap: defaultdict[str, int]
228+
setmap: dict[frozenset[str], int]
229229
The setmap used to compute the clustering statistics.
230230
231231
stream: TextIO, default: sys.stdout
@@ -313,7 +313,7 @@ def find_duplicates(codebase: CodeBase) -> list[set[Path]]:
313313
A list of all sets of Paths with identical contents.
314314
"""
315315
# Search for possible matches using a hash, ignoring symlinks.
316-
possible_matches = {}
316+
possible_matches: dict[str, set] = {}
317317
for path in codebase:
318318
path = Path(path)
319319
if path.is_symlink():
@@ -486,15 +486,15 @@ def is_symlink(self):
486486
def _platforms_str(
487487
self,
488488
all_platforms: set[str],
489-
labels: Iterable[str] = string.ascii_uppercase,
489+
labels: Sequence[str] = string.ascii_uppercase,
490490
) -> str:
491491
"""
492492
Parameters
493493
----------
494494
all_platforms: set[str]
495495
The set of all platforms.
496496
497-
labels: Iterable[str], default: string.ascii_uppercase
497+
labels: Sequence[str], default: string.ascii_uppercase
498498
The labels to use in place of real platform names.
499499
500500
Returns
@@ -605,7 +605,7 @@ def __init__(self, rootdir: str | os.PathLike[str]):
605605
def insert(
606606
self,
607607
filename: str | os.PathLike[str],
608-
setmap: defaultdict[str, int],
608+
setmap: dict[frozenset[str], int],
609609
):
610610
"""
611611
Insert a new file into the tree, creating as many nodes as necessary.
@@ -653,7 +653,7 @@ def _print(
653653
prefix: str = "",
654654
connector: str = "",
655655
fancy: bool = True,
656-
levels: int = None,
656+
levels: int | None = None,
657657
):
658658
"""
659659
Recursive helper function to print all nodes in a FileTree.
@@ -740,7 +740,7 @@ def _print(
740740

741741
return lines
742742

743-
def write_to(self, stream: TextIO, levels: int = None):
743+
def write_to(self, stream: TextIO, levels: int | None = None):
744744
"""
745745
Write the FileTree to the specified stream.
746746
@@ -766,7 +766,7 @@ def files(
766766
*,
767767
stream: TextIO = sys.stdout,
768768
prune: bool = False,
769-
levels: int = None,
769+
levels: int | None = None,
770770
):
771771
"""
772772
Produce a file tree representing the code base.
@@ -796,7 +796,7 @@ def files(
796796
# Build up a tree from the list of files.
797797
tree = FileTree(codebase.directories[0])
798798
for f in codebase:
799-
setmap = defaultdict(int)
799+
setmap: dict[frozenset[str], int] = defaultdict(int)
800800
if state:
801801
association = state.get_map(f)
802802
for node in filter(
@@ -828,10 +828,10 @@ def files(
828828
]
829829
legend += ["[" + " | ".join(header) + "]"]
830830
legend += [""]
831-
legend = "\n".join(legend)
831+
legend_string = "\n".join(legend)
832832
if not stream.isatty():
833-
legend = _strip_colors(legend)
834-
print(legend, file=stream)
833+
legend_string = _strip_colors(legend_string)
834+
print(legend_string, file=stream)
835835

836836
# Print the tree.
837837
tree.write_to(stream, levels=levels)

0 commit comments

Comments
 (0)