Skip to content

Commit 1f90b21

Browse files
authored
Merge pull request #145 from ChEB-AI/fix/ruff_action_only
Use ruff as the main formatter, replacing black and isort
2 parents efac8aa + aaa7036 commit 1f90b21

36 files changed

+122
-133
lines changed

.github/workflows/lint.yml

Lines changed: 0 additions & 26 deletions
This file was deleted.

.github/workflows/pre-commit.yml

Lines changed: 19 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,19 @@
1+
name: Pre-commit Check
2+
3+
on:
4+
push:
5+
branches: [main, master]
6+
pull_request:
7+
8+
jobs:
9+
pre-commit:
10+
runs-on: ubuntu-latest
11+
steps:
12+
- uses: actions/checkout@v4
13+
14+
- uses: actions/setup-python@v5
15+
with:
16+
python-version: '3.10'
17+
18+
- name: Run pre-commit
19+
uses: pre-commit/action@v3.0.1

.pre-commit-config.yaml

Lines changed: 16 additions & 28 deletions
Original file line numberDiff line numberDiff line change
@@ -1,31 +1,19 @@
11
repos:
2-
- repo: https://github.com/psf/black
3-
rev: "25.1.0"
4-
hooks:
5-
- id: black
6-
- id: black-jupyter # for formatting jupyter-notebook
2+
# Use `pre-commit autoupdate` to update all the hooks.
73

8-
- repo: https://github.com/pycqa/isort
9-
rev: 5.13.2
10-
hooks:
11-
- id: isort
12-
name: isort (python)
13-
args: ["--profile=black"]
4+
- repo: https://github.com/astral-sh/ruff-pre-commit
5+
# Ruff version. https://docs.astral.sh/ruff/integrations/#pre-commit
6+
rev: v0.14.11
7+
hooks:
8+
# Run the linter.
9+
- id: ruff-check
10+
args: [ --fix ]
11+
# Run the formatter.
12+
- id: ruff-format
1413

15-
- repo: https://github.com/asottile/seed-isort-config
16-
rev: v2.2.0
17-
hooks:
18-
- id: seed-isort-config
19-
20-
- repo: https://github.com/pre-commit/pre-commit-hooks
21-
rev: v4.6.0
22-
hooks:
23-
- id: check-yaml
24-
- id: end-of-file-fixer
25-
- id: trailing-whitespace
26-
27-
- repo: https://github.com/astral-sh/ruff-pre-commit
28-
rev: v0.12.2
29-
hooks:
30-
- id: ruff
31-
args: [--fix]
14+
- repo: https://github.com/pre-commit/pre-commit-hooks
15+
rev: v6.0.0
16+
hooks:
17+
- id: check-yaml
18+
- id: end-of-file-fixer
19+
- id: trailing-whitespace

README.md

Lines changed: 5 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -23,11 +23,15 @@ cd python-chebai
2323
pip install -e .
2424
```
2525

26-
Some packages are not installed by default:
26+
Some packages are not installed by default but can be added with the following extras:
2727
```
2828
pip install chebai[dev]
2929
```
3030
installs additional packages useful to people who want to contribute to the library.
31+
This includes `pre-commit`, which runs automatic formatting before each commit.
32+
To set up `pre-commit` for your workflow, run `pre-commit install`.
33+
For more details, see the [`pre-commit` documentation](https://pre-commit.com).
34+
3135
```
3236
pip install chebai[plot]
3337
```

chebai/loss/bce_weighted.py

Lines changed: 9 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -32,18 +32,20 @@ def __init__(
3232
data_extractor = data_extractor.labeled
3333
self.data_extractor = data_extractor
3434

35-
assert (
36-
isinstance(beta, float) and beta > 0.0
37-
), f"Beta parameter must be a float with value greater than 0.0, for loss class {self.__class__.__name__}."
35+
assert isinstance(beta, float) and beta > 0.0, (
36+
f"Beta parameter must be a float with value greater than 0.0, for loss class {self.__class__.__name__}."
37+
)
3838

39-
assert (
40-
self.data_extractor is not None
41-
), f"Data extractor must be provided if this loss class ({self.__class__.__name__}) is used."
39+
assert self.data_extractor is not None, (
40+
f"Data extractor must be provided if this loss class ({self.__class__.__name__}) is used."
41+
)
4242

4343
assert all(
4444
os.path.exists(os.path.join(self.data_extractor.processed_dir, file_name))
4545
for file_name in self.data_extractor.processed_file_names
46-
), "Dataset files not found. Make sure the dataset is processed before using this loss."
46+
), (
47+
"Dataset files not found. Make sure the dataset is processed before using this loss."
48+
)
4749

4850
assert (
4951
isinstance(self.data_extractor, _ChEBIDataExtractor)

chebai/loss/focal_loss.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -36,9 +36,9 @@ def __init__(
3636
and alpha is not None
3737
and isinstance(alpha, (list, torch.Tensor))
3838
):
39-
assert (
40-
num_classes is not None
41-
), "num_classes must be specified for multi-class classification"
39+
assert num_classes is not None, (
40+
"num_classes must be specified for multi-class classification"
41+
)
4242
if isinstance(alpha, list):
4343
self.alpha = torch.Tensor(alpha)
4444
else:

chebai/preprocessing/datasets/base.py

Lines changed: 12 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -96,9 +96,9 @@ def __init__(
9696
self.prediction_kind = prediction_kind
9797
self.data_limit = data_limit
9898
self.label_filter = label_filter
99-
assert (balance_after_filter is not None) or (
100-
self.label_filter is None
101-
), "Filter balancing requires a filter"
99+
assert (balance_after_filter is not None) or (self.label_filter is None), (
100+
"Filter balancing requires a filter"
101+
)
102102
self.balance_after_filter = balance_after_filter
103103
self.num_workers = num_workers
104104
self.persistent_workers: bool = bool(persistent_workers)
@@ -108,13 +108,13 @@ def __init__(
108108
self.use_inner_cross_validation = (
109109
inner_k_folds > 1
110110
) # only use cv if there are at least 2 folds
111-
assert (
112-
fold_index is None or self.use_inner_cross_validation is not None
113-
), "fold_index can only be set if cross validation is used"
111+
assert fold_index is None or self.use_inner_cross_validation is not None, (
112+
"fold_index can only be set if cross validation is used"
113+
)
114114
if fold_index is not None and self.inner_k_folds is not None:
115-
assert (
116-
fold_index < self.inner_k_folds
117-
), "fold_index can't be larger than the total number of folds"
115+
assert fold_index < self.inner_k_folds, (
116+
"fold_index can't be larger than the total number of folds"
117+
)
118118
self.fold_index = fold_index
119119
self._base_dir = base_dir
120120
self.n_token_limit = n_token_limit
@@ -137,9 +137,9 @@ def num_of_labels(self):
137137

138138
@property
139139
def feature_vector_size(self):
140-
assert (
141-
self._feature_vector_size is not None
142-
), "size of feature vector must be set"
140+
assert self._feature_vector_size is not None, (
141+
"size of feature vector must be set"
142+
)
143143
return self._feature_vector_size
144144

145145
@property

chebai/preprocessing/datasets/chebi.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -144,9 +144,9 @@ def __init__(
144144
**kwargs,
145145
):
146146
if bool(augment_smiles):
147-
assert (
148-
int(aug_smiles_variations) > 0
149-
), "Number of variations must be greater than 0"
147+
assert int(aug_smiles_variations) > 0, (
148+
"Number of variations must be greater than 0"
149+
)
150150
aug_smiles_variations = int(aug_smiles_variations)
151151

152152
if not kwargs.get("splits_file_path", None):

chebai/preprocessing/datasets/pubchem.py

Lines changed: 1 addition & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -408,9 +408,7 @@ def download(self):
408408
print("Selecting most dissimilar values from random subsets...")
409409
for i in tqdm.tqdm(range(self.n_random_subsets)):
410410
smiles_i = random_smiles[
411-
i
412-
* len(random_smiles)
413-
// self.n_random_subsets : (i + 1)
411+
i * len(random_smiles) // self.n_random_subsets : (i + 1)
414412
* len(random_smiles)
415413
// self.n_random_subsets
416414
]

chebai/preprocessing/migration/chebi_data_migration.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -308,7 +308,6 @@ def _old_raw_dir(self) -> str:
308308

309309

310310
class Main:
311-
312311
def migrate(
313312
self,
314313
datamodule: Optional[_ChEBIDataExtractor] = None,

0 commit comments

Comments
 (0)