add pylint to pre-commit #391

Merged · 9 commits · Apr 23, 2024
46 changes: 0 additions & 46 deletions .github/workflows/lint.yml

This file was deleted.

16 changes: 15 additions & 1 deletion .github/workflows/pre-commit.yaml
@@ -10,5 +10,19 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v3
- uses: actions/setup-python@v4
with:
python-version: "3.10"
cache: "pip"
- name: Install requirements
run: |
pip install -U pip
pip install pylint
pip install -U black
pip install .[dev]
pip install wandb
pip install tqdm
- name: Run black
run: |
python -m black .
- uses: pre-commit/[email protected]
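For reference, the same checks can be reproduced locally before pushing; a minimal sketch using Python's subprocess module (assuming pre-commit, black, and pylint are installed in the active environment):

import subprocess

# Mirror the CI job: format with black, then run the full pre-commit hook suite.
subprocess.run(["python", "-m", "black", "."], check=True)
subprocess.run(["pre-commit", "run", "--all-files"], check=True)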
78 changes: 43 additions & 35 deletions .pre-commit-config.yaml
@@ -1,50 +1,58 @@
exclude: &exclude_files >
(?x)^(
docs/.*|
tests/.*|
.github/.*|
LICENSE.md|
README.md|
)$

repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.5.0
rev: v2.5.0
hooks:
- id: end-of-file-fixer
- id: trailing-whitespace
- id: mixed-line-ending
- id: trailing-whitespace
exclude: *exclude_files

- repo: https://github.com/psf/black-pre-commit-mirror
rev: 23.12.1
rev: 24.4.0
hooks:
- id: black
name: Black Formating
exclude: *exclude_files

- repo: https://github.com/pycqa/isort
rev: 5.13.2
hooks:
- id: isort
name: Sort imports
exclude: *exclude_files

# Failing - to be investigated separately
# - repo: local
# hooks:
# - id: pylint
# name: Pylint Checks
# entry: pylint
# language: system
# types: [python]
# args:
# [
# "--rcfile=pyproject.toml",
# "mace",
# "tests",
# "scripts"
# ]

# - repo: local
# hooks:
# - id: mypy
# name: mypy type checks
# entry: mypy
# language: system
# types: [python]
# args:
# [
# --config-file=.mypy.ini,
# mace,
# tests,
# scripts
# ]

- repo: https://github.com/PyCQA/pylint
rev: pylint-2.5.2
hooks:
- id: pylint
language: system
args: [
'--disable=line-too-long',
'--disable=no-member',
'--disable=missing-module-docstring',
'--disable=missing-class-docstring',
'--disable=missing-function-docstring',
'--disable=too-many-arguments',
'--disable=too-many-locals',
'--disable=not-callable',
'--disable=logging-fstring-interpolation',
'--disable=logging-not-lazy',
'--disable=invalid-name',
'--disable=too-few-public-methods',
'--disable=too-many-instance-attributes',
'--disable=too-many-statements',
'--disable=too-many-branches',
'--disable=import-outside-toplevel',
'--disable=cell-var-from-loop',
'--disable=duplicate-code',
'--disable=use-dict-literal',
]
exclude: *exclude_files
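For context, the disabled messages above target patterns that are common in research code rather than outright bugs; an illustrative sketch (not taken from the repository) of two of them:

import logging

logger = logging.getLogger(__name__)

def report(value):
    # logging-fstring-interpolation: pylint prefers lazy "%s" formatting here,
    # since the f-string is evaluated even when this log level is disabled.
    logger.info(f"value is {value}")

# cell-var-from-loop: each lambda captures the variable i, not its value,
# so every callback returns the final value (2) once the loop has finished.
callbacks = []
for i in range(3):
    callbacks.append(lambda: i)
print([cb() for cb in callbacks])  # [2, 2, 2]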
1 change: 1 addition & 0 deletions mace/cli/active_learning_md.py
@@ -1,4 +1,5 @@
"""Demonstrates active learning molecular dynamics with constant temperature."""

import argparse
import os
import time
3 changes: 1 addition & 2 deletions mace/cli/preprocess_data.py
@@ -78,8 +78,7 @@ def split_array(a: np.ndarray, max_size: int):
for j in range(0, len(factors) - i + 1):
if np.prod(factors[j : j + i]) <= max_size:
test = np.prod(factors[j : j + i])
if test > max_factor:
max_factor = test
max_factor = max(test, max_factor)
return np.array_split(a, max_factor), drop_last


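The changed line in split_array is a behavior-preserving simplification: the running maximum is updated with the built-in max() instead of an explicit comparison. A minimal illustration of the equivalence:

max_factor = 1
for test in (2, 6, 4):
    # before: if test > max_factor: max_factor = test
    # after:  the built-in max() expresses the same update in one line
    max_factor = max(test, max_factor)
print(max_factor)  # 6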
2 changes: 1 addition & 1 deletion mace/cli/run_train.py
@@ -823,7 +823,7 @@ def main() -> None:
path_complied,
_extra_files=extra_files,
)
except Exception as e: # pylint: disable=W070344
except Exception as e: # pylint: disable=W0703
pass

if args.distributed:
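This change fixes a typo in the disable code: W0703 is pylint's broad-except message, and the malformed W070344 would not have suppressed it. A minimal sketch of the pattern, with risky_operation as a hypothetical stand-in:

# W0703 (broad-except) flags handlers that catch the generic Exception class;
# the inline comment suppresses the warning for this occurrence only.
try:
    risky_operation()  # hypothetical call that may raise
except Exception:  # pylint: disable=W0703
    pass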
6 changes: 3 additions & 3 deletions mace/data/utils.py
@@ -246,9 +246,9 @@ def load_from_xyz(
isolated_atom_config = atoms.info.get("config_type") == "IsolatedAtom"
if isolated_atom_config:
if energy_key in atoms.info.keys():
atomic_energies_dict[
atoms.get_atomic_numbers()[0]
] = atoms.info[energy_key]
atomic_energies_dict[atoms.get_atomic_numbers()[0]] = (
atoms.info[energy_key]
)
else:
logging.warning(
f"Configuration '{idx}' is marked as 'IsolatedAtom' "
58 changes: 29 additions & 29 deletions mace/tools/finetuning_utils.py
@@ -50,24 +50,24 @@ def load_foundations(
for j in range(4): # Assuming 4 layers in conv_tp_weights,
layer_name = f"layer{j}"
if j == 0:
getattr(
model.interactions[i].conv_tp_weights, layer_name
).weight = torch.nn.Parameter(
getattr(
model_foundations.interactions[i].conv_tp_weights,
layer_name,
getattr(model.interactions[i].conv_tp_weights, layer_name).weight = (
torch.nn.Parameter(
getattr(
model_foundations.interactions[i].conv_tp_weights,
layer_name,
)
.weight[:num_radial, :]
.clone()
)
.weight[:num_radial, :]
.clone()
)
else:
getattr(
model.interactions[i].conv_tp_weights, layer_name
).weight = torch.nn.Parameter(
getattr(
model_foundations.interactions[i].conv_tp_weights,
layer_name,
).weight.clone()
getattr(model.interactions[i].conv_tp_weights, layer_name).weight = (
torch.nn.Parameter(
getattr(
model_foundations.interactions[i].conv_tp_weights,
layer_name,
).weight.clone()
)
)

model.interactions[i].linear.weight = torch.nn.Parameter(
@@ -105,24 +105,24 @@ def load_foundations(
for i in range(2): # Assuming 2 products modules
max_range = max_L + 1 if i == 0 else 1
for j in range(max_range): # Assuming 3 contractions in symmetric_contractions
model.products[i].symmetric_contractions.contractions[
j
].weights_max = torch.nn.Parameter(
model_foundations.products[i]
.symmetric_contractions.contractions[j]
.weights_max[indices_weights, :, :]
.clone()
)

for k in range(2): # Assuming 2 weights in each contraction
model.products[i].symmetric_contractions.contractions[j].weights[
k
] = torch.nn.Parameter(
model.products[i].symmetric_contractions.contractions[j].weights_max = (
torch.nn.Parameter(
model_foundations.products[i]
.symmetric_contractions.contractions[j]
.weights[k][indices_weights, :, :]
.weights_max[indices_weights, :, :]
.clone()
)
)

for k in range(2): # Assuming 2 weights in each contraction
model.products[i].symmetric_contractions.contractions[j].weights[k] = (
torch.nn.Parameter(
model_foundations.products[i]
.symmetric_contractions.contractions[j]
.weights[k][indices_weights, :, :]
.clone()
)
)

model.products[i].linear.weight = torch.nn.Parameter(
model_foundations.products[i].linear.weight.clone()
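The restructuring above matches the wrapping style of the black 24.4.0 bump earlier in this PR and does not change behavior: each assignment copies a foundation-model tensor, optionally sliced, into the target module as a fresh Parameter. A minimal sketch of that pattern with two standalone linear layers (shapes are illustrative):

import torch

foundation = torch.nn.Linear(16, 8)
target = torch.nn.Linear(16, 4)

# Copy the first four output rows of the foundation weights into the target,
# cloning so the new Parameter does not share storage with the source module.
target.weight = torch.nn.Parameter(foundation.weight[:4, :].clone())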
4 changes: 2 additions & 2 deletions mace/tools/scripts_utils.py
@@ -388,9 +388,9 @@ def step(self, metrics=None, epoch=None):  # pylint: disable=E1123
if self.scheduler == "ExponentialLR":
self.lr_scheduler.step(epoch=epoch)
elif self.scheduler == "ReduceLROnPlateau":
self.lr_scheduler.step(
self.lr_scheduler.step( # pylint: disable=E1123
metrics=metrics, epoch=epoch
) # pylint: disable=E1123
)

def __getattr__(self, name):
if name == "step":
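An inline pylint disable generally applies to the line it is written on, so the comment is moved to the line where the call starts, which is where E1123 (unexpected-keyword-arg) is reported, rather than the closing parenthesis. An illustrative sketch (step_fn is a hypothetical stand-in for the scheduler call):

def step_fn(**kwargs):  # hypothetical; the real scheduler call trips up pylint's inference
    return kwargs

# The disable belongs on the line where the call begins, because that is the
# line pylint attributes the message to; on the closing parenthesis it is inert.
step_fn(  # pylint: disable=E1123
    metrics=1.0,
    epoch=3,
)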
1 change: 1 addition & 0 deletions pyproject.toml
@@ -30,6 +30,7 @@ disable = [
"import-outside-toplevel",
"cell-var-from-loop",
"duplicate-code",
"use-dict-literal",
]

[tool.pylint.MASTER]
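The newly silenced use-dict-literal check suggests replacing dict() constructor calls with literal syntax; a minimal illustration:

# Flagged by use-dict-literal: building a dict through the dict() constructor...
config = dict(lr=0.01, batch_size=32)
# ...where pylint would prefer the equivalent literal form:
config = {"lr": 0.01, "batch_size": 32}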
1 change: 1 addition & 0 deletions setup.cfg
@@ -27,6 +27,7 @@ install_requires =
python-hostlist
configargparse
GitPython
tqdm
# for plotting:
matplotlib
pandas