Switch linting to ruff
ffl096 committed Jan 30, 2024
1 parent c84b27c commit d0e04cd
Showing 20 changed files with 102 additions and 106 deletions.
30 changes: 11 additions & 19 deletions .pre-commit-config.yaml
@@ -5,7 +5,7 @@ repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.0.1
hooks:
- id: check-byte-order-marker
- id: fix-byte-order-marker
- id: check-case-conflict
- id: check-merge-conflict
- id: check-yaml
@@ -18,24 +18,16 @@ repos:
- id: trailing-whitespace
- id: requirements-txt-fixer

- repo: https://github.com/psf/black
rev: 23.10.1
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.1.14
hooks:
- id: black
- id: ruff
types_or: [ python, pyi, jupyter ]
args: [ --fix ]
- id: ruff-format
types_or: [ python, pyi, jupyter ]

- repo: https://github.com/pycqa/isort
rev: 5.12.0
- repo: https://github.com/numpy/numpydoc
rev: v1.6.0
hooks:
- id : isort
args : ["--profile=black", "--filter-files"]

- repo: https://github.com/asottile/blacken-docs
rev: v1.12.0
hooks:
- id: blacken-docs
additional_dependencies: [black==20.8b0]
- repo: https://github.com/pycqa/flake8
rev: 6.0.0
hooks:
- id: flake8
additional_dependencies: [flake8-docstrings, Flake8-pyproject]
- id: numpydoc-validation
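
Note (not part of the diff): the numpydoc hook added above runs `numpydoc-validation`, which, together with `convention = "numpy"` under `[tool.ruff.lint.pydocstyle]` in the pyproject.toml changes below, enforces numpy-style docstrings. As background, a minimal sketch of the docstring layout these checks expect; the function and its parameters are hypothetical and not taken from this repository:

```python
def scale(values: list[float], factor: float = 1.0) -> list[float]:
    """Scale each value by a constant factor.

    Parameters
    ----------
    values : list of float
        Input values.
    factor : float, default=1.0
        Multiplicative factor applied to every element.

    Returns
    -------
    list of float
        The scaled values.
    """
    return [v * factor for v in values]
```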
48 changes: 25 additions & 23 deletions pyproject.toml
@@ -44,14 +44,8 @@ doc = [
"pydata-sphinx-theme"
]
lint = [
"black < 24.0",
"black[jupyter]",
"flake8",
"flake8-docstrings",
"Flake8-pyproject",
"isort",
"pre-commit",
"tomli"
"ruff"
]
test = [
"pytest",
@@ -68,6 +62,30 @@ all = ["TopoEmbedX[dev, doc]"]
homepage="https://github.com/pyt-team/TopoEmbedX"
repository="https://github.com/pyt-team/TopoEmbedX"

[tool.ruff]
target-version = "py310"
extend-include = ["*.ipynb"]

[tool.ruff.format]
docstring-code-format = true

[tool.ruff.lint]
select = [
"F", # pyflakes errors
"E", # code style
"W", # warnings
"I", # import order
"UP", # pyupgrade rules
"SIM", # code simplifications
]
ignore = ["E501"] # line too long

[tool.ruff.lint.pydocstyle]
convention = "numpy"

[tool.ruff.per-file-ignores]
"__init__.py" = ["F403"]

[tool.setuptools.dynamic]
version = {attr = "topoembedx.__version__"}

@@ -96,19 +114,3 @@ ignore_missing_imports = true

[tool.pytest.ini_options]
addopts = "--capture=no"

[tool.isort]
multi_line_output = 3
include_trailing_comma = true
skip = [".gitignore", "__init__.py"]

[tool.flake8]
application_import_names = "topoembedx"
docstring-convention = "numpy"
exclude = ["examples/*.ipynb"]
import_order_style = "smarkets"
max-line-length = 88
extend-ignore = ["E501", "F401", "E203"]
per-file-ignores = [
"*/__init__.py: D104,F401,F403"
]
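
For context (not part of the diff): with `target-version = "py310"` and the pyupgrade-based `UP` rules selected, ruff's `--fix` mode rewrites older typing and `super()` idioms to the modern forms applied throughout the Python files below. A small illustrative sketch of that before/after, using hypothetical classes rather than code from this repository:

```python
import numpy as np


class Base:
    def get_embedding(self) -> np.ndarray:
        return np.zeros(2)


class Child(Base):
    # Before (flagged by the UP rules when targeting Python >= 3.10):
    #   def get_embedding(self, get_dict: bool = False) -> Union[dict, np.ndarray]:
    #       emb = super(Child, self).get_embedding()
    # After (the style applied in this commit):
    def get_embedding(self, get_dict: bool = False) -> dict | np.ndarray:
        emb = super().get_embedding()  # zero-argument super()
        return {0: emb} if get_dict else emb
```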
1 change: 0 additions & 1 deletion test/classes/test_cell2vec.py
@@ -1,7 +1,6 @@
"""Test Cell2Vec class."""

import numpy as np
import pytest
import toponetx as tnx

from topoembedx.classes.cell2vec import Cell2Vec
1 change: 0 additions & 1 deletion test/classes/test_cell_diff2vec.py
@@ -1,6 +1,5 @@
"""Test CellDiff2Vec class."""

import pytest
import toponetx as tnx

from topoembedx.classes.cell_diff2vec import CellDiff2Vec
1 change: 0 additions & 1 deletion test/classes/test_deepcell.py
@@ -1,7 +1,6 @@
"""Test the DeepCell class."""

import numpy as np
import pytest
import toponetx as tnx

from topoembedx.classes.deepcell import DeepCell
1 change: 0 additions & 1 deletion test/classes/test_higher_order_laplacian_eigenmaps.py
@@ -1,7 +1,6 @@
"""Test HigherOrderLaplacianEigenmaps class."""

import numpy as np
import pytest
import toponetx as tnx

from topoembedx.classes.higher_order_laplacian_eigenmaps import (
1 change: 0 additions & 1 deletion test/classes/test_hoglee.py
@@ -1,7 +1,6 @@
"""Test the HOGLEE class."""

import numpy as np
import pytest
import toponetx as tnx

from topoembedx.classes.hoglee import HOGLEE
2 changes: 0 additions & 2 deletions test/classes/test_hope.py
@@ -1,7 +1,5 @@
"""Test HOPE class."""

import numpy as np
import pytest
import toponetx as tnx

from topoembedx.classes.hope import HOPE
1 change: 1 addition & 0 deletions topoembedx/__init__.py
@@ -1,3 +1,4 @@
"""Initialize the library with modules and other content."""
__version__ = "0.0.1"

from .classes.cell2vec import Cell2Vec
1 change: 1 addition & 0 deletions topoembedx/classes/__init__.py
@@ -0,0 +1 @@
"""Initialize the classes module of TopoEmbedX."""
8 changes: 4 additions & 4 deletions topoembedx/classes/cell2vec.py
@@ -1,5 +1,5 @@
"""Cell2Vec: a class that extends the Node2Vec class."""
from typing import Literal, Union
from typing import Literal

import networkx as nx
import numpy as np
@@ -94,9 +94,9 @@ def fit(

g = nx.from_numpy_matrix(self.A)

super(Cell2Vec, self).fit(g)
super().fit(g)

def get_embedding(self, get_dict: bool = False) -> Union[dict, np.ndarray]:
def get_embedding(self, get_dict: bool = False) -> dict | np.ndarray:
"""Get embedding.
Parameters
@@ -109,7 +109,7 @@ def get_embedding(self, get_dict: bool = False) -> Union[dict, np.ndarray]:
dict or numpy.ndarray
Embedding.
"""
emb = super(Cell2Vec, self).get_embedding()
emb = super().get_embedding()
if get_dict:
return dict(zip(self.ind, emb))
return emb
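
Usage note (not part of the diff): when `get_dict=True`, the embedding is returned keyed by the cell indices in `self.ind`. A hedged sketch of how that is typically consumed, assuming `Cell2Vec()` accepts default hyperparameters and that its `fit` takes the same keyword arguments as the HOPE docstring example further down in this commit:

```python
import toponetx as tnx

from topoembedx.classes.cell2vec import Cell2Vec

# A small combinatorial complex, mirroring the HOPE docstring example.
ccc = tnx.classes.CombinatorialComplex()
ccc.add_cell([2, 5], rank=1)
ccc.add_cell([2, 4], rank=1)
ccc.add_cell([2, 4, 5], rank=3)

model = Cell2Vec()  # assumption: sensible defaults, as with HOPE()
model.fit(
    ccc,
    neighborhood_type="adj",
    neighborhood_dim={"rank": 0, "via_rank": 3},
)
emb_by_cell = model.get_embedding(get_dict=True)  # {cell index: embedding vector}
```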
8 changes: 4 additions & 4 deletions topoembedx/classes/cell_diff2vec.py
@@ -1,5 +1,5 @@
"""Class CellDiff2Vec."""
from typing import Literal, Union
from typing import Literal

import networkx as nx
import numpy as np
@@ -82,9 +82,9 @@ def fit(
raise ValueError(
"The diffusion_cover is too large for the size of the graph."
)
super(CellDiff2Vec, self).fit(g)
super().fit(g)

def get_embedding(self, get_dict: bool = False) -> Union[dict, np.ndarray]:
def get_embedding(self, get_dict: bool = False) -> dict | np.ndarray:
"""Get embedding.
Parameters
@@ -97,7 +97,7 @@ def get_embedding(self, get_dict: bool = False) -> Union[dict, np.ndarray]:
dict or numpy.ndarray
Embedding.
"""
emb = super(CellDiff2Vec, self).get_embedding()
emb = super().get_embedding()

[Codecov / codecov/patch check warning: added line topoembedx/classes/cell_diff2vec.py#L100 was not covered by tests]
if get_dict:
return dict(zip(self.ind, emb))
return emb
8 changes: 4 additions & 4 deletions topoembedx/classes/deepcell.py
@@ -1,5 +1,5 @@
"""DeepCell class for embedding complex networks using DeepWalk."""
from typing import Literal, Union
from typing import Literal

import networkx as nx
import numpy as np
@@ -79,9 +79,9 @@ def fit(

g = nx.from_numpy_matrix(self.A)

super(DeepCell, self).fit(g)
super().fit(g)

def get_embedding(self, get_dict: bool = False) -> Union[dict, np.ndarray]:
def get_embedding(self, get_dict: bool = False) -> dict | np.ndarray:
"""Get embeddings.
Parameters
@@ -94,7 +94,7 @@ def get_embedding(self, get_dict: bool = False) -> Union[dict, np.ndarray]:
dict or np.ndarray
The embedding of the complex.
"""
emb = super(DeepCell, self).get_embedding()
emb = super().get_embedding()
if get_dict:
return dict(zip(self.ind, emb))
return emb
8 changes: 4 additions & 4 deletions topoembedx/classes/higher_order_laplacian_eigenmaps.py
@@ -1,5 +1,5 @@
"""Higher Order Laplacian Eigenmaps."""
from typing import Literal, Union
from typing import Literal

import networkx as nx
import numpy as np
@@ -76,9 +76,9 @@ def fit(

g = nx.from_numpy_matrix(self.A)

super(HigherOrderLaplacianEigenmaps, self).fit(g)
super().fit(g)

def get_embedding(self, get_dict: bool = False) -> Union[dict, np.ndarray]:
def get_embedding(self, get_dict: bool = False) -> dict | np.ndarray:
"""Get embeddings.
Parameters
@@ -91,7 +91,7 @@ def get_embedding(self, get_dict: bool = False) -> Union[dict, np.ndarray]:
dict or np.ndarray
The embedding of the complex.
"""
emb = super(HigherOrderLaplacianEigenmaps, self).get_embedding()
emb = super().get_embedding()
if get_dict:
return dict(zip(self.ind, emb))
return emb
8 changes: 4 additions & 4 deletions topoembedx/classes/hoglee.py
@@ -1,5 +1,5 @@
"""Higher Order Geometric Laplacian EigenMaps (HOGLEE) class."""
from typing import Literal, Union
from typing import Literal

import networkx as nx
import numpy as np
@@ -65,9 +65,9 @@ def fit(

g = nx.from_numpy_matrix(self.A)

super(HOGLEE, self).fit(g)
super().fit(g)

def get_embedding(self, get_dict: bool = False) -> Union[dict, np.ndarray]:
def get_embedding(self, get_dict: bool = False) -> dict | np.ndarray:
"""Get embedding.
Parameters
@@ -80,7 +80,7 @@ def get_embedding(self, get_dict: bool = False) -> Union[dict, np.ndarray]:
dict or numpy.ndarray
Embedding.
"""
emb = super(HOGLEE, self).get_embedding()
emb = super().get_embedding()
if get_dict:
return dict(zip(self.ind, emb))
return emb
29 changes: 14 additions & 15 deletions topoembedx/classes/hope.py
@@ -1,5 +1,5 @@
"""Higher Order Laplacian Positional Encoder (HOPE) class."""
from typing import Literal, Union, overload
from typing import Literal, overload

import numpy as np
from scipy import sparse
@@ -41,7 +41,7 @@ def _laplacian_pe(
@staticmethod
def _laplacian_pe(
A: np.ndarray, n_eigvecs: int, return_eigenval: bool = False
) -> Union[np.ndarray, tuple[np.ndarray, np.ndarray]]:
) -> np.ndarray | tuple[np.ndarray, np.ndarray]:
"""Compute Laplacian Positional Encodings (PE) for a given adjacency matrix.
Parameters
@@ -148,29 +148,28 @@ def fit(
>>> import toponetx as tnx
>>> from topoembedx import HOPE
>>> ccc = tnx.classes.CombinatorialComplex()
>>> ccc.add_cell([2,5],rank=1)
>>> ccc.add_cell([2,4],rank=1)
>>> ccc.add_cell([7,8],rank=1)
>>> ccc.add_cell([6,8],rank=1)
>>> ccc.add_cell([2,4,5],rank=3)
>>> ccc.add_cell([6,7,8],rank=3)
>>> ccc.add_cell([2, 5], rank=1)
>>> ccc.add_cell([2, 4], rank=1)
>>> ccc.add_cell([7, 8], rank=1)
>>> ccc.add_cell([6, 8], rank=1)
>>> ccc.add_cell([2, 4, 5], rank=3)
>>> ccc.add_cell([6, 7, 8], rank=3)
>>> model = HOPE()
>>> model.fit(ccc, neighborhood_type="adj", neighborhood_dim={"rank": 0, "via_rank" :3})
>>> model.fit(
... ccc,
... neighborhood_type="adj",
... neighborhood_dim={"rank": 0, "via_rank": 3},
... )
>>> em = model.get_embedding(get_dict=True)
Returns
-------
None
"""
self.ind, self.A = neighborhood_from_complex(
complex, neighborhood_type, neighborhood_dim
)

self._embedding = self._laplacian_pe(self.A, self.dimensions)

def get_embedding(self, get_dict: bool = False) -> Union[dict, np.ndarray]:
def get_embedding(self, get_dict: bool = False) -> dict | np.ndarray:
"""Get embedding.
Parameters
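
Background note (not part of the diff): the body of `_laplacian_pe` is not shown in this hunk. A Laplacian positional encoding of this shape is commonly built from the leading non-trivial eigenvectors of a (normalized) graph Laplacian of the adjacency matrix. A rough, self-contained sketch of that general technique, not the repository's implementation:

```python
import numpy as np
from scipy import sparse


def laplacian_pe_sketch(A: np.ndarray, n_eigvecs: int) -> np.ndarray:
    """Illustrative Laplacian PE: eigenvectors of the symmetric normalized Laplacian."""
    A = sparse.csr_matrix(A)
    deg = np.asarray(A.sum(axis=1)).ravel()
    d_inv_sqrt = sparse.diags(1.0 / np.sqrt(np.maximum(deg, 1e-12)))
    L = sparse.eye(A.shape[0]) - d_inv_sqrt @ A @ d_inv_sqrt
    eigval, eigvec = np.linalg.eigh(L.toarray())  # eigenvalues in ascending order
    return eigvec[:, 1 : n_eigvecs + 1]  # skip the trivial (constant) eigenvector
```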
(Diffs for the remaining changed files are not shown in this view.)