Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

style: update ruff version + add more rules #380

Merged
merged 4 commits into from
Jul 16, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@ repos:
- id: detect-aws-credentials
args: [ --allow-missing-credentials ]
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.2.0 # ruff version
rev: v0.5.0 # ruff version
hooks:
- id: ruff-format
- id: ruff
Expand Down
2 changes: 1 addition & 1 deletion Pipfile
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,6 @@ pytest-cov = "*"
pytest-asyncio = "*"
mock = "*"
pre-commit = ">=3.7.1"
ruff = "==0.2.0"
ruff = "==0.5.0"
ipykernel = "*"
jupyterlab = "*"
24 changes: 18 additions & 6 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -43,7 +43,7 @@ dynamic = ["version"]

[project.optional-dependencies]
tests = ["pytest", "pytest-cov", "mock", "pytest-asyncio"]
dev = ["pre-commit>=3.7.1", "ruff==0.2.0"]
dev = ["pre-commit>=3.7.1", "ruff==0.5.0"]
notebooks = ["ipykernel", "jupyterlab"]
docs = [
"sphinx==6.1.3",
Expand Down Expand Up @@ -104,16 +104,22 @@ select = [
"DTZ", # https://docs.astral.sh/ruff/rules/#flake8-datetimez-dtz
"T10", # https://docs.astral.sh/ruff/rules/#flake8-debugger-t10
"EM", # https://docs.astral.sh/ruff/rules/#flake8-errmsg-em
"LOG", # https://docs.astral.sh/ruff/rules/#flake8-logging-log
"G", # https://docs.astral.sh/ruff/rules/#flake8-logging-format-g
"INP", # https://docs.astral.sh/ruff/rules/#flake8-no-pep420-inp
"PIE", # https://docs.astral.sh/ruff/rules/#flake8-pie-pie
"T20", # https://docs.astral.sh/ruff/rules/#flake8-print-t20
"PT", # https://docs.astral.sh/ruff/rules/#flake8-pytest-style-pt
"Q", # https://docs.astral.sh/ruff/rules/#flake8-quotes-q
"RSE", # https://docs.astral.sh/ruff/rules/#flake8-raise-rse
"RET", # https://docs.astral.sh/ruff/rules/#flake8-return-ret
"SLF", # https://docs.astral.sh/ruff/rules/#flake8-self-slf
"SIM", # https://docs.astral.sh/ruff/rules/#flake8-simplify-sim
"ARG", # https://docs.astral.sh/ruff/rules/#flake8-unused-arguments-arg
"PTH", # https://docs.astral.sh/ruff/rules/#flake8-use-pathlib-pth
"PGH", # https://docs.astral.sh/ruff/rules/#pygrep-hooks-pgh
"PERF", # https://docs.astral.sh/ruff/rules/#perflint-perf
"FURB", # https://docs.astral.sh/ruff/rules/#refurb-furb
"RUF", # https://docs.astral.sh/ruff/rules/#ruff-specific-rules-ruf
]
fixable = [
Expand All @@ -124,15 +130,19 @@ fixable = [
"ANN",
"B",
"C4",
"LOG",
"G",
"PIE",
"PT",
"RSE",
"SIM",
"PERF",
"FURB",
"RUF"
]
# ANN101 - missing-type-self
# ANN003 - missing-type-kwargs
# ANN101 - missing-type-self
# ANN102 - missing-type-cls
# D203 - one-blank-line-before-class
# D205 - blank-line-after-summary
# D206 - indent-with-spaces*
Expand All @@ -148,7 +158,7 @@ fixable = [
# S321 - suspicious-ftp-lib-usage
# *ignored for compatibility with formatter
ignore = [
"ANN101", "ANN003",
"ANN003", "ANN101", "ANN102",
"D203", "D205", "D206", "D213", "D300", "D400", "D415",
"E111", "E114", "E117", "E501",
"W191",
Expand All @@ -160,7 +170,9 @@ ignore = [
# ANN2 - missing-return-type
# ANN102 - missing-type-cls
# S101 - assert
# B011 - assert-false
# N815 -
"tests/*" = ["ANN001", "ANN2", "ANN102", "S101", "B011"]
# INP001 - implicit-namespace-package
# ARG001 - unused-function-argument
# SLF001 - private-member-access
# N815 - mixed-case-variable-in-class-scope
"tests/*" = ["ANN001", "ANN2", "ANN102", "S101", "INP001", "SLF001", "ARG001"]
"src/metakb/schemas/*" = ["ANN102", "N815"]
1 change: 1 addition & 0 deletions src/metakb/__init__.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
"""The MetaKB package."""

from importlib.metadata import PackageNotFoundError, version
from pathlib import Path

Expand Down
1 change: 1 addition & 0 deletions src/metakb/cli.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
"""Provide CLI utility for performing data collection, transformation, and upload
to graph datastore.
"""

import datetime
import logging
import re
Expand Down
1 change: 1 addition & 0 deletions src/metakb/database.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
"""Acquire connection to Neo4j graph database."""

import ast
import logging
from os import environ
Expand Down
1 change: 1 addition & 0 deletions src/metakb/harvesters/base.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
"""A module for the Harvester base class"""

import datetime
import json
import logging
Expand Down
1 change: 1 addition & 0 deletions src/metakb/harvesters/civic.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
"""A module for the CIViC harvester."""

import logging
from typing import Any

Expand Down
1 change: 1 addition & 0 deletions src/metakb/harvesters/moa.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
"""A module for the Molecular Oncology Almanac harvester"""

import logging

import requests
Expand Down
8 changes: 3 additions & 5 deletions src/metakb/load_data.py
Original file line number Diff line number Diff line change
@@ -1,12 +1,12 @@
"""Provide methods for loading data into the database."""

import json
import logging
from pathlib import Path

from neo4j import Driver, ManagedTransaction

from metakb.database import get_driver
from metakb.schemas.app import SourceName

_logger = logging.getLogger(__name__)

Expand Down Expand Up @@ -456,13 +456,12 @@ def _add_study(tx: ManagedTransaction, study_in: dict) -> None:
tx.run(query, **study)


def add_transformed_data(driver: Driver, data: dict, src_name: SourceName) -> None:
def add_transformed_data(driver: Driver, data: dict) -> None:
"""Add set of data formatted per Common Data Model to DB.

:param data: contains key/value pairs for data objects to add to DB, including
studies, variation, therapeutic procedures, conditions, genes, methods,
documents, etc.
:param src_name: Name of source for `data`
"""
# Used to keep track of IDs that are in studies. This is used to prevent adding
# nodes that aren't associated to studies
Expand Down Expand Up @@ -508,5 +507,4 @@ def load_from_json(src_transformed_cdm: Path, driver: Driver | None = None) -> N
driver = get_driver()
with src_transformed_cdm.open() as f:
items = json.load(f)
src_name = SourceName(str(src_transformed_cdm).split("/")[-1].split("_cdm")[0])
add_transformed_data(driver, items, src_name)
add_transformed_data(driver, items)
1 change: 1 addition & 0 deletions src/metakb/log_handle.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@
a way to manage logs in our own production environments, so the entry points that we
define in the library make use of methods here to set some of our preferred baselines.
"""

import logging
import os

Expand Down
3 changes: 2 additions & 1 deletion src/metakb/main.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
"""Main application for FastAPI."""

from collections.abc import AsyncGenerator
from contextlib import asynccontextmanager
from typing import Annotated
Expand All @@ -14,7 +15,7 @@


@asynccontextmanager
async def lifespan(app: FastAPI) -> AsyncGenerator:
async def lifespan(app: FastAPI) -> AsyncGenerator: # noqa: ARG001
"""Configure FastAPI instance lifespan.

:param app: FastAPI app instance
Expand Down
1 change: 1 addition & 0 deletions src/metakb/normalizers.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
"""Handle construction of and relay requests to VICC normalizer services."""

import logging
import os
from collections.abc import Iterable
Expand Down
9 changes: 5 additions & 4 deletions src/metakb/query.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
"""Provide class/methods/schemas for issuing queries against the database."""

import json
import logging
from copy import copy
Expand Down Expand Up @@ -521,10 +522,10 @@ def _get_variations(self, cv_id: str, relation: VariationRelation) -> list[dict]
v_params[variation_k] = json.loads(variation_v)
elif variation_k.startswith("expression_hgvs_"):
syntax = variation_k.split("expression_")[-1].replace("_", ".")
for hgvs_expr in variation_v:
expressions.append(
models.Expression(syntax=syntax, value=hgvs_expr)
)
expressions.extend(
models.Expression(syntax=syntax, value=hgvs_expr)
for hgvs_expr in variation_v
)

v_params["expressions"] = expressions or None
loc_params = r_params["loc"]
Expand Down
11 changes: 6 additions & 5 deletions src/metakb/schemas/annotation.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
"""Module containing GK pilot annotation definitions"""

import datetime
from enum import Enum
from typing import Literal
Expand All @@ -23,7 +24,7 @@ class Direction(str, Enum):
NONE = "none"


class Document(core_models._MappableEntity):
class Document(core_models._MappableEntity): # noqa: SLF001
"""a representation of a physical or digital document"""

type: Literal["Document"] = "Document"
Expand All @@ -41,7 +42,7 @@ class Document(core_models._MappableEntity):
)


class Method(core_models._Entity):
class Method(core_models._Entity): # noqa: SLF001
"""A set of instructions that specify how to achieve some objective (e.g.
experimental protocols, curation guidelines, rule sets, etc.)
"""
Expand All @@ -56,7 +57,7 @@ class Method(core_models._Entity):
)


class Agent(core_models._Entity):
class Agent(core_models._Entity): # noqa: SLF001
"""An autonomous actor (person, organization, or computational agent) that bears
some form of responsibility for an activity taking place, for the existence of an
entity, or for another agent's activity.
Expand All @@ -67,7 +68,7 @@ class Agent(core_models._Entity):
subtype: AgentSubtype | None = None


class Contribution(core_models._Entity):
class Contribution(core_models._Entity): # noqa: SLF001
"""The sum of all actions taken by a single agent in contributing to the creation,
modification, assessment, or deprecation of a particular entity (e.g. a Statement,
EvidenceLine, DataItem, Publication, etc.)
Expand Down Expand Up @@ -98,7 +99,7 @@ def date_format(cls, v: str | None) -> str | None:
return v


class _InformationEntity(core_models._Entity):
class _InformationEntity(core_models._Entity): # noqa: SLF001
"""InformationEntities are abstract (non-physical) entities that are about something
(i.e. they carry information about things in the real world).
"""
Expand Down
5 changes: 3 additions & 2 deletions src/metakb/schemas/api.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
"""Create schemas for API"""

from typing import Literal

from pydantic import BaseModel, ConfigDict, StrictStr
Expand All @@ -12,9 +13,9 @@ class ServiceMeta(BaseModel):

name: Literal["metakb"] = "metakb"
version: StrictStr = __version__
url: Literal[
url: Literal["https://github.com/cancervariants/metakb"] = (
"https://github.com/cancervariants/metakb"
] = "https://github.com/cancervariants/metakb"
)

model_config = ConfigDict(
json_schema_extra={
Expand Down
1 change: 1 addition & 0 deletions src/metakb/schemas/app.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
"""Module containing app schemas and enums"""

from enum import Enum


Expand Down
10 changes: 8 additions & 2 deletions src/metakb/schemas/categorical_variation.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
See the `CatVar page <https://www.ga4gh.org/product/categorical-variation-catvar/>`_ on
the GA4GH website for more information.
"""

from enum import Enum
from typing import Literal

Expand All @@ -27,7 +28,7 @@ class LocationMatchCharacteristic(str, Enum):
SUPERINTERVAL = "superinterval"


class _CategoricalVariationBase(core_models._DomainEntity):
class _CategoricalVariationBase(core_models._DomainEntity): # noqa: SLF001
"""Base class for Categorical Variation"""

members: list[models.Variation | core_models.IRI] | None = Field(
Expand Down Expand Up @@ -130,7 +131,12 @@ class CategoricalVariation(RootModel):
individual contextual variation instances may be members of the domain.
"""

root: CanonicalAllele | CategoricalCnv | DescribedVariation | ProteinSequenceConsequence = Field(
root: (
CanonicalAllele
| CategoricalCnv
| DescribedVariation
| ProteinSequenceConsequence
) = Field(
...,
json_schema_extra={
"description": "A representation of a categorically-defined domain for variation, in which individual contextual variation instances may be members of the domain.",
Expand Down
1 change: 1 addition & 0 deletions src/metakb/schemas/variation_statement.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
"""Module containing variant statement definitions"""

from enum import Enum
from typing import Literal

Expand Down
1 change: 1 addition & 0 deletions src/metakb/transform/__init__.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
"""Transformations for sources."""

from .civic import CivicTransform # noqa: F401
from .moa import MoaTransform # noqa: F401
1 change: 1 addition & 0 deletions src/metakb/transform/base.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
"""A module for the Transform base class."""

import datetime
import json
import logging
Expand Down
34 changes: 17 additions & 17 deletions src/metakb/transform/civic.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
"""A module to transform CIViC."""

import logging
import re
from enum import Enum
Expand Down Expand Up @@ -558,15 +559,14 @@ async def _add_variations(self, variants: list[dict]) -> None:
members = await self._get_variation_members(variant)

# Get variant types
variant_types_value = []
for vt in variant["variant_types"]:
variant_types_value.append(
Coding(
code=vt["so_id"],
system=f"{vt['url'].rsplit('/', 1)[0]}/",
label="_".join(vt["name"].lower().split()),
)
variant_types_value = [
Coding(
code=vt["so_id"],
system=f"{vt['url'].rsplit('/', 1)[0]}/",
label="_".join(vt["name"].lower().split()),
)
for vt in variant["variant_types"]
]

# Get mappings
mappings = [
Expand All @@ -590,16 +590,16 @@ async def _add_variations(self, variants: list[dict]) -> None:
)
)

for ce in variant["clinvar_entries"]:
mappings.append(
Mapping(
coding=Coding(
code=ce,
system="https://www.ncbi.nlm.nih.gov/clinvar/variation/",
),
relation=Relation.RELATED_MATCH,
)
mappings.extend(
Mapping(
coding=Coding(
code=ce,
system="https://www.ncbi.nlm.nih.gov/clinvar/variation/",
),
relation=Relation.RELATED_MATCH,
)
for ce in variant["clinvar_entries"]
)

aliases = []
for a in variant["variant_aliases"]:
Expand Down
Loading
Loading