Migrates developer tooling to use ruff (#1982)
* Configure ruff

Signed-off-by: Thomas J. Fan <[email protected]>

* DOC Update docs

Signed-off-by: Thomas J. Fan <[email protected]>

* Run make fmt

Signed-off-by: Thomas J. Fan <[email protected]>

* DOC Correct grammar in docs

Signed-off-by: Thomas J. Fan <[email protected]>

* Move mypy ignore to correct place

Signed-off-by: Thomas J. Fan <[email protected]>

---------

Signed-off-by: Thomas J. Fan <[email protected]>
thomasjpfan authored Nov 21, 2023
1 parent 79d4fa2 commit 684beed
Showing 45 changed files with 103 additions and 114 deletions.
20 changes: 8 additions & 12 deletions .pre-commit-config.yaml
@@ -1,17 +1,13 @@
 repos:
-  - repo: https://github.com/PyCQA/flake8
-    rev: 4.0.1
+  - repo: https://github.com/astral-sh/ruff-pre-commit
+    # Ruff version.
+    rev: v0.1.6
     hooks:
-      - id: flake8
-  - repo: https://github.com/psf/black
-    rev: 22.3.0
-    hooks:
-      - id: black
-  - repo: https://github.com/PyCQA/isort
-    rev: 5.12.0
-    hooks:
-      - id: isort
-        args: ["--profile", "black"]
+      # Run the linter.
+      - id: ruff
+        args: [--fix]
+      # Run the formatter.
+      - id: ruff-format
   - repo: https://github.com/pre-commit/pre-commit-hooks
     rev: v4.2.0
     hooks:
7 changes: 3 additions & 4 deletions Makefile
@@ -27,10 +27,9 @@ setup: install-piptools ## Install requirements
 	pip install -r dev-requirements.in

 .PHONY: fmt
-fmt: ## Format code with black and isort
-	autoflake --remove-all-unused-imports --ignore-init-module-imports --ignore-pass-after-docstring --in-place -r flytekit plugins tests
-	pre-commit run black --all-files || true
-	pre-commit run isort --all-files || true
+fmt:
+	pre-commit run ruff --all-files || true
+	pre-commit run ruff-format --all-files || true

 .PHONY: lint
 lint: ## Run linters
6 changes: 3 additions & 3 deletions docs/source/contributing.rst
@@ -82,7 +82,7 @@ Some helpful make commands ::

   $ make
     setup        Install requirements
-    fmt          Format code with black and isort
+    fmt          Format code with ruff
     lint         Run linters
     test         Run tests
     requirements Compile requirements
@@ -124,15 +124,15 @@ Pre-commit hooks
 ================

 We use `pre-commit <https://pre-commit.com/>`__ to automate linting and code formatting on every commit.
-Configured hooks include `black <https://github.com/psf/black>`__, `isort <https://github.com/PyCQA/isort>`__, and `flake8 <https://github.com/PyCQA/flake8>`__ and also linters to check for the validity of YAML files and ensuring that newlines are added to the end of files.
+Configured hooks include `ruff <https://github.com/astral-sh/ruff>`__ and also linters to check for the validity of YAML files and ensuring that newlines are added to the end of files.

 We run all those hooks in CI, but if you want to run them locally on every commit, run `pre-commit install` after installing the dev environment requirements. In case you want to disable `pre-commit` hooks locally, for example, while you're iterating on some feature, run `pre-commit uninstall`. More info in https://pre-commit.com/.


 Formatting
 ==========

-We use `black <https://github.com/psf/black>`__ and `isort <https://github.com/PyCQA/isort>`__ to autoformat code. In fact, they have been configured as git hooks in `pre-commit`. Run the following commands to execute the formatters. ::
+We use `ruff <https://github.com/astral-sh/ruff>`__ to autoformat code. In fact, they have been configured as git hooks in `pre-commit`. Run the following commands to execute the formatters. ::

   source ~/.virtualenvs/flytekit/bin/activate
   make fmt
1 change: 0 additions & 1 deletion flytekit/clients/auth/authenticator.py
@@ -116,7 +116,6 @@ def __init__(

     def _initialize_auth_client(self):
         if not self._auth_client:
-
             from .auth_client import _create_code_challenge, _generate_code_verifier

             code_verifier = _generate_code_verifier()
2 changes: 1 addition & 1 deletion flytekit/clis/helpers.py
@@ -29,7 +29,7 @@ def str2bool(str):
     :param Text str:
     :rtype: bool
     """
-    return not str.lower() in ["false", "0", "off", "no"]
+    return str.lower() not in ["false", "0", "off", "no"]


 # TODO Deprecated delete after deleting flyte_cli register
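
The change above is pycodestyle's E713 pattern ("test for membership should be `not in`"), one of the checks ruff's linter applies and can usually auto-fix with `--fix`. A minimal sketch of why the two spellings are interchangeable, using the same falsy values as `str2bool`:

    # E713: `not x in y` and `x not in y` are equivalent; the second reads as one operator.
    falsy_values = ["false", "0", "off", "no"]

    assert (not "OFF".lower() in falsy_values) == ("OFF".lower() not in falsy_values)
    assert "yes".lower() not in falsy_values  # anything outside the list makes str2bool return True
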
1 change: 0 additions & 1 deletion flytekit/clis/sdk_in_container/build.py
@@ -14,7 +14,6 @@

 @dataclass
 class BuildParams(RunLevelParams):
-
     fast: bool = make_field(
         click.Option(
             param_decls=["--fast"],
2 changes: 0 additions & 2 deletions flytekit/clis/sdk_in_container/serialize.py
@@ -159,7 +159,6 @@ def serialize(
 @click.option("-f", "--folder", type=click.Path(exists=True))
 @click.pass_context
 def workflows(ctx, folder=None):
-
     if folder:
         click.echo(f"Writing output to {folder}")

@@ -194,7 +193,6 @@ def fast(ctx):
 @click.option("-f", "--folder", type=click.Path(exists=True))
 @click.pass_context
 def fast_workflows(ctx, folder=None, deref_symlinks=False):
-
     if folder:
         click.echo(f"Writing output to {folder}")

2 changes: 1 addition & 1 deletion flytekit/configuration/file.py
@@ -99,7 +99,7 @@ def read_from_file(

 def bool_transformer(config_val: typing.Any) -> bool:
     if type(config_val) is str:
-        return True if config_val and not config_val.lower() in ["false", "0", "off", "no"] else False
+        return True if config_val and config_val.lower() not in ["false", "0", "off", "no"] else False
     else:
         return config_val

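
Same `not in` cleanup as in flytekit/clis/helpers.py; the behavior of `bool_transformer` is unchanged. A quick illustration of what it returns, assuming it stays importable from `flytekit.configuration.file`:

    from flytekit.configuration.file import bool_transformer

    assert bool_transformer("No") is False    # falsy strings are matched case-insensitively
    assert bool_transformer("") is False      # the empty string is also falsy
    assert bool_transformer("1") is True
    assert bool_transformer(True) is True     # non-strings pass through unchanged
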
1 change: 0 additions & 1 deletion flytekit/core/checkpointer.py
@@ -90,7 +90,6 @@ def prev_exists(self) -> bool:
         return self._checkpoint_src is not None

     def restore(self, path: typing.Optional[typing.Union[Path, str]] = None) -> typing.Optional[Path]:
-
         # We have to lazy load, until we fix the imports
         from flytekit.core.context_manager import FlyteContextManager

1 change: 0 additions & 1 deletion flytekit/core/notification.py
@@ -23,7 +23,6 @@

 # Duplicates flytekit.common.notifications.Notification to avoid using the ExtendedSdkType metaclass.
 class Notification(_common_model.Notification):
-
     VALID_PHASES = {
         _execution_model.WorkflowExecutionPhase.ABORTED,
         _execution_model.WorkflowExecutionPhase.FAILED,
1 change: 0 additions & 1 deletion flytekit/core/tracker.py
@@ -148,7 +148,6 @@ def find_lhs(self) -> str:
         # Try to find object in module when the tracked instance is defined in the __main__ module.
         # This section tries to find the matching object in the module when the module is loaded from the __file__.
         if self._module_file is not None:
-
             # Since the module loaded from the file is different from the original module that defined self, we need
             # to match by variable name and type.
             module = import_module_from_file(self._instantiated_in, self._module_file)
19 changes: 15 additions & 4 deletions flytekit/core/type_engine.py
@@ -1176,7 +1176,10 @@ def to_literal(self, ctx: FlyteContext, python_val: T, python_type: Type[T], exp
                         batch_size = annotation.val
                         break
             if batch_size > 0:
-                lit_list = [TypeEngine.to_literal(ctx, python_val[i : i + batch_size], FlytePickle, expected.collection_type) for i in range(0, len(python_val), batch_size)]  # type: ignore
+                lit_list = [
+                    TypeEngine.to_literal(ctx, python_val[i : i + batch_size], FlytePickle, expected.collection_type)
+                    for i in range(0, len(python_val), batch_size)
+                ]  # type: ignore
             else:
                 lit_list = []
         else:
@@ -1695,7 +1698,9 @@ def generate_attribute_list_from_dataclass_json(schema: dict, schema_name: typin
     return attribute_list


-def convert_marshmallow_json_schema_to_python_class(schema: dict, schema_name: typing.Any) -> Type[dataclasses.dataclass()]:  # type: ignore
+def convert_marshmallow_json_schema_to_python_class(
+    schema: dict, schema_name: typing.Any
+) -> Type[dataclasses.dataclass()]:  # type: ignore
     """
     Generate a model class based on the provided JSON Schema
     :param schema: dict representing valid JSON schema
@@ -1706,7 +1711,9 @@ def convert_marshmallow_json_schema_to_python_class(schema: dict, schema_name: t
     return dataclass_json(dataclasses.make_dataclass(schema_name, attribute_list))


-def convert_mashumaro_json_schema_to_python_class(schema: dict, schema_name: typing.Any) -> Type[dataclasses.dataclass()]:  # type: ignore
+def convert_mashumaro_json_schema_to_python_class(
+    schema: dict, schema_name: typing.Any
+) -> Type[dataclasses.dataclass()]:  # type: ignore
     """
     Generate a model class based on the provided JSON Schema
     :param schema: dict representing valid JSON schema
@@ -1718,7 +1725,11 @@ def convert_mashumaro_json_schema_to_python_class(schema: dict, schema_name: typ


 def _get_element_type(element_property: typing.Dict[str, str]) -> Type:
-    element_type = [e_property["type"] for e_property in element_property["anyOf"]] if element_property.get("anyOf") else element_property["type"]  # type: ignore
+    element_type = (
+        [e_property["type"] for e_property in element_property["anyOf"]]  # type: ignore
+        if element_property.get("anyOf")
+        else element_property["type"]
+    )
     element_format = element_property["format"] if "format" in element_property else None

     if type(element_type) == list:
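
The reflowed comprehension in the first hunk is the usual fixed-size batching idiom: step through the list in `batch_size` increments and slice one chunk per step. Stripped of the Flyte-specific calls, the chunking logic looks like this:

    python_val = list(range(7))
    batch_size = 3

    batches = [python_val[i : i + batch_size] for i in range(0, len(python_val), batch_size)]
    assert batches == [[0, 1, 2], [3, 4, 5], [6]]  # the final batch may be shorter
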
1 change: 0 additions & 1 deletion flytekit/exceptions/user.py
@@ -45,7 +45,6 @@ def __init__(self, received_type, expected_type, additional_msg=None, received_v


 class FlyteValueException(FlyteUserException, ValueError):
-
     _ERROR_CODE = "USER:ValueError"

     @classmethod
1 change: 0 additions & 1 deletion flytekit/interfaces/cli_identifiers.py
@@ -3,7 +3,6 @@


 class Identifier(_core_identifier.Identifier):
-
     _STRING_TO_TYPE_MAP = {
         "lp": _core_identifier.ResourceType.LAUNCH_PLAN,
         "wf": _core_identifier.ResourceType.WORKFLOW,
1 change: 0 additions & 1 deletion flytekit/models/filters.py
@@ -21,7 +21,6 @@ def from_flyte_idl(cls):


 class Filter(_FlyteIdlEntity):
-
     _comparator = "nil"

     def __init__(self, key, value):
1 change: 0 additions & 1 deletion flytekit/remote/remote_callable.py
@@ -10,7 +10,6 @@

 class RemoteEntity(ABC):
     def __init__(self, *args, **kwargs):
-
         # In cases where we make a FlyteTask/Workflow/LaunchPlan from a locally created Python object (i.e. an @task
         # or an @workflow decorated function), we actually have the Python interface, so
         self._python_interface: Optional[Dict[str, Type]] = None
1 change: 0 additions & 1 deletion flytekit/tools/fast_registration.py
@@ -59,7 +59,6 @@ def compute_digest(source: os.PathLike, filter: Optional[callable] = None) -> st
     """
     hasher = hashlib.md5()
     for root, _, files in os.walk(source, topdown=True):
-
         files.sort()

         for fname in files:
2 changes: 0 additions & 2 deletions flytekit/types/directory/types.py
@@ -320,7 +320,6 @@ def to_literal(
         python_type: typing.Type[FlyteDirectory],
         expected: LiteralType,
     ) -> Literal:
-
         remote_directory = None
         should_upload = True
         batch_size = get_batch_size(python_type)
@@ -371,7 +370,6 @@ def to_literal(
     def to_python_value(
         self, ctx: FlyteContext, lv: Literal, expected_python_type: typing.Type[FlyteDirectory]
     ) -> FlyteDirectory:
-
         uri = lv.scalar.blob.uri

         # This is a local file path, like /usr/local/my_dir, don't mess with it. Certainly, downloading it doesn't
2 changes: 1 addition & 1 deletion flytekit/types/file/__init__.py
@@ -47,7 +47,7 @@ def __repr__(self):

     @staticmethod
     def check_and_convert_to_str(item: typing.Union[typing.Type, str]) -> str:
-        if not get_origin(item) is Annotated:
+        if get_origin(item) is not Annotated:
             return str(item)
         if get_args(item)[0] == str:
             return str(get_args(item)[1])
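
This is the identity-test counterpart of the earlier membership fix: pycodestyle's E714 prefers `x is not y` over `not x is y`, and ruff applies the same rewrite. For context, the check distinguishes `Annotated` wrappers from bare types; a rough sketch, assuming Python 3.9+ (or `typing_extensions`) for these helpers:

    from typing import Annotated, get_args, get_origin

    FileAlias = Annotated[str, "csv"]           # illustrative only, not flytekit's real alias

    assert get_origin(FileAlias) is Annotated   # wrapped types report Annotated as their origin
    assert get_args(FileAlias) == (str, "csv")  # (base type, metadata)
    assert get_origin(str) is None              # bare types short-circuit to str(item)
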
4 changes: 1 addition & 3 deletions flytekit/types/file/file.py
@@ -29,9 +29,7 @@ def noop():

 @dataclass
 class FlyteFile(os.PathLike, typing.Generic[T], DataClassJSONMixin):
-    path: typing.Union[str, os.PathLike] = field(
-        default=None, metadata=config(mm_field=fields.String())
-    )  # type: ignore
+    path: typing.Union[str, os.PathLike] = field(default=None, metadata=config(mm_field=fields.String()))  # type: ignore
     """
     Since there is no native Python implementation of files and directories for the Flyte Blob type, (like how int
     exists for Flyte's Integer type) we need to create one so that users can express that their tasks take
1 change: 0 additions & 1 deletion flytekit/types/file/image.py
@@ -33,7 +33,6 @@ def get_literal_type(self, t: Type[T]) -> LiteralType:
     def to_literal(
         self, ctx: FlyteContext, python_val: PIL.Image.Image, python_type: Type[T], expected: LiteralType
     ) -> Literal:
-
         meta = BlobMetadata(
             type=_core_types.BlobType(
                 format=self.FILE_FORMAT, dimensionality=_core_types.BlobType.BlobDimensionality.SINGLE
1 change: 0 additions & 1 deletion flytekit/types/structured/__init__.py
@@ -25,7 +25,6 @@


 def register_csv_handlers():
-
     from .basic_dfs import CSVToPandasDecodingHandler, PandasToCSVEncodingHandler

     StructuredDatasetTransformerEngine.register(PandasToCSVEncodingHandler(), default_format_for_type=True)
@@ -46,7 +46,7 @@ def __init__(
         name: str,
         task_config: HPOJob,
         training_task: Union[SagemakerCustomTrainingTask, SagemakerBuiltinAlgorithmsTask],
-        **kwargs
+        **kwargs,
     ):
         if training_task is None or not (
             isinstance(training_task, SagemakerCustomTrainingTask)
@@ -72,7 +72,7 @@ def __init__(
             name=name,
             interface=updated_iface,
             task_config=task_config,
-            **kwargs
+            **kwargs,
         )

     def execute(self, **kwargs) -> Any:
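
The only change in the two hunks above is a trailing comma after `**kwargs`, once in a signature and once in a call. Like black, ruff's formatter keeps a trailing comma on the last element of a signature or call that stays split across multiple lines, so later additions to the argument list show up as one-line diffs. A small, hypothetical example:

    def configure(
        name: str,
        task_config: dict,
        **kwargs,
    ):
        # Adding another parameter later only touches the new line, not the `**kwargs` line.
        return {"name": name, **task_config, **kwargs}
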
@@ -45,15 +45,13 @@ def metric_name(self) -> str:
         return self._metric_name

     def to_flyte_idl(self) -> _pb2_hpo_job.HyperparameterTuningObjective:
-
         return _pb2_hpo_job.HyperparameterTuningObjective(
             objective_type=self.objective_type,
             metric_name=self._metric_name,
         )

     @classmethod
     def from_flyte_idl(cls, pb2_object: _pb2_hpo_job.HyperparameterTuningObjective):
-
         return cls(
             objective_type=pb2_object.objective_type,
             metric_name=pb2_object.metric_name,
@@ -115,7 +113,6 @@ def training_job_early_stopping_type(self) -> int:
         return self._training_job_early_stopping_type

     def to_flyte_idl(self) -> _pb2_hpo_job.HyperparameterTuningJobConfig:
-
         return _pb2_hpo_job.HyperparameterTuningJobConfig(
             tuning_strategy=self._tuning_strategy,
             tuning_objective=self._tuning_objective.to_flyte_idl(),
@@ -124,7 +121,6 @@ def to_flyte_idl(self) -> _pb2_hpo_job.HyperparameterTuningJobConfig:

     @classmethod
     def from_flyte_idl(cls, pb2_object: _pb2_hpo_job.HyperparameterTuningJobConfig):
-
         return cls(
             tuning_strategy=pb2_object.tuning_strategy,
             tuning_objective=HyperparameterTuningObjective.from_flyte_idl(pb2_object.tuning_objective),
@@ -255,7 +255,6 @@ def metric_definitions(self) -> List[MetricDefinition]:
         return self._metric_definitions

     def to_flyte_idl(self) -> _training_job_pb2.AlgorithmSpecification:
-
         return _training_job_pb2.AlgorithmSpecification(
             input_mode=self.input_mode,
             algorithm_name=self.algorithm_name,
@@ -266,7 +265,6 @@ def to_flyte_idl(self) -> _training_job_pb2.AlgorithmSpecification:

     @classmethod
     def from_flyte_idl(cls, pb2_object: _training_job_pb2.AlgorithmSpecification):
-
         return cls(
             input_mode=pb2_object.input_mode,
             algorithm_name=pb2_object.algorithm_name,
1 change: 0 additions & 1 deletion plugins/flytekit-dolt/flytekitplugins/dolt/schema.py
@@ -48,7 +48,6 @@ def to_literal(
         python_type: typing.Type[DoltTable],
         expected: LiteralType,
     ) -> Literal:
-
         if not isinstance(python_val, DoltTable):
             raise AssertionError(f"Value cannot be converted to a table: {python_val}")

6 changes: 3 additions & 3 deletions plugins/flytekit-greatexpectations/tests/test_schema.py
@@ -110,7 +110,7 @@ def my_task(
                 },
             ),
         ),
-    ]
+    ],
 ) -> str:
     return directory

@@ -236,7 +236,7 @@ def my_task(
             batch_request_config=BatchRequestConfig(data_connector_query={"limit": 10}),
             local_file_path="/tmp/test3.parquet",  # noqa: F722
         ),
-    ]
+    ],
 ) -> int:
     return dataframe.open().all().shape[0]

@@ -261,7 +261,7 @@ def my_task(
             batch_request_config=BatchRequestConfig(data_connector_query={"limit": 10}),
             local_file_path="/tmp/test3.parquet",  # noqa: F722
         ),
-    ]
+    ],
 ) -> int:
     return dataframe.open().all().shape[0]

2 changes: 1 addition & 1 deletion plugins/flytekit-mmcloud/tests/test_mmcloud.py
@@ -86,7 +86,7 @@ def test_flyte_to_mmcloud_resources():
         ("2", "2Gi", "1", "1Gi"),
         ("2", "1Gi", "1", "2Gi"),
     }
-    for (req_cpu, req_mem, lim_cpu, lim_mem) in error_cases:
+    for req_cpu, req_mem, lim_cpu, lim_mem in error_cases:
         with pytest.raises(ValueError):
             flyte_to_mmcloud_resources(
                 requests=Resources(cpu=req_cpu, mem=req_mem),
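
The parentheses around the loop target were redundant; a `for` statement unpacks tuples with or without them, so the formatting pass drops them. A tiny check:

    error_cases = {("2", "2Gi", "1", "1Gi"), ("2", "1Gi", "1", "2Gi")}

    with_parens = sorted(cpu for (cpu, _, _, _) in error_cases)
    without_parens = sorted(cpu for cpu, _, _, _ in error_cases)
    assert with_parens == without_parens == ["2", "2"]
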