Format and lint with Ruff #897

Closed · wants to merge 3 commits
7 changes: 4 additions & 3 deletions Makefile
@@ -49,9 +49,10 @@ dev: has-poetry
lint:
./check-package.sh
poetry run mypy --config-file mypy.ini dlt tests
poetry run flake8 --max-line-length=200 dlt
poetry run flake8 --max-line-length=200 tests --exclude tests/reflection/module_cases
poetry run black dlt docs tests --diff --extend-exclude=".*syntax_error.py"
poetry run ruff lint dlt tests
# poetry run flake8 --max-line-length=200 dlt
# poetry run flake8 --max-line-length=200 tests --exclude tests/reflection/module_cases
# poetry run black dlt docs tests --diff --extend-exclude=".*syntax_error.py"
# poetry run isort ./ --diff
# $(MAKE) lint-security

9 changes: 4 additions & 5 deletions dlt/cli/deploy_command.py
@@ -134,9 +134,9 @@ def _create_new_workflow(self) -> Any:
) as f:
workflow = yaml.safe_load(f)
# customize the workflow
workflow["name"] = (
f"Run {self.state['pipeline_name']} pipeline from {self.pipeline_script_path}"
)
workflow[
"name"
] = f"Run {self.state['pipeline_name']} pipeline from {self.pipeline_script_path}"
if self.run_on_push is False:
del workflow["on"]["push"]
if self.run_manually is False:
@@ -196,8 +196,7 @@ def _echo_instructions(self, *args: Optional[Any]) -> None:
fmt.echo(
"* The dependencies that will be used to run the pipeline are stored in %s. If you"
" change add more dependencies, remember to refresh your deployment by running the same"
" 'deploy' command again."
% fmt.bold(self.artifacts["requirements_txt_name"])
" 'deploy' command again." % fmt.bold(self.artifacts["requirements_txt_name"])
)
fmt.echo()
if len(self.secret_envs) == 0 and len(self.envs) == 0:
6 changes: 4 additions & 2 deletions dlt/common/configuration/inject.py
@@ -32,7 +32,8 @@ def with_config(
auto_pipeline_section: bool = False,
include_defaults: bool = True,
accept_partial: bool = False,
) -> TFun: ...
) -> TFun:
...


@overload
@@ -45,7 +46,8 @@ def with_config(
auto_pipeline_section: bool = False,
include_defaults: bool = True,
accept_partial: bool = False,
) -> Callable[[TFun], TFun]: ...
) -> Callable[[TFun], TFun]:
...


def with_config(
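Note: the two `with_config` hunks above only reformat `@overload` stubs whose bodies are a bare `...`. For readers unfamiliar with the pattern, here is a minimal self-contained sketch with simplified, hypothetical signatures; it is not the dlt implementation.

```python
from typing import Any, Callable, Optional, TypeVar, overload

TFun = TypeVar("TFun", bound=Callable[..., Any])


@overload
def with_config(func: TFun) -> TFun:
    ...


@overload
def with_config(func: None = ..., *, accept_partial: bool = False) -> Callable[[TFun], TFun]:
    ...


def with_config(func: Optional[TFun] = None, *, accept_partial: bool = False) -> Any:
    # The real decorator resolves configuration before calling `func`;
    # this sketch just returns the function unchanged.
    def decorator(f: TFun) -> TFun:
        return f

    return decorator if func is None else decorator(func)
```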
2 changes: 1 addition & 1 deletion dlt/common/configuration/resolve.py
@@ -44,7 +44,7 @@ def resolve_configuration(
*,
sections: Tuple[str, ...] = (),
explicit_value: Any = None,
accept_partial: bool = False
accept_partial: bool = False,
) -> TConfiguration:
if not isinstance(config, BaseConfiguration):
raise ConfigurationWrongTypeException(type(config))
6 changes: 4 additions & 2 deletions dlt/common/configuration/specs/base_configuration.py
@@ -105,11 +105,13 @@ def is_secret_hint(hint: Type[Any]) -> bool:


@overload
def configspec(cls: Type[TAnyClass]) -> Type[TAnyClass]: ...
def configspec(cls: Type[TAnyClass]) -> Type[TAnyClass]:
...


@overload
def configspec(cls: None = ...) -> Callable[[Type[TAnyClass]], Type[TAnyClass]]: ...
def configspec(cls: None = ...) -> Callable[[Type[TAnyClass]], Type[TAnyClass]]:
...


def configspec(
3 changes: 2 additions & 1 deletion dlt/common/configuration/specs/config_section_context.py
@@ -79,4 +79,5 @@ def __init__(
sections: Tuple[str, ...] = (),
merge_style: TMergeFunc = None,
source_state_key: str = None,
) -> None: ...
) -> None:
...
6 changes: 4 additions & 2 deletions dlt/common/data_writers/buffered.py
@@ -45,7 +45,7 @@ def __init__(
file_max_items: int = None,
file_max_bytes: int = None,
disable_compression: bool = False,
_caps: DestinationCapabilitiesContext = None
_caps: DestinationCapabilitiesContext = None,
):
self.file_format = file_format
self._file_format_spec = DataWriter.data_format_from_file_format(self.file_format)
@@ -197,7 +197,9 @@ def _flush_items(self, allow_empty_file: bool = False) -> None:
self._file = self.open(self._file_name, "wb") # type: ignore
else:
self._file = self.open(self._file_name, "wt", encoding="utf-8") # type: ignore
self._writer = DataWriter.from_file_format(self.file_format, self._file, caps=self._caps) # type: ignore[assignment]
self._writer = DataWriter.from_file_format(
self.file_format, self._file, caps=self._caps
) # type: ignore[assignment]
self._writer.write_header(self._current_columns)
# write buffer
if self._buffered_items:
5 changes: 4 additions & 1 deletion dlt/common/data_writers/escape.py
@@ -20,7 +20,10 @@ def _make_sql_escape_re(escape_dict: Dict[str, str]) -> re.Pattern: # type: ign


def _escape_extended(
v: str, prefix: str = "E'", escape_dict: Dict[str, str] = None, escape_re: re.Pattern = None # type: ignore[type-arg]
v: str,
prefix: str = "E'",
escape_dict: Dict[str, str] = None,
escape_re: re.Pattern = None, # type: ignore[type-arg]
) -> str:
escape_dict = escape_dict or SQL_ESCAPE_DICT
escape_re = escape_re or SQL_ESCAPE_RE
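Note: the `_escape_extended` change above is a pure re-wrap of the signature. The underlying technique (escaping a string through one regex compiled from an escape dictionary) can be sketched standalone as follows; the helper names here are hypothetical, and the real `SQL_ESCAPE_DICT` and `SQL_ESCAPE_RE` live in `dlt/common/data_writers/escape.py`.

```python
import re
from typing import Dict


def make_escape_re(escape_dict: Dict[str, str]) -> "re.Pattern[str]":
    # Compile one regex that matches every key of the escape dict.
    return re.compile("|".join(re.escape(k) for k in escape_dict))


def escape_extended(v: str, escape_dict: Dict[str, str], prefix: str = "E'") -> str:
    # Replace each match with its mapped value in a single pass, then quote.
    escape_re = make_escape_re(escape_dict)
    return prefix + escape_re.sub(lambda m: escape_dict[m.group(0)], v) + "'"


assert escape_extended("it's", {"'": "''"}) == "E'it''s'"
```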
18 changes: 11 additions & 7 deletions dlt/common/destination/reference.py
@@ -74,9 +74,9 @@ class StateInfo(NamedTuple):
class DestinationClientConfiguration(BaseConfiguration):
destination_type: Final[str] = None # which destination to load data to
credentials: Optional[CredentialsConfiguration]
destination_name: Optional[str] = (
None # name of the destination, if not set, destination_type is used
)
destination_name: Optional[
str
] = None # name of the destination, if not set, destination_type is used
environment: Optional[str] = None

def fingerprint(self) -> str:
@@ -98,7 +98,8 @@ def __init__(
credentials: Optional[CredentialsConfiguration] = None,
destination_name: str = None,
environment: str = None,
) -> None: ...
) -> None:
...


@configspec
@@ -143,7 +144,8 @@ def __init__(
default_schema_name: Optional[str] = None,
destination_name: str = None,
environment: str = None,
) -> None: ...
) -> None:
...


@configspec
@@ -171,7 +173,8 @@ def __init__(
layout: str = None,
destination_name: str = None,
environment: str = None,
) -> None: ...
) -> None:
...


@configspec
@@ -191,7 +194,8 @@ def __init__(
staging_config: Optional[DestinationClientStagingConfiguration] = None,
destination_name: str = None,
environment: str = None,
) -> None: ...
) -> None:
...


TLoadJobState = Literal["running", "failed", "retry", "completed"]
35 changes: 22 additions & 13 deletions dlt/common/json/__init__.py
@@ -24,29 +24,38 @@ class SupportsJson(Protocol):
_impl_name: str
"""Implementation name"""

def dump(
self, obj: Any, fp: IO[bytes], sort_keys: bool = False, pretty: bool = False
) -> None: ...
def dump(self, obj: Any, fp: IO[bytes], sort_keys: bool = False, pretty: bool = False) -> None:
...

def typed_dump(self, obj: Any, fp: IO[bytes], pretty: bool = False) -> None: ...
def typed_dump(self, obj: Any, fp: IO[bytes], pretty: bool = False) -> None:
...

def typed_dumps(self, obj: Any, sort_keys: bool = False, pretty: bool = False) -> str: ...
def typed_dumps(self, obj: Any, sort_keys: bool = False, pretty: bool = False) -> str:
...

def typed_loads(self, s: str) -> Any: ...
def typed_loads(self, s: str) -> Any:
...

def typed_dumpb(self, obj: Any, sort_keys: bool = False, pretty: bool = False) -> bytes: ...
def typed_dumpb(self, obj: Any, sort_keys: bool = False, pretty: bool = False) -> bytes:
...

def typed_loadb(self, s: Union[bytes, bytearray, memoryview]) -> Any: ...
def typed_loadb(self, s: Union[bytes, bytearray, memoryview]) -> Any:
...

def dumps(self, obj: Any, sort_keys: bool = False, pretty: bool = False) -> str: ...
def dumps(self, obj: Any, sort_keys: bool = False, pretty: bool = False) -> str:
...

def dumpb(self, obj: Any, sort_keys: bool = False, pretty: bool = False) -> bytes: ...
def dumpb(self, obj: Any, sort_keys: bool = False, pretty: bool = False) -> bytes:
...

def load(self, fp: Union[IO[bytes], IO[str]]) -> Any: ...
def load(self, fp: Union[IO[bytes], IO[str]]) -> Any:
...

def loads(self, s: str) -> Any: ...
def loads(self, s: str) -> Any:
...

def loadb(self, s: Union[bytes, bytearray, memoryview]) -> Any: ...
def loadb(self, s: Union[bytes, bytearray, memoryview]) -> Any:
...


def custom_encode(obj: Any) -> str:
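Note: the `SupportsJson` hunks are again formatting-only. The class is a `typing.Protocol`, so every method body is a bare `...`; concrete json backends elsewhere in dlt satisfy it structurally. A trimmed sketch with hypothetical class names and only two of the methods:

```python
import json
from typing import Any, Protocol


class SupportsJsonSketch(Protocol):
    _impl_name: str

    def dumps(self, obj: Any, sort_keys: bool = False, pretty: bool = False) -> str:
        ...

    def loads(self, s: str) -> Any:
        ...


class StdlibJson:
    # Satisfies the protocol without inheriting from it.
    _impl_name = "stdlib"

    def dumps(self, obj: Any, sort_keys: bool = False, pretty: bool = False) -> str:
        return json.dumps(obj, sort_keys=sort_keys, indent=2 if pretty else None)

    def loads(self, s: str) -> Any:
        return json.loads(s)


def roundtrip(impl: SupportsJsonSketch, obj: Any) -> Any:
    return impl.loads(impl.dumps(obj))


assert roundtrip(StdlibJson(), {"a": 1}) == {"a": 1}
```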
5 changes: 3 additions & 2 deletions dlt/common/libs/pydantic.py
@@ -71,7 +71,7 @@ class DltConfig(TypedDict, total=False):


def pydantic_to_table_schema_columns(
model: Union[BaseModel, Type[BaseModel]]
model: Union[BaseModel, Type[BaseModel]],
) -> TTableSchemaColumns:
"""Convert a pydantic model to a table schema columns dict

@@ -261,7 +261,8 @@ def create_list_model(
# TODO: use LenientList to create list model that automatically discards invalid items
# https://github.com/pydantic/pydantic/issues/2274 and https://gist.github.com/dmontagu/7f0cef76e5e0e04198dd608ad7219573
return create_model(
"List" + __name__, items=(List[model], ...) # type: ignore[return-value,valid-type]
"List" + __name__,
items=(List[model], ...), # type: ignore[return-value,valid-type]
)


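Note: `create_list_model` above builds a dynamic pydantic model whose single field `items` is a `List` of the given model, so a whole batch can be validated in one call. A minimal usage sketch of that pattern, with a hypothetical `Item` model:

```python
from typing import List

from pydantic import BaseModel, create_model


class Item(BaseModel):
    name: str


# Dynamically generated wrapper model, mirroring create_list_model above.
ListItem = create_model("ListItem", items=(List[Item], ...))

batch = ListItem(items=[{"name": "a"}, {"name": "b"}])
assert len(batch.items) == 2
```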
3 changes: 2 additions & 1 deletion dlt/common/normalizers/configuration.py
@@ -25,4 +25,5 @@ def on_resolved(self) -> None:

if TYPE_CHECKING:

def __init__(self, naming: str = None, json_normalizer: TJSONNormalizer = None) -> None: ...
def __init__(self, naming: str = None, json_normalizer: TJSONNormalizer = None) -> None:
...
9 changes: 6 additions & 3 deletions dlt/common/pipeline.py
@@ -524,7 +524,8 @@ def run(
schema: Schema = None,
loader_file_format: TLoaderFileFormat = None,
schema_contract: TSchemaContract = None,
) -> LoadInfo: ...
) -> LoadInfo:
...

def _set_context(self, is_active: bool) -> None:
"""Called when pipeline context activated or deactivate"""
@@ -546,7 +547,8 @@ def __call__(
schema: Schema = None,
loader_file_format: TLoaderFileFormat = None,
schema_contract: TSchemaContract = None,
) -> LoadInfo: ...
) -> LoadInfo:
...


@configspec
@@ -596,7 +598,8 @@ class StateInjectableContext(ContainerInjectableContext):

if TYPE_CHECKING:

def __init__(self, state: TPipelineState = None) -> None: ...
def __init__(self, state: TPipelineState = None) -> None:
...


def pipeline_state(
3 changes: 2 additions & 1 deletion dlt/common/runners/configuration.py
@@ -25,4 +25,5 @@ def __init__(
start_method: str = None,
workers: int = None,
run_sleep: float = 0.1,
) -> None: ...
) -> None:
...
3 changes: 2 additions & 1 deletion dlt/common/runtime/logger.py
@@ -14,7 +14,8 @@


class LogMethod(Protocol):
def __call__(self, msg: str, *args: Any, **kwds: Any) -> None: ...
def __call__(self, msg: str, *args: Any, **kwds: Any) -> None:
...


def __getattr__(name: str) -> LogMethod:
4 changes: 3 additions & 1 deletion dlt/common/schema/schema.py
@@ -889,7 +889,9 @@ def _compile_settings(self) -> None:
map(utils.compile_simple_regex, table["filters"]["includes"])
)
# look for auto-detections in settings and then normalizer
self._type_detections = self._settings.get("detections") or self._normalizers_config.get("detections") or [] # type: ignore
self._type_detections = (
self._settings.get("detections") or self._normalizers_config.get("detections") or []
) # type: ignore

def __repr__(self) -> str:
return f"Schema {self.name} at {id(self)}"
9 changes: 6 additions & 3 deletions dlt/common/storages/configuration.py
@@ -38,7 +38,8 @@ def __init__(
schema_volume_path: str = None,
import_schema_path: str = None,
export_schema_path: str = None,
) -> None: ...
) -> None:
...


@configspec
@@ -47,7 +48,8 @@ class NormalizeStorageConfiguration(BaseConfiguration):

if TYPE_CHECKING:

def __init__(self, normalize_volume_path: str = None) -> None: ...
def __init__(self, normalize_volume_path: str = None) -> None:
...


@configspec
@@ -63,7 +65,8 @@ class LoadStorageConfiguration(BaseConfiguration):

def __init__(
self, load_volume_path: str = None, delete_completed_jobs: bool = None
) -> None: ...
) -> None:
...


FileSystemCredentials = Union[
4 changes: 3 additions & 1 deletion dlt/common/storages/load_package.py
@@ -177,7 +177,9 @@ def asstr(self, verbosity: int = 0) -> str:
f" {completed_msg}.\n"
)
msg += "Jobs details:\n"
msg += "\n".join(job.asstr(verbosity) for job in flatten_list_or_items(iter(self.jobs.values()))) # type: ignore
msg += "\n".join(
job.asstr(verbosity) for job in flatten_list_or_items(iter(self.jobs.values()))
) # type: ignore
return msg

def __str__(self) -> str:
6 changes: 3 additions & 3 deletions dlt/common/storages/normalize_storage.py
@@ -17,9 +17,9 @@

class NormalizeStorage(VersionedStorage):
STORAGE_VERSION: ClassVar[str] = "1.0.1"
EXTRACTED_FOLDER: ClassVar[str] = (
"extracted" # folder within the volume where extracted files to be normalized are stored
)
EXTRACTED_FOLDER: ClassVar[
str
] = "extracted" # folder within the volume where extracted files to be normalized are stored

@with_config(spec=NormalizeStorageConfiguration, sections=(known_sections.NORMALIZE,))
def __init__(
8 changes: 6 additions & 2 deletions dlt/common/time.py
@@ -139,7 +139,7 @@ def ensure_pendulum_time(value: Union[str, datetime.time]) -> pendulum.Time:


def _datetime_from_ts_or_iso(
value: Union[int, float, str]
value: Union[int, float, str],
) -> Union[pendulum.DateTime, pendulum.Date, pendulum.Time]:
if isinstance(value, (int, float)):
return pendulum.from_timestamp(value)
@@ -172,4 +172,8 @@ def to_seconds(td: Optional[TimedeltaSeconds]) -> Optional[float]:
def reduce_pendulum_datetime_precision(value: T, microsecond_precision: int) -> T:
if microsecond_precision >= 6:
return value
return value.replace(microsecond=value.microsecond // 10 ** (6 - microsecond_precision) * 10 ** (6 - microsecond_precision)) # type: ignore
return value.replace(
microsecond=value.microsecond
// 10 ** (6 - microsecond_precision)
* 10 ** (6 - microsecond_precision)
) # type: ignore
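Note: the final `reduce_pendulum_datetime_precision` hunk only re-wraps the long truncation expression. As a standalone sketch, the arithmetic it performs on the 6-digit microsecond field is:

```python
def truncate_microseconds(microsecond: int, precision: int) -> int:
    # Keep only the most significant `precision` digits of the microsecond
    # value (0..999999); mirrors the expression in the hunk above.
    if precision >= 6:
        return microsecond
    step = 10 ** (6 - precision)
    return microsecond // step * step


assert truncate_microseconds(123456, 3) == 123000
assert truncate_microseconds(987654, 0) == 0
```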