chore(deps): bump black from 22.3.0 to 24.3.0 in /requirements/extras (#4519)

* chore(deps): bump black from 22.3.0 to 24.3.0 in /requirements/extras

Bumps [black](https://github.com/psf/black) from 22.3.0 to 24.3.0.
- [Release notes](https://github.com/psf/black/releases)
- [Changelog](https://github.com/psf/black/blob/main/CHANGES.md)
- [Commits](https://github.com/psf/black/compare/22.3.0...24.3.0)

---
updated-dependencies:
- dependency-name: black
  dependency-type: direct:production
...

Signed-off-by: dependabot[bot] <[email protected]>

* black-format

* fix flake8

* black-format

* fix black-check

* fixed black-check

---------

Signed-off-by: dependabot[bot] <[email protected]>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Erick Benitez-Ramos <[email protected]>
Co-authored-by: Kalyani Nikure <[email protected]>
Co-authored-by: knikure <[email protected]>
4 people authored Apr 17, 2024
1 parent 7b211fe commit 55822f7
Showing 95 changed files with 615 additions and 538 deletions.
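
Most of the reformatting below comes from one change in Black's 2024 stable style: long assignments are no longer broken by splitting the subscript or annotation on the left-hand side; instead the target stays on one line and the right-hand side is wrapped in parentheses. A minimal, self-contained sketch of the before/after (the function is a simplified stand-in, reusing a key name from the profiler_config.py hunk below):

```python
# Simplified illustration of the dominant change in this diff; not code from the SDK itself.
def to_request_dict(system_monitor_interval_millis: int) -> dict:
    profiler_config_request = {}

    # Black 22.x wrapped the long assignment by splitting the subscript:
    # profiler_config_request[
    #     "ProfilingIntervalInMilliseconds"
    # ] = system_monitor_interval_millis

    # Black 24.x keeps the subscript intact and parenthesizes the value instead:
    profiler_config_request["ProfilingIntervalInMilliseconds"] = (
        system_monitor_interval_millis
    )
    return profiler_config_request


print(to_request_dict(500))  # {'ProfilingIntervalInMilliseconds': 500}
```

This mechanical rewrite is what the follow-up black-format commits apply across the repository, which accounts for the large number of files touched.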
2 changes: 1 addition & 1 deletion requirements/extras/test_requirements.txt
@@ -9,7 +9,7 @@ coverage>=5.2, <6.2
mock==4.0.3
contextlib2==21.6.0
awslogs==0.14.0
black==22.3.0
black==24.3.0
stopit==1.1.2
# Update tox.ini to have correct version of airflow constraints file
apache-airflow==2.8.4
16 changes: 8 additions & 8 deletions src/sagemaker/amazon/record_pb2.py

Some generated files are not rendered by default.

12 changes: 6 additions & 6 deletions src/sagemaker/automl/automl.py
@@ -930,9 +930,9 @@ def _load_config(cls, inputs, auto_ml, expand_role=True, validate_uri=True):

auto_ml_model_deploy_config = {}
if auto_ml.auto_generate_endpoint_name is not None:
auto_ml_model_deploy_config[
"AutoGenerateEndpointName"
] = auto_ml.auto_generate_endpoint_name
auto_ml_model_deploy_config["AutoGenerateEndpointName"] = (
auto_ml.auto_generate_endpoint_name
)
if not auto_ml.auto_generate_endpoint_name and auto_ml.endpoint_name is not None:
auto_ml_model_deploy_config["EndpointName"] = auto_ml.endpoint_name

@@ -1034,9 +1034,9 @@ def _prepare_auto_ml_stop_condition(
if max_candidates is not None:
stopping_condition["MaxCandidates"] = max_candidates
if max_runtime_per_training_job_in_seconds is not None:
stopping_condition[
"MaxRuntimePerTrainingJobInSeconds"
] = max_runtime_per_training_job_in_seconds
stopping_condition["MaxRuntimePerTrainingJobInSeconds"] = (
max_runtime_per_training_job_in_seconds
)
if total_job_runtime_in_seconds is not None:
stopping_condition["MaxAutoMLJobRuntimeInSeconds"] = total_job_runtime_in_seconds

6 changes: 3 additions & 3 deletions src/sagemaker/automl/automlv2.py
@@ -1446,9 +1446,9 @@ def _load_config(cls, inputs, auto_ml, expand_role=True):

auto_ml_model_deploy_config = {}
if auto_ml.auto_generate_endpoint_name is not None:
auto_ml_model_deploy_config[
"AutoGenerateEndpointName"
] = auto_ml.auto_generate_endpoint_name
auto_ml_model_deploy_config["AutoGenerateEndpointName"] = (
auto_ml.auto_generate_endpoint_name
)
if not auto_ml.auto_generate_endpoint_name and auto_ml.endpoint_name is not None:
auto_ml_model_deploy_config["EndpointName"] = auto_ml.endpoint_name

8 changes: 5 additions & 3 deletions src/sagemaker/collection.py
@@ -377,9 +377,11 @@ def _convert_group_resource_response(
{
"Name": collection_name,
"Arn": collection_arn,
"Type": resource_group["Identifier"]["ResourceType"]
if is_model_group
else "Collection",
"Type": (
resource_group["Identifier"]["ResourceType"]
if is_model_group
else "Collection"
),
}
)
return collection_details
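
The collection.py hunk above shows a related rule from the same release: a conditional expression used as a dictionary value is now wrapped in parentheses instead of hanging off the key. A standalone sketch, with stand-in data in place of the real resource-group response:

```python
# Stand-in data; in the SDK these values come from a Resource Groups API response.
resource_group = {"Identifier": {"ResourceType": "ModelPackageGroup"}}
is_model_group = True

collection_details = [
    {
        "Name": "example-collection",        # stand-in name
        "Arn": "arn:aws:sagemaker:example",  # stand-in ARN
        # Black 24.x parenthesizes the conditional rather than letting it
        # dangle after the key:
        "Type": (
            resource_group["Identifier"]["ResourceType"]
            if is_model_group
            else "Collection"
        ),
    }
]
print(collection_details[0]["Type"])  # ModelPackageGroup
```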
18 changes: 9 additions & 9 deletions src/sagemaker/debugger/profiler_config.py
@@ -162,19 +162,19 @@ def _to_request_dict(self):
profiler_config_request["DisableProfiler"] = self.disable_profiler

if self.system_monitor_interval_millis is not None:
profiler_config_request[
"ProfilingIntervalInMilliseconds"
] = self.system_monitor_interval_millis
profiler_config_request["ProfilingIntervalInMilliseconds"] = (
self.system_monitor_interval_millis
)

if self.framework_profile_params is not None:
profiler_config_request[
"ProfilingParameters"
] = self.framework_profile_params.profiling_parameters
profiler_config_request["ProfilingParameters"] = (
self.framework_profile_params.profiling_parameters
)

if self.profile_params is not None:
profiler_config_request[
"ProfilingParameters"
] = self.profile_params.profiling_parameters
profiler_config_request["ProfilingParameters"] = (
self.profile_params.profiling_parameters
)

return profiler_config_request

10 changes: 4 additions & 6 deletions src/sagemaker/djl_inference/model.py
@@ -213,9 +213,7 @@ def _create_estimator(
vpc_config: Optional[
Dict[
str,
List[
str,
],
List[str],
]
] = None,
volume_kms_key=None,
@@ -820,9 +818,9 @@ def _get_container_env(self):
logger.warning("Ignoring invalid container log level: %s", self.container_log_level)
return self.env

self.env[
"SERVING_OPTS"
] = f'"-Dai.djl.logging.level={_LOG_LEVEL_MAP[self.container_log_level]}"'
self.env["SERVING_OPTS"] = (
f'"-Dai.djl.logging.level={_LOG_LEVEL_MAP[self.container_log_level]}"'
)
return self.env


12 changes: 6 additions & 6 deletions src/sagemaker/estimator.py
@@ -2539,9 +2539,9 @@ def _get_train_args(cls, estimator, inputs, experiment_config):
# which is parsed in execution time
# This does not check config because the EstimatorBase constuctor already did that check
if estimator.encrypt_inter_container_traffic:
train_args[
"encrypt_inter_container_traffic"
] = estimator.encrypt_inter_container_traffic
train_args["encrypt_inter_container_traffic"] = (
estimator.encrypt_inter_container_traffic
)

if isinstance(estimator, sagemaker.algorithm.AlgorithmEstimator):
train_args["algorithm_arn"] = estimator.algorithm_arn
@@ -2556,9 +2556,9 @@ def _get_train_args(cls, estimator, inputs, experiment_config):
train_args["debugger_hook_config"] = estimator.debugger_hook_config._to_request_dict()

if estimator.tensorboard_output_config:
train_args[
"tensorboard_output_config"
] = estimator.tensorboard_output_config._to_request_dict()
train_args["tensorboard_output_config"] = (
estimator.tensorboard_output_config._to_request_dict()
)

cls._add_spot_checkpoint_args(local_mode, estimator, train_args)

6 changes: 3 additions & 3 deletions src/sagemaker/experiments/run.py
@@ -220,9 +220,9 @@ def __init__(
trial_component_name=self._trial_component.trial_component_name,
sagemaker_session=sagemaker_session,
artifact_bucket=artifact_bucket,
artifact_prefix=_DEFAULT_ARTIFACT_PREFIX
if artifact_prefix is None
else artifact_prefix,
artifact_prefix=(
_DEFAULT_ARTIFACT_PREFIX if artifact_prefix is None else artifact_prefix
),
)
self._lineage_artifact_tracker = _LineageArtifactTracker(
trial_component_arn=self._trial_component.trial_component_arn,
6 changes: 3 additions & 3 deletions src/sagemaker/explainer/explainer_config.py
@@ -37,8 +37,8 @@ def _to_request_dict(self):
request_dict = {}

if self.clarify_explainer_config:
request_dict[
"ClarifyExplainerConfig"
] = self.clarify_explainer_config._to_request_dict()
request_dict["ClarifyExplainerConfig"] = (
self.clarify_explainer_config._to_request_dict()
)

return request_dict
@@ -115,19 +115,19 @@ class FeatureProcessorLineageHandler:

def create_lineage(self, tags: Optional[List[Dict[str, str]]] = None) -> None:
"""Create and Update Feature Processor Lineage"""
input_feature_group_contexts: List[
FeatureGroupContexts
] = self._retrieve_input_feature_group_contexts()
input_feature_group_contexts: List[FeatureGroupContexts] = (
self._retrieve_input_feature_group_contexts()
)
output_feature_group_contexts: FeatureGroupContexts = (
self._retrieve_output_feature_group_contexts()
)
input_raw_data_artifacts: List[Artifact] = self._retrieve_input_raw_data_artifacts()
transformation_code_artifact: Optional[
Artifact
] = S3LineageEntityHandler.create_transformation_code_artifact(
transformation_code=self.transformation_code,
pipeline_last_update_time=self.pipeline[LAST_MODIFIED_TIME].strftime("%s"),
sagemaker_session=self.sagemaker_session,
transformation_code_artifact: Optional[Artifact] = (
S3LineageEntityHandler.create_transformation_code_artifact(
transformation_code=self.transformation_code,
pipeline_last_update_time=self.pipeline[LAST_MODIFIED_TIME].strftime("%s"),
sagemaker_session=self.sagemaker_session,
)
)
if transformation_code_artifact is not None:
logger.info("Created Transformation Code Artifact: %s", transformation_code_artifact)
@@ -362,40 +362,40 @@ def _update_pipeline_lineage(
current_pipeline_version_context: Context = self._get_pipeline_version_context(
last_update_time=pipeline_context.properties[LAST_UPDATE_TIME]
)
upstream_feature_group_associations: Iterator[
AssociationSummary
] = LineageAssociationHandler.list_upstream_associations(
# pylint: disable=no-member
entity_arn=current_pipeline_version_context.context_arn,
source_type=FEATURE_GROUP_PIPELINE_VERSION_CONTEXT_TYPE,
sagemaker_session=self.sagemaker_session,
upstream_feature_group_associations: Iterator[AssociationSummary] = (
LineageAssociationHandler.list_upstream_associations(
# pylint: disable=no-member
entity_arn=current_pipeline_version_context.context_arn,
source_type=FEATURE_GROUP_PIPELINE_VERSION_CONTEXT_TYPE,
sagemaker_session=self.sagemaker_session,
)
)

upstream_raw_data_associations: Iterator[
AssociationSummary
] = LineageAssociationHandler.list_upstream_associations(
# pylint: disable=no-member
entity_arn=current_pipeline_version_context.context_arn,
source_type=DATA_SET,
sagemaker_session=self.sagemaker_session,
upstream_raw_data_associations: Iterator[AssociationSummary] = (
LineageAssociationHandler.list_upstream_associations(
# pylint: disable=no-member
entity_arn=current_pipeline_version_context.context_arn,
source_type=DATA_SET,
sagemaker_session=self.sagemaker_session,
)
)

upstream_transformation_code: Iterator[
AssociationSummary
] = LineageAssociationHandler.list_upstream_associations(
# pylint: disable=no-member
entity_arn=current_pipeline_version_context.context_arn,
source_type=TRANSFORMATION_CODE,
sagemaker_session=self.sagemaker_session,
upstream_transformation_code: Iterator[AssociationSummary] = (
LineageAssociationHandler.list_upstream_associations(
# pylint: disable=no-member
entity_arn=current_pipeline_version_context.context_arn,
source_type=TRANSFORMATION_CODE,
sagemaker_session=self.sagemaker_session,
)
)

downstream_feature_group_associations: Iterator[
AssociationSummary
] = LineageAssociationHandler.list_downstream_associations(
# pylint: disable=no-member
entity_arn=current_pipeline_version_context.context_arn,
destination_type=FEATURE_GROUP_PIPELINE_VERSION_CONTEXT_TYPE,
sagemaker_session=self.sagemaker_session,
downstream_feature_group_associations: Iterator[AssociationSummary] = (
LineageAssociationHandler.list_downstream_associations(
# pylint: disable=no-member
entity_arn=current_pipeline_version_context.context_arn,
destination_type=FEATURE_GROUP_PIPELINE_VERSION_CONTEXT_TYPE,
sagemaker_session=self.sagemaker_session,
)
)

is_upstream_feature_group_equal: bool = self._compare_upstream_feature_groups(
@@ -598,9 +598,9 @@ def _update_last_transformation_code(
last_transformation_code_artifact.properties["state"]
== TRANSFORMATION_CODE_STATUS_ACTIVE
):
last_transformation_code_artifact.properties[
"state"
] = TRANSFORMATION_CODE_STATUS_INACTIVE
last_transformation_code_artifact.properties["state"] = (
TRANSFORMATION_CODE_STATUS_INACTIVE
)
last_transformation_code_artifact.properties["exclusive_end_date"] = self.pipeline[
LAST_MODIFIED_TIME
].strftime("%s")
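
The feature-processor lineage hunks above apply the same rule to annotated assignments: the annotation stays on one line and the call on the right-hand side is parenthesized (and indented one level deeper). A simplified, runnable sketch; the class and helper below are stand-ins for the SDK's internals:

```python
from typing import List


class FeatureGroupContexts:
    """Stand-in for the SDK's FeatureGroupContexts."""

    def __init__(self, name: str) -> None:
        self.name = name


def retrieve_input_feature_group_contexts() -> List[FeatureGroupContexts]:
    """Stand-in helper returning dummy contexts."""
    return [FeatureGroupContexts("fg-input-1"), FeatureGroupContexts("fg-input-2")]


# Black 22.x split the annotation across lines:
# input_feature_group_contexts: List[
#     FeatureGroupContexts
# ] = retrieve_input_feature_group_contexts()

# Black 24.x keeps the annotation whole and parenthesizes the call instead:
input_feature_group_contexts: List[FeatureGroupContexts] = (
    retrieve_input_feature_group_contexts()
)
print([fg.name for fg in input_feature_group_contexts])
```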
@@ -172,9 +172,9 @@ def retrieve_pipeline_schedule_artifact(
sagemaker_session=sagemaker_session,
)
pipeline_schedule_artifact.properties["pipeline_name"] = pipeline_schedule.pipeline_name
pipeline_schedule_artifact.properties[
"schedule_expression"
] = pipeline_schedule.schedule_expression
pipeline_schedule_artifact.properties["schedule_expression"] = (
pipeline_schedule.schedule_expression
)
pipeline_schedule_artifact.properties["state"] = pipeline_schedule.state
pipeline_schedule_artifact.properties["start_date"] = pipeline_schedule.start_date
pipeline_schedule_artifact.save()