Skip to content

Commit

Permalink
Release Changes for v1.4.3
Browse files Browse the repository at this point in the history
  • Loading branch information
ysdholak committed Oct 12, 2023
1 parent 96f6b32 commit 82de37c
Show file tree
Hide file tree
Showing 39 changed files with 277 additions and 162 deletions.
8 changes: 8 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,14 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

## [1.4.3] - 2023-10-12

### Changed

- Upgrade aws-cdk to 2.88.0
- Upgrade deprecated methods in App-registry
- Address or fix all SonarQube issues

## [1.4.2] - 2023-06-22

### Changed
Expand Down
8 changes: 8 additions & 0 deletions NOTICE.txt
Original file line number Diff line number Diff line change
Expand Up @@ -37,5 +37,13 @@ requests-mock under the Apache License Version 2.0
rich under the Massachusetts Institute of Technology (MIT) license
tenacity under the Apache License Version 2.0
quartz-scheduler under the Apache License Version 2.0
parsedatetime under the Apache License Version 2.0
urllib3 under the Massachusetts Institute of Technology (MIT) license
setuptools under the Massachusetts Institute of Technology (MIT) license
pipenv under the Massachusetts Institute of Technology (MIT) license
virtualenv under the Massachusetts Institute of Technology (MIT) license
tox under the Massachusetts Institute of Technology (MIT) license
tox-pyenv under the Apache License Version 2.0
poetry under the Massachusetts Institute of Technology (MIT) license

The Apache License Version 2.0 is included in LICENSE.txt.
4 changes: 2 additions & 2 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -609,7 +609,7 @@ The following procedures assumes that all the OS-level configuration has been co
- [AWS Command Line Interface](https://aws.amazon.com/cli/)
- [Python](https://www.python.org/) 3.9 or newer
- [Node.js](https://nodejs.org/en/) 16.x or newer
- [AWS CDK](https://aws.amazon.com/cdk/) 2.75.0 or newer
- [AWS CDK](https://aws.amazon.com/cdk/) 2.88.0 or newer
- [Amazon Corretto OpenJDK](https://docs.aws.amazon.com/corretto/) 17.0.4.1

> **Please ensure you test the templates before updating any production deployments.**
Expand Down Expand Up @@ -707,7 +707,7 @@ After running the command, you can deploy the template:
## Collection of operational metrics

This solution collects anonymous operational metrics to help AWS improve the quality of features of the solution.
For more information, including how to disable this capability, please see the [implementation guide](https://docs.aws.amazon.com/solutions/latest/maintaining-personalized-experiences-with-ml/collection-of-operational-metrics.html).
For more information, including how to disable this capability, please see the [implementation guide](https://docs.aws.amazon.com/solutions/latest/maintaining-personalized-experiences-with-ml/reference.html).

---

Expand Down
2 changes: 1 addition & 1 deletion source/aws_lambda/prepare_input/handler.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@
metrics = Metrics()


def lambda_handler(event: Dict[str, Any], context: LambdaContext) -> Dict:
def lambda_handler(event: Dict[str, Any], _) -> Dict:
"""Add timeStarted to the workflowConfig of all items
:param event: AWS Lambda Event
:param context: AWS Lambda Context
Expand Down
140 changes: 68 additions & 72 deletions source/aws_lambda/shared/personalize_service.py
Original file line number Diff line number Diff line change
Expand Up @@ -65,7 +65,19 @@
("timeStarted", Resource),
("solutionVersionArn", SolutionVersion),
)

# Personalize resource types that are eligible for default tag propagation
# (see the tag-filling logic that checks membership in this list)
RESOURCE_TYPES = [
    "datasetGroup",
    "datasetImport",
    "dataset",
    "eventTracker",
    "solution",
    "solutionVersion",
    "filter",
    "recommender",
    "campaign",
    "batchJob",
    "segmentJob"
]

def get_duplicates(items):
if isinstance(items, str):
Expand Down Expand Up @@ -714,71 +726,60 @@ def _validate_filters(self, path="filters[].serviceConfig"):
self._fill_default_vals("filter", _filter)

def _validate_type(self, var, typ, err: str):
    """Validate that ``var`` is an instance of ``typ``, recording ``err`` on failure.

    :param var: the value to check
    :param typ: the expected type (or tuple of types) for ``isinstance``
    :param err: the error message appended to the configuration-error list when validation fails
    :return: True if ``var`` validates, otherwise False
    """
    # Fix: the scraped diff carried both the old and the new assignment;
    # keep only the updated check. The explicit None guard is defensive —
    # isinstance(None, typ) is already False unless typ includes NoneType.
    validates = isinstance(var, typ) and var is not None

    if not validates:
        self._configuration_errors.append(err)

    return validates

def _validate_solutions(self, path="solutions[]"):
def _validate_solutions(self, path="solutions[]"):
solutions = jmespath.search(path, self.config_dict) or {}
for idx, _solution in enumerate(solutions):
campaigns = _solution.get("campaigns", [])
if self._validate_type(campaigns, list, f"solutions[{idx}].campaigns must be a list"):
self._validate_campaigns(f"solutions[{idx}].campaigns", campaigns)

batch_inference_jobs = _solution.get("batchInferenceJobs", [])
if batch_inference_jobs and self._validate_type(
batch_inference_jobs,
list,
f"solutions[{idx}].batchInferenceJobs must be a list",
):
self._validate_batch_inference_jobs(
path=f"solutions[{idx}].batchInferenceJobs",
solution_name=_solution.get("serviceConfig", {}).get("name", ""),
batch_inference_jobs=batch_inference_jobs,
)

batch_segment_jobs = _solution.get("batchSegmentJobs", [])
if batch_segment_jobs and self._validate_type(
batch_segment_jobs,
list,
f"solutions[{idx}].batchSegmentJobs must be a list",
):
self._validate_batch_segment_jobs(
path=f"solutions[{idx}].batchSegmentJobs",
solution_name=_solution.get("serviceConfig", {}).get("name", ""),
batch_segment_jobs=batch_segment_jobs,
)
for idx, _solution in enumerate(solutions):
# Validate campaigns and batch jobs
self._validate_campaigns(f"solutions[{idx}].campaigns", _solution.get("campaigns", []))
self._validate_batch_inference_jobs(
path=f"solutions[{idx}].batchInferenceJobs",
solution_name=_solution.get("serviceConfig", {}).get("name", ""),
batch_inference_jobs=_solution.get("batchInferenceJobs", []),
)
self._validate_batch_segment_jobs(
path=f"solutions[{idx}].batchSegmentJobs",
solution_name=_solution.get("serviceConfig", {}).get("name", ""),
batch_segment_jobs=_solution.get("batchSegmentJobs", []),
)

_solution = _solution.get("serviceConfig")
# Validate service configuration
_service_config = _solution.get("serviceConfig")

if not self._validate_type(_solution, dict, f"solutions[{idx}].serviceConfig must be an object"):
if not self._validate_type(_service_config, dict, f"solutions[{idx}].serviceConfig must be an object"):
continue

# `performAutoML` is currently returned from InputValidator.validate() as a valid field
# Once the botocore Stubber is updated to not have this param anymore in `create_solution` call,
# this check can be deleted.
if "performAutoML" in _solution:
del _solution["performAutoML"]
if "performAutoML" in _service_config:
del _service_config["performAutoML"]
logger.error(
"performAutoML is not a valid configuration parameter - proceeding to create the "
"solution without this feature. For more details, refer to the Maintaining Personalized Experiences "
"Github project's README.md file."
)

_solution["datasetGroupArn"] = DatasetGroup().arn("validation")
if "solutionVersion" in _solution:
# To pass solution through InputValidator
solution_version_config = _solution["solutionVersion"]
del _solution["solutionVersion"]
self._validate_resource(Solution(), _solution)
_solution["solutionVersion"] = solution_version_config
_service_config["datasetGroupArn"] = DatasetGroup().arn("validation")

if "solutionVersion" in _service_config:
# To pass solution through InputValidator
solution_version_config = _service_config["solutionVersion"]
del _service_config["solutionVersion"]
self._validate_resource(Solution(), _service_config)
_service_config["solutionVersion"] = solution_version_config
else:
self._validate_resource(Solution(), _solution)
self._validate_resource(Solution(), _service_config)

self._fill_default_vals("solution", _solution)
self._validate_solution_version(_solution)
self._fill_default_vals("solution", _service_config)
self._validate_solution_version(_service_config)

def _validate_solution_version(self, solution_config):
allowed_sol_version_keys = ["trainingMode", "tags"]
Expand Down Expand Up @@ -819,6 +820,8 @@ def _validate_solution_update(self):
)

def _validate_campaigns(self, path, campaigns: List[Dict]):
self._validate_type(campaigns, list, f"{path} must be a list")

for idx, campaign_config in enumerate(campaigns):
current_path = f"{path}.campaigns[{idx}]"

Expand All @@ -832,6 +835,12 @@ def _validate_campaigns(self, path, campaigns: List[Dict]):
self._fill_default_vals("campaign", campaign)

def _validate_batch_inference_jobs(self, path, solution_name, batch_inference_jobs: List[Dict]):
self._validate_type(
batch_inference_jobs,
list,
f"solutions[{path} must be a list",
)

for idx, batch_job_config in enumerate(batch_inference_jobs):
current_path = f"{path}.batchInferenceJobs[{idx}]"

Expand Down Expand Up @@ -860,6 +869,12 @@ def _validate_batch_inference_jobs(self, path, solution_name, batch_inference_jo
self._fill_default_vals("batchJob", batch_job)

def _validate_batch_segment_jobs(self, path, solution_name, batch_segment_jobs: List[Dict]):
self._validate_type(
batch_segment_jobs,
list,
f"solutions[{path} must be a list",
)

for idx, batch_job_config in enumerate(batch_segment_jobs):
current_path = f"{path}.batchSegmentJobs[{idx}]"

Expand Down Expand Up @@ -1108,42 +1123,23 @@ def _validate_naming(self):
self._validate_no_duplicates(name="campaign names", path="solutions[].campaigns[].serviceConfig.name")
self._validate_no_duplicates(name="solution names", path="solutions[].serviceConfig.name")

def _fill_default_vals(self, resource_type, resource_dict):
"""Insert default values for tags and other fields whenever not supplied"""

if (
resource_type
in [
"datasetGroup",
"datasetImport",
"dataset",
"eventTracker",
"solution",
"solutionVersion",
"filter",
"recommender",
"campaign",
"batchJob",
"segmentJob",
]
and "tags" not in resource_dict
):
def _fill_resource_dict_tags(self, resource_type, resource_dict):
    """Apply default tags to ``resource_dict`` in place when none were supplied.

    :param resource_type: the resource type key (only types listed in RESOURCE_TYPES are tagged)
    :param resource_dict: the resource configuration dictionary to update
    """
    if resource_type in RESOURCE_TYPES and "tags" not in resource_dict:
        # pass_root_tags propagates the root-level "tags" configuration down to
        # individual resources; otherwise the resource gets an empty tag list
        if self.pass_root_tags:
            resource_dict["tags"] = self.config_dict["tags"]
        else:
            resource_dict["tags"] = []

def _fill_default_vals(self, resource_type, resource_dict):
"""Insert default values for tags and other fields whenever not supplied"""
self._fill_resource_dict_tags(resource_type, resource_dict)

if resource_type == "datasetImport":
if "importMode" not in resource_dict:
resource_dict["importMode"] = "FULL"

if "publishAttributionMetricsToS3" not in resource_dict:
resource_dict["publishAttributionMetricsToS3"] = False

if resource_type == "solutionVersion":
if "tags" not in resource_dict:
if self.pass_root_tags:
resource_dict["tags"] = self.config_dict["tags"]
else:
resource_dict["tags"] = []
if "trainingMode" not in resource_dict:
resource_dict["trainingMode"] = "FULL"
if resource_type == "solutionVersion" and "trainingMode" not in resource_dict:
resource_dict["trainingMode"] = "FULL"
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,9 @@
from aws_solutions.cdk.aws_lambda.python.function import SolutionsPythonFunction
from aws_solutions.cdk.cfn_nag import add_cfn_nag_suppressions, CfnNagSuppression

from cdk_nag import NagSuppressions
from cdk_nag import NagPackSuppression


class ResourceHash(Construct):
"""Used to create unique resource names based on the hash of the stack ID"""
Expand Down Expand Up @@ -56,6 +59,15 @@ def __init__(
],
)

NagSuppressions.add_resource_suppressions(self._resource_name_function.role, [
NagPackSuppression(
id='AwsSolutions-IAM5',
reason='All IAM policies defined in this solution'
'grant only least-privilege permissions. Wild '
'card for resources is used only for services '
'which do not have a resource arn')],
apply_to_children=True)

properties = {
"ServiceToken": self._resource_name_function.function_arn,
"Purpose": purpose,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,9 @@
from aws_solutions.cdk.aws_lambda.python.function import SolutionsPythonFunction
from aws_solutions.cdk.cfn_nag import add_cfn_nag_suppressions, CfnNagSuppression

from cdk_nag import NagSuppressions
from cdk_nag import NagPackSuppression


class ResourceName(Construct):
"""Used to create unique resource names of the format {stack_name}-{purpose}-{id}"""
Expand Down Expand Up @@ -59,6 +62,15 @@ def __init__(
],
)

NagSuppressions.add_resource_suppressions(self._resource_name_function.role, [
NagPackSuppression(
id='AwsSolutions-IAM5',
reason='All IAM policies defined in this solution'
'grant only least-privilege permissions. Wild '
'card for resources is used only for services '
'which do not have a resource arn')],
apply_to_children=True)

properties = {
"ServiceToken": self._resource_name_function.function_arn,
"Purpose": purpose,
Expand Down
Original file line number Diff line number Diff line change
@@ -1,3 +1,3 @@
requests==2.31.0
urllib3==1.26.16
urllib3==1.26.17
crhelper==2.0.11
Original file line number Diff line number Diff line change
Expand Up @@ -44,7 +44,7 @@ def __init__(
self.gradle_test = gradle_test
self.distribution_path = distribution_path

def try_bundle(self, output_dir: str, options: BundlingOptions) -> bool:
def try_bundle(self, output_dir: str, options: BundlingOptions) -> bool: #NOSONAR - Options are required for method header
source = Path(self.to_bundle).absolute()

is_gradle_build = (source / "gradlew").exists()
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -55,7 +55,7 @@ def platform_supports_bundling(self):
logger.info("local bundling %s supported for %s" % ("is" if os_platform_can_bundle else "is not", os_platform))
return os_platform_can_bundle

def try_bundle(self, output_dir: str, options: BundlingOptions) -> bool:
def try_bundle(self, output_dir: str, options: BundlingOptions) -> bool: #NOSONAR - Options are required for method header
if not self.platform_supports_bundling:
raise SolutionsPythonBundlingException("this platform does not support bundling")

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -173,22 +173,6 @@ def __init__(self, build_env: BuildEnvironment):
def package(self):
logger.info("packaging global assets")


def validate_version_code(ctx, param, value):
    """
    Version codes are validated as semantic versions prefixed by a v, e.g. v1.2.3
    :param ctx: the click context
    :param param: the click parameter
    :param value: the parameter value
    :return: the validated value
    :raises click.BadParameter: if the value is not a v-prefixed semantic version
    """
    # Official SemVer 2.0.0 recommended pattern, with a leading "v" required
    re_semver = r"^v(?P<major>0|[1-9]\d*)\.(?P<minor>0|[1-9]\d*)\.(?P<patch>0|[1-9]\d*)(?:-(?P<prerelease>(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+(?P<buildmetadata>[0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$"
    if re.match(re_semver, value):
        return value
    # fix: typo in user-facing message ("specifiy" -> "specify")
    raise click.BadParameter("please specify major, minor and patch versions, e.g. v1.0.0")


@click.group()
@click.option(
"--log-level",
Expand Down Expand Up @@ -309,8 +293,7 @@ def source_code_package(ctx, ignore, solution_name):
@click.option(
"--version-code",
help="The version of the package.",
required=True,
callback=validate_version_code,
required=True
)
@click.option(
"--cdk-app-path",
Expand Down
Loading

0 comments on commit 82de37c

Please sign in to comment.