diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md index 2083e17..7807625 100644 --- a/.github/ISSUE_TEMPLATE/bug_report.md +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -17,7 +17,7 @@ Steps to reproduce the behavior. A clear and concise description of what you expected to happen. **Please complete the following information about the solution:** -- [ ] Version: [e.g. v1.0.0] +- [ ] Version: [e.g. v0.0.1] To get the version of the solution, you can look at the description of the created CloudFormation stack. For example, "(SO0170) Maintaining Personalized Experiences with Machine Learning [...]". diff --git a/.gitignore b/.gitignore index 441f5ee..a38b3da 100644 --- a/.gitignore +++ b/.gitignore @@ -48,9 +48,9 @@ __pycache__/ # Generated test assets source/infrastructure/tests/assets/* !source/infrastructure/tests/assets/.keep -source/aws_lambda/get_next_scheduled_event/.gradle -source/aws_lambda/get_next_scheduled_event/build -source/aws_lambda/get_next_scheduled_event/.idea +source/scheduler/cdk/aws_solutions/scheduler/cdk/aws_lambda/get_next_scheduled_event/build +source/scheduler/cdk/aws_solutions/scheduler/cdk/aws_lambda/get_next_scheduled_event/.gradle +source/scheduler/cdk/aws_solutions/scheduler/cdk/aws_lambda/get_next_scheduled_event/.idea # gradle build files **/.gradle/* @@ -60,4 +60,6 @@ source/aws_lambda/get_next_scheduled_event/.idea # python build files source/cdk_solution_helper_py/helpers_cdk/build/* -source/cdk_solution_helper_py/helpers_common/build/* \ No newline at end of file +source/cdk_solution_helper_py/helpers_common/build/* +source/scheduler/common/build/* +source/scheduler/cdk/build/* \ No newline at end of file diff --git a/CHANGELOG.md b/CHANGELOG.md index 19fd6c8..fcdd62b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,13 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). +## [1.1.0] - 2021-11-22 +### Added +- The solution now creates an Amazon EventBridge event bus, and puts messages to the bus when resources have been +created by the workflow. This can be useful when integrating with external systems. +- The solution now contains a command line interface (CLI) that allows schedule creation for existing resources in +Amazon Personalize. + ## [1.0.1] - 2021-10-01 ### Added - The solution now exports the Amazon SNS Topic ARN as `SNSTopicArn`. diff --git a/NOTICE.txt b/NOTICE.txt index 4498f16..2d4d383 100644 --- a/NOTICE.txt +++ b/NOTICE.txt @@ -34,6 +34,7 @@ pytest-env under the Massachusetts Institute of Technology (MIT) license PyYAML under the Massachusetts Institute of Technology (MIT) license requests under the Apache License Version 2.0 requests-mock under the Apache License Version 2.0 +rich under the Massachusetts Institute of Technology (MIT) license tenacity under the Apache License Version 2.0 quartz-scheduler under the Apache License Version 2.0 diff --git a/README.md b/README.md index 83d846c..717dba2 100644 --- a/README.md +++ b/README.md @@ -35,6 +35,10 @@ The template includes the following components: 2. Perform solution FULL retraining on schedule (and update associated campaigns) 3. Perform solution UPDATE retraining on schedule (and update associated campaigns) 4. Create batch inference jobs +9. 
An Amazon EventBridge event bus, where resource status notifications are posted throughout the AWS Step +Functions workflow +10. A command line interface (CLI) that allows existing Amazon Personalize resources to be imported and schedules to +be established for them **Note**: From v1.0.0, AWS CloudFormation template resources are created by the [AWS CDK](https://aws.amazon.com/cdk/) @@ -287,9 +291,9 @@ To customize the solution, follow the steps below: The following procedures assumes that all the OS-level configuration has been completed. They are: * [AWS Command Line Interface](https://aws.amazon.com/cli/) -* [Python](https://www.python.org/) 3.7 or newer +* [Python](https://www.python.org/) 3.9 or newer * [Node.js](https://nodejs.org/en/) 16.x or newer -* [AWS CDK](https://aws.amazon.com/cdk/) 1.95.2 or newer +* [AWS CDK](https://aws.amazon.com/cdk/) 1.126.0 or newer * [Amazon Corretto OpenJDK](https://docs.aws.amazon.com/corretto/) 11 > **Please ensure you test the templates before updating any production deployments.** @@ -360,7 +364,7 @@ build-s3-cdk-dist \ S3 bucket where the name is `-`. The solution's CloudFormation template will expect the source code to be located in the bucket matching that name. - `$SOLUTION_NAME` - The name of This solution (example: personalize-solution-customization) -- `$VERSION` - The version number to use (example: v1.0.1) +- `$VERSION` - The version number to use (example: v0.0.1) - `$REGION_NAME` - The region name to use (example: us-east-1) This will result in all global assets being pushed to the `DIST_BUCKET_PREFIX`, and all regional assets being pushed to diff --git a/source/.coveragerc b/source/.coveragerc index 3c91c64..20c0237 100644 --- a/source/.coveragerc +++ b/source/.coveragerc @@ -7,6 +7,7 @@ source = infrastructure aws_lambda cdk_solution_helper_py + scheduler [report] fail_under = 80.0 \ No newline at end of file diff --git a/source/aws_lambda/create_batch_inference_job/handler.py b/source/aws_lambda/create_batch_inference_job/handler.py index 9c163e5..0b6eb72 100644 --- a/source/aws_lambda/create_batch_inference_job/handler.py +++ b/source/aws_lambda/create_batch_inference_job/handler.py @@ -64,6 +64,12 @@ "default": "omit", "as": "seconds", }, + "timeStarted": { + "source": "event", + "path": "workflowConfig.timeStarted", + "default": "omit", + "as": "iso8601", + }, }, ) def lambda_handler(event: Dict[str, Any], context: LambdaContext) -> Dict: diff --git a/source/aws_lambda/create_campaign/handler.py b/source/aws_lambda/create_campaign/handler.py index 5e82fd5..4ff4bf7 100644 --- a/source/aws_lambda/create_campaign/handler.py +++ b/source/aws_lambda/create_campaign/handler.py @@ -52,8 +52,14 @@ "default": "omit", "as": "seconds", }, + "timeStarted": { + "source": "event", + "path": "workflowConfig.timeStarted", + "default": "omit", + "as": "iso8601", + }, }, - status="campaign.status", + status="campaign.latestCampaignUpdate.status || campaign.status", ) def lambda_handler(event: Dict[str, Any], context: LambdaContext) -> Dict: """Create a campaign in Amazon Personalize based on the configuration in `event` diff --git a/source/aws_lambda/scheduler/__init__.py b/source/aws_lambda/create_config/__init__.py similarity index 100% rename from source/aws_lambda/scheduler/__init__.py rename to source/aws_lambda/create_config/__init__.py diff --git a/source/aws_lambda/create_config/handler.py b/source/aws_lambda/create_config/handler.py new file mode 100644 index 0000000..6094f32 --- /dev/null +++ 
b/source/aws_lambda/create_config/handler.py @@ -0,0 +1,42 @@ +# ###################################################################################################################### +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # +# # +# Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance # +# with the License. You may obtain a copy of the License at # +# # +# http://www.apache.org/licenses/LICENSE-2.0 # +# # +# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed # +# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for # +# the specific language governing permissions and limitations under the License. # +# ###################################################################################################################### +import json +import os + +from aws_lambda_powertools import Logger, Tracer, Metrics +from aws_lambda_powertools.metrics import MetricUnit +from aws_lambda_powertools.utilities.data_classes import S3Event + +from shared.personalize.service_model import ServiceModel +from shared.personalize_service import Personalize + + +logger = Logger() +tracer = Tracer() +metrics = Metrics() + + +@metrics.log_metrics +@tracer.capture_lambda_handler +def lambda_handler(event, context): + """Generate and return a solution configuration file derived from the properties of a dataset group + :param dict event: AWS Lambda Event (in this case, the dataset group and schedules) + :param context: AWS Lambda Context + :return: Dict + """ + dataset_group_name = event["datasetGroupName"] + schedules = event.get("schedules") + + cli = Personalize() + model = ServiceModel(cli, dataset_group_name=dataset_group_name) + return model.get_config(dataset_group_name=dataset_group_name, schedules=schedules) diff --git a/source/aws_lambda/create_dataset/handler.py b/source/aws_lambda/create_dataset/handler.py index 500c326..62aaedd 100644 --- a/source/aws_lambda/create_dataset/handler.py +++ b/source/aws_lambda/create_dataset/handler.py @@ -30,17 +30,23 @@ config={ "name": { "source": "event", - "path": "name", + "path": "serviceConfig.name", }, "datasetType": { "source": "event", - "path": "datasetType", + "path": "serviceConfig.datasetType", }, "datasetGroupArn": { "source": "event", - "path": "datasetGroupArn", + "path": "serviceConfig.datasetGroupArn", + }, + "schemaArn": {"source": "event", "path": "serviceConfig.schemaArn"}, + "timeStarted": { + "source": "event", + "path": "workflowConfig.timeStarted", + "default": "omit", + "as": "iso8601", }, - "schemaArn": {"source": "event", "path": "schemaArn"}, }, ) def lambda_handler(event: Dict[str, Any], context: LambdaContext) -> Dict: diff --git a/source/aws_lambda/create_dataset_group/handler.py b/source/aws_lambda/create_dataset_group/handler.py index 6f9adfb..4c72647 100644 --- a/source/aws_lambda/create_dataset_group/handler.py +++ b/source/aws_lambda/create_dataset_group/handler.py @@ -43,6 +43,12 @@ "path": "KMS_KEY_ARN", "default": "omit", }, + "timeStarted": { + "source": "event", + "path": "workflowConfig.timeStarted", + "default": "omit", + "as": "iso8601", + }, }, ) def lambda_handler(event: Dict[str, Any], context: LambdaContext) -> Dict: diff --git a/source/aws_lambda/create_dataset_import_job/handler.py b/source/aws_lambda/create_dataset_import_job/handler.py index 158422a..7c8899b 100644 --- 
a/source/aws_lambda/create_dataset_import_job/handler.py +++ b/source/aws_lambda/create_dataset_import_job/handler.py @@ -48,6 +48,12 @@ "default": "omit", "as": "seconds", }, + "timeStarted": { + "source": "event", + "path": "workflowConfig.timeStarted", + "default": "omit", + "as": "iso8601", + }, }, ) def lambda_handler(event: Dict[str, Any], context: LambdaContext) -> Dict: diff --git a/source/aws_lambda/create_event_tracker/handler.py b/source/aws_lambda/create_event_tracker/handler.py index 2158e56..e46ea7e 100644 --- a/source/aws_lambda/create_event_tracker/handler.py +++ b/source/aws_lambda/create_event_tracker/handler.py @@ -31,11 +31,17 @@ config={ "name": { "source": "event", - "path": "name", + "path": "serviceConfig.name", }, "datasetGroupArn": { "source": "event", - "path": "datasetGroupArn", + "path": "serviceConfig.datasetGroupArn", + }, + "timeStarted": { + "source": "event", + "path": "workflowConfig.timeStarted", + "default": "omit", + "as": "iso8601", }, }, ) diff --git a/source/aws_lambda/create_filter/handler.py b/source/aws_lambda/create_filter/handler.py index 943d441..654d20b 100644 --- a/source/aws_lambda/create_filter/handler.py +++ b/source/aws_lambda/create_filter/handler.py @@ -41,6 +41,12 @@ "source": "event", "path": "serviceConfig.filterExpression", }, + "timeStarted": { + "source": "event", + "path": "workflowConfig.timeStarted", + "default": "omit", + "as": "iso8601", + }, }, ) def lambda_handler(event: Dict[str, Any], context: LambdaContext) -> Dict: diff --git a/source/aws_lambda/create_solution/handler.py b/source/aws_lambda/create_solution/handler.py index a53adf4..d0a0ecb 100644 --- a/source/aws_lambda/create_solution/handler.py +++ b/source/aws_lambda/create_solution/handler.py @@ -62,6 +62,12 @@ "path": "serviceConfig.solutionConfig", "default": "omit", }, + "timeStarted": { + "source": "event", + "path": "workflowConfig.timeStarted", + "default": "omit", + "as": "iso8601", + }, }, ) def lambda_handler(event: Dict[str, Any], context: LambdaContext) -> Dict: diff --git a/source/aws_lambda/create_solution_version/handler.py b/source/aws_lambda/create_solution_version/handler.py index 98c8af0..6f38648 100644 --- a/source/aws_lambda/create_solution_version/handler.py +++ b/source/aws_lambda/create_solution_version/handler.py @@ -49,6 +49,12 @@ "path": "workflowConfig.solutionVersionArn", "default": "omit", }, + "timeStarted": { + "source": "event", + "path": "workflowConfig.timeStarted", + "default": "omit", + "as": "iso8601", + }, }, ) def lambda_handler(event: Dict[str, Any], context: LambdaContext) -> Dict: diff --git a/source/infrastructure/personalize/aws_lambda/layers/aws_lambda_powertools/__init__.py b/source/aws_lambda/prepare_input/__init__.py similarity index 100% rename from source/infrastructure/personalize/aws_lambda/layers/aws_lambda_powertools/__init__.py rename to source/aws_lambda/prepare_input/__init__.py diff --git a/source/aws_lambda/prepare_input/handler.py b/source/aws_lambda/prepare_input/handler.py new file mode 100644 index 0000000..159fde0 --- /dev/null +++ b/source/aws_lambda/prepare_input/handler.py @@ -0,0 +1,33 @@ +# ###################################################################################################################### +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # +# # +# Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance # +# with the License. 
You may obtain a copy of the License at # +# # +# http://www.apache.org/licenses/LICENSE-2.0 # +# # +# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed # +# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for # +# the specific language governing permissions and limitations under the License. # +# ###################################################################################################################### + +from typing import Dict, Any + +from aws_lambda_powertools import Logger, Tracer, Metrics +from aws_lambda_powertools.utilities.typing import LambdaContext + +from shared.sfn_middleware import set_workflow_config + +logger = Logger() +tracer = Tracer() +metrics = Metrics() + + +def lambda_handler(event: Dict[str, Any], context: LambdaContext) -> Dict: + """Add timeStarted to the workflowConfig of all items + :param event: AWS Lambda Event + :param context: AWS Lambda Context + :return: the modified input + """ + config = set_workflow_config(event) + return config diff --git a/source/aws_lambda/shared/events.py b/source/aws_lambda/shared/events.py new file mode 100644 index 0000000..cfec0a8 --- /dev/null +++ b/source/aws_lambda/shared/events.py @@ -0,0 +1,78 @@ +# ###################################################################################################################### +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # +# # +# Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance # +# with the License. You may obtain a copy of the License at # +# # +# http://www.apache.org/licenses/LICENSE-2.0 # +# # +# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed # +# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for # +# the specific language governing permissions and limitations under the License. 
# +# ###################################################################################################################### +from datetime import datetime +from typing import Dict, Optional + +from aws_lambda_powertools import Logger + +from shared.exceptions import ( + NotificationError, + SolutionVersionPending, +) +from shared.notifiers import NotifyEventBridge +from shared.resource import Resource + +logger = Logger() + + +NOTIFY_LIST = [NotifyEventBridge()] + + +class Notifies: + """Decorates a resource creation or describe call to provide event notifications""" + + def __init__(self, status: str): + self.status = status + + def __call__(self, function): + def wrapper(caller, resource: Resource, **kwargs): + try: + result = function(caller, resource, **kwargs) + except SolutionVersionPending as exc: + # because of how solution versions are handled, we must manually notify and re-raise + self.notify( + resource=resource, + result={ + "solutionVersionArn": str(exc), + "status": "CREATE IN_PROGRESS", + }, + cutoff=None, + ) + raise exc + + # run the notifier + cutoff = kwargs.get("timeStarted") + self.notify(resource, result, cutoff) + + return result + + return wrapper + + def notify( + self, resource: Resource, result: Dict, cutoff: Optional[datetime] + ) -> None: + """ + Notify each target in the NOTIFY_LIST + :param resource: the subject of the notification + :param result: the description of the subject of the notification + :param cutoff: the cutoff datetime for notifications (UTC required, timezone aware) + :return: None + """ + for notifier in NOTIFY_LIST: + notifier.set_cutoff(cutoff) + try: + notifier.notify(self.status, resource, result) + except NotificationError as exc: + logger.error( + f"notifier {notifier.name} failed: {str(exc)}" + ) # log and continue through notifiers diff --git a/source/aws_lambda/shared/exceptions.py b/source/aws_lambda/shared/exceptions.py index be0f6e1..1b25d93 100644 --- a/source/aws_lambda/shared/exceptions.py +++ b/source/aws_lambda/shared/exceptions.py @@ -30,3 +30,7 @@ class ResourceInvalid(Exception): class ResourceNeedsUpdate(Exception): pass + + +class NotificationError(Exception): + pass diff --git a/source/aws_lambda/shared/notifiers/__init__.py b/source/aws_lambda/shared/notifiers/__init__.py new file mode 100644 index 0000000..ae5ed5c --- /dev/null +++ b/source/aws_lambda/shared/notifiers/__init__.py @@ -0,0 +1,14 @@ +# ###################################################################################################################### +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # +# # +# Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance # +# with the License. You may obtain a copy of the License at # +# # +# http://www.apache.org/licenses/LICENSE-2.0 # +# # +# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed # +# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for # +# the specific language governing permissions and limitations under the License. 
# +# ###################################################################################################################### + +from shared.notifiers.notify_eventbridge import NotifyEventBridge diff --git a/source/aws_lambda/shared/notifiers/base.py b/source/aws_lambda/shared/notifiers/base.py new file mode 100644 index 0000000..6749470 --- /dev/null +++ b/source/aws_lambda/shared/notifiers/base.py @@ -0,0 +1,207 @@ +# ###################################################################################################################### +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # +# # +# Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance # +# with the License. You may obtain a copy of the License at # +# # +# http://www.apache.org/licenses/LICENSE-2.0 # +# # +# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed # +# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for # +# the specific language governing permissions and limitations under the License. # +# ###################################################################################################################### + +from abc import ABC, abstractmethod +from datetime import datetime +from typing import Dict + +import jmespath +from aws_lambda_powertools import Logger + +from shared.resource import Resource + +logger = Logger() + + +ACTIVE = "ACTIVE" +TIME_FMT = "{name}.latestCampaignUpdate.{date} || {name}.{date}" + + +class Notifier(ABC): + """Notifiers provide notify_create and notify_complete against a resource and its data""" + + cutoff: datetime + notified: bool = False + + @abstractmethod + def notify_create(self, status: str, resource: Resource, result: Dict) -> None: + """ + Notify for resource creation + :param status: the status of the resource (usually CREATING, sometimes UPDATING) + :param resource: the Resource + :param result: the service response (per a create or update call) + :return: None + """ + pass + + @abstractmethod + def notify_complete(self, status: str, resource: Resource, result: Dict): + """ + Notify for resource completion + :param status: the status of the resource (usually ACTIVE) + :param resource: the Resource + :param result: the servie response (per a describe call) + :return: None + """ + pass + + @property + def name(self): + """ + Get the name of the notifier + :return: str + """ + return self.__class__.__name__ + + def notify(self, status: str, resource: Resource, result: Dict) -> None: + """ + Top-level notification + :param status: the resource status + :param resource: the Resource + :param result: the resource as returned from the SDK + :return: None + """ + logger.debug(f"{resource.name.camel} status update ({status}) on {result}") + + if self._is_create(resource, result): + logger.info( + f"notifier {self.name} starting for creation of {resource.name.camel}" + ) + self.notify_create(status, resource, result) + self.notified = True + elif self._resource_stable(resource, result): + logger.info( + f"notifier {self.name} starting for completion of {resource.name.camel}" + ) + self.notify_complete(status, resource, result) + self.notified = True + + def set_cutoff(self, cutoff: datetime) -> None: + """ + Sets the cutoff for notification (if the event is received after the cutoff - notify) + :param cutoff: the cutoff time + :return: Non e + """ + self.cutoff = cutoff + + def 
_is_create(self, resource: Resource, result: Dict) -> bool: + """ + Checks if the resource is a create or update + :param resource: the Resource + :param result: the resource as returned from the SDK + :return: bool + """ + if f"{resource.name.camel}Arn" in result.keys(): + return True + else: + return False + + def _resource_stable(self, resource: Resource, result: Dict) -> bool: + """ + Check whether the resource has stabilized and should trigger notification + :param resource: the Resource + :param result: the resource as returned from the SDK + :return: bool + """ + last_updated = self.get_resource_last_updated(resource, result) + created = self.get_resource_created(resource, result) + status = self.get_resource_status(resource, result) + latest_campaign_update = self.get_resource_latest_campaign_update( + resource, result + ) + + if not last_updated or not created: + logger.info( + f"{resource.name.camel} is not ready for notification (missing lastUpdated or creation DateTime)" + ) + return False + elif status != ACTIVE: + logger.info(f"{resource.name.camel} is not yet {ACTIVE}") + return False + elif ( + resource.name.camel == "campaign" + and latest_campaign_update + and latest_campaign_update.get("status") != ACTIVE + ): + logger.info(f"{resource.name.camel} is updating, and not yet active") + return False + elif not self.cutoff: + logger.debug( + f"{resource.name.camel} has no cutoff specified for notification" + ) + return False + elif last_updated <= self.cutoff: + logger.info(f"{resource.name.camel} does not require update at this time") + return False + else: + logger.info(f"{resource.name.camel} is ready for notification") + return True + + def get_resource_latest_campaign_update( + self, resource: Resource, result: Dict + ) -> Dict: + """ + Campaigns track their update status separately from the top-level status - return the update status + :param resource: the Campaign resource + :param result: the Campaign as returned from the SDK + :return: Dict + """ + return result[resource.name.camel].get("latestCampaignUpdate", {}) + + def get_resource_created(self, resource: Resource, result: Dict) -> datetime: + """ + Get the time of resource creation + :param resource: the Resource + :param result: the resource as returned from the SDK + :return: datetime + """ + return jmespath.search( + TIME_FMT.format(name=resource.name.camel, date="creationDateTime"), result + ) + + def get_resource_last_updated(self, resource: Resource, result: Dict) -> datetime: + """ + Get the time of resource update + :param resource: the Resource + :param result: the resource as returned from the SDK + :return: datetime + """ + return jmespath.search( + TIME_FMT.format(name=resource.name.camel, date="lastUpdatedDateTime"), + result, + ) + + def get_resource_status(self, resource, result: Dict) -> str: + """ + Get the resource status + :param resource: the Resource + :param result: the resource as returned from the SDK + :return: str + """ + return result[resource.name.camel].get("status") + + def get_resource_arn(self, resource: Resource, result: Dict) -> str: + """ + Get the resource ARN + :param resource: the Resource + :param result: the resource as returned from the sdk + :return: str + """ + arn_key = f"{resource.name.camel}Arn" + + if resource.name.camel in result.keys(): + return result[resource.name.camel][arn_key] + elif arn_key in result.keys(): + return result[arn_key] + else: + raise ValueError("requires a valid SDK response") diff --git 
a/source/aws_lambda/shared/notifiers/notify_eventbridge.py b/source/aws_lambda/shared/notifiers/notify_eventbridge.py new file mode 100644 index 0000000..b61fa93 --- /dev/null +++ b/source/aws_lambda/shared/notifiers/notify_eventbridge.py @@ -0,0 +1,98 @@ +# ###################################################################################################################### +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # +# # +# Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance # +# with the License. You may obtain a copy of the License at # +# # +# http://www.apache.org/licenses/LICENSE-2.0 # +# # +# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed # +# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for # +# the specific language governing permissions and limitations under the License. # +# ###################################################################################################################### +import json +import os +from typing import Dict + +from aws_lambda_powertools import Logger + +from aws_solutions.core import get_service_client +from shared.notifiers.base import Notifier +from shared.resource import Resource + +logger = Logger() + + +class NotifyEventBridge(Notifier): + """Provide notifications to EventBridge""" + + def __init__(self): + self.cli = get_service_client("events") + super().__init__() + + @property + def bus(self): + """ + The event bus ARN + :return: str + """ + return os.environ["EVENT_BUS_ARN"] + + def notify_create(self, status: str, resource: Resource, result: Dict) -> None: + """ + Notify for the creation of a resource + :param status: the resource status + :param resource: the Resource + :param result: the resource as returned from the SDK + :return: None + """ + arn = self.get_resource_arn(resource, result) + self._notify(status, arn, resource) + + def notify_complete(self, status: str, resource: Resource, result: Dict) -> None: + """ + Notify for the completion of a resource + :param status: the resource status + :param resource: the Resource + :param result: the resource as returned from the SDK + :return: None + """ + arn = self.get_resource_arn(resource, result) + + created = self.get_resource_created(resource, result) + updated = self.get_resource_last_updated(resource, result) + + seconds = int((updated - created).total_seconds()) + self._notify(status, arn, resource, duration=seconds) + + def _notify( + self, status: str, arn: str, resource: Resource, duration: int = 0 + ) -> None: + """ + The EventBridge notification implementation + :param status: the resource status + :param arn: the resource ARN + :param resource: the Resource + :param duration: the time it took the resource to stabilize + :return: None + """ + detail = {"Arn": arn, "Status": status} + if duration: + detail["Duration"] = duration + + result = self.cli.put_events( + Entries=[ + { + "Source": "solutions.aws.personalize", + "Resources": [arn], + "DetailType": f"Personalize {resource.name.dash.replace('-', ' ').title()} State Change", + "Detail": json.dumps(detail), + "EventBusName": self.bus, + } + ] + ) + if result["FailedEntryCount"] > 0: + for entry in result["Entries"]: + logger.error( + f"EventBridge failure ({entry['ErrorCode']}) {entry['ErrorMessage']}" + ) diff --git a/source/infrastructure/personalize/scheduler/aws_lambda/__init__.py 
b/source/aws_lambda/shared/personalize/__init__.py similarity index 100% rename from source/infrastructure/personalize/scheduler/aws_lambda/__init__.py rename to source/aws_lambda/shared/personalize/__init__.py diff --git a/source/aws_lambda/shared/personalize/service_model.py b/source/aws_lambda/shared/personalize/service_model.py new file mode 100644 index 0000000..bae7612 --- /dev/null +++ b/source/aws_lambda/shared/personalize/service_model.py @@ -0,0 +1,351 @@ +# ###################################################################################################################### +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # +# # +# Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance # +# with the License. You may obtain a copy of the License at # +# # +# http://www.apache.org/licenses/LICENSE-2.0 # +# # +# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed # +# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for # +# the specific language governing permissions and limitations under the License. # +# ###################################################################################################################### + +from __future__ import annotations + +import json +from dataclasses import dataclass, field +from typing import List, Dict, Callable, Optional + +from aws_solutions.core import get_aws_partition, get_aws_region, get_aws_account +from shared.personalize_service import Personalize, logger +from shared.resource import DatasetGroup, Resource, Filter +from shared.resource import ( + EventTracker, + Dataset, + Schema, + Solution, + Campaign, + BatchInferenceJob, +) + + +@dataclass(eq=True, frozen=True) +class ResourceElement: + resource: Resource = field(repr=False, compare=True) + arn: str = field(repr=True, compare=True) + + +@dataclass +class ResourceTree: + resources: ResourceElement = field(default_factory=dict, init=False, repr=False) + _resource_elements: Dict = field(default_factory=dict, init=False, repr=False) + _resource_parentage: Dict = field(default_factory=dict, init=False, repr=False) + + def add(self, parent: ResourceElement, child: ResourceElement): + if child not in self._resource_parentage.keys(): + self._resource_parentage[child] = parent + self._resource_elements.setdefault(parent, []).append(child) + else: + raise ValueError("element already exists") + + def children( + self, of: ResourceElement, where: Callable = lambda _: True + ) -> List[ResourceElement]: + return [elem for elem in self._resource_elements[of] if where(elem)] + + +class ServiceModel: + """Lists all resources in Amazon Personalize for lookup against the dataset group ARN""" + + def __init__(self, cli: Personalize, dataset_group_name=None): + self.cli = cli + self._arn_ownership = {} + self._resource_tree = ResourceTree() + + if dataset_group_name: + dsgs = [DatasetGroup().arn(dataset_group_name)] + else: + dsgs = self._arns(self.cli.list(DatasetGroup())) + + for dsg in dsgs: + logger.debug(f"listing children of {dsg}") + self._list_children(DatasetGroup(), dsg, dsg) + + def owned_by(self, resource_arn, dataset_group_owner: str) -> bool: + """ + Check + :param resource_arn: the resource ARN to check + :param dataset_group_owner: the dataset group owner expected + :return: True if the resource is managed by the dataset group, otherwise False + """ + if not 
dataset_group_owner.startswith("arn:"): + dataset_group_owner = f"arn:{get_aws_partition()}:personalize:{get_aws_region()}:{get_aws_account()}:dataset-group/{dataset_group_owner}" + + return dataset_group_owner == self._arn_ownership.get(resource_arn, False) + + def available(self, resource_arn: str) -> bool: + """ + Check if the requested ARN is available + :param resource_arn: requested ARN + :return: True if the ARN is available, otherwise False + """ + all_arns = set(self._arn_ownership.keys()).union( + set(self._arn_ownership.values()) + ) + return resource_arn not in all_arns + + def _list_children(self, parent: Resource, parent_arn, dsg: str) -> None: + """ + Recursively list the children of a resource + :param parent: the parent Resource + :param parent_arn: the parent Resource ARN + :param dsg: the parent dataset group ARN + :return: None + """ + for c in parent.children: + child_arns = self._arns( + self.cli.list(c, filters={f"{parent.name.camel}Arn": parent_arn}) + ) + + for arn in child_arns: + logger.debug(f"listing children of {arn}") + self._resource_tree.add( + parent=ResourceElement(parent, parent_arn), + child=ResourceElement(c, arn), + ) + self._arn_ownership[arn] = dsg + self._list_children(c, arn, dsg) + + def _arns(self, l: List[Dict]) -> List[str]: + """ + Lists the first ARN found for each resource in a list of resources + :param l: the list of resources + :return: the list of ARNs + """ + return [ + [v for k, v in resource.items() if k.endswith("Arn")][0] for resource in l + ] + + def _filter(self, result: Dict) -> Dict: + resource_key = next(iter(k for k in result.keys() if k != "ResponseMetadata")) + result = result[resource_key] + result = { + k: v for k, v in result.items() if k == "recipeArn" or not k.endswith("Arn") + } + + # common + result.pop("status", None) + result.pop("creationDateTime", None) + result.pop("lastUpdatedDateTime", None) + + # event tracker + result.pop("accountId", None) + result.pop("trackingId", None) + + # datset + result.pop("datasetType", None) + + # schema + if resource_key == "schema": + result["schema"] = json.loads(result["schema"]) + + # solution + result.pop("latestSolutionVersion", None) + + # campaign + result.pop("latestCampaignUpdate", None) + + # batch job + for item in { + "failureReason", + "jobInput", + "jobOutput", + "jobName", + "roleArn", + "solutionVersionArn", + }: + result.pop(item, None) + + return result + + def get_config(self, dataset_group_name, schedules: Optional[Dict]) -> Dict: + dataset_group_arn = DatasetGroup().arn(dataset_group_name) + dataset_group = ResourceElement(DatasetGroup(), dataset_group_arn) + + config = { + "datasetGroup": { + "serviceConfig": self._filter( + self.cli.describe(DatasetGroup(), name=dataset_group_name) + ) + } + } + + self._add_filter_config(config, dataset_group) + self._add_event_tracker_config(config, dataset_group) + self._add_datasets(config, dataset_group) + self._add_solutions(config, dataset_group) + self._add_schedules(config, schedules) + + return config + + def _add_schedules(self, config: Dict, schedules: Optional[Dict]) -> None: + """ + Modify config in place to add schedules + :param config: the config dictionary + :param schedules: the schedules to add + :return: None + """ + if not schedules: + return + + if schedules.get("import"): + config["datasetGroup"]["workflowConfig"] = { + "schedules": {"import": schedules.get("import")} + } + + solution_schedules = schedules.get("solutions", {}) + for idx, solution in enumerate(config.get("solutions", [])): + name = 
solution.get("serviceConfig", {}).get("name") + schedules = solution_schedules.get(name) + if schedules: + config["solutions"][idx]["workflowConfig"] = {"schedules": schedules} + + def _add_solutions(self, config, of: ResourceElement) -> None: + """ + Modify the config in place to add solutions, campaigns, and batch inference jobs + :param config: the config dictionary + :param of: the solution ResourceElement + :return: None + """ + solutions = self._resource_tree.children( + of, where=lambda x: x.resource == Solution() + ) + if not solutions: + return + + config.setdefault("solutions", []) + for solution in solutions: + _solution = self.cli.describe_by_arn(Solution(), solution.arn) + _solution_config = {"serviceConfig": self._filter(_solution)} + + campaigns = self._resource_tree.children( + of=solution, where=lambda x: x.resource == Campaign() + ) + for campaign in campaigns: + _campaign = self.cli.describe_by_arn(Campaign(), campaign.arn) + _solution_config.setdefault("campaigns", []).append( + {"serviceConfig": self._filter(_campaign)} + ) + + batch_jobs = self._resource_tree.children( + of=solution, where=lambda x: x.resource == BatchInferenceJob() + ) + for batch_job in batch_jobs: + _batch_job = self.cli.describe_by_arn( + BatchInferenceJob(), batch_job.arn + ) + _solution_config.setdefault("batchInferenceJobs", []).append( + {"serviceConfig": self._filter(_batch_job)} + ) + config["solutions"].append(_solution_config) + + def _add_filter_config(self, config: Dict, of: ResourceElement) -> None: + """ + Modify the config in place to add filters + :param config: the config dictionary + :param of: the DatasetGroup ResourceElement + :return: None + """ + filters = self._resource_tree.children( + of, where=lambda x: x.resource == Filter() + ) + if not filters: + return + + config["filters"] = [ + { + "serviceConfig": self._filter( + self.cli.describe_by_arn(filter.resource, filter.arn) + ) + } + for filter in filters + ] + + def _add_event_tracker_config(self, config: Dict, of: ResourceElement) -> None: + """ + Modify the config in place to add an event tracker + :param config: the config dictionary + :param of: the DatasetGroup ResourceElement + :return: None + """ + event_tracker = next( + iter( + self._resource_tree.children( + of, where=lambda x: x.resource == EventTracker() + ) + ), + None, + ) + if not event_tracker: + return + config["eventTracker"] = { + "serviceConfig": self._filter( + self.cli.describe_by_arn(event_tracker.resource, event_tracker.arn) + ) + } + + def _add_datasets(self, config, of: ResourceElement) -> None: + """ + Modify the config in place to add all datasets + :param config: the config dictionary + :param of: the DatasetGroup ResourceElement + :return: None + """ + for dataset_type in Dataset().allowed_types: + self._add_dataset(config, dataset_type, of) + + def _add_dataset( + self, config: Dict, dataset_type: str, of: ResourceElement + ) -> None: + """ + Modify the config in place to add a dataset and schema + :param config: the config dictionary + :param dataset_type: the dataset type (must be ITEMS, INTERACTIONS, or USERS) + :param of: the DatasetGroup ResourceElement + :return: None + """ + if dataset_type not in Dataset().allowed_types: + raise ValueError( + f"dataset type {dataset_type} must be one of {Dataset().allowed_types}" + ) + + dataset = next( + iter( + self._resource_tree.children( + of, + where=lambda x: x.resource == Dataset() + and x.arn.endswith(dataset_type), + ) + ), + None, + ) + if not dataset: + return + + dataset = 
self.cli.describe_by_arn(Dataset(), dataset.arn) + config.setdefault("datasets", {}) + config["datasets"].setdefault(dataset_type.lower(), {}) + config["datasets"][dataset_type.lower()].setdefault( + "dataset", {"serviceConfig": self._filter(dataset)} + ) + config["datasets"][dataset_type.lower()].setdefault( + "schema", + { + "serviceConfig": self._filter( + self.cli.describe_by_arn( + Schema(), arn=dataset["dataset"]["schemaArn"] + ) + ) + }, + ) diff --git a/source/aws_lambda/shared/personalize_service.py b/source/aws_lambda/shared/personalize_service.py index 42f8729..511e1fb 100644 --- a/source/aws_lambda/shared/personalize_service.py +++ b/source/aws_lambda/shared/personalize_service.py @@ -30,6 +30,8 @@ get_aws_region, get_aws_account, ) +from aws_solutions.scheduler.common import ScheduleError, Schedule +from shared.events import Notifies from shared.exceptions import ( ResourcePending, ResourceNeedsUpdate, @@ -50,7 +52,6 @@ Campaign, ) from shared.s3 import S3 -from shared.scheduler import Schedule, ScheduleError logger = Logger() metrics = Metrics() @@ -58,7 +59,11 @@ STATUS_CREATING = ("ACTIVE", "CREATE PENDING", "CREATE IN_PROGRESS") CRON_ANY_WILDCARD = "?" CRON_MIN_MAX_YEAR = (1970, 2199) -SOLUTION_PARAMETERS = (("maxAge", Resource), ("solutionVersionArn", SolutionVersion)) +WORKFLOW_PARAMETERS = ( + ("maxAge", Resource), + ("timeStarted", Resource), + ("solutionVersionArn", SolutionVersion), +) def get_duplicates(items): @@ -92,6 +97,7 @@ def list(self, resource: Resource, filters: Optional[Dict] = None): for item in page[resource_key]: yield item + @Notifies("ACTIVE") def describe(self, resource: Resource, **kwargs): """ Describe a resource in Amazon Personalize @@ -126,6 +132,11 @@ def describe_default(self, resource: Resource, **kwargs): describe_fn = getattr(self.cli, describe_fn_name) return describe_fn(**self.arn(resource, kwargs["name"])) + def describe_by_arn(self, resource: Resource, arn: str): + describe_fn_name = f"describe_{resource.name.snake}" + describe_fn = getattr(self.cli, describe_fn_name) + return describe_fn(**{f"{resource.name.camel}Arn": arn}) + def _check_solution(self, sv_arn_expected: str, sv_arn_received: str) -> bool: """ Check if solution versions sv_received and sv_expected have the same solution ARN @@ -149,6 +160,7 @@ def describe_with_update(self, resource: Resource, **kwargs): :param kwargs: the resource keyword arguments to validate :return: the response from Amazon Personalize """ + kwargs = self._remove_workflow_parameters(resource, kwargs.copy()) result = self.describe_default(resource, **kwargs) for k, v in kwargs.items(): received = result[resource.name.camel][k] @@ -159,16 +171,18 @@ def describe_with_update(self, resource: Resource, **kwargs): self._check_solution(expected, received) if result[resource.name.camel].get(k) != v: - raise ResourceNeedsUpdate() + raise ResourceNeedsUpdate( + result[resource.name.camel][f"{resource.name.camel}Arn"] + ) return result - def _remove_solution_parameters(self, resource: Resource, kwargs): + def _remove_workflow_parameters(self, resource: Resource, kwargs): """ - Remove solution parameters for the keyword arguments presented + Remove workflow parameters for the keyword arguments presented :param kwargs: :return: the kwargs with the solution parameters removed """ - for key, resource_type in SOLUTION_PARAMETERS: + for key, resource_type in WORKFLOW_PARAMETERS: if isinstance(resource, resource_type): kwargs.pop(key, None) return kwargs @@ -374,10 +388,14 @@ def is_active_batch_inference_job(job: 
Dict): **kwargs, ) + @Notifies("UPDATING") def update(self, resource: Resource, **kwargs): update_fn_name = f"update_{resource.name.snake}" update_fn = getattr(self.cli, update_fn_name) + # always remove the workflow configuration parameters before update + kwargs = self._remove_workflow_parameters(resource, kwargs) + # set up the ARN to update kwargs_arn = self.arn(resource, kwargs.pop("name")) kwargs.update(kwargs_arn) @@ -395,12 +413,13 @@ def update(self, resource: Resource, **kwargs): return result + @Notifies("CREATING") def create(self, resource: Resource, **kwargs): create_fn_name = f"create_{resource.name.snake}" create_fn = getattr(self.cli, create_fn_name) # always remove the workflow configuration parameters before create - kwargs = self._remove_solution_parameters(resource, kwargs) + kwargs = self._remove_workflow_parameters(resource, kwargs) try: result = create_fn(**kwargs) @@ -468,71 +487,6 @@ def exceptions(self): return self.cli.exceptions -class ServiceModel: - """Lists all resources in Amazon Personalize for lookup against the dataset group ARN""" - - _arn_ownership = {} - - def __init__(self, cli: Personalize): - self.cli = cli - - dsgs = self._arns(self.cli.list(DatasetGroup())) - for dsg in dsgs: - logger.debug(f"listing children of {dsg}") - self._list_children(DatasetGroup(), dsg, dsg) - - def owned_by(self, resource_arn, dataset_group_owner: str) -> bool: - """ - Check - :param resource_arn: the resource ARN to check - :param dataset_group_owner: the dataset group owner expected - :return: True if the resource is managed by the dataset group, otherwise False - """ - if not dataset_group_owner.startswith("arn:"): - dataset_group_owner = f"arn:{get_aws_partition()}:personalize:{get_aws_region()}:{get_aws_account()}:dataset-group/{dataset_group_owner}" - - return dataset_group_owner == self._arn_ownership.get(resource_arn, False) - - def available(self, resource_arn: str) -> bool: - """ - Check if the requested ARN is available - :param resource_arn: requested ARN - :return: True if the ARN is available, otherwise False - """ - all_arns = set(self._arn_ownership.keys()).union( - set(self._arn_ownership.values()) - ) - return resource_arn not in all_arns - - def _list_children(self, parent: Resource, parent_arn, dsg: str) -> None: - """ - Recursively list the children of a resource - :param parent: the parent Resource - :param parent_arn: the parent Resource ARN - :param dsg: the parent dataset group ARN - :return: None - """ - for c in parent.children: - child_arns = self._arns( - self.cli.list(c, filters={f"{parent.name.camel}Arn": parent_arn}) - ) - - for arn in child_arns: - logger.debug(f"listing children of {arn}") - self._arn_ownership[arn] = dsg - self._list_children(c, arn, dsg) - - def _arns(self, l: List[Dict]) -> List[str]: - """ - Lists the first ARN found for each resource in a list of resources - :param l: the list of resources - :return: the list of ARNs - """ - return [ - [v for k, v in resource.items() if k.endswith("Arn")][0] for resource in l - ] - - class InputValidator: @classmethod def validate(cls, method: str, expected_params: Dict) -> None: @@ -552,7 +506,12 @@ class Configuration: { "datasetGroup": [ "serviceConfig", - {"workflowConfig": [{"schedules": ["import"]}, "maxAge"]}, + { + "workflowConfig": [ + {"schedules": ["import"]}, + "maxAge", + ] + }, ] }, { @@ -735,13 +694,9 @@ def _validate_solutions(self, path="solutions[]"): self._validate_resource(Solution(), _solution) def _validate_solution_update(self): - valid_recipes = [ - 
"arn:aws:personalize:::recipe/aws-hrnn-coldstart", - "arn:aws:personalize:::recipe/aws-user-personalization", - ] invalid = ( jmespath.search( - f"solutions[?workflowConfig.schedules.update && (serviceConfig.recipeArn != '{valid_recipes[0]}' || serviceConfig.recipeArn != '{valid_recipes[1]}')].serviceConfig.name", + "solutions[].{name: serviceConfig.name, recipe: serviceConfig.recipeArn, update: workflowConfig.schedules.update} | @[?update && (!contains(recipe, `aws-hrnn-coldstart`) && !contains(recipe, `aws-user-personalization`))].name", self.config_dict, ) or [] diff --git a/source/aws_lambda/shared/resource/base.py b/source/aws_lambda/shared/resource/base.py index d37d873..741cf36 100644 --- a/source/aws_lambda/shared/resource/base.py +++ b/source/aws_lambda/shared/resource/base.py @@ -27,6 +27,16 @@ def __init__(self): name = name[0].lower() + name[1:] self.name = ResourceName(name) - def arn(self, name: str) -> str: - arn_prefix = f"arn:{get_aws_partition()}:personalize:{get_aws_region()}:{get_aws_account()}" - return f"{arn_prefix}:{self.name.dash}/{name}" + def arn(self, name: str, **kwargs) -> str: + if self.name.camel == "solutionVersion": + arn_prefix = f"arn:{get_aws_partition()}:personalize:{get_aws_region()}:{get_aws_account()}" + return f"{arn_prefix}:solution/{name}/{kwargs.get('sv_id', 'unknown')}" + else: + arn_prefix = f"arn:{get_aws_partition()}:personalize:{get_aws_region()}:{get_aws_account()}" + return f"{arn_prefix}:{self.name.dash}/{name}" + + def __eq__(self, other): + return self.name.camel == other.name.camel + + def __hash__(self): + return hash(self.name.camel) diff --git a/source/aws_lambda/shared/resource/dataset.py b/source/aws_lambda/shared/resource/dataset.py index 755bc98..f32cc46 100644 --- a/source/aws_lambda/shared/resource/dataset.py +++ b/source/aws_lambda/shared/resource/dataset.py @@ -16,3 +16,4 @@ class Dataset(Resource): children = [DatasetImportJob()] + allowed_types = {"INTERACTIONS", "ITEMS", "USERS"} diff --git a/source/aws_lambda/shared/resource/dataset_group.py b/source/aws_lambda/shared/resource/dataset_group.py index 20f8c29..4cc9312 100644 --- a/source/aws_lambda/shared/resource/dataset_group.py +++ b/source/aws_lambda/shared/resource/dataset_group.py @@ -12,9 +12,10 @@ # ###################################################################################################################### from shared.resource.base import Resource from shared.resource.dataset import Dataset +from shared.resource.event_tracker import EventTracker from shared.resource.filter import Filter from shared.resource.solution import Solution class DatasetGroup(Resource): - children = [Dataset(), Filter(), Solution()] + children = [Dataset(), Filter(), Solution(), EventTracker()] diff --git a/source/aws_lambda/shared/sfn_middleware.py b/source/aws_lambda/shared/sfn_middleware.py index e61af68..89d5428 100644 --- a/source/aws_lambda/shared/sfn_middleware.py +++ b/source/aws_lambda/shared/sfn_middleware.py @@ -17,12 +17,14 @@ import json import os from dataclasses import dataclass, field +from enum import Enum, auto from pathlib import Path from typing import Dict, Any, Callable, Optional, List, Union from uuid import uuid4 import jmespath from aws_lambda_powertools import Logger +from dateutil.parser import isoparse from aws_solutions.core import get_service_client from shared.date_helpers import parse_datetime @@ -35,6 +37,8 @@ from shared.personalize_service import Personalize from shared.resource import get_resource +logger = Logger() + STATUS_IN_PROGRESS = ( 
"CREATE PENDING", "CREATE IN_PROGRESS", @@ -44,7 +48,18 @@ STATUS_FAILED = "CREATE FAILED" STATUS_ACTIVE = "ACTIVE" -logger = Logger() +WORKFLOW_PARAMETERS = { + "maxAge", + "timeStarted", +} +WORKFLOW_CONFIG_DEFAULT = { + "timeStarted": datetime.datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%SZ") +} + + +class Arity(Enum): + ONE = auto() + MANY = auto() def json_handler(item): @@ -57,6 +72,41 @@ def json_handler(item): raise TypeError("Unknown Type") +def set_workflow_config(config: Dict) -> Dict: + """ + Set the defaults for workflowConfiguration for all configured items + :param config: the configuration dictionary + :return: the configuration with defaults set + """ + + resources = { + "datasetGroup": Arity.ONE, + "solutions": Arity.MANY, + "campaigns": Arity.MANY, + "batchInferenceJobs": Arity.MANY, + "filters": Arity.MANY, + "solutionVersion": Arity.ONE, + } + # Note: schema creation notification is not supported at this time + # Note: dataset, dataset import job, event tracker notifications are added in the workflow + + for k, v in config.items(): + if k in {"serviceConfig", "workflowConfig", "bucket", "currentDate"}: + pass # do not modify any serviceConfig keys + elif k in resources.keys() and resources[k] == Arity.ONE: + config[k].setdefault("workflowConfig", {}) + config[k]["workflowConfig"] |= WORKFLOW_CONFIG_DEFAULT + elif k in resources.keys() and resources[k] == Arity.MANY: + for idx, i in enumerate(v): + config[k][idx].setdefault("workflowConfig", {}) + config[k][idx]["workflowConfig"] |= WORKFLOW_CONFIG_DEFAULT + config[k][idx] = set_workflow_config(config[k][idx]) + else: + config[k] = set_workflow_config(config[k]) if config[k] else config[k] + + return config + + def set_defaults(config: Dict) -> Dict: """ Set the defaults for schedule/ solutions/ solution versions/ campaigns as empty if not set @@ -122,6 +172,8 @@ def format(self, resolved): return json.dumps(resolved) elif self.format_as == "seconds": return parse_datetime(resolved) + elif self.format_as == "iso8601": + return isoparse(resolved) elif self.format_as == "int": return int(resolved) else: @@ -226,7 +278,7 @@ def check_status( # NOSONAR - allow higher complexity continue if self.resource == "solutionVersion" and expected_key == "trainingMode": continue - if expected_key == "maxAge": + if expected_key in WORKFLOW_PARAMETERS: continue if actual_value != expected_value: diff --git a/source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/aws_lambda/environment.py b/source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/aws_lambda/environment.py new file mode 100644 index 0000000..4388231 --- /dev/null +++ b/source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/aws_lambda/environment.py @@ -0,0 +1,48 @@ +# ###################################################################################################################### +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # +# # +# Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance # +# with the License. You may obtain a copy of the License at # +# # +# http://www.apache.org/licenses/LICENSE-2.0 # +# # +# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed # +# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for # +# the specific language governing permissions and limitations under the License. 
# +# ###################################################################################################################### + +from dataclasses import dataclass, field + +from aws_cdk.aws_lambda import IFunction +from aws_cdk.core import Aws + +from aws_solutions.cdk.aws_lambda.environment_variable import EnvironmentVariable + + +@dataclass +class Environment: + """ + Tracks environment variables common to AWS Lambda functions deployed by this solution + """ + + scope: IFunction + solution_name: EnvironmentVariable = field(init=False, repr=False) + solution_id: EnvironmentVariable = field(init=False, repr=False) + solution_version: EnvironmentVariable = field(init=False, repr=False) + log_level: EnvironmentVariable = field(init=False, repr=False) + powertools_service_name: EnvironmentVariable = field(init=False, repr=False) + + def __post_init__(self): + cloudwatch_namespace_id = f"personalize_solution_{Aws.STACK_NAME}" + cloudwatch_service_id_default = f"Workflow" + + self.solution_name = EnvironmentVariable(self.scope, "SOLUTION_NAME") + self.solution_id = EnvironmentVariable(self.scope, "SOLUTION_ID") + self.solution_version = EnvironmentVariable(self.scope, "SOLUTION_VERSION") + self.log_level = EnvironmentVariable(self.scope, "LOG_LEVEL", "INFO") + self.powertools_service_name = EnvironmentVariable( + self.scope, "POWERTOOLS_SERVICE_NAME", cloudwatch_service_id_default + ) + self.powertools_metrics_namespace = EnvironmentVariable( + self.scope, "POWERTOOLS_METRICS_NAMESPACE", cloudwatch_namespace_id + ) diff --git a/source/infrastructure/personalize/aws_lambda/functions/environment_variable.py b/source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/aws_lambda/environment_variable.py similarity index 100% rename from source/infrastructure/personalize/aws_lambda/functions/environment_variable.py rename to source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/aws_lambda/environment_variable.py diff --git a/source/infrastructure/personalize/scheduler/__init__.py b/source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/aws_lambda/layers/__init__.py similarity index 96% rename from source/infrastructure/personalize/scheduler/__init__.py rename to source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/aws_lambda/layers/__init__.py index cc56cd7..ef2f9eb 100644 --- a/source/infrastructure/personalize/scheduler/__init__.py +++ b/source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/aws_lambda/layers/__init__.py @@ -10,5 +10,3 @@ # on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for # # the specific language governing permissions and limitations under the License. # # ###################################################################################################################### - -from personalize.scheduler.base import Scheduler diff --git a/source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/aws_lambda/layers/aws_lambda_powertools/__init__.py b/source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/aws_lambda/layers/aws_lambda_powertools/__init__.py new file mode 100644 index 0000000..8b086e5 --- /dev/null +++ b/source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/aws_lambda/layers/aws_lambda_powertools/__init__.py @@ -0,0 +1,16 @@ +# ###################################################################################################################### +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
# +# # +# Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance # +# with the License. You may obtain a copy of the License at # +# # +# http://www.apache.org/licenses/LICENSE-2.0 # +# # +# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed # +# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for # +# the specific language governing permissions and limitations under the License. # +# ###################################################################################################################### + +from aws_solutions.cdk.aws_lambda.layers.aws_lambda_powertools.layer import ( + PowertoolsLayer, +) diff --git a/source/infrastructure/personalize/aws_lambda/layers/aws_lambda_powertools/layer.py b/source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/aws_lambda/layers/aws_lambda_powertools/layer.py similarity index 100% rename from source/infrastructure/personalize/aws_lambda/layers/aws_lambda_powertools/layer.py rename to source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/aws_lambda/layers/aws_lambda_powertools/layer.py diff --git a/source/infrastructure/personalize/aws_lambda/layers/aws_lambda_powertools/requirements/requirements.txt b/source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/aws_lambda/layers/aws_lambda_powertools/requirements/requirements.txt similarity index 100% rename from source/infrastructure/personalize/aws_lambda/layers/aws_lambda_powertools/requirements/requirements.txt rename to source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/aws_lambda/layers/aws_lambda_powertools/requirements/requirements.txt diff --git a/source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/helpers/copytree.py b/source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/helpers/copytree.py index b162f73..25987e5 100644 --- a/source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/helpers/copytree.py +++ b/source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/helpers/copytree.py @@ -22,8 +22,6 @@ def ignore_globs(*globs): Patterns is a sequence of glob-style patterns that are used to exclude files""" - # globs = globs + tuple([glob[:-2] for glob in globs if glob.endswith('/*')]) # ignore folders - def _ignore_globs(path, names): ignored_names = [] paths = [Path(os.path.join(path, name)).resolve() for name in names] diff --git a/source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/stack.py b/source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/stack.py index b85ed84..bf75bf0 100644 --- a/source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/stack.py +++ b/source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/stack.py @@ -50,12 +50,14 @@ def __init__( self.metrics = {} self.solution_id = self.node.try_get_context("SOLUTION_ID") + self.solution_version = self.node.try_get_context("SOLUTION_VERSION") self.mappings = Mappings(self, solution_id=self.solution_id) self.solutions_template_filename = validate_template_filename(template_filename) + self.description = description.strip(".") self.solutions_template_options = TemplateOptions( self, construct_id=construct_id, - description=f"({self.solution_id}) {description}", + description=f"({self.solution_id}) - {self.description}. 
Version {self.solution_version}", filename=template_filename, ) diff --git a/source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/stepfunctions/__init__.py b/source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/stepfunctions/__init__.py new file mode 100644 index 0000000..ef2f9eb --- /dev/null +++ b/source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/stepfunctions/__init__.py @@ -0,0 +1,12 @@ +# ###################################################################################################################### +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # +# # +# Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance # +# with the License. You may obtain a copy of the License at # +# # +# http://www.apache.org/licenses/LICENSE-2.0 # +# # +# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed # +# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for # +# the specific language governing permissions and limitations under the License. # +# ###################################################################################################################### diff --git a/source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/stepfunctions/solution_fragment.py b/source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/stepfunctions/solution_fragment.py new file mode 100644 index 0000000..48e0c0e --- /dev/null +++ b/source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/stepfunctions/solution_fragment.py @@ -0,0 +1,81 @@ +# ###################################################################################################################### +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # +# # +# Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance # +# with the License. You may obtain a copy of the License at # +# # +# http://www.apache.org/licenses/LICENSE-2.0 # +# # +# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed # +# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for # +# the specific language governing permissions and limitations under the License. 
# +# ###################################################################################################################### +from typing import List, Dict +from typing import Optional + +from aws_cdk.aws_lambda import CfnFunction +from aws_cdk.aws_stepfunctions import State, INextable, TaskInput, StateMachineFragment +from aws_cdk.aws_stepfunctions_tasks import LambdaInvoke +from aws_cdk.core import Construct, Duration + + +class SolutionFragment(StateMachineFragment): + def __init__( + self, # NOSONAR (python:S107) - allow large number of method parameters + scope: Construct, + id: str, + function: CfnFunction, + payload: Optional[TaskInput] = None, + input_path: Optional[str] = "$", + result_path: Optional[str] = "$", + output_path: Optional[str] = "$", + result_selector: Optional[Dict] = None, + failure_state: Optional[State] = None, + backoff_rate: Optional[int] = 1.05, + interval: Optional[Duration] = Duration.seconds(5), + max_attempts: Optional[int] = 5, + ): + super().__init__(scope, id) + + self.failure_state = failure_state + + self.task = LambdaInvoke( + self, + id, + lambda_function=function, + retry_on_service_exceptions=True, + input_path=input_path, + result_path=result_path, + output_path=output_path, + payload=payload, + payload_response_only=True, + result_selector=result_selector, + ) + self.task.add_retry( + backoff_rate=backoff_rate, + interval=interval, + max_attempts=max_attempts, + errors=["ResourcePending"], + ) + if self.failure_state: + self.task.add_catch( + failure_state, + errors=["ResourceFailed", "ResourceInvalid"], + result_path="$.statesError", + ) + self.task.add_catch( + failure_state, errors=["States.ALL"], result_path="$.statesError" + ) + + @property + def start_state(self) -> State: + return self.task + + @property + def end_states(self) -> List[INextable]: + """ + Get the end states of this chain + :return: The chainable end states of this chain (i.e. 
not the failure state) + """ + states = [self.task] + return states diff --git a/source/infrastructure/personalize/aws_lambda/functions/solutionstep.py b/source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/stepfunctions/solutionstep.py similarity index 85% rename from source/infrastructure/personalize/aws_lambda/functions/solutionstep.py rename to source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/stepfunctions/solutionstep.py index 7805672..279e61b 100644 --- a/source/infrastructure/personalize/aws_lambda/functions/solutionstep.py +++ b/source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/stepfunctions/solutionstep.py @@ -12,16 +12,17 @@ # ###################################################################################################################### from pathlib import Path -from typing import Optional +from typing import Optional, List +from aws_cdk.aws_events import EventBus from aws_cdk.aws_lambda import Tracing, Runtime, RuntimeFamily from aws_cdk.aws_stepfunctions import IChainable, TaskInput, State from aws_cdk.core import Construct, Duration +from aws_solutions.cdk.aws_lambda.environment import Environment from aws_solutions.cdk.aws_lambda.python.function import SolutionsPythonFunction from aws_solutions.cdk.cfn_nag import add_cfn_nag_suppressions, CfnNagSuppression -from personalize.aws_lambda.functions.environment import Environment -from personalize.step_functions.personalization_fragment import PersonalizationFragment +from aws_solutions.cdk.stepfunctions.solution_fragment import SolutionFragment class SolutionStep(Construct): @@ -37,6 +38,7 @@ def __init__( payload: Optional[TaskInput] = None, layers=None, failure_state: Optional[IChainable] = None, + libraries: Optional[List[Path]] = None, ): super().__init__(scope, f"{id} Solution Step") @@ -46,6 +48,7 @@ def __init__( layers=layers, function=function, entrypoint=entrypoint, + libraries=libraries, ) add_cfn_nag_suppressions( self.function.role.node.try_find_child("DefaultPolicy").node.find_child( @@ -86,7 +89,7 @@ def state( output_path = output_path or self._output_path failure_state = failure_state or self._failure_state - return PersonalizationFragment( + return SolutionFragment( scope, construct_id, function=self.function, @@ -105,6 +108,10 @@ def _snake_case(self, name) -> str: def _set_permissions(self) -> None: raise NotImplementedError("please implement _set_permissions") + def grant_put_events(self, bus: EventBus): + self.function.add_environment("EVENT_BUS_ARN", bus.event_bus_arn) + bus.grant_put_events_to(self.function) + def _create_resources(self) -> None: pass # not required @@ -114,14 +121,13 @@ def _set_environment(self) -> Environment: class _CreateLambdaFunction(SolutionsPythonFunction): def __init__(self, scope: Construct, construct_id: str, **kwargs): entrypoint = kwargs.pop("entrypoint", None) - if not entrypoint: - entrypoint = ( - Path(__file__).absolute().parents[4] - / "aws_lambda" - / construct_id.replace("_fn", "") - / "handler.py" - ) - libraries = [Path(__file__).absolute().parents[4] / "aws_lambda" / "shared"] + if not entrypoint or not entrypoint.exists(): + raise ValueError("an entrypoint (Path to a .py file) must be provided") + + libraries = kwargs.pop("libraries", None) + if libraries and any(not l.exists() for l in libraries): + raise ValueError(f"libraries provided, but do not exist at {libraries}") + function = kwargs.pop("function") kwargs["layers"] = kwargs.get("layers", []) kwargs["tracing"] = Tracing.ACTIVE diff --git 
a/source/cdk_solution_helper_py/helpers_cdk/setup.py b/source/cdk_solution_helper_py/helpers_cdk/setup.py index 98a095b..d830dfb 100644 --- a/source/cdk_solution_helper_py/helpers_cdk/setup.py +++ b/source/cdk_solution_helper_py/helpers_cdk/setup.py @@ -49,8 +49,8 @@ def get_version(): ] }, install_requires=[ - "aws-cdk.core>=1.120.0", - "aws-cdk.aws_lambda>=1.120.0", + "aws-cdk.core>=1.126.0", + "aws-cdk.aws_lambda>=1.126.0", "Click>=7.1.2", "boto3>=1.17.52", "requests>=2.24.0", diff --git a/source/cdk_solution_helper_py/requirements-dev.txt b/source/cdk_solution_helper_py/requirements-dev.txt index 6ee358c..31540ea 100644 --- a/source/cdk_solution_helper_py/requirements-dev.txt +++ b/source/cdk_solution_helper_py/requirements-dev.txt @@ -1,5 +1,5 @@ -aws-cdk.core>=1.123.0 -aws-cdk.aws_lambda>=1.123.0 +aws-cdk.core>=1.126.0 +aws-cdk.aws_lambda>=1.126.0 black boto3>=1.17.49 requests>=2.24.0 diff --git a/source/images/solution-architecture.jpg b/source/images/solution-architecture.jpg index 519f953..7634f22 100644 Binary files a/source/images/solution-architecture.jpg and b/source/images/solution-architecture.jpg differ diff --git a/source/infrastructure/cdk.json b/source/infrastructure/cdk.json index 56e85b6..07ce503 100644 --- a/source/infrastructure/cdk.json +++ b/source/infrastructure/cdk.json @@ -3,7 +3,7 @@ "context": { "SOLUTION_NAME": "Maintaining Personalized Experiences with Machine Learning", "SOLUTION_ID": "SO0170", - "SOLUTION_VERSION": "1.0.1", + "SOLUTION_VERSION": "v1.1.0", "@aws-cdk/core:newStyleStackSynthesis": "true", "@aws-cdk/core:enableStackNameDuplicates": "true", "aws-cdk:enableDiffNoFail": "true", diff --git a/source/infrastructure/deploy.py b/source/infrastructure/deploy.py index 311124c..f27264d 100644 --- a/source/infrastructure/deploy.py +++ b/source/infrastructure/deploy.py @@ -34,7 +34,7 @@ def build_app(context): PersonalizeStack( app, "PersonalizeStack", - description="Deploy, deliver and maintain personalized experiences with Amazon Personalize", + description=f"Deploy, deliver and maintain personalized experiences with Amazon Personalize", template_filename="maintaining-personalized-experiences-with-machine-learning.template", synthesizer=solution.synthesizer, ) diff --git a/source/infrastructure/personalize/aws_lambda/functions/__init__.py b/source/infrastructure/personalize/aws_lambda/functions/__init__.py index da885fb..1c533ee 100644 --- a/source/infrastructure/personalize/aws_lambda/functions/__init__.py +++ b/source/infrastructure/personalize/aws_lambda/functions/__init__.py @@ -15,6 +15,7 @@ CreateBatchInferenceJob, ) from personalize.aws_lambda.functions.create_campaign import CreateCampaign +from personalize.aws_lambda.functions.create_config import CreateConfig from personalize.aws_lambda.functions.create_dataset import CreateDataset from personalize.aws_lambda.functions.create_dataset_group import CreateDatasetGroup from personalize.aws_lambda.functions.create_dataset_import_job import ( @@ -30,4 +31,3 @@ ) from personalize.aws_lambda.functions.create_timestamp import CreateTimestamp from personalize.aws_lambda.functions.s3_event import S3EventHandler -from personalize.aws_lambda.functions.sns_notification import SNSNotification diff --git a/source/infrastructure/personalize/aws_lambda/functions/create_batch_inference_job.py b/source/infrastructure/personalize/aws_lambda/functions/create_batch_inference_job.py index 6d61d19..a38de74 100644 --- a/source/infrastructure/personalize/aws_lambda/functions/create_batch_inference_job.py +++ 
b/source/infrastructure/personalize/aws_lambda/functions/create_batch_inference_job.py @@ -10,12 +10,13 @@ # on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for # # the specific language governing permissions and limitations under the License. # # ###################################################################################################################### +from pathlib import Path import aws_cdk.aws_iam as iam from aws_cdk.aws_s3 import IBucket from aws_cdk.core import Construct, Aws -from personalize.aws_lambda.functions.solutionstep import SolutionStep +from aws_solutions.cdk.stepfunctions.solutionstep import SolutionStep class CreateBatchInferenceJob(SolutionStep): @@ -63,6 +64,13 @@ def __init__( scope, id, layers=layers, + entrypoint=( + Path(__file__).absolute().parents[4] + / "aws_lambda" + / "create_batch_inference_job" + / "handler.py" + ), + libraries=[Path(__file__).absolute().parents[4] / "aws_lambda" / "shared"], ) def _set_permissions(self): diff --git a/source/infrastructure/personalize/aws_lambda/functions/create_campaign.py b/source/infrastructure/personalize/aws_lambda/functions/create_campaign.py index 704da1f..edf9f68 100644 --- a/source/infrastructure/personalize/aws_lambda/functions/create_campaign.py +++ b/source/infrastructure/personalize/aws_lambda/functions/create_campaign.py @@ -10,11 +10,12 @@ # on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for # # the specific language governing permissions and limitations under the License. # # ###################################################################################################################### +from pathlib import Path import aws_cdk.aws_iam as iam from aws_cdk.core import Construct, Aws -from personalize.aws_lambda.functions.solutionstep import SolutionStep +from aws_solutions.cdk.stepfunctions.solutionstep import SolutionStep class CreateCampaign(SolutionStep): @@ -28,6 +29,13 @@ def __init__( scope, id, layers=layers, + entrypoint=( + Path(__file__).absolute().parents[4] + / "aws_lambda" + / "create_campaign" + / "handler.py" + ), + libraries=[Path(__file__).absolute().parents[4] / "aws_lambda" / "shared"], ) def _set_permissions(self): diff --git a/source/infrastructure/personalize/aws_lambda/functions/sns_notification.py b/source/infrastructure/personalize/aws_lambda/functions/create_config.py similarity index 74% rename from source/infrastructure/personalize/aws_lambda/functions/sns_notification.py rename to source/infrastructure/personalize/aws_lambda/functions/create_config.py index 571e141..9fc4432 100644 --- a/source/infrastructure/personalize/aws_lambda/functions/sns_notification.py +++ b/source/infrastructure/personalize/aws_lambda/functions/create_config.py @@ -14,25 +14,28 @@ from pathlib import Path from aws_cdk.aws_lambda import Tracing, Runtime, RuntimeFamily -from aws_cdk.aws_sns import Topic -from aws_cdk.core import Construct, Duration +from aws_cdk.core import Construct, Duration, Aws +import aws_cdk.aws_iam as iam +from aws_solutions.cdk.aws_lambda.environment import Environment from aws_solutions.cdk.aws_lambda.python.function import SolutionsPythonFunction from aws_solutions.cdk.cfn_nag import add_cfn_nag_suppressions, CfnNagSuppression -from personalize.aws_lambda.functions.environment import Environment -class SNSNotification(SolutionsPythonFunction): +class CreateConfig(SolutionsPythonFunction): def __init__(self, scope: Construct, construct_id: str, 
**kwargs): entrypoint = ( Path(__file__).absolute().parents[4] / "aws_lambda" - / "sns_notification" + / "create_config" / "handler.py" ) function = "lambda_handler" + kwargs["libraries"] = [ + Path(__file__).absolute().parents[4] / "aws_lambda" / "shared" + ] kwargs["tracing"] = Tracing.ACTIVE - kwargs["timeout"] = Duration.seconds(15) + kwargs["timeout"] = Duration.seconds(90) kwargs["runtime"] = Runtime("python3.9", RuntimeFamily.PYTHON) super().__init__(scope, construct_id, entrypoint, function, **kwargs) @@ -48,6 +51,18 @@ def __init__(self, scope: Construct, construct_id: str, **kwargs): ], ) - def grant_publish(self, topic: Topic): - topic.grant_publish(self) - self.add_environment("SNS_TOPIC_ARN", topic.topic_arn) + self._set_permissions() + + def _set_permissions(self): + self.add_to_role_policy( + statement=iam.PolicyStatement( + actions=[ + "personalize:Describe*", + "personalize:List*", + ], + effect=iam.Effect.ALLOW, + resources=[ + f"arn:{Aws.PARTITION}:personalize:{Aws.REGION}:{Aws.ACCOUNT_ID}:*", + ], + ) + ) diff --git a/source/infrastructure/personalize/aws_lambda/functions/create_dataset.py b/source/infrastructure/personalize/aws_lambda/functions/create_dataset.py index 1d65b70..06eace2 100644 --- a/source/infrastructure/personalize/aws_lambda/functions/create_dataset.py +++ b/source/infrastructure/personalize/aws_lambda/functions/create_dataset.py @@ -10,14 +10,14 @@ # on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for # # the specific language governing permissions and limitations under the License. # # ###################################################################################################################### - +from pathlib import Path from typing import Optional import aws_cdk.aws_iam as iam from aws_cdk.aws_stepfunctions import IChainable from aws_cdk.core import Construct, Aws -from personalize.aws_lambda.functions.solutionstep import SolutionStep +from aws_solutions.cdk.stepfunctions.solutionstep import SolutionStep class CreateDataset(SolutionStep): @@ -33,6 +33,13 @@ def __init__( id, layers=layers, failure_state=failure_state, + entrypoint=( + Path(__file__).absolute().parents[4] + / "aws_lambda" + / "create_dataset" + / "handler.py" + ), + libraries=[Path(__file__).absolute().parents[4] / "aws_lambda" / "shared"], ) def _set_permissions(self): diff --git a/source/infrastructure/personalize/aws_lambda/functions/create_dataset_group.py b/source/infrastructure/personalize/aws_lambda/functions/create_dataset_group.py index eec9ca8..0dcd9ac 100644 --- a/source/infrastructure/personalize/aws_lambda/functions/create_dataset_group.py +++ b/source/infrastructure/personalize/aws_lambda/functions/create_dataset_group.py @@ -10,7 +10,7 @@ # on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for # # the specific language governing permissions and limitations under the License. 
# # ###################################################################################################################### - +from pathlib import Path from typing import Optional import aws_cdk.aws_iam as iam @@ -19,7 +19,7 @@ from aws_solutions.cdk.aws_lambda.cfn_custom_resources.resource_hash import ResourceHash from aws_solutions.cdk.cfn_nag import add_cfn_nag_suppressions, CfnNagSuppression -from personalize.aws_lambda.functions.solutionstep import SolutionStep +from aws_solutions.cdk.stepfunctions.solutionstep import SolutionStep class CreateDatasetGroup(SolutionStep): @@ -41,6 +41,13 @@ def __init__( id, layers=layers, failure_state=failure_state, + entrypoint=( + Path(__file__).absolute().parents[4] + / "aws_lambda" + / "create_dataset_group" + / "handler.py" + ), + libraries=[Path(__file__).absolute().parents[4] / "aws_lambda" / "shared"], **kwargs, ) diff --git a/source/infrastructure/personalize/aws_lambda/functions/create_dataset_import_job.py b/source/infrastructure/personalize/aws_lambda/functions/create_dataset_import_job.py index 3f2d636..7959c5a 100644 --- a/source/infrastructure/personalize/aws_lambda/functions/create_dataset_import_job.py +++ b/source/infrastructure/personalize/aws_lambda/functions/create_dataset_import_job.py @@ -10,7 +10,7 @@ # on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for # # the specific language governing permissions and limitations under the License. # # ###################################################################################################################### - +from pathlib import Path from typing import Optional import aws_cdk.aws_iam as iam @@ -18,7 +18,7 @@ from aws_cdk.aws_stepfunctions import IChainable from aws_cdk.core import Construct, Aws -from personalize.aws_lambda.functions.solutionstep import SolutionStep +from aws_solutions.cdk.stepfunctions.solutionstep import SolutionStep class CreateDatasetImportJob(SolutionStep): @@ -74,6 +74,13 @@ def __init__( id, layers=layers, failure_state=failure_state, + entrypoint=( + Path(__file__).absolute().parents[4] + / "aws_lambda" + / "create_dataset_import_job" + / "handler.py" + ), + libraries=[Path(__file__).absolute().parents[4] / "aws_lambda" / "shared"], ) def _set_permissions(self): diff --git a/source/infrastructure/personalize/aws_lambda/functions/create_event_tracker.py b/source/infrastructure/personalize/aws_lambda/functions/create_event_tracker.py index 584c147..042d817 100644 --- a/source/infrastructure/personalize/aws_lambda/functions/create_event_tracker.py +++ b/source/infrastructure/personalize/aws_lambda/functions/create_event_tracker.py @@ -10,14 +10,14 @@ # on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for # # the specific language governing permissions and limitations under the License. 
# # ###################################################################################################################### - +from pathlib import Path from typing import Optional import aws_cdk.aws_iam as iam from aws_cdk.aws_stepfunctions import IChainable, TaskInput from aws_cdk.core import Construct, Aws -from personalize.aws_lambda.functions.solutionstep import SolutionStep +from aws_solutions.cdk.stepfunctions.solutionstep import SolutionStep class CreateEventTracker(SolutionStep): @@ -33,13 +33,25 @@ def __init__( id, payload=TaskInput.from_object( { - "name.$": "$.eventTracker.serviceConfig.name", - "datasetGroupArn.$": "$.datasetGroup.serviceConfig.datasetGroupArn", + "serviceConfig": { + "name.$": "$.eventTracker.serviceConfig.name", + "datasetGroupArn.$": "$.datasetGroup.serviceConfig.datasetGroupArn", + }, + "workflowConfig": { + "timeStarted.$": "$$.State.EnteredTime", + }, } ), result_path="$.eventTracker.serviceConfig", layers=layers, failure_state=failure_state, + entrypoint=( + Path(__file__).absolute().parents[4] + / "aws_lambda" + / "create_event_tracker" + / "handler.py" + ), + libraries=[Path(__file__).absolute().parents[4] / "aws_lambda" / "shared"], ) def _set_permissions(self): diff --git a/source/infrastructure/personalize/aws_lambda/functions/create_filter.py b/source/infrastructure/personalize/aws_lambda/functions/create_filter.py index c6d2a9e..51e6f09 100644 --- a/source/infrastructure/personalize/aws_lambda/functions/create_filter.py +++ b/source/infrastructure/personalize/aws_lambda/functions/create_filter.py @@ -10,14 +10,14 @@ # on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for # # the specific language governing permissions and limitations under the License. # # ###################################################################################################################### - +from pathlib import Path from typing import Optional import aws_cdk.aws_iam as iam from aws_cdk.aws_stepfunctions import IChainable from aws_cdk.core import Construct, Aws -from personalize.aws_lambda.functions.solutionstep import SolutionStep +from aws_solutions.cdk.stepfunctions.solutionstep import SolutionStep class CreateFilter(SolutionStep): @@ -33,6 +33,13 @@ def __init__( id, layers=layers, failure_state=failure_state, + entrypoint=( + Path(__file__).absolute().parents[4] + / "aws_lambda" + / "create_filter" + / "handler.py" + ), + libraries=[Path(__file__).absolute().parents[4] / "aws_lambda" / "shared"], ) def _set_permissions(self): diff --git a/source/infrastructure/personalize/aws_lambda/functions/create_scheduled_task.py b/source/infrastructure/personalize/aws_lambda/functions/create_scheduled_task.py index 97cb942..4f7a027 100644 --- a/source/infrastructure/personalize/aws_lambda/functions/create_scheduled_task.py +++ b/source/infrastructure/personalize/aws_lambda/functions/create_scheduled_task.py @@ -10,10 +10,11 @@ # on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for # # the specific language governing permissions and limitations under the License. 
# # ###################################################################################################################### +from pathlib import Path from aws_cdk.core import Construct -from personalize.aws_lambda.functions.solutionstep import SolutionStep +from aws_solutions.cdk.stepfunctions.solutionstep import SolutionStep class CreateScheduledTask(SolutionStep): @@ -27,6 +28,13 @@ def __init__( scope, id, layers=layers, + entrypoint=( + Path(__file__).absolute().parents[4] + / "aws_lambda" + / "create_scheduled_task" + / "handler.py" + ), + libraries=[Path(__file__).absolute().parents[4] / "aws_lambda" / "shared"], ) def _set_permissions(self): diff --git a/source/infrastructure/personalize/aws_lambda/functions/create_schema.py b/source/infrastructure/personalize/aws_lambda/functions/create_schema.py index 79b9182..4d11537 100644 --- a/source/infrastructure/personalize/aws_lambda/functions/create_schema.py +++ b/source/infrastructure/personalize/aws_lambda/functions/create_schema.py @@ -10,14 +10,14 @@ # on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for # # the specific language governing permissions and limitations under the License. # # ###################################################################################################################### - +from pathlib import Path from typing import Optional import aws_cdk.aws_iam as iam from aws_cdk.aws_stepfunctions import IChainable from aws_cdk.core import Construct, Aws -from personalize.aws_lambda.functions.solutionstep import SolutionStep +from aws_solutions.cdk.stepfunctions.solutionstep import SolutionStep class CreateSchema(SolutionStep): @@ -33,6 +33,13 @@ def __init__( id, layers=layers, failure_state=failure_state, + entrypoint=( + Path(__file__).absolute().parents[4] + / "aws_lambda" + / "create_schema" + / "handler.py" + ), + libraries=[Path(__file__).absolute().parents[4] / "aws_lambda" / "shared"], ) def _set_permissions(self): diff --git a/source/infrastructure/personalize/aws_lambda/functions/create_solution.py b/source/infrastructure/personalize/aws_lambda/functions/create_solution.py index ef1e597..3c98ab0 100644 --- a/source/infrastructure/personalize/aws_lambda/functions/create_solution.py +++ b/source/infrastructure/personalize/aws_lambda/functions/create_solution.py @@ -10,11 +10,12 @@ # on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for # # the specific language governing permissions and limitations under the License. 
# # ###################################################################################################################### +from pathlib import Path import aws_cdk.aws_iam as iam from aws_cdk.core import Construct, Aws -from personalize.aws_lambda.functions.solutionstep import SolutionStep +from aws_solutions.cdk.stepfunctions.solutionstep import SolutionStep class CreateSolution(SolutionStep): @@ -28,6 +29,13 @@ def __init__( scope, id, layers=layers, + entrypoint=( + Path(__file__).absolute().parents[4] + / "aws_lambda" + / "create_solution" + / "handler.py" + ), + libraries=[Path(__file__).absolute().parents[4] / "aws_lambda" / "shared"], ) def _set_permissions(self): diff --git a/source/infrastructure/personalize/aws_lambda/functions/create_solution_version.py b/source/infrastructure/personalize/aws_lambda/functions/create_solution_version.py index d442782..cbe3027 100644 --- a/source/infrastructure/personalize/aws_lambda/functions/create_solution_version.py +++ b/source/infrastructure/personalize/aws_lambda/functions/create_solution_version.py @@ -10,11 +10,12 @@ # on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for # # the specific language governing permissions and limitations under the License. # # ###################################################################################################################### +from pathlib import Path import aws_cdk.aws_iam as iam from aws_cdk.core import Construct, Aws -from personalize.aws_lambda.functions.solutionstep import SolutionStep +from aws_solutions.cdk.stepfunctions.solutionstep import SolutionStep class CreateSolutionVersion(SolutionStep): @@ -28,6 +29,13 @@ def __init__( scope, id, layers=layers, + entrypoint=( + Path(__file__).absolute().parents[4] + / "aws_lambda" + / "create_solution_version" + / "handler.py" + ), + libraries=[Path(__file__).absolute().parents[4] / "aws_lambda" / "shared"], ) def _set_permissions(self): diff --git a/source/infrastructure/personalize/aws_lambda/functions/create_timestamp.py b/source/infrastructure/personalize/aws_lambda/functions/create_timestamp.py index 5811b43..f83fb91 100644 --- a/source/infrastructure/personalize/aws_lambda/functions/create_timestamp.py +++ b/source/infrastructure/personalize/aws_lambda/functions/create_timestamp.py @@ -10,10 +10,11 @@ # on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for # # the specific language governing permissions and limitations under the License. 
# # ###################################################################################################################### +from pathlib import Path from aws_cdk.core import Construct -from personalize.aws_lambda.functions.solutionstep import SolutionStep +from aws_solutions.cdk.stepfunctions.solutionstep import SolutionStep class CreateTimestamp(SolutionStep): @@ -23,7 +24,17 @@ def __init__( id: str, layers=None, ): - super().__init__(scope, id, layers=layers) + super().__init__( + scope, + id, + layers=layers, + entrypoint=( + Path(__file__).absolute().parents[4] + / "aws_lambda" + / "create_timestamp" + / "handler.py" + ), + ) def _set_permissions(self): pass # NOSONAR (python:S1186) - no permissions required diff --git a/source/infrastructure/personalize/aws_lambda/functions/environment.py b/source/infrastructure/personalize/aws_lambda/functions/environment.py index f18c809..4388231 100644 --- a/source/infrastructure/personalize/aws_lambda/functions/environment.py +++ b/source/infrastructure/personalize/aws_lambda/functions/environment.py @@ -16,7 +16,7 @@ from aws_cdk.aws_lambda import IFunction from aws_cdk.core import Aws -from personalize.aws_lambda.functions.environment_variable import EnvironmentVariable +from aws_solutions.cdk.aws_lambda.environment_variable import EnvironmentVariable @dataclass diff --git a/source/infrastructure/personalize/aws_lambda/functions/prepare_input.py b/source/infrastructure/personalize/aws_lambda/functions/prepare_input.py new file mode 100644 index 0000000..2194f68 --- /dev/null +++ b/source/infrastructure/personalize/aws_lambda/functions/prepare_input.py @@ -0,0 +1,42 @@ +# ###################################################################################################################### +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # +# # +# Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance # +# with the License. You may obtain a copy of the License at # +# # +# http://www.apache.org/licenses/LICENSE-2.0 # +# # +# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed # +# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for # +# the specific language governing permissions and limitations under the License. 
# +# ###################################################################################################################### + +from pathlib import Path + +from aws_cdk.core import Construct + +from aws_solutions.cdk.stepfunctions.solutionstep import SolutionStep + + +class PrepareInput(SolutionStep): + def __init__( + self, + scope: Construct, + id: str, + layers=None, + ): + super().__init__( + scope, + id, + layers=layers, + entrypoint=( + Path(__file__).absolute().parents[4] + / "aws_lambda" + / "prepare_input" + / "handler.py" + ), + libraries=[Path(__file__).absolute().parents[4] / "aws_lambda" / "shared"], + ) + + def _set_permissions(self): + pass # NOSONAR (python:S1186) - no permissions required diff --git a/source/infrastructure/personalize/aws_lambda/functions/s3_event.py b/source/infrastructure/personalize/aws_lambda/functions/s3_event.py index 44fc8e3..610c5de 100644 --- a/source/infrastructure/personalize/aws_lambda/functions/s3_event.py +++ b/source/infrastructure/personalize/aws_lambda/functions/s3_event.py @@ -19,9 +19,9 @@ from aws_cdk.aws_stepfunctions import StateMachine from aws_cdk.core import Construct, Duration +from aws_solutions.cdk.aws_lambda.environment import Environment from aws_solutions.cdk.aws_lambda.python.function import SolutionsPythonFunction from aws_solutions.cdk.cfn_nag import add_cfn_nag_suppressions, CfnNagSuppression -from personalize.aws_lambda.functions.environment import Environment class S3EventHandler(SolutionsPythonFunction): diff --git a/source/infrastructure/personalize/aws_lambda/layers/__init__.py b/source/infrastructure/personalize/aws_lambda/layers/__init__.py index e1f85ba..0e77372 100644 --- a/source/infrastructure/personalize/aws_lambda/layers/__init__.py +++ b/source/infrastructure/personalize/aws_lambda/layers/__init__.py @@ -11,5 +11,4 @@ # the specific language governing permissions and limitations under the License. # # ###################################################################################################################### -from personalize.aws_lambda.layers.aws_lambda_powertools.layer import PowertoolsLayer from personalize.aws_lambda.layers.aws_solutions.layer import SolutionsLayer diff --git a/source/infrastructure/personalize/aws_lambda/layers/aws_solutions/requirements/requirements.txt b/source/infrastructure/personalize/aws_lambda/layers/aws_solutions/requirements/requirements.txt index 5115579..68789cf 100644 --- a/source/infrastructure/personalize/aws_lambda/layers/aws_solutions/requirements/requirements.txt +++ b/source/infrastructure/personalize/aws_lambda/layers/aws_solutions/requirements/requirements.txt @@ -1,4 +1,5 @@ ../../../../../../cdk_solution_helper_py/helpers_common +../../../../../../scheduler/common avro==1.10.2 cronex==0.1.3.1 jmespath==0.10.0 diff --git a/source/infrastructure/personalize/sns/notifications.py b/source/infrastructure/personalize/sns/notifications.py index 0574f9c..86b7811 100644 --- a/source/infrastructure/personalize/sns/notifications.py +++ b/source/infrastructure/personalize/sns/notifications.py @@ -10,6 +10,7 @@ # on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for # # the specific language governing permissions and limitations under the License. 
# # ##################################################################################################################### +from pathlib import Path from typing import Optional from aws_cdk.aws_sns import Subscription, SubscriptionProtocol @@ -24,7 +25,7 @@ from aws_solutions_constructs.aws_lambda_sns import LambdaToSns from aws_solutions.cdk.aspects import ConditionalResources -from personalize.aws_lambda.functions.solutionstep import SolutionStep +from aws_solutions.cdk.stepfunctions.solutionstep import SolutionStep class Notifications(SolutionStep): @@ -42,7 +43,19 @@ def __init__( self.topic = None # delay creation until after parent is setup self.subscription = None # delay creation until after parent is setup - super().__init__(scope, id, layers=layers, failure_state=failure_state) + super().__init__( + scope, + id, + layers=layers, + failure_state=failure_state, + entrypoint=( + Path(__file__).absolute().parents[3] + / "aws_lambda" + / "sns_notification" + / "handler.py" + ), + libraries=[Path(__file__).absolute().parents[3] / "aws_lambda" / "shared"], + ) def create_sns(self): """ diff --git a/source/infrastructure/personalize/stack.py b/source/infrastructure/personalize/stack.py index 4dd3320..3864f5b 100644 --- a/source/infrastructure/personalize/stack.py +++ b/source/infrastructure/personalize/stack.py @@ -12,6 +12,7 @@ # ###################################################################################################################### from aws_cdk import core as cdk +from aws_cdk.aws_events import EventBus from aws_cdk.aws_s3 import EventType, NotificationKeyFilter from aws_cdk.aws_s3_notifications import LambdaDestination from aws_cdk.aws_stepfunctions import ( @@ -23,12 +24,14 @@ from aws_cdk.core import CfnCondition, Fn, Aws, Duration from aws_solutions.cdk.aws_lambda.cfn_custom_resources.resource_name import ResourceName +from aws_solutions.cdk.aws_lambda.layers.aws_lambda_powertools import PowertoolsLayer from aws_solutions.cdk.cfn_nag import ( CfnNagSuppression, add_cfn_nag_suppressions, CfnNagSuppressAll, ) from aws_solutions.cdk.stack import SolutionStack +from aws_solutions.scheduler.cdk.construct import Scheduler from personalize.aws_lambda.functions import ( S3EventHandler, CreateDatasetGroup, @@ -42,11 +45,12 @@ CreateFilter, CreateBatchInferenceJob, CreateTimestamp, + CreateConfig, ) -from personalize.aws_lambda.layers import PowertoolsLayer, SolutionsLayer +from personalize.aws_lambda.functions.prepare_input import PrepareInput +from personalize.aws_lambda.layers import SolutionsLayer from personalize.cloudwatch.dashboard import Dashboard from personalize.s3 import AccessLogsBucket, DataBucket -from personalize.scheduler import Scheduler from personalize.sns.notifications import Notifications from personalize.step_functions.dataset_imports_fragment import DatasetImportsFragment from personalize.step_functions.event_tracker_fragment import EventTrackerFragment @@ -132,6 +136,12 @@ def __init__( ) # the AWS lambda functions required by the shared step functions + create_config = CreateConfig(self, "CreateConfig", layers=common_layers) + prepare_input = PrepareInput( + self, + "Prepare Input", + layers=common_layers, + ) create_dataset_group = CreateDatasetGroup( self, "Create Dataset Group", @@ -195,6 +205,32 @@ def __init__( self, "Create Timestamp", layers=[layer_powertools] ) + # EventBridge events can be triggered for resource creation and update + # Note: https://github.com/aws/aws-cdk/issues/17338 + bus_name = ( + 
f"aws-solutions-{self.node.try_get_context('SOLUTION_ID')}-{Aws.STACK_NAME}" + ) + event_bus = EventBus( + self, + id="Notifications", + event_bus_name=bus_name, + ) + event_bus.node.default_child.add_override( + "Properties.Name", + bus_name, + ) + + create_dataset_group.grant_put_events(event_bus) + create_schema.grant_put_events(event_bus) + create_dataset.grant_put_events(event_bus) + create_dataset_import_job.grant_put_events(event_bus) + create_event_tracker.grant_put_events(event_bus) + create_solution.grant_put_events(event_bus) + create_solution_version.grant_put_events(event_bus) + create_campaign.grant_put_events(event_bus) + create_batch_inference_job.grant_put_events(event_bus) + create_filter.grant_put_events(event_bus) + dataset_management_functions = { "create_schema": create_schema, "create_dataset": create_dataset, @@ -215,11 +251,13 @@ def __init__( dataset_management_functions=dataset_management_functions, create_timestamp=create_timestamp, notifications=notifications, + prepare_input=prepare_input, ).state_machine solution_maintenance_schedule_sfn = ScheduledSolutionMaintenance( self, "Scheduled Solution Maintenance", + prepare_input=prepare_input, create_solution=create_solution, create_solution_version=create_solution_version, create_campaign=create_campaign, @@ -267,14 +305,18 @@ def __init__( definition = Chain.start( Parallel(self, "Manage The Execution") .branch( - create_dataset_group.state( + prepare_input.state( self, - "Create Dataset Group", - backoff_rate=1.02, - interval=Duration.seconds(5), - max_attempts=30, - ) - .next( + "Prepare Input", + ) .next( + create_dataset_group.state( + self, + "Create Dataset Group", + backoff_rate=1.02, + interval=Duration.seconds(5), + max_attempts=30, + ) + ).next( DatasetImportsFragment( self, "Handle Dataset Imports", @@ -407,6 +449,12 @@ def __init__( value=scheduler.scheduler_table.table_name, export_name=f"{Aws.STACK_NAME}-SchedulerTableName", ) + cdk.CfnOutput( + self, + "SchedulerStepFunctionArn", + value=scheduler.state_machine_arn, + export_name=f"{Aws.STACK_NAME}-SchedulerStepFunctionArn", + ) cdk.CfnOutput( self, "Dashboard", @@ -419,3 +467,15 @@ def __init__( value=notifications.topic.topic_arn, export_name=f"{Aws.STACK_NAME}-SNSTopicArn", ) + cdk.CfnOutput( + self, + "EventBusArn", + value=event_bus.event_bus_arn, + export_name=f"{Aws.STACK_NAME}-EventBusArn", + ) + cdk.CfnOutput( + self, + "CreateConfigFunctionArn", + value=create_config.function_arn, + export_name=f"{Aws.STACK_NAME}-CreateConfigFunctionArn", + ) diff --git a/source/infrastructure/personalize/step_functions/batch_inference_jobs_fragment.py b/source/infrastructure/personalize/step_functions/batch_inference_jobs_fragment.py index 4ae83f7..b83ab10 100644 --- a/source/infrastructure/personalize/step_functions/batch_inference_jobs_fragment.py +++ b/source/infrastructure/personalize/step_functions/batch_inference_jobs_fragment.py @@ -25,11 +25,11 @@ ) from aws_cdk.core import Construct, Duration +from aws_solutions.scheduler.cdk.construct import Scheduler +from aws_solutions.scheduler.cdk.scheduler_fragment import SchedulerFragment from personalize.aws_lambda.functions import ( CreateBatchInferenceJob, ) -from personalize.scheduler import Scheduler -from personalize.step_functions.scheduler_fragment import SchedulerFragment TEMPORARY_PATH = "$._tmp" BATCH_INFERENCE_JOB_PATH = "$.batchInferenceJob" diff --git a/source/infrastructure/personalize/step_functions/dataset_import_fragment.py 
b/source/infrastructure/personalize/step_functions/dataset_import_fragment.py index f4d1141..ca00627 100644 --- a/source/infrastructure/personalize/step_functions/dataset_import_fragment.py +++ b/source/infrastructure/personalize/step_functions/dataset_import_fragment.py @@ -69,7 +69,8 @@ def __init__( }, }, "workflowConfig": { - "maxAge.$": "$.datasetGroup.workflowConfig.maxAge" + "maxAge.$": "$.datasetGroup.workflowConfig.maxAge", # NOSONAR (python:S1192) - string for clarity + "timeStarted.$": "$$.State.EnteredTime" # NOSONAR (python:S1192) - string for clarity } }), result_path=JsonPath.DISCARD, @@ -86,7 +87,8 @@ def __init__( }, }, "workflowConfig": { - "maxAge.$": "$.datasetGroup.workflowConfig.maxAge" + "maxAge.$": "$.datasetGroup.workflowConfig.maxAge", # NOSONAR (python:S1192) - string for clarity + "timeStarted.$": "$$.State.EnteredTime", # NOSONAR (python:S1192) - string for clarity } }), result_path=JsonPath.DISCARD, @@ -101,10 +103,16 @@ def __init__( result_path=f"$.datasets.{id.lower()}.schema.serviceConfig") .next(create_dataset.state(self, f"Create {id} Dataset", payload=TaskInput.from_object({ - "name.$": f"$.datasets.{id.lower()}.dataset.serviceConfig.name", - "schemaArn.$": f"$.datasets.{id.lower()}.schema.serviceConfig.schemaArn", - "datasetGroupArn.$": "$.datasetGroup.serviceConfig.datasetGroupArn", - "datasetType": f"{id.lower()}", + "serviceConfig": { + "name.$": f"$.datasets.{id.lower()}.dataset.serviceConfig.name", + "schemaArn.$": f"$.datasets.{id.lower()}.schema.serviceConfig.schemaArn", + "datasetGroupArn.$": "$.datasetGroup.serviceConfig.datasetGroupArn", + "datasetType": f"{id.lower()}", + }, + "workflowConfig": { + "maxAge.$": "$.datasetGroup.workflowConfig.maxAge", + "timeStarted.$": "$$.State.EnteredTime", + } }), result_path=f"$.datasets.{id.lower()}.dataset.serviceConfig", **retry_config)) diff --git a/source/infrastructure/personalize/step_functions/scheduled_dataset_import.py b/source/infrastructure/personalize/step_functions/scheduled_dataset_import.py index 9deeab9..4438aaf 100644 --- a/source/infrastructure/personalize/step_functions/scheduled_dataset_import.py +++ b/source/infrastructure/personalize/step_functions/scheduled_dataset_import.py @@ -17,7 +17,7 @@ from aws_solutions.cdk.aws_lambda.cfn_custom_resources.resource_name import ResourceName from aws_solutions.cdk.cfn_nag import add_cfn_nag_suppressions, CfnNagSuppression -from personalize.aws_lambda.functions.solutionstep import SolutionStep +from aws_solutions.cdk.stepfunctions.solutionstep import SolutionStep from personalize.step_functions.dataset_imports_fragment import DatasetImportsFragment from personalize.step_functions.failure_fragment import FailureFragment @@ -30,6 +30,7 @@ def __init__( dataset_management_functions: Dict[str, SolutionStep], create_timestamp: SolutionStep, notifications: SolutionStep, + prepare_input: SolutionStep, ): super().__init__(scope, construct_id) @@ -45,7 +46,9 @@ def __init__( .branch( create_timestamp.state( self, "Set Current Timestamp", result_path="$.currentDate" - ).next( + ) + .next(prepare_input.state(self, "Prepare Input")) + .next( DatasetImportsFragment( self, "Handle Periodic Dataset Imports", diff --git a/source/infrastructure/personalize/step_functions/scheduled_solution_maintenance.py b/source/infrastructure/personalize/step_functions/scheduled_solution_maintenance.py index 57318da..b97ee78 100644 --- a/source/infrastructure/personalize/step_functions/scheduled_solution_maintenance.py +++ 
b/source/infrastructure/personalize/step_functions/scheduled_solution_maintenance.py @@ -16,13 +16,14 @@ from aws_solutions.cdk.aws_lambda.cfn_custom_resources.resource_name import ResourceName from aws_solutions.cdk.cfn_nag import add_cfn_nag_suppressions, CfnNagSuppression +from aws_solutions.cdk.stepfunctions.solutionstep import SolutionStep from personalize.aws_lambda.functions import ( CreateBatchInferenceJob, CreateSolution, CreateSolutionVersion, CreateCampaign, ) -from personalize.aws_lambda.functions.solutionstep import SolutionStep +from personalize.aws_lambda.functions.prepare_input import PrepareInput from personalize.step_functions.failure_fragment import FailureFragment from personalize.step_functions.solution_fragment import SolutionFragment @@ -36,6 +37,7 @@ def __init__( create_solution_version: CreateSolutionVersion, create_campaign: CreateCampaign, create_batch_inference_job: CreateBatchInferenceJob, + prepare_input: PrepareInput, create_timestamp: SolutionStep, notifications: SolutionStep, ): @@ -56,7 +58,9 @@ def __init__( .branch( create_timestamp.state( self, "Set Current Timestamp", result_path="$.currentDate" - ).next( + ) + .next(prepare_input.state(self, "Prepare Input")) + .next( SolutionFragment( self, "Handle Periodic Solution Maintenance", diff --git a/source/infrastructure/personalize/step_functions/scheduler_fragment.py b/source/infrastructure/personalize/step_functions/scheduler_fragment.py index cab5af8..28d860f 100644 --- a/source/infrastructure/personalize/step_functions/scheduler_fragment.py +++ b/source/infrastructure/personalize/step_functions/scheduler_fragment.py @@ -26,7 +26,7 @@ ) from aws_cdk.core import Construct -from personalize.scheduler.base import Scheduler +from aws_solutions.scheduler.cdk.construct import Scheduler class SchedulerFragment(StateMachineFragment): diff --git a/source/infrastructure/personalize/step_functions/solution_fragment.py b/source/infrastructure/personalize/step_functions/solution_fragment.py index 285060a..0396000 100644 --- a/source/infrastructure/personalize/step_functions/solution_fragment.py +++ b/source/infrastructure/personalize/step_functions/solution_fragment.py @@ -26,13 +26,13 @@ ) from aws_cdk.core import Construct, Duration +from aws_solutions.scheduler.cdk.construct import Scheduler from personalize.aws_lambda.functions import ( CreateSolution, CreateSolutionVersion, CreateCampaign, CreateBatchInferenceJob, ) -from personalize.scheduler import Scheduler from personalize.step_functions.batch_inference_jobs_fragment import ( BatchInferenceJobsFragment, ) @@ -96,7 +96,8 @@ def __init__( "trainingMode": "FULL" }, "workflowConfig": { - "maxAge": "365 days" # do not create a new solution version on new file upload + "maxAge": "365 days", # do not create a new solution version on new file upload + "timeStarted.$": "$$.State.EnteredTime", } }, result_path = "$.solution.solutionVersion", # NOSONAR (python:S1192) - string for clarity @@ -155,7 +156,8 @@ def __init__( }, "workflowConfig": { "maxAge.$": "$.solution.solutionVersion.workflowConfig.maxAge", - "solutionVersionArn.$": f"{TEMPORARY_PATH}.errorInfo.errorMessage" + "solutionVersionArn.$": f"{TEMPORARY_PATH}.errorInfo.errorMessage", + "timeStarted.$": "$$.State.EnteredTime", } }, result_path="$.solution.solutionVersion" diff --git a/source/infrastructure/setup.py b/source/infrastructure/setup.py index 790a2f6..fc3dea2 100644 --- a/source/infrastructure/setup.py +++ b/source/infrastructure/setup.py @@ -35,18 +35,18 @@ author="AWS Solutions Builders", 
packages=setuptools.find_packages(), install_requires=[ - "aws-cdk.core>=1.95.2", + "aws-cdk.core>=1.126.0", ], - python_requires=">=3.6", + python_requires=">=3.7", classifiers=[ "Development Status :: 4 - Beta", "Intended Audience :: Developers", "License :: OSI Approved :: Apache Software License", "Programming Language :: JavaScript", "Programming Language :: Python :: 3 :: Only", - "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", "Topic :: Software Development :: Code Generators", "Topic :: Utilities", "Typing :: Typed", diff --git a/source/pytest.ini b/source/pytest.ini index 70cdb14..14f3217 100644 --- a/source/pytest.ini +++ b/source/pytest.ini @@ -8,6 +8,7 @@ env = AWS_REGION=us-east-1 AWS_DEFAULT_REGION=us-east-1 DDB_SCHEDULES_TABLE=scheduler + DDB_SCHEDULER_STEPFUNCTION=arn:aws:states:us-east-1:111111111111:stateMachine:personalizestack-personalize-scheduler POWERTOOLS_SERVICE_NAME=personalize_solution_teststack POWERTOOLS_METRICS_NAMESPACE=personalize_solution_teststack norecursedirs = cdk.out* diff --git a/source/requirements-dev.txt b/source/requirements-dev.txt index 8426e0a..c81dec0 100644 --- a/source/requirements-dev.txt +++ b/source/requirements-dev.txt @@ -1,9 +1,9 @@ avro==1.10.2 black boto3 -aws_cdk.core>=1.123.0 -aws_cdk.aws_stepfunctions_tasks>=1.123.0 -aws_solutions_constructs.aws_lambda_sns>=1.123.0 +aws_cdk.core==1.126.0 +aws_cdk.aws_stepfunctions_tasks==1.126.0 +aws_solutions_constructs.aws_lambda_sns==1.126.0 requests==2.24.0 crhelper==2.0.6 cronex==0.1.3.1 @@ -13,9 +13,13 @@ pytest pytest-cov>=2.11.1 pytest-env>=0.6.2 pytest-mock>=3.5.1 +pyyaml==5.4.1 +responses==0.14.0 tenacity>=8.0.1 -e cdk_solution_helper_py/helpers_cdk -e cdk_solution_helper_py/helpers_common -aws-lambda-powertools==1.15.0 +-e scheduler/cdk +-e scheduler/common +aws-lambda-powertools>=1.15.0 docker==5.0.0 -e infrastructure \ No newline at end of file diff --git a/source/scheduler/CHANGELOG.md b/source/scheduler/CHANGELOG.md new file mode 100644 index 0000000..0bc7be1 --- /dev/null +++ b/source/scheduler/CHANGELOG.md @@ -0,0 +1,10 @@ +# Change Log +All notable changes to this project will be documented in this file. + +The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), +and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). + +## [1.1.0] - 2021-11-11 +### Added +- initial release + diff --git a/source/scheduler/README.md b/source/scheduler/README.md new file mode 100644 index 0000000..6a0e43d --- /dev/null +++ b/source/scheduler/README.md @@ -0,0 +1,218 @@ +# AWS Solutions Step Functions Scheduler +## Scheduling for AWS Step Functions + +This tooling adds scheduling support for AWS Step Functions via a set of libraries and CDK packages. + +This README summarizes how to use the scheduler. + +## Prerequisites + +Install this package. It requires at least: + +- Python 3.7 +- AWS CDK version 1.126.0 or higher + +To install the packages: + +``` +pip install <path>/scheduler/cdk # where <path> is the path to the scheduler namespace package +pip install <path>/scheduler/common # where <path> is the path to the scheduler namespace package +``` + +
## 1. Add the scheduler to your CDK application + +```python +from pathlib import Path + +from aws_cdk.core import Construct +from aws_cdk.aws_stepfunctions import StateMachine + +from aws_solutions.cdk import CDKSolution +from aws_solutions.cdk.stack import SolutionStack +from aws_solutions.scheduler.cdk.construct import Scheduler + + +solution = CDKSolution(cdk_json_path=Path(__file__).parent.absolute() / "cdk.json") + + +class MyStack(SolutionStack): + def __init__(self, scope: Construct, construct_id: str, description: str, template_filename, state_machine: StateMachine, **kwargs): + super().__init__(scope, construct_id, description, template_filename, **kwargs) + + scheduler = Scheduler(self, "Scheduler") + scheduler.grant_invoke(state_machine) +``` + +## 2. Allow a state machine to create new schedules + +You may have an existing `StateMachine` you wish to create schedules with - to do so, use the `SchedulerFragment`. + +```python +# creates a scheduled item called "my-schedule-suffix" - typically you will use part of the state input for the suffix. +SchedulerFragment( + self, + schedule_for="my schedule", + schedule_for_suffix="suffix", + scheduler=scheduler, + target=state_machine, + schedule_path="$.path.to.cron.expression", + schedule_input={ + "static_input": "value", + "derived_input.$": "$.field_in_state_input", + }, +) +``` + +## 3. Check the status of schedules using the included CLI + +This package also provides a CLI `aws-solutions-scheduler`. This CLI can be used to control the scheduler and establish +schedules for the [Maintaining Personalized Experiences with Machine Learning](https://aws.amazon.com/solutions/implementations/maintaining-personalized-experiences-with-ml/) +solution. + +### Installation + +It is recommended that you perform the following steps in a dedicated virtual environment: + +```shell +cd source +pip install --upgrade pip +pip install cdk_solution_helper_py/helpers_common +pip install scheduler/common +``` + +### Usage + +```shell +Usage: aws-solutions-scheduler [OPTIONS] COMMAND [ARGS]... + + Scheduler CLI + +Options: + -s, --stack TEXT [required] + -r, --region TEXT [required] + --scheduler-table-name-output TEXT + --scheduler-stepfunction-arn-output TEXT + --help Show this message and exit. + +Commands: + activate Activate a scheduled task + deactivate Deactivate a scheduled task + describe Describe a scheduled task + import-dataset-group Create a new configuration from an existing... + list List all scheduled tasks +``` + +#### Create new schedule(s) for an Amazon Personalize dataset group + +Schedules for dataset import, solution version FULL and UPDATE retraining can be established using the CLI for dataset +groups in Amazon Personalize. This example creates a weekly schedule for full dataset import (`-i`) and for full +solution version retraining (`-f`): + +```shell +> aws-solutions-scheduler -s PersonalizeStack -r us-east-1 import-dataset-group -d item-recommender -i "cron(0 0 ? * 1 *)" -f "item-recommender-user-personalization@cron(0 3 ? * 1 *)" -p train/item-recommender/config.json +``` + +#### Listing Schedules + +```shell +> aws-solutions-scheduler -s PersonalizeStack -r us-east-1 list +``` +
+Sample result:
+
+```json
+{
+  "tasks": [
+    "personalize-dataset-import-item-recommender",
+    "solution-maintenance-full-item-recommender-user-personalization"
+  ]
+}
+```
+
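+The CLI prints its results as JSON, so it can also be driven from automation. The following is a
+minimal sketch (not part of the solution); the stack name and region are placeholders for your own
+deployment, and it assumes the CLI is installed on the PATH:
+
+```python
+import json
+import subprocess
+
+# call the CLI exactly as above and capture its JSON output
+result = subprocess.run(
+    ["aws-solutions-scheduler", "-s", "PersonalizeStack", "-r", "us-east-1", "list"],
+    capture_output=True,
+    check=True,
+    text=True,
+)
+
+for task in json.loads(result.stdout)["tasks"]:
+    print(task)
+```
+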
+#### Describing Schedules
+
+```shell
+> aws-solutions-scheduler -s PersonalizeStack -r us-east-1 describe --task personalize-dataset-import-item-recommender
+```
+
+Sample result:
+
+```json
+{
+  "task": {
+    "active": true,
+    "name": "personalize-dataset-import-item-recommender",
+    "schedule": "cron(*/15 * * * ? *)",
+    "step_function": "arn:aws:states:us-east-1:111122223333:stateMachine:personalizestack-periodic-dataset-import-aaaaaaaaaaaa",
+    "version": "v1"
+  }
+}
+```
+
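+The same details can be read programmatically with the `aws_solutions.scheduler.common` library that
+backs the CLI. The snippet below is an illustrative sketch only: the table name and state machine ARN
+are placeholders for your stack's `SchedulerTableName` and `SchedulerStepFunctionArn` outputs, and the
+environment variables the CLI sets for you (`AWS_REGION`, `SOLUTION_ID`, `SOLUTION_VERSION`) are
+assumed to already be configured in your shell.
+
+```python
+from aws_solutions.scheduler.common import Scheduler
+
+# placeholders - resolve these from your deployed stack's outputs
+scheduler = Scheduler(
+    table_name="PersonalizeStack-SchedulerTable",
+    stepfunction="arn:aws:states:us-east-1:111122223333:stateMachine:personalizestack-personalize-scheduler",
+)
+
+task_name = "personalize-dataset-import-item-recommender"
+tracker = scheduler.read(task_name, 0)  # version 0 tracks the latest schedule version number
+latest = scheduler.read(task_name, int(tracker.latest))
+
+print(latest.name, latest.schedule.expression, scheduler.is_enabled(latest))
+```
+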
+#### Activating Schedules
+
+Deactivated schedules can be activated:
+
+```shell
+> aws-solutions-scheduler -s PersonalizeStack -r us-east-1 activate --task personalize-dataset-import-item-recommender
+```
+
+Sample result:
+
+```json
+{
+  "task": {
+    "active": true,
+    "name": "personalize-dataset-import-item-recommender",
+    "schedule": "cron(0 0 ? * 1 *)",
+    "step_function": "arn:aws:states:us-east-1:111122223333:stateMachine:personalizestack-periodic-dataset-import-aaaaaaaaaaaa",
+    "version": "v1"
+  }
+}
+```
+
+#### Deactivating Schedules
+
+Active schedules can be deactivated:
+
+```shell
+> aws-solutions-scheduler -s PersonalizeStack -r us-east-1 deactivate --task personalize-dataset-import-item-recommender
+```
+
+Sample result:
+
+```json
+{
+  "task": {
+    "active": false,
+    "name": "personalize-dataset-import-item-recommender",
+    "schedule": "cron(0 0 ? * 1 *)",
+    "step_function": "arn:aws:states:us-east-1:111122223333:stateMachine:personalizestack-periodic-dataset-import-aaaaaaaaaaaa",
+    "version": "v1"
+  }
+}
+```
+
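+Beyond one-off CLI calls, the same library can be used to manage schedules in bulk, for example from an
+operational runbook. As with the snippet above, this is a sketch; the `Scheduler` arguments are
+placeholders for your stack's outputs, and the task-name prefix simply matches the example task names
+shown here.
+
+```python
+from aws_solutions.scheduler.common import Scheduler
+
+scheduler = Scheduler(
+    table_name="PersonalizeStack-SchedulerTable",  # placeholder
+    stepfunction="arn:aws:states:us-east-1:111122223333:stateMachine:personalizestack-personalize-scheduler",  # placeholder
+)
+
+for name in sorted(scheduler.list()):
+    tracker = scheduler.read(name, 0)
+    latest = scheduler.read(name, int(tracker.latest))
+
+    # pause all dataset import schedules; scheduler.activate(latest) re-enables them later
+    if name.startswith("personalize-dataset-import-") and scheduler.is_enabled(latest):
+        scheduler.deactivate(latest)
+```
+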
+ +*** + +Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. \ No newline at end of file diff --git a/source/scheduler/cdk/aws_solutions/scheduler/cdk/__init__.py b/source/scheduler/cdk/aws_solutions/scheduler/cdk/__init__.py new file mode 100644 index 0000000..ef2f9eb --- /dev/null +++ b/source/scheduler/cdk/aws_solutions/scheduler/cdk/__init__.py @@ -0,0 +1,12 @@ +# ###################################################################################################################### +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # +# # +# Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance # +# with the License. You may obtain a copy of the License at # +# # +# http://www.apache.org/licenses/LICENSE-2.0 # +# # +# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed # +# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for # +# the specific language governing permissions and limitations under the License. # +# ###################################################################################################################### diff --git a/source/infrastructure/personalize/scheduler/aws_lambda/functions/__init__.py b/source/scheduler/cdk/aws_solutions/scheduler/cdk/aws_lambda/__init__.py similarity index 81% rename from source/infrastructure/personalize/scheduler/aws_lambda/functions/__init__.py rename to source/scheduler/cdk/aws_solutions/scheduler/cdk/aws_lambda/__init__.py index f6370b0..aed94dd 100644 --- a/source/infrastructure/personalize/scheduler/aws_lambda/functions/__init__.py +++ b/source/scheduler/cdk/aws_solutions/scheduler/cdk/aws_lambda/__init__.py @@ -11,15 +11,13 @@ # the specific language governing permissions and limitations under the License. 
# # ###################################################################################################################### -from personalize.scheduler.aws_lambda.functions.create_scheduled_task import ( +from aws_solutions.scheduler.cdk.aws_lambda.create_scheduled_task import ( CreateScheduledTask, ) -from personalize.scheduler.aws_lambda.functions.delete_scheduled_task import ( +from aws_solutions.scheduler.cdk.aws_lambda.delete_scheduled_task import ( DeleteScheduledTask, ) -from personalize.scheduler.aws_lambda.functions.read_scheduled_task import ( - ReadScheduledTask, -) -from personalize.scheduler.aws_lambda.functions.update_scheduled_task import ( +from aws_solutions.scheduler.cdk.aws_lambda.read_scheduled_task import ReadScheduledTask +from aws_solutions.scheduler.cdk.aws_lambda.update_scheduled_task import ( UpdateScheduledTask, ) diff --git a/source/infrastructure/personalize/scheduler/aws_lambda/functions/create_scheduled_task.py b/source/scheduler/cdk/aws_solutions/scheduler/cdk/aws_lambda/create_scheduled_task.py similarity index 96% rename from source/infrastructure/personalize/scheduler/aws_lambda/functions/create_scheduled_task.py rename to source/scheduler/cdk/aws_solutions/scheduler/cdk/aws_lambda/create_scheduled_task.py index 1fd4a2d..90a8da5 100644 --- a/source/infrastructure/personalize/scheduler/aws_lambda/functions/create_scheduled_task.py +++ b/source/scheduler/cdk/aws_solutions/scheduler/cdk/aws_lambda/create_scheduled_task.py @@ -19,7 +19,7 @@ from aws_cdk.aws_stepfunctions import IChainable from aws_cdk.core import Construct -from personalize.aws_lambda.functions.solutionstep import SolutionStep +from aws_solutions.cdk.stepfunctions.solutionstep import SolutionStep class CreateScheduledTask(SolutionStep): @@ -43,7 +43,7 @@ def __init__( layers=layers, failure_state=failure_state, function="create_schedule", - entrypoint=Path(__file__).parents[5].resolve() + entrypoint=Path(__file__).parents[1].resolve() / "aws_lambda" / "scheduler" / "handler.py", diff --git a/source/infrastructure/personalize/scheduler/aws_lambda/functions/delete_scheduled_task.py b/source/scheduler/cdk/aws_solutions/scheduler/cdk/aws_lambda/delete_scheduled_task.py similarity index 95% rename from source/infrastructure/personalize/scheduler/aws_lambda/functions/delete_scheduled_task.py rename to source/scheduler/cdk/aws_solutions/scheduler/cdk/aws_lambda/delete_scheduled_task.py index 67cd8ca..cd4ab6c 100644 --- a/source/infrastructure/personalize/scheduler/aws_lambda/functions/delete_scheduled_task.py +++ b/source/scheduler/cdk/aws_solutions/scheduler/cdk/aws_lambda/delete_scheduled_task.py @@ -19,7 +19,7 @@ from aws_cdk.aws_stepfunctions import IChainable from aws_cdk.core import Construct -from personalize.aws_lambda.functions.solutionstep import SolutionStep +from aws_solutions.cdk.stepfunctions.solutionstep import SolutionStep class DeleteScheduledTask(SolutionStep): @@ -41,7 +41,7 @@ def __init__( layers=layers, failure_state=failure_state, function="delete_schedule", - entrypoint=Path(__file__).parents[5].resolve() + entrypoint=Path(__file__).parents[1].resolve() / "aws_lambda" / "scheduler" / "handler.py", diff --git a/source/aws_lambda/get_next_scheduled_event/build.gradle b/source/scheduler/cdk/aws_solutions/scheduler/cdk/aws_lambda/get_next_scheduled_event/build.gradle similarity index 100% rename from source/aws_lambda/get_next_scheduled_event/build.gradle rename to source/scheduler/cdk/aws_solutions/scheduler/cdk/aws_lambda/get_next_scheduled_event/build.gradle diff --git 
a/source/aws_lambda/get_next_scheduled_event/gradle/wrapper/gradle-wrapper.jar b/source/scheduler/cdk/aws_solutions/scheduler/cdk/aws_lambda/get_next_scheduled_event/gradle/wrapper/gradle-wrapper.jar similarity index 100% rename from source/aws_lambda/get_next_scheduled_event/gradle/wrapper/gradle-wrapper.jar rename to source/scheduler/cdk/aws_solutions/scheduler/cdk/aws_lambda/get_next_scheduled_event/gradle/wrapper/gradle-wrapper.jar diff --git a/source/aws_lambda/get_next_scheduled_event/gradle/wrapper/gradle-wrapper.properties b/source/scheduler/cdk/aws_solutions/scheduler/cdk/aws_lambda/get_next_scheduled_event/gradle/wrapper/gradle-wrapper.properties similarity index 100% rename from source/aws_lambda/get_next_scheduled_event/gradle/wrapper/gradle-wrapper.properties rename to source/scheduler/cdk/aws_solutions/scheduler/cdk/aws_lambda/get_next_scheduled_event/gradle/wrapper/gradle-wrapper.properties diff --git a/source/aws_lambda/get_next_scheduled_event/gradlew b/source/scheduler/cdk/aws_solutions/scheduler/cdk/aws_lambda/get_next_scheduled_event/gradlew similarity index 100% rename from source/aws_lambda/get_next_scheduled_event/gradlew rename to source/scheduler/cdk/aws_solutions/scheduler/cdk/aws_lambda/get_next_scheduled_event/gradlew diff --git a/source/aws_lambda/get_next_scheduled_event/settings.gradle b/source/scheduler/cdk/aws_solutions/scheduler/cdk/aws_lambda/get_next_scheduled_event/settings.gradle similarity index 100% rename from source/aws_lambda/get_next_scheduled_event/settings.gradle rename to source/scheduler/cdk/aws_solutions/scheduler/cdk/aws_lambda/get_next_scheduled_event/settings.gradle diff --git a/source/aws_lambda/get_next_scheduled_event/src/main/java/com/amazonaws/solutions/schedule_sfn_task/HandleScheduleEvent.java b/source/scheduler/cdk/aws_solutions/scheduler/cdk/aws_lambda/get_next_scheduled_event/src/main/java/com/amazonaws/solutions/schedule_sfn_task/HandleScheduleEvent.java similarity index 100% rename from source/aws_lambda/get_next_scheduled_event/src/main/java/com/amazonaws/solutions/schedule_sfn_task/HandleScheduleEvent.java rename to source/scheduler/cdk/aws_solutions/scheduler/cdk/aws_lambda/get_next_scheduled_event/src/main/java/com/amazonaws/solutions/schedule_sfn_task/HandleScheduleEvent.java diff --git a/source/aws_lambda/get_next_scheduled_event/src/main/java/com/amazonaws/solutions/schedule_sfn_task/ScheduleEvent.java b/source/scheduler/cdk/aws_solutions/scheduler/cdk/aws_lambda/get_next_scheduled_event/src/main/java/com/amazonaws/solutions/schedule_sfn_task/ScheduleEvent.java similarity index 100% rename from source/aws_lambda/get_next_scheduled_event/src/main/java/com/amazonaws/solutions/schedule_sfn_task/ScheduleEvent.java rename to source/scheduler/cdk/aws_solutions/scheduler/cdk/aws_lambda/get_next_scheduled_event/src/main/java/com/amazonaws/solutions/schedule_sfn_task/ScheduleEvent.java diff --git a/source/aws_lambda/get_next_scheduled_event/src/main/java/com/amazonaws/solutions/schedule_sfn_task/ScheduleException.java b/source/scheduler/cdk/aws_solutions/scheduler/cdk/aws_lambda/get_next_scheduled_event/src/main/java/com/amazonaws/solutions/schedule_sfn_task/ScheduleException.java similarity index 100% rename from source/aws_lambda/get_next_scheduled_event/src/main/java/com/amazonaws/solutions/schedule_sfn_task/ScheduleException.java rename to source/scheduler/cdk/aws_solutions/scheduler/cdk/aws_lambda/get_next_scheduled_event/src/main/java/com/amazonaws/solutions/schedule_sfn_task/ScheduleException.java diff --git 
a/source/aws_lambda/get_next_scheduled_event/src/test/java/com/amazonaws/solutions/schedule_sfn_task/HandleScheduleEventTest.java b/source/scheduler/cdk/aws_solutions/scheduler/cdk/aws_lambda/get_next_scheduled_event/src/test/java/com/amazonaws/solutions/schedule_sfn_task/HandleScheduleEventTest.java similarity index 100% rename from source/aws_lambda/get_next_scheduled_event/src/test/java/com/amazonaws/solutions/schedule_sfn_task/HandleScheduleEventTest.java rename to source/scheduler/cdk/aws_solutions/scheduler/cdk/aws_lambda/get_next_scheduled_event/src/test/java/com/amazonaws/solutions/schedule_sfn_task/HandleScheduleEventTest.java diff --git a/source/infrastructure/personalize/scheduler/aws_lambda/functions/read_scheduled_task.py b/source/scheduler/cdk/aws_solutions/scheduler/cdk/aws_lambda/read_scheduled_task.py similarity index 95% rename from source/infrastructure/personalize/scheduler/aws_lambda/functions/read_scheduled_task.py rename to source/scheduler/cdk/aws_solutions/scheduler/cdk/aws_lambda/read_scheduled_task.py index 22a3420..b4c0222 100644 --- a/source/infrastructure/personalize/scheduler/aws_lambda/functions/read_scheduled_task.py +++ b/source/scheduler/cdk/aws_solutions/scheduler/cdk/aws_lambda/read_scheduled_task.py @@ -19,7 +19,7 @@ from aws_cdk.aws_stepfunctions import IChainable from aws_cdk.core import Construct -from personalize.aws_lambda.functions.solutionstep import SolutionStep +from aws_solutions.cdk.stepfunctions.solutionstep import SolutionStep class ReadScheduledTask(SolutionStep): @@ -41,7 +41,7 @@ def __init__( layers=layers, failure_state=failure_state, function="read_schedule", - entrypoint=Path(__file__).parents[5].resolve() + entrypoint=Path(__file__).parents[1].resolve() / "aws_lambda" / "scheduler" / "handler.py", diff --git a/source/scheduler/cdk/aws_solutions/scheduler/cdk/aws_lambda/scheduler/__init__.py b/source/scheduler/cdk/aws_solutions/scheduler/cdk/aws_lambda/scheduler/__init__.py new file mode 100644 index 0000000..ef2f9eb --- /dev/null +++ b/source/scheduler/cdk/aws_solutions/scheduler/cdk/aws_lambda/scheduler/__init__.py @@ -0,0 +1,12 @@ +# ###################################################################################################################### +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # +# # +# Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance # +# with the License. You may obtain a copy of the License at # +# # +# http://www.apache.org/licenses/LICENSE-2.0 # +# # +# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed # +# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for # +# the specific language governing permissions and limitations under the License. 
# +# ###################################################################################################################### diff --git a/source/aws_lambda/scheduler/handler.py b/source/scheduler/cdk/aws_solutions/scheduler/cdk/aws_lambda/scheduler/handler.py similarity index 94% rename from source/aws_lambda/scheduler/handler.py rename to source/scheduler/cdk/aws_solutions/scheduler/cdk/aws_lambda/scheduler/handler.py index e22d7af..5cd60cb 100644 --- a/source/aws_lambda/scheduler/handler.py +++ b/source/scheduler/cdk/aws_solutions/scheduler/cdk/aws_lambda/scheduler/handler.py @@ -16,9 +16,11 @@ from aws_lambda_powertools import Logger, Tracer, Metrics from aws_lambda_powertools.utilities.typing import LambdaContext -from shared.scheduler.base import Scheduler -from shared.scheduler.task import Task -from shared.scheduler.task_resource import TaskResource +from aws_solutions.scheduler.common import ( + Scheduler, + Task, + TaskResource, +) logger = Logger() tracer = Tracer() diff --git a/source/scheduler/cdk/aws_solutions/scheduler/cdk/aws_lambda/scheduler/requirements.txt b/source/scheduler/cdk/aws_solutions/scheduler/cdk/aws_lambda/scheduler/requirements.txt new file mode 100644 index 0000000..201fe8d --- /dev/null +++ b/source/scheduler/cdk/aws_solutions/scheduler/cdk/aws_lambda/scheduler/requirements.txt @@ -0,0 +1,6 @@ +avro==1.10.2 +cronex==0.1.3.1 +jmespath==0.10.0 +parsedatetime==2.6 +../../../../../../../scheduler/common +../../../../../../../cdk_solution_helper_py/helpers_common diff --git a/source/infrastructure/personalize/scheduler/aws_lambda/functions/update_scheduled_task.py b/source/scheduler/cdk/aws_solutions/scheduler/cdk/aws_lambda/update_scheduled_task.py similarity index 96% rename from source/infrastructure/personalize/scheduler/aws_lambda/functions/update_scheduled_task.py rename to source/scheduler/cdk/aws_solutions/scheduler/cdk/aws_lambda/update_scheduled_task.py index 2876909..22ab0c3 100644 --- a/source/infrastructure/personalize/scheduler/aws_lambda/functions/update_scheduled_task.py +++ b/source/scheduler/cdk/aws_solutions/scheduler/cdk/aws_lambda/update_scheduled_task.py @@ -19,7 +19,7 @@ from aws_cdk.aws_stepfunctions import IChainable from aws_cdk.core import Construct -from personalize.aws_lambda.functions.solutionstep import SolutionStep +from aws_solutions.cdk.stepfunctions.solutionstep import SolutionStep class UpdateScheduledTask(SolutionStep): @@ -43,7 +43,7 @@ def __init__( layers=layers, failure_state=failure_state, function="update_schedule", - entrypoint=Path(__file__).parents[5].resolve() + entrypoint=Path(__file__).parents[1].resolve() / "aws_lambda" / "scheduler" / "handler.py", diff --git a/source/infrastructure/personalize/scheduler/base.py b/source/scheduler/cdk/aws_solutions/scheduler/cdk/construct.py similarity index 97% rename from source/infrastructure/personalize/scheduler/base.py rename to source/scheduler/cdk/aws_solutions/scheduler/cdk/construct.py index 401c1c6..ba0586e 100644 --- a/source/infrastructure/personalize/scheduler/base.py +++ b/source/scheduler/cdk/aws_solutions/scheduler/cdk/construct.py @@ -31,11 +31,11 @@ from aws_cdk.core import Construct, Aws from aws_solutions.cdk.aws_lambda.cfn_custom_resources.resource_name import ResourceName +from aws_solutions.cdk.aws_lambda.environment import Environment from aws_solutions.cdk.aws_lambda.java.function import SolutionsJavaFunction +from aws_solutions.cdk.aws_lambda.layers.aws_lambda_powertools import PowertoolsLayer from aws_solutions.cdk.cfn_nag import 
add_cfn_nag_suppressions, CfnNagSuppression -from personalize.aws_lambda.functions.environment import Environment -from personalize.aws_lambda.layers import PowertoolsLayer, SolutionsLayer -from personalize.scheduler.aws_lambda.functions import ( +from aws_solutions.scheduler.cdk.aws_lambda import ( CreateScheduledTask, ReadScheduledTask, UpdateScheduledTask, @@ -85,8 +85,7 @@ def __init__(self, scope: Construct, construct_id: str, sync: bool = True): # Layers required for the AWS Lambda Functions provisioned by the Scheduler construct layer_powertools = PowertoolsLayer.get_or_create(self) - layer_solutions = SolutionsLayer.get_or_create(self) - common_layers = [layer_powertools, layer_solutions] + common_layers = [layer_powertools] # CRUD tasks/ states to integrate with the Scheduler self.create_scheduled_task = CreateScheduledTask( @@ -106,7 +105,7 @@ def __init__(self, scope: Construct, construct_id: str, sync: bool = True): ) self.update_scheduled_task = UpdateScheduledTask( self, - "update_scheudled_task", + "update_scheduled_task", layers=common_layers, scheduler_table=self.scheduler_table, state_machine_arn=self.state_machine_arn, @@ -370,7 +369,8 @@ def _scheduler_function( :return: SolutionsJavaFunction """ project_path = ( - Path(__file__).absolute().parents[3] + Path(__file__).absolute().parents[1] + / "cdk" / "aws_lambda" / "get_next_scheduled_event" ) diff --git a/source/scheduler/cdk/aws_solutions/scheduler/cdk/scheduler_fragment.py b/source/scheduler/cdk/aws_solutions/scheduler/cdk/scheduler_fragment.py new file mode 100644 index 0000000..28d860f --- /dev/null +++ b/source/scheduler/cdk/aws_solutions/scheduler/cdk/scheduler_fragment.py @@ -0,0 +1,91 @@ +# ###################################################################################################################### +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # +# # +# Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance # +# with the License. You may obtain a copy of the License at # +# # +# http://www.apache.org/licenses/LICENSE-2.0 # +# # +# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed # +# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for # +# the specific language governing permissions and limitations under the License. 
# +# ###################################################################################################################### +import re +from typing import List, Optional, Dict + +from aws_cdk.aws_stepfunctions import ( + StateMachineFragment, + State, + INextable, + StateMachine, + TaskInput, + Pass, + Choice, + Condition, + JsonPath, +) +from aws_cdk.core import Construct + +from aws_solutions.scheduler.cdk.construct import Scheduler + + +class SchedulerFragment(StateMachineFragment): + def __init__( + self, # NOSONAR (python:S107) - allow large number of method parameters + scope: Construct, + schedule_for: str, + schedule_for_suffix: str, + scheduler: Scheduler, + target: StateMachine, + schedule_path: str, + schedule_input_path: Optional[str] = "", + schedule_input: Optional[Dict] = None, + ): + construct_id = " ".join(["Schedule", schedule_for]).strip() + super().__init__(scope, construct_id) + + if not schedule_input_path and not schedule_input: + raise ValueError( + "schedule_input_path or schedule_input must be provided, not both" + ) + schedule_input = schedule_input or schedule_input_path + + schedule_input_key = "input" + if schedule_input_path: + schedule_input_key += ".$" + + # set up the schedule name + schedule_for_task_name = re.sub(r"[^0-9A-Za-z-_]", "-", schedule_for)[:80] + schedule_for_task_name = ( + f"States.Format('{schedule_for_task_name}-{{}}', {schedule_for_suffix})" + ) + + self.not_required = Pass(self, f"{schedule_for.title()} Schedule Not Required") + self.create_schedule = scheduler.create_scheduled_task.state( + self, + f"Create Schedule For {schedule_for.title()}", + payload=TaskInput.from_object( + { + "name.$": schedule_for_task_name, + "schedule.$": schedule_path, + "state_machine": { + "arn": target.state_machine_arn, + schedule_input_key: schedule_input, + }, + } + ), + result_path=JsonPath.DISCARD, + ) + self.start = ( + Choice(self, f"Check if {schedule_for.title()} Schedule Required") + .when(Condition.is_present(schedule_path), self.create_schedule) + .otherwise(self.not_required) + ) + + @property + def start_state(self) -> State: + return self.start.start_state + + @property + def end_states(self) -> List[INextable]: + return [self.not_required, self.create_schedule] diff --git a/source/scheduler/cdk/setup.py b/source/scheduler/cdk/setup.py new file mode 100644 index 0000000..36b6121 --- /dev/null +++ b/source/scheduler/cdk/setup.py @@ -0,0 +1,66 @@ +# ###################################################################################################################### +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # +# # +# Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance # +# with the License. You may obtain a copy of the License at # +# # +# http://www.apache.org/licenses/LICENSE-2.0 # +# # +# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed # +# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for # +# the specific language governing permissions and limitations under the License. # +# ###################################################################################################################### + +import re +from pathlib import Path + +import setuptools + +VERSION_RE = re.compile(r"\#\# \[(?P.*)\]", re.MULTILINE) # NOSONAR + + +def get_version(): + """ + Detect the solution version from the changelog. Latest version on top. 
+ """ + changelog = open(Path(__file__).resolve().parent.parent / "CHANGELOG.md").read() + versions = VERSION_RE.findall(changelog) + if not len(versions): + raise ValueError("use the standard semver format in your CHANGELOG.md") + build_version = versions[0] + print(f"Build Version: {build_version}") + return build_version + + +setuptools.setup( + name="aws-solutions-scheduler-cdk", + version=get_version(), + description="Scheduler CDK Constructs", + long_description=open("../README.md").read(), + author="Amazon Web Services", + url="https://aws.amazon.com/solutions/implementations", + license="Apache License 2.0", + packages=setuptools.find_namespace_packages(), + install_requires=[ + "aws-cdk.core>=1.126.0", + "aws-cdk.aws_lambda>=1.126.0", + "aws-cdk.aws_stepfunctions>=1.126.0", + "Click>=7.1.2", + "boto3>=1.17.52", + ], + python_requires=">=3.7", + classifiers=[ + "Development Status :: 4 - Beta", + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: JavaScript", + "Programming Language :: Python :: 3 :: Only", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Topic :: Software Development :: Code Generators", + "Topic :: Utilities", + "Typing :: Typed", + ], + zip_safe=False, +) diff --git a/source/aws_lambda/shared/scheduler/__init__.py b/source/scheduler/common/aws_solutions/scheduler/common/__init__.py similarity index 85% rename from source/aws_lambda/shared/scheduler/__init__.py rename to source/scheduler/common/aws_solutions/scheduler/common/__init__.py index 45cd046..77dee24 100644 --- a/source/aws_lambda/shared/scheduler/__init__.py +++ b/source/scheduler/common/aws_solutions/scheduler/common/__init__.py @@ -16,7 +16,8 @@ CRON_ANY_WILDCARD = "?" 
CRON_MIN_MAX_YEAR = (1970, 2199) -from shared.scheduler.base import Scheduler -from shared.scheduler.schedule import Schedule, ScheduleError -from shared.scheduler.task import Task -from shared.scheduler.task_resource import TaskResource + +from aws_solutions.scheduler.common.base import Scheduler +from aws_solutions.scheduler.common.schedule import Schedule, ScheduleError +from aws_solutions.scheduler.common.task import Task +from aws_solutions.scheduler.common.task_resource import TaskResource diff --git a/source/aws_lambda/shared/scheduler/base.py b/source/scheduler/common/aws_solutions/scheduler/common/base.py similarity index 92% rename from source/aws_lambda/shared/scheduler/base.py rename to source/scheduler/common/aws_solutions/scheduler/common/base.py index df8946a..fe9aefc 100644 --- a/source/aws_lambda/shared/scheduler/base.py +++ b/source/scheduler/common/aws_solutions/scheduler/common/base.py @@ -21,8 +21,8 @@ from aws_lambda_powertools.metrics import MetricUnit from aws_solutions.core import get_service_client, get_service_resource -from shared.scheduler import TASK_PK -from shared.scheduler.task import Task +from aws_solutions.scheduler.common import TASK_PK +from aws_solutions.scheduler.common.task import Task logger = Logger() metrics = Metrics(service="Scheduler") @@ -47,11 +47,21 @@ def dynamo_to_python(obj: Optional[Dict]) -> Optional[Task]: class Scheduler: """Create schedules for events that invoke a step function""" - def __init__(self): + def __init__(self, table_name=None, stepfunction=None): self.ddb = get_service_resource("dynamodb") self.ddb_cli = self.ddb.meta.client - self.table_name = os.environ.get("DDB_SCHEDULES_TABLE") - self.stepfunction = os.environ.get("DDB_SCHEDULER_STEPFUNCTION") + self.table_name = os.environ.get("DDB_SCHEDULES_TABLE", table_name) + self.stepfunction = os.environ.get("DDB_SCHEDULER_STEPFUNCTION", stepfunction) + + if not self.table_name: + raise ValueError( + "requires table_name at initialization or DDB_SCHEDULES_TABLE env var" + ) + if not self.stepfunction: + raise ValueError( + "requires stepfunction at initialization or DDB_SCHEDULER_STEPFUNCTION env var" + ) + self.sfn_cli = get_service_client("stepfunctions") self.table = self.ddb.Table(self.table_name) @@ -158,6 +168,10 @@ def list(self) -> Generator[str, None, None]: start_key = response.get("LastEvaluatedKey", None) done = start_key is None + def is_enabled(self, task: Task) -> bool: + arn = self._get_running_execution_arn(task) + return True if arn else False + def _get_running_execution_arn(self, task: Task) -> Optional[str]: paginator = self.sfn_cli.get_paginator("list_executions") iterator = paginator.paginate( @@ -183,6 +197,9 @@ def _get_running_execution_arn(self, task: Task) -> Optional[str]: return execution_arn return None + def deactivate(self, task: Task) -> None: + self._disable_schedule(task) + def _disable_schedule(self, task: Task) -> None: execution_arn = self._get_running_execution_arn(task) if execution_arn: @@ -195,6 +212,9 @@ def _disable_schedule(self, task: Task) -> None: else: logger.info(f"{task.name} already disabled") + def activate(self, task: Task) -> None: + self._enable_schedule(task) + def _enable_schedule(self, task: Task) -> None: execution_arn = self._get_running_execution_arn(task) if execution_arn: diff --git a/source/aws_lambda/shared/scheduler/schedule.py b/source/scheduler/common/aws_solutions/scheduler/common/schedule.py similarity index 98% rename from source/aws_lambda/shared/scheduler/schedule.py rename to 
source/scheduler/common/aws_solutions/scheduler/common/schedule.py index 2316470..d77f5cd 100644 --- a/source/aws_lambda/shared/scheduler/schedule.py +++ b/source/scheduler/common/aws_solutions/scheduler/common/schedule.py @@ -16,7 +16,7 @@ import cronex -from shared.scheduler import CRON_ANY_WILDCARD, CRON_MIN_MAX_YEAR +from aws_solutions.scheduler.common import CRON_ANY_WILDCARD, CRON_MIN_MAX_YEAR class ScheduleError(ValueError): diff --git a/source/scheduler/common/aws_solutions/scheduler/common/scripts/__init__.py b/source/scheduler/common/aws_solutions/scheduler/common/scripts/__init__.py new file mode 100644 index 0000000..ef2f9eb --- /dev/null +++ b/source/scheduler/common/aws_solutions/scheduler/common/scripts/__init__.py @@ -0,0 +1,12 @@ +# ###################################################################################################################### +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # +# # +# Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance # +# with the License. You may obtain a copy of the License at # +# # +# http://www.apache.org/licenses/LICENSE-2.0 # +# # +# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed # +# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for # +# the specific language governing permissions and limitations under the License. # +# ###################################################################################################################### diff --git a/source/scheduler/common/aws_solutions/scheduler/common/scripts/scheduler_cli.py b/source/scheduler/common/aws_solutions/scheduler/common/scripts/scheduler_cli.py new file mode 100644 index 0000000..4c9d60b --- /dev/null +++ b/source/scheduler/common/aws_solutions/scheduler/common/scripts/scheduler_cli.py @@ -0,0 +1,355 @@ +# ###################################################################################################################### +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # +# # +# Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance # +# with the License. You may obtain a copy of the License at # +# # +# http://www.apache.org/licenses/LICENSE-2.0 # +# # +# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed # +# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for # +# the specific language governing permissions and limitations under the License. 
# +# ###################################################################################################################### +import json +import os +from typing import List, Tuple, Dict, Any + +import boto3 +import click +from rich import print_json + +from aws_solutions.scheduler.common import Scheduler +from aws_solutions.scheduler.common.base import logger + +logger.setLevel("ERROR") + + +def get_stack_output_value(stack, key: str) -> str: + """ + Get a stack output value + :param stack: the boto3 stack resource + :param key: the output key + :return: str + """ + results = [i for i in stack.outputs if i["OutputKey"] == key] + if not results: + raise ValueError(f"could not find output with key {key} in stack") + return results[0]["OutputValue"] + + +def get_stack_tag_value(stack, key: str) -> str: + """ + Get a stack tag value + :param stack: the boto3 stack resource + :param key: the tag key + :return: str + """ + results = [i for i in stack.tags if i["Key"] == key] + if not results: + raise ValueError(f"could not find tag with key {key} in stack") + return results[0]["Value"] + + +def setup_cli_env(stack, region: str) -> None: + """ + Set the environment variables required by the scheduler + :param stack: the stack name + :param region: the AWS region + :return: None + """ + os.environ["AWS_REGION"] = region + os.environ["SOLUTION_ID"] = get_stack_tag_value(stack, "SOLUTION_ID") + os.environ["SOLUTION_VERSION"] = f"{get_stack_tag_value(stack, 'SOLUTION_VERSION')}" + + +@click.group() +@click.option("-s", "--stack", required=True, envvar="SCHEDULER_STACK") +@click.option("-r", "--region", required=True, envvar="AWS_REGION") +@click.option("--scheduler-table-name-output", default="SchedulerTableName") +@click.option("--scheduler-stepfunction-arn-output", default="SchedulerStepFunctionArn") +@click.pass_context +def cli( + ctx, + stack: str, + region: str, + scheduler_table_name_output: str, + scheduler_stepfunction_arn_output: str, +) -> None: + """ + Scheduler CLI + \f + + :param ctx: the click context + :param stack: the AWS CloudFormation stack name + :param region: the AWS region + :param scheduler_table_name_output: the scheduler table name + :param scheduler_stepfunction_arn_output: the scheduler step function ARN + :return: None + """ + ctx.ensure_object(dict) + + cloudformation = boto3.resource("cloudformation", region_name=region) + stack = cloudformation.Stack(stack) + setup_cli_env(stack, region) + + ctx.obj["SCHEDULER"] = Scheduler( + table_name=get_stack_output_value(stack, scheduler_table_name_output), + stepfunction=get_stack_output_value(stack, scheduler_stepfunction_arn_output), + ) + ctx.obj["REGION"] = region + ctx.obj["STACK"] = stack + + +@cli.command("list") +@click.pass_context +def list_command(ctx) -> None: + """ + List all scheduled tasks + \f + + :param ctx: the click context + :return: None + """ + scheduler: Scheduler = ctx.obj["SCHEDULER"] + tasks = [] + + for task in scheduler.list(): + tasks.append(task) + tasks = sorted(tasks) + + print_json(data={"tasks": tasks}) + + +def _describe(ctx, task: str) -> None: + """ + Describe a scheduled task + :param ctx: the click context + :param task: the task name + :return: None + """ + scheduler: Scheduler = ctx.obj["SCHEDULER"] + + tracker = scheduler.read(task, 0) + latest = scheduler.read(task, int(tracker.latest)) + + print_json( + data={ + "task": { + "active": scheduler.is_enabled(latest), + "name": latest.name, + "schedule": latest.schedule.expression, + "step_function": latest.state_machine.get("arn"), + 
"version": f"v{tracker.latest}", + } + } + ) + + +@cli.command() +@click.option("-t", "--task", required=True) +@click.pass_context +def describe(ctx, task: str) -> Any: + """ + Describe a scheduled task + \f + + :param ctx: the click context + :param task: the task + :return: ctx click context + """ + return _describe(ctx, task) + + +@cli.command() +@click.option("-t", "--task", required=True) +@click.pass_context +def activate(ctx, task: str) -> None: + """ + Activate a scheduled task + \f + + :param ctx: the click context + :param task: the task + :return: None + """ + scheduler: Scheduler = ctx.obj["SCHEDULER"] + + tracker = scheduler.read(task, 0) + latest = scheduler.read(task, int(tracker.latest)) + + scheduler.activate(latest) + _describe(ctx, task) + + +@cli.command() +@click.option("-t", "--task", required=True) +@click.pass_context +def deactivate(ctx, task) -> None: + """ + Deactivate a scheduled task + \f + + :param ctx: the click context + :param task: the task + :return: None + """ + scheduler: Scheduler = ctx.obj["SCHEDULER"] + + tracker = scheduler.read(task, 0) + if not tracker: + raise click.ClickException(f"task {task} does not exist") + latest = scheduler.read(task, int(tracker.latest)) + + scheduler.deactivate(latest) + _describe(ctx=ctx, task=latest) + + +def _validate_path(ctx, param, value) -> str: + """ + Callback to validate the path parameter + :param ctx: the click context + :param param: the click parameter + :param value: the click parameter value + :return: str + """ + if not value.startswith("train/"): + raise click.BadParameter("must start with 'train/") + if not value.endswith(".json"): + raise click.BadParameter("must end with a suffix of .json") + return value + + +def _validate_schedules(ctx, param, value) -> Tuple[Tuple[str, str], ...]: + """ + Callback to validate the schedule parameters + :param ctx: the click context + :param param: the click parameters + :param value: the click parameter values + :return: Tuple[Tuple[str, str], ...] + """ + if len(value) == 0: + return value + + values = [] + for idx, item in enumerate(value): + solution, _, schedule = item.partition("@") + if solution and schedule: + values.append((solution, schedule)) + else: + raise click.BadParameter( + "format must be solution_name@schedule_expression e.g solution@cron(0 */12 * * ? *)" + ) + return tuple(set(values)) + + +def get_payload( + dataset_group: str, + import_schedule: str, + update_schedule: List[Tuple[str, str]], + full_schedule: List[Tuple[str, str]], +) -> Dict: + """ + Gets the AWS Lambda Function payload for setting up schedules/ importing a dataset group into the solution + :param dataset_group: dataset group name + :param import_schedule: import schedule (e.g. "cron(* * * * ? *)") + :param update_schedule: update schedules (eg. ("name","cron(* * * * ? *))) + :param full_schedule: full schedules (eg. ("name","cron(* * * * ? 
*))) + :return: Dict + """ + payload = { + "datasetGroupName": dataset_group, + } + if import_schedule: + payload.setdefault("schedules", {})["import"] = import_schedule + if update_schedule: + for solution, schedule in update_schedule: + payload.setdefault("schedules", {}).setdefault("solutions", {})[ + solution + ] = {"update": schedule} + if full_schedule: + for solution, schedule in full_schedule: + payload.setdefault("schedules", {}).setdefault("solutions", {})[ + solution + ] = {"full": schedule} + return payload + + +@cli.command() +@click.option( + "-d", "--dataset-group", required=True, help="dataset group name to import" +) +@click.option("-p", "--path", required=True, callback=_validate_path, help="s3 key") +@click.option("-i", "--import-schedule", help="cron schedule for dataset import") +@click.option( + "-f", + "--full-schedule", + multiple=True, + callback=_validate_schedules, + help="cron schedules for FULL solution version updates", +) +@click.option( + "-u", + "--update-schedule", + multiple=True, + callback=_validate_schedules, + help="cron schedules for UPDATE solution version updates", +) +@click.pass_context +def import_dataset_group( + ctx, dataset_group, path, import_schedule, full_schedule, update_schedule +): + """ + Create a new configuration from an existing dataset group in Amazon Personalize and add scheduled tasks + \f + + :param ctx: the click context + :param dataset_group: the dataset group name + :param path: the full s3 key of the configuration file + :param import_schedule: the import cron schedule + :param full_schedule: the full schedules + :param update_schedule: the update schedules + """ + region = ctx.obj["REGION"] + stack = ctx.obj["STACK"] + cli_lambda = boto3.client("lambda", region_name=region) + cli_s3 = boto3.client("s3", region_name=region) + cli_sts = boto3.client("sts", region_name=region) + config_function = get_stack_output_value(stack, "CreateConfigFunctionArn") + bucket = get_stack_output_value(stack, "PersonalizeBucketName") + account = cli_sts.get_caller_identity()["Account"] + + payload = get_payload( + dataset_group=dataset_group, + import_schedule=import_schedule, + update_schedule=update_schedule, + full_schedule=full_schedule, + ) + + # Run the lambda function to generate the configuration and get the result + result = cli_lambda.invoke( + FunctionName=config_function, + InvocationType="RequestResponse", + Payload=json.dumps(payload).encode("utf-8"), + ) + status = result.get("StatusCode") + if status != 200: + raise click.ClickException( + "there was an error generating configuration ({status})" + ) + if result.get("FunctionError"): + error_message = json.loads(result.get("Payload").read()).get("errorMessage") + raise click.ClickException( + f"Could not generate configuration for {dataset_group}: {error_message}" + ) + + # to trigger the workflow and set up new schedules, upload the returned configuration to S3. 
+ cli_s3.upload_fileobj( + Fileobj=result.get("Payload"), + Bucket=bucket, + Key=path, + ExtraArgs={"ExpectedBucketOwner": account}, + ) + + +if __name__ == "__main__": + cli() diff --git a/source/aws_lambda/shared/scheduler/task.py b/source/scheduler/common/aws_solutions/scheduler/common/task.py similarity index 96% rename from source/aws_lambda/shared/scheduler/task.py rename to source/scheduler/common/aws_solutions/scheduler/common/task.py index 8ed7809..840e071 100644 --- a/source/aws_lambda/shared/scheduler/task.py +++ b/source/scheduler/common/aws_solutions/scheduler/common/task.py @@ -18,8 +18,8 @@ from typing import Union, Dict from uuid import uuid4 -from shared.scheduler import TASK_PK, TASK_SK -from shared.scheduler.schedule import Schedule +from aws_solutions.scheduler.common import TASK_PK, TASK_SK +from aws_solutions.scheduler.common.schedule import Schedule @dataclass diff --git a/source/aws_lambda/shared/scheduler/task_resource.py b/source/scheduler/common/aws_solutions/scheduler/common/task_resource.py similarity index 94% rename from source/aws_lambda/shared/scheduler/task_resource.py rename to source/scheduler/common/aws_solutions/scheduler/common/task_resource.py index 1950d13..854fe1a 100644 --- a/source/aws_lambda/shared/scheduler/task_resource.py +++ b/source/scheduler/common/aws_solutions/scheduler/common/task_resource.py @@ -14,8 +14,8 @@ import dataclasses import functools -from shared.scheduler.schedule import Schedule -from shared.scheduler.task import Task +from aws_solutions.scheduler.common.schedule import Schedule +from aws_solutions.scheduler.common.task import Task class TaskResource: diff --git a/source/scheduler/common/setup.py b/source/scheduler/common/setup.py new file mode 100644 index 0000000..eccc3b3 --- /dev/null +++ b/source/scheduler/common/setup.py @@ -0,0 +1,75 @@ +# ###################################################################################################################### +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # +# # +# Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance # +# with the License. You may obtain a copy of the License at # +# # +# http://www.apache.org/licenses/LICENSE-2.0 # +# # +# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed # +# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for # +# the specific language governing permissions and limitations under the License. # +# ###################################################################################################################### + +import re +from pathlib import Path + +import setuptools + +VERSION_RE = re.compile(r"\#\# \[(?P.*)\]", re.MULTILINE) # NOSONAR + + +def get_version(): + """ + Detect the solution version from the changelog. Latest version on top. 
+ """ + changelog = open(Path(__file__).resolve().parent.parent / "CHANGELOG.md").read() + versions = VERSION_RE.findall(changelog) + if not len(versions): + raise ValueError("use the standard semver format in your CHANGELOG.md") + build_version = versions[0] + print(f"Build Version: {build_version}") + return build_version + + +setuptools.setup( + name="aws-solutions-scheduler-common", + version=get_version(), + description="Scheduler shared libraries and CLI", + long_description=open("../README.md").read(), + author="Amazon Web Services", + url="https://aws.amazon.com/solutions/implementations", + license="Apache License 2.0", + packages=setuptools.find_namespace_packages(), + install_requires=[ + "aws-cdk.core>=1.126.0", + "aws-cdk.aws_lambda>=1.126.0", + "aws-lambda-powertools>=1.21.1", + "aws-solutions-python>=1.0.0", + "Click>=7.1.2", + "cronex==0.1.3.1", + "boto3>=1.17.52", + "requests>=2.24.0", + "crhelper>=2.0.6", + "rich>=10.12.0", + ], + entry_points=""" + [console_scripts] + aws-solutions-scheduler=aws_solutions.scheduler.common.scripts.scheduler_cli:cli + """, + python_requires=">=3.7", + classifiers=[ + "Development Status :: 4 - Beta", + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: JavaScript", + "Programming Language :: Python :: 3 :: Only", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Topic :: Software Development :: Code Generators", + "Topic :: Utilities", + "Typing :: Typed", + ], + zip_safe=False, +) diff --git a/source/tests/aws_lambda/create_campaign/test_create_campaign_handler.py b/source/tests/aws_lambda/create_campaign/test_create_campaign_handler.py index 3b6b33c..3f8d181 100644 --- a/source/tests/aws_lambda/create_campaign/test_create_campaign_handler.py +++ b/source/tests/aws_lambda/create_campaign/test_create_campaign_handler.py @@ -11,11 +11,260 @@ # the specific language governing permissions and limitations under the License. 
# # ###################################################################################################################### +from datetime import datetime, timedelta + import pytest +from dateutil.parser import isoparse +from dateutil.tz import tzlocal +from moto import mock_sts from aws_lambda.create_campaign.handler import lambda_handler +from shared.exceptions import ResourcePending +from shared.resource import Campaign, SolutionVersion def test_create_campaign(): with pytest.raises(ValueError): lambda_handler({}, None) + + +@mock_sts +def test_describe_campaign_response(personalize_stubber, notifier_stubber): + c_name = "cp_name" + sv_arn = SolutionVersion().arn("unit_test", sv_id="12345678") + personalize_stubber.add_response( + method="describe_campaign", + service_response={ + "campaign": { + "campaignArn": Campaign().arn(c_name), + "name": c_name, + "solutionVersionArn": sv_arn, + "minProvisionedTPS": 1, + "status": "ACTIVE", + "lastUpdatedDateTime": datetime.now(tzlocal()), + "creationDateTime": datetime.now(tz=tzlocal()) - timedelta(seconds=100), + } + }, + expected_params={"campaignArn": Campaign().arn(c_name)}, + ) + + result = lambda_handler( + { + "serviceConfig": { + "name": c_name, + "solutionVersionArn": sv_arn, + "minProvisionedTPS": 1, + }, + "workflowConfig": { + "maxAge": "365 days", + "timeStarted": "2021-10-19T15:18:32Z", + }, + }, + None, + ) + + assert notifier_stubber.has_notified_for_complete + assert notifier_stubber.latest_notification_status == "ACTIVE" + + +@mock_sts +def test_create_campaign_response(personalize_stubber, notifier_stubber): + c_name = "cp_name" + sv_arn = SolutionVersion().arn("unit_test", sv_id="12345678") + personalize_stubber.add_client_error( + method="describe_campaign", + service_error_code="ResourceNotFoundException", + expected_params={"campaignArn": Campaign().arn(c_name)}, + ) + personalize_stubber.add_response( + method="create_campaign", + expected_params={ + "name": c_name, + "minProvisionedTPS": 1, + "solutionVersionArn": sv_arn, + }, + service_response={"campaignArn": Campaign().arn(c_name)}, + ) + + with pytest.raises(ResourcePending): + lambda_handler( + { + "serviceConfig": { + "name": c_name, + "solutionVersionArn": sv_arn, + "minProvisionedTPS": 1, + }, + "workflowConfig": { + "maxAge": "365 days", + "timeStarted": "2021-10-19T15:18:32Z", + }, + }, + None, + ) + + assert notifier_stubber.has_notified_for_creation + assert notifier_stubber.latest_notification_status == "CREATING" + + +@mock_sts +def test_update_campaign_start(personalize_stubber, notifier_stubber): + c_name = "cp_name" + sv_arn_old = SolutionVersion().arn("unit_test", sv_id="12345678") + sv_arn_new = SolutionVersion().arn("unit_test", sv_id="01234567") + personalize_stubber.add_response( + method="describe_campaign", + service_response={ + "campaign": { + "campaignArn": Campaign().arn(c_name), + "name": c_name, + "solutionVersionArn": sv_arn_old, + "minProvisionedTPS": 1, + "status": "ACTIVE", + "lastUpdatedDateTime": datetime.now(tzlocal()), + "creationDateTime": datetime.now(tz=tzlocal()) - timedelta(seconds=100), + } + }, + expected_params={"campaignArn": Campaign().arn(c_name)}, + ) + personalize_stubber.add_response( + method="update_campaign", + service_response={ + "campaignArn": Campaign().arn(c_name), + }, + expected_params={ + "campaignArn": Campaign().arn(c_name), + "minProvisionedTPS": 1, + "solutionVersionArn": sv_arn_new, + }, + ) + + with pytest.raises(ResourcePending): + lambda_handler( + { + "serviceConfig": { + "name": c_name, + 
"solutionVersionArn": sv_arn_new, + "minProvisionedTPS": 1, + }, + "workflowConfig": { + "maxAge": "365 days", + "timeStarted": "2021-10-19T15:18:32Z", + }, + }, + None, + ) + + assert notifier_stubber.has_notified_for_creation + assert notifier_stubber.latest_notification_status == "UPDATING" + + +@mock_sts +def test_describe_campaign_response_updating(personalize_stubber, notifier_stubber): + c_name = "cp_name" + sv_arn_old = SolutionVersion().arn("unit_test", sv_id="12345678") + sv_arn_new = SolutionVersion().arn("unit_test", sv_id="01234567") + personalize_stubber.add_response( + method="describe_campaign", + service_response={ + "campaign": { + "campaignArn": Campaign().arn(c_name), + "name": c_name, + "solutionVersionArn": sv_arn_old, + "minProvisionedTPS": 1, + "status": "ACTIVE", + "lastUpdatedDateTime": datetime.now(tzlocal()) + - timedelta(seconds=1000), + "creationDateTime": datetime.now(tz=tzlocal()) + - timedelta(seconds=1100), + "latestCampaignUpdate": { + "minProvisionedTPS": 1, + "solutionVersionArn": sv_arn_new, + "creationDateTime": datetime.now(tzlocal()), + "lastUpdatedDateTime": datetime.now(tzlocal()), + "status": "UPDATE IN_PROGRESS", + }, + } + }, + expected_params={"campaignArn": Campaign().arn(c_name)}, + ) + personalize_stubber.add_client_error( + method="update_campaign", + service_error_code="ResourceInUseException", + ) + + with pytest.raises(ResourcePending): + result = lambda_handler( + { + "serviceConfig": { + "name": c_name, + "solutionVersionArn": sv_arn_new, + "minProvisionedTPS": 1, + }, + "workflowConfig": { + "maxAge": "365 days", + "timeStarted": "2021-10-19T15:18:32Z", + }, + }, + None, + ) + + assert not notifier_stubber.has_notified_for_complete + assert not notifier_stubber.has_notified_for_creation + + +@mock_sts +def test_describe_campaign_response_updated(personalize_stubber, notifier_stubber): + c_name = "cp_name" + sv_arn_new = SolutionVersion().arn("unit_test", sv_id="01234567") + personalize_stubber.add_response( + method="describe_campaign", + service_response={ + "campaign": { + "campaignArn": Campaign().arn(c_name), + "name": c_name, + "solutionVersionArn": sv_arn_new, + "minProvisionedTPS": 1, + "status": "ACTIVE", + "lastUpdatedDateTime": datetime.now(tzlocal()) + - timedelta(seconds=1000), + "creationDateTime": datetime.now(tz=tzlocal()) + - timedelta(seconds=1100), + "latestCampaignUpdate": { + "minProvisionedTPS": 1, + "solutionVersionArn": sv_arn_new, + "creationDateTime": datetime.now(tzlocal()) + - timedelta(seconds=100), + "lastUpdatedDateTime": datetime.now(tzlocal()), + "status": "ACTIVE", + }, + } + }, + expected_params={"campaignArn": Campaign().arn(c_name)}, + ) + + result = lambda_handler( + { + "serviceConfig": { + "name": c_name, + "solutionVersionArn": sv_arn_new, + "minProvisionedTPS": 1, + }, + "workflowConfig": { + "maxAge": "365 days", + "timeStarted": "2021-10-19T15:18:32Z", + }, + }, + None, + ) + + assert notifier_stubber.has_notified_for_complete + assert not notifier_stubber.has_notified_for_creation + assert notifier_stubber.latest_notification_status == "ACTIVE" + + last_updated = isoparse( + notifier_stubber.get_resource_last_updated(Campaign(), {"campaign": result}) + ) + created = isoparse( + notifier_stubber.get_resource_created(Campaign(), {"campaign": result}) + ) + assert (last_updated - created).seconds == 100 diff --git a/source/tests/aws_lambda/create_config/test_create_config_handler.py b/source/tests/aws_lambda/create_config/test_create_config_handler.py new file mode 100644 index 
0000000..cf6cce7 --- /dev/null +++ b/source/tests/aws_lambda/create_config/test_create_config_handler.py @@ -0,0 +1,180 @@ +# ###################################################################################################################### +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # +# # +# Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance # +# with the License. You may obtain a copy of the License at # +# # +# http://www.apache.org/licenses/LICENSE-2.0 # +# # +# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed # +# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for # +# the specific language governing permissions and limitations under the License. # +# ###################################################################################################################### + +from aws_lambda.create_config.handler import lambda_handler +from shared.resource import ( + DatasetGroup, + Dataset, + Solution, + Campaign, + SolutionVersion, + BatchInferenceJob, + EventTracker, + Schema, +) + + +def test_create_config(personalize_stubber): + dsg_name = "dsg" + event_tracker_name = "dsgeventtracker" + dataset_name = "dsg/INTERACTIONS" + schema_name = "dsg_interactions_schema" + solution_name = "dsgsolution" + campaign_name = "dsgcampaign" + + dsg_arn = DatasetGroup().arn(dsg_name) + dataset_arn = Dataset().arn(dataset_name) + event_tracker_arn = EventTracker().arn(event_tracker_name) + schema_arn = Schema().arn(schema_name) + solution_arn = Solution().arn(solution_name) + campaign_arn = Campaign().arn(campaign_name) + + personalize_stubber.add_response( + method="list_datasets", + service_response={ + "datasets": [{"name": "dsg_interactions", "datasetArn": dataset_arn}] + }, + ) + personalize_stubber.add_response( + method="list_dataset_import_jobs", service_response={"datasetImportJobs": []} + ) + personalize_stubber.add_response( + method="list_filters", + service_response={"Filters": []}, + expected_params={"datasetGroupArn": dsg_arn}, + ) + personalize_stubber.add_response( + method="list_solutions", + service_response={"solutions": [{"solutionArn": solution_arn}]}, + expected_params={"datasetGroupArn": dsg_arn}, + ) + personalize_stubber.add_response( + method="list_campaigns", + service_response={"campaigns": [{"campaignArn": campaign_arn}]}, + expected_params={"solutionArn": solution_arn}, + ) + personalize_stubber.add_response( + method="list_solution_versions", + service_response={ + "solutionVersions": [ + { + "solutionVersionArn": SolutionVersion().arn( + "dsgsolution", sv_id="aaaaaaaa" + ) + } + ] + }, + expected_params={"solutionArn": solution_arn}, + ) + personalize_stubber.add_response( + method="list_batch_inference_jobs", + service_response={ + "batchInferenceJobs": [ + {"batchInferenceJobArn": BatchInferenceJob().arn("dsgbatch")} + ] + }, + ) + personalize_stubber.add_response( + method="list_event_trackers", + service_response={"eventTrackers": [{"eventTrackerArn": event_tracker_arn}]}, + expected_params={"datasetGroupArn": dsg_arn}, + ) + personalize_stubber.add_response( + method="describe_dataset_group", + service_response={ + "datasetGroup": {"name": dsg_name, "datasetGroupArn": dsg_arn} + }, + expected_params={"datasetGroupArn": dsg_arn}, + ) + personalize_stubber.add_response( + method="describe_event_tracker", + service_response={ + "eventTracker": { + 
"name": event_tracker_name, + "eventTrackerArn": event_tracker_arn, + } + }, + expected_params={"eventTrackerArn": event_tracker_arn}, + ) + personalize_stubber.add_response( + method="describe_dataset", + service_response={ + "dataset": { + "name": dataset_name, + "datasetArn": dataset_arn, + "schemaArn": schema_arn, + } + }, + expected_params={"datasetArn": dataset_arn}, + ) + personalize_stubber.add_response( + method="describe_schema", + service_response={ + "schema": {"name": schema_name, "schemaArn": schema_arn, "schema": "{}"} + }, + expected_params={"schemaArn": schema_arn}, + ) + personalize_stubber.add_response( + method="describe_solution", + service_response={ + "solution": {"name": solution_name, "solutionArn": solution_arn} + }, + expected_params={"solutionArn": solution_arn}, + ) + personalize_stubber.add_response( + method="describe_campaign", + service_response={ + "campaign": { + "name": campaign_name, + "campaignArn": campaign_arn, + } + }, + expected_params={"campaignArn": campaign_arn}, + ) + + result = lambda_handler( + { + "datasetGroupName": dsg_name, + "schedules": { + "import": "cron(0 */6 * * ? *)", + "solutions": { + solution_name: { + "full": "cron(0 0 ? * 1 *)", + "update": "cron(0 * * * ? *)", + } + }, + }, + }, + None, + ) + assert result["datasetGroup"]["serviceConfig"]["name"] == dsg_name + assert ( + result["datasetGroup"]["workflowConfig"]["schedules"]["import"] + == "cron(0 */6 * * ? *)" + ) + assert result["eventTracker"]["serviceConfig"]["name"] == event_tracker_name + assert not result.get("filters") + assert len(result["solutions"]) == 1 + assert result["solutions"][0]["serviceConfig"]["name"] == solution_name + assert ( + result["solutions"][0]["workflowConfig"]["schedules"]["full"] + == "cron(0 0 ? * 1 *)" + ) + assert ( + result["solutions"][0]["workflowConfig"]["schedules"]["update"] + == "cron(0 * * * ? *)" + ) + assert len(result["solutions"][0]["campaigns"]) == 1 + assert ( + result["solutions"][0]["campaigns"][0]["serviceConfig"]["name"] == campaign_name + ) diff --git a/source/tests/aws_lambda/test_events.py b/source/tests/aws_lambda/test_events.py new file mode 100644 index 0000000..64e76c7 --- /dev/null +++ b/source/tests/aws_lambda/test_events.py @@ -0,0 +1,79 @@ +# ###################################################################################################################### +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # +# # +# Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance # +# with the License. You may obtain a copy of the License at # +# # +# http://www.apache.org/licenses/LICENSE-2.0 # +# # +# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed # +# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for # +# the specific language governing permissions and limitations under the License. 
# +# ###################################################################################################################### + +from datetime import datetime + +from dateutil.relativedelta import relativedelta +from dateutil.tz import tzlocal + +from shared.events import Notifies +from shared.resource import DatasetGroup + + +def test_notifies_decorator_create(notifier_stubber): + status = "ACTIVE" + + class RequiresNotification: + @Notifies(status=status) + def notifies_something(self, resource, **kwargs): + return {"datasetGroupArn": "SOME_ARN"} + + rn = RequiresNotification() + rn.notifies_something(DatasetGroup(), timeStarted="2021-10-10T10:00:00Z") + + assert notifier_stubber.creation_notifications[0] == { + "resource": "datasetGroup", + "status": "ACTIVE", + "result": { + "datasetGroupArn": "SOME_ARN", + }, + } + assert len(notifier_stubber.creation_notifications) == 1 + assert len(notifier_stubber.completion_notifications) == 0 + + +def test_notifies_decorator_complete(mocker, notifier_stubber): + status = "ACTIVE" + + created = datetime.now(tzlocal()) + updated = created + relativedelta(seconds=120) + + class RequiresNotification: + @Notifies(status=status) + def notifies_something(self, resource, **kwargs): + return { + "datasetGroup": { + "datasetGroupArn": "SOME_ARN", + "creationDateTime": created, + "lastUpdatedDateTime": updated, + "status": "ACTIVE", + } + } + + rn = RequiresNotification() + rn.notifies_something(DatasetGroup(), timeStarted=created) + + assert notifier_stubber.completion_notifications[0] == { + "resource": "datasetGroup", + "result": { + "datasetGroup": { + "datasetGroupArn": "SOME_ARN", + "lastUpdatedDateTime": updated, + "creationDateTime": created, + "status": "ACTIVE", + } + }, + "status": "ACTIVE", + } + assert len(notifier_stubber.creation_notifications) == 0 + assert len(notifier_stubber.completion_notifications) == 1 diff --git a/source/tests/aws_lambda/test_personalize_service.py b/source/tests/aws_lambda/test_personalize_service.py index a4cf79f..32f0d46 100644 --- a/source/tests/aws_lambda/test_personalize_service.py +++ b/source/tests/aws_lambda/test_personalize_service.py @@ -24,11 +24,11 @@ from aws_lambda.shared.personalize_service import ( S3, Personalize, - ServiceModel, Configuration, get_duplicates, ) from shared.exceptions import ResourceNeedsUpdate, ResourceFailed +from shared.personalize.service_model import ServiceModel from shared.resource import Campaign @@ -153,10 +153,10 @@ def test_service_model(personalize_stubber): filter_arn_1 = f"arn:aws:personalize:us-east-1:{'1' * 12}:filter/{filter_name_1}" filter_arn_2 = f"arn:aws:personalize:us-east-1:{'1' * 12}:filter/{filter_name_2}" campaign_arn_1 = ( - f"arn:aws:personalize:us-east-1:{'1' * 12}:filter/{campaign_name_1}" + f"arn:aws:personalize:us-east-1:{'1' * 12}:campaign/{campaign_name_1}" ) campaign_arn_2 = ( - f"arn:aws:personalize:us-east-1:{'1' * 12}:filter/{campaign_name_2}" + f"arn:aws:personalize:us-east-1:{'1' * 12}:campaign/{campaign_name_2}" ) # all dataset groups @@ -201,6 +201,11 @@ def test_service_model(personalize_stubber): expected_params={"solutionArn": solution_arn_1}, service_response={"solutionVersions": []}, ) + personalize_stubber.add_response( + method="list_event_trackers", + expected_params={"datasetGroupArn": dataset_group_arn_1}, + service_response={"eventTrackers": []}, + ) # second dataset group personalize_stubber.add_response( @@ -233,6 +238,11 @@ def test_service_model(personalize_stubber): expected_params={"solutionArn": solution_arn_2}, 
service_response={"solutionVersions": []}, ) + personalize_stubber.add_response( + method="list_event_trackers", + expected_params={"datasetGroupArn": dataset_group_arn_2}, + service_response={"eventTrackers": []}, + ) sm = ServiceModel(cli) @@ -299,6 +309,7 @@ def test_describe_with_update(mocker): describe_mock.return_value = { "campaign": { "solutionVersionArn": arn, + "campaignArn": Campaign().arn("campaign_name"), } } personalize.describe_default = describe_mock @@ -490,3 +501,61 @@ def test_record_offline_metrics( assert metrics["precision_at_5"] assert metrics["precision_at_10"] assert metrics["precision_at_25"] + + +def test_solution_version_update_validation(): + cfg = Configuration() + cfg.config_dict = { + "solutions": [ + { + "serviceConfig": { + "name": "valid", + "recipeArn": "arn:aws:personalize:::recipe/aws-user-personalization", + }, + "workflowConfig": { + "schedules": { + "full": "cron(0 0 ? * 1 *)", + "update": "cron(0 * * * ? *)", + } + }, + }, + { + "serviceConfig": { + "name": "valid", + "recipeArn": "arn:aws:personalize:::recipe/aws-sims", + }, + "workflowConfig": { + "schedules": { + "full": "cron(0 0 ? * 1 *)", + } + }, + }, + { + "serviceConfig": { + "name": "valid", + "recipeArn": "arn:aws:personalize:::recipe/aws-hrnn-coldstart", + }, + "workflowConfig": { + "schedules": { + "full": "cron(0 0 ? * 1 *)", + "update": "cron(0 * * * ? *)", + } + }, + }, + { + "serviceConfig": { + "name": "invalid", + "recipeArn": "arn:aws:personalize:::recipe/aws-sims", + }, + "workflowConfig": { + "schedules": { + "full": "cron(0 0 ? * 1 *)", + "update": "cron(0 * * * ? *)", + } + }, + }, + ] + } + cfg._validate_solution_update() + assert len(cfg._configuration_errors) == 1 + assert cfg._configuration_errors[0].startswith("solution invalid does not support") diff --git a/source/tests/aws_lambda/test_sfn_middleware.py b/source/tests/aws_lambda/test_sfn_middleware.py index c6b2892..17739ce 100644 --- a/source/tests/aws_lambda/test_sfn_middleware.py +++ b/source/tests/aws_lambda/test_sfn_middleware.py @@ -10,8 +10,8 @@ # on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for # # the specific language governing permissions and limitations under the License. 
# # ###################################################################################################################### -import datetime import logging +from datetime import datetime from decimal import Decimal import pytest @@ -29,7 +29,9 @@ set_bucket, parse_datetime, Parameter, + set_workflow_config, ) +from shared.resource import DatasetGroup @pytest.fixture @@ -64,7 +66,9 @@ def test_personalize_status_invalid(personalize_resource): @mock_sts -def test_personalize_resource_decorator(personalize_resource, personalize_stubber): +def test_personalize_resource_decorator( + personalize_resource, personalize_stubber, notifier_stubber +): """ The typical workflow is to describe, then create, then raise ResourcePending """ @@ -73,7 +77,11 @@ def test_personalize_resource_decorator(personalize_resource, personalize_stubbe "describe_dataset_group", "ResourceNotFoundException" ) personalize_stubber.add_response( - "create_dataset_group", {}, expected_params={"name": dsg_name} + "create_dataset_group", + service_response={"datasetGroupArn": DatasetGroup().arn(dsg_name)}, + expected_params={ + "name": dsg_name, + }, ) @personalize_resource @@ -87,7 +95,7 @@ def decorated(event, context): @pytest.mark.parametrize( "item,serialized", [ - (datetime.datetime(2020, 1, 1), "2020-01-01T00:00:00"), + (datetime(2020, 1, 1), "2020-01-01T00:00:00"), (Decimal(1), 1), (Decimal(1.5), 1.5), ], @@ -113,11 +121,9 @@ def test_set_defaults_2(): del defaults["currentDate"] del defaults["datasetGroup"] - assert defaults == { - "solutions": [ - {"solutionVersions": [], "campaigns": [], "batchInferenceJobs": []} - ], - } + assert defaults["solutions"][0]["solutionVersions"] == [] + assert defaults["solutions"][0]["campaigns"] == [] + assert defaults["solutions"][0]["batchInferenceJobs"] == [] def test_set_defaults_3(): @@ -204,3 +210,75 @@ def test_parameter_resolution(key, source, path, format_as, default, result): ).resolve(event) == result ) + + +def test_set_workflow_config(): + result = set_workflow_config( + { + "datasetGroup": { + "serviceConfig": {"datasetGroup": "should-not-change"}, + "workflowConfig": {"maxAge": "one day"}, + }, + "eventTracker": { + "serviceConfig": {}, + }, + "datasets": { + "users": { + "dataset": {"serviceConfig": {}}, + "schema": {"serviceConfig": {}}, + }, + "items": { + "dataset": {"serviceConfig": {}}, + "schema": {"serviceConfig": {}}, + }, + "interactions": { + "dataset": {"serviceConfig": {}}, + "schema": {"serviceConfig": {}}, + }, + }, + "filters": [{"serviceConfig": {}}], + "solutions": [ + { + "serviceConfig": {"datasetGroup": "should-not-change"}, + "campaigns": [ + { + "serviceConfig": {}, + "workflowConfig": {"maxAge": "should-not-change"}, + }, + {"serviceConfig": {}}, + ], + "batchInferenceJobs": [ + { + "serviceConfig": {}, + } + ], + }, + {"serviceConfig": {}}, + ], + } + ) + + # all workflowConfig should be set + assert result.get("datasetGroup").get("workflowConfig") + assert all(s.get("workflowConfig") for s in result["solutions"]) + assert all(f.get("workflowConfig") for f in result["filters"]) + assert all(c.get("workflowConfig") for c in result["solutions"][0]["campaigns"]) + assert all( + c.get("workflowConfig") for c in result["solutions"][0]["batchInferenceJobs"] + ) + + # keys under serviceConfig should not change + assert ( + result.get("datasetGroup").get("serviceConfig").get("datasetGroup") + == "should-not-change" + ) + assert ( + result.get("solutions")[0].get("serviceConfig").get("datasetGroup") + == "should-not-change" + ) + + # overrides to the 
default must remain unchanged + assert ( + result.get("solutions")[0]["campaigns"][0]["workflowConfig"]["maxAge"] + == "should-not-change" + ) diff --git a/source/tests/cdk_solution_helper/test_stack.py b/source/tests/cdk_solution_helper/test_stack.py index 2099b5e..5334d47 100644 --- a/source/tests/cdk_solution_helper/test_stack.py +++ b/source/tests/cdk_solution_helper/test_stack.py @@ -92,12 +92,14 @@ def test_solution_stack(): stack_description = "stack description" stack_filename = "stack-name.template" - app = App(context={"SOLUTION_ID": stack_id}) + app = App(context={"SOLUTION_ID": stack_id, "SOLUTION_VERSION": "v0.0.1"}) SolutionStack(app, "stack", stack_description, stack_filename) template = app.synth().stacks[0].template - assert template["Description"] == f"({stack_id}) {stack_description}" + assert ( + template["Description"] == f"({stack_id}) - {stack_description}. Version v0.0.1" + ) assert template["Metadata"] == { "AWS::CloudFormation::Interface": { "ParameterGroups": [], diff --git a/source/tests/conftest.py b/source/tests/conftest.py index 37f79f5..2ba35a6 100644 --- a/source/tests/conftest.py +++ b/source/tests/conftest.py @@ -14,6 +14,7 @@ import sys from pathlib import Path from tempfile import TemporaryDirectory +from typing import Dict import jsii import pytest @@ -35,6 +36,10 @@ sys.path.insert(0, shared_path) +from shared.notifiers.base import Notifier +from shared.resource import Resource + + class Solution: id = "SO0170test" version = "99.99.99" @@ -61,6 +66,9 @@ def solution_env(): os.environ[ "STATE_MACHINE_ARN" ] = f"arn:aws:states:us-east-1:{'1'*12}:stateMachine:personalize-workflow" + os.environ[ + "EVENT_BUS_ARN" + ] = f"arn:aws:events:us-east-1:{'1'*12}:event-bus/PersonalizeEventBus" yield @@ -123,3 +131,60 @@ def cdk_lambda_mocks(mocker, request): @pytest.fixture def configuration_path(): return Path(__file__).parent / "fixtures" / "config" / "sample_config.json" + + +class NotifierStub(Notifier): + def __init__(self): + self.creation_notifications = [] + self.completion_notifications = [] + + @property + def has_notified_for_creation(self) -> bool: + if len(self.creation_notifications) > 1: + raise ValueError("should not notify for creation more than once") + return len(self.creation_notifications) == 1 + + @property + def has_notified_for_complete(self) -> bool: + if len(self.completion_notifications) > 1: + raise ValueError("should not notify for completion more than once") + return len(self.completion_notifications) == 1 + + @property + def latest_notification_status(self): + if self.has_notified_for_complete and self.has_notified_for_creation: + raise ValueError("should not notifiy for both creation and completion") + + if self.has_notified_for_creation: + status = self.creation_notifications[0]["status"] + elif self.has_notified_for_complete: + status = self.completion_notifications[0]["status"] + else: + raise ValueError("no notifications have been requested") + + return status + + def notify_create(self, status: str, resource: Resource, result: Dict): + self.creation_notifications.append( + { + "resource": resource.name.camel, + "result": result, + "status": status, + } + ) + + def notify_complete(self, status: str, resource: Resource, result: Dict): + self.completion_notifications.append( + { + "resource": resource.name.camel, + "result": result, + "status": status, + } + ) + + +@pytest.fixture(scope="function") +def notifier_stubber(mocker): + notifier = NotifierStub() + mocker.patch("shared.events.NOTIFY_LIST", [notifier]) + yield notifier 
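
For context on the conftest.py changes above: the notifier_stubber fixture replaces the entries in shared.events.NOTIFY_LIST with a NotifierStub so that the Notifies decorator (exercised in test_events.py earlier in this changeset) can be observed without a real EventBridge bus. The decorator itself is not part of this diff; the following is only a minimal sketch of the dispatch it appears to perform, inferred from these tests. The internals shown here, including the direct use of _is_create and the omission of the timeStarted/cutoff handling, are assumptions rather than the solution's actual implementation.

# Sketch only: inferred from test_events.py and the NotifierStub in conftest.py.
# The real decorator lives in shared/events.py and may differ (for example, it
# likely also applies the timeStarted/cutoff logic seen in shared.notifiers.base).
import functools
from typing import Dict

from shared.events import NOTIFY_LIST  # patched to [NotifierStub()] in the tests
from shared.resource import Resource


class Notifies:
    def __init__(self, status: str):
        self.status = status

    def __call__(self, func):
        @functools.wraps(func)
        def wrapper(obj, resource: Resource, **kwargs) -> Dict:
            result = func(obj, resource, **kwargs)
            for notifier in NOTIFY_LIST:
                # A bare "<name>Arn" result indicates the wrapped call just
                # created the resource; a nested describe-style result indicates
                # the resource already exists and may have finished updating.
                if notifier._is_create(resource, result):
                    notifier.notify_create(self.status, resource, result)
                else:
                    notifier.notify_complete(self.status, resource, result)
            return result

        return wrapper

Under these assumptions, decorating a method with @Notifies(status="ACTIVE") and returning {"datasetGroupArn": ...} records a creation notification, while returning a describe-style {"datasetGroup": {...}} records a completion notification, which is exactly what test_events.py asserts against the stub.
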
diff --git a/source/tests/fixtures/config/interactions.csv b/source/tests/fixtures/config/interactions.csv new file mode 100644 index 0000000..462f71c --- /dev/null +++ b/source/tests/fixtures/config/interactions.csv @@ -0,0 +1,1001 @@ +ITEM_ID,USER_ID,TIMESTAMP,EVENT_TYPE,EVENT_VALUE +98,15,1584365781,RATING,4 +95,4,1568148087,RATING,5 +100,21,1595021513,RATING,8 +66,11,1590385766,RATING,3 +97,21,1606292264,RATING,4 +82,6,1595594827,RATING,6 +87,15,1593672319,RATING,9 +83,23,1616713539,RATING,10 +87,18,1584753273,RATING,2 +40,16,1617445226,RATING,1 +78,12,1621985298,RATING,8 +47,21,1620787263,RATING,10 +41,14,1583047958,RATING,4 +49,20,1560435332,RATING,7 +12,9,1574707239,RATING,7 +43,1,1571776732,RATING,3 +43,22,1613516794,RATING,6 +81,6,1582628001,RATING,5 +4,20,1577057660,RATING,1 +45,7,1611292791,RATING,3 +46,8,1561341566,RATING,8 +67,4,1582026073,RATING,4 +98,0,1585966667,RATING,1 +98,22,1615941158,RATING,5 +35,5,1586924584,RATING,1 +49,5,1583615097,RATING,7 +34,16,1590078616,RATING,4 +47,22,1587564883,RATING,6 +61,11,1620242299,RATING,10 +92,21,1590929405,RATING,8 +31,6,1583294233,RATING,3 +75,7,1562956240,RATING,4 +10,0,1573243678,RATING,2 +32,7,1564831451,RATING,4 +85,20,1586473046,RATING,8 +87,12,1604160428,RATING,6 +40,16,1572660798,RATING,3 +58,15,1597495045,RATING,5 +23,9,1563390033,RATING,3 +71,11,1560945456,RATING,1 +41,3,1601619139,RATING,8 +53,0,1568791360,RATING,1 +60,0,1613761922,RATING,9 +48,8,1608604953,RATING,5 +86,11,1560032563,RATING,10 +67,9,1609714128,RATING,2 +22,14,1563377976,RATING,4 +34,12,1616347776,RATING,9 +70,23,1615014632,RATING,3 +30,1,1620944826,RATING,1 +76,14,1565596495,RATING,8 +39,19,1595218682,RATING,5 +23,8,1569715059,RATING,8 +80,15,1559630095,RATING,2 +55,23,1579324249,RATING,7 +40,7,1602448392,RATING,6 +18,21,1616272877,RATING,4 +87,16,1588245409,RATING,1 +76,13,1613301982,RATING,2 +47,14,1607708002,RATING,1 +23,19,1567239480,RATING,2 +21,18,1561417646,RATING,8 +60,23,1612403900,RATING,5 +70,24,1605707272,RATING,7 +11,18,1559558343,RATING,7 +52,20,1560416070,RATING,10 +20,3,1575052846,RATING,7 +23,17,1613223347,RATING,6 +19,7,1592929775,RATING,4 +73,5,1562928725,RATING,1 +14,11,1560742098,RATING,4 +20,11,1609293695,RATING,8 +39,14,1605472123,RATING,4 +93,0,1590720673,RATING,9 +27,11,1619152021,RATING,5 +37,13,1618025083,RATING,2 +78,6,1607825444,RATING,10 +53,20,1619834539,RATING,9 +92,9,1586176015,RATING,9 +79,15,1563196246,RATING,3 +43,2,1604414325,RATING,1 +72,8,1618888507,RATING,9 +76,12,1606335516,RATING,3 +89,17,1603247699,RATING,6 +11,6,1571365148,RATING,3 +61,22,1578712883,RATING,8 +51,4,1614226476,RATING,3 +90,3,1561071224,RATING,5 +8,2,1620289996,RATING,9 +71,22,1597034996,RATING,1 +18,17,1570613299,RATING,7 +79,14,1616945752,RATING,6 +91,10,1590181737,RATING,6 +73,3,1586920361,RATING,4 +8,19,1607111559,RATING,4 +37,11,1564170905,RATING,10 +96,6,1580607562,RATING,5 +78,13,1607352028,RATING,1 +14,9,1619924466,RATING,1 +14,9,1594268460,RATING,2 +13,3,1577119164,RATING,5 +73,24,1595561179,RATING,2 +44,22,1571414629,RATING,3 +2,0,1592610430,RATING,10 +45,14,1618062281,RATING,3 +44,24,1602861408,RATING,7 +76,1,1619530583,RATING,3 +34,8,1593488577,RATING,2 +68,18,1615499823,RATING,5 +70,2,1559853980,RATING,7 +98,1,1573066104,RATING,9 +34,19,1583780541,RATING,3 +13,8,1582480437,RATING,4 +81,9,1572568398,RATING,10 +53,14,1585044873,RATING,3 +79,17,1590628616,RATING,3 +54,20,1597000616,RATING,2 +6,17,1610474277,RATING,4 +44,23,1565732134,RATING,9 +77,11,1609805856,RATING,10 +37,19,1601323016,RATING,1 +39,17,1600045914,RATING,6 
+97,10,1601890547,RATING,8 +51,16,1601508282,RATING,5 +63,16,1594969483,RATING,3 +93,4,1578046084,RATING,2 +71,0,1585250540,RATING,4 +35,9,1586253099,RATING,2 +39,0,1569385753,RATING,3 +11,18,1580480370,RATING,10 +64,16,1582860535,RATING,2 +11,9,1593650492,RATING,2 +51,8,1596319357,RATING,3 +6,5,1576981066,RATING,4 +61,22,1578812896,RATING,10 +23,21,1603821749,RATING,1 +44,7,1619617381,RATING,9 +45,10,1600172151,RATING,9 +9,2,1568588418,RATING,5 +98,18,1598949850,RATING,3 +82,9,1586624788,RATING,2 +26,9,1605141610,RATING,6 +25,11,1568366450,RATING,10 +90,19,1562112359,RATING,1 +100,18,1584339694,RATING,3 +88,15,1570641867,RATING,1 +16,21,1579725819,RATING,3 +82,20,1571418676,RATING,4 +97,9,1565366165,RATING,7 +1,20,1590395812,RATING,9 +67,14,1610219804,RATING,7 +1,22,1604137622,RATING,10 +11,23,1579141400,RATING,6 +12,2,1582299721,RATING,3 +33,10,1619857133,RATING,1 +1,16,1597728272,RATING,6 +47,21,1613040367,RATING,7 +16,20,1562253032,RATING,5 +12,10,1574216777,RATING,2 +68,13,1615932843,RATING,5 +98,19,1586886243,RATING,8 +56,2,1577246705,RATING,3 +45,22,1609884936,RATING,4 +87,7,1586740417,RATING,5 +20,9,1583260309,RATING,4 +37,13,1567643618,RATING,9 +24,13,1569027209,RATING,1 +77,17,1612186736,RATING,8 +100,24,1589561792,RATING,4 +43,22,1561963255,RATING,3 +13,21,1572266542,RATING,4 +74,22,1581580899,RATING,3 +15,9,1617238465,RATING,10 +58,15,1584679149,RATING,8 +74,18,1595538731,RATING,10 +82,13,1568658448,RATING,8 +17,9,1582221000,RATING,9 +44,15,1598585487,RATING,8 +97,4,1597610596,RATING,6 +53,1,1584663176,RATING,8 +84,4,1592260788,RATING,4 +67,10,1560577420,RATING,4 +51,3,1564091938,RATING,6 +86,24,1619535069,RATING,1 +48,21,1615398550,RATING,6 +50,8,1606252919,RATING,10 +21,7,1597049477,RATING,2 +50,11,1586567148,RATING,2 +84,4,1577053742,RATING,10 +29,8,1586995353,RATING,2 +22,0,1592421064,RATING,7 +100,11,1586008061,RATING,1 +88,12,1609902082,RATING,9 +92,17,1600603863,RATING,1 +49,18,1589420881,RATING,10 +62,17,1606592338,RATING,7 +76,16,1572568605,RATING,7 +86,0,1603301367,RATING,3 +42,18,1615604895,RATING,2 +15,17,1595120439,RATING,9 +13,20,1573675441,RATING,6 +46,17,1592570228,RATING,10 +88,11,1565500081,RATING,8 +67,8,1577380601,RATING,3 +5,12,1583074107,RATING,8 +23,6,1565677182,RATING,9 +33,5,1614128321,RATING,7 +68,13,1562327640,RATING,2 +53,1,1575790876,RATING,3 +88,21,1596423069,RATING,8 +61,17,1562150429,RATING,8 +72,16,1581077707,RATING,1 +45,2,1616564377,RATING,2 +18,18,1615411965,RATING,1 +34,3,1591473615,RATING,9 +40,21,1568812572,RATING,9 +32,18,1583305972,RATING,2 +8,13,1567741643,RATING,3 +64,12,1594025255,RATING,1 +48,24,1619174632,RATING,9 +62,18,1619296635,RATING,2 +38,11,1572287631,RATING,2 +82,20,1587236984,RATING,5 +26,19,1620870157,RATING,9 +10,9,1577713965,RATING,4 +28,2,1608215445,RATING,8 +34,23,1593595272,RATING,5 +88,18,1583585835,RATING,1 +45,2,1589335070,RATING,5 +22,12,1615969077,RATING,5 +75,0,1583135668,RATING,10 +97,9,1566346905,RATING,2 +63,21,1605202897,RATING,2 +38,7,1580210396,RATING,3 +86,15,1614055763,RATING,8 +54,23,1597385019,RATING,10 +89,18,1584119931,RATING,4 +52,18,1618173596,RATING,9 +38,3,1618612009,RATING,9 +32,4,1596249446,RATING,8 +37,23,1586659922,RATING,2 +72,14,1579735019,RATING,4 +10,21,1606925383,RATING,3 +33,16,1603350026,RATING,5 +88,4,1614949535,RATING,1 +65,11,1602970086,RATING,7 +28,3,1592281504,RATING,5 +13,20,1617243324,RATING,4 +21,24,1581485663,RATING,6 +24,11,1570554440,RATING,6 +21,0,1585831694,RATING,8 +42,5,1581968934,RATING,4 +100,1,1616948680,RATING,9 +97,13,1599908698,RATING,3 +47,1,1618196863,RATING,8 
+82,14,1599112146,RATING,3 +71,17,1572986374,RATING,2 +2,8,1581459263,RATING,8 +76,9,1607568770,RATING,4 +99,16,1587818652,RATING,8 +17,16,1575480929,RATING,4 +96,0,1600388934,RATING,5 +55,21,1585242251,RATING,9 +25,16,1576082853,RATING,5 +68,15,1590316524,RATING,2 +52,8,1579590950,RATING,7 +45,8,1607699956,RATING,9 +33,1,1609983615,RATING,3 +25,17,1618714626,RATING,9 +21,20,1606487487,RATING,6 +9,19,1592647222,RATING,3 +62,1,1616592787,RATING,3 +38,19,1619275546,RATING,4 +12,11,1618609885,RATING,4 +18,14,1612586143,RATING,8 +33,16,1560556620,RATING,6 +83,22,1596934383,RATING,9 +94,11,1562838709,RATING,1 +64,12,1566516140,RATING,1 +68,5,1590006522,RATING,7 +17,22,1579006232,RATING,10 +98,8,1594841642,RATING,2 +46,4,1597679730,RATING,7 +22,14,1619675350,RATING,5 +36,17,1601947194,RATING,6 +8,8,1619634421,RATING,9 +63,7,1585940236,RATING,6 +9,21,1576403852,RATING,6 +42,13,1593153115,RATING,9 +87,13,1568213158,RATING,6 +33,7,1564346811,RATING,9 +38,2,1581706381,RATING,3 +81,18,1574632435,RATING,5 +12,6,1573963537,RATING,9 +75,20,1583398594,RATING,3 +18,0,1590096518,RATING,1 +51,2,1610413098,RATING,4 +100,21,1574617315,RATING,9 +64,20,1564715511,RATING,1 +5,7,1595799490,RATING,9 +85,6,1570635845,RATING,6 +56,2,1604718073,RATING,3 +33,15,1620587873,RATING,5 +94,22,1571121822,RATING,4 +85,21,1593172647,RATING,8 +47,8,1608771704,RATING,8 +23,5,1608042872,RATING,1 +40,18,1574427665,RATING,3 +35,24,1592796704,RATING,10 +99,21,1601050172,RATING,4 +37,6,1586691628,RATING,9 +57,14,1561438072,RATING,4 +2,8,1564406312,RATING,1 +73,15,1606503598,RATING,7 +37,16,1601220292,RATING,5 +93,4,1585567276,RATING,5 +99,17,1586697567,RATING,1 +51,7,1571353038,RATING,1 +18,6,1569217483,RATING,10 +54,6,1596661047,RATING,2 +56,16,1591293740,RATING,1 +7,2,1560711891,RATING,4 +23,18,1613518029,RATING,3 +58,22,1570363547,RATING,9 +14,23,1605561017,RATING,7 +46,18,1608044963,RATING,6 +36,2,1606001869,RATING,1 +65,12,1606189537,RATING,6 +23,17,1610587350,RATING,6 +91,9,1605438673,RATING,10 +38,24,1570871333,RATING,10 +88,24,1585636260,RATING,10 +31,7,1563495195,RATING,1 +47,0,1613303233,RATING,8 +40,21,1593187130,RATING,9 +26,8,1579290675,RATING,10 +57,22,1577881375,RATING,9 +48,7,1576199252,RATING,8 +3,3,1620203019,RATING,2 +11,23,1573994676,RATING,2 +3,5,1568325647,RATING,2 +68,1,1593640252,RATING,2 +80,19,1615273425,RATING,2 +8,10,1617415452,RATING,8 +73,8,1580721254,RATING,9 +38,1,1613491497,RATING,1 +68,3,1582477307,RATING,1 +40,19,1607862266,RATING,1 +21,13,1620635844,RATING,7 +69,16,1596566818,RATING,10 +59,9,1620367373,RATING,5 +87,14,1599270108,RATING,7 +36,17,1611283773,RATING,8 +14,0,1605535508,RATING,3 +77,20,1609267663,RATING,9 +90,10,1605262034,RATING,2 +2,10,1597295519,RATING,6 +96,17,1595545213,RATING,10 +64,15,1613537557,RATING,7 +54,11,1599074164,RATING,10 +31,10,1595872030,RATING,6 +93,1,1581337377,RATING,8 +49,5,1616291193,RATING,6 +100,7,1617749781,RATING,4 +18,3,1611704585,RATING,9 +70,16,1568525759,RATING,9 +21,20,1592782462,RATING,4 +70,4,1609182118,RATING,6 +9,5,1609736516,RATING,2 +71,5,1583682498,RATING,8 +88,23,1561911140,RATING,8 +6,9,1576803858,RATING,2 +68,11,1567049745,RATING,5 +77,9,1591521038,RATING,4 +72,7,1606157205,RATING,7 +41,22,1594744598,RATING,3 +97,8,1615016958,RATING,3 +43,0,1574117789,RATING,6 +20,16,1586311313,RATING,6 +38,4,1580063142,RATING,6 +96,9,1582997881,RATING,8 +21,0,1602903524,RATING,2 +82,23,1603665494,RATING,10 +69,5,1596765054,RATING,5 +10,20,1590993138,RATING,3 +56,3,1570071620,RATING,4 +88,18,1577386956,RATING,9 +5,10,1615222240,RATING,4 
+29,4,1581222941,RATING,7 +12,9,1615575623,RATING,10 +85,8,1615854671,RATING,4 +17,0,1586822560,RATING,8 +61,2,1566426159,RATING,8 +46,5,1564826505,RATING,10 +14,3,1596542805,RATING,4 +73,1,1582629868,RATING,6 +48,2,1577907136,RATING,2 +85,2,1613836250,RATING,10 +11,7,1603478376,RATING,10 +81,16,1571806611,RATING,6 +82,24,1578641743,RATING,9 +68,11,1610699400,RATING,5 +75,21,1580754101,RATING,8 +74,19,1587894533,RATING,8 +23,7,1607368837,RATING,5 +95,14,1616336758,RATING,10 +5,0,1593913973,RATING,4 +60,23,1611257022,RATING,1 +96,13,1600483979,RATING,6 +85,8,1590267806,RATING,6 +94,22,1614623835,RATING,7 +19,4,1612115623,RATING,7 +76,20,1611477743,RATING,6 +93,1,1589821092,RATING,6 +22,7,1609381526,RATING,2 +14,16,1591930773,RATING,3 +49,15,1577569161,RATING,4 +64,19,1594063163,RATING,5 +33,15,1609709665,RATING,4 +92,15,1564393442,RATING,4 +34,1,1578040268,RATING,3 +37,2,1600306050,RATING,1 +34,15,1582034070,RATING,7 +21,20,1582182697,RATING,10 +98,21,1561242721,RATING,8 +59,16,1577953356,RATING,10 +30,19,1612417410,RATING,6 +57,7,1618050551,RATING,7 +94,8,1591009904,RATING,5 +64,1,1605771139,RATING,9 +1,23,1589257315,RATING,10 +48,20,1599425244,RATING,8 +30,9,1596185455,RATING,4 +57,23,1573361584,RATING,1 +94,15,1603643894,RATING,8 +53,22,1578346365,RATING,6 +88,5,1611543778,RATING,1 +98,16,1610119559,RATING,5 +80,0,1585097792,RATING,2 +52,4,1604812962,RATING,9 +24,24,1569692560,RATING,1 +74,2,1597611321,RATING,9 +62,5,1612198240,RATING,5 +59,1,1561970914,RATING,8 +24,7,1580636913,RATING,9 +69,9,1580288292,RATING,1 +33,16,1563177745,RATING,7 +71,22,1596495833,RATING,7 +47,21,1577193958,RATING,2 +39,0,1621560181,RATING,8 +72,11,1575250731,RATING,5 +76,18,1576135770,RATING,9 +21,7,1620163825,RATING,2 +75,0,1574624886,RATING,9 +77,19,1581256537,RATING,1 +48,20,1569828145,RATING,5 +33,23,1616911662,RATING,5 +73,5,1579820227,RATING,6 +12,5,1580956858,RATING,9 +48,18,1601911233,RATING,10 +51,1,1559709964,RATING,2 +84,18,1561766681,RATING,3 +91,8,1598582552,RATING,5 +85,21,1596237488,RATING,10 +61,18,1569892026,RATING,5 +47,0,1606030615,RATING,6 +52,12,1572394323,RATING,9 +72,10,1592899254,RATING,6 +10,6,1609114743,RATING,1 +77,23,1591065357,RATING,5 +48,16,1583698754,RATING,7 +10,18,1561048543,RATING,5 +65,19,1569499648,RATING,5 +77,8,1571100855,RATING,8 +70,8,1560171244,RATING,1 +61,2,1584422139,RATING,8 +34,1,1589320947,RATING,10 +81,11,1579505111,RATING,8 +22,3,1587427648,RATING,2 +58,2,1592978170,RATING,2 +44,18,1610220513,RATING,8 +32,3,1562035091,RATING,8 +96,21,1588249271,RATING,7 +23,12,1609597057,RATING,6 +32,1,1573803266,RATING,4 +24,17,1569560116,RATING,9 +90,24,1575162762,RATING,5 +88,13,1620726828,RATING,6 +59,6,1563878999,RATING,5 +85,22,1585960680,RATING,8 +91,13,1619025632,RATING,9 +96,23,1594111633,RATING,5 +91,19,1561050102,RATING,1 +49,17,1583235250,RATING,2 +30,22,1614225600,RATING,3 +80,10,1605940644,RATING,3 +9,4,1596033815,RATING,7 +35,17,1613554413,RATING,1 +82,8,1575792643,RATING,3 +12,22,1605626872,RATING,6 +86,8,1614070467,RATING,10 +59,12,1616728577,RATING,6 +81,12,1584640558,RATING,4 +66,24,1570290491,RATING,6 +86,22,1578940615,RATING,9 +45,17,1571018493,RATING,9 +7,8,1571858123,RATING,5 +65,0,1590861706,RATING,10 +15,17,1599550747,RATING,3 +35,4,1583010560,RATING,4 +29,22,1605435413,RATING,5 +20,12,1592868743,RATING,7 +70,13,1621789435,RATING,1 +41,7,1561083858,RATING,6 +77,13,1570909048,RATING,1 +92,0,1560354022,RATING,4 +15,8,1559290389,RATING,10 +10,4,1574572667,RATING,6 +71,9,1604975899,RATING,4 +19,17,1593113553,RATING,7 +42,15,1589378260,RATING,5 
+43,0,1574627285,RATING,3 +59,14,1566221206,RATING,4 +86,4,1573918988,RATING,8 +64,24,1605518434,RATING,2 +97,23,1580830810,RATING,4 +81,19,1581493953,RATING,4 +55,18,1594583443,RATING,6 +96,7,1608648411,RATING,8 +58,7,1579478732,RATING,5 +47,19,1562734703,RATING,10 +73,17,1605772586,RATING,2 +48,0,1603145094,RATING,8 +87,4,1575800076,RATING,5 +29,6,1592367059,RATING,2 +72,20,1575457700,RATING,2 +77,10,1605876943,RATING,1 +51,19,1609552990,RATING,1 +96,22,1595935889,RATING,9 +32,4,1584849419,RATING,7 +33,20,1607986508,RATING,10 +69,5,1587241689,RATING,3 +94,10,1617602339,RATING,4 +40,1,1608661229,RATING,6 +41,4,1566497383,RATING,1 +58,17,1562239718,RATING,1 +72,11,1616838707,RATING,10 +95,19,1562800670,RATING,3 +92,1,1571840580,RATING,4 +28,13,1601482919,RATING,4 +29,22,1617246190,RATING,5 +76,9,1593823441,RATING,2 +76,19,1601403118,RATING,4 +45,4,1596608474,RATING,1 +27,23,1600216506,RATING,2 +59,23,1570845686,RATING,5 +75,1,1574245925,RATING,1 +86,21,1564618293,RATING,4 +76,24,1566697468,RATING,6 +4,18,1613984255,RATING,6 +43,20,1601079639,RATING,3 +39,16,1596454469,RATING,3 +49,13,1614755697,RATING,6 +5,8,1588565640,RATING,10 +87,8,1566575207,RATING,8 +58,20,1599415661,RATING,6 +86,24,1574939512,RATING,3 +67,3,1608548584,RATING,3 +47,5,1599040953,RATING,4 +85,13,1563121730,RATING,10 +100,23,1568353189,RATING,4 +81,11,1607201075,RATING,7 +19,18,1567378609,RATING,3 +35,4,1620042215,RATING,2 +100,16,1582462702,RATING,8 +100,16,1575316587,RATING,3 +86,3,1562020015,RATING,7 +90,1,1596066572,RATING,6 +16,16,1620660654,RATING,1 +49,23,1571977039,RATING,8 +81,18,1569036011,RATING,5 +86,4,1561360446,RATING,2 +17,21,1611832247,RATING,6 +20,20,1576414906,RATING,6 +99,11,1598964056,RATING,5 +87,13,1576795659,RATING,7 +27,6,1590130017,RATING,6 +21,9,1602561646,RATING,7 +82,4,1620395121,RATING,9 +3,12,1561423551,RATING,5 +4,1,1573959891,RATING,3 +44,18,1601292728,RATING,10 +89,2,1580960226,RATING,5 +78,11,1603697773,RATING,9 +84,0,1595400867,RATING,1 +6,14,1593299097,RATING,7 +40,0,1603726035,RATING,3 +21,12,1584074278,RATING,7 +21,4,1605958984,RATING,3 +45,4,1603440757,RATING,4 +56,8,1616194399,RATING,8 +36,10,1607392309,RATING,2 +79,19,1585746858,RATING,1 +83,19,1566914015,RATING,6 +52,0,1572424973,RATING,8 +92,14,1583651329,RATING,6 +51,15,1597506629,RATING,6 +55,8,1614739587,RATING,5 +67,21,1596388361,RATING,5 +95,0,1578116284,RATING,2 +57,3,1562756196,RATING,6 +50,10,1603731659,RATING,3 +66,7,1585464024,RATING,1 +90,3,1598051160,RATING,10 +30,12,1594903371,RATING,9 +73,12,1590598356,RATING,9 +1,19,1612333235,RATING,6 +25,9,1603224041,RATING,5 +47,12,1594190405,RATING,3 +93,8,1596846220,RATING,6 +26,17,1598563070,RATING,6 +31,0,1612811381,RATING,2 +3,21,1601483497,RATING,8 +30,12,1614191698,RATING,4 +17,5,1609177145,RATING,2 +29,3,1574496003,RATING,8 +9,9,1578432809,RATING,8 +28,0,1606073545,RATING,5 +73,21,1572011919,RATING,5 +100,8,1592985878,RATING,2 +4,23,1564209948,RATING,4 +68,12,1566185038,RATING,10 +12,10,1559955769,RATING,9 +40,20,1569484555,RATING,10 +64,24,1563495877,RATING,4 +50,4,1585959783,RATING,5 +83,12,1578528224,RATING,5 +96,17,1581021561,RATING,8 +61,16,1593229896,RATING,2 +25,7,1572222922,RATING,7 +65,11,1565069332,RATING,8 +73,9,1597560156,RATING,3 +99,7,1560284899,RATING,7 +53,6,1581322075,RATING,1 +98,9,1617479249,RATING,10 +28,11,1567890666,RATING,5 +70,16,1572720285,RATING,1 +89,3,1573255259,RATING,6 +50,4,1618643407,RATING,3 +30,20,1570922149,RATING,3 +94,10,1576180422,RATING,7 +56,9,1563927009,RATING,2 +52,22,1621444640,RATING,8 +31,20,1568329367,RATING,6 
+6,7,1573084774,RATING,10 +82,0,1571778599,RATING,3 +16,23,1560934083,RATING,9 +40,0,1619671420,RATING,4 +2,10,1603365205,RATING,6 +2,18,1606339408,RATING,7 +14,18,1617972334,RATING,8 +25,14,1619233877,RATING,5 +35,13,1619885792,RATING,7 +72,11,1581835618,RATING,8 +43,3,1593933595,RATING,8 +36,1,1565719522,RATING,1 +4,24,1565816577,RATING,10 +31,14,1566152892,RATING,6 +55,24,1575758212,RATING,3 +57,18,1619811418,RATING,3 +52,20,1620260249,RATING,2 +40,6,1561123400,RATING,5 +100,18,1602329256,RATING,2 +45,4,1595524605,RATING,6 +45,16,1612571280,RATING,3 +31,23,1621286143,RATING,6 +85,0,1611649677,RATING,2 +87,12,1589912227,RATING,2 +28,2,1576208766,RATING,4 +59,15,1585929455,RATING,1 +19,24,1567716597,RATING,8 +33,16,1563409123,RATING,2 +65,7,1577657111,RATING,2 +1,21,1589993975,RATING,6 +89,14,1577679412,RATING,6 +66,13,1620996575,RATING,5 +30,8,1610101136,RATING,5 +79,21,1577595786,RATING,2 +62,23,1567265456,RATING,9 +6,16,1574324305,RATING,5 +74,18,1611237085,RATING,1 +94,9,1566722828,RATING,3 +75,15,1571549166,RATING,6 +89,7,1568695041,RATING,9 +67,12,1567807830,RATING,8 +77,11,1621706988,RATING,7 +1,21,1565921677,RATING,10 +73,6,1606423668,RATING,9 +98,3,1579562808,RATING,7 +94,6,1607874330,RATING,3 +67,17,1593576557,RATING,5 +64,0,1618762177,RATING,4 +58,15,1593942261,RATING,5 +58,20,1577366473,RATING,10 +11,9,1611827244,RATING,10 +97,1,1580011857,RATING,2 +89,9,1608906723,RATING,9 +88,23,1614967940,RATING,2 +100,0,1580831730,RATING,7 +8,19,1594219122,RATING,4 +45,20,1561930880,RATING,8 +53,7,1559322185,RATING,5 +5,1,1565269820,RATING,5 +90,20,1573642044,RATING,8 +34,13,1562573893,RATING,8 +84,13,1590863372,RATING,6 +45,20,1579324000,RATING,3 +6,17,1618263618,RATING,2 +2,17,1612115252,RATING,8 +80,15,1565156813,RATING,2 +45,17,1581011869,RATING,10 +29,4,1593194020,RATING,2 +69,20,1594664584,RATING,6 +47,14,1560055685,RATING,6 +39,17,1565667743,RATING,7 +22,14,1610584968,RATING,6 +58,10,1617792237,RATING,2 +16,7,1573846618,RATING,6 +1,11,1621068237,RATING,8 +76,16,1586900210,RATING,9 +18,10,1566942118,RATING,8 +13,4,1565600801,RATING,7 +85,3,1604648813,RATING,6 +96,12,1585199381,RATING,1 +92,7,1569657105,RATING,2 +98,14,1613139576,RATING,9 +64,3,1566551104,RATING,6 +21,6,1611399040,RATING,1 +22,21,1583464094,RATING,4 +99,14,1598185743,RATING,7 +3,18,1592253699,RATING,2 +13,4,1605899044,RATING,9 +100,8,1577632936,RATING,8 +64,12,1569070485,RATING,8 +74,1,1603903103,RATING,3 +23,23,1574178943,RATING,5 +85,2,1577935501,RATING,6 +22,15,1581739361,RATING,10 +20,21,1618744872,RATING,10 +86,13,1576553165,RATING,3 +82,19,1572517729,RATING,7 +2,22,1584338242,RATING,3 +35,23,1570346118,RATING,9 +65,22,1599494529,RATING,10 +78,1,1578538414,RATING,6 +63,20,1562227577,RATING,1 +85,19,1586857168,RATING,9 +27,3,1579656052,RATING,4 +97,1,1609887898,RATING,7 +36,3,1590670117,RATING,6 +30,13,1581854080,RATING,8 +27,19,1585410559,RATING,10 +57,24,1571435837,RATING,4 +100,14,1571801458,RATING,6 +8,20,1580679239,RATING,8 +61,12,1595410837,RATING,10 +70,13,1596228424,RATING,6 +89,3,1585756529,RATING,7 +35,14,1563177791,RATING,2 +42,3,1609851195,RATING,8 +99,19,1567947556,RATING,8 +93,6,1571519913,RATING,4 +11,18,1578222374,RATING,9 +47,11,1589571924,RATING,6 +80,10,1584334748,RATING,10 +18,17,1578105525,RATING,3 +24,8,1582053664,RATING,5 +91,11,1561249055,RATING,2 +94,11,1577445426,RATING,9 +14,16,1583729305,RATING,5 +47,9,1600290381,RATING,10 +20,16,1602918874,RATING,8 +17,8,1602433603,RATING,8 +37,7,1582839705,RATING,10 +15,4,1603347252,RATING,9 +10,17,1609881681,RATING,10 +90,17,1566699175,RATING,4 
+3,20,1577239942,RATING,4 +72,19,1589675207,RATING,10 +38,19,1573744363,RATING,2 +7,23,1564353503,RATING,7 +39,12,1581905085,RATING,5 +94,8,1620350259,RATING,6 +78,10,1569419557,RATING,10 +17,15,1571149360,RATING,9 +43,14,1568194611,RATING,9 +85,7,1583162835,RATING,8 +49,17,1609109960,RATING,3 +31,20,1598562532,RATING,3 +93,21,1575271188,RATING,6 +31,4,1563908341,RATING,2 +46,20,1606818996,RATING,1 +57,6,1618345097,RATING,2 +39,11,1622020757,RATING,2 +33,24,1603642191,RATING,1 +99,7,1613882220,RATING,1 +100,5,1604037196,RATING,10 +23,16,1593254527,RATING,10 +79,10,1570120797,RATING,7 +87,10,1562088882,RATING,1 +86,18,1573447466,RATING,10 +10,9,1599430333,RATING,5 +54,9,1619320739,RATING,10 +75,20,1588706525,RATING,2 +90,20,1574018013,RATING,1 +69,23,1604921582,RATING,7 +9,12,1573006720,RATING,10 +4,18,1618836191,RATING,6 +5,4,1620894027,RATING,3 +48,12,1602344117,RATING,7 +55,14,1619852825,RATING,2 +4,7,1591550289,RATING,4 +66,22,1608694274,RATING,9 +80,3,1578373429,RATING,10 +64,7,1586976575,RATING,4 +76,4,1561499527,RATING,2 +92,9,1591515746,RATING,4 +79,15,1620551743,RATING,7 +58,11,1619062324,RATING,6 +13,22,1569913922,RATING,4 +25,22,1592729989,RATING,5 +53,6,1605664946,RATING,4 +92,24,1603888039,RATING,9 +55,4,1563928374,RATING,3 +68,18,1590477420,RATING,5 +71,7,1597927447,RATING,3 +64,4,1561059890,RATING,9 +48,24,1601309330,RATING,8 +66,20,1618241335,RATING,7 +34,23,1561276550,RATING,8 +75,20,1575615165,RATING,4 +68,6,1585076246,RATING,1 +65,10,1612874518,RATING,8 +84,11,1576856391,RATING,6 +30,7,1615306809,RATING,6 +16,19,1601684617,RATING,3 +48,10,1566315721,RATING,7 +73,5,1593319276,RATING,10 +46,16,1565800176,RATING,6 +97,15,1619382864,RATING,3 +61,17,1565488701,RATING,3 +30,8,1584304041,RATING,6 +44,5,1597564313,RATING,2 +78,17,1589095311,RATING,9 +75,24,1576539012,RATING,7 +74,17,1617784511,RATING,7 +23,21,1577549281,RATING,5 +8,16,1603949177,RATING,8 +79,1,1615917534,RATING,8 +10,10,1588881047,RATING,3 +6,17,1614498261,RATING,4 +20,22,1583753347,RATING,10 +77,10,1575329432,RATING,7 +57,2,1585383444,RATING,5 +45,4,1581277196,RATING,6 +54,9,1570047039,RATING,9 +11,19,1587467901,RATING,1 +28,14,1564799576,RATING,7 +9,5,1600095461,RATING,8 +33,19,1575437959,RATING,2 +88,15,1572002190,RATING,3 +48,2,1590095677,RATING,10 +81,8,1613395864,RATING,8 +17,5,1564774565,RATING,10 +32,21,1577115987,RATING,10 +82,14,1582317172,RATING,10 +17,3,1606483562,RATING,8 +82,9,1596640292,RATING,10 +19,12,1607693125,RATING,4 +80,18,1609196226,RATING,10 +50,6,1613064146,RATING,6 +8,11,1584901770,RATING,10 +16,2,1568790188,RATING,9 +35,21,1608758689,RATING,2 +53,9,1620894715,RATING,2 +32,4,1570286748,RATING,9 +28,10,1594317371,RATING,5 +58,7,1591380380,RATING,10 +49,0,1613081018,RATING,9 +92,14,1591325144,RATING,9 +3,19,1592774324,RATING,7 +55,22,1564332904,RATING,3 +73,22,1589258347,RATING,10 +87,5,1585145466,RATING,6 +3,11,1586997890,RATING,7 +7,9,1570248527,RATING,10 +45,13,1588933052,RATING,5 +2,18,1577466206,RATING,7 +11,1,1613956749,RATING,10 +78,18,1597381222,RATING,3 +97,7,1606375381,RATING,8 +66,11,1587740315,RATING,3 +80,14,1598167788,RATING,9 +31,12,1580868146,RATING,10 +26,15,1618069392,RATING,1 +7,11,1608478627,RATING,1 +9,7,1582267532,RATING,2 +64,24,1611104464,RATING,3 +14,13,1595698003,RATING,9 +36,10,1569048324,RATING,5 +56,1,1591278556,RATING,1 +71,4,1584275720,RATING,2 +67,3,1607094639,RATING,6 +65,18,1574368413,RATING,2 +56,6,1607702018,RATING,9 +100,21,1586749794,RATING,8 +21,7,1593187107,RATING,7 +77,9,1595386165,RATING,8 +60,22,1617797010,RATING,9 +50,10,1577437920,RATING,3 
+66,3,1574653402,RATING,1 +53,16,1582002563,RATING,8 +92,22,1621670325,RATING,2 +58,23,1567977640,RATING,4 +96,11,1600941449,RATING,6 +27,7,1619239062,RATING,8 +44,2,1559485568,RATING,9 +43,24,1560535270,RATING,8 +4,8,1610656617,RATING,1 +70,17,1587572512,RATING,9 +8,19,1620374531,RATING,6 +44,13,1613860557,RATING,9 +39,24,1561683553,RATING,9 +90,14,1603199231,RATING,2 +13,23,1568560999,RATING,1 +3,9,1570262385,RATING,4 +15,0,1582264281,RATING,3 +53,3,1612157739,RATING,6 +51,0,1562851228,RATING,9 +41,24,1579524075,RATING,1 +99,19,1617365868,RATING,7 +15,5,1574761349,RATING,8 +33,23,1590305309,RATING,1 +49,14,1592179565,RATING,4 +4,5,1566871828,RATING,9 +22,22,1576336178,RATING,8 +13,22,1606551884,RATING,1 +78,7,1586375766,RATING,2 +57,3,1614914476,RATING,4 +67,8,1606469158,RATING,1 +45,6,1600145772,RATING,8 +66,21,1583214245,RATING,2 +93,9,1614854240,RATING,3 +65,20,1601757263,RATING,2 +31,0,1585811559,RATING,5 +43,7,1613464499,RATING,3 +63,14,1607360015,RATING,8 +11,19,1615427668,RATING,3 +78,4,1612293587,RATING,10 +97,12,1578908468,RATING,2 +42,5,1615225558,RATING,5 +2,23,1597466458,RATING,7 +94,18,1597760638,RATING,8 +84,6,1586548852,RATING,2 +64,19,1611520986,RATING,4 +52,17,1570274184,RATING,9 +51,6,1594723052,RATING,10 +56,19,1571285739,RATING,9 +12,23,1615649736,RATING,2 +68,14,1560128162,RATING,10 +72,17,1563244695,RATING,8 +48,13,1608522432,RATING,2 +83,20,1563874191,RATING,7 +52,3,1585067285,RATING,6 +78,1,1565382006,RATING,2 +1,11,1600974852,RATING,2 +52,12,1583355984,RATING,8 +76,7,1606486667,RATING,1 +19,4,1596975719,RATING,5 +21,19,1615887543,RATING,7 +16,23,1592547689,RATING,2 +45,12,1621938942,RATING,5 +88,10,1570505267,RATING,10 +7,13,1615795735,RATING,2 +12,14,1573914234,RATING,9 +5,20,1605204228,RATING,5 +98,16,1617852423,RATING,9 +36,8,1597747860,RATING,1 +20,20,1577505009,RATING,5 +47,20,1620702333,RATING,8 +53,16,1612003927,RATING,6 +69,16,1594969180,RATING,6 +80,3,1565574016,RATING,6 +37,6,1570487740,RATING,1 +15,24,1574948109,RATING,10 +2,19,1567737535,RATING,2 +93,11,1595982896,RATING,2 +24,6,1617864290,RATING,7 +69,1,1608977177,RATING,4 +22,15,1610338045,RATING,9 diff --git a/source/tests/fixtures/config/step_1.json b/source/tests/fixtures/config/step_1.json deleted file mode 100644 index c59d237..0000000 --- a/source/tests/fixtures/config/step_1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "datasetGroup": { - "name": "unit_test_only_datasetgroup" - } -} \ No newline at end of file diff --git a/source/tests/fixtures/config/step_2.json b/source/tests/fixtures/config/step_2.json deleted file mode 100644 index e9b73d0..0000000 --- a/source/tests/fixtures/config/step_2.json +++ /dev/null @@ -1,124 +0,0 @@ -{ - "schedule": { - "ScheduleExpression": "rate(1 minute)" - }, - "datasetGroup": { - "name": "unit_test_only_datasetgroup" - }, - "datasets": { - "users": { - "dataset": { - "name": "unit_test_only_users" - }, - "schema": { - "name": "unit_test_only_users_schema", - "schema": { - "type": "record", - "name": "users", - "namespace": "com.amazonaws.personalize.schema", - "fields": [ - { - "name": "USER_ID", - "type": "string" - }, - { - "name": "AGE", - "type": "int" - }, - { - "name": "GENDER", - "type": "string", - "categorical": true - } - ] - } - } - }, - "interactions": { - "dataset": { - "name": "unit_test_only_interactions" - }, - "schema": { - "name": "unit_test_only_interactions_schema", - "schema": { - "type": "record", - "name": "interactions", - "namespace": "com.amazonaws.personalize.schema", - "fields": [ - { - "name": "ITEM_ID", - "type": "string" - }, - { - 
"name": "USER_ID", - "type": "string" - }, - { - "name": "TIMESTAMP", - "type": "long" - }, - { - "name": "EVENT_TYPE", - "type": "string" - }, - { - "name": "EVENT_VALUE", - "type": "float" - } - ] - } - } - } - }, - "eventTracker": { - "name": "unit_test_event_tracker" - }, - "filters": [ - { - "name": "clicked-or-streamed", - "filterExpression": "INCLUDE ItemID WHERE Interactions.EVENT_TYPE in (\"click\", \"stream\")" - }, - { - "name": "interacted", - "filterExpression": "INCLUDE ItemID WHERE Interactions.EVENT_TYPE in (\"*\")" - } - ], - "solutions": [ - { - "solution": { - "name": "unit_test_sims", - "recipeArn": "arn:aws:personalize:::recipe/aws-sims" - } - }, - { - "solution": { - "name": "unit_test_popularity_count", - "recipeArn": "arn:aws:personalize:::recipe/aws-popularity-count" - }, - "solutionVersions": [ - { - "solutionVersion": {} - } - ] - }, - { - "solution": { - "name": "unit_test_personalized_ranking", - "recipeArn": "arn:aws:personalize:::recipe/aws-personalized-ranking" - }, - "solutionVersions": [ - { - "solutionVersion": {}, - "campaigns": [ - { - "campaign": { - "name": "unit_test_personalized_ranking_campaign", - "minProvisionedTPS": 1 - } - } - ] - } - ] - } - ] -} \ No newline at end of file diff --git a/source/tests/fixtures/config/step_4.json b/source/tests/fixtures/config/step_4.json deleted file mode 100644 index e7582dd..0000000 --- a/source/tests/fixtures/config/step_4.json +++ /dev/null @@ -1,130 +0,0 @@ -{ - "schedule": { - "ScheduleExpression": "rate(6 hours)" - }, - "datasetGroup": { - "name": "unit_test_only_datasetgroup" - }, - "datasets": { - "users": { - "dataset": { - "name": "unit_test_only_users" - }, - "schema": { - "name": "unit_test_only_users_schema", - "schema": { - "type": "record", - "name": "users", - "namespace": "com.amazonaws.personalize.schema", - "fields": [ - { - "name": "USER_ID", - "type": "string" - }, - { - "name": "AGE", - "type": "int" - }, - { - "name": "GENDER", - "type": "string", - "categorical": true - } - ] - } - } - }, - "interactions": { - "dataset": { - "name": "unit_test_only_interactions" - }, - "schema": { - "name": "unit_test_only_interactions_schema", - "schema": { - "type": "record", - "name": "interactions", - "namespace": "com.amazonaws.personalize.schema", - "fields": [ - { - "name": "ITEM_ID", - "type": "string" - }, - { - "name": "USER_ID", - "type": "string" - }, - { - "name": "TIMESTAMP", - "type": "long" - }, - { - "name": "EVENT_TYPE", - "type": "string" - }, - { - "name": "EVENT_VALUE", - "type": "float" - } - ] - } - } - } - }, - "eventTracker": { - "name": "unit_test_event_tracker" - }, - "filters": [ - { - "name": "clicked-or-streamed", - "filterExpression": "INCLUDE ItemID WHERE Interactions.EVENT_TYPE in (\"click\", \"stream\")" - }, - { - "name": "interacted", - "filterExpression": "INCLUDE ItemID WHERE Interactions.EVENT_TYPE in (\"*\")" - } - ], - "solutions": [ - { - "solution": { - "name": "unit_test_sims", - "recipeArn": "arn:aws:personalize:::recipe/aws-sims" - } - }, - { - "solution": { - "name": "unit_test_popularity_count", - "recipeArn": "arn:aws:personalize:::recipe/aws-popularity-count" - }, - "solutionVersions": [ - { - "solutionVersion": {} - } - ] - }, - { - "solution": { - "name": "unit_test_personalized_ranking", - "recipeArn": "arn:aws:personalize:::recipe/aws-personalized-ranking" - }, - "solutionVersions": [ - { - "solutionVersion": {}, - "campaigns": [ - { - "campaign": { - "name": "unit_test_personalized_ranking_campaign", - "minProvisionedTPS": 1 - } - } - ], - 
"batchInferenceJobs": [ - { - "batchInferenceJob": { - } - } - ] - } - ] - } - ] -} \ No newline at end of file diff --git a/source/tests/fixtures/config/users.csv b/source/tests/fixtures/config/users.csv index 7246776..f00348c 100644 --- a/source/tests/fixtures/config/users.csv +++ b/source/tests/fixtures/config/users.csv @@ -1,3 +1,4 @@ +USER_ID,AGE,GENDER 0,71,F 1,67,M 2,25,F diff --git a/source/tests/test_notifies.py b/source/tests/test_notifies.py new file mode 100644 index 0000000..0b5af84 --- /dev/null +++ b/source/tests/test_notifies.py @@ -0,0 +1,112 @@ +# ###################################################################################################################### +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # +# # +# Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance # +# with the License. You may obtain a copy of the License at # +# # +# http://www.apache.org/licenses/LICENSE-2.0 # +# # +# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed # +# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for # +# the specific language governing permissions and limitations under the License. # +# ###################################################################################################################### +from datetime import datetime, timedelta +from typing import Dict + +import pytest + +from shared.notifiers.base import Notifier +from shared.resource import Resource, Campaign + + +class NotifierName(Notifier): + def notify_create(self, status: str, resource: Resource, result: Dict) -> None: + pass + + def notify_complete(self, status: str, resource: Resource, result: Dict): + pass + + +@pytest.fixture +def notifier(): + return NotifierName() + + +def test_notify_name(notifier): + assert notifier.name == "NotifierName" + + +def test_set_cutoff(notifier): + now = datetime.now() + notifier.set_cutoff(now) + assert notifier.cutoff == now + + +@pytest.mark.parametrize( + "resource,result,is_create", + [ + [Resource(), {"resourceArn": "arn"}, True], + [Resource(), {"resource": {"resourceArn": "arn"}}, False], + ], +) +def test_is_create(notifier, resource, result, is_create): + assert notifier._is_create(resource, result) == is_create + + +@pytest.mark.parametrize( + "resource,result,is_stable", + [ + [Resource(), {"resource": {}}, False], + [ + Resource(), + { + "resource": { + "lastUpdatedDateTime": datetime.now(), + "creationDateTime": datetime.now(), + } + }, + False, + ], + [ + Campaign(), + { + "campaign": { + "lastUpdatedDateTime": datetime.now(), + "creationDateTime": datetime.now(), + "status": "ACTIVE", + "latestCampaignUpdate": {"status": "UPDATING"}, + } + }, + False, + ], + [ + Resource(), + { + "resource": { + "lastUpdatedDateTime": datetime.now(), + "creationDateTime": datetime.now(), + } + }, + False, + ], + ], +) +def test_is_stable(notifier, resource, result, is_stable): + notifier.set_cutoff(datetime.now() - timedelta(seconds=100)) + assert notifier._resource_stable(resource, result) == is_stable + + +@pytest.mark.parametrize( + "resource,result", + [ + [Resource(), {"resourceArn": "ARN"}], + [Resource(), {"resource": {"resourceArn": "ARN"}}], + ], +) +def test_get_resource_arn(notifier, resource, result): + assert notifier.get_resource_arn(resource, result) == "ARN" + + +def test_get_resource_value_error(notifier): + with pytest.raises(ValueError): + 
notifier.get_resource_arn(Resource(), {}) diff --git a/source/tests/test_scheduler.py b/source/tests/test_scheduler.py index 6bcb6ee..acf347d 100644 --- a/source/tests/test_scheduler.py +++ b/source/tests/test_scheduler.py @@ -19,15 +19,18 @@ from moto.dynamodb2 import mock_dynamodb2 from moto.stepfunctions import mock_stepfunctions -from aws_lambda.scheduler.handler import ( +from aws_solutions.scheduler.cdk.aws_lambda.scheduler.handler import ( create_schedule, read_schedule, update_schedule, delete_schedule, ) -from shared.scheduler.base import Scheduler -from shared.scheduler.schedule import Schedule, ScheduleError -from shared.scheduler.task import Task +from aws_solutions.scheduler.common import ( + Scheduler, + Schedule, + ScheduleError, + Task, +) @pytest.fixture @@ -119,7 +122,9 @@ def scheduler(scheduler_table, scheduler_stepfunctions, mocker): _scheduler = Scheduler() _scheduler.sfn_cli = sfn_cli _scheduler.stepfunction = sfn_arn - mocker.patch("aws_lambda.scheduler.handler.scheduler", _scheduler) + mocker.patch( + "aws_solutions.scheduler.cdk.aws_lambda.scheduler.handler.scheduler", _scheduler + ) yield _scheduler diff --git a/source/tests/test_scheduler_cli.py b/source/tests/test_scheduler_cli.py new file mode 100644 index 0000000..1151762 --- /dev/null +++ b/source/tests/test_scheduler_cli.py @@ -0,0 +1,108 @@ +# ###################################################################################################################### +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # +# # +# Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance # +# with the License. You may obtain a copy of the License at # +# # +# http://www.apache.org/licenses/LICENSE-2.0 # +# # +# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed # +# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for # +# the specific language governing permissions and limitations under the License. 
# +# ###################################################################################################################### + +import pytest +import json +import boto3 +from moto import mock_cloudformation +import os +from unittest import mock + +from aws_solutions.scheduler.common.scripts.scheduler_cli import ( + get_stack_output_value, + get_stack_tag_value, + setup_cli_env, + get_payload, +) + + +@pytest.fixture +def stack(): + template = { + "AWSTemplateFormatVersion": "2010-09-09", + "Resources": { + "QueueResource": { + "Type": "AWS::SQS::Queue", + "Properties": {"QueueName": "my-queue"}, + } + }, + "Outputs": { + "QueueOutput": {"Description": "The Queue Name", "Value": "my-queue"} + }, + } + with mock_cloudformation(): + cli = boto3.client("cloudformation") + cli.create_stack( + StackName="TestStack", + TemplateBody=json.dumps(template), + Tags=[ + { + "Key": "TestTag", + "Value": "TestValue", + }, + {"Key": "SOLUTION_ID", "Value": "SOLUTION_ID_VALUE"}, + {"Key": "SOLUTION_VERSION", "Value": "SOLUTION_VERSION_VALUE"}, + ], + ) + yield boto3.resource("cloudformation").Stack("TestStack") + + +def test_get_stack_output_value(stack): + assert get_stack_output_value(stack, "QueueOutput") == "my-queue" + + +def test_get_stack_output_value_not_present(stack): + with pytest.raises(ValueError): + get_stack_output_value(stack, "missing") + + +def test_get_stack_tag_value(stack): + assert get_stack_tag_value(stack, "TestTag") == "TestValue" + + +def test_get_stack_tag_value_not_present(stack): + with pytest.raises(ValueError): + get_stack_tag_value(stack, "missing") + + +def test_setup_cli_env(stack): + with mock.patch.dict(os.environ, {}): + setup_cli_env(stack, "eu-central-1") + assert os.environ.get("AWS_REGION") == "eu-central-1" + assert os.environ.get("SOLUTION_ID") == "SOLUTION_ID_VALUE" + assert os.environ.get("SOLUTION_VERSION") == "SOLUTION_VERSION_VALUE" + + +def test_get_payload(): + payload = get_payload( + dataset_group="dsg", + import_schedule="cron(* * * * ? *)", + update_schedule=[ + ("a", "cron(0 * * * ? *)"), + ("b", "cron(1 * * * ? *)"), + ], + full_schedule=[("c", "cron(3 * * * ? *)"), ("d", "cron(4 * * * ? *)")], + ) + + assert payload == { + "datasetGroupName": "dsg", + "schedules": { + "import": "cron(* * * * ? *)", + "solutions": { + "a": {"update": "cron(0 * * * ? *)"}, + "b": {"update": "cron(1 * * * ? *)"}, + "c": {"full": "cron(3 * * * ? *)"}, + "d": {"full": "cron(4 * * * ? *)"}, + }, + }, + }
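
The get_payload helper tested above assembles the same event shape that the new create_config handler consumes (see test_create_config_handler.py earlier in this changeset). A usage sketch follows; the solution and schedule values are illustrative only, and the expected output shape is taken directly from test_get_payload above.

from aws_solutions.scheduler.common.scripts.scheduler_cli import get_payload

# Illustrative names and cron expressions; the output shape mirrors test_get_payload.
payload = get_payload(
    dataset_group="dsg",
    import_schedule="cron(0 */6 * * ? *)",
    update_schedule=[("dsgsolution", "cron(0 * * * ? *)")],
    full_schedule=[("dsgbatchsolution", "cron(0 0 ? * 1 *)")],
)

assert payload == {
    "datasetGroupName": "dsg",
    "schedules": {
        "import": "cron(0 */6 * * ? *)",
        "solutions": {
            "dsgsolution": {"update": "cron(0 * * * ? *)"},
            "dsgbatchsolution": {"full": "cron(0 0 ? * 1 *)"},
        },
    },
}

# This dictionary can then be passed as the event to
# aws_lambda.create_config.handler.lambda_handler, as exercised in
# test_create_config_handler.py.
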