diff --git a/CHANGELOG.md b/CHANGELOG.md
index 214a545..19fd6c8 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -4,6 +4,16 @@ All notable changes to this project will be documented in this file.
 The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
 
+## [1.0.1] - 2021-10-01
+### Added
+- The solution now exports the Amazon SNS Topic ARN as `SNSTopicArn`.
+
+### Changed
+- The SNS message format now depends on the subscription protocol: Amazon SQS and Email-JSON endpoints receive a JSON
+payload, while the message sent to subscribed Email endpoints is unchanged.
+- The Amazon CloudWatch dashboard deployed by the solution is replaced with a dashboard whose name includes the stack's
+region.
+
 ## [1.0.0] - 2021-09-23
 ### Added
 - All files, initial version
diff --git a/README.md b/README.md
index 5b2bcc9..83d846c 100644
--- a/README.md
+++ b/README.md
@@ -377,7 +377,7 @@ After running the command, you can deploy the template:
 
 ## Collection of operational metrics
 
 This solution collects anonymous operational metrics to help AWS improve the quality of features of the solution.
-For more information, including how to disable this capability, please see the [implementation guide](https://aws.amazon.com/solutions/implementations/maintaining-personalized-experiences-with-ml).
+For more information, including how to disable this capability, please see the [implementation guide](https://docs.aws.amazon.com/solutions/latest/maintaining-personalized-experiences-with-ml/collection-of-operational-metrics.html).
 
 ***
diff --git a/source/aws_lambda/s3_event/handler.py b/source/aws_lambda/s3_event/handler.py
index e40254b..3ed5c86 100644
--- a/source/aws_lambda/s3_event/handler.py
+++ b/source/aws_lambda/s3_event/handler.py
@@ -10,9 +10,8 @@
 #  on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for  #
 #  the specific language governing permissions and limitations under the License.                                     #
 # ######################################################################################################################
-
+import json
 import os
-from typing import List
 
 from aws_lambda_powertools import Logger, Tracer, Metrics
 from aws_lambda_powertools.metrics import MetricUnit
@@ -43,21 +42,50 @@ def solution_name() -> str:
     return os.environ["SOLUTION_NAME"]
 
 
-def send_configuration_error(errors: List[str]):
+def send_configuration_error(configuration: Configuration):
+    errors = configuration.errors
     sns = get_service_client("sns")
+    dataset_group = configuration.dataset_group
+
     subject = f"{solution_name()} Notifications"
-    message = "There were errors detected when reading a personalization job configuration file:\n\n"
-    for error in errors:
-        logger.error(f"Personalization job configuration error: {error}")
-        message += f" - {error}\n"
-    message += "\nPlease correct these errors and upload the configuration again."
+    def build_default_message():
+        return f"The personalization workflow for {configuration.dataset_group} completed with errors."
+ + def build_json_message(): + return json.dumps( + { + "datasetGroup": dataset_group, + "status": "UPDATE FAILED", + "summary": "There were errors detected when reading a personalization job configuration file", + "description": [error for error in errors], + } + ) + + def build_long_message(): + message = "There were errors detected when reading a personalization job configuration file:\n\n" + for error in errors: + logger.error(f"Personalization job configuration error: {error}") + message += f" - {error}\n" + message += "\nPlease correct these errors and upload the configuration again." + return message + logger.error("publishing configuration error to SQS") sns.publish( TopicArn=topic_arn(), - Message=message, + Message=json.dumps( + { + "default": build_default_message(), + "sms": build_default_message(), + "email": build_long_message(), + "email-json": build_json_message(), + "sqs": build_json_message(), + } + ), + MessageStructure="json", Subject=subject, ) + logger.error("published configuration error to SQS") @metrics.log_metrics @@ -86,7 +114,7 @@ def lambda_handler(event, context): configuration = Configuration() configuration.load(config_text) if configuration.errors: - send_configuration_error(configuration.errors) + send_configuration_error(configuration) metrics.add_metric( "ConfigurationsProcessedFailures", unit=MetricUnit.Count, value=1 ) @@ -98,7 +126,7 @@ def lambda_handler(event, context): metrics.add_metric( "ConfigurationsProcessedFailures", unit=MetricUnit.Count, value=1 ) - send_configuration_error(configuration.errors) + send_configuration_error(configuration) else: config = configuration.config_dict config = set_bucket(config, bucket, key) diff --git a/source/aws_lambda/shared/personalize_service.py b/source/aws_lambda/shared/personalize_service.py index f8a83ed..42f8729 100644 --- a/source/aws_lambda/shared/personalize_service.py +++ b/source/aws_lambda/shared/personalize_service.py @@ -609,6 +609,7 @@ class Configuration: def __init__(self): self._configuration_errors = [] self.config_dict = {} + self.dataset_group = "UNKNOWN" def load(self, content: Union[Path, str]): if isinstance(content, Path): @@ -670,6 +671,8 @@ def _validate_dataset_group(self, path="datasetGroup.serviceConfig"): ) else: self._validate_resource(DatasetGroup(), dataset_group) + if isinstance(dataset_group, dict): + self.dataset_group = dataset_group.get("name", self.dataset_group) def _validate_event_tracker(self, path="eventTracker.serviceConfig"): event_tracker = jmespath.search(path, self.config_dict) diff --git a/source/aws_lambda/shared/scheduler/base.py b/source/aws_lambda/shared/scheduler/base.py index 1540f40..df8946a 100644 --- a/source/aws_lambda/shared/scheduler/base.py +++ b/source/aws_lambda/shared/scheduler/base.py @@ -139,7 +139,10 @@ def list(self) -> Generator[str, None, None]: :return: Generator[str] of the schedules (by name) """ done = False - scan_kwargs = {"ProjectionExpression": TASK_PK} + scan_kwargs = { + "ProjectionExpression": "#name", + "ExpressionAttributeNames": {"#name": TASK_PK}, + } start_key = None discovered = set() while not done: diff --git a/source/aws_lambda/shared/sfn_middleware.py b/source/aws_lambda/shared/sfn_middleware.py index 41868f2..e61af68 100644 --- a/source/aws_lambda/shared/sfn_middleware.py +++ b/source/aws_lambda/shared/sfn_middleware.py @@ -212,7 +212,11 @@ def check_status( # NOSONAR - allow higher complexity expected_value = expected_value.lower() # some parameters don't require checking: - if self.resource == "datasetImportJob" 
and expected_key == "jobName": + if self.resource == "datasetImportJob" and expected_key in { + "jobName", + "dataSource", + "roleArn", + }: continue if self.resource == "batchInferenceJob" and expected_key in { "jobName", diff --git a/source/aws_lambda/sns_notification/handler.py b/source/aws_lambda/sns_notification/handler.py index a92a267..d4bf2d2 100644 --- a/source/aws_lambda/sns_notification/handler.py +++ b/source/aws_lambda/sns_notification/handler.py @@ -67,6 +67,26 @@ def __init__(self, event: Dict, context: LambdaContext): metrics.add_metric("JobSuccess", unit=MetricUnit.Count, value=1) self.message = self._build_success_message() + self.default = self._build_default_message() + self.sms = self._build_sms_message() + self.json = self._build_json_message() + + def _build_json_message(self): + return json.dumps( + { + "datasetGroup": self.dataset_group, + "status": "UPDATE FAILED" if self.error else "UPDATE COMPLETE", + "summary": self._build_default_message(), + "description": self.message, + } + ) + + def _build_default_message(self) -> str: + return f"The personalization workflow for {self.dataset_group} completed {'with errors' if self.error else 'successfully'}" + + def _build_sms_message(self) -> str: + return self._build_default_message() + def _build_error_message(self) -> str: """ Build the error message @@ -116,12 +136,21 @@ def lambda_handler(event, context): :return: None """ sns = get_service_client("sns") - message = MessageBuilder(event, context).message + message_builder = MessageBuilder(event, context) subject = f"{solution_name()} Notifications" logger.info("publishing message for event", extra={"event": event}) sns.publish( TopicArn=topic_arn(), - Message=message, + Message=json.dumps( + { + "default": message_builder.default, + "sms": message_builder.sms, + "email": message_builder.message, + "email-json": message_builder.json, + "sqs": message_builder.json, + } + ), + MessageStructure="json", Subject=subject, ) diff --git a/source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/__init__.py b/source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/__init__.py index 24aae9c..6a7627e 100644 --- a/source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/__init__.py +++ b/source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/__init__.py @@ -29,5 +29,13 @@ class CDKSolution: """ def __init__(self, cdk_json_path: Path, qualifier="hnb659fds"): + self.qualifier = qualifier self.context = SolutionContext(cdk_json_path=cdk_json_path) - self.synthesizer = SolutionStackSubstitions(qualifier=qualifier) + self.synthesizer = SolutionStackSubstitions(qualifier=self.qualifier) + + def reset(self) -> None: + """ + Get a new synthesizer for this CDKSolution - useful for testing + :return: None + """ + self.synthesizer = SolutionStackSubstitions(qualifier=self.qualifier) diff --git a/source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/interfaces.py b/source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/interfaces.py index c3a7a2e..5321524 100644 --- a/source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/interfaces.py +++ b/source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/interfaces.py @@ -77,9 +77,12 @@ def metadata(self) -> dict: return self._metadata def _get_metadata(self) -> dict: - parameter_groups = list( - set([parameter.group for parameter in self._parameters]) - ) + pgs = set() + parameter_groups = [ + p.group + for p in self._parameters + if p.group not in pgs and not pgs.add(p.group) + ] metadata = { 
"AWS::CloudFormation::Interface": { "ParameterGroups": [ diff --git a/source/cdk_solution_helper_py/requirements-dev.txt b/source/cdk_solution_helper_py/requirements-dev.txt index ac24f7b..6ee358c 100644 --- a/source/cdk_solution_helper_py/requirements-dev.txt +++ b/source/cdk_solution_helper_py/requirements-dev.txt @@ -1,5 +1,5 @@ -aws-cdk.core>=1.120.0 -aws-cdk.aws_lambda>=1.120.0 +aws-cdk.core>=1.123.0 +aws-cdk.aws_lambda>=1.123.0 black boto3>=1.17.49 requests>=2.24.0 diff --git a/source/infrastructure/cdk.json b/source/infrastructure/cdk.json index cd34618..56e85b6 100644 --- a/source/infrastructure/cdk.json +++ b/source/infrastructure/cdk.json @@ -3,7 +3,7 @@ "context": { "SOLUTION_NAME": "Maintaining Personalized Experiences with Machine Learning", "SOLUTION_ID": "SO0170", - "SOLUTION_VERSION": "1.0.0", + "SOLUTION_VERSION": "1.0.1", "@aws-cdk/core:newStyleStackSynthesis": "true", "@aws-cdk/core:enableStackNameDuplicates": "true", "aws-cdk:enableDiffNoFail": "true", diff --git a/source/infrastructure/personalize/cloudwatch/dashboard.py b/source/infrastructure/personalize/cloudwatch/dashboard.py index 51e1159..f0c87d8 100644 --- a/source/infrastructure/personalize/cloudwatch/dashboard.py +++ b/source/infrastructure/personalize/cloudwatch/dashboard.py @@ -43,7 +43,7 @@ def __init__( self.dashboard = cw.Dashboard( self, "PersonalizeDashboard", - dashboard_name=f"PersonalizeSolution-{Aws.STACK_NAME}", + dashboard_name=f"PersonalizeSolution-{Aws.STACK_NAME}-{Aws.REGION}", period_override=cw.PeriodOverride.AUTO, start="-PT1D", ) diff --git a/source/infrastructure/personalize/stack.py b/source/infrastructure/personalize/stack.py index a28901c..4dd3320 100644 --- a/source/infrastructure/personalize/stack.py +++ b/source/infrastructure/personalize/stack.py @@ -68,6 +68,25 @@ def __init__( super().__init__(scope, construct_id, *args, **kwargs) # CloudFormation Parameters + self.email = cdk.CfnParameter( + self, + id="Email", + type="String", + description="Email to notify with personalize workflow results", + default="", + max_length=50, + allowed_pattern=r"(^[a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\.[a-zA-Z0-9-.]+$|^$)", + constraint_description="Must be a valid email address or blank", + ) + self.solutions_template_options.add_parameter( + self.email, "Email", "Solution Configuration" + ) + self.email_provided = CfnCondition( + self, + "EmailProvided", + expression=Fn.condition_not(Fn.condition_equals(self.email, "")), + ) + self.personalize_kms_key_arn = cdk.CfnParameter( self, id="PersonalizeKmsKeyArn", @@ -88,25 +107,6 @@ def __init__( ), ) - self.email = cdk.CfnParameter( - self, - id="Email", - type="String", - description="Email to notify with personalize workflow results", - default="", - max_length=50, - allowed_pattern=r"(^[a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\.[a-zA-Z0-9-.]+$|^$)", - constraint_description="Must be a valid email address or blank", - ) - self.solutions_template_options.add_parameter( - self.email, "Email", "Solution Configuration" - ) - self.email_provided = CfnCondition( - self, - "EmailProvided", - expression=Fn.condition_not(Fn.condition_equals(self.email, "")), - ) - # layers layer_powertools = PowertoolsLayer.get_or_create(self) layer_solutions = SolutionsLayer.get_or_create(self) @@ -413,3 +413,9 @@ def __init__( value=self.dashboard.name, export_name=f"{Aws.STACK_NAME}-Dashboard", ) + cdk.CfnOutput( + self, + "SNSTopicArn", + value=notifications.topic.topic_arn, + export_name=f"{Aws.STACK_NAME}-SNSTopicArn", + ) diff --git a/source/infrastructure/setup.py 
b/source/infrastructure/setup.py index 8ed4d70..790a2f6 100644 --- a/source/infrastructure/setup.py +++ b/source/infrastructure/setup.py @@ -12,6 +12,7 @@ # ###################################################################################################################### +import json from pathlib import Path import setuptools @@ -20,10 +21,14 @@ with open(readme_path) as fp: long_description = fp.read() +cdk_json_path = Path(__file__).resolve().parent / "cdk.json" +cdk_json = json.loads(cdk_json_path.read_text()) +VERSION = cdk_json["context"]["SOLUTION_VERSION"] + setuptools.setup( name="infrastructure", - version="1.0.0", + version=VERSION, description="AWS CDK stack to deploy the AWS MLOps for Amazon Personalize solution.", long_description=long_description, long_description_content_type="text/markdown", diff --git a/source/requirements-dev.txt b/source/requirements-dev.txt index e5fe524..8426e0a 100644 --- a/source/requirements-dev.txt +++ b/source/requirements-dev.txt @@ -1,9 +1,9 @@ avro==1.10.2 black boto3 -aws_cdk.core>=1.120.0 -aws_cdk.aws_stepfunctions_tasks>=1.120.0 -aws_solutions_constructs.aws_lambda_sns>=1.120.0 +aws_cdk.core>=1.123.0 +aws_cdk.aws_stepfunctions_tasks>=1.123.0 +aws_solutions_constructs.aws_lambda_sns>=1.123.0 requests==2.24.0 crhelper==2.0.6 cronex==0.1.3.1 diff --git a/source/tests/aws_lambda/sns_notification/test_sns_notification.py b/source/tests/aws_lambda/sns_notification/test_sns_notification.py index a20be01..beddc2a 100644 --- a/source/tests/aws_lambda/sns_notification/test_sns_notification.py +++ b/source/tests/aws_lambda/sns_notification/test_sns_notification.py @@ -10,23 +10,40 @@ # on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for # # the specific language governing permissions and limitations under the License. 
# # ###################################################################################################################### +import json import os from collections import namedtuple +import boto3 import pytest -from botocore.stub import Stubber +from moto import mock_sns, mock_sqs from aws_lambda.sns_notification.handler import lambda_handler -from aws_solutions.core import get_service_client TRACE_ID = "1-57f5498f-d91047849216d0f2ea3b6442" @pytest.fixture -def sns_stubber(): - sns_client = get_service_client("sns") - with Stubber(sns_client) as stubber: - yield stubber +def sqs_mock(): + topic_arn = os.environ.get("SNS_TOPIC_ARN") + topic_name = topic_arn.split(":")[-1] + + with mock_sqs(): + with mock_sns(): + + cli = boto3.client("sns") + cli.create_topic(Name=topic_name) + + sqs = boto3.client("sqs") + sqs.create_queue(QueueName="TestQueue") + + cli.subscribe( + TopicArn=topic_arn, + Protocol="sqs", + Endpoint=f"arn:aws:sqs:us-east-1:{'1'*12}:TestQueue", + ) + + yield sqs @pytest.fixture @@ -37,23 +54,6 @@ def trace_enabled(): DATASET_GROUP_NAME = "DATASET_GROUP_NAME" -EXPECTED_MESSAGE = """ -There was an error running the personalization job for dataset group DATASET_GROUP_NAME - -Message: ERROR_MESSAGE - -""".lstrip( - "\n" -) -EXPECTED_MESSAGE_TRACE = f""" -There was an error running the personalization job for dataset group DATASET_GROUP_NAME - -Message: ERROR_MESSAGE - -Traces: https://console.aws.amazon.com/xray/home?region=us-east-1#/traces/{TRACE_ID} -""".strip( - "\n" -) @pytest.fixture @@ -62,18 +62,8 @@ def context(): return ctx(f"arn:aws:lambda:us-east-1:{'1' * 12}:function:my-function:1") -def test_sns_notification(sns_stubber, context): +def test_sns_notification(context, sqs_mock): """Test without traces""" - sns_stubber.add_response( - "publish", - {}, - expected_params={ - "TopicArn": os.environ.get("SNS_TOPIC_ARN"), - "Subject": "Maintaining Personalized Experiences with Machine Learning Notifications", - "Message": EXPECTED_MESSAGE, - }, - ) - lambda_handler( { "datasetGroup": DATASET_GROUP_NAME, @@ -84,19 +74,32 @@ def test_sns_notification(sns_stubber, context): context, ) + url = sqs_mock.get_queue_url(QueueName="TestQueue")["QueueUrl"] + msg = json.loads( + json.loads( + sqs_mock.receive_message(QueueUrl=url, MaxNumberOfMessages=1,)["Messages"][ + 0 + ]["Body"] + )["Message"] + ) -def test_sns_notification_trace(sns_stubber, trace_enabled, context): - """Test with traces""" - sns_stubber.add_response( - "publish", - {}, - expected_params={ - "TopicArn": os.environ.get("SNS_TOPIC_ARN"), - "Subject": "Maintaining Personalized Experiences with Machine Learning Notifications", - "Message": EXPECTED_MESSAGE_TRACE, - }, + error_default = ( + f"The personalization workflow for {DATASET_GROUP_NAME} completed with errors" ) + error_json = { + "datasetGroup": DATASET_GROUP_NAME, + "status": "UPDATE FAILED", + "summary": f"The personalization workflow for {DATASET_GROUP_NAME} completed with errors", + "description": f"There was an error running the personalization job for dataset group {DATASET_GROUP_NAME}\n\nMessage: ERROR_MESSAGE\n\n", + } + + assert msg["default"] == error_default + assert msg["sms"] == error_default + assert json.loads(msg["sqs"]) == error_json + +def test_sns_notification_trace(sqs_mock, trace_enabled, context): + """Test with traces""" lambda_handler( { "datasetGroup": DATASET_GROUP_NAME, @@ -106,3 +109,26 @@ def test_sns_notification_trace(sns_stubber, trace_enabled, context): }, context, ) + + url = 
sqs_mock.get_queue_url(QueueName="TestQueue")["QueueUrl"] + msg = json.loads( + json.loads( + sqs_mock.receive_message(QueueUrl=url, MaxNumberOfMessages=1,)["Messages"][ + 0 + ]["Body"] + )["Message"] + ) + + error_default = ( + f"The personalization workflow for {DATASET_GROUP_NAME} completed with errors" + ) + error_json = { + "datasetGroup": f"{DATASET_GROUP_NAME}", + "status": "UPDATE FAILED", + "summary": f"The personalization workflow for {DATASET_GROUP_NAME} completed with errors", + "description": f"There was an error running the personalization job for dataset group {DATASET_GROUP_NAME}\n\nMessage: ERROR_MESSAGE\n\nTraces: https://console.aws.amazon.com/xray/home?region=us-east-1#/traces/1-57f5498f-d91047849216d0f2ea3b6442", + } + + assert msg["default"] == error_default + assert msg["sms"] == error_default + assert json.loads(msg["sqs"]) == error_json diff --git a/source/tests/cdk_solution_helper/test_stack.py b/source/tests/cdk_solution_helper/test_stack.py index 48e9e18..2099b5e 100644 --- a/source/tests/cdk_solution_helper/test_stack.py +++ b/source/tests/cdk_solution_helper/test_stack.py @@ -14,7 +14,7 @@ import re import pytest -from aws_cdk.core import App +from aws_cdk.core import App, CfnParameter from aws_solutions.cdk.stack import ( SolutionStack, @@ -113,3 +113,39 @@ def test_solution_stack(): ] } } + + +@pytest.mark.parametrize("execution_number", range(5)) +def test_stack_parameter_ordering(execution_number): + app = App(context={"SOLUTION_ID": "SO0123"}) + stack = SolutionStack(app, "stack", "test stack", "test-stack.template") + + param_1 = CfnParameter(stack, "parameter1") + param_2 = CfnParameter(stack, "parameter2") + + stack.solutions_template_options.add_parameter(param_1, "parameter 1", "group 1") + stack.solutions_template_options.add_parameter(param_2, "parameter 2", "group 2") + + template = app.synth().stacks[0].template + + assert ( + template["Metadata"]["AWS::CloudFormation::Interface"]["ParameterGroups"][0][ + "Label" + ]["default"] + == "group 1" + ) + assert template["Metadata"]["AWS::CloudFormation::Interface"]["ParameterGroups"][0][ + "Parameters" + ] == ["parameter1"] + assert ( + template["Metadata"]["AWS::CloudFormation::Interface"]["ParameterLabels"][ + "parameter1" + ]["default"] + == "parameter 1" + ) + assert ( + template["Metadata"]["AWS::CloudFormation::Interface"]["ParameterLabels"][ + "parameter2" + ]["default"] + == "parameter 2" + ) diff --git a/source/tests/test_deploy.py b/source/tests/test_deploy.py index 7d5b678..f67e949 100644 --- a/source/tests/test_deploy.py +++ b/source/tests/test_deploy.py @@ -25,7 +25,9 @@ def cdk_entrypoint(): def test_deploy(solution, cdk_entrypoint): - from deploy import build_app + from deploy import build_app, solution as cdk_solution + + cdk_solution.reset() extra_context = "EXTRA_CONTEXT" source_bucket = "SOURCE_BUCKET" @@ -40,3 +42,40 @@ def test_deploy(solution, cdk_entrypoint): "Yes" == stack.template["Mappings"]["Solution"]["Data"]["SendAnonymousUsageData"] ) + assert stack.template["Outputs"]["PersonalizeBucketName"] + assert stack.template["Outputs"]["SchedulerTableName"] + assert stack.template["Outputs"]["SNSTopicArn"] + + +def test_parameters(solution, cdk_entrypoint): + """Ensure parameter ordering is kept""" + from deploy import build_app, solution as cdk_solution + + cdk_solution.reset() + + extra_context = "EXTRA_CONTEXT" + source_bucket = "SOURCE_BUCKET" + synth = build_app({extra_context: extra_context, "BUCKET_NAME": source_bucket}) + stack = 
synth.get_stack("PersonalizeStack").template + + assert ( + stack["Metadata"]["AWS::CloudFormation::Interface"]["ParameterGroups"][0][ + "Label" + ]["default"] + == "Solution Configuration" + ) + assert stack["Metadata"]["AWS::CloudFormation::Interface"]["ParameterGroups"][0][ + "Parameters" + ] == ["Email"] + assert ( + stack["Metadata"]["AWS::CloudFormation::Interface"]["ParameterLabels"]["Email"][ + "default" + ] + == "Email" + ) + assert ( + stack["Metadata"]["AWS::CloudFormation::Interface"]["ParameterLabels"][ + "PersonalizeKmsKeyArn" + ]["default"] + == "(Optional) KMS key ARN used to encrypt Datasets managed by Amazon Personalize" + )