diff --git a/CHANGELOG.md b/CHANGELOG.md index 71ba6f0..4770fd2 100755 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,11 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). +## [2.0.1] - 2023-10-25 +### Added +- Update urllib3 to v1.26.18 +- Fix operational policy permissions + ## [2.0.0] - 2023-10-05 ### Added diff --git a/source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/aws_lambda/cfn_custom_resources/solutions_metrics/src/custom_resources/requirements.txt b/source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/aws_lambda/cfn_custom_resources/solutions_metrics/src/custom_resources/requirements.txt index 73c0b24..4312b51 100644 --- a/source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/aws_lambda/cfn_custom_resources/solutions_metrics/src/custom_resources/requirements.txt +++ b/source/cdk_solution_helper_py/helpers_cdk/aws_solutions/cdk/aws_lambda/cfn_custom_resources/solutions_metrics/src/custom_resources/requirements.txt @@ -1,3 +1,3 @@ requests>=2.28.1 -urllib3<2.0.0 +urllib3>=1.26.18, <2.0.0 crhelper==2.0.6 \ No newline at end of file diff --git a/source/infrastructure/amc_insights/amc_insights_stack.py b/source/infrastructure/amc_insights/amc_insights_stack.py index 37abcd6..47ff049 100644 --- a/source/infrastructure/amc_insights/amc_insights_stack.py +++ b/source/infrastructure/amc_insights/amc_insights_stack.py @@ -223,6 +223,8 @@ def __init__(self, scope: Construct, id: str, *args: Any, **kwargs: Any) -> None wfm_resources=wfm_construct, pmn_resources=pmn_construct, foundations_resources=foundations_construct, + insights_pipeline_resources=insights_pipeline_construct, + amc_dataset_resources=amc_dataset_construct, creating_resources_condition=self._is_deplopying_full_app_condition ) diff --git 
a/source/infrastructure/amc_insights/custom_resource/user_iam/lambdas/create_user_iam.py b/source/infrastructure/amc_insights/custom_resource/user_iam/lambdas/create_user_iam.py index c7a0bad..775c2d5 100644 --- a/source/infrastructure/amc_insights/custom_resource/user_iam/lambdas/create_user_iam.py +++ b/source/infrastructure/amc_insights/custom_resource/user_iam/lambdas/create_user_iam.py @@ -11,12 +11,15 @@ helper = CfnResource() STACK_NAME = os.environ['STACK_NAME'] +APPLICATION_REGION = os.environ['APPLICATION_REGION'] APPLICATION_ACCOUNT = os.environ['APPLICATION_ACCOUNT'] SAGEMAKER_NOTEBOOK = os.environ['SAGEMAKER_NOTEBOOK'] SAGEMAKER_NOTEBOOK_LC = os.environ['SAGEMAKER_NOTEBOOK_LC'] -TPS_INITIALIZE_SM = os.environ['TPS_INITIALIZE_SM'] -WFM_WORKFLOWS_SM = os.environ['WFM_WORKFLOWS_SM'] -WFM_WORKFLOW_EXECUTION_SM = os.environ['WFM_WORKFLOW_EXECUTION_SM'] +TPS_INITIALIZE_SM_NAME = os.environ['TPS_INITIALIZE_SM_NAME'] +WFM_WORKFLOWS_SM_NAME = os.environ['WFM_WORKFLOWS_SM_NAME'] +WFM_WORKFLOW_EXECUTION_SM_NAME = os.environ['WFM_WORKFLOW_EXECUTION_SM_NAME'] +STAGE_A_TRANSFORM_SM_NAME = os.environ['STAGE_A_TRANSFORM_SM_NAME'] +STAGE_B_TRANSFORM_SM_NAME = os.environ['STAGE_B_TRANSFORM_SM_NAME'] DATALAKE_CUSTOMER_TABLE = os.environ['DATALAKE_CUSTOMER_TABLE'] WFM_CUSTOMER_TABLE = os.environ['WFM_CUSTOMER_TABLE'] WFM_WORKFLOWS_TABLE = os.environ['WFM_WORKFLOWS_TABLE'] @@ -40,6 +43,11 @@ ATHENA_BUCKET = os.environ['ATHENA_BUCKET'] ATHENA_BUCKET_KEY = os.environ['ATHENA_BUCKET_KEY'] LAKE_FORMATION_CATALOG = os.environ['LAKE_FORMATION_CATALOG'] +OCTAGON_DATASETS_TABLE_KEY = os.environ['OCTAGON_DATASETS_TABLE_KEY'] +OCTAGON_OBJECT_METADATA_TABLE_KEY = os.environ['OCTAGON_OBJECT_METADATA_TABLE_KEY'] +OCTAGON_PIPELINE_EXECUTION_TABLE_KEY = os.environ['OCTAGON_PIPELINE_EXECUTION_TABLE_KEY'] +OCTAGON_PIPELINES_TABLE_KEY = os.environ['OCTAGON_PIPELINES_TABLE_KEY'] +GLUE_JOB_NAME = os.environ['GLUE_JOB_NAME'] FILE_NAME = 'IAM_POLICY_OPERATE.json' IAM_POLICY_TEMPLATE = { @@ 
-48,11 +56,29 @@ { "Effect": "Allow", "Action": [ + "glue:SearchTables", + "glue:Get*", + "athena:ListNamedQueries", + "athena:GetWorkGroup", + "athena:StartQueryExecution", + "athena:GetQueryExecution", + "athena:GetQueryResults", + "athena:ListQueryExecutions", + "states:ListStateMachines", + "states:DescribeStateMachine", + "logs:DescribeLogGroups", + "lambda:ListFunctions", + "dynamodb:ListTables", + "dynamodb:DescribeTable", + "iam:ListRoles", + "iam:ListUsers", + "lambda:GetAccountSettings", + "events:Describe*", + "s3:ListAllMyBuckets", + "events:List*", "sagemaker:ListNotebookInstances" ], - "Resource": [ - "*" - ] + "Resource": "*" }, { "Effect": "Allow", @@ -64,34 +90,22 @@ SAGEMAKER_NOTEBOOK_LC ] }, - { - "Effect": "Allow", - "Action": [ - "states:ListStateMachines", - "states:DescribeStateMachine" - ], - "Resource": [ - "*" - ] - }, { "Effect": "Allow", "Action": [ "states:*" ], "Resource": [ - f"{TPS_INITIALIZE_SM}:*", - f"{WFM_WORKFLOWS_SM}:*", - f"{WFM_WORKFLOW_EXECUTION_SM}:*" - ] - }, - { - "Effect": "Allow", - "Action": [ - "s3:ListAllMyBuckets" - ], - "Resource": [ - "*" + f"arn:aws:states:{APPLICATION_REGION}:{APPLICATION_ACCOUNT}:stateMachine:{TPS_INITIALIZE_SM_NAME}*", + f"arn:aws:states:{APPLICATION_REGION}:{APPLICATION_ACCOUNT}:execution:{TPS_INITIALIZE_SM_NAME}*", + f"arn:aws:states:{APPLICATION_REGION}:{APPLICATION_ACCOUNT}:stateMachine:{WFM_WORKFLOWS_SM_NAME}*", + f"arn:aws:states:{APPLICATION_REGION}:{APPLICATION_ACCOUNT}:execution:{WFM_WORKFLOWS_SM_NAME}*", + f"arn:aws:states:{APPLICATION_REGION}:{APPLICATION_ACCOUNT}:stateMachine:{WFM_WORKFLOW_EXECUTION_SM_NAME}*", + f"arn:aws:states:{APPLICATION_REGION}:{APPLICATION_ACCOUNT}:execution:{WFM_WORKFLOW_EXECUTION_SM_NAME}*", + f"arn:aws:states:{APPLICATION_REGION}:{APPLICATION_ACCOUNT}:stateMachine:{STAGE_A_TRANSFORM_SM_NAME}*", + f"arn:aws:states:{APPLICATION_REGION}:{APPLICATION_ACCOUNT}:execution:{STAGE_A_TRANSFORM_SM_NAME}*", + 
f"arn:aws:states:{APPLICATION_REGION}:{APPLICATION_ACCOUNT}:stateMachine:{STAGE_B_TRANSFORM_SM_NAME}*", + f"arn:aws:states:{APPLICATION_REGION}:{APPLICATION_ACCOUNT}:execution:{STAGE_B_TRANSFORM_SM_NAME}*" ] }, { @@ -112,37 +126,20 @@ f"{ARTIFACTS_BUCKET}/*" ] }, - { - "Effect": "Allow", - "Action": [ - "dynamodb:ListTables", - "dynamodb:DescribeTable" - ], - "Resource": [ - "*" - ] - }, { "Effect": "Allow", "Action": [ "dynamodb:*" ], "Resource": [ - TPS_CUSTOMER_TABLE, f"{TPS_CUSTOMER_TABLE}*", - WFM_CUSTOMER_TABLE, f"{WFM_CUSTOMER_TABLE}*", - WFM_WORKFLOWS_TABLE, f"{WFM_WORKFLOWS_TABLE}*", - DATALAKE_CUSTOMER_TABLE, + f"{WFM_WORKFLOW_EXECUTION_TABLE}*", f"{DATALAKE_CUSTOMER_TABLE}*", - OCTAGON_DATASETS_TABLE, f"{OCTAGON_DATASETS_TABLE}*", - OCTAGON_OBJECT_METADATA_TABLE, f"{OCTAGON_OBJECT_METADATA_TABLE}*", - OCTAGON_PIPELINE_EXECUTION_TABLE, f"{OCTAGON_PIPELINE_EXECUTION_TABLE}*", - OCTAGON_PIPELINE_TABLE, f"{OCTAGON_PIPELINE_TABLE}*", ] }, @@ -159,17 +156,11 @@ STAGE_BUCKET_KEY, ATHENA_BUCKET_KEY, LOGGING_BUCKET_KEY, - ARTIFACTS_BUCKET_KEY - ] - }, - { - "Effect": "Allow", - "Action": [ - "events:List*", - "events:Describe*" - ], - "Resource": [ - "*" + ARTIFACTS_BUCKET_KEY, + OCTAGON_DATASETS_TABLE_KEY, + OCTAGON_OBJECT_METADATA_TABLE_KEY, + OCTAGON_PIPELINE_EXECUTION_TABLE_KEY, + OCTAGON_PIPELINES_TABLE_KEY ] }, { @@ -182,16 +173,6 @@ f"arn:aws:events:*:{APPLICATION_ACCOUNT}:rule/{STACK_NAME}*" ] }, - { - "Effect": "Allow", - "Action": [ - "lambda:GetAccountSettings", - "lambda:ListFunctions" - ], - "Resource": [ - "*" - ] - }, { "Effect": "Allow", "Action": [ @@ -201,15 +182,6 @@ f"arn:aws:lambda:*:{APPLICATION_ACCOUNT}:function:{STACK_NAME}*" ] }, - { - "Effect": "Allow", - "Action": [ - "logs:DescribeLogGroups" - ], - "Resource": [ - "*" - ] - }, { "Effect": "Allow", "Action": [ @@ -222,21 +194,7 @@ { "Effect": "Allow", "Action": [ - "iam:ListRoles", - "iam:ListUsers" - ], - "Resource": [ - "*" - ] - }, - { - "Effect": "Allow", - "Action": [ - 
"lakeformation:PutDataLakeSettings", - "lakeformation:GetDataLakeSettings", - "lakeformation:ListPermissions", - "lakeformation:ListLFTags", - "lakeformation:BatchGrantPermissions" + "lakeformation:*" ], "Resource": [ LAKE_FORMATION_CATALOG @@ -245,27 +203,19 @@ { "Effect": "Allow", "Action": [ - "glue:GetDatabases", - "glue:SearchTables", - "glue:GetTables", - "glue:GetDatabase" + "cloudformation:*" ], "Resource": [ - "*" + f"arn:aws:cloudformation:*:{APPLICATION_ACCOUNT}:stack/{STACK_NAME}*" ] }, { "Effect": "Allow", "Action": [ - "athena:GetWorkGroup", - "athena:StartQueryExecution", - "athena:GetQueryExecution", - "athena:GetQueryResults", - "athena:ListQueryExecutions", - "athena:ListNamedQueries" + "glue:*" ], "Resource": [ - "*" + f"arn:aws:glue:{APPLICATION_REGION}:{APPLICATION_ACCOUNT}:job/{GLUE_JOB_NAME}" ] } ] diff --git a/source/infrastructure/amc_insights/custom_resource/user_iam/user_iam_construct.py b/source/infrastructure/amc_insights/custom_resource/user_iam/user_iam_construct.py index d305194..6c889b3 100644 --- a/source/infrastructure/amc_insights/custom_resource/user_iam/user_iam_construct.py +++ b/source/infrastructure/amc_insights/custom_resource/user_iam/user_iam_construct.py @@ -24,6 +24,8 @@ def __init__( wfm_resources, pmn_resources, foundations_resources, + insights_pipeline_resources, + amc_dataset_resources, creating_resources_condition ) -> None: super().__init__(scope, id) @@ -34,6 +36,8 @@ def __init__( self._pmn_resources = pmn_resources self._foundations_resources = foundations_resources self._resource_prefix = Aws.STACK_NAME + self._insights_pipeline_resources = insights_pipeline_resources + self._amc_dataset_resources = amc_dataset_resources self._creating_resources_condition = creating_resources_condition # Apply condition to resources in Construct @@ -82,13 +86,16 @@ def _create_user_iam_lambda(self): "SOLUTION_ID": self.node.try_get_context("SOLUTION_ID"), "SOLUTION_VERSION": self.node.try_get_context("SOLUTION_VERSION"), 
"STACK_NAME": Aws.STACK_NAME, + "APPLICATION_REGION": Aws.REGION, "APPLICATION_ACCOUNT": Aws.ACCOUNT_ID, - "SAGEMAKER_NOTEBOOK": f"arn:aws:sagemaker:{Aws.REGION}:{Aws.ACCOUNT_ID}:notebook-instance/{Aws.STACK_NAME}-{self._pmn_resources.notebook_instance.attr_notebook_instance_name}", + "SAGEMAKER_NOTEBOOK": f"arn:aws:sagemaker:{Aws.REGION}:{Aws.ACCOUNT_ID}:notebook-instance/{self._pmn_resources.notebook_instance.attr_notebook_instance_name}", "SAGEMAKER_NOTEBOOK_LC": f"arn:aws:sagemaker:{Aws.REGION}:{Aws.ACCOUNT_ID}:notebook-instance-lifecycle-config/{self._pmn_resources.sagemaker_lifecycle_config.attr_notebook_instance_lifecycle_config_name}", - "TPS_INITIALIZE_SM": self._tps_resources._sm.attr_arn, - "WFM_WORKFLOWS_SM": self._wfm_resources.statemachine_workflows_sm.state_machine_arn, - "WFM_WORKFLOW_EXECUTION_SM": self._wfm_resources.statemachine_workflow_executions_sm.state_machine_arn, "DATALAKE_CUSTOMER_TABLE": self._foundations_resources.customer_config_table.table_arn, + "TPS_INITIALIZE_SM_NAME": self._tps_resources._sm.attr_name, + "WFM_WORKFLOWS_SM_NAME": self._wfm_resources.statemachine_workflows_sm.state_machine_name, + "WFM_WORKFLOW_EXECUTION_SM_NAME": self._wfm_resources.statemachine_workflow_executions_sm.state_machine_name, + "STAGE_A_TRANSFORM_SM_NAME": self._insights_pipeline_resources._stage_a_transform.sm_a.attr_name, + "STAGE_B_TRANSFORM_SM_NAME": self._insights_pipeline_resources._stage_b_transform.sm_b.attr_name, "WFM_CUSTOMER_TABLE": self._wfm_resources.dynamodb_customer_config_table.table_arn, "WFM_WORKFLOWS_TABLE": self._wfm_resources.dynamodb_workflows_table.table_arn, "WFM_WORKFLOW_EXECUTION_TABLE": self._wfm_resources.dynamodb_execution_status_table.table_arn, @@ -110,7 +117,12 @@ def _create_user_iam_lambda(self): "STAGE_BUCKET_KEY": self._foundations_resources.stage_bucket_key.key_arn, "ATHENA_BUCKET": self._foundations_resources.athena_bucket.bucket_arn, "ATHENA_BUCKET_KEY": self._foundations_resources.athena_bucket_key.key_arn, - 
"LAKE_FORMATION_CATALOG": f"arn:aws:lakeformation:{Aws.REGION}:{Aws.ACCOUNT_ID}:catalog:{Aws.ACCOUNT_ID}" + "LAKE_FORMATION_CATALOG": f"arn:aws:lakeformation:{Aws.REGION}:{Aws.ACCOUNT_ID}:catalog:{Aws.ACCOUNT_ID}", + "OCTAGON_DATASETS_TABLE_KEY": self._foundations_resources.datasets.encryption_key.key_arn, + "OCTAGON_OBJECT_METADATA_TABLE_KEY": self._foundations_resources.object_metadata.encryption_key.key_arn, + "OCTAGON_PIPELINE_EXECUTION_TABLE_KEY": self._foundations_resources.peh.encryption_key.key_arn, + "OCTAGON_PIPELINES_TABLE_KEY": self._foundations_resources.pipelines.encryption_key.key_arn, + "GLUE_JOB_NAME": self._amc_dataset_resources.job.name }, layers=[ PowertoolsLayer.get_or_create(self), diff --git a/source/infrastructure/amc_insights/microservices/workflow_manager_service/lambda_layers/wfm_layer/python/wfm_utilities/wfm_utilities.py b/source/infrastructure/amc_insights/microservices/workflow_manager_service/lambda_layers/wfm_layer/python/wfm_utilities/wfm_utilities.py index 1d2288e..70ccbfc 100644 --- a/source/infrastructure/amc_insights/microservices/workflow_manager_service/lambda_layers/wfm_layer/python/wfm_utilities/wfm_utilities.py +++ b/source/infrastructure/amc_insights/microservices/workflow_manager_service/lambda_layers/wfm_layer/python/wfm_utilities/wfm_utilities.py @@ -139,7 +139,7 @@ def get_last_day_of_month( def process_parameter_functions( self, - parameter_value: str + parameter_value ) -> str: """ Replaces values that have function names such as NOW() TODAY() LASTDAYOFOFFSETMONTH() FIRSTDAYOFOFFSETMONTH() FIFTEENTHDAYOFOFFSETMONTH() @@ -156,41 +156,42 @@ def process_parameter_functions( parameter value will be returned unchanged """ - if parameter_value.upper() == 'NOW()': - return dt.datetime.today().strftime('%Y-%m-%dT%H:%M:%S') - - if "TODAY(" in parameter_value.upper(): - if parameter_value.upper() == "TODAY()": - return self.get_current_date_with_offset(0) - else: - return 
self.get_current_date_with_offset(self.get_offset_value(parameter_value)) - - if "LASTDAYOFOFFSETMONTH(" in parameter_value.upper(): - date_with_month_offset = self.get_current_date_with_month_offset( - self.get_offset_value(parameter_value)) - last_day_of_previous_month = self.get_last_day_of_month( - date_with_month_offset) - return_value = dt.datetime(date_with_month_offset.year, date_with_month_offset.month, - last_day_of_previous_month, - date_with_month_offset.hour, date_with_month_offset.minute).strftime( - '%Y-%m-%dT00:00:00') - return return_value - - if "FIRSTDAYOFOFFSETMONTH(" in parameter_value.upper(): - date_with_month_offset = self.get_current_date_with_month_offset( - self.get_offset_value(parameter_value)) - return_value = dt.datetime(date_with_month_offset.year, date_with_month_offset.month, 1, - date_with_month_offset.hour, - date_with_month_offset.minute).strftime('%Y-%m-%dT00:00:00') - return return_value - - if "FIFTEENTHDAYOFOFFSETMONTH(" in parameter_value.upper(): - date_with_month_offset = self.get_current_date_with_month_offset( - self.get_offset_value(parameter_value)) - return_value = dt.datetime(date_with_month_offset.year, date_with_month_offset.month, 15, - date_with_month_offset.hour, - date_with_month_offset.minute).strftime('%Y-%m-%dT00:00:00') - return return_value + if isinstance(parameter_value, str): + if parameter_value.upper() == 'NOW()': + return dt.datetime.today().strftime('%Y-%m-%dT%H:%M:%S') + + if "TODAY(" in parameter_value.upper(): + if parameter_value.upper() == "TODAY()": + return self.get_current_date_with_offset(0) + else: + return self.get_current_date_with_offset(self.get_offset_value(parameter_value)) + + if "LASTDAYOFOFFSETMONTH(" in parameter_value.upper(): + date_with_month_offset = self.get_current_date_with_month_offset( + self.get_offset_value(parameter_value)) + last_day_of_previous_month = self.get_last_day_of_month( + date_with_month_offset) + return_value = dt.datetime(date_with_month_offset.year, 
date_with_month_offset.month, + last_day_of_previous_month, + date_with_month_offset.hour, date_with_month_offset.minute).strftime( + '%Y-%m-%dT00:00:00') + return return_value + + if "FIRSTDAYOFOFFSETMONTH(" in parameter_value.upper(): + date_with_month_offset = self.get_current_date_with_month_offset( + self.get_offset_value(parameter_value)) + return_value = dt.datetime(date_with_month_offset.year, date_with_month_offset.month, 1, + date_with_month_offset.hour, + date_with_month_offset.minute).strftime('%Y-%m-%dT00:00:00') + return return_value + + if "FIFTEENTHDAYOFOFFSETMONTH(" in parameter_value.upper(): + date_with_month_offset = self.get_current_date_with_month_offset( + self.get_offset_value(parameter_value)) + return_value = dt.datetime(date_with_month_offset.year, date_with_month_offset.month, 15, + date_with_month_offset.hour, + date_with_month_offset.minute).strftime('%Y-%m-%dT00:00:00') + return return_value # if no conditions are met, return the parameter unchanged return parameter_value diff --git a/source/infrastructure/data_lake/datasets/sdlf_dataset.py b/source/infrastructure/data_lake/datasets/sdlf_dataset.py index b246a81..afda146 100644 --- a/source/infrastructure/data_lake/datasets/sdlf_dataset.py +++ b/source/infrastructure/data_lake/datasets/sdlf_dataset.py @@ -227,7 +227,7 @@ def _create_sdlf_glue_job_role(self): ) def _create_sdlf_stage_b_glue_job(self) -> None: - job: CfnJob = CfnJob( + self.job: CfnJob = CfnJob( self, "sdlf-heavy-transform-glue-job", name=f"{self._resource_prefix}-{self._team}-{self._dataset}-glue-job", @@ -256,7 +256,7 @@ def _create_sdlf_stage_b_glue_job(self) -> None: f"amc-heavy-transform-{self._team}-{self._dataset}-job-name", parameter_name=f"/{self._resource_prefix}/Glue/{self._team}/{self._dataset}/SDLFHeavyTransformJobName", simple_name=True, - string_value=job.name, # type: ignore + string_value=self.job.name, # type: ignore ) def _create_glue_database(self) -> None: diff --git 
a/source/infrastructure/data_lake/lambda_layers/data_lake_library/python/datalake_library/transforms/stage_a_transforms/amc_light_transform.py b/source/infrastructure/data_lake/lambda_layers/data_lake_library/python/datalake_library/transforms/stage_a_transforms/amc_light_transform.py index 0ff426d..dfc7dcd 100755 --- a/source/infrastructure/data_lake/lambda_layers/data_lake_library/python/datalake_library/transforms/stage_a_transforms/amc_light_transform.py +++ b/source/infrastructure/data_lake/lambda_layers/data_lake_library/python/datalake_library/transforms/stage_a_transforms/amc_light_transform.py @@ -6,10 +6,6 @@ # where a JSON file is downloaded from RAW to /tmp # then parsed before being re-uploaded to STAGE ####################################################### -# License: Apache 2.0 -####################################################### -# Author: jaidi -####################################################### import awswrangler as wr import re diff --git a/source/infrastructure/data_lake/lambda_layers/data_lake_library/python/datalake_library/transforms/stage_b_transforms/amc_heavy_transform.py b/source/infrastructure/data_lake/lambda_layers/data_lake_library/python/datalake_library/transforms/stage_b_transforms/amc_heavy_transform.py index 6f9ec29..2591702 100755 --- a/source/infrastructure/data_lake/lambda_layers/data_lake_library/python/datalake_library/transforms/stage_b_transforms/amc_heavy_transform.py +++ b/source/infrastructure/data_lake/lambda_layers/data_lake_library/python/datalake_library/transforms/stage_b_transforms/amc_heavy_transform.py @@ -6,10 +6,6 @@ # where a number of CSV files are dowloaded from # Stage bucket and then submitted to a Glue Job ####################################################### -# License: Apache 2.0 -####################################################### -# Author: jaidi -####################################################### ####################################################### # Import section diff --git 
a/source/infrastructure/data_lake/stages/sdlf_heavy_transform/sdlf_heavy_transform.py b/source/infrastructure/data_lake/stages/sdlf_heavy_transform/sdlf_heavy_transform.py index f36f900..b924706 100644 --- a/source/infrastructure/data_lake/stages/sdlf_heavy_transform/sdlf_heavy_transform.py +++ b/source/infrastructure/data_lake/stages/sdlf_heavy_transform/sdlf_heavy_transform.py @@ -575,7 +575,7 @@ def _create_state_machine(self, name) -> None: ] ) - sm_b = sfn.CfnStateMachine( + self.sm_b = sfn.CfnStateMachine( self, 'sdlf-heavy-sm-b', role_arn=sfn_role.role_arn, @@ -597,7 +597,7 @@ def _create_state_machine(self, name) -> None: "sdlf-heavy-sm-b-arn", parameter_name=f"/{self.resource_prefix}/SM/{self.team}/{self.pipeline}StageBSM", simple_name=True, - string_value=sm_b.attr_arn, + string_value=self.sm_b.attr_arn, ) def _suppress_cfn_nag_warnings(self): diff --git a/source/infrastructure/data_lake/stages/sdlf_light_transform/sdlf_light_transform.py b/source/infrastructure/data_lake/stages/sdlf_light_transform/sdlf_light_transform.py index 990cce5..bf72d47 100644 --- a/source/infrastructure/data_lake/stages/sdlf_light_transform/sdlf_light_transform.py +++ b/source/infrastructure/data_lake/stages/sdlf_light_transform/sdlf_light_transform.py @@ -622,7 +622,7 @@ def _create_state_machine(self) -> None: ] ) - sm_a = sfn.CfnStateMachine( + self.sm_a = sfn.CfnStateMachine( self, 'sdlf-light-sm-a', role_arn=sfn_role.role_arn, @@ -644,7 +644,7 @@ def _create_state_machine(self) -> None: "sdlf-light-sm-a-arn", parameter_name=f"/{self.resource_prefix}/SM/{self.team}/{self.pipeline}StageASM", simple_name=True, - string_value=sm_a.attr_arn, + string_value=self.sm_a.attr_arn, ) def _suppress_cfn_nag_warnings(self):