diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index a03f897..385c73b 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -13,8 +13,8 @@ jobs:
       - name: Checkout code
        uses: actions/checkout@v2

-      - name: Build - s3-inventory-move
-        working-directory: s3-inventory-move/
+      - name: Build - s3-inventory-copy
+        working-directory: s3-inventory-copy/
        run: |
          npm ci
          npx sls package
@@ -35,10 +35,10 @@ jobs:
          tag_name="$(echo "${tag_ref##*/}" )"
          echo "::set-output name=tag_name::$tag_name"

-      - name: Upload Assets - s3-inventory-move
+      - name: Upload Assets - s3-inventory-copy
        id: upload_assets_s3_inventory
        uses: AButler/upload-release-assets@v2.0
        with:
-          files: 's3-inventory-move/.serverless/*.zip'
+          files: 's3-inventory-copy/.serverless/*.zip'
          repo-token: ${{ secrets.GITHUB_TOKEN }}
          release-tag: ${{steps.get-tag-name.outputs.tag_name}}
diff --git a/hamlet/s3support/modules/extensions/s3_inventory_copy_event/extension.ftl b/hamlet/s3support/modules/extensions/s3_inventory_copy_event/extension.ftl
new file mode 100644
index 0000000..beeedbb
--- /dev/null
+++ b/hamlet/s3support/modules/extensions/s3_inventory_copy_event/extension.ftl
@@ -0,0 +1,43 @@
+[#ftl]
+
+[@addExtension
+    id="s3_inventory_copy_event"
+    aliases=[
+        "_s3_inventory_copy_event"
+    ]
+    description=[
+        "Configures the s3 event lambda with batch permissions"
+    ]
+    supportedTypes=[
+        LAMBDA_COMPONENT_TYPE,
+        LAMBDA_FUNCTION_COMPONENT_TYPE
+    ]
+/]
+
+[#macro shared_extension_s3_inventory_copy_event_deployment_setup occurrence ]
+
+    [#-- When submitting an s3 batch job we need to give batch an IAM role which allows it to access the source and invoke the lambda --]
+    [#-- This creates a new role using the same links as the lambda but with a different trust --]
+    [#local s3BatchRoleId = formatResourceId(IAM_ROLE_RESOURCE_TYPE, occurrence.Core.Id, "s3batch") ]
+    [#local s3BatchPolicies = getLinkTargetsOutboundRoles(_context.Links) ]
+    [@createRole
+        id=s3BatchRoleId
+        trustedServices=[
+            "batchoperations.s3.amazonaws.com"
+        ]
+        policies=[getPolicyDocument(s3BatchPolicies, "links")]
+    /]
+
+    [@Settings
+        {
+            "S3_BATCH_ROLE_ARN" : getReference(s3BatchRoleId, ARN_ATTRIBUTE_TYPE)
+        }
+    /]
+
+    [@Settings
+        [
+            "S3_BATCH_JOB_LAMBDA_ARN",
+            "S3_BATCH_PRIORITY"
+        ]
+    /]
+[/#macro]
diff --git a/hamlet/s3support/modules/s3_inventory_copy/module.ftl b/hamlet/s3support/modules/s3_inventory_copy/module.ftl
new file mode 100644
index 0000000..b426419
--- /dev/null
+++ b/hamlet/s3support/modules/s3_inventory_copy/module.ftl
@@ -0,0 +1,222 @@
+[#ftl]
+
+[@addModule
+    name="s3_inventory_copy"
+    description="Copies files to a new location based on updates to an S3 inventory report"
+    provider=S3SUPPORT_PROVIDER
+    properties=[
+        {
+            "Names" : "id",
+            "Description" : "A unique id for this instance of the module",
+            "Type" : STRING_TYPE,
+            "Mandatory" : true
+        },
+        {
+            "Names" : "tier",
+            "Description" : "The tier the components will belong to",
+            "Type" : STRING_TYPE,
+            "Mandatory" : true
+        },
+        {
+            "Names" : "instance",
+            "Description" : "The instance id of the components",
+            "Type" : STRING_TYPE,
+            "Default" : "default"
+        },
+        {
+            "Names" : "s3KeyPrefix",
+            "Description" : "A prefix to prepend to all keys in the report when copying",
+            "Type" : STRING_TYPE,
+            "Default" : ""
+        },
+        {
+            "Names" : "s3KeySuffix",
+            "Description" : "A suffix to append to all keys in the report when copying",
+            "Type" : STRING_TYPE,
+            "Default" : ""
+        },
+        {
+            "Names" : "s3InventoryPrefix",
+            "Description" : "The prefix to use for inventory generation on the source bucket",
+            "Type" : STRING_TYPE,
+            "Default" : "s3_inventory_copy/"
+        },
+        {
+            "Names" : "sourceBucketLink",
+            "Description" : "A link to the source s3 bucket which will trigger the copy",
+            "Children" : linkChildrenConfiguration
+        },
+        {
+            "Names" : "destinationBucketLink",
+            "Description" : "A link to an S3 bucket to copy the report objects to",
+            "Children" : linkChildrenConfiguration
+        },
+        {
+            "Names" : "s3InventoryProfileSuffix",
+            "Description" : "The suffix ( added to the id ) for the deployment profile which configures inventory generation on the source bucket",
+            "Type" : STRING_TYPE,
+            "Default" : "_s3inventorycopy"
+        },
+        {
+            "Names" : "lambdaImageUrl",
+            "Description" : "The url to the lambda zip image",
+            "Type" : STRING_TYPE,
+            "Default" : "https://github.com/hamlet-io/lambda-s3-support/releases/download/v0.0.5/s3-inventory-copy.zip"
+        },
+        {
+            "Names" : "lambdaImageHash",
+            "Description" : "The sha1 hash of the lambda zip image",
+            "Type" : STRING_TYPE,
+            "Default" : "4ecc2684e18be6ad91b704cf211b074919314144"
+        },
+        {
+            "Names" : "batchPriority",
+            "Description" : "The priority of the s3 batch call - Highest wins",
+            "Type" : NUMBER_TYPE,
+            "Default" : 100
+        }
+    ]
+/]
+
+
+[#macro s3support_module_s3_inventory_copy
+    id
+    tier
+    instance
+    s3KeyPrefix
+    s3KeySuffix
+    s3InventoryPrefix
+    sourceBucketLink
+    destinationBucketLink
+    s3InventoryProfileSuffix
+    lambdaImageUrl
+    lambdaImageHash
+    batchPriority
+]
+
+    [#local product = getActiveLayer(PRODUCT_LAYER_TYPE) ]
+    [#local environment = getActiveLayer(ENVIRONMENT_LAYER_TYPE)]
+    [#local segment = getActiveLayer(SEGMENT_LAYER_TYPE)]
+    [#local instance = (instance == "default")?then("", instance)]
+    [#local namespace = formatName(product["Name"], environment["Name"], segment["Name"])]
+
+    [#local lambdaId = formatName(id, "lambda") ]
+    [#local lambdaSettingsNamespace = formatName(namespace, tier, lambdaId, instance)]
+
+
+    [#-- Lambda Configuration --]
+    [@loadModule
+        settingSets=[
+            {
+                "Type" : "Settings",
+                "Scope" : "Products",
+                "Namespace" : lambdaSettingsNamespace,
+                "Settings" : {
+                    "S3_DESTINATION_PREFIX" : s3KeyPrefix,
+                    "S3_DESTINATION_SUFFIX" : s3KeySuffix,
+                    "S3_BATCH_PRIORITY" : batchPriority
+                }
+            }
+        ]
+    /]
+
+    [#-- Solution Configuration --]
+    [@loadModule
+        blueprint={
+            "Tiers" : {
+                tier : {
+                    "Components" : {
+                        lambdaId : {
+                            "Title": "",
+                            "lambda": {
+                                "deployment:Unit" : lambdaId,
+                                "Image" : {
+                                    "Source" : "url",
+                                    "UrlSource" : {
+                                        "Url" : lambdaImageUrl,
+                                        "ImageHash" : lambdaImageHash
+                                    }
+                                },
+                                "RunTime": "python3.6",
+                                "MemorySize": 128,
+                                "PredefineLogGroup": true,
+                                "VPCAccess": false,
+                                "Timeout": 10,
+                                "Functions": {
+                                    "s3event": {
+                                        "Handler": "src/lambda.s3event_lambda_handler",
+                                        "Extensions": [ "_noenv" ],
+                                        "Links" : {
+                                            "S3_BATCH_JOB_LAMBDA" : {
+                                                "Tier" : tier,
+                                                "Component" : lambdaId,
+                                                "Instance" : instance,
+                                                "Version" : "",
+                                                "Function" : "s3batch",
+                                                "Role" : "invoke"
+                                            },
+                                            "S3_SOURCE" :
+                                                sourceBucketLink +
+                                                {
+                                                    "Role" : "consume"
+                                                }
+                                        }
+                                    },
+                                    "s3batch": {
+                                        "Handler": "src/lambda.s3batch_lambda_handler",
+                                        "Extensions": [ "_noenv" ],
+                                        "Links" : {
+                                            "S3_SOURCE" :
+                                                sourceBucketLink +
+                                                {
+                                                    "Role" : "consume"
+                                                },
+                                            "S3_DESTINATION" :
+                                                destinationBucketLink +
+                                                {
+                                                    "Role" : "produce"
+                                                }
+                                        }
+                                    }
+                                }
+                            }
+                        }
+                    }
+                }
+            },
+            "DeploymentProfiles" : {
+                id + s3InventoryProfileSuffix : {
+                    "Modes" : {
+                        "*" : {
+                            "s3" : {
+                                "Notifications" : {
+                                    "InventoryCreate" : {
+                                        "Links" : {
+                                            "s3copy" : {
+                                                "Tier" : tier,
+                                                "Component" : lambdaId,
+                                                "Instance" : "",
+                                                "Version" : "",
+                                                "Role" : "invoke",
+                                                "Function" : "s3event"
+                                            }
+                                        },
+                                        "Prefix" : s3InventoryPrefix,
+                                        "Suffix" : "manifest.checksum",
+                                        "Events" : "create"
+                                    }
+                                },
+                                "InventoryReports" : {
+                                    "InventoryCopy" : {
+                                        "Destination" : {
+                                            "Type" : "self"
+                                        },
+                                        "DestinationPrefix" : s3InventoryPrefix
+                                    }
+                                }
+                            }
+                        }
+                    }
+                }
+            }
+        }
+    /]
+
+[/#macro]
diff --git a/hamlet/s3support/provider.ftl b/hamlet/s3support/provider.ftl
new file mode 100644
index 0000000..b45cec5
--- /dev/null
+++ b/hamlet/s3support/provider.ftl
@@ -0,0 +1 @@
+[#assign S3SUPPORT_PROVIDER = "s3support" ]
diff --git a/s3-inventory-move/.gitignore b/s3-inventory-copy/.gitignore
similarity index 100%
rename from s3-inventory-move/.gitignore
rename to s3-inventory-copy/.gitignore
diff --git a/s3-inventory-copy/README.md b/s3-inventory-copy/README.md
new file mode 100644
index 0000000..6830b8a
--- /dev/null
+++ b/s3-inventory-copy/README.md
@@ -0,0 +1,8 @@
+# S3 Inventory Copy
+
+Uses S3 Batch Operations to copy the files listed in an S3 Inventory report
+
+The configuration allows for:
+- Copying contents to a new bucket
+- Appending a suffix to keys
+- Prepending a prefix to keys
diff --git a/s3-inventory-move/package-lock.json b/s3-inventory-copy/package-lock.json
similarity index 99%
rename from s3-inventory-move/package-lock.json
rename to s3-inventory-copy/package-lock.json
index 162d2f5..01a2ead 100644
--- a/s3-inventory-move/package-lock.json
+++ b/s3-inventory-copy/package-lock.json
@@ -1,5 +1,5 @@
 {
-  "name": "s3-inventory-move",
+  "name": "s3-inventory-copy",
   "version": "1.0.0",
   "lockfileVersion": 1,
   "requires": true,
diff --git a/s3-inventory-move/package.json b/s3-inventory-copy/package.json
similarity index 88%
rename from s3-inventory-move/package.json
rename to s3-inventory-copy/package.json
index 994b216..18b58f6 100644
--- a/s3-inventory-move/package.json
+++ b/s3-inventory-copy/package.json
@@ -1,5 +1,5 @@
 {
-  "name": "s3-inventory-move",
+  "name": "s3-inventory-copy",
   "version": "1.0.0",
   "description": "SLS packaging",
   "main": "''",
diff --git a/s3-inventory-move/serverless.yml b/s3-inventory-copy/serverless.yml
similarity index 94%
rename from s3-inventory-move/serverless.yml
rename to s3-inventory-copy/serverless.yml
index f8152bb..7914fc1 100644
--- a/s3-inventory-move/serverless.yml
+++ b/s3-inventory-copy/serverless.yml
@@ -1,4 +1,4 @@
-service: s3-inventory-move
+service: s3-inventory-copy

 package:
   individually: false
diff --git a/s3-inventory-move/src/lambda.py b/s3-inventory-copy/src/lambda.py
similarity index 98%
rename from s3-inventory-move/src/lambda.py
rename to s3-inventory-copy/src/lambda.py
index 176b201..ac0a53d 100644
--- a/s3-inventory-move/src/lambda.py
+++ b/s3-inventory-copy/src/lambda.py
@@ -57,7 +57,7 @@ def s3event_lambda_handler(event, context):

 def s3batch_lambda_handler(event, context):
     '''
-    Appends suffix or prefix to files and also moves to another bucket if required
+    Appends suffix or prefix to files and also copies to another bucket if required
     '''

     s3Client = boto3.client('s3')
diff --git a/s3-inventory-move/README.md b/s3-inventory-move/README.md
deleted file mode 100644
index ae71ea1..0000000
--- a/s3-inventory-move/README.md
+++ /dev/null
@@ -1,8 +0,0 @@
-# S3 Inventory Move
-
-Uses S3Batch to move files listed in an S3Inventory Report
-
-The configuration allows for:
-- Moving contents to a new bucket
-- Appending a suffix to keys
-- Appending a prefix to keys
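
For reviewers unfamiliar with S3 Batch Operations "invoke Lambda" jobs, the sketch below shows the general shape of a task handler like s3batch_lambda_handler: it copies each listed object, prepending and appending the S3_DESTINATION_PREFIX / S3_DESTINATION_SUFFIX values that the module settings above provide. This is an illustrative sketch only, not the packaged src/lambda.py; the S3_DESTINATION_BUCKET environment variable name is assumed here purely for illustration.

# Illustrative sketch of an S3 Batch Operations task handler (not the packaged src/lambda.py).
import os
import urllib.parse

import boto3

s3_client = boto3.client('s3')


def s3batch_copy_sketch(event, context):
    '''
    Handle one S3 Batch Operations task: copy the listed key to the destination
    bucket, prepending S3_DESTINATION_PREFIX and appending S3_DESTINATION_SUFFIX.
    '''
    task = event['tasks'][0]
    source_bucket = task['s3BucketArn'].split(':::')[-1]
    source_key = urllib.parse.unquote_plus(task['s3Key'])

    # S3_DESTINATION_BUCKET is an assumed variable name used only for this sketch
    destination_bucket = os.environ.get('S3_DESTINATION_BUCKET', source_bucket)
    destination_key = (
        os.environ.get('S3_DESTINATION_PREFIX', '')
        + source_key
        + os.environ.get('S3_DESTINATION_SUFFIX', '')
    )

    try:
        s3_client.copy_object(
            Bucket=destination_bucket,
            Key=destination_key,
            CopySource={'Bucket': source_bucket, 'Key': source_key},
        )
        result_code, result_string = 'Succeeded', destination_key
    except Exception as error:  # report failures back to S3 Batch rather than raising
        result_code, result_string = 'PermanentFailure', str(error)

    # S3 Batch Operations expects one result per task in this response shape
    return {
        'invocationSchemaVersion': event['invocationSchemaVersion'],
        'treatMissingKeysAs': 'PermanentFailure',
        'invocationId': event['invocationId'],
        'results': [
            {
                'taskId': task['taskId'],
                'resultCode': result_code,
                'resultString': result_string,
            }
        ],
    }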