refactor: rename to provide understanding
roleyfoley committed Jan 29, 2021
1 parent 9fd912f commit 2a151a4
Showing 11 changed files with 282 additions and 16 deletions.
8 changes: 4 additions & 4 deletions .github/workflows/release.yml
@@ -13,8 +13,8 @@ jobs:
      - name: Checkout code
        uses: actions/checkout@v2

-     - name: Build - s3-inventory-move
-       working-directory: s3-inventory-move/
+     - name: Build - s3-inventory-copy
+       working-directory: s3-inventory-copy/
        run: |
          npm ci
          npx sls package
@@ -35,10 +35,10 @@ jobs:
          tag_name="$(echo "${tag_ref##*/}" )"
          echo "::set-output name=tag_name::$tag_name"
-     - name: Upload Assets - s3-inventory-move
+     - name: Upload Assets - s3-inventory-copy
        id: upload_assets_s3_inventory
        uses: AButler/[email protected]
        with:
-         files: 's3-inventory-move/.serverless/*.zip'
+         files: 's3-inventory-copy/.serverless/*.zip'
          repo-token: ${{ secrets.GITHUB_TOKEN }}
          release-tag: ${{steps.get-tag-name.outputs.tag_name}}
@@ -0,0 +1,43 @@
[#ftl]

[@addExtension
id="s3_inventory_copy_event"
aliases=[
"_s3_inventory_copy_event"
]
description=[
"Configures the s3 event lambda with batch permissions"
]
supportedTypes=[
LAMBDA_COMPONENT_TYPE,
LAMBDA_FUNCTION_COMPONENT_TYPE
]
/]

[#macro shared_extension_s3_inventory_copy_event_deployment_setup occurrence ]

[#-- When submitting an S3 Batch job we need to give S3 Batch an IAM role which allows it to access the source bucket and invoke the lambda --]
[#-- This creates a new role using the same links as the lambda but with a different trust policy --]
[#local s3BatchRoleId = formatResourceId(IAM_ROLE_RESOURCE_TYPE, occurrence.Core.Id, "s3batch") ]
[#local s3BatchPolicies = getLinkTargetsOutboundRoles(_context.Links) ]
[@createRole
id=s3BatchRoleId
trustedServices=[
"batchoperations.s3.amazonaws.com"
]
policies=[getPolicyDocument(s3BatchPolicies, "links")]
/]

[@Settings
{
"S3_BATCH_ROLE_ARN" : getReference(s3BatchRoleId, ARN_ATTRIBUTE_TYPE)
}
/]

[@Settings
[
"S3_BATCH_JOB_LAMBDA_ARN",
"S3_BATCH_PRIORITY"
]
/]
[/#macro]
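
The S3_BATCH_ROLE_ARN published above, together with the S3_BATCH_JOB_LAMBDA_ARN and S3_BATCH_PRIORITY settings, is what a handler needs to submit an S3 Batch Operations job. The sketch below is not part of this commit and is not the repository's handler code; it only illustrates roughly how that submission could look with boto3, and the function name and manifest arguments are assumptions.

```python
# Illustrative sketch only - not the repository's lambda code.
import os
import boto3

def submit_batch_copy_job(manifest_arn, manifest_etag, account_id):
    """Submit an S3 Batch Operations job that invokes the copy lambda for
    every object listed in an S3 inventory manifest."""
    s3control = boto3.client('s3control')

    response = s3control.create_job(
        AccountId=account_id,
        ConfirmationRequired=False,
        # Invoke the s3batch lambda once per object in the manifest
        Operation={
            'LambdaInvoke': {
                'FunctionArn': os.environ['S3_BATCH_JOB_LAMBDA_ARN']
            }
        },
        Report={'Enabled': False},
        # Point the job at the inventory manifest.json produced by the source bucket
        Manifest={
            'Spec': {'Format': 'S3InventoryReport_CSV_20161130'},
            'Location': {
                'ObjectArn': manifest_arn,
                'ETag': manifest_etag
            }
        },
        Priority=int(os.environ['S3_BATCH_PRIORITY']),
        # Role created by the extension, trusted by batchoperations.s3.amazonaws.com
        RoleArn=os.environ['S3_BATCH_ROLE_ARN']
    )
    return response['JobId']
```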
222 changes: 222 additions & 0 deletions hamlet/s3support/modules/s3_inventory_copy/module.ftl
@@ -0,0 +1,222 @@
[#ftl]

[@addModule
name="s3_inventory_copy"
description="Copies files to a new location based on updates to an S3 inventory report"
provider=S3SUPPORT_PROVIDER
properties=[
{
"Names" : "id",
"Description" : "A unique id for this instance of the api",
"Type" : STRING_TYPE,
"Mandatory" : true
},
{
"Names" : "tier",
"Description" : "The tier the components will belong to",
"Type" : STRING_TYPE,
"Mandatory" : true
},
{
"Names" : "instance",
"Description" : "The instance id of the components",
"Type" : STRING_TYPE,
"Default" : "default"
},
{
"Names" : "s3KeyPrefix",
"Description" : "A prefix to append to all keys in the report when copying",
"Type" : STRING_TYPE,
"Default" : ""
},
{
"Names" : "s3KeySuffix",
"Description" : "A suffix to append to all keys in the report when copying",
"Type" : STRING_TYPE,
"Default" : ""
},
{
"Names" : "s3InventoryPrefix",
"Description" : "The prefix to use for inventory generation on the source bucket",
"Type" : STRING_TYPE,
"Default" : "s3_inventory_copy/"
},
{
"Names" : "soucrceBucketLink",
"Description" : "A link to the source s3 bucket which will trigger the copy",
"Children" : linkChildrenConfiguration
},
{
"Names" : "destinationBucketLink",
"Description" : "A link to an S3 bucket to copy the report objects to",
"Children" : linkChildrenConfiguration
},
{
"Names" : "s3InventoryProfileSuffix",
"Description" : "The suffix ( added to the id ) for the deployment profile which configures the userpool client",
"Type" : STRING_TYPE,
"Default" : "_cognitoqs"
},
{
"Names" : "lambdaImageUrl",
"Description" : "The url to the lambda zip image",
"Type" : STRING_TYPE,
"Default" : "https://github.com/hamlet-io/lambda-s3-support/releases/download/v0.0.5/s3-inventory-copy.zip"
},
{
"Names" : "lambdaImageHash",
"Description" : "The sha1 hash of the lambda zip image",
"Type" : STRING_TYPE,
"Default" : "4ecc2684e18be6ad91b704cf211b074919314144"
},
{
"Names" : "batchPriorty",
"Description" : "The priority of the s3 batch call - Highest wins",
"Type" : NUMBER_TYPE,
"Default" : 100
}
]
/]


[#macro s3support_module_s3_inventory_copy
id
tier
instance
s3KeyPrefix
s3KeySuffix
s3InventoryPrefix
sourceBucketLink
destinationBucketLink
s3InventoryProfileSuffix
lambdaImageUrl
lambdaImageHash
batchPriority
]

[#local product = getActiveLayer(PRODUCT_LAYER_TYPE) ]
[#local environment = getActiveLayer(ENVIRONMENT_LAYER_TYPE)]
[#local segment = getActiveLayer(SEGMENT_LAYER_TYPE)]
[#local instance = (instance == "default")?then("", instance)]
[#local namespace = formatName(product["Name"], environment["Name"], segment["Name"])]

[#local lambdaId = formatName(id, "lambda") ]
[#local lambdaSettingsNamespace = formatName(namespace, tier, lambdaId, instance)]


[#-- Lambda Configuration --]
[@loadModule
settingSets=[
{
"Type" : "Settings",
"Scope" : "Products",
"Namespace" : lambdaSettingsNamespace,
"Settings" : {
"S3_DESTINATION_PREFIX" : s3KeyPrefix,
"S3_DESTINATION_SUFFIX" : s3KeySuffix,
"S3_BATCH_PRIORITY" : batchPriorty
}
}
]
/]

[#-- Solution Configuration --]
[@loadModule
blueprint={
"Tiers" : {
tier : {
"Components" : {
lambdaId : {
"Title": "",
"lambda": {
"deployment:Unit" : lambdaId,
"Image" : {
"Source" : "url",
"UrlSource" : {
"Url" : lambdaImageUrl,
"ImageHash" : lambdaImageHash
}
},
"RunTime": "python3.6,
"MemorySize": 128,
"PredefineLogGroup": true,
"VPCAccess": false,
"Timeout": 10
"Functions": {
"s3event": {
"Handler": "src/lambda.s3event_lambda_handler",
"Extensions": [ "_noenv" ],
"Links" : {
"S3_BATCH_JOB_LAMBDA" : {
"Tier" : tier,
"Component" : lambdaId,
"Instance" : instance,
"Version" : "",
"Function" : "s3batch",
"Role" : "invoke"
},
"S3_SOURCE" :
sourceBucketLink +
{
"Role" : "consume"
}
}
},
"s3batch": {
"Handler": "src/lambda.s3batch_lambda_handler",
"Extensions": [ "_noenv" ],
"Links" : {
"S3_SOURCE" :
sourceBucketLink +
{
"Role" : "consume"
},
"s3_DESTINATION" :
destinationBucketLink +
{
"Role" : "produce"
}
}
}
}
}
}
}
}
},
"DeploymentProfiles" : {
id + s3InventoryProfileSuffix : {
"Modes" : {
"*" : {
"s3" : {
"Notifications" : {
"InventoryCreate" : {
"Links" : {
"s3move" : {
"Tier" : tier,
"Component" : lambdaId,
"Instance" : "",
"Version" : "",
"Role" : "invoke",
"Function" : "s3event"
}
},
"Prefix" : s3InventoryPrefix,
"Suffix" : "manifest.checksum",
"Events" : "create"
}
},
"InventoryReports" : {
"IntventoryCopy" : {
"Destination" : {
"Type" : "self"
},
"DestinationPrefix" : s3InventoryPrefix
}
}
}
}
}
}
}
}
/]

[/#macro]
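
The deployment profile above subscribes the lambda's s3event function to the creation of `manifest.checksum` objects under the inventory prefix; the matching `manifest.json` written by S3 inventory is what the batch job consumes. A hedged sketch of such an entry point follows. It is illustrative only (the repository's real handler lives in s3-inventory-copy/src/lambda.py and is not reproduced here), and `submit_batch_copy_job` is the hypothetical helper sketched earlier on this page.

```python
# Illustrative sketch only - not the repository's lambda code.
import urllib.parse
import boto3

def s3event_lambda_handler(event, context):
    """React to the inventory manifest.checksum notification and hand the
    matching manifest.json to an S3 Batch Operations job."""
    s3 = boto3.client('s3')
    # The account id is the fifth field of the invoked lambda's ARN
    account_id = context.invoked_function_arn.split(':')[4]

    for record in event['Records']:
        bucket = record['s3']['bucket']['name']
        # Object keys in S3 notifications are URL-encoded
        checksum_key = urllib.parse.unquote_plus(record['s3']['object']['key'])

        # S3 inventory writes manifest.json alongside manifest.checksum
        manifest_key = checksum_key.rsplit('/', 1)[0] + '/manifest.json'
        manifest = s3.head_object(Bucket=bucket, Key=manifest_key)

        # submit_batch_copy_job is the illustrative helper sketched earlier
        submit_batch_copy_job(
            manifest_arn=f"arn:aws:s3:::{bucket}/{manifest_key}",
            manifest_etag=manifest['ETag'].strip('"'),
            account_id=account_id
        )
```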
1 change: 1 addition & 0 deletions hamlet/s3support/provider.ftl
@@ -0,0 +1 @@
[#assign S3SUPPORT_PROVIDER = "s3support" ]
File renamed without changes.
8 changes: 8 additions & 0 deletions s3-inventory-copy/README.md
@@ -0,0 +1,8 @@
# S3 Inventory Copy

Uses S3 Batch Operations to copy the files listed in an S3 Inventory report.

The configuration allows for (see the key-mapping sketch below):
- Copying contents to a new bucket
- Appending a suffix to keys
- Prepending a prefix to keys
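
As a rough illustration of the key mapping (the prefix and suffix values here are made up; the actual behaviour is implemented in src/lambda.py), with s3KeyPrefix set to `archive/` and s3KeySuffix set to `.bak` a copied object's key would be transformed like this:

```python
# Illustrative only: how a destination key is derived from a source key
def destination_key(source_key, prefix="archive/", suffix=".bak"):
    return f"{prefix}{source_key}{suffix}"

print(destination_key("reports/2021/data.csv"))
# -> archive/reports/2021/data.csv.bak
```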


@@ -1,5 +1,5 @@
 {
-  "name": "s3-inventory-move",
+  "name": "s3-inventory-copy",
   "version": "1.0.0",
   "description": "SLS packaging",
   "main": "''",
@@ -1,4 +1,4 @@
-service: s3-inventory-move
+service: s3-inventory-copy

 package:
   individually: false
@@ -57,7 +57,7 @@ def s3event_lambda_handler(event, context):

 def s3batch_lambda_handler(event, context):
     '''
-    Appends suffix or prefix to files and also moves to another bucket if required
+    Appends suffix or prefix to files and also copies to another bucket if required
     '''

     s3Client = boto3.client('s3')
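
The docstring change above describes the s3batch handler, which S3 Batch Operations invokes with one task per inventory row. A rough sketch of the general shape of such a handler is shown below, following the documented S3 Batch Operations Lambda invocation schema and the module's S3_DESTINATION_PREFIX and S3_DESTINATION_SUFFIX settings; the S3_DESTINATION_BUCKET variable and the overall structure are assumptions rather than code copied from the repository's lambda.py.

```python
# Illustrative sketch only - not the repository's lambda code.
import os
import urllib.parse
import boto3

def s3batch_lambda_handler(event, context):
    """Copy each object handed over by S3 Batch Operations, prepending and
    appending the configured prefix and suffix to the key."""
    s3 = boto3.client('s3')
    results = []

    for task in event['tasks']:
        source_bucket = task['s3BucketArn'].split(':::')[-1]
        source_key = urllib.parse.unquote_plus(task['s3Key'])

        # Destination defaults to the source bucket unless a destination link is set
        destination_bucket = os.environ.get('S3_DESTINATION_BUCKET', source_bucket)
        destination_key = (
            os.environ.get('S3_DESTINATION_PREFIX', '')
            + source_key
            + os.environ.get('S3_DESTINATION_SUFFIX', '')
        )

        try:
            s3.copy_object(
                Bucket=destination_bucket,
                Key=destination_key,
                CopySource={'Bucket': source_bucket, 'Key': source_key}
            )
            results.append({
                'taskId': task['taskId'],
                'resultCode': 'Succeeded',
                'resultString': destination_key
            })
        except Exception as error:
            results.append({
                'taskId': task['taskId'],
                'resultCode': 'PermanentFailure',
                'resultString': str(error)
            })

    # Report per-task results back to S3 Batch Operations
    return {
        'invocationSchemaVersion': event['invocationSchemaVersion'],
        'treatMissingKeysAs': 'PermanentFailure',
        'invocationId': event['invocationId'],
        'results': results
    }
```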
8 changes: 0 additions & 8 deletions s3-inventory-move/README.md

This file was deleted.
