Skip to content

Commit

Permalink
new lambda
Browse files Browse the repository at this point in the history
  • Loading branch information
joelbalcaen committed Apr 26, 2024
1 parent 793f8b6 commit d712d43
Show file tree
Hide file tree
Showing 6 changed files with 132 additions and 0 deletions.
64 changes: 64 additions & 0 deletions lambdas/document_filler/lambda.tf
Original file line number Diff line number Diff line change
@@ -0,0 +1,64 @@
# Shared configuration for the document-filler Lambda.
locals {
# Used as the function name, and below as the basis for the IAM role name
# and the CloudWatch log-group ARN in the write policy.
lambda_function_name = "levio-esta-document-filler"
timeout = 30
runtime = "python3.11"
# AWS-published "Lambda Powertools for Python v2" layer; 017000801446 is
# the AWS-owned account that distributes it, :67 pins the layer version.
powertools_layer_arn = "arn:aws:lambda:${var.aws_region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:67"
}

# Identity of the credentials applying this configuration (account id etc.).
# NOTE(review): not referenced in this file — presumably used elsewhere or
# left over; confirm before removing.
data "aws_caller_identity" "current" {}


# Packages ./src (index.py + requirements.txt) and deploys it as the
# document-filler Lambda, creating its IAM role and inline policies.
module "lambda_function_container_image" {
source = "terraform-aws-modules/lambda/aws"
function_name = local.lambda_function_name
# Entry point: lambda_handler in src/index.py.
handler = "index.lambda_handler"
publish = true
runtime = local.runtime
timeout = local.timeout
layers = [local.powertools_layer_arn]
source_path = "${path.module}/src"
# Bucket where the module stores the packaged deployment artifact.
s3_bucket = var.lambda_storage_bucket
memory_size = 256
role_name = "${local.lambda_function_name}-role"
attach_policy_statements = true

policy_statements = {
# Allow the function to create its log group on first invocation.
log_group = {
effect = "Allow"
actions = [
"logs:CreateLogGroup"
]
resources = [
"arn:aws:logs:*:*:*"
]
}

# Read/write access to the buckets the function fills documents in,
# scoped to the ARNs passed in via var.allowed_s3_resources.
s3 = {
effect = "Allow"
actions = [
"s3:Get*",
"s3:List*",
"s3:Describe*",
"s3:PutObject",
"s3-object-lambda:Get*",
"s3-object-lambda:List*",
"s3-object-lambda:WriteGetObjectResponse"
]
resources = var.allowed_s3_resources
}

# Allow writing log events, restricted to this function's log group.
log_write = {
effect = "Allow"

resources = [
"arn:aws:logs:*:*:log-group:/aws/${local.lambda_function_name}/*:*"
]

actions = [
"logs:CreateLogStream",
"logs:PutLogEvents",
]
}

}
}
7 changes: 7 additions & 0 deletions lambdas/document_filler/output.tf
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
# ARN of the deployed document-filler function, for wiring into invokers.
output "lambda_function_arn" {
value = module.lambda_function_container_image.lambda_function_arn
}

# Resolved function name (levio-esta-document-filler).
output "lambda_function_name" {
value = module.lambda_function_container_image.lambda_function_name
}
37 changes: 37 additions & 0 deletions lambdas/document_filler/src/index.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,37 @@
import boto3
from docx import Document
from io import BytesIO

# Module-level client so it is created once per container and reused
# across warm Lambda invocations.
s3 = boto3.client('s3')

def lambda_handler(event, context):
"""
Downloads the given docx and for each replacement item it replaces the matching document_key with the text_to_fill
"""
s3_arn = event['doc_s3_arn']
replacements = event['replacements']

bucket_name = s3_arn.split(':')[5].split('/')[0]
key = '/'.join(s3_arn.split(':')[5].split('/')[1:])

print(f"Download bucket: {bucket_name}, key: {key}")

file_obj = s3.get_object(Bucket=bucket_name, Key=key)
file_content = file_obj['Body'].read()

doc = Document(BytesIO(file_content))

for paragraph in doc.paragraphs:
for replacement in replacements:
if replacement['document_key'] in paragraph.text:
paragraph.text = paragraph.text.replace(replacement['document_key'], replacement['text_to_fill'])

output_stream = BytesIO()
doc.save(output_stream)

s3.put_object(Bucket=bucket_name, Key=key, Body=output_stream.getvalue())

return {
'statusCode': 200,
'body': f'Successfully modified {key} and uploaded to {bucket_name}'
}
1 change: 1 addition & 0 deletions lambdas/document_filler/src/requirements.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
python-docx
15 changes: 15 additions & 0 deletions lambdas/document_filler/variables.tf
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
# Bucket where the packaged Lambda deployment artifact is stored.
variable "lambda_storage_bucket" {
type = string
nullable = false
}

# Region used to build the Powertools layer ARN in locals.
variable "aws_region" {
type = string
nullable = false
}

# S3 resource ARNs the function's IAM policy grants read/write access to.
variable "allowed_s3_resources" {
type = list(string)
nullable = false
description = "values for the s3 resources that the lambda function can access"
}
8 changes: 8 additions & 0 deletions terraform/modules.tf
Original file line number Diff line number Diff line change
Expand Up @@ -275,4 +275,12 @@ module "bedrock_invoker" {
allowed_s3_resources = [module.s3_bucket.s3_bucket_arn, "${module.s3_bucket.s3_bucket_arn}/*"]
}

# Deploys the document-filler Lambda, granting it access to the shared
# document bucket (bucket itself plus all objects under it).
module "levio_esta_document_filler" {
source = "../lambdas/document_filler"
lambda_storage_bucket = aws_s3_bucket.lambda_storage.id
aws_region = var.aws_region
allowed_s3_resources = [module.s3_bucket.s3_bucket_arn, "${module.s3_bucket.s3_bucket_arn}/*"]
}



0 comments on commit d712d43

Please sign in to comment.