#
# This software is Copyright ©️ 2020 The University of Southern California. All Rights Reserved.
# Permission to use, copy, modify, and distribute this software and its documentation for educational, research and non-profit purposes, without fee, and without a written agreement is hereby granted, provided that the above copyright notice and subject to the full license file found in the root of this software deliverable. Permission to make commercial use of this software may be obtained by contacting: USC Stevens Center for Innovation University of Southern California 1150 S. Olive Street, Suite 2300, Los Angeles, CA 90115, USA Email: [email protected]
#
# The full terms of this copyright and license should always be found in the root directory of this software deliverable as "license.txt" and if these terms are not found with this software, please contact the USC Stevens Center for the full license.
#
import json
import boto3
import gzip
from base64 import b64decode
from module.utils import load_sentry, require_env, s3_bucket
from module.logger import get_logger
from module.transfer import process_transfer_mentor

load_sentry()
log = get_logger("transfer-process")
JOBS_TABLE_NAME = require_env("JOBS_TABLE_NAME")
log.info(f"using table {JOBS_TABLE_NAME}")
aws_region = require_env("REGION")
s3_client = boto3.client("s3", region_name=aws_region)
dynamodb = boto3.resource("dynamodb", region_name=aws_region)
job_table = dynamodb.Table(JOBS_TABLE_NAME)


def handler(event, context):
    log.info(event)
    # only process records newly inserted into the jobs table (stream records with a NewImage)
    records = list(
        filter(
            lambda r: r["eventName"] == "INSERT"
            and r["dynamodb"]
            and r["dynamodb"]["NewImage"],
            event["Records"],
        )
    )
    log.debug("records to process: %s", len(records))
    for record in records:
        # payload is a gzip-compressed JSON request, delivered by the stream as base64 under "B"
        payload = b64decode(record["dynamodb"]["NewImage"]["payload"]["B"])  # binary
        auth_headers = json.loads(record["dynamodb"]["NewImage"]["authHeaders"]["S"])
        request = json.loads(gzip.decompress(payload).decode("utf-8"))
        process_transfer_mentor(s3_client, s3_bucket, request, auth_headers)


# # for local debugging:
# if __name__ == '__main__':
#     with open('__events__/transfer-process-event.json.dist') as f:
#         event = json.loads(f.read())
#         handler(event, {})
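
# A minimal sketch (an assumption for illustration, not part of the deployed code) of the
# stream event shape this handler expects and of how a matching "payload" attribute could
# be produced; the attribute names mirror the lookups in handler(), but the request body
# ("mentor") and the auth header value are purely hypothetical:
#
#   import gzip, json
#   from base64 import b64encode
#
#   request = {"mentor": "example-mentor-id"}  # hypothetical request body
#   payload_b = b64encode(gzip.compress(json.dumps(request).encode("utf-8"))).decode("utf-8")
#   event = {
#       "Records": [
#           {
#               "eventName": "INSERT",
#               "dynamodb": {
#                   "NewImage": {
#                       "payload": {"B": payload_b},
#                       "authHeaders": {"S": json.dumps({"Authorization": "Bearer <token>"})},
#                   }
#               },
#           }
#       ]
#   }
#   handler(event, {})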