-
Notifications
You must be signed in to change notification settings - Fork 0
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Merge pull request #9 from wednesday-solutions/feat/glue-bash
Feat/glue-bash: Added automation script for glue job deployment
- Loading branch information
Showing
12 changed files
with
202 additions
and
34 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -14,6 +14,7 @@ jobs: | |
S3_BUCKET_NAME: ${{ secrets.S3_BUCKET_NAME }} | ||
S3_SCRIPTS_PATH: ${{ secrets.S3_SCRIPTS_PATH }} | ||
AWS_REGION: ${{ secrets.AWS_REGION }} | ||
AWS_GLUE_ROLE: ${{ secrets.AWS_GLUE_ROLE}} | ||
steps: | ||
- uses: actions/checkout@v2 | ||
|
||
|
@@ -22,12 +23,12 @@ jobs: | |
with: | ||
python-version: 3.9 | ||
|
||
- run: | | ||
- name: Build App Wheel | ||
run: | | ||
pip install setuptools wheel | ||
python3 setup.py bdist_wheel | ||
# Step 1: Copy script to S3 bucket | ||
- name: Copy script to S3 bucket | ||
- name: Setup AWS cli & upload App Wheel to S3 | ||
uses: jakejarvis/[email protected] | ||
with: | ||
args: --follow-symlinks | ||
|
@@ -36,7 +37,9 @@ jobs: | |
DEST_DIR: $S3_SCRIPTS_PATH | ||
AWS_S3_BUCKET: $S3_BUCKET_NAME | ||
|
||
- name: Upload Scripts to S3 | ||
run: aws s3 cp jobs "s3://$S3_BUCKET_NAME/$S3_SCRIPTS_PATH/" --recursive --region ap-south-1 | ||
|
||
- name: Upload Script file to S3 | ||
run: aws s3 cp ./main.py "s3://$S3_BUCKET_NAME/$S3_SCRIPTS_PATH/" --region ap-south-1 | ||
|
||
- name: Deploy Jobs on Glue | ||
run: | | ||
automation/deploy_glue_job.sh $S3_BUCKET_NAME $AWS_GLUE_ROLE $KAGGLE_TOKEN $KAGGLE_USERNAME |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file was deleted.
Oops, something went wrong.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,9 @@ | ||
# This is my custom file for read & write paths, based on the environment | ||
|
||
GLUE_READ_PATH="s3://glue-bucket-vighnesh/rawdata/" | ||
GLUE_WRITE_PATH="s3://glue-bucket-vighnesh/transformed/" | ||
|
||
DATABRICKS_READ_PATH="/mnt/rawdata/" | ||
DATABRICKS_WRITE_PATH="/mnt/transformed/" | ||
|
||
KAGGLE_PATH="mastmustu/insurance-claims-fraud-data" |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,32 @@ | ||
{ | ||
"Name": "samplename", | ||
"Description": "", | ||
"LogUri": "", | ||
"Role": "samplerole", | ||
"ExecutionProperty": { | ||
"MaxConcurrentRuns": 1 | ||
}, | ||
"Command": { | ||
"Name": "glueetl", | ||
"ScriptLocation": "sample-location", | ||
"PythonVersion": "3" | ||
}, | ||
"DefaultArguments": { | ||
"--enable-glue-datacatalog": "true", | ||
"--job-bookmark-option": "job-bookmark-disable", | ||
"--TempDir": "sample-bucket/Logs/temp/", | ||
"--enable-metrics": "true", | ||
"--extra-py-files": "sample-bucket/scripts/sample-wheel", | ||
"--spark-event-logs-path": "sample-bucket/Logs/UILogs/", | ||
"--enable-job-insights": "false", | ||
"--additional-python-modules": "python-dotenv,kaggle", | ||
"--enable-observability-metrics": "true", | ||
"--enable-continuous-cloudwatch-log": "true", | ||
"--job-language": "python" | ||
}, | ||
"MaxRetries": 0, | ||
"Timeout": 10, | ||
"WorkerType": "G.1X", | ||
"NumberOfWorkers": 2, | ||
"GlueVersion": "4.0" | ||
} |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,73 @@ | ||
#!/bin/bash
# Create or update an AWS Glue job for every job script in jobs/*.py.
#
# Usage: deploy_glue_job.sh <s3_bucket> <glue_role> <kaggle_key> <kaggle_username>
#
# For each script, a job payload is rendered from a JSON template with jq:
#   - automation/create_glue_job.json  when the job does not exist yet
#   - automation/update_glue_job.json  when it already exists
# and pushed with `aws glue create-job` / `aws glue update-job`.

set -euo pipefail  # abort on command failure, unset variables, or pipeline errors

s3_bucket="$1"
role="$2"
kaggle_key="$3"
kaggle_username="$4"

# Provides GLUE_READ_PATH, GLUE_WRITE_PATH and KAGGLE_PATH.
source ./app/.custom_env

# One existing Glue job name per line.
job_names=$(aws glue get-jobs | jq -r '.Jobs | map(.Name)[]')

for file in jobs/*.py; do
    filename=$(basename "$file" .py)

    # Package marker, not a deployable job.
    if [ "$filename" = "__init__" ]; then
        continue
    fi

    # jq bindings shared by both the create and the update payload.
    jq_args=(
        --arg NAME "$filename"
        --arg SCRIPT_LOCATION "s3://$s3_bucket/scripts/$filename.py"
        --arg ROLE "$role"
        --arg TEMP_DIR "s3://$s3_bucket/Logs/temp/"
        --arg EVENT_LOG "s3://$s3_bucket/Logs/UILogs/"
        --arg WHEEL "s3://$s3_bucket/scripts/app-0.9-py3-none-any.whl"
        --arg KAGGLE_KEY "$kaggle_key"
        --arg KAGGLE_USERNAME "$kaggle_username"
        --arg GLUE_READ_PATH "$GLUE_READ_PATH"
        --arg GLUE_WRITE_PATH "$GLUE_WRITE_PATH"
        --arg KAGGLE_PATH "$KAGGLE_PATH"
    )

    output="automation/output_$filename.json"

    # Exact whole-line match. The previous substring test
    # ([[ $job_names != *"$filename"* ]]) would wrongly treat job "foo"
    # as existing when only "foobar" does.
    if ! grep -Fxq "$filename" <<< "$job_names"; then
        jq "${jq_args[@]}" \
            '.Name=$NAME |
             .Command.ScriptLocation=$SCRIPT_LOCATION |
             .Role=$ROLE |
             .DefaultArguments["--TempDir"]=$TEMP_DIR |
             .DefaultArguments["--spark-event-logs-path"]=$EVENT_LOG |
             .DefaultArguments["--extra-py-files"]=$WHEEL |
             .DefaultArguments["--KAGGLE_KEY"]=$KAGGLE_KEY |
             .DefaultArguments["--KAGGLE_USERNAME"]=$KAGGLE_USERNAME |
             .DefaultArguments["--GLUE_READ_PATH"]=$GLUE_READ_PATH |
             .DefaultArguments["--GLUE_WRITE_PATH"]=$GLUE_WRITE_PATH |
             .DefaultArguments["--KAGGLE_PATH"]=$KAGGLE_PATH' \
            automation/create_glue_job.json > "$output"

        aws glue create-job --cli-input-json "file://$output"
    else
        jq "${jq_args[@]}" \
            '.JobName=$NAME |
             .JobUpdate.Command.ScriptLocation=$SCRIPT_LOCATION |
             .JobUpdate.Role=$ROLE |
             .JobUpdate.DefaultArguments["--TempDir"]=$TEMP_DIR |
             .JobUpdate.DefaultArguments["--spark-event-logs-path"]=$EVENT_LOG |
             .JobUpdate.DefaultArguments["--extra-py-files"]=$WHEEL |
             .JobUpdate.DefaultArguments["--KAGGLE_KEY"]=$KAGGLE_KEY |
             .JobUpdate.DefaultArguments["--KAGGLE_USERNAME"]=$KAGGLE_USERNAME |
             .JobUpdate.DefaultArguments["--GLUE_READ_PATH"]=$GLUE_READ_PATH |
             .JobUpdate.DefaultArguments["--GLUE_WRITE_PATH"]=$GLUE_WRITE_PATH |
             .JobUpdate.DefaultArguments["--KAGGLE_PATH"]=$KAGGLE_PATH' \
            automation/update_glue_job.json > "$output"

        aws glue update-job --cli-input-json "file://$output"
    fi
done
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,31 @@ | ||
{ | ||
"JobName": "sample-name", | ||
"JobUpdate": { | ||
"Description": "", | ||
"Role": "sample-role", | ||
"ExecutionProperty": { | ||
"MaxConcurrentRuns": 1 | ||
}, | ||
"Command": { | ||
"Name": "glueetl", | ||
"ScriptLocation": "sample-location", | ||
"PythonVersion": "3" | ||
}, | ||
"DefaultArguments": { | ||
"--enable-glue-datacatalog": "true", | ||
"--job-bookmark-option": "job-bookmark-enable", | ||
"--TempDir": "s3://sample-bucket/scripts/temp/", | ||
"--enable-metrics": "true", | ||
"--enable-spark-ui": "true", | ||
"--spark-event-logs-path": "s3://sample-bucket/Logs/UILogs/", | ||
"--enable-job-insights": "true", | ||
"--enable-continuous-cloudwatch-log": "true", | ||
"--job-language": "python" | ||
}, | ||
"MaxRetries": 0, | ||
"Timeout": 10, | ||
"WorkerType": "G.1X", | ||
"NumberOfWorkers": 2, | ||
"GlueVersion": "4.0" | ||
} | ||
} |
Empty file.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,30 @@ | ||
# This is a demo file for writing your transformations.
from dotenv import load_dotenv
import app.environment as env

# NOTE(review): the deploy script and env file use the underscore name
# app/.custom_env; this previously passed "app/.custom-env" (dash).
# load_dotenv silently no-ops on a missing file, so none of the
# GLUE_*/KAGGLE_* settings were actually loaded. Confirm the on-disk name.
load_dotenv("app/.custom_env")

# COMMAND ----------

# The Databricks runtime injects `dbutils` into the notebook's global
# namespace; its presence tells us which platform we are running on.
if "dbutils" in locals():
    databricks = True
else:
    # Not on Databricks (e.g. Glue or local): define the names so they can
    # be passed to env.get_data below without a NameError.
    spark = None
    dbutils = None
    databricks = False

# COMMAND ----------
# This is the example specific for "mastmustu/insurance-claims-fraud-data" data, different frames will be returned based on your data
# fmt: off

# Keep this flag True if you want to extract data from kaggle, else False
kaggle_extraction = True

[employee, insurance, vendor] = env.get_data(databricks, kaggle_extraction, dbutils, spark)  # pylint: disable=unbalanced-tuple-unpacking

write_path = env.get_write_path(databricks)

# fmt: on
# COMMAND ----------

# Write all your transformations below:
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters