Skip to content

Commit

Permalink
refactor: remove datapipeline script support (#324)
Browse files Browse the repository at this point in the history
- Removes the code that interacts with aws datapipelines
- Removes the pipeline registry type, as this should really be a
generic script registry item
  • Loading branch information
roleyfoley authored Sep 28, 2023
1 parent a1dcd32 commit be0d7a3
Show file tree
Hide file tree
Showing 8 changed files with 6 additions and 262 deletions.
34 changes: 0 additions & 34 deletions automation/jenkins/aws/buildPipeline.sh

This file was deleted.

2 changes: 1 addition & 1 deletion automation/jenkins/aws/buildSetup.sh
Original file line number Diff line number Diff line change
Expand Up @@ -117,7 +117,7 @@ for IMAGE_FORMAT in "${IMAGE_FORMATS_ARRAY[@]}"; do
[[ "${RESULT}" -eq 0 ]] && PRESENT=1
;;

lambda|pipeline|scripts|openapi|swagger|spa|contentnode)
lambda|scripts|openapi|swagger|spa|contentnode)
${AUTOMATION_DIR}/manageS3Registry.sh -v \
-u "${DEPLOYMENT_UNIT}" -g "${CODE_COMMIT}" -c "${REGISTRY_SCOPE}" \
-y "${IMAGE_FORMAT,,}" -f "${IMAGE_FORMAT,,}.zip"
Expand Down
6 changes: 3 additions & 3 deletions automation/jenkins/aws/manageBuildReferences.sh
Original file line number Diff line number Diff line change
Expand Up @@ -368,7 +368,7 @@ function main() {
-r "${ACCEPTANCE_TAG}" \
-c "${REGISTRY_SCOPE}" || return $?
;;
lambda|spa|contentnode|scripts|pipeline|dataset|rdssnapshot|openapi|swagger)
lambda|spa|contentnode|scripts|dataset|rdssnapshot|openapi|swagger)
${AUTOMATION_DIR}/manageS3Registry.sh -k \
-a "${IMAGE_PROVIDER}" \
-u "${REGISTRY_DEPLOYMENT_UNIT}" \
Expand Down Expand Up @@ -556,7 +556,7 @@ function main() {
-g "${CODE_COMMIT}" \
-c "${REGISTRY_SCOPE}" || local_image_available="false"
;;
lambda|pipeline|scripts|openapi|swagger|spa|contentnode)
lambda|scripts|openapi|swagger|spa|contentnode)
${AUTOMATION_DIR}/manageS3Registry.sh -v \
-y "${IMAGE_FORMAT,,}" \
-f "${IMAGE_FORMAT,,}.zip" \
Expand Down Expand Up @@ -614,7 +614,7 @@ function main() {
-z "${FROM_IMAGE_PROVIDER}" \
-c "${REGISTRY_SCOPE}" || remote_image_avaialble="false"
;;
lambda|pipeline|scripts|openapi|swagger|spa|contentnode)
lambda|scripts|openapi|swagger|spa|contentnode)
${AUTOMATION_DIR}/manageS3Registry.sh -p \
-y "${IMAGE_FORMAT,,}" \
-f "${IMAGE_FORMAT,,}.zip" \
Expand Down
2 changes: 1 addition & 1 deletion automation/jenkins/aws/manageImages.sh
Original file line number Diff line number Diff line change
Expand Up @@ -215,7 +215,7 @@ function main() {
fi
;;

lambda|pipeline|scripts|openapi|swagger|spa|contentnode)
lambda|scripts|openapi|swagger|spa|contentnode)
IMAGE_FILENAME="${FORMAT,,}.zip"
if [[ -n "${PATHS[index]}" ]]; then
pushd "$(pwd)" > /dev/null
Expand Down
28 changes: 0 additions & 28 deletions automation/jenkins/aws/runPipeline.sh

This file was deleted.

2 changes: 1 addition & 1 deletion automation/setContext.sh
Original file line number Diff line number Diff line change
Expand Up @@ -444,7 +444,7 @@ function main() {
defineGitProviderSettings "PRODUCT" "CODE" "${PRODUCT}" "${ENVIRONMENT}" "${PRODUCT_GIT_PROVIDER}"

# - local registry providers
REGISTRY_TYPES=("dataset" "docker" "lambda" "lambda_jar" "pipeline" "scripts" "swagger" "openapi" "spa" "contentnode" "rdssnapshot" )
REGISTRY_TYPES=("dataset" "docker" "lambda" "lambda_jar" "scripts" "swagger" "openapi" "spa" "contentnode" "rdssnapshot" )
for REGISTRY_TYPE in "${REGISTRY_TYPES[@]}"; do
defineRegistryProviderSettings "${REGISTRY_TYPE}" "PRODUCT" "" "${PRODUCT}" "${ENVIRONMENT}" "${ACCOUNT}"
done
Expand Down
149 changes: 0 additions & 149 deletions cli/runPipeline.sh

This file was deleted.

45 changes: 0 additions & 45 deletions execution/utility.sh
Original file line number Diff line number Diff line change
Expand Up @@ -1549,51 +1549,6 @@ function get_cognito_userpool_custom_distribution() {
aws --region "${region}" cognito-idp describe-user-pool-domain --domain ${domain} --query "DomainDescription.CloudFrontDistribution" --output text || return $?
}

# -- Data Pipeline --

#######################################
# Create an AWS Data Pipeline from a CLI input JSON file.
# Arguments:
#   $1 - AWS region
#   $2 - path to a create-pipeline CLI input JSON file
# Outputs:
#   Writes the new pipeline id to stdout
# Returns:
#   0 on success, aws exit code on API failure, 255 if no pipeline returned
#######################################
function create_data_pipeline() {
  local region="$1"; shift
  local configfile="$1"; shift

  local pipeline
  # Capture explicitly: a "|| return $?" inside $( ... ) only exits the
  # command-substitution subshell and does not propagate the failure here.
  pipeline="$(aws --region "${region}" datapipeline create-pipeline --cli-input-json "file://${configfile}")" || return $?

  if [[ -n "${pipeline}" ]]; then
    echo "${pipeline}" | jq -r '.pipelineId | select (.!=null)'
    return 0
  else
    fatal "Could not create pipeline"
    return 255
  fi
}

#######################################
# Push an updated definition and parameter set to an existing Data Pipeline,
# injecting the security group id created by the component's CloudFormation
# stack into the parameter values file first.
# Arguments:
#   $1 - AWS region
#   $2 - pipeline id
#   $3 - pipeline definition file
#   $4 - parameter objects file
#   $5 - parameter values file
#   $6 - CloudFormation stack name providing the security group
#   $7 - logical id of the security group output in that stack
# Outputs:
#   Logs pipeline details via info/fatal
# Returns:
#   0 on success, non-zero on lookup/API failure, 255 if the definition errored
#######################################
function update_data_pipeline() {
  local region="$1"; shift
  local pipelineid="$1"; shift
  local definitionfile="$1"; shift
  local parameterobjectfile="$1"; shift
  local parametervaluefile="$1"; shift
  local cfnStackName="$1"; shift
  local securityGroupId="$1"; shift

  # Add resources created during stack creation
  local securityGroup
  securityGroup="$(get_cloudformation_stack_output "${region}" "${cfnStackName}" "${securityGroupId}" "ref")" || return $?

  local arnLookupValueFile
  arnLookupValueFile="$(filePath "${parametervaluefile}")/ArnLookup-$(fileBase "${parametervaluefile}")"

  # Only the security group id is injected; the original also passed
  # $pipelineRole/$resourceRole jq args referencing variables never defined
  # in this function (a set -u failure) and unused by the filter - dropped.
  jq --arg securityGroup "${securityGroup}" \
    '.values.my_SECURITY_GROUP_ID = $securityGroup' \
    < "${parametervaluefile}" > "${arnLookupValueFile}" || return $?

  local pipeline_details
  pipeline_details="$(aws --region "${region}" datapipeline put-pipeline-definition \
    --pipeline-id "${pipelineid}" \
    --pipeline-definition "file://${definitionfile}" \
    --parameter-objects "file://${parameterobjectfile}" \
    --parameter-values-uri "file://${arnLookupValueFile}")" || return $?

  local pipeline_errored
  pipeline_errored="$(echo "${pipeline_details}" | jq -r '.errored')"

  if [[ "${pipeline_errored}" == "false" ]]; then
    info "Pipeline definition update successful"
    info "${pipeline_details}"
    return 0
  else
    fatal "Pipeline definition did not work as expected"
    fatal "${pipeline_details}"
    return 255
  fi
}

#-- DynamoDB --
function upsert_dynamodb_item() {
local region="$1"; shift
Expand Down

0 comments on commit be0d7a3

Please sign in to comment.