From c1fb5fc0c2b98b1b7b49f51a4a452df943e3c527 Mon Sep 17 00:00:00 2001
From: Peter Braun
Date: Fri, 1 Mar 2019 17:13:34 +0100
Subject: [PATCH] remove mounts for gpg and s3 secrets

---
 image/tools/entrypoint.sh                    | 12 +++--
 image/tools/lib/backend/s3.sh                | 54 +++++++++++++++----
 image/tools/lib/encryption/gpg.sh            | 36 ++++++++++---
 .../openshift/backup-cronjob-template.yaml   |  9 ++--
 templates/openshift/backup-job-template.yaml | 15 ++++--
 5 files changed, 98 insertions(+), 28 deletions(-)

diff --git a/image/tools/entrypoint.sh b/image/tools/entrypoint.sh
index cc53a9f..9233697 100755
--- a/image/tools/entrypoint.sh
+++ b/image/tools/entrypoint.sh
@@ -43,9 +43,15 @@ export HOME=$DEST
 component_dump_data $DEST
 echo '==> Component data dump completed'
 if [[ "$encryption_engine" ]]; then
-    encrypt_prepare $DEST
-    encrypted_files="$(encrypt_archive $ARCHIVES_DEST)"
-    echo '==> Data encryption completed'
+    check_encryption_enabled
+    if [[ $? -eq 0 ]]; then
+        encrypt_prepare ${DEST}
+        encrypted_files="$(encrypt_archive $ARCHIVES_DEST)"
+        echo '==> Data encryption completed'
+    else
+        echo "==> encryption secret not found. Skipping"
+        encrypted_files="$ARCHIVES_DEST/*"
+    fi
 else
     encrypted_files="$ARCHIVES_DEST/*"
 fi
diff --git a/image/tools/lib/backend/s3.sh b/image/tools/lib/backend/s3.sh
index cefc6cd..99cd38f 100644
--- a/image/tools/lib/backend/s3.sh
+++ b/image/tools/lib/backend/s3.sh
@@ -1,21 +1,55 @@
-#required env vars: AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY and AWS_S3_BUCKET_NAME
-#optional env vars: AWS_S3_BUCKET_SUFFIX
+function check_backup_enabled {
+    local result=$(oc get secret -n default ${BACKEND_SECRET_NAME} -o template --template='{{.metadata.name}}')
+    if [[ "$result" == "${BACKEND_SECRET_NAME}" ]]; then
+        return 0
+    else
+        return 1
+    fi
+}
+
+function get_s3_bucket_name {
+    echo "`oc get secret -n default ${BACKEND_SECRET_NAME} -o jsonpath='{.data.AWS_S3_BUCKET_NAME}' | base64 --decode`"
+}
+
+function get_s3_bucket_suffix {
+    echo "`oc get secret -n default ${BACKEND_SECRET_NAME} -o jsonpath='{.data.AWS_S3_BUCKET_SUFFIX}' | base64 --decode`"
+}
+
+function get_s3_key_id {
+    echo "`oc get secret -n default ${BACKEND_SECRET_NAME} -o jsonpath='{.data.AWS_ACCESS_KEY_ID}' | base64 --decode`"
+}
+
+function get_s3_access_key {
+    echo "`oc get secret -n default ${BACKEND_SECRET_NAME} -o jsonpath='{.data.AWS_SECRET_ACCESS_KEY}' | base64 --decode`"
+}
+
 function upload_archive {
-    file_list=$1
-    datestamp=$2
+    check_backup_enabled
+    if [[ $? -eq 1 ]]; then
+        echo "==> backend secret not found. Skipping"
+        return 0
+    fi
+
+    local file_list=$1
+    local datestamp=$2
+
+    local AWS_S3_BUCKET_NAME=$(get_s3_bucket_name)
+    local AWS_S3_BUCKET_SUFFIX="$(get_s3_bucket_suffix)"
+    local AWS_ACCESS_KEY_ID="$(get_s3_key_id)"
+    local AWS_SECRET_ACCESS_KEY="$(get_s3_access_key)"
 
     if [[ "$AWS_S3_BUCKET_SUFFIX" ]]; then
         bucket_folder="$3/$AWS_S3_BUCKET_SUFFIX"
-    else 
+    else
        bucket_folder=$3
     fi
 
-    for fname in $file_list; do
-        s3cmd put --progress $fname "s3://$AWS_S3_BUCKET_NAME/$bucket_folder/$datestamp/$(basename $fname)"
+    for fname in ${file_list}; do
+        s3cmd put --access_key ${AWS_ACCESS_KEY_ID} --secret_key ${AWS_SECRET_ACCESS_KEY} --progress ${fname} "s3://$AWS_S3_BUCKET_NAME/$bucket_folder/$datestamp/$(basename ${fname})"
         rc=$?
-        if [ $rc -ne 0 ]; then
-            echo "==> Upload $name: FAILED"
+        if [[ ${rc} -ne 0 ]]; then
+            echo "==> Upload $fname: FAILED"
             exit 1
         fi
     done
-}
\ No newline at end of file
+}
diff --git a/image/tools/lib/encryption/gpg.sh b/image/tools/lib/encryption/gpg.sh
index a7715e9..07ccc7e 100644
--- a/image/tools/lib/encryption/gpg.sh
+++ b/image/tools/lib/encryption/gpg.sh
@@ -1,20 +1,44 @@
+function check_encryption_enabled {
+    local result=$(oc get secret -n default ${ENCRYPTION_SECRET_NAME} -o template --template='{{.metadata.name}}')
+    if [[ "$result" == "${ENCRYPTION_SECRET_NAME}" ]]; then
+        return 0
+    else
+        return 1
+    fi
+}
+
+function get_public_key {
+    echo "`oc get secret ${ENCRYPTION_SECRET_NAME} -n default -o jsonpath={.data.GPG_PUBLIC_KEY} | base64 --decode`"
+}
+
+function get_trust_model {
+    echo "`oc get secret ${ENCRYPTION_SECRET_NAME} -n default -o jsonpath={.data.GPG_TRUST_MODEL} | base64 --decode`"
+}
+
+function get_recipient {
+    echo "`oc get secret ${ENCRYPTION_SECRET_NAME} -n default -o jsonpath={.data.GPG_RECIPIENT} | base64 --decode`"
+}
+
 function encrypt_prepare {
     dest=$1/gpg
     mkdir -p $dest
     key_path=$dest/gpg_public_key
-    echo -e "$GPG_PUBLIC_KEY" > $key_path
-
-    gpg --import $key_path
+    local key=$(get_public_key)
+    echo -e "${key}" > ${key_path}
+    gpg --import ${key_path}
     gpg --list-keys
 }
 
 function encrypt_archive {
     dest=$1
-    for fname in $dest/*; do
-        gpg --no-tty --batch --yes --encrypt --recipient "$GPG_RECIPIENT" --trust-model $GPG_TRUST_MODEL $fname
+    local recipient=$(get_recipient)
+    local trust=$(get_trust_model)
+
+    for fname in ${dest}/*; do
+        gpg --no-tty --batch --yes --encrypt --recipient "$recipient" --trust-model ${trust} ${fname}
         rc=$?
-        if [ $rc -ne 0 ]; then
+        if [[ ${rc} -ne 0 ]]; then
             echo "==> Encrypt $fname: FAILED"
             exit 1
         fi
     done
diff --git a/templates/openshift/backup-cronjob-template.yaml b/templates/openshift/backup-cronjob-template.yaml
index d376528..7c959f5 100644
--- a/templates/openshift/backup-cronjob-template.yaml
+++ b/templates/openshift/backup-cronjob-template.yaml
@@ -35,13 +35,14 @@ objects:
                 - "${ENCRYPTION}"
                 - "-d"
                 - "${DEBUG}"
+              env:
+                - name: BACKEND_SECRET_NAME
+                  value: ${BACKEND_SECRET_NAME}
+                - name: ENCRYPTION_SECRET_NAME
+                  value: ${ENCRYPTION_SECRET_NAME}
               envFrom:
                 - secretRef:
                     name: "${COMPONENT_SECRET_NAME}"
-                - secretRef:
-                    name: "${BACKEND_SECRET_NAME}"
-                - secretRef:
-                    name: "${ENCRYPTION_SECRET_NAME}"
               restartPolicy: Never
 parameters:
   - name: NAME
diff --git a/templates/openshift/backup-job-template.yaml b/templates/openshift/backup-job-template.yaml
index 4da2e80..e7f47e2 100644
--- a/templates/openshift/backup-job-template.yaml
+++ b/templates/openshift/backup-job-template.yaml
@@ -4,7 +4,7 @@ kind: Template
 metadata:
   name: "integreatly-job-backup-template"
   annotations:
-    description: 'Job for backing up integreatly data on deman'
+    description: 'Job for backing up integreatly data on demand'
 objects:
   - apiVersion: batch/v1
     kind: Job
@@ -34,13 +34,14 @@ objects:
             - "${ENCRYPTION}"
             - "-d"
             - "${DEBUG}"
+          env:
+            - name: BACKEND_SECRET_NAME
+              value: ${BACKEND_SECRET_NAME}
+            - name: ENCRYPTION_SECRET_NAME
+              value: ${ENCRYPTION_SECRET_NAME}
           envFrom:
             - secretRef:
                 name: "${COMPONENT_SECRET_NAME}"
-            - secretRef:
-                name: "${BACKEND_SECRET_NAME}"
-            - secretRef:
-                name: "${ENCRYPTION_SECRET_NAME}"
          restartPolicy: Never
 parameters:
   - name: NAME
@@ -54,15 +55,19 @@
     value: s3
   - name: ENCRYPTION
     description: Encryption engine to encrypt component archive before uploading it
+    value: gpg
   - name: COMPONENT_SECRET_NAME
     description: Component secret name to create environment variables from
     required: true
+    value: dummy
   - name: BACKEND_SECRET_NAME
     description: Backend secret name to create environment variables from
     required: true
+    value: s3-credentials
   - name: ENCRYPTION_SECRET_NAME
     description: Encruption secret name to create environment variables from
     required: true
+    value: gpg-credentials
   - name: IMAGE
     description: 'Backup docker image URL'
     value: 'quay.io/integreatly/backup-container:master'
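
Note, not part of the patch: after this change the scripts resolve BACKEND_SECRET_NAME and
ENCRYPTION_SECRET_NAME through `oc get secret -n default ...` at runtime, so the job only
needs the secret names instead of envFrom mounts of their contents. A minimal sketch of
matching secrets, assuming the new template defaults (s3-credentials, gpg-credentials);
the data keys mirror the jsonpath lookups in s3.sh and gpg.sh, and every value below is a
placeholder:

    # Placeholders only: substitute real credentials before use.
    oc create secret generic s3-credentials -n default \
        --from-literal=AWS_ACCESS_KEY_ID=<key-id> \
        --from-literal=AWS_SECRET_ACCESS_KEY=<secret-key> \
        --from-literal=AWS_S3_BUCKET_NAME=<bucket> \
        --from-literal=AWS_S3_BUCKET_SUFFIX=<optional-folder-suffix>

    # GPG_TRUST_MODEL must be a value gpg accepts for --trust-model (e.g. always).
    oc create secret generic gpg-credentials -n default \
        --from-file=GPG_PUBLIC_KEY=<path-to-exported-public-key> \
        --from-literal=GPG_TRUST_MODEL=always \
        --from-literal=GPG_RECIPIENT=<recipient-key-id-or-email>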
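
With the secrets in place, the job template can be instantiated with just the secret
names; a sketch, assuming the parameter defaults above and no required parameters beyond
the hunks shown:

    oc process -f templates/openshift/backup-job-template.yaml \
        -p NAME=manual-backup \
        -p BACKEND_SECRET_NAME=s3-credentials \
        -p ENCRYPTION_SECRET_NAME=gpg-credentials \
        | oc create -f -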