From ed40fd6a65a1311b783042f9e6f2e0a26491d306 Mon Sep 17 00:00:00 2001 From: Murad Khateeb Date: Wed, 6 Dec 2023 09:46:17 +0500 Subject: [PATCH 01/17] Adding digital ocean cluster connection setup --- scripts/run-builder.sh | 8 +++++++- scripts/setup_do.sh | 24 ++++++++++++++++++++++++ 2 files changed, 31 insertions(+), 1 deletion(-) create mode 100644 scripts/setup_do.sh diff --git a/scripts/run-builder.sh b/scripts/run-builder.sh index 3d9f441c99..54b0b32c03 100755 --- a/scripts/run-builder.sh +++ b/scripts/run-builder.sh @@ -12,7 +12,13 @@ mkdir -pv ~/.docker cp -v /var/lib/docker/certs/client/* ~/.docker touch ./builder-started.txt bash ./scripts/setup_helm.sh -bash ./scripts/setup_aws.sh $EKS_AWS_ACCESS_KEY_ID $EKS_AWS_ACCESS_KEY_SECRET $AWS_REGION $CLUSTER_NAME +if [[ "$CLOUD_PROVIDER" == "aws" ]]; then + bash ./scripts/setup_aws.sh $EKS_AWS_ACCESS_KEY_ID $EKS_AWS_ACCESS_KEY_SECRET $AWS_REGION $CLUSTER_NAME +elif [[ "$CLOUD_PROVIDER" == "do" ]]; then + bash ./scripts/setup_do.sh $DO_API_TOKEN $CLUSTER_NAME +else + echo "Please specifiy a valid Cloud provider" +fi npx cross-env ts-node --swc scripts/check-db-exists.ts npm run prepare-database npm run create-build-status diff --git a/scripts/setup_do.sh b/scripts/setup_do.sh new file mode 100644 index 0000000000..1d1b98a95b --- /dev/null +++ b/scripts/setup_do.sh @@ -0,0 +1,24 @@ +#!/bin/bash +set -e +set -x + +if [[ -z $(which doctl) ]]; then + curl -OL https://github.com/digitalocean/doctl/releases/download/v1.100.0/doctl-1.100.0-linux-amd64.tar.gz + tar xf doctl-1.100.0-linux-amd64.tar.gz + sudo mv doctl /usr/local/bin +fi + +set +x + +# Authenticate with DigitalOcean with access token +doctl auth init -t $1 + +# Set kubectl context to the existing cluster with its name provided as variable +doctl kubernetes cluster kubeconfig save $2 + +cat ~/.kube/config +# Verifying the cluster connection +kubectl cluster-info + +set -x +set +x From 11f1faec2fcc6c0fba0788c9d01111880cea8f30 Mon Sep 17 00:00:00 2001 From: 
Murad Khateeb Date: Tue, 12 Dec 2023 11:50:18 +0500 Subject: [PATCH 02/17] add code for logging into DOCR and creating repo --- scripts/build_and_publish_package.sh | 26 +++++++++++++++++++++----- 1 file changed, 21 insertions(+), 5 deletions(-) diff --git a/scripts/build_and_publish_package.sh b/scripts/build_and_publish_package.sh index cd286ccb49..39c2ae6e13 100755 --- a/scripts/build_and_publish_package.sh +++ b/scripts/build_and_publish_package.sh @@ -10,14 +10,30 @@ START_TIME=$5 REGION=$6 NODE_ENV=$7 PRIVATE_ECR=$8 +DOCR_API_TOKEN=$9 +DOCR_REGISTRY=${10} +CLOUD_PROVIDER=${11} -if [ $PRIVATE_ECR == "true" ] + +if [ $CLOUD_PROVIDER == "do" ] then - aws ecr get-login-password --region $REGION | docker login -u AWS --password-stdin $ECR_URL - aws ecr describe-repositories --repository-names $REPO_NAME-$PACKAGE --region $REGION || aws ecr create-repository --repository-name $REPO_NAME-$PACKAGE --region $REGION + docker login -u $DOCR_API_TOKEN -p $DOCR_API_TOKEN $DOCR_REGISTRY + docker inspect $REPO_NAME-$PACKAGE > /dev/null 2>&1 + if [ $? != 0 ]; then + echo "Repository '$REPO_NAME-$PACKAGE' already exists." + else + echo "Repository '$REPO_NAME-$PACKAGE' does not exist. Creating..." 
+ docker image push $REPO_NAME-$PACKAGE + fi else - aws ecr-public get-login-password --region us-east-1 | docker login -u AWS --password-stdin $ECR_URL - aws ecr-public describe-repositories --repository-names $REPO_NAME-$PACKAGE --region us-east-1 || aws ecr-public create-repository --repository-name $REPO_NAME-$PACKAGE --region us-east-1 + if [ $PRIVATE_ECR == "true" ] + then + aws ecr get-login-password --region $REGION | docker login -u AWS --password-stdin $ECR_URL + aws ecr describe-repositories --repository-names $REPO_NAME-$PACKAGE --region $REGION || aws ecr create-repository --repository-name $REPO_NAME-$PACKAGE --region $REGION + else + aws ecr-public get-login-password --region us-east-1 | docker login -u AWS --password-stdin $ECR_URL + aws ecr-public describe-repositories --repository-names $REPO_NAME-$PACKAGE --region us-east-1 || aws ecr-public create-repository --repository-name $REPO_NAME-$PACKAGE --region us-east-1 + fi #echo "PRUNED" From 5f66ee217b76be9d54aae988633458d2298a30f2 Mon Sep 17 00:00:00 2001 From: Murad Khateeb Date: Wed, 13 Dec 2023 07:32:28 +0500 Subject: [PATCH 03/17] Update AWS ECR references to DOCR --- scripts/build_and_publish_package.sh | 26 +--- scripts/build_and_publish_package_do.sh | 167 ++++++++++++++++++++++++ scripts/build_docker_builder_do.sh | 49 +++++++ scripts/run-builder.sh | 45 ++++--- 4 files changed, 245 insertions(+), 42 deletions(-) create mode 100755 scripts/build_and_publish_package_do.sh create mode 100755 scripts/build_docker_builder_do.sh diff --git a/scripts/build_and_publish_package.sh b/scripts/build_and_publish_package.sh index 39c2ae6e13..cd286ccb49 100755 --- a/scripts/build_and_publish_package.sh +++ b/scripts/build_and_publish_package.sh @@ -10,30 +10,14 @@ START_TIME=$5 REGION=$6 NODE_ENV=$7 PRIVATE_ECR=$8 -DOCR_API_TOKEN=$9 -DOCR_REGISTRY=${10} -CLOUD_PROVIDER=${11} - -if [ $CLOUD_PROVIDER == "do" ] +if [ $PRIVATE_ECR == "true" ] then - docker login -u $DOCR_API_TOKEN -p $DOCR_API_TOKEN 
$DOCR_REGISTRY - docker inspect $REPO_NAME-$PACKAGE > /dev/null 2>&1 - if [ $? != 0 ]; then - echo "Repository '$REPO_NAME-$PACKAGE' already exists." - else - echo "Repository '$REPO_NAME-$PACKAGE' does not exist. Creating..." - docker image push $REPO_NAME-$PACKAGE - fi + aws ecr get-login-password --region $REGION | docker login -u AWS --password-stdin $ECR_URL + aws ecr describe-repositories --repository-names $REPO_NAME-$PACKAGE --region $REGION || aws ecr create-repository --repository-name $REPO_NAME-$PACKAGE --region $REGION else - if [ $PRIVATE_ECR == "true" ] - then - aws ecr get-login-password --region $REGION | docker login -u AWS --password-stdin $ECR_URL - aws ecr describe-repositories --repository-names $REPO_NAME-$PACKAGE --region $REGION || aws ecr create-repository --repository-name $REPO_NAME-$PACKAGE --region $REGION - else - aws ecr-public get-login-password --region us-east-1 | docker login -u AWS --password-stdin $ECR_URL - aws ecr-public describe-repositories --repository-names $REPO_NAME-$PACKAGE --region us-east-1 || aws ecr-public create-repository --repository-name $REPO_NAME-$PACKAGE --region us-east-1 - + aws ecr-public get-login-password --region us-east-1 | docker login -u AWS --password-stdin $ECR_URL + aws ecr-public describe-repositories --repository-names $REPO_NAME-$PACKAGE --region us-east-1 || aws ecr-public create-repository --repository-name $REPO_NAME-$PACKAGE --region us-east-1 fi #echo "PRUNED" diff --git a/scripts/build_and_publish_package_do.sh b/scripts/build_and_publish_package_do.sh new file mode 100755 index 0000000000..30ca98ea8f --- /dev/null +++ b/scripts/build_and_publish_package_do.sh @@ -0,0 +1,167 @@ +#!/bin/bash +set -e +set -x + +STAGE=$1 +LABEL=$2 +PACKAGE=$3 +DOCKERFILE=$4 +START_TIME=$5 +REGION=$6 +NODE_ENV=$7 +PRIVATE_ECR=$8 +DOCR_REGISTRY=$9 + +doctl registry login --expiry-seconds 1800 + +#echo "PRUNED" +#docker buildx version +# +docker context create etherealengine-$PACKAGE +docker buildx create 
--driver=docker-container etherealengine-$PACKAGE --name etherealengine-$PACKAGE --driver-opt "image=moby/buildkit:v0.12.0" + +BUILD_START_TIME=`date +"%d-%m-%yT%H-%M-%S"` +echo "Starting ${PACKAGE} build at ${BUILD_START_TIME}" +if [ $PUBLISH_DOCKERHUB == 'true' ] && [ "$DOCKERFILE" != "client-serve-static" ] +then + echo "$DOCKER_HUB_PASSWORD" | docker login -u "$DOCKER_USERNAME" --password-stdin + + docker buildx build \ + --builder etherealengine-$PACKAGE \ + --push \ + -t $DOCR_REGISTRY/$REPO_NAME-$PACKAGE:${TAG}__${START_TIME} \ + -t $DOCR_REGISTRY/$REPO_NAME-$PACKAGE:latest_$STAGE \ + -t ${LABEL}-$PACKAGE:${TAG} \ + -f dockerfiles/$PACKAGE/Dockerfile-$DOCKERFILE \ + --cache-to type=registry,mode=max,image-manifest=true,ref=$DOCR_REGISTRY/$REPO_NAME-$PACKAGE:latest_${STAGE}_cache \ + --cache-from type=registry,ref=$DOCR_REGISTRY/$REPO_NAME-$PACKAGE:latest_${STAGE}_cache \ + --build-arg ECR_URL=$DOCR_REGISTRY \ + --build-arg REPO_NAME=$REPO_NAME \ + --build-arg STAGE=$STAGE \ + --build-arg NODE_ENV=$NODE_ENV \ + --build-arg STORAGE_PROVIDER=$STORAGE_PROVIDER \ + --build-arg STORAGE_CLOUDFRONT_DOMAIN=$STORAGE_CLOUDFRONT_DOMAIN \ + --build-arg STORAGE_CLOUDFRONT_DISTRIBUTION_ID=$STORAGE_CLOUDFRONT_DISTRIBUTION_ID \ + --build-arg STORAGE_S3_STATIC_RESOURCE_BUCKET=$STORAGE_S3_STATIC_RESOURCE_BUCKET \ + --build-arg STORAGE_AWS_ACCESS_KEY_ID=$STORAGE_AWS_ACCESS_KEY_ID \ + --build-arg STORAGE_AWS_ACCESS_KEY_SECRET=$STORAGE_AWS_ACCESS_KEY_SECRET \ + --build-arg STORAGE_S3_REGION=$STORAGE_S3_REGION \ + --build-arg STORAGE_S3_AVATAR_DIRECTORY=$STORAGE_S3_AVATAR_DIRECTORY \ + --build-arg SERVE_CLIENT_FROM_STORAGE_PROVIDER=$SERVE_CLIENT_FROM_STORAGE_PROVIDER \ + --build-arg MYSQL_HOST=$MYSQL_HOST \ + --build-arg MYSQL_USER=$MYSQL_USER \ + --build-arg MYSQL_PORT=$MYSQL_PORT \ + --build-arg MYSQL_PASSWORD=$MYSQL_PASSWORD \ + --build-arg MYSQL_DATABASE=$MYSQL_DATABASE \ + --build-arg VITE_APP_HOST=$VITE_APP_HOST \ + --build-arg VITE_APP_PORT=$VITE_APP_PORT \ + --build-arg 
VITE_PWA_ENABLED=$VITE_PWA_ENABLED \ + --build-arg VITE_SERVER_HOST=$VITE_SERVER_HOST \ + --build-arg VITE_SERVER_PORT=$VITE_SERVER_PORT \ + --build-arg VITE_FILE_SERVER=$VITE_FILE_SERVER \ + --build-arg VITE_MEDIATOR_SERVER=$VITE_MEDIATOR_SERVER \ + --build-arg VITE_LOGIN_WITH_WALLET=$VITE_LOGIN_WITH_WALLET \ + --build-arg VITE_8TH_WALL=$VITE_8TH_WALL \ + --build-arg VITE_INSTANCESERVER_HOST=$VITE_INSTANCESERVER_HOST \ + --build-arg VITE_INSTANCESERVER_PORT=$VITE_INSTANCESERVER_PORT \ + --build-arg VITE_LOCAL_BUILD=$VITE_LOCAL_BUILD \ + --build-arg VITE_READY_PLAYER_ME_URL=$VITE_READY_PLAYER_ME_URL \ + --build-arg VITE_DISABLE_LOG=$VITE_DISABLE_LOG \ + --build-arg VITE_AVATURN_URL=$VITE_AVATURN_URL \ + --build-arg VITE_AVATURN_API=$VITE_AVATURN_API . +elif [ "$DOCKERFILE" == "client-serve-static" ] +then + docker buildx build \ + --builder etherealengine-$PACKAGE \ + -f dockerfiles/$PACKAGE/Dockerfile-$DOCKERFILE \ + --cache-to type=registry,mode=max,image-manifest=true,ref=$DOCR_REGISTRY/$REPO_NAME-$PACKAGE:latest_${STAGE}_cache \ + --cache-from type=registry,ref=$DOCR_REGISTRY/$REPO_NAME-$PACKAGE:latest_${STAGE}_cache \ + --build-arg ECR_URL=$DOCR_REGISTRY \ + --build-arg REPO_NAME=$REPO_NAME \ + --build-arg STAGE=$STAGE \ + --build-arg NODE_ENV=$NODE_ENV \ + --build-arg STORAGE_PROVIDER=$STORAGE_PROVIDER \ + --build-arg STORAGE_CLOUDFRONT_DOMAIN=$STORAGE_CLOUDFRONT_DOMAIN \ + --build-arg STORAGE_CLOUDFRONT_DISTRIBUTION_ID=$STORAGE_CLOUDFRONT_DISTRIBUTION_ID \ + --build-arg STORAGE_S3_STATIC_RESOURCE_BUCKET=$STORAGE_S3_STATIC_RESOURCE_BUCKET \ + --build-arg STORAGE_AWS_ACCESS_KEY_ID=$STORAGE_AWS_ACCESS_KEY_ID \ + --build-arg STORAGE_AWS_ACCESS_KEY_SECRET=$STORAGE_AWS_ACCESS_KEY_SECRET \ + --build-arg STORAGE_S3_REGION=$STORAGE_S3_REGION \ + --build-arg STORAGE_S3_AVATAR_DIRECTORY=$STORAGE_S3_AVATAR_DIRECTORY \ + --build-arg SERVE_CLIENT_FROM_STORAGE_PROVIDER=$SERVE_CLIENT_FROM_STORAGE_PROVIDER \ + --build-arg MYSQL_HOST=$MYSQL_HOST \ + --build-arg 
MYSQL_USER=$MYSQL_USER \ + --build-arg MYSQL_PORT=$MYSQL_PORT \ + --build-arg MYSQL_PASSWORD=$MYSQL_PASSWORD \ + --build-arg MYSQL_DATABASE=$MYSQL_DATABASE \ + --build-arg VITE_APP_HOST=$VITE_APP_HOST \ + --build-arg VITE_APP_PORT=$VITE_APP_PORT \ + --build-arg VITE_PWA_ENABLED=$VITE_PWA_ENABLED \ + --build-arg VITE_SERVER_HOST=$VITE_SERVER_HOST \ + --build-arg VITE_SERVER_PORT=$VITE_SERVER_PORT \ + --build-arg VITE_FILE_SERVER=$VITE_FILE_SERVER \ + --build-arg VITE_MEDIATOR_SERVER=$VITE_MEDIATOR_SERVER \ + --build-arg VITE_LOGIN_WITH_WALLET=$VITE_LOGIN_WITH_WALLET \ + --build-arg VITE_8TH_WALL=$VITE_8TH_WALL \ + --build-arg VITE_INSTANCESERVER_HOST=$VITE_INSTANCESERVER_HOST \ + --build-arg VITE_INSTANCESERVER_PORT=$VITE_INSTANCESERVER_PORT \ + --build-arg VITE_LOCAL_BUILD=$VITE_LOCAL_BUILD \ + --build-arg VITE_READY_PLAYER_ME_URL=$VITE_READY_PLAYER_ME_URL \ + --build-arg VITE_DISABLE_LOG=$VITE_DISABLE_LOG \ + --build-arg VITE_AVATURN_URL=$VITE_AVATURN_URL \ + --build-arg VITE_AVATURN_API=$VITE_AVATURN_API . 
+else + docker buildx build \ + --builder etherealengine-$PACKAGE \ + --push \ + -t $DOCR_REGISTRY/$REPO_NAME-$PACKAGE:${TAG}__${START_TIME} \ + -t $DOCR_REGISTRY/$REPO_NAME-$PACKAGE:latest_$STAGE \ + -f dockerfiles/$PACKAGE/Dockerfile-$DOCKERFILE \ + --cache-to type=registry,mode=max,image-manifest=true,ref=$DOCR_REGISTRY/$REPO_NAME-$PACKAGE:latest_${STAGE}_cache \ + --cache-from type=registry,ref=$DOCR_REGISTRY/$REPO_NAME-$PACKAGE:latest_${STAGE}_cache \ + --build-arg ECR_URL=$DOCR_REGISTRY \ + --build-arg REPO_NAME=$REPO_NAME \ + --build-arg STAGE=$STAGE \ + --build-arg NODE_ENV=$NODE_ENV \ + --build-arg STORAGE_PROVIDER=$STORAGE_PROVIDER \ + --build-arg STORAGE_CLOUDFRONT_DOMAIN=$STORAGE_CLOUDFRONT_DOMAIN \ + --build-arg STORAGE_CLOUDFRONT_DISTRIBUTION_ID=$STORAGE_CLOUDFRONT_DISTRIBUTION_ID \ + --build-arg STORAGE_S3_STATIC_RESOURCE_BUCKET=$STORAGE_S3_STATIC_RESOURCE_BUCKET \ + --build-arg STORAGE_AWS_ACCESS_KEY_ID=$STORAGE_AWS_ACCESS_KEY_ID \ + --build-arg STORAGE_AWS_ACCESS_KEY_SECRET=$STORAGE_AWS_ACCESS_KEY_SECRET \ + --build-arg STORAGE_S3_REGION=$STORAGE_S3_REGION \ + --build-arg STORAGE_S3_AVATAR_DIRECTORY=$STORAGE_S3_AVATAR_DIRECTORY \ + --build-arg SERVE_CLIENT_FROM_STORAGE_PROVIDER=$SERVE_CLIENT_FROM_STORAGE_PROVIDER \ + --build-arg MYSQL_HOST=$MYSQL_HOST \ + --build-arg MYSQL_USER=$MYSQL_USER \ + --build-arg MYSQL_PORT=$MYSQL_PORT \ + --build-arg MYSQL_PASSWORD=$MYSQL_PASSWORD \ + --build-arg MYSQL_DATABASE=$MYSQL_DATABASE \ + --build-arg VITE_APP_HOST=$VITE_APP_HOST \ + --build-arg VITE_APP_PORT=$VITE_APP_PORT \ + --build-arg VITE_PWA_ENABLED=$VITE_PWA_ENABLED \ + --build-arg VITE_SERVER_HOST=$VITE_SERVER_HOST \ + --build-arg VITE_SERVER_PORT=$VITE_SERVER_PORT \ + --build-arg VITE_FILE_SERVER=$VITE_FILE_SERVER \ + --build-arg VITE_MEDIATOR_SERVER=$VITE_MEDIATOR_SERVER \ + --build-arg VITE_LOGIN_WITH_WALLET=$VITE_LOGIN_WITH_WALLET \ + --build-arg VITE_8TH_WALL=$VITE_8TH_WALL \ + --build-arg VITE_INSTANCESERVER_HOST=$VITE_INSTANCESERVER_HOST \ + 
--build-arg VITE_INSTANCESERVER_PORT=$VITE_INSTANCESERVER_PORT \ + --build-arg VITE_LOCAL_BUILD=$VITE_LOCAL_BUILD \ + --build-arg VITE_READY_PLAYER_ME_URL=$VITE_READY_PLAYER_ME_URL \ + --build-arg VITE_DISABLE_LOG=$VITE_DISABLE_LOG \ + --build-arg VITE_AVATURN_URL=$VITE_AVATURN_URL \ + --build-arg VITE_AVATURN_API=$VITE_AVATURN_API . +fi + +# The following scripts will need to be updated for DOCR but are not critical for the functionality of EE on DO. +if [ $PRIVATE_ECR == "true" ] +then + node ./scripts/prune_ecr_images.js --repoName $REPO_NAME-$PACKAGE --region $REGION --service $PACKAGE --releaseName $STAGE +else + node ./scripts/prune_ecr_images.js --repoName $REPO_NAME-$PACKAGE --region us-east-1 --service $PACKAGE --releaseName $STAGE --public +fi + +BUILD_END_TIME=`date +"%d-%m-%yT%H-%M-%S"` +echo "${PACKAGE} build started at ${BUILD_START_TIME}, ended at ${BUILD_END_TIME}" \ No newline at end of file diff --git a/scripts/build_docker_builder_do.sh b/scripts/build_docker_builder_do.sh new file mode 100755 index 0000000000..b30e41e7e4 --- /dev/null +++ b/scripts/build_docker_builder_do.sh @@ -0,0 +1,49 @@ +#!/bin/bash +set -e +set -x + +STAGE=$1 +TAG=$2 +LABEL=$3 +REGION=$4 +PRIVATE_ECR=$5 +$DOCR_REGISTRY=$6 +EEVERSION=$(jq -r .version ./packages/server-core/package.json) + +docker buildx create --use --driver=docker-container + +doctl registry login --expiry-seconds 1800 + +if [ $PUBLISH_DOCKERHUB == 'true' ] +then + echo "$DOCKER_HUB_PASSWORD" | docker login -u "$DOCKER_USERNAME" --password-stdin + + docker buildx build \ + --push \ + --cache-to type=gha,mode=max \ + --cache-from type=gha \ + -t $DOCR_REGISTRY/$REPO_NAME-builder:latest_$STAGE \ + -t $DOCR_REGISTRY/$REPO_NAME-builder:"${EEVERSION}_${TAG}" \ + -t ${LABEL}-builder:"${EEVERSION}_${TAG}" \ + -f dockerfiles/builder/Dockerfile-builder . 
+else + docker buildx build \ + --push \ + --cache-to type=gha,mode=max \ + --cache-from type=gha \ + -t $DOCR_REGISTRY/$REPO_NAME-builder:latest_$STAGE \ + -t $DOCR_REGISTRY/$REPO_NAME-builder:"${EEVERSION}_${TAG}" \ + -f dockerfiles/builder/Dockerfile-builder . +fi + +# The following scripts will need to be updated for DOCR but are not critical for the functionality of EE on DO. +if [ $PRIVATE_ECR == "true" ] +then + node ./scripts/prune_ecr_images.js --repoName $REPO_NAME-builder --region $REGION --service builder --releaseName $STAGE +else + node ./scripts/prune_ecr_images.js --repoName $REPO_NAME-builder --region us-east-1 --service builder --releaseName $STAGE --public +fi + +# cache links to use once ECR supports cache manifests +# --cache-to type=registry,ref=$ECR_URL/$REPO_NAME-builder:latest_"${STAGE}"_cache,mode=max \ +# --cache-from $ECR_URL/$REPO_NAME-builder:latest_"${STAGE}"_cache \ \ No newline at end of file diff --git a/scripts/run-builder.sh b/scripts/run-builder.sh index 54b0b32c03..cae9586ee7 100755 --- a/scripts/run-builder.sh +++ b/scripts/run-builder.sh @@ -12,12 +12,10 @@ mkdir -pv ~/.docker cp -v /var/lib/docker/certs/client/* ~/.docker touch ./builder-started.txt bash ./scripts/setup_helm.sh -if [[ "$CLOUD_PROVIDER" == "aws" ]]; then - bash ./scripts/setup_aws.sh $EKS_AWS_ACCESS_KEY_ID $EKS_AWS_ACCESS_KEY_SECRET $AWS_REGION $CLUSTER_NAME -elif [[ "$CLOUD_PROVIDER" == "do" ]]; then +if [[ "$CLOUD_PROVIDER" == "do" ]]; then bash ./scripts/setup_do.sh $DO_API_TOKEN $CLUSTER_NAME else - echo "Please specifiy a valid Cloud provider" + bash ./scripts/setup_aws.sh $EKS_AWS_ACCESS_KEY_ID $EKS_AWS_ACCESS_KEY_SECRET $AWS_REGION $CLUSTER_NAME fi npx cross-env ts-node --swc scripts/check-db-exists.ts npm run prepare-database @@ -42,19 +40,24 @@ test -s populate-assetlinks-build-error.txt && npm run record-build-error -- --s fi bash ./scripts/cleanup_builder.sh $DOCKER_LABEL - -if [ $PRIVATE_ECR == "true" ] -then - aws ecr get-login-password --region 
$AWS_REGION | docker login -u AWS --password-stdin $ECR_URL +BUILD_PUBLISH_SCRIPT="build_and_publish_package.sh" +if [[ "$CLOUD_PROVIDER" == "do" ]]; then + BUILD_PUBLISH_SCRIPT="build_and_publish_package_do.sh" + doctl registry login --expiry-seconds 1800 else - aws ecr-public get-login-password --region us-east-1 | docker login -u AWS --password-stdin $ECR_URL + if [ $PRIVATE_ECR == "true" ] + then + aws ecr get-login-password --region $AWS_REGION | docker login -u AWS --password-stdin $ECR_URL + else + aws ecr-public get-login-password --region us-east-1 | docker login -u AWS --password-stdin $ECR_URL + fi fi mkdir -p ./project-package-jsons/projects/default-project cp packages/projects/default-project/package.json ./project-package-jsons/projects/default-project find packages/projects/projects/ -name package.json -exec bash -c 'mkdir -p ./project-package-jsons/$(dirname $1) && cp $1 ./project-package-jsons/$(dirname $1)' - '{}' \; -bash ./scripts/build_and_publish_package.sh $RELEASE_NAME $DOCKER_LABEL root root $START_TIME $AWS_REGION $NODE_ENV $PRIVATE_ECR >root-build-logs.txt 2>root-build-error.txt +bash ./scripts/$BUILD_PUBLISH_SCRIPT $RELEASE_NAME $DOCKER_LABEL root root $START_TIME $AWS_REGION $NODE_ENV $PRIVATE_ECR $DOCR_REGISTRY >root-build-logs.txt 2>root-build-error.txt npm run record-build-error -- --service=root --isDocker=true npm install -g cli @aws-sdk/client-s3 @@ -63,10 +66,10 @@ if [ "$SERVE_CLIENT_FROM_STORAGE_PROVIDER" = "true" ] && [ "$STORAGE_PROVIDER" = then npx cross-env ts-node --swc scripts/get-deletable-client-files.ts - bash ./scripts/build_and_publish_package.sh $RELEASE_NAME $DOCKER_LABEL api api $START_TIME $AWS_REGION $NODE_ENV $PRIVATE_ECR >api-build-logs.txt 2>api-build-error.txt & - bash ./scripts/build_and_publish_package.sh $RELEASE_NAME $DOCKER_LABEL client client-serve-static $START_TIME $AWS_REGION $NODE_ENV $PRIVATE_ECR >client-build-logs.txt 2>client-build-error.txt & - bash ./scripts/build_and_publish_package.sh 
$RELEASE_NAME $DOCKER_LABEL instanceserver instanceserver $START_TIME $AWS_REGION $NODE_ENV $PRIVATE_ECR >instanceserver-build-logs.txt 2>instanceserver-build-error.txt & - bash ./scripts/build_and_publish_package.sh $RELEASE_NAME $DOCKER_LABEL taskserver taskserver $START_TIME $AWS_REGION $NODE_ENV $PRIVATE_ECR >taskserver-build-logs.txt 2>taskserver-build-error.txt & + bash ./scripts/$BUILD_PUBLISH_SCRIPT $RELEASE_NAME $DOCKER_LABEL api api $START_TIME $AWS_REGION $NODE_ENV $PRIVATE_ECR $DOCR_REGISTRY >api-build-logs.txt 2>api-build-error.txt & + bash ./scripts/$BUILD_PUBLISH_SCRIPT $RELEASE_NAME $DOCKER_LABEL client client-serve-static $START_TIME $AWS_REGION $NODE_ENV $PRIVATE_ECR $DOCR_REGISTRY >client-build-logs.txt 2>client-build-error.txt & + bash ./scripts/$BUILD_PUBLISH_SCRIPT $RELEASE_NAME $DOCKER_LABEL instanceserver instanceserver $START_TIME $AWS_REGION $NODE_ENV $PRIVATE_ECR $DOCR_REGISTRY>instanceserver-build-logs.txt 2>instanceserver-build-error.txt & + bash ./scripts/$BUILD_PUBLISH_SCRIPT $RELEASE_NAME $DOCKER_LABEL taskserver taskserver $START_TIME $AWS_REGION $NODE_ENV $PRIVATE_ECR $DOCR_REGISTRY>taskserver-build-logs.txt 2>taskserver-build-error.txt & #bash ./scripts/build_and_publish_package.sh $RELEASE_NAME $DOCKER_LABEL testbot testbot $START_TIME $AWS_REGION $NODE_ENV $PRIVATE_ECR >testbot-build-logs.txt 2>testbot-build-error.txt && & wait < <(jobs -p) @@ -78,9 +81,9 @@ then #npm run record-build-error -- --service=testbot --isDocker=true elif [ "$SERVE_CLIENT_FROM_API" = "true" ] then - bash ./scripts/build_and_publish_package.sh $RELEASE_NAME $DOCKER_LABEL api api-client $START_TIME $AWS_REGION $NODE_ENV $PRIVATE_ECR >api-build-logs.txt 2>api-build-error.txt & - bash ./scripts/build_and_publish_package.sh $RELEASE_NAME $DOCKER_LABEL instanceserver instanceserver $START_TIME $AWS_REGION $NODE_ENV $PRIVATE_ECR >instanceserver-build-logs.txt 2>instanceserver-build-error.txt & - bash ./scripts/build_and_publish_package.sh $RELEASE_NAME 
$DOCKER_LABEL taskserver taskserver $START_TIME $AWS_REGION $NODE_ENV $PRIVATE_ECR >taskserver-build-logs.txt 2>taskserver-build-error.txt & + bash ./scripts/$BUILD_PUBLISH_SCRIPT $RELEASE_NAME $DOCKER_LABEL api api-client $START_TIME $AWS_REGION $NODE_ENV $PRIVATE_ECR $DOCR_REGISTRY >api-build-logs.txt 2>api-build-error.txt & + bash ./scripts/$BUILD_PUBLISH_SCRIPT $RELEASE_NAME $DOCKER_LABEL instanceserver instanceserver $START_TIME $AWS_REGION $NODE_ENV $PRIVATE_ECR $DOCR_REGISTRY >instanceserver-build-logs.txt 2>instanceserver-build-error.txt & + bash ./scripts/$BUILD_PUBLISH_SCRIPT $RELEASE_NAME $DOCKER_LABEL taskserver taskserver $START_TIME $AWS_REGION $NODE_ENV $PRIVATE_ECR $DOCR_REGISTRY >taskserver-build-logs.txt 2>taskserver-build-error.txt & #bash ./scripts/build_and_publish_package.sh $RELEASE_NAME $DOCKER_LABEL testbot testbot $START_TIME $AWS_REGION $NODE_ENV $PRIVATE_ECR >testbot-build-logs.txt 2>testbot-build-error.txt && & wait < <(jobs -p) @@ -90,10 +93,10 @@ then npm run record-build-error -- --service=taskserver --isDocker=true #npm run record-build-error -- --service=testbot --isDocker=true else - bash ./scripts/build_and_publish_package.sh $RELEASE_NAME $DOCKER_LABEL api api $START_TIME $AWS_REGION $NODE_ENV $PRIVATE_ECR >api-build-logs.txt 2>api-build-error.txt & - bash ./scripts/build_and_publish_package.sh $RELEASE_NAME $DOCKER_LABEL client client $START_TIME $AWS_REGION $NODE_ENV $PRIVATE_ECR >client-build-logs.txt 2>client-build-error.txt & - bash ./scripts/build_and_publish_package.sh $RELEASE_NAME $DOCKER_LABEL instanceserver instanceserver $START_TIME $AWS_REGION $NODE_ENV $PRIVATE_ECR >instanceserver-build-logs.txt 2>instanceserver-build-error.txt & - bash ./scripts/build_and_publish_package.sh $RELEASE_NAME $DOCKER_LABEL taskserver taskserver $START_TIME $AWS_REGION $NODE_ENV $PRIVATE_ECR >taskserver-build-logs.txt 2>taskserver-build-error.txt & + bash ./scripts/$BUILD_PUBLISH_SCRIPT $RELEASE_NAME $DOCKER_LABEL api api $START_TIME 
$AWS_REGION $NODE_ENV $PRIVATE_ECR $DOCR_REGISTRY >api-build-logs.txt 2>api-build-error.txt & + bash ./scripts/$BUILD_PUBLISH_SCRIPT $RELEASE_NAME $DOCKER_LABEL client client $START_TIME $AWS_REGION $NODE_ENV $PRIVATE_ECR $DOCR_REGISTRY >client-build-logs.txt 2>client-build-error.txt & + bash ./scripts/$BUILD_PUBLISH_SCRIPT $RELEASE_NAME $DOCKER_LABEL instanceserver instanceserver $START_TIME $AWS_REGION $NODE_ENV $PRIVATE_ECR $DOCR_REGISTRY >instanceserver-build-logs.txt 2>instanceserver-build-error.txt & + bash ./scripts/$BUILD_PUBLISH_SCRIPT $RELEASE_NAME $DOCKER_LABEL taskserver taskserver $START_TIME $AWS_REGION $NODE_ENV $PRIVATE_ECR $DOCR_REGISTRY >taskserver-build-logs.txt 2>taskserver-build-error.txt & #bash ./scripts/build_and_publish_package.sh $RELEASE_NAME $DOCKER_LABEL testbot testbot $START_TIME $AWS_REGION $NODE_ENV $PRIVATE_ECR >testbot-build-logs.txt 2>testbot-build-error.txt && & wait < <(jobs -p) From ddf1e623e7edacd9b2e109ac6a52b4a9125bc5e1 Mon Sep 17 00:00:00 2001 From: Murad Khateeb Date: Fri, 15 Dec 2023 16:24:53 +0500 Subject: [PATCH 04/17] Add code for builder fixes for DO cluster --- .../media/storageprovider/s3-do.storage.ts | 87 +++++++++++++++++++ .../src/media/storageprovider/s3.storage.ts | 11 ++- .../media/storageprovider/storageprovider.ts | 13 ++- packages/server-core/src/updateAppConfig.ts | 12 +-- scripts/setup_do.sh | 2 +- 5 files changed, 113 insertions(+), 12 deletions(-) create mode 100755 packages/server-core/src/media/storageprovider/s3-do.storage.ts diff --git a/packages/server-core/src/media/storageprovider/s3-do.storage.ts b/packages/server-core/src/media/storageprovider/s3-do.storage.ts new file mode 100755 index 0000000000..939ed44ccc --- /dev/null +++ b/packages/server-core/src/media/storageprovider/s3-do.storage.ts @@ -0,0 +1,87 @@ +/* +CPAL-1.0 License + +The contents of this file are subject to the Common Public Attribution License +Version 1.0. 
(the "License"); you may not use this file except in compliance +with the License. You may obtain a copy of the License at +https://github.com/EtherealEngine/etherealengine/blob/dev/LICENSE. +The License is based on the Mozilla Public License Version 1.1, but Sections 14 +and 15 have been added to cover use of software over a computer network and +provide for limited attribution for the Original Developer. In addition, +Exhibit A has been modified to be consistent with Exhibit B. + +Software distributed under the License is distributed on an "AS IS" basis, +WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License for the +specific language governing rights and limitations under the License. + +The Original Code is Ethereal Engine. + +The Original Developer is the Initial Developer. The Initial Developer of the +Original Code is the Ethereal Engine team. + +All portions of the code written by the Ethereal Engine team are Copyright © 2021-2023 +Ethereal Engine. All Rights Reserved. +*/ + +import { FunctionSummary } from '@aws-sdk/client-cloudfront' + +import S3Provider from './s3.storage' + +const MAX_ITEMS = 1 +const CFFunctionTemplate = ` +function handler(event) { + var request = event.request; + var routeRegexRoot = __$routeRegex$__ + var routeRegex = new RegExp(routeRegexRoot) + var publicRegexRoot = __$publicRegex$__ + var publicRegex = new RegExp(publicRegexRoot) + + if (routeRegex.test(request.uri)) { + request.uri = '/client/index.html' + } + + if (publicRegex.test(request.uri)) { + request.uri = '/client' + request.uri + } + return request; +} +` + +/** + * Storage provide class to communicate with AWS S3 API. + */ +export class S3DOProvider extends S3Provider { + /** + * Invalidate items in the S3 storage. + * @param invalidationItems List of keys. 
+ */ + override async createInvalidation(invalidationItems: string[]) { + return undefined + } + + override async getOriginURLs(): Promise { + return [this.cacheDomain] + } + + override async listFunctions(marker: string | null, functions: FunctionSummary[]): Promise { + return [] + } + + async createFunction(functionName: string, routes: string[]) { + return undefined + } + + async associateWithFunction(functionARN: string, attempts = 1) { + return undefined + } + + async publishFunction(functionName: string) { + return undefined + } + + async updateFunction(functionName: string, routes: string[]) { + return undefined + } +} + +export default S3DOProvider diff --git a/packages/server-core/src/media/storageprovider/s3.storage.ts b/packages/server-core/src/media/storageprovider/s3.storage.ts index e3a34adc4e..3f170c1b44 100755 --- a/packages/server-core/src/media/storageprovider/s3.storage.ts +++ b/packages/server-core/src/media/storageprovider/s3.storage.ts @@ -26,6 +26,7 @@ Ethereal Engine. All Rights Reserved. 
import { CloudFrontClient, CreateFunctionCommand, + CreateFunctionCommandOutput, CreateInvalidationCommand, DescribeFunctionCommand, FunctionRuntime, @@ -34,8 +35,10 @@ import { ListFunctionsCommand, ListFunctionsCommandInput, PublishFunctionCommand, + PublishFunctionCommandOutput, UpdateDistributionCommand, - UpdateFunctionCommand + UpdateFunctionCommand, + UpdateFunctionCommandOutput } from '@aws-sdk/client-cloudfront' import { AbortMultipartUploadCommand, @@ -494,7 +497,7 @@ export class S3Provider implements StorageProviderInterface { ) } - async createFunction(functionName: string, routes: string[]) { + async createFunction(functionName: string, routes: string[]): Promise { const code = this.getFunctionCode(routes) const params = { Name: functionName, @@ -543,7 +546,7 @@ export class S3Provider implements StorageProviderInterface { } } - async publishFunction(functionName: string) { + async publishFunction(functionName: string): Promise { const functionDetailsParams = { Name: functionName } @@ -557,7 +560,7 @@ export class S3Provider implements StorageProviderInterface { return await this.cloudfront.send(command) } - async updateFunction(functionName: string, routes: string[]) { + async updateFunction(functionName: string, routes: string[]): Promise { const code = this.getFunctionCode(routes) const functionDetailsParams = { Name: functionName diff --git a/packages/server-core/src/media/storageprovider/storageprovider.ts b/packages/server-core/src/media/storageprovider/storageprovider.ts index 040db75dfb..ca7cd5c72e 100755 --- a/packages/server-core/src/media/storageprovider/storageprovider.ts +++ b/packages/server-core/src/media/storageprovider/storageprovider.ts @@ -26,6 +26,7 @@ Ethereal Engine. All Rights Reserved. 
import config from '../../appconfig' import IPFSStorage from './ipfs.storage' import LocalStorage from './local.storage' +import S3DOStorage from './s3-do.storage' import S3Storage from './s3.storage' import { StorageProviderInterface } from './storageprovider.interface' @@ -58,8 +59,16 @@ export const createIPFSStorageProvider = async () => { } export const createDefaultStorageProvider = () => { - const StorageProvider = - config.server.storageProvider !== 's3' && config.server.storageProvider !== 'ipfs' ? LocalStorage : S3Storage + let StorageProvider + + if (config.server.storageProvider === 's3-do') { + StorageProvider = S3DOStorage + } else if (config.server.storageProvider !== 's3' && config.server.storageProvider !== 'ipfs') { + StorageProvider = LocalStorage + } else { + StorageProvider = S3Storage + } + const provider = createStorageProvider(StorageProvider) providers['default'] = provider return provider diff --git a/packages/server-core/src/updateAppConfig.ts b/packages/server-core/src/updateAppConfig.ts index 434d5f9d94..cb1cf1bb25 100644 --- a/packages/server-core/src/updateAppConfig.ts +++ b/packages/server-core/src/updateAppConfig.ts @@ -134,11 +134,13 @@ export const updateAppConfig = async (): Promise => { .select() .from(awsSettingPath) .then(([dbAws]) => { - const dbAwsConfig = awsDbToSchema(dbAws) - if (dbAwsConfig) { - appConfig.aws = { - ...appConfig.aws, - ...dbAwsConfig + if (dbAws) { + const dbAwsConfig = awsDbToSchema(dbAws) + if (dbAwsConfig) { + appConfig.aws = { + ...appConfig.aws, + ...dbAwsConfig + } } } }) diff --git a/scripts/setup_do.sh b/scripts/setup_do.sh index 1d1b98a95b..38d31333dd 100644 --- a/scripts/setup_do.sh +++ b/scripts/setup_do.sh @@ -5,7 +5,7 @@ set -x if [[ -z $(which doctl) ]]; then curl -OL https://github.com/digitalocean/doctl/releases/download/v1.100.0/doctl-1.100.0-linux-amd64.tar.gz tar xf doctl-1.100.0-linux-amd64.tar.gz - sudo mv doctl /usr/local/bin + mv doctl /usr/local/bin fi set +x From 
abdaf380ca2632610b3f356b65cfb52bf73fc5fc Mon Sep 17 00:00:00 2001 From: Murad Khateeb Date: Mon, 18 Dec 2023 12:40:15 +0500 Subject: [PATCH 05/17] Ammend code for DO specific operations --- .../src/projects/project/project.resolvers.ts | 2 +- packages/server-core/src/updateAppConfig.ts | 18 ++++++++---------- scripts/run-builder.sh | 4 ++-- 3 files changed, 11 insertions(+), 13 deletions(-) diff --git a/packages/server-core/src/projects/project/project.resolvers.ts b/packages/server-core/src/projects/project/project.resolvers.ts index 236c00160d..c5aba6fd59 100644 --- a/packages/server-core/src/projects/project/project.resolvers.ts +++ b/packages/server-core/src/projects/project/project.resolvers.ts @@ -86,7 +86,7 @@ export const projectResolver = resolve( { // Convert the raw data into a new structure before running property resolvers converter: async (rawData, context) => { - return projectDbToSchema(rawData) + return rawData ? projectDbToSchema(rawData) : undefined } } ) diff --git a/packages/server-core/src/updateAppConfig.ts b/packages/server-core/src/updateAppConfig.ts index cb1cf1bb25..82f3bd4316 100644 --- a/packages/server-core/src/updateAppConfig.ts +++ b/packages/server-core/src/updateAppConfig.ts @@ -108,7 +108,7 @@ export const updateAppConfig = async (): Promise => { .select() .from(authenticationSettingPath) .then(([dbAuthentication]) => { - const dbAuthenticationConfig = authenticationDbToSchema(dbAuthentication) + let dbAuthenticationConfig = dbAuthentication ? authenticationDbToSchema(dbAuthentication) : undefined if (dbAuthenticationConfig) { const authStrategies = ['jwt'] for (let authStrategy of dbAuthenticationConfig.authStrategies) { @@ -134,13 +134,11 @@ export const updateAppConfig = async (): Promise => { .select() .from(awsSettingPath) .then(([dbAws]) => { - if (dbAws) { - const dbAwsConfig = awsDbToSchema(dbAws) - if (dbAwsConfig) { - appConfig.aws = { - ...appConfig.aws, - ...dbAwsConfig - } + const dbAwsConfig = dbAws ? 
awsDbToSchema(dbAws) : undefined + if (dbAwsConfig) { + appConfig.aws = { + ...appConfig.aws, + ...dbAwsConfig } } }) @@ -185,7 +183,7 @@ export const updateAppConfig = async (): Promise => { .select() .from(clientSettingPath) .then(([dbClient]) => { - const dbClientConfig = clientDbToSchema(dbClient) + const dbClientConfig = dbClient ? clientDbToSchema(dbClient) : undefined if (dbClientConfig) { appConfig.client = { ...appConfig.client, @@ -257,7 +255,7 @@ export const updateAppConfig = async (): Promise => { .select() .from(serverSettingPath) .then(([dbServer]) => { - const dbServerConfig = serverDbToSchema(dbServer) + const dbServerConfig = dbServer ? serverDbToSchema(dbServer) : undefined if (dbServerConfig) { appConfig.server = { ...appConfig.server, diff --git a/scripts/run-builder.sh b/scripts/run-builder.sh index cae9586ee7..499e39f1f3 100755 --- a/scripts/run-builder.sh +++ b/scripts/run-builder.sh @@ -62,7 +62,7 @@ npm run record-build-error -- --service=root --isDocker=true npm install -g cli @aws-sdk/client-s3 -if [ "$SERVE_CLIENT_FROM_STORAGE_PROVIDER" = "true" ] && [ "$STORAGE_PROVIDER" = "s3" ] +if [ "$SERVE_CLIENT_FROM_STORAGE_PROVIDER" = "true" ] && {[ "$STORAGE_PROVIDER" = "s3" ] || [ "$STORAGE_PROVIDER" = "s3-do" ]} ; then npx cross-env ts-node --swc scripts/get-deletable-client-files.ts @@ -121,7 +121,7 @@ bash ./scripts/cleanup_builder.sh $DOCKER_LABEL END_TIME=`date +"%d-%m-%yT%H-%M-%S"` echo "Started build at $START_TIME, deployed image to K8s at $DEPLOY_TIME, ended at $END_TIME" sleep 3m -if [ "$SERVE_CLIENT_FROM_STORAGE_PROVIDER" = "true" ] && [ "$STORAGE_PROVIDER" = "s3" ] ; then +if [ "$SERVE_CLIENT_FROM_STORAGE_PROVIDER" = "true" ] && {[ "$STORAGE_PROVIDER" = "s3" ] || [ "$STORAGE_PROVIDER" = "s3-do" ]} ; then npx cross-env ts-node --swc scripts/delete-old-s3-files.ts; echo "Deleted old client files from S3" fi From cbdc6f20296fce7390a567a026d0fa6ddb556684 Mon Sep 17 00:00:00 2001 From: Murad Khateeb Date: Mon, 1 Jan 2024 10:23:51 +0500 
Subject: [PATCH 06/17] Push local changes to remote origion --- .../client/Dockerfile-client-serve-static | 2 +- packages/server-core/src/appconfig.ts | 2 +- .../media/storageprovider/s3-do.storage.ts | 4 ++++ .../src/media/storageprovider/s3.storage.ts | 21 ++++++++++++------- .../media/storageprovider/storageprovider.ts | 4 ++++ scripts/build_docker_builder_do.sh | 19 +++++++---------- scripts/push-client-to-s3.ts | 10 +++++++++ scripts/run-builder.sh | 19 +++++++++-------- 8 files changed, 51 insertions(+), 30 deletions(-) diff --git a/dockerfiles/client/Dockerfile-client-serve-static b/dockerfiles/client/Dockerfile-client-serve-static index 36046c3180..df569267cd 100755 --- a/dockerfiles/client/Dockerfile-client-serve-static +++ b/dockerfiles/client/Dockerfile-client-serve-static @@ -86,7 +86,7 @@ RUN npm run build-client RUN npx cross-env ts-node --swc scripts/push-client-to-s3.ts -RUN npx cross-env ts-node --swc scripts/update-cloudfront-function.ts --stage=$STAGE +# RUN npx cross-env ts-node --swc scripts/update-cloudfront-function.ts --stage=$STAGE RUN rm -r packages/client/public diff --git a/packages/server-core/src/appconfig.ts b/packages/server-core/src/appconfig.ts index e90a36b9de..490cb8b768 100755 --- a/packages/server-core/src/appconfig.ts +++ b/packages/server-core/src/appconfig.ts @@ -326,7 +326,7 @@ const aws = { s3: { accessKeyId: process.env.STORAGE_AWS_ACCESS_KEY_ID!, secretAccessKey: process.env.STORAGE_AWS_ACCESS_KEY_SECRET!, - endpoint: process.env.STORAGE_S3_ENDPOINT!, + endpoint: process.env.STORAGE_S3_ENDPOINT ? process.env.STORAGE_S3_ENDPOINT : 'https://sfo2.digitaloceanspaces.com', staticResourceBucket: testEnabled ? process.env.STORAGE_S3_TEST_RESOURCE_BUCKET! 
: process.env.STORAGE_S3_STATIC_RESOURCE_BUCKET!, diff --git a/packages/server-core/src/media/storageprovider/s3-do.storage.ts b/packages/server-core/src/media/storageprovider/s3-do.storage.ts index 939ed44ccc..334bebb4fb 100755 --- a/packages/server-core/src/media/storageprovider/s3-do.storage.ts +++ b/packages/server-core/src/media/storageprovider/s3-do.storage.ts @@ -51,6 +51,10 @@ function handler(event) { * Storage provide class to communicate with AWS S3 API. */ export class S3DOProvider extends S3Provider { + constructor() { + super() + this.bucketAssetURL = 'https://etherealengine-static-resources.sfo2.digitaloceanspaces.com' + } /** * Invalidate items in the S3 storage. * @param invalidationItems List of keys. diff --git a/packages/server-core/src/media/storageprovider/s3.storage.ts b/packages/server-core/src/media/storageprovider/s3.storage.ts index 3f170c1b44..8d577b662f 100755 --- a/packages/server-core/src/media/storageprovider/s3.storage.ts +++ b/packages/server-core/src/media/storageprovider/s3.storage.ts @@ -110,6 +110,8 @@ function handler(event) { */ export class S3Provider implements StorageProviderInterface { constructor() { + console.log('S3Provider is being called and minioClient is ', this.minioClient) + console.log('App Config', config.aws) if (!this.minioClient) this.getOriginURLs().then((result) => (this.originURLs = result)) } /** @@ -158,7 +160,7 @@ export class S3Provider implements StorageProviderInterface { * Domain address of S3 cache. */ cacheDomain = - config.server.storageProvider === 's3' + config.server.storageProvider === 's3-do' ? config.aws.s3.endpoint ? `${config.aws.s3.endpoint.replace('http://', '').replace('https://', '')}/${this.bucket}` : config.aws.cloudfront.domain @@ -166,8 +168,8 @@ export class S3Provider implements StorageProviderInterface { originURLs = [this.cacheDomain] - private bucketAssetURL = - config.server.storageProvider === 's3' + public bucketAssetURL = + config.server.storageProvider === 's3-do' ? 
config.aws.s3.endpoint ? `${config.aws.s3.endpoint}/${this.bucket}` : config.aws.s3.s3DevMode === 'local' @@ -241,9 +243,14 @@ export class S3Provider implements StorageProviderInterface { * @param key Key of object. */ async getObject(key: string): Promise { + console.log('[DO] Inside Get Object DO3') const data = new GetObjectCommand({ Bucket: this.bucket, Key: key }) + console.log('[DO] bucket name is, ', this.bucket) + console.log('[DO] endpoint is ', this.provider.config.endpoint) + console.log('[DO] region is ', this.provider.config.region) const response = await this.provider.send(data) const body = await buffer(response.Body as Readable) + console.log('[DO] Sent was succcessfull') return { Body: body, ContentType: response.ContentType! } } @@ -303,7 +310,7 @@ export class S3Provider implements StorageProviderInterface { if (!data.Key) return // key should not contain '/' at the begining const key = data.Key[0] === '/' ? data.Key.substring(1) : data.Key - + console.log('[DO] Key is ', key) const args = params.isDirectory ? 
{ ACL: ObjectCannedACL.public_read, @@ -413,7 +420,7 @@ export class S3Provider implements StorageProviderInterface { async createInvalidation(invalidationItems: string[]) { if (!invalidationItems || invalidationItems.length === 0) return // for non-standard s3 setups, we don't use cloudfront - if (config.server.storageProvider !== 's3' || config.aws.s3.s3DevMode === 'local') return + if (config.server.storageProvider !== 's3-do' || config.aws.s3.s3DevMode === 'local') return const params = { DistributionId: config.aws.cloudfront.distributionId, InvalidationBatch: { @@ -431,7 +438,7 @@ export class S3Provider implements StorageProviderInterface { } async getOriginURLs(): Promise { - if (config.server.storageProvider !== 's3' || config.aws.s3.s3DevMode === 'local') return [this.cacheDomain] + if (config.server.storageProvider !== 's3-do' || config.aws.s3.s3DevMode === 'local') return [this.cacheDomain] const getDistributionParams = { Id: config.aws.cloudfront.distributionId } @@ -442,7 +449,7 @@ export class S3Provider implements StorageProviderInterface { } async listFunctions(marker: string | null, functions: FunctionSummary[]): Promise { - if (config.server.storageProvider !== 's3') return [] + if (config.server.storageProvider !== 's3-do') return [] const params: ListFunctionsCommandInput = { MaxItems: MAX_ITEMS } diff --git a/packages/server-core/src/media/storageprovider/storageprovider.ts b/packages/server-core/src/media/storageprovider/storageprovider.ts index ca7cd5c72e..bcf0d941e8 100755 --- a/packages/server-core/src/media/storageprovider/storageprovider.ts +++ b/packages/server-core/src/media/storageprovider/storageprovider.ts @@ -62,14 +62,18 @@ export const createDefaultStorageProvider = () => { let StorageProvider if (config.server.storageProvider === 's3-do') { + console.log('[DO] Storage provider is S3DOSTorage') StorageProvider = S3DOStorage } else if (config.server.storageProvider !== 's3' && config.server.storageProvider !== 'ipfs') { + 
console.log('[DO] Storage provider is LocalStorage') StorageProvider = LocalStorage } else { + console.log('[DO] Storage provider is S3Storage') StorageProvider = S3Storage } const provider = createStorageProvider(StorageProvider) providers['default'] = provider + console.log('provider', provider) return provider } diff --git a/scripts/build_docker_builder_do.sh b/scripts/build_docker_builder_do.sh index b30e41e7e4..14b1567340 100755 --- a/scripts/build_docker_builder_do.sh +++ b/scripts/build_docker_builder_do.sh @@ -2,14 +2,14 @@ set -e set -x -STAGE=$1 -TAG=$2 -LABEL=$3 -REGION=$4 -PRIVATE_ECR=$5 -$DOCR_REGISTRY=$6 +STAGE="dig" +TAG="dig-do-7.8.8" +LABEL="etherealengine/etherealengine" +DOCR_REGISTRY="registry.digitalocean.com/etherealengine" +REPO_NAME="etherealengine" EEVERSION=$(jq -r .version ./packages/server-core/package.json) +echo "Entering the script" docker buildx create --use --driver=docker-container doctl registry login --expiry-seconds 1800 @@ -37,12 +37,7 @@ else fi # The following scripts will need to be updated for DOCR but are not critical for the functionality of EE on DO. 
-if [ $PRIVATE_ECR == "true" ] -then - node ./scripts/prune_ecr_images.js --repoName $REPO_NAME-builder --region $REGION --service builder --releaseName $STAGE -else - node ./scripts/prune_ecr_images.js --repoName $REPO_NAME-builder --region us-east-1 --service builder --releaseName $STAGE --public -fi + # cache links to use once ECR supports cache manifests # --cache-to type=registry,ref=$ECR_URL/$REPO_NAME-builder:latest_"${STAGE}"_cache,mode=max \ diff --git a/scripts/push-client-to-s3.ts b/scripts/push-client-to-s3.ts index 3667bba875..cbfb2a279a 100644 --- a/scripts/push-client-to-s3.ts +++ b/scripts/push-client-to-s3.ts @@ -43,16 +43,22 @@ cli.enable('status') cli.main(async () => { try { + console.log('[DO] Starting to push objects to SOS') await createDefaultStorageProvider() const storageProvider = getStorageProvider() + console.log('[DO] Trying to resolve path now and the path is ', appRootPath.path) const clientPath = path.resolve(appRootPath.path, `packages/client/dist`) const files = getFilesRecursive(clientPath) + console.log('[DO] Completed Get files recursive and files count is ', files.length) + console.log('[DO] Now trying to get object from SOS') let filesToPruneResponse = await storageProvider.getObject('client/S3FilesToRemove.json') + console.log('[DO] files to delete are', filesToPruneResponse) const filesToPush: string[] = [] await Promise.all( files.map((file) => { return new Promise(async (resolve) => { try { + console.log('[DO] About to push files') const fileResult = fs.readFileSync(file) let filePathRelative = processFileName(file.slice(clientPath.length)) let contentType = getContentType(file) @@ -67,8 +73,12 @@ cli.main(async () => { putData.ContentEncoding = 'br' putData.Key = `client${filePathRelative}` } + console.log('[DO] Files relative path is, ', filePathRelative) + console.log('[DO] Trying to push object to SOS') await storageProvider.putObject(putData, { isDirectory: false }) + console.log('[DO] Pushed object to SOS by pushing 
filePathRelative') filesToPush.push(`client${filePathRelative}`) + console.log('[DO] Sucessfully pushed files') resolve(null) } catch (e) { logger.error(e) diff --git a/scripts/run-builder.sh b/scripts/run-builder.sh index 499e39f1f3..f015954288 100755 --- a/scripts/run-builder.sh +++ b/scripts/run-builder.sh @@ -62,7 +62,7 @@ npm run record-build-error -- --service=root --isDocker=true npm install -g cli @aws-sdk/client-s3 -if [ "$SERVE_CLIENT_FROM_STORAGE_PROVIDER" = "true" ] && {[ "$STORAGE_PROVIDER" = "s3" ] || [ "$STORAGE_PROVIDER" = "s3-do" ]} ; +if [ "$SERVE_CLIENT_FROM_STORAGE_PROVIDER" = "true" ] && [ "$STORAGE_PROVIDER" = "s3-do" ] ; then npx cross-env ts-node --swc scripts/get-deletable-client-files.ts @@ -121,18 +121,19 @@ bash ./scripts/cleanup_builder.sh $DOCKER_LABEL END_TIME=`date +"%d-%m-%yT%H-%M-%S"` echo "Started build at $START_TIME, deployed image to K8s at $DEPLOY_TIME, ended at $END_TIME" sleep 3m -if [ "$SERVE_CLIENT_FROM_STORAGE_PROVIDER" = "true" ] && {[ "$STORAGE_PROVIDER" = "s3" ] || [ "$STORAGE_PROVIDER" = "s3-do" ]} ; then - npx cross-env ts-node --swc scripts/delete-old-s3-files.ts; - echo "Deleted old client files from S3" -fi + +# if [ "$SERVE_CLIENT_FROM_STORAGE_PROVIDER" = "true" ] && {[ "$STORAGE_PROVIDER" = "s3" ] || [ "$STORAGE_PROVIDER" = "s3-do" ]} ; then +# npx cross-env ts-node --swc scripts/delete-old-s3-files.ts; +# echo "Deleted old client files from S3" +# fi echo $(kubectl get jobs | grep $RELEASE_NAME-builder-etherealengine-builder) if [ -z "$(kubectl get jobs | grep $RELEASE_NAME-builder-etherealengine-builder)" ] then echo "Non-job builder, sleeping" sleep infinity -else - echo "Job-based builder, killing docker container" - pkill dockerd - pkill docker-init +# else +# echo "Job-based builder, killing docker container" +# pkill dockerd +# pkill docker-init fi \ No newline at end of file From f8142648291afd3498939d2b3776638b13b30938 Mon Sep 17 00:00:00 2001 From: Murad Khateeb Date: Thu, 4 Jan 2024 21:14:45 +0500 
Subject: [PATCH 07/17] Fix location errors and remove redundatnt logs --- .../src/media/storageprovider/s3.storage.ts | 9 +++------ .../src/media/storageprovider/storageprovider.ts | 3 --- packages/server/src/start.ts | 6 +----- scripts/build_docker_builder_do.sh | 2 +- scripts/push-client-to-s3.ts | 10 ---------- 5 files changed, 5 insertions(+), 25 deletions(-) diff --git a/packages/server-core/src/media/storageprovider/s3.storage.ts b/packages/server-core/src/media/storageprovider/s3.storage.ts index 8d577b662f..bdfe337614 100755 --- a/packages/server-core/src/media/storageprovider/s3.storage.ts +++ b/packages/server-core/src/media/storageprovider/s3.storage.ts @@ -160,10 +160,12 @@ export class S3Provider implements StorageProviderInterface { * Domain address of S3 cache. */ cacheDomain = - config.server.storageProvider === 's3-do' + config.server.storageProvider === 's3' ? config.aws.s3.endpoint ? `${config.aws.s3.endpoint.replace('http://', '').replace('https://', '')}/${this.bucket}` : config.aws.cloudfront.domain + : config.server.storageProvider === 's3-do' + ? config.aws.cloudfront.domain : `${config.aws.cloudfront.domain}/${this.bucket}` originURLs = [this.cacheDomain] @@ -243,14 +245,9 @@ export class S3Provider implements StorageProviderInterface { * @param key Key of object. */ async getObject(key: string): Promise { - console.log('[DO] Inside Get Object DO3') const data = new GetObjectCommand({ Bucket: this.bucket, Key: key }) - console.log('[DO] bucket name is, ', this.bucket) - console.log('[DO] endpoint is ', this.provider.config.endpoint) - console.log('[DO] region is ', this.provider.config.region) const response = await this.provider.send(data) const body = await buffer(response.Body as Readable) - console.log('[DO] Sent was succcessfull') return { Body: body, ContentType: response.ContentType! 
} } diff --git a/packages/server-core/src/media/storageprovider/storageprovider.ts b/packages/server-core/src/media/storageprovider/storageprovider.ts index bcf0d941e8..5f5866f91f 100755 --- a/packages/server-core/src/media/storageprovider/storageprovider.ts +++ b/packages/server-core/src/media/storageprovider/storageprovider.ts @@ -62,13 +62,10 @@ export const createDefaultStorageProvider = () => { let StorageProvider if (config.server.storageProvider === 's3-do') { - console.log('[DO] Storage provider is S3DOSTorage') StorageProvider = S3DOStorage } else if (config.server.storageProvider !== 's3' && config.server.storageProvider !== 'ipfs') { - console.log('[DO] Storage provider is LocalStorage') StorageProvider = LocalStorage } else { - console.log('[DO] Storage provider is S3Storage') StorageProvider = S3Storage } diff --git a/packages/server/src/start.ts b/packages/server/src/start.ts index 238d876563..0d58f968dc 100644 --- a/packages/server/src/start.ts +++ b/packages/server/src/start.ts @@ -131,11 +131,7 @@ export const start = async (): Promise => { StartCorsServer(useSSL, certOptions) } - if ( - process.env.SERVE_CLIENT_FROM_API && - !process.env.SERVE_CLIENT_FROM_STORAGE_PROVIDER && - process.env.SERVE_CLIENT_FROM_STORAGE_PROVIDER !== 'true' - ) { + if (process.env.SERVE_CLIENT_FROM_API === 'true' && process.env.SERVE_CLIENT_FROM_STORAGE_PROVIDER !== 'true') { const clientApp = koa() clientApp.use( serve(join(packageRoot.path, 'packages', 'client', 'dist'), { diff --git a/scripts/build_docker_builder_do.sh b/scripts/build_docker_builder_do.sh index 14b1567340..2f68c2748a 100755 --- a/scripts/build_docker_builder_do.sh +++ b/scripts/build_docker_builder_do.sh @@ -3,7 +3,7 @@ set -e set -x STAGE="dig" -TAG="dig-do-7.8.8" +TAG="dig-do-3.3.3" LABEL="etherealengine/etherealengine" DOCR_REGISTRY="registry.digitalocean.com/etherealengine" REPO_NAME="etherealengine" diff --git a/scripts/push-client-to-s3.ts b/scripts/push-client-to-s3.ts index 
cbfb2a279a..3667bba875 100644 --- a/scripts/push-client-to-s3.ts +++ b/scripts/push-client-to-s3.ts @@ -43,22 +43,16 @@ cli.enable('status') cli.main(async () => { try { - console.log('[DO] Starting to push objects to SOS') await createDefaultStorageProvider() const storageProvider = getStorageProvider() - console.log('[DO] Trying to resolve path now and the path is ', appRootPath.path) const clientPath = path.resolve(appRootPath.path, `packages/client/dist`) const files = getFilesRecursive(clientPath) - console.log('[DO] Completed Get files recursive and files count is ', files.length) - console.log('[DO] Now trying to get object from SOS') let filesToPruneResponse = await storageProvider.getObject('client/S3FilesToRemove.json') - console.log('[DO] files to delete are', filesToPruneResponse) const filesToPush: string[] = [] await Promise.all( files.map((file) => { return new Promise(async (resolve) => { try { - console.log('[DO] About to push files') const fileResult = fs.readFileSync(file) let filePathRelative = processFileName(file.slice(clientPath.length)) let contentType = getContentType(file) @@ -73,12 +67,8 @@ cli.main(async () => { putData.ContentEncoding = 'br' putData.Key = `client${filePathRelative}` } - console.log('[DO] Files relative path is, ', filePathRelative) - console.log('[DO] Trying to push object to SOS') await storageProvider.putObject(putData, { isDirectory: false }) - console.log('[DO] Pushed object to SOS by pushing filePathRelative') filesToPush.push(`client${filePathRelative}`) - console.log('[DO] Sucessfully pushed files') resolve(null) } catch (e) { logger.error(e) From 40dccc34d873eebd16f82e1cf94709c10fe6f46e Mon Sep 17 00:00:00 2001 From: Kyle Baran Date: Thu, 4 Jan 2024 21:28:07 -0800 Subject: [PATCH 08/17] Overriding cookies dependency --- packages/client/package.json | 5 +++++ packages/server-core/package.json | 7 +++++++ 2 files changed, 12 insertions(+) diff --git a/packages/client/package.json b/packages/client/package.json 
index 1d61811f5c..462e207b46 100755 --- a/packages/client/package.json +++ b/packages/client/package.json @@ -92,5 +92,10 @@ "trace-unhandled": "2.0.1", "workbox-core": "^6.5.4" }, + "overrides": { + "koa": { + "cookies": "barankyle/cookies" + } + }, "license": "ISC" } diff --git a/packages/server-core/package.json b/packages/server-core/package.json index ae20fd6c5b..32a5148868 100755 --- a/packages/server-core/package.json +++ b/packages/server-core/package.json @@ -137,5 +137,12 @@ "@types/nodemailer-smtp-transport": "2.7.5", "@types/pug": "2.0.6" }, + "overrides": { + "@feathersjs/authentication-oauth": { + "cookie-session": { + "cookies": "barankyle/cookies" + } + } + }, "gitHead": "2313453697ca7c6b8d36b3b166b5a6445fe1c851" } From f9934649d1f09c1dc17bfdc0c9d2e54bb9d85850 Mon Sep 17 00:00:00 2001 From: Murad Khateeb Date: Wed, 10 Jan 2024 14:56:33 +0500 Subject: [PATCH 09/17] Remove unnecessary logs --- packages/server-core/src/media/storageprovider/s3.storage.ts | 3 --- 1 file changed, 3 deletions(-) diff --git a/packages/server-core/src/media/storageprovider/s3.storage.ts b/packages/server-core/src/media/storageprovider/s3.storage.ts index bdfe337614..6033231206 100755 --- a/packages/server-core/src/media/storageprovider/s3.storage.ts +++ b/packages/server-core/src/media/storageprovider/s3.storage.ts @@ -110,8 +110,6 @@ function handler(event) { */ export class S3Provider implements StorageProviderInterface { constructor() { - console.log('S3Provider is being called and minioClient is ', this.minioClient) - console.log('App Config', config.aws) if (!this.minioClient) this.getOriginURLs().then((result) => (this.originURLs = result)) } /** @@ -307,7 +305,6 @@ export class S3Provider implements StorageProviderInterface { if (!data.Key) return // key should not contain '/' at the begining const key = data.Key[0] === '/' ? data.Key.substring(1) : data.Key - console.log('[DO] Key is ', key) const args = params.isDirectory ? 
{ ACL: ObjectCannedACL.public_read, From 5f2a57a50d24dd96e510afa127e9843d34e1331f Mon Sep 17 00:00:00 2001 From: Murad Khateeb Date: Wed, 10 Jan 2024 15:01:32 +0500 Subject: [PATCH 10/17] Update build scripts --- scripts/build_and_publish_package_do.sh | 12 ++++++------ scripts/build_docker_builder_do.sh | 20 +++++++++++--------- 2 files changed, 17 insertions(+), 15 deletions(-) diff --git a/scripts/build_and_publish_package_do.sh b/scripts/build_and_publish_package_do.sh index 30ca98ea8f..3806428e30 100755 --- a/scripts/build_and_publish_package_do.sh +++ b/scripts/build_and_publish_package_do.sh @@ -156,12 +156,12 @@ else fi # The following scripts will need to be updated for DOCR but are not critical for the functionality of EE on DO. -if [ $PRIVATE_ECR == "true" ] -then - node ./scripts/prune_ecr_images.js --repoName $REPO_NAME-$PACKAGE --region $REGION --service $PACKAGE --releaseName $STAGE -else - node ./scripts/prune_ecr_images.js --repoName $REPO_NAME-$PACKAGE --region us-east-1 --service $PACKAGE --releaseName $STAGE --public -fi +# if [ $PRIVATE_ECR == "true" ] +# then +# node ./scripts/prune_ecr_images.js --repoName $REPO_NAME-$PACKAGE --region $REGION --service $PACKAGE --releaseName $STAGE +# else +# node ./scripts/prune_ecr_images.js --repoName $REPO_NAME-$PACKAGE --region us-east-1 --service $PACKAGE --releaseName $STAGE --public +# fi BUILD_END_TIME=`date +"%d-%m-%yT%H-%M-%S"` echo "${PACKAGE} build started at ${BUILD_START_TIME}, ended at ${BUILD_END_TIME}" \ No newline at end of file diff --git a/scripts/build_docker_builder_do.sh b/scripts/build_docker_builder_do.sh index 2f68c2748a..5596af78fe 100755 --- a/scripts/build_docker_builder_do.sh +++ b/scripts/build_docker_builder_do.sh @@ -3,37 +3,39 @@ set -e set -x STAGE="dig" -TAG="dig-do-3.3.3" +TAG="dig-do-3.5.1" LABEL="etherealengine/etherealengine" DOCR_REGISTRY="registry.digitalocean.com/etherealengine" REPO_NAME="etherealengine" EEVERSION=$(jq -r .version 
./packages/server-core/package.json) echo "Entering the script" -docker buildx create --use --driver=docker-container -doctl registry login --expiry-seconds 1800 +doctl registry login if [ $PUBLISH_DOCKERHUB == 'true' ] then echo "$DOCKER_HUB_PASSWORD" | docker login -u "$DOCKER_USERNAME" --password-stdin - docker buildx build \ - --push \ - --cache-to type=gha,mode=max \ + docker build \ --cache-from type=gha \ -t $DOCR_REGISTRY/$REPO_NAME-builder:latest_$STAGE \ -t $DOCR_REGISTRY/$REPO_NAME-builder:"${EEVERSION}_${TAG}" \ -t ${LABEL}-builder:"${EEVERSION}_${TAG}" \ -f dockerfiles/builder/Dockerfile-builder . + docker push + $DOCR_REGISTRY/$REPO_NAME-builder:latest_$STAGE \ + $DOCR_REGISTRY/$REPO_NAME-builder:"${EEVERSION}_${TAG}" \ + ${LABEL}-builder:"${EEVERSION}_${TAG}" else - docker buildx build \ - --push \ - --cache-to type=gha,mode=max \ + docker build \ --cache-from type=gha \ -t $DOCR_REGISTRY/$REPO_NAME-builder:latest_$STAGE \ -t $DOCR_REGISTRY/$REPO_NAME-builder:"${EEVERSION}_${TAG}" \ -f dockerfiles/builder/Dockerfile-builder . + docker push + $DOCR_REGISTRY/$REPO_NAME-builder:latest_$STAGE \ + $DOCR_REGISTRY/$REPO_NAME-builder:"${EEVERSION}_${TAG}" fi # The following scripts will need to be updated for DOCR but are not critical for the functionality of EE on DO. From 828e99b782750fca728d4aff02a4e3e4978ee4ff Mon Sep 17 00:00:00 2001 From: Kyle Baran Date: Fri, 5 Jan 2024 17:27:04 -0800 Subject: [PATCH 11/17] Added imagePullSecrets to Jobs/CronJobs Updated pod schema to include imagePullSecrets, pods-helper to return this alongside current information. 
--- .../common/src/schemas/cluster/pods.schema.ts | 3 ++- .../src/cluster/pods/pods-helper.ts | 3 ++- .../src/projects/project/project-helper.ts | 26 ++++++++++++++----- scripts/update-cronjob-image.ts | 14 +++++++++- 4 files changed, 36 insertions(+), 10 deletions(-) diff --git a/packages/common/src/schemas/cluster/pods.schema.ts b/packages/common/src/schemas/cluster/pods.schema.ts index ef56de6dfe..8110c89b5f 100644 --- a/packages/common/src/schemas/cluster/pods.schema.ts +++ b/packages/common/src/schemas/cluster/pods.schema.ts @@ -60,7 +60,8 @@ export const serverPodInfoSchema = Type.Object( format: 'uuid' }) ), - currentUsers: Type.Optional(Type.Number()) + currentUsers: Type.Optional(Type.Number()), + imagePullSecrets: Type.Optional(Type.Array(Type.Object({ name: Type.String()}))) }, { $id: 'ServerPodInfo', additionalProperties: false } ) diff --git a/packages/server-core/src/cluster/pods/pods-helper.ts b/packages/server-core/src/cluster/pods/pods-helper.ts index c15fd4f258..a9b599c405 100644 --- a/packages/server-core/src/cluster/pods/pods-helper.ts +++ b/packages/server-core/src/cluster/pods/pods-helper.ts @@ -205,7 +205,8 @@ const getServerPodInfo = (item: k8s.V1Pod) => { name: item.metadata?.name, status: item.status?.phase, age: item.status?.startTime?.toString(), - containers: getServerContainerInfo(item.status?.containerStatuses!) 
+ containers: getServerContainerInfo(item.status?.containerStatuses!), + imagePullSecrets: item.spec?.imagePullSecrets } as ServerPodInfoType } diff --git a/packages/server-core/src/projects/project/project-helper.ts b/packages/server-core/src/projects/project/project-helper.ts index a55ad2834e..e6de9e8616 100644 --- a/packages/server-core/src/projects/project/project-helper.ts +++ b/packages/server-core/src/projects/project/project-helper.ts @@ -932,7 +932,9 @@ export async function getProjectUpdateJobBody( app ) - const image = apiPods.pods[0].containers.find((container) => container.name === 'etherealengine')!.image + const pod = apiPods.pods[0] + const image = pod.containers.find((container) => container.name === 'etherealengine')!.image + const imagePullSecrets = pod.imagePullSecrets const command = [ 'npx', @@ -990,6 +992,7 @@ export async function getProjectUpdateJobBody( } }, spec: { + imagePullSecrets, serviceAccountName: `${process.env.RELEASE_NAME}-etherealengine-api`, containers: [ { @@ -1024,7 +1027,9 @@ export async function getProjectPushJobBody( app ) - const image = apiPods.pods[0].containers.find((container) => container.name === 'etherealengine')!.image + const pod = apiPods.pods[0] + const image = pod.containers.find((container) => container.name === 'etherealengine')!.image + const imagePullSecrets = pod.imagePullSecrets const command = [ 'npx', @@ -1070,6 +1075,7 @@ export async function getProjectPushJobBody( } }, spec: { + imagePullSecrets, serviceAccountName: `${process.env.RELEASE_NAME}-etherealengine-api`, containers: [ { @@ -1089,7 +1095,7 @@ export async function getProjectPushJobBody( } } -export const getCronJobBody = (project: ProjectType, image: string): object => { +export const getCronJobBody = (project: ProjectType, image: string, imagePullSecrets?): object => { return { metadata: { name: `${process.env.RELEASE_NAME}-${project.name}-auto-update`, @@ -1119,6 +1125,7 @@ export const getCronJobBody = (project: ProjectType, image: 
string): object => { } }, spec: { + imagePullSecrets: imagePullSecrets || [], serviceAccountName: `${process.env.RELEASE_NAME}-etherealengine-api`, containers: [ { @@ -1162,7 +1169,9 @@ export async function getDirectoryArchiveJobBody( app ) - const image = apiPods.pods[0].containers.find((container) => container.name === 'etherealengine')!.image + const pod = apiPods.pods[0] + const image = pod.containers.find((container) => container.name === 'etherealengine')!.image + const imagePullSecrets = pod.imagePullSecrets const command = [ 'npx', @@ -1198,6 +1207,7 @@ export async function getDirectoryArchiveJobBody( } }, spec: { + imagePullSecrets, serviceAccountName: `${process.env.RELEASE_NAME}-etherealengine-api`, containers: [ { @@ -1235,7 +1245,9 @@ export const createOrUpdateProjectUpdateJob = async (app: Application, projectNa app ) - const image = apiPods.pods[0].containers.find((container) => container.name === 'etherealengine')!.image + const pod = apiPods.pods[0] + const image = pod.containers.find((container) => container.name === 'etherealengine')!.image + const imagePullSecrets = pod.imagePullSecrets const k8BatchClient = getState(ServerState).k8BatchClient @@ -1244,7 +1256,7 @@ export const createOrUpdateProjectUpdateJob = async (app: Application, projectNa await k8BatchClient.patchNamespacedCronJob( `${process.env.RELEASE_NAME}-${projectName}-auto-update`, 'default', - getCronJobBody(project, image), + getCronJobBody(project, image, imagePullSecrets), undefined, undefined, undefined, @@ -1258,7 +1270,7 @@ export const createOrUpdateProjectUpdateJob = async (app: Application, projectNa ) } catch (err) { logger.error('Could not find cronjob %o', err) - await k8BatchClient.createNamespacedCronJob('default', getCronJobBody(project, image)) + await k8BatchClient.createNamespacedCronJob('default', getCronJobBody(project, image, imagePullSecrets)) } } } diff --git a/scripts/update-cronjob-image.ts b/scripts/update-cronjob-image.ts index 0941b2c702..6f7f5d5980 
100644 --- a/scripts/update-cronjob-image.ts +++ b/scripts/update-cronjob-image.ts @@ -32,6 +32,8 @@ import { getState } from '@etherealengine/hyperflux' import { ServerMode, ServerState } from '@etherealengine/server-core/src/ServerState' import { createFeathersKoaApp, serverJobPipe } from '@etherealengine/server-core/src/createApp' import { getCronJobBody } from '@etherealengine/server-core/src/projects/project/project-helper' +import {getPodsData} from "@etherealengine/server-core/src/cluster/pods/pods-helper"; +import config from "@etherealengine/server-core/src/appconfig"; dotenv.config({ path: appRootPath.path, @@ -78,13 +80,23 @@ cli.main(async () => { paginate: false })) as ProjectType[] const k8BatchClient = getState(ServerState).k8BatchClient + const apiPods = await getPodsData( + `app.kubernetes.io/instance=${config.server.releaseName},app.kubernetes.io/component=api`, + 'api', + 'Api', + app + ) + + const pod = apiPods.pods[0] + const image = pod.containers.find((container) => container.name === 'etherealengine')!.image + const imagePullSecrets = pod.imagePullSecrets if (k8BatchClient) for (const project of autoUpdateProjects) { try { await k8BatchClient.patchNamespacedCronJob( `${process.env.RELEASE_NAME}-${project.name}-auto-update`, 'default', - getCronJobBody(project, `${options.ecrUrl}/${options.repoName}-api:${options.tag}__${options.startTime}`), + getCronJobBody(project, image, imagePullSecrets), undefined, undefined, undefined, From c20df05142e595b699198ce9fd424d26cf0f1cca Mon Sep 17 00:00:00 2001 From: Murad Khateeb Date: Thu, 11 Jan 2024 18:52:45 +0500 Subject: [PATCH 12/17] Update BuildX commands --- scripts/build_and_publish_package_do.sh | 11 +++++------ scripts/build_docker_builder_do.sh | 6 ++---- 2 files changed, 7 insertions(+), 10 deletions(-) diff --git a/scripts/build_and_publish_package_do.sh b/scripts/build_and_publish_package_do.sh index 3806428e30..4a534293cf 100755 --- a/scripts/build_and_publish_package_do.sh +++ 
b/scripts/build_and_publish_package_do.sh @@ -26,14 +26,12 @@ if [ $PUBLISH_DOCKERHUB == 'true' ] && [ "$DOCKERFILE" != "client-serve-static" then echo "$DOCKER_HUB_PASSWORD" | docker login -u "$DOCKER_USERNAME" --password-stdin - docker buildx build \ + docker build \ --builder etherealengine-$PACKAGE \ - --push \ -t $DOCR_REGISTRY/$REPO_NAME-$PACKAGE:${TAG}__${START_TIME} \ -t $DOCR_REGISTRY/$REPO_NAME-$PACKAGE:latest_$STAGE \ -t ${LABEL}-$PACKAGE:${TAG} \ -f dockerfiles/$PACKAGE/Dockerfile-$DOCKERFILE \ - --cache-to type=registry,mode=max,image-manifest=true,ref=$DOCR_REGISTRY/$REPO_NAME-$PACKAGE:latest_${STAGE}_cache \ --cache-from type=registry,ref=$DOCR_REGISTRY/$REPO_NAME-$PACKAGE:latest_${STAGE}_cache \ --build-arg ECR_URL=$DOCR_REGISTRY \ --build-arg REPO_NAME=$REPO_NAME \ @@ -69,6 +67,8 @@ then --build-arg VITE_DISABLE_LOG=$VITE_DISABLE_LOG \ --build-arg VITE_AVATURN_URL=$VITE_AVATURN_URL \ --build-arg VITE_AVATURN_API=$VITE_AVATURN_API . + docker push --all-tags $DOCR_REGISTRY/$REPO_NAME-$PACKAGE + docker push ${LABEL}-$PACKAGE:${TAG} elif [ "$DOCKERFILE" == "client-serve-static" ] then docker buildx build \ @@ -111,13 +111,11 @@ then --build-arg VITE_AVATURN_URL=$VITE_AVATURN_URL \ --build-arg VITE_AVATURN_API=$VITE_AVATURN_API . else - docker buildx build \ + docker build \ --builder etherealengine-$PACKAGE \ - --push \ -t $DOCR_REGISTRY/$REPO_NAME-$PACKAGE:${TAG}__${START_TIME} \ -t $DOCR_REGISTRY/$REPO_NAME-$PACKAGE:latest_$STAGE \ -f dockerfiles/$PACKAGE/Dockerfile-$DOCKERFILE \ - --cache-to type=registry,mode=max,image-manifest=true,ref=$DOCR_REGISTRY/$REPO_NAME-$PACKAGE:latest_${STAGE}_cache \ --cache-from type=registry,ref=$DOCR_REGISTRY/$REPO_NAME-$PACKAGE:latest_${STAGE}_cache \ --build-arg ECR_URL=$DOCR_REGISTRY \ --build-arg REPO_NAME=$REPO_NAME \ @@ -153,6 +151,7 @@ else --build-arg VITE_DISABLE_LOG=$VITE_DISABLE_LOG \ --build-arg VITE_AVATURN_URL=$VITE_AVATURN_URL \ --build-arg VITE_AVATURN_API=$VITE_AVATURN_API . 
+ docker push --all-tags $DOCR_REGISTRY/$REPO_NAME-$PACKAGE fi # The following scripts will need to be updated for DOCR but are not critical for the functionality of EE on DO. diff --git a/scripts/build_docker_builder_do.sh b/scripts/build_docker_builder_do.sh index 5596af78fe..394d0fe2dd 100755 --- a/scripts/build_docker_builder_do.sh +++ b/scripts/build_docker_builder_do.sh @@ -3,7 +3,7 @@ set -e set -x STAGE="dig" -TAG="dig-do-3.5.1" +TAG="dig-do-5.4.5" LABEL="etherealengine/etherealengine" DOCR_REGISTRY="registry.digitalocean.com/etherealengine" REPO_NAME="etherealengine" @@ -33,9 +33,7 @@ else -t $DOCR_REGISTRY/$REPO_NAME-builder:latest_$STAGE \ -t $DOCR_REGISTRY/$REPO_NAME-builder:"${EEVERSION}_${TAG}" \ -f dockerfiles/builder/Dockerfile-builder . - docker push - $DOCR_REGISTRY/$REPO_NAME-builder:latest_$STAGE \ - $DOCR_REGISTRY/$REPO_NAME-builder:"${EEVERSION}_${TAG}" + docker push --all-tags $DOCR_REGISTRY/$REPO_NAME-builder fi # The following scripts will need to be updated for DOCR but are not critical for the functionality of EE on DO. From b6df1cd2cd92332aef705b4a5f734b021b9fec1e Mon Sep 17 00:00:00 2001 From: Kyle Baran Date: Thu, 11 Jan 2024 14:06:22 -0800 Subject: [PATCH 13/17] Restored buildx and added --load flag. 
--- scripts/build_and_publish_package_do.sh | 11 ++++++++--- scripts/build_docker_builder_do.sh | 20 ++++++++++---------- 2 files changed, 18 insertions(+), 13 deletions(-) diff --git a/scripts/build_and_publish_package_do.sh b/scripts/build_and_publish_package_do.sh index 4a534293cf..b4dc42f48b 100755 --- a/scripts/build_and_publish_package_do.sh +++ b/scripts/build_and_publish_package_do.sh @@ -26,8 +26,9 @@ if [ $PUBLISH_DOCKERHUB == 'true' ] && [ "$DOCKERFILE" != "client-serve-static" then echo "$DOCKER_HUB_PASSWORD" | docker login -u "$DOCKER_USERNAME" --password-stdin - docker build \ + docker buildx build \ --builder etherealengine-$PACKAGE \ + --load \ -t $DOCR_REGISTRY/$REPO_NAME-$PACKAGE:${TAG}__${START_TIME} \ -t $DOCR_REGISTRY/$REPO_NAME-$PACKAGE:latest_$STAGE \ -t ${LABEL}-$PACKAGE:${TAG} \ @@ -73,8 +74,8 @@ elif [ "$DOCKERFILE" == "client-serve-static" ] then docker buildx build \ --builder etherealengine-$PACKAGE \ + --load \ -f dockerfiles/$PACKAGE/Dockerfile-$DOCKERFILE \ - --cache-to type=registry,mode=max,image-manifest=true,ref=$DOCR_REGISTRY/$REPO_NAME-$PACKAGE:latest_${STAGE}_cache \ --cache-from type=registry,ref=$DOCR_REGISTRY/$REPO_NAME-$PACKAGE:latest_${STAGE}_cache \ --build-arg ECR_URL=$DOCR_REGISTRY \ --build-arg REPO_NAME=$REPO_NAME \ @@ -111,8 +112,9 @@ then --build-arg VITE_AVATURN_URL=$VITE_AVATURN_URL \ --build-arg VITE_AVATURN_API=$VITE_AVATURN_API . else - docker build \ + docker buildx build \ --builder etherealengine-$PACKAGE \ + --load \ -t $DOCR_REGISTRY/$REPO_NAME-$PACKAGE:${TAG}__${START_TIME} \ -t $DOCR_REGISTRY/$REPO_NAME-$PACKAGE:latest_$STAGE \ -f dockerfiles/$PACKAGE/Dockerfile-$DOCKERFILE \ @@ -154,6 +156,9 @@ else docker push --all-tags $DOCR_REGISTRY/$REPO_NAME-$PACKAGE fi +# Add this back to buildx build commands once DO fixes their 413 errors, as cache pushes are also triggering them. 
+# --cache-to type=registry,mode=max,image-manifest=true,ref=$DOCR_REGISTRY/$REPO_NAME-$PACKAGE:latest_${STAGE}_cache \ + # The following scripts will need to be updated for DOCR but are not critical for the functionality of EE on DO. # if [ $PRIVATE_ECR == "true" ] # then diff --git a/scripts/build_docker_builder_do.sh b/scripts/build_docker_builder_do.sh index 394d0fe2dd..7b8ba38cac 100755 --- a/scripts/build_docker_builder_do.sh +++ b/scripts/build_docker_builder_do.sh @@ -3,7 +3,7 @@ set -e set -x STAGE="dig" -TAG="dig-do-5.4.5" +TAG="dig-do-5.4.6" LABEL="etherealengine/etherealengine" DOCR_REGISTRY="registry.digitalocean.com/etherealengine" REPO_NAME="etherealengine" @@ -17,23 +17,23 @@ if [ $PUBLISH_DOCKERHUB == 'true' ] then echo "$DOCKER_HUB_PASSWORD" | docker login -u "$DOCKER_USERNAME" --password-stdin - docker build \ - --cache-from type=gha \ + docker buildx build \ + --load \ -t $DOCR_REGISTRY/$REPO_NAME-builder:latest_$STAGE \ -t $DOCR_REGISTRY/$REPO_NAME-builder:"${EEVERSION}_${TAG}" \ -t ${LABEL}-builder:"${EEVERSION}_${TAG}" \ -f dockerfiles/builder/Dockerfile-builder . - docker push - $DOCR_REGISTRY/$REPO_NAME-builder:latest_$STAGE \ - $DOCR_REGISTRY/$REPO_NAME-builder:"${EEVERSION}_${TAG}" \ - ${LABEL}-builder:"${EEVERSION}_${TAG}" + docker push $DOCR_REGISTRY/$REPO_NAME-builder:latest_$STAGE + docker push $DOCR_REGISTRY/$REPO_NAME-builder:"${EEVERSION}_${TAG}" + docker push ${LABEL}-builder:"${EEVERSION}_${TAG}" else - docker build \ - --cache-from type=gha \ + docker buildx build \ + --load \ -t $DOCR_REGISTRY/$REPO_NAME-builder:latest_$STAGE \ -t $DOCR_REGISTRY/$REPO_NAME-builder:"${EEVERSION}_${TAG}" \ -f dockerfiles/builder/Dockerfile-builder . 
- docker push --all-tags $DOCR_REGISTRY/$REPO_NAME-builder + docker push $DOCR_REGISTRY/$REPO_NAME-builder:latest_$STAGE + docker push $DOCR_REGISTRY/$REPO_NAME-builder:"${EEVERSION}_${TAG}" fi # The following scripts will need to be updated for DOCR but are not critical for the functionality of EE on DO. From efba6060105b92b55b34658cca3a7a0d26bca0b2 Mon Sep 17 00:00:00 2001 From: Murad Khateeb Date: Mon, 15 Jan 2024 09:48:00 +0500 Subject: [PATCH 14/17] remove redundent variable declarations --- .../media/storageprovider/s3-do.storage.ts | 20 ------------------- 1 file changed, 20 deletions(-) diff --git a/packages/server-core/src/media/storageprovider/s3-do.storage.ts b/packages/server-core/src/media/storageprovider/s3-do.storage.ts index 334bebb4fb..95e7aa3b65 100755 --- a/packages/server-core/src/media/storageprovider/s3-do.storage.ts +++ b/packages/server-core/src/media/storageprovider/s3-do.storage.ts @@ -27,26 +27,6 @@ import { FunctionSummary } from '@aws-sdk/client-cloudfront' import S3Provider from './s3.storage' -const MAX_ITEMS = 1 -const CFFunctionTemplate = ` -function handler(event) { - var request = event.request; - var routeRegexRoot = __$routeRegex$__ - var routeRegex = new RegExp(routeRegexRoot) - var publicRegexRoot = __$publicRegex$__ - var publicRegex = new RegExp(publicRegexRoot) - - if (routeRegex.test(request.uri)) { - request.uri = '/client/index.html' - } - - if (publicRegex.test(request.uri)) { - request.uri = '/client' + request.uri - } - return request; -} -` - /** * Storage provide class to communicate with AWS S3 API. 
*/ From baec4785bf9bd13db5a8715670550c9a9e62768f Mon Sep 17 00:00:00 2001 From: Murad Khateeb Date: Mon, 15 Jan 2024 09:59:14 +0500 Subject: [PATCH 15/17] Relocate DO specific Vars --- packages/server-core/src/media/storageprovider/s3-do.storage.ts | 2 ++ packages/server-core/src/media/storageprovider/s3.storage.ts | 2 -- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/server-core/src/media/storageprovider/s3-do.storage.ts b/packages/server-core/src/media/storageprovider/s3-do.storage.ts index 95e7aa3b65..c4c60d6f93 100755 --- a/packages/server-core/src/media/storageprovider/s3-do.storage.ts +++ b/packages/server-core/src/media/storageprovider/s3-do.storage.ts @@ -25,6 +25,7 @@ Ethereal Engine. All Rights Reserved. import { FunctionSummary } from '@aws-sdk/client-cloudfront' +import config from '../../appconfig' import S3Provider from './s3.storage' /** @@ -34,6 +35,7 @@ export class S3DOProvider extends S3Provider { constructor() { super() this.bucketAssetURL = 'https://etherealengine-static-resources.sfo2.digitaloceanspaces.com' + this.cacheDomain = config.aws.cloudfront.domain } /** * Invalidate items in the S3 storage. diff --git a/packages/server-core/src/media/storageprovider/s3.storage.ts b/packages/server-core/src/media/storageprovider/s3.storage.ts index 6033231206..10b6a0b1ca 100755 --- a/packages/server-core/src/media/storageprovider/s3.storage.ts +++ b/packages/server-core/src/media/storageprovider/s3.storage.ts @@ -162,8 +162,6 @@ export class S3Provider implements StorageProviderInterface { ? config.aws.s3.endpoint ? `${config.aws.s3.endpoint.replace('http://', '').replace('https://', '')}/${this.bucket}` : config.aws.cloudfront.domain - : config.server.storageProvider === 's3-do' - ? 
config.aws.cloudfront.domain : `${config.aws.cloudfront.domain}/${this.bucket}` originURLs = [this.cacheDomain] From 3a1c8eba2727fe66635667a8cc2eecc0e198e778 Mon Sep 17 00:00:00 2001 From: Murad Khateeb Date: Thu, 18 Jan 2024 11:06:29 +0500 Subject: [PATCH 16/17] Update variables for DO --- packages/server-core/src/appconfig.ts | 2 +- packages/server-core/src/media/storageprovider/s3-do.storage.ts | 2 +- packages/server-core/src/media/storageprovider/s3.storage.ts | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/server-core/src/appconfig.ts b/packages/server-core/src/appconfig.ts index 490cb8b768..e90a36b9de 100755 --- a/packages/server-core/src/appconfig.ts +++ b/packages/server-core/src/appconfig.ts @@ -326,7 +326,7 @@ const aws = { s3: { accessKeyId: process.env.STORAGE_AWS_ACCESS_KEY_ID!, secretAccessKey: process.env.STORAGE_AWS_ACCESS_KEY_SECRET!, - endpoint: process.env.STORAGE_S3_ENDPOINT ? process.env.STORAGE_S3_ENDPOINT : 'https://sfo2.digitaloceanspaces.com', + endpoint: process.env.STORAGE_S3_ENDPOINT!, staticResourceBucket: testEnabled ? process.env.STORAGE_S3_TEST_RESOURCE_BUCKET! 
: process.env.STORAGE_S3_STATIC_RESOURCE_BUCKET!, diff --git a/packages/server-core/src/media/storageprovider/s3-do.storage.ts b/packages/server-core/src/media/storageprovider/s3-do.storage.ts index c4c60d6f93..08ad9c692e 100755 --- a/packages/server-core/src/media/storageprovider/s3-do.storage.ts +++ b/packages/server-core/src/media/storageprovider/s3-do.storage.ts @@ -34,7 +34,7 @@ import S3Provider from './s3.storage' export class S3DOProvider extends S3Provider { constructor() { super() - this.bucketAssetURL = 'https://etherealengine-static-resources.sfo2.digitaloceanspaces.com' + this.bucketAssetURL = `https://${config.aws.cloudfront.domain}` this.cacheDomain = config.aws.cloudfront.domain } /** diff --git a/packages/server-core/src/media/storageprovider/s3.storage.ts b/packages/server-core/src/media/storageprovider/s3.storage.ts index 10b6a0b1ca..850952b953 100755 --- a/packages/server-core/src/media/storageprovider/s3.storage.ts +++ b/packages/server-core/src/media/storageprovider/s3.storage.ts @@ -167,7 +167,7 @@ export class S3Provider implements StorageProviderInterface { originURLs = [this.cacheDomain] public bucketAssetURL = - config.server.storageProvider === 's3-do' + config.server.storageProvider === 's3' ? config.aws.s3.endpoint ? 
`${config.aws.s3.endpoint}/${this.bucket}` : config.aws.s3.s3DevMode === 'local' From abf4af61fba062f885f0c51e271f74a958b7c290 Mon Sep 17 00:00:00 2001 From: Murad Khateeb Date: Fri, 19 Jan 2024 08:33:15 +0500 Subject: [PATCH 17/17] Update conditional statements to accomudate 's3' --- .../src/media/storageprovider/s3.storage.ts | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/packages/server-core/src/media/storageprovider/s3.storage.ts b/packages/server-core/src/media/storageprovider/s3.storage.ts index 850952b953..b3d9d24339 100755 --- a/packages/server-core/src/media/storageprovider/s3.storage.ts +++ b/packages/server-core/src/media/storageprovider/s3.storage.ts @@ -412,7 +412,11 @@ export class S3Provider implements StorageProviderInterface { async createInvalidation(invalidationItems: string[]) { if (!invalidationItems || invalidationItems.length === 0) return // for non-standard s3 setups, we don't use cloudfront - if (config.server.storageProvider !== 's3-do' || config.aws.s3.s3DevMode === 'local') return + if ( + (config.server.storageProvider !== 's3' && config.server.storageProvider !== 's3-do') || + config.aws.s3.s3DevMode === 'local' + ) + return const params = { DistributionId: config.aws.cloudfront.distributionId, InvalidationBatch: { @@ -430,7 +434,11 @@ export class S3Provider implements StorageProviderInterface { } async getOriginURLs(): Promise { - if (config.server.storageProvider !== 's3-do' || config.aws.s3.s3DevMode === 'local') return [this.cacheDomain] + if ( + (config.server.storageProvider !== 's3-do' && config.server.storageProvider !== 's3') || + config.aws.s3.s3DevMode === 'local' + ) + return [this.cacheDomain] const getDistributionParams = { Id: config.aws.cloudfront.distributionId } @@ -441,7 +449,7 @@ export class S3Provider implements StorageProviderInterface { } async listFunctions(marker: string | null, functions: FunctionSummary[]): Promise { - if (config.server.storageProvider !== 's3-do') return 
[] + if (config.server.storageProvider !== 's3-do' && config.server.storageProvider !== 's3') return [] const params: ListFunctionsCommandInput = { MaxItems: MAX_ITEMS }