Merge branch 'develop' into cd/add-semver
rrsettgast authored Jun 19, 2024
2 parents bd217df + 9134c42 commit 38ada29
Showing 627 changed files with 34,350 additions and 10,242 deletions.
2 changes: 1 addition & 1 deletion .devcontainer/devcontainer.json
@@ -2,7 +2,7 @@
"build": {
"dockerfile": "Dockerfile",
"args": {
"GEOS_TPL_TAG": "260-235"
"GEOS_TPL_TAG": "262-349"
}
},
"runArgs": [
150 changes: 131 additions & 19 deletions .github/workflows/build_and_test.yml
@@ -9,13 +9,20 @@ on:
required: false
type: string
default: build
DOCKER_CERTS_UPDATE_COMMAND:
required: false
type: string
CMAKE_BUILD_TYPE:
required: true
type: string
CODE_COVERAGE:
required: false
type: boolean
default: false
DOCKER_CERTS_DIR:
required: false
type: string
default: ''
DOCKER_IMAGE_TAG:
required: true
type: string
@@ -40,20 +47,41 @@ on:
HOST_CONFIG:
required: false
type: string
NPROC:
required: false
type: string
default: ''
RUNS_ON:
required: true
type: string
USE_SCCACHE:
required: false
type: boolean
default: true
REQUIRED_LABEL:
required: false
type: string
LOCAL_BASELINE_DIR:
required: false
type: string
secrets:
GOOGLE_CLOUD_GCP:
required: false
jobs:
build_test_deploy:
runs-on: ${{ inputs.RUNS_ON }}
steps:
- name: does_pr_have_necessary_labels
if: ${{inputs.REQUIRED_LABEL && github.event_name == 'pull_request'}}
run: |
pr_json=$(curl -H "Accept: application/vnd.github+json" https://api.github.com/repos/${{ github.repository }}/pulls/${{ github.event.number }})
LABELS=$(echo ${pr_json} | jq -crM '[.labels[].name]')
echo " the labels are ${LABELS}"
echo " the required label is ${{inputs.REQUIRED_LABEL}}"
if [[ "${LABELS}" != *"${{inputs.REQUIRED_LABEL}}"* ]]; then
exit 1
fi
- name: 'Cleanup build folder'
run: |
pwd
@@ -64,15 +92,15 @@ jobs:
ls -la ./
- name: Checkout Repository
uses: actions/checkout@v4.1.1
uses: actions/checkout@v4.1.4
with:
submodules: true
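# Pull Git LFS content only when the build runs the integrated tests.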
lfs: ${{ inputs.BUILD_TYPE == 'integrated_tests' }}
fetch-depth: 1

- id: 'auth'
if: ${{ inputs.GCP_BUCKET || inputs.USE_SCCACHE }}
uses: 'google-github-actions/auth@v2.1.0'
uses: 'google-github-actions/auth@v2.1.2'
with:
credentials_json: '${{ secrets.GOOGLE_CLOUD_GCP }}'
create_credentials_file: true
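# The auth action writes the JSON key to a file in the workspace and exports its path as GOOGLE_GHA_CREDS_PATH (used further below).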
@@ -93,15 +121,28 @@
docker_args=()
script_args=()
if [[ -n "${{ inputs.DOCKER_CERTS_DIR }}" ]]; then
DOCKER_CERTS_DIR=${{ inputs.DOCKER_CERTS_DIR }}
docker_args+=(-e DOCKER_CERTS_DIR=${DOCKER_CERTS_DIR})
fi
if [[ -n "${{ inputs.DOCKER_CERTS_UPDATE_COMMAND }}" ]]; then
DOCKER_CERTS_UPDATE_COMMAND=${{ inputs.DOCKER_CERTS_UPDATE_COMMAND }}
docker_args+=(-e DOCKER_CERTS_UPDATE_COMMAND=${DOCKER_CERTS_UPDATE_COMMAND})
fi
if [[ -n "${{ inputs.NPROC }}" ]]; then
NPROC=${{ inputs.NPROC }}
script_args+=(--nproc ${NPROC})
fi
docker_args+=(${{ inputs.DOCKER_RUN_ARGS }})
COMMIT=${{ github.event.pull_request.head.sha }}
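# Bash substring expansion: keep the first 7 characters, i.e. the conventional short SHA.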
SHORT_COMMIT=${COMMIT:0:7}
script_args+=(--install-dir-basename GEOSX-${SHORT_COMMIT})
# All the data exchanged with the docker container is eventually meant to be send to the cloud.
# All the data exchanged with the docker container is eventually meant to be sent to the cloud.
if [[ ! -z "${{ inputs.GCP_BUCKET }}" ]]; then
if [ "${{ inputs.BUILD_TYPE }}" = "build" ]; then
DATA_BASENAME=GEOSX-and-TPL-${SHORT_COMMIT}.tar.gz
@@ -112,9 +153,9 @@ jobs:
script_args+=(--data-basename ${DATA_BASENAME})
DATA_EXCHANGE_DIR=/mnt/geos-exchange # Exchange folder outside of the container
DATA_EXCHANGE_DIR=${GITHUB_WORKSPACE}/geos-exchange # Exchange folder outside of the container
if [ ! -d "${DATA_EXCHANGE_DIR}" ]; then
sudo mkdir -p ${DATA_EXCHANGE_DIR}
mkdir -p ${DATA_EXCHANGE_DIR}
fi
DATA_EXCHANGE_MOUNT_POINT=/tmp/exchange # Exchange folder inside of the container
docker_args+=(--volume=${DATA_EXCHANGE_DIR}:${DATA_EXCHANGE_MOUNT_POINT})
@@ -128,11 +169,6 @@ jobs:
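# Only the basename is passed: the credentials file created by the auth step lives in the workspace, which is mounted into the container.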
script_args+=(--sccache-credentials $(basename ${GOOGLE_GHA_CREDS_PATH}))
fi
if [ ${{ inputs.RUNS_ON }} == 'self-hosted' ]; then
RUNNER_CERTIFICATES_DIR=/etc/pki/ca-trust/source/anchors/
mkdir -p ${GITHUB_WORKSPACE}/certificates
cp ${RUNNER_CERTIFICATES_DIR}/*.crt* ${GITHUB_WORKSPACE}/certificates
fi
# We need to know where the code folder is mounted inside the container so we can run the script at the proper location!
# Since this information is repeated twice, we use a variable.
GITHUB_WORKSPACE_MOUNT_POINT=/tmp/geos
@@ -154,7 +190,8 @@ jobs:
script_args+=(--cmake-build-type ${{ inputs.CMAKE_BUILD_TYPE }})
script_args+=(${{ inputs.BUILD_AND_TEST_CLI_ARGS }})
DOCKER_REPOSITORY=${{ inputs.DOCKER_REPOSITORY }}
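# ${DOCKER_REPOSITORY//// } replaces every '/' with a space, so word splitting yields (organization image); [1] is the bare image name.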
SPLIT_DOCKER_REPOSITORY=(${DOCKER_REPOSITORY//// })
CONTAINER_NAME=geosx_build_${SPLIT_DOCKER_REPOSITORY[1]}_${GITHUB_SHA:0:7}
echo "CONTAINER_NAME: ${CONTAINER_NAME}"
@@ -168,6 +205,53 @@ jobs:
script_args+=(--code-coverage)
fi
if [[ -n "${{ inputs.LOCAL_BASELINE_DIR }}" ]]; then
# Extract the 'baseline' value
# Define the path to the YAML file
YAML_FILE_PATH="${GITHUB_WORKSPACE}/.integrated_tests.yaml"
# Verify the YAML file path
if [[ ! -f "${YAML_FILE_PATH}" ]]; then
echo "Error: File $YAML_FILE_PATH does not exist."
else
echo "Found integratedTests file: $YAML_FILE_PATH."
fi
# Extract the baseline field
BASELINE_FULL_PATH=$(grep -A 2 'baselines:' "${YAML_FILE_PATH}" | grep 'baseline:' | awk '{print $2}')
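# 'grep -A 2' keeps the two lines after 'baselines:', where the 'baseline:' key is expected; awk prints its value (the second field).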
# Remove the 'integratedTests/' prefix
BASELINE_TAG=${BASELINE_FULL_PATH#integratedTests/}
echo "Baseline: ${BASELINE_TAG}"
# Extract the folder name
PR_NUMBER=$(echo "$BASELINE_TAG" | grep -o 'pr[0-9]*')
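# 'grep -o' prints only the matched part (e.g. 'pr1234') instead of the whole line.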
PR_BASELINE_FOLDER_NAME=baselines_${PR_NUMBER}
echo "Baseline folder name: ${PR_BASELINE_FOLDER_NAME}"
CURRENT_BASELINE_DIR=${{ inputs.LOCAL_BASELINE_DIR }}/${PR_BASELINE_FOLDER_NAME}
echo "Current baseline dir: ${CURRENT_BASELINE_DIR}"
if [ -d ${CURRENT_BASELINE_DIR} ];then
echo "Current baseline dir found."
ls -l ${CURRENT_BASELINE_DIR}
# We define a mount point and mount it read-only inside the container.
CURRENT_BASELINE_DIR_MOUNT=/tmp/geos/baselines
docker_args+=(--volume=${CURRENT_BASELINE_DIR}:${CURRENT_BASELINE_DIR_MOUNT}:ro)
else
echo "Current baselines directory (${CURRENT_BASELINE_DIR}) not found"
fi
fi
echo running "docker run \
${docker_args[@]} \
-h=`hostname` \
${{ inputs.DOCKER_REPOSITORY }}:${{ inputs.DOCKER_IMAGE_TAG }} \
${GITHUB_WORKSPACE_MOUNT_POINT}/scripts/ci_build_and_test_in_container.sh \
${script_args[@]}"
# For an integrated-tests run, we still want to send the results to the cloud for inspection,
# while for a standard build (if even possible), pushing a failed build would be pointless.
# GHA sets `-e` on bash scripts by default to fail asap,
@@ -186,21 +270,49 @@ jobs:
# Send to the bucket and print the download link when it makes sense.
if [[ ! -z "${{ inputs.GCP_BUCKET }}" ]]; then
if [[ "${{ inputs.BUILD_TYPE }}" = "integrated_tests" || ${EXIT_STATUS} -eq 0 ]]; then
CLOUDSDK_PYTHON=python3 gsutil cp -a public-read ${DATA_EXCHANGE_DIR}/${DATA_BASENAME} gs://${{ inputs.GCP_BUCKET }}/
echo "Download the bundle at https://storage.googleapis.com/${{ inputs.GCP_BUCKET }}/${DATA_BASENAME}"
if [ -f ${DATA_EXCHANGE_DIR}/${DATA_BASENAME} ]; then
CLOUDSDK_PYTHON=python3 gsutil cp -a public-read ${DATA_EXCHANGE_DIR}/${DATA_BASENAME} gs://${{ inputs.GCP_BUCKET }}/
echo "Download the bundle at https://storage.googleapis.com/${{ inputs.GCP_BUCKET }}/${DATA_BASENAME}"
fi
if [ -f ${DATA_EXCHANGE_DIR}/test_logs_${DATA_BASENAME} ]; then
CLOUDSDK_PYTHON=python3 gsutil cp -a public-read ${DATA_EXCHANGE_DIR}/test_logs_${DATA_BASENAME} gs://${{ inputs.GCP_BUCKET }}/
echo "Download integrated test logs here: https://storage.googleapis.com/${{ inputs.GCP_BUCKET }}/test_logs_${DATA_BASENAME}"
fi
if [ -f ${DATA_EXCHANGE_DIR}/baseline_${DATA_BASENAME} ];then
if [[ -n "${{ inputs.LOCAL_BASELINE_DIR }}" ]]; then
# 1. We copy the baselines to a local directory to store them
# 1.a Create the new target directory to store the new baselines
THIS_PR_NUMBER=pr${{ github.event.number }}
NEW_PR_BASELINE_FOLDER_NAME=baselines_${THIS_PR_NUMBER}
TARGET_DIR="${{ inputs.LOCAL_BASELINE_DIR }}/${NEW_PR_BASELINE_FOLDER_NAME}"
echo "Create folder ${TARGET_DIR}"
mkdir -p "${TARGET_DIR}"
# 1.b We copy the new baselines to the new target directory
SOURCE_FILE="${DATA_EXCHANGE_DIR}/baseline_${DATA_BASENAME}"
echo "Copy ${SOURCE_FILE} to ${TARGET_DIR}"
cp "${SOURCE_FILE}" "${TARGET_DIR}"
fi
# 2. We push the baselines to the cloud
CLOUDSDK_PYTHON=python3 gsutil cp -a public-read ${DATA_EXCHANGE_DIR}/baseline_${DATA_BASENAME} gs://${{ inputs.GCP_BUCKET }}/
echo "Download test baselines here: https://storage.googleapis.com/${{ inputs.GCP_BUCKET }}/baseline_${DATA_BASENAME}"
echo "New baseline ID: baseline_${DATA_BASENAME::-7}"
else
echo "Baselines ${DATA_EXCHANGE_DIR}/baseline_${DATA_BASENAME} were not uploaded. Likeyly because no rebaseline was necessary."
fi
fi
fi
# manually remove the workspace to avoid issues with the next job when using self-hosted runners
if [ -d "${GITHUB_WORKSPACE}/integratedTests" ]; then
rm -rf ${GITHUB_WORKSPACE}/integratedTests
fi
exit ${EXIT_STATUS}
- name: Upload coverage to Codecov
if: inputs.CODE_COVERAGE
uses: codecov/codecov-action@v4.0.1
uses: codecov/codecov-action@v4.3.1
with:
files: geos_coverage.info.cleaned
fail_ci_if_error: true