
Merge branch 'develop' into pt/conf-tol
paveltomin authored May 8, 2024
2 parents f4a0613 + 90feb10 commit 46e605e
Showing 77 changed files with 1,120 additions and 873 deletions.
70 changes: 59 additions & 11 deletions .github/workflows/build_and_test.yml
@@ -9,13 +9,20 @@ on:
required: false
type: string
default: build
DOCKER_CERTS_UPDATE_COMMAND:
required: false
type: string
CMAKE_BUILD_TYPE:
required: true
type: string
CODE_COVERAGE:
required: false
type: boolean
default: false
DOCKER_CERTS_DIR:
required: false
type: string
default: ''
DOCKER_IMAGE_TAG:
required: true
type: string
@@ -40,9 +47,16 @@ on:
HOST_CONFIG:
required: false
type: string
NPROC:
required: false
type: string
default: ''
RUNS_ON:
required: true
type: string
UPLOAD_BASELINES:
required: false
type: string
USE_SCCACHE:
required: false
type: boolean
@@ -78,15 +92,15 @@ jobs:
ls -la ./
- name: Checkout Repository
uses: actions/checkout@v4.1.1
uses: actions/checkout@v4.1.4
with:
submodules: true
lfs: ${{ inputs.BUILD_TYPE == 'integrated_tests' }}
fetch-depth: 1

- id: 'auth'
if: ${{ inputs.GCP_BUCKET || inputs.USE_SCCACHE }}
uses: 'google-github-actions/auth@v2.1.0'
uses: 'google-github-actions/auth@v2.1.2'
with:
credentials_json: '${{ secrets.GOOGLE_CLOUD_GCP }}'
create_credentials_file: true
@@ -107,7 +121,20 @@ jobs:
docker_args=()
script_args=()
if [[ -n "${{ inputs.DOCKER_CERTS_DIR }}" ]]; then
DOCKER_CERTS_DIR=${{ inputs.DOCKER_CERTS_DIR }}
docker_args+=(-e DOCKER_CERTS_DIR=${DOCKER_CERTS_DIR})
fi
if [[ -n "${{ inputs.DOCKER_CERTS_UPDATE_COMMAND }}" ]]; then
DOCKER_CERTS_UPDATE_COMMAND=${{ inputs.DOCKER_CERTS_UPDATE_COMMAND }}
docker_args+=(-e DOCKER_CERTS_UPDATE_COMMAND=${DOCKER_CERTS_UPDATE_COMMAND})
fi
if [[ -n "${{ inputs.NPROC }}" ]]; then
NPROC=${{ inputs.NPROC }}
script_args+=(--nproc ${NPROC})
fi
docker_args+=(${{ inputs.DOCKER_RUN_ARGS }})
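The DOCKER_CERTS_DIR and DOCKER_CERTS_UPDATE_COMMAND variables passed above are expected to be consumed by the container-side build script (scripts/ci_build_and_test_in_container.sh, not shown in this excerpt). A minimal sketch of that step, assuming the variable names above; the actual logic in the script may differ:

    # Install host CA certificates mounted into the container, if any were provided.
    if [[ -n "${DOCKER_CERTS_DIR}" && -n "${DOCKER_CERTS_UPDATE_COMMAND}" ]]; then
      # The host mounts its trust anchors into DOCKER_CERTS_DIR via DOCKER_RUN_ARGS
      # (see the matrix entries in ci_tests.yml further down).
      ${DOCKER_CERTS_UPDATE_COMMAND}
    fi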
@@ -142,11 +169,6 @@ jobs:
script_args+=(--sccache-credentials $(basename ${GOOGLE_GHA_CREDS_PATH}))
fi
if [ ${{ inputs.RUNS_ON }} == 'streak' ] || [ ${{ inputs.RUNS_ON }} == 'streak2' ]; then
RUNNER_CERTIFICATES_DIR=/etc/pki/ca-trust/source/anchors/
mkdir -p ${GITHUB_WORKSPACE}/certificates
cp ${RUNNER_CERTIFICATES_DIR}/*.crt* ${GITHUB_WORKSPACE}/certificates
fi
# We need to know where the code folder is mounted inside the container so we can run the script at the proper location!
# Since this information is repeated twice, we use a variable.
GITHUB_WORKSPACE_MOUNT_POINT=/tmp/geos
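For reference, a minimal sketch of the mount this variable refers to; everything except the two variables above is illustrative:

    # Mount the checked-out sources at a fixed path inside the container and
    # run the build script from that same path.
    docker run \
      -v ${GITHUB_WORKSPACE}:${GITHUB_WORKSPACE_MOUNT_POINT} \
      "${IMAGE}" \
      ${GITHUB_WORKSPACE_MOUNT_POINT}/scripts/ci_build_and_test_in_container.sh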
@@ -168,7 +190,8 @@ jobs:
script_args+=(--cmake-build-type ${{ inputs.CMAKE_BUILD_TYPE }})
script_args+=(${{ inputs.BUILD_AND_TEST_CLI_ARGS }})
DOCKER_REPOSITORY=${{ inputs.DOCKER_REPOSITORY }}
SPLIT_DOCKER_REPOSITORY=(${DOCKER_REPOSITORY//// })
CONTAINER_NAME=geosx_build_${SPLIT_DOCKER_REPOSITORY[1]}_${GITHUB_SHA:0:7}
echo "CONTAINER_NAME: ${CONTAINER_NAME}"
@@ -182,6 +205,14 @@ jobs:
script_args+=(--code-coverage)
fi
echo running "docker run \
${docker_args[@]} \
-h=`hostname` \
${{ inputs.DOCKER_REPOSITORY }}:${{ inputs.DOCKER_IMAGE_TAG }} \
${GITHUB_WORKSPACE_MOUNT_POINT}/scripts/ci_build_and_test_in_container.sh \
${script_args[@]}"
# In case of an integrated tests run, we still want to send the results to the cloud for inspection,
# while for a standard build (if even possible), pushing a failed build would be pointless.
# GHA sets `-e` on bash scripts by default to fail asap,
@@ -200,8 +231,25 @@
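The collapsed lines presumably launch the container and capture its exit code rather than aborting immediately; a hedged sketch of that pattern, using the EXIT_STATUS variable referenced below (IMAGE stands in for the repository:tag expression used above):

    # Temporarily disable fail-fast so the exit code can be inspected after the
    # upload steps below have had a chance to run.
    set +e
    docker run "${docker_args[@]}" "${IMAGE}" "${script_args[@]}"
    EXIT_STATUS=$?
    set -e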
# Send to the bucket and print the download link when it makes sense.
if [[ ! -z "${{ inputs.GCP_BUCKET }}" ]]; then
if [[ "${{ inputs.BUILD_TYPE }}" = "integrated_tests" || ${EXIT_STATUS} -eq 0 ]]; then
CLOUDSDK_PYTHON=python3 gsutil cp -a public-read ${DATA_EXCHANGE_DIR}/${DATA_BASENAME} gs://${{ inputs.GCP_BUCKET }}/
echo "Download the bundle at https://storage.googleapis.com/${{ inputs.GCP_BUCKET }}/${DATA_BASENAME}"
if [ -f ${DATA_EXCHANGE_DIR}/${DATA_BASENAME} ]; then
CLOUDSDK_PYTHON=python3 gsutil cp -a public-read ${DATA_EXCHANGE_DIR}/${DATA_BASENAME} gs://${{ inputs.GCP_BUCKET }}/
echo "Download the bundle at https://storage.googleapis.com/${{ inputs.GCP_BUCKET }}/${DATA_BASENAME}"
fi
if [ -f ${DATA_EXCHANGE_DIR}/test_logs_${DATA_BASENAME} ]; then
CLOUDSDK_PYTHON=python3 gsutil cp -a public-read ${DATA_EXCHANGE_DIR}/test_logs_${DATA_BASENAME} gs://${{ inputs.GCP_BUCKET }}/
echo "Download integrated test logs here: https://storage.googleapis.com/${{ inputs.GCP_BUCKET }}/test_logs_${DATA_BASENAME}"
fi
# if $UPLOAD_BASELINES; then
if [ -f ${DATA_EXCHANGE_DIR}/baseline_${DATA_BASENAME} ];then
CLOUDSDK_PYTHON=python3 gsutil cp -a public-read ${DATA_EXCHANGE_DIR}/baseline_${DATA_BASENAME} gs://${{ inputs.GCP_BUCKET }}/
echo "Download test baselines here: https://storage.googleapis.com/${{ inputs.GCP_BUCKET }}/baseline_${DATA_BASENAME}"
echo "New baseline ID: baseline_${DATA_BASENAME::-7}"
else
echo "Baselines ${DATA_EXCHANGE_DIR}/baseline_${DATA_BASENAME} were not uploaded. Likeyly because no rebaseline was necessary."
fi
# fi
fi
fi
@@ -214,7 +262,7 @@ jobs:
- name: Upload coverage to Codecov
if: inputs.CODE_COVERAGE
uses: codecov/codecov-action@v4.0.1
uses: codecov/codecov-action@v4.3.1
with:
files: geos_coverage.info.cleaned
fail_ci_if_error: true
47 changes: 40 additions & 7 deletions .github/workflows/ci_tests.yml
@@ -41,7 +41,7 @@ jobs:
# The TPL tag is contained in the codespaces configuration to avoid duplications.
- name: Checkout .devcontainer/devcontainer.json
uses: actions/checkout@v4.1.1
uses: actions/checkout@v4.1.4
with:
sparse-checkout: |
.devcontainer/devcontainer.json
@@ -77,7 +77,7 @@ jobs:
# The integrated test submodule repository contains large data (using git lfs).
# To save time (and money) we do not let Github Actions automatically clone all our (lfs) subrepositories and do it by hand.
- name: Checkout Repository
uses: actions/checkout@v4.1.1
uses: actions/checkout@v4.1.4
with:
# Let script update submodules; Github Actions submodule history causes error
submodules: false
@@ -200,8 +200,27 @@ jobs:
ENABLE_TRILINOS: OFF
GCP_BUCKET: geosx/integratedTests
RUNS_ON: streak2
DOCKER_RUN_ARGS: "--cpus=32 --memory=384g"
NPROC: 32
DOCKER_RUN_ARGS: "--cpus=32 --memory=384g -v /etc/pki/ca-trust/source/anchors/:/usr/local/share/ca-certificates/llnl:ro"
DOCKER_CERTS_DIR: "/usr/local/share/ca-certificates"
DOCKER_CERTS_UPDATE_COMMAND: "update-ca-certificates"
REQUIRED_LABEL: "ci: run integrated tests"
UPLOAD_BASELINES: "ci: upload test baselines"

baseline_log:
needs: [is_not_draft_pull_request]
runs-on: ubuntu-22.04
steps:
- name: Checkout Repository
uses: actions/checkout@v4.1.4
with:
submodules: false
lfs: false
fetch-depth: 0
sparse-checkout: |
scripts
- name: Check that the baseline logs are modified if rebaselines are detected
run: "scripts/check_baseline_log.sh"

code_coverage:
needs:
Expand Down Expand Up @@ -237,8 +256,12 @@ jobs:
ENABLE_HYPRE_DEVICE: CUDA
ENABLE_HYPRE: ON
ENABLE_TRILINOS: OFF
RUNS_ON: Runner_8core_32GB

RUNS_ON: streak2
NPROC: 16
DOCKER_RUN_ARGS: "--cpus=16 --memory=256g --runtime=nvidia -v /etc/pki/ca-trust/source/anchors/:/usr/local/share/ca-certificates/llnl:ro"
DOCKER_CERTS_DIR: "/usr/local/share/ca-certificates"
DOCKER_CERTS_UPDATE_COMMAND: "update-ca-certificates"

- name: Ubuntu CUDA (20.04, clang 10.0.0 + gcc 9.4.0, open-mpi 4.0.3, cuda-11.8.89)
BUILD_AND_TEST_CLI_ARGS: "--no-install-schema"
CMAKE_BUILD_TYPE: Release
@@ -247,13 +270,20 @@
ENABLE_HYPRE: ON
ENABLE_TRILINOS: OFF
RUNS_ON: streak
DOCKER_RUN_ARGS: "--cpus=8 --memory=256g --runtime=nvidia --gpus all"
NPROC: 8
DOCKER_RUN_ARGS: "--cpus=8 --memory=256g --runtime=nvidia --gpus all -v /etc/pki/ca-trust/source/anchors/:/usr/local/share/ca-certificates/llnl:ro"
DOCKER_CERTS_DIR: "/usr/local/share/ca-certificates"
DOCKER_CERTS_UPDATE_COMMAND: "update-ca-certificates"

- name: Centos (7.7, gcc 8.3.1, open-mpi 1.10.7, cuda 11.8.89)
BUILD_AND_TEST_CLI_ARGS: "--no-run-unit-tests --no-install-schema"
CMAKE_BUILD_TYPE: Release
DOCKER_REPOSITORY: geosx/centos7.7-gcc8.3.1-cuda11.8.89
RUNS_ON: Runner_4core_16GB
RUNS_ON: streak2
NPROC: 16
DOCKER_RUN_ARGS: "--cpus=16 --memory=256g --runtime=nvidia -v /etc/pki/ca-trust/source/anchors/:/etc/pki/ca-trust/source/anchors/llnl:ro"
DOCKER_CERTS_DIR: "/etc/pki/ca-trust/source/anchors"
DOCKER_CERTS_UPDATE_COMMAND: "update-ca-trust"

# Below this line, jobs that deploy to Google Cloud.
- name: Pecan GPU (centos 7.7, gcc 8.2.0, open-mpi 4.0.1, mkl 2019.5, cuda 11.5.119)
@@ -278,6 +308,8 @@
with:
BUILD_AND_TEST_CLI_ARGS: ${{ matrix.BUILD_AND_TEST_CLI_ARGS }}
CMAKE_BUILD_TYPE: ${{ matrix.CMAKE_BUILD_TYPE }}
DOCKER_CERTS_DIR: ${{ matrix.DOCKER_CERTS_DIR }}
DOCKER_CERTS_UPDATE_COMMAND: ${{ matrix.DOCKER_CERTS_UPDATE_COMMAND }}
DOCKER_IMAGE_TAG: ${{ needs.is_not_draft_pull_request.outputs.DOCKER_IMAGE_TAG }}
DOCKER_REPOSITORY: ${{ matrix.DOCKER_REPOSITORY }}
DOCKER_RUN_ARGS: ${{ matrix.DOCKER_RUN_ARGS }}
@@ -286,6 +318,7 @@
ENABLE_TRILINOS: ${{ matrix.ENABLE_TRILINOS }}
GCP_BUCKET: ${{ matrix.GCP_BUCKET }}
HOST_CONFIG: ${{ matrix.HOST_CONFIG }}
NPROC: ${{ matrix.NPROC }}
RUNS_ON: ${{ matrix.RUNS_ON }}
REQUIRED_LABEL: "ci: ready to be merged"
secrets: inherit
3 changes: 0 additions & 3 deletions .gitmodules
@@ -7,9 +7,6 @@
[submodule "src/coreComponents/constitutive/PVTPackage"]
path = src/coreComponents/constitutive/PVTPackage
url = ../../GEOS-DEV/PVTPackage.git
[submodule "integratedTests"]
path = integratedTests
url = ../../GEOS-DEV/integratedTests.git
[submodule "src/coreComponents/fileIO/coupling/hdf5_interface"]
path = src/coreComponents/fileIO/coupling/hdf5_interface
url = ../../GEOS-DEV/hdf5_interface.git
8 changes: 8 additions & 0 deletions .integrated_tests.yaml
@@ -0,0 +1,8 @@
---
baselines:
bucket: geosx
baseline: integratedTests/baseline_integratedTests-pr3105-4885-a1a85c1

allow_fail:
all: ''
streak: pennyShapedToughnessDominated_smoke_01,pennyShapedViscosityDominated_smoke_01,pknViscosityDominated_smoke_01
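For context, a hedged illustration of how these fields could be consumed to fetch the archived baselines; the field extraction and the archive suffix are assumptions, and the real logic lives in the test tooling:

    # Read the bucket name and baseline ID from .integrated_tests.yaml.
    BUCKET=$(awk '$1 == "bucket:" {print $2}' .integrated_tests.yaml)
    BASELINE=$(awk '$1 == "baseline:" {print $2}' .integrated_tests.yaml)
    # Download the corresponding archive (the .tar.gz suffix is an assumption).
    CLOUDSDK_PYTHON=python3 gsutil cp "gs://${BUCKET}/${BASELINE}.tar.gz" /tmp/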
1 change: 0 additions & 1 deletion .readthedocs.yml
@@ -33,5 +33,4 @@ formats: all
submodules:
include:
- src/coreComponents/constitutive/PVTPackage
- integratedTests
recursive: true
26 changes: 26 additions & 0 deletions BASELINE_NOTES.md
@@ -0,0 +1,26 @@

Notes
==========

This file is designed to track changes to the integrated test baselines.
Any developer who updates the baseline ID in the .integrated_tests.yaml file is expected to create an entry in this file with the pull request number, date, and their justification for rebaselining.
These notes should be in reverse-chronological order, and use the following time format: (YYYY-MM-DD).

PR #3105 (2024-05-08)
======================

Added missing derivative for temperature, hence small numerical diffs in thermal test results and numerical behavior


PR #2917 (2024-05-07)
======================

New fields for wellsControls: wellControls1_ConstantMassRate_table, targetMassRate, massDensity, ...


PR #3044 (2024-05-02)
======================

Removed old integratedTests submodule
Implemented new baseline storage
Implemented new CI integrated tests
6 changes: 3 additions & 3 deletions host-configs/LLNL/quartz-base.cmake
@@ -58,9 +58,9 @@ set(MKL_LIBRARIES ${MKL_ROOT}/lib/intel64/libmkl_intel_lp64.so

# ATS
set(ATS_ARGUMENTS "--machine slurm36" CACHE STRING "")
# set(USER $ENV{USER} CACHE STRING "")
# set(ATS_WORKING_DIR "/p/lustre2/${USER}/integratedTests/${CONFIG_NAME}" CACHE PATH "")
# set(ATS_BASELINE_DIR "/p/lustre2/${USER}/integratedTests/baselines" CACHE PATH "")
set(USER $ENV{USER} CACHE STRING "")
set(ATS_WORKING_DIR "/p/lustre2/${USER}/integratedTestsGEOS/${CONFIG_NAME}" CACHE PATH "")
set(ATS_BASELINE_DIR "/p/lustre2/${USER}/integratedTestsGEOS/baselines" CACHE PATH "")

# Temporary argument for python module change testing
# set(GEOS_PYTHON_PACKAGES_BRANCH "feature/sherman/outOfPlaceATS" CACHE STRING "" FORCE)
