diff --git a/.github/workflows/clang-format-check.yml b/.github/workflows/clang-format-check.yml index 52e4d061..40e36a0a 100644 --- a/.github/workflows/clang-format-check.yml +++ b/.github/workflows/clang-format-check.yml @@ -7,7 +7,7 @@ jobs: runs-on: ubuntu-latest if: "!contains(github.event.head_commit.message, 'skip-ci')" steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - name: Run clang-format style check for C programs. uses: DoozyX/clang-format-lint-action@v0.11 with: diff --git a/.github/workflows/clang-format-fix.yml b/.github/workflows/clang-format-fix.yml index 9145f65c..52781162 100644 --- a/.github/workflows/clang-format-fix.yml +++ b/.github/workflows/clang-format-fix.yml @@ -1,16 +1,18 @@ -name: clang-format Check +name: clang-format Fix Format on: workflow_dispatch: push: jobs: formatting-check: - name: Formatting Check + name: Commit Format Changes runs-on: ubuntu-latest if: "!contains(github.event.head_commit.message, 'skip-ci')" + permissions: + contents: write steps: - - uses: actions/checkout@v2 - - name: Run clang-format style check for C programs. - uses: DoozyX/clang-format-lint-action@v0.11 + - uses: actions/checkout@v3 + - name: Fix C formatting issues detected by clang-format + uses: DoozyX/clang-format-lint-action@v0.13 with: source: '.' extensions: 'c,h,cpp,hpp' @@ -18,8 +20,8 @@ jobs: inplace: True style: file # exclude: './config ' - - uses: EndBug/add-and-commit@v7 + - uses: EndBug/add-and-commit@v9 with: author_name: github-actions author_email: github-actions[bot]@users.noreply.github.com - message: 'Committing clang-format changes' + message: 'committing clang-format changes' diff --git a/.github/workflows/h5bench-hdf5-1.10.4.yml b/.github/workflows/h5bench-hdf5-1.10.4.yml index 712eb4e9..92bf51b1 100644 --- a/.github/workflows/h5bench-hdf5-1.10.4.yml +++ b/.github/workflows/h5bench-hdf5-1.10.4.yml @@ -1,5 +1,8 @@ name: h5bench (HDF5 1.10.4) +env: + ACTIONS_ALLOW_USE_UNSECURE_NODE_VERSION: true + on: pull_request: @@ -20,7 +23,7 @@ jobs: OMPI_ALLOW_RUN_AS_ROOT_CONFIRM: 1 steps: - - uses: actions/checkout@v1 + - uses: actions/checkout@v4 with: submodules: true @@ -223,7 +226,7 @@ jobs: - name: Upload artifact if: always() - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v4 with: name: test path: build/storage/**/std* diff --git a/.github/workflows/h5bench-hdf5-1.10.7.yml b/.github/workflows/h5bench-hdf5-1.10.7.yml index 8834b81b..023094d9 100644 --- a/.github/workflows/h5bench-hdf5-1.10.7.yml +++ b/.github/workflows/h5bench-hdf5-1.10.7.yml @@ -1,5 +1,8 @@ name: h5bench (HDF5 1.10.7) +env: + ACTIONS_ALLOW_USE_UNSECURE_NODE_VERSION: true + on: pull_request: @@ -20,7 +23,7 @@ jobs: OMPI_ALLOW_RUN_AS_ROOT_CONFIRM: 1 steps: - - uses: actions/checkout@v1 + - uses: actions/checkout@v4 with: submodules: true @@ -223,7 +226,7 @@ jobs: - name: Upload artifact if: always() - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v4 with: name: test path: build/h5bench_e3sm-prefix/src/h5bench_e3sm-stamp/* diff --git a/.github/workflows/h5bench-hdf5-1.10.8.yml b/.github/workflows/h5bench-hdf5-1.10.8.yml index 3c2fc5f1..ea4c6ad1 100644 --- a/.github/workflows/h5bench-hdf5-1.10.8.yml +++ b/.github/workflows/h5bench-hdf5-1.10.8.yml @@ -1,5 +1,8 @@ name: h5bench (HDF5 1.10.8) +env: + ACTIONS_ALLOW_USE_UNSECURE_NODE_VERSION: true + on: pull_request: @@ -20,7 +23,7 @@ jobs: OMPI_ALLOW_RUN_AS_ROOT_CONFIRM: 1 steps: - - uses: actions/checkout@v1 + - uses: actions/checkout@v4 with: submodules: true @@ -223,7 +226,7 @@ jobs: - name: 
Upload artifact if: always() - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v4 with: name: test path: build/storage/**/std* diff --git a/.github/workflows/h5bench-hdf5-1.12.0.yml b/.github/workflows/h5bench-hdf5-1.12.0.yml index d194ae41..b0e27e9f 100644 --- a/.github/workflows/h5bench-hdf5-1.12.0.yml +++ b/.github/workflows/h5bench-hdf5-1.12.0.yml @@ -20,7 +20,7 @@ jobs: OMPI_ALLOW_RUN_AS_ROOT_CONFIRM: 1 steps: - - uses: actions/checkout@v1 + - uses: actions/checkout@v4 with: submodules: true @@ -258,7 +258,7 @@ jobs: - name: Upload artifact if: always() - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v4 with: name: test path: build/storage/**/std* diff --git a/.github/workflows/h5bench-hdf5-1.14.0.yml b/.github/workflows/h5bench-hdf5-1.14.0.yml index b3be7393..8aa0b5d2 100644 --- a/.github/workflows/h5bench-hdf5-1.14.0.yml +++ b/.github/workflows/h5bench-hdf5-1.14.0.yml @@ -20,7 +20,7 @@ jobs: OMPI_ALLOW_RUN_AS_ROOT_CONFIRM: 1 steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 with: submodules: true @@ -513,7 +513,7 @@ jobs: - name: Upload artifact if: always() - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v4 with: name: test path: build*/storage/**/std* diff --git a/.github/workflows/h5bench-hdf5-1.14.1-compress.yml b/.github/workflows/h5bench-hdf5-1.14.1-compress.yml new file mode 100644 index 00000000..2ec53f6e --- /dev/null +++ b/.github/workflows/h5bench-hdf5-1.14.1-compress.yml @@ -0,0 +1,125 @@ +name: h5bench (HDF5 1.14.1 + compression) + +on: + pull_request: + + workflow_dispatch: + +jobs: + h5bench: + runs-on: ubuntu-20.04 + container: + image: hpcio/hdf5-1.14.1-compression + timeout-minutes: 60 + + steps: + - uses: actions/checkout@v4 + with: + submodules: true + + - name: Build h5bench SYNC + run: | + current="$PWD" + + export HDF5_HOME=/opt/hdf5 + + mkdir build-sync + cd build-sync + + cmake .. 
\ + -DCMAKE_C_COMPILER=$HDF5_HOME/bin/h5pcc + make + + - name: Test h5bench SYNC write/read + run: | + cd build-sync + ./h5bench --debug --abort-on-failure --validate-mode ../samples/sync-write-read-contig-1d-small.json + + - name: Test h5bench SYNC write/read 1D N_BIT filter + run: | + cd build-sync + ./h5bench --debug --abort-on-failure --validate-mode ../samples/sync-write-read-chunked-1d-N_BIT.json + + - name: Test h5bench SYNC write/read 1D SZIP filter + run: | + cd build-sync + ./h5bench --debug --abort-on-failure --validate-mode ../samples/sync-write-read-chunked-1d-SZIP.json + + - name: Test h5bench SYNC write/read 1D GZIP filter + run: | + cd build-sync + ./h5bench --debug --abort-on-failure --validate-mode ../samples/sync-write-read-chunked-1d-GZIP.json + + - name: Test h5bench SYNC write/read 1D SZ3 filter + run: | + export LD_LIBRARY_PATH=/opt/SZ3/lib:$LD_LIBRARY_PATH + cd build-sync + ./h5bench --debug --abort-on-failure --validate-mode ../samples/sync-write-read-chunked-1d-SZ3.json + + - name: Test h5bench SYNC write/read 1D ZFP filter + run: | + export LD_LIBRARY_PATH=/opt/zfp/lib:$LD_LIBRARY_PATH + cd build-sync + ./h5bench --debug --abort-on-failure --validate-mode ../samples/sync-write-read-chunked-1d-ZFP.json + + - name: Test h5bench SYNC write/read 2D N_BIT filter + run: | + cd build-sync + ./h5bench --debug --abort-on-failure --validate-mode ../samples/sync-write-read-chunked-2d-N_BIT.json + + - name: Test h5bench SYNC write/read 2D SZIP filter + run: | + cd build-sync + ./h5bench --debug --abort-on-failure --validate-mode ../samples/sync-write-read-chunked-2d-SZIP.json + + - name: Test h5bench SYNC write/read 2D GZIP filter + run: | + cd build-sync + ./h5bench --debug --abort-on-failure --validate-mode ../samples/sync-write-read-chunked-2d-GZIP.json + + - name: Test h5bench SYNC write/read 2D SZ3 filter + run: | + export LD_LIBRARY_PATH=/opt/SZ3/lib:$LD_LIBRARY_PATH + cd build-sync + ./h5bench --debug --abort-on-failure --validate-mode ../samples/sync-write-read-chunked-2d-SZ3.json + + - name: Test h5bench SYNC write/read 2D ZFP filter + run: | + export LD_LIBRARY_PATH=/opt/zfp/lib:$LD_LIBRARY_PATH + cd build-sync + ./h5bench --debug --abort-on-failure --validate-mode ../samples/sync-write-read-chunked-2d-ZFP.json + + - name: Test h5bench SYNC write/read 3D N_BIT filter + run: | + cd build-sync + ./h5bench --debug --abort-on-failure --validate-mode ../samples/sync-write-read-chunked-3d-N_BIT.json + + - name: Test h5bench SYNC write/read 3D SZIP filter + run: | + cd build-sync + ./h5bench --debug --abort-on-failure --validate-mode ../samples/sync-write-read-chunked-3d-SZIP.json + + - name: Test h5bench SYNC write/read 3D GZIP filter + run: | + cd build-sync + ./h5bench --debug --abort-on-failure --validate-mode ../samples/sync-write-read-chunked-3d-GZIP.json + + - name: Test h5bench SYNC write/read 3D SZ3 filter + run: | + export LD_LIBRARY_PATH=/opt/SZ3/lib:$LD_LIBRARY_PATH + cd build-sync + ./h5bench --debug --abort-on-failure --validate-mode ../samples/sync-write-read-chunked-3d-SZ3.json + + - name: Test h5bench SYNC write/read 3D ZFP filter + run: | + export LD_LIBRARY_PATH=/opt/zfp/lib:$LD_LIBRARY_PATH + cd build-sync + ./h5bench --debug --abort-on-failure --validate-mode ../samples/sync-write-read-chunked-3d-ZFP.json + + - name: Upload artifact + if: always() + uses: actions/upload-artifact@v4 + with: + name: test + path: build*/storage/**/std* + retention-days: 1 diff --git a/.github/workflows/h5bench-hdf5-1.14.1.yml b/.github/workflows/h5bench-hdf5-1.14.1.yml 
index 2de7aceb..f9008c2c 100644 --- a/.github/workflows/h5bench-hdf5-1.14.1.yml +++ b/.github/workflows/h5bench-hdf5-1.14.1.yml @@ -20,7 +20,7 @@ jobs: OMPI_ALLOW_RUN_AS_ROOT_CONFIRM: 1 steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 with: submodules: true @@ -513,7 +513,7 @@ jobs: - name: Upload artifact if: always() - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v4 with: name: test path: build*/storage/**/std* diff --git a/.github/workflows/h5bench-hdf5-develop-test.yml b/.github/workflows/h5bench-hdf5-develop-test.yml index e995e9e1..b989d8db 100644 --- a/.github/workflows/h5bench-hdf5-develop-test.yml +++ b/.github/workflows/h5bench-hdf5-develop-test.yml @@ -15,7 +15,7 @@ jobs: OMPI_MCA_rmaps_base_oversubscribe: "yes" steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 with: submodules: true @@ -131,6 +131,9 @@ jobs: export ABT_DIR=/opt/argobots export ASYNC_DIR=/opt/vol-async + export LD_LIBRARY_PATH=/opt/SZ3/lib:$LD_LIBRARY_PATH + export LD_LIBRARY_PATH=/opt/zfp/lib:$LD_LIBRARY_PATH + cd build-sync ctest --verbose . @@ -152,7 +155,7 @@ jobs: - name: Upload artifact if: always() - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v4 with: name: test path: build*/storage/**/std* diff --git a/.github/workflows/h5bench-hdf5-develop.yml b/.github/workflows/h5bench-hdf5-develop.yml index b68c92e2..dde52e3e 100644 --- a/.github/workflows/h5bench-hdf5-develop.yml +++ b/.github/workflows/h5bench-hdf5-develop.yml @@ -15,7 +15,7 @@ jobs: OMPI_MCA_rmaps_base_oversubscribe: "yes" steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 with: submodules: true @@ -645,7 +645,7 @@ jobs: - name: Upload artifact if: always() - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v4 with: name: test path: build*/storage/**/std* diff --git a/.gitignore b/.gitignore index 443bc984..7c29fba8 100644 --- a/.gitignore +++ b/.gitignore @@ -101,4 +101,7 @@ Temporary Items # iCloud generated files *.icloud +# Build and installer directories +/build +/installer # End of https://www.toptal.com/developers/gitignore/api/macos,c++,cmake,linux diff --git a/.gitmodules b/.gitmodules index 59b94cef..3584c84f 100644 --- a/.gitmodules +++ b/.gitmodules @@ -1,14 +1,14 @@ [submodule "amrex"] path = amrex - url = https://github.com/AMReX-Codes/amrex + url = https://github.com/AMReX-Codes/amrex.git [submodule "openpmd"] path = openpmd - url = https://github.com/openPMD/openPMD-api + url = https://github.com/openPMD/openPMD-api.git [submodule "e3sm"] path = e3sm - url = https://github.com/Parallel-NetCDF/E3SM-IO + url = https://github.com/Parallel-NetCDF/E3SM-IO.git [submodule "macsio"] path = macsio - url = https://github.com/LLNL/MACSio + url = https://github.com/LLNL/MACSio.git diff --git a/CMakeLists.txt b/CMakeLists.txt index cf22a019..c277ce3a 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -233,7 +233,7 @@ if(H5BENCH_E3SM) ExternalProject_Add(h5bench_e3sm SOURCE_DIR ${CMAKE_CURRENT_SOURCE_DIR}/e3sm CONFIGURE_COMMAND autoreconf -i COMMAND ${CMAKE_CURRENT_SOURCE_DIR}/e3sm/configure --prefix=${CMAKE_BINARY_DIR} --with-hdf5=${HDF5_HOME} - BUILD_COMMAND make -j 1 + BUILD_COMMAND make INSTALL_COMMAND ${CMAKE_COMMAND} -E copy src/e3sm_io ${CMAKE_BINARY_DIR}/h5bench_e3sm BUILD_IN_SOURCE 1 LOG_CONFIGURE 1 diff --git a/commons/h5bench_util.c b/commons/h5bench_util.c index 5f898c86..50a250d3 100644 --- a/commons/h5bench_util.c +++ b/commons/h5bench_util.c @@ -31,6 +31,11 @@ int parse_unit(char *str_in, unsigned long long *num, char 
**unit_str);
 int has_vol_async;
 
+char *compress_filter_names[] = {"INVALID", "N_BIT", "SZIP", "GZIP", "SZ3", "ZFP", "SNAPPY_CUDA"};
+
+int compress_filter_ids[] = {-1, 5, 4, 1, 32024, 32013, 32003};
+static unsigned int *cd_values;
+
 unsigned long
 get_time_usec()
 {
@@ -181,8 +186,9 @@ ts_delayed_close(mem_monitor *mon, unsigned long *metadata_time_total, int dset_
     unsigned long t1, t2;
     unsigned long meta_time = 0;
 
-    if (!has_vol_async)
+    if (!has_vol_async) {
         return 0;
+    }
 
     for (int i = 0; i < mon->time_step_cnt; i++) {
         ts_run = &(mon->time_steps[i]);
@@ -851,6 +857,62 @@ _set_params(char *key, char *val_in, bench_params *params_in_out, int do_write)
         else
             (*params_in_out).read_option = READ_OPTION_INVALID;
     }
+    else if (strcmp(key, "COMPRESS_FILTER") == 0) { // New
+        if (strcmp(val_in, "N_BIT") == 0) {
+            (*params_in_out).compress_filter = N_BIT;
+        }
+        else if (strcmp(val_in, "SZIP") == 0) {
+            (*params_in_out).compress_filter = SZIP;
+        }
+        else if (strcmp(val_in, "GZIP") == 0) {
+            (*params_in_out).compress_filter = GZIP;
+        }
+        else if (strcmp(val_in, "SZ3") == 0) {
+            (*params_in_out).compress_filter = SZ3;
+        }
+        else if (strcmp(val_in, "ZFP") == 0) {
+            (*params_in_out).compress_filter = ZFP;
+        }
+        else if (strcmp(val_in, "SNAPPY_CUDA") == 0) {
+            (*params_in_out).compress_filter = SNAPPY_CUDA;
+        }
+
+        else
+            (*params_in_out).compress_filter = COMPRESS_FILTER_INVALID;
+    }
+    else if (strcmp(key, "CD_NELMTS") == 0) {
+        (*params_in_out).cd_nelmts = atoi(val);
+    }
+    else if (strcmp(key, "CD_VALUES_1") == 0) {
+        (*params_in_out).cd_value_1 = atoi(val);
+    }
+    else if (strcmp(key, "CD_VALUES_2") == 0) {
+        (*params_in_out).cd_value_2 = atoi(val);
+    }
+    else if (strcmp(key, "CD_VALUES_3") == 0) {
+        (*params_in_out).cd_value_3 = atoi(val);
+    }
+    else if (strcmp(key, "CD_VALUES_4") == 0) {
+        (*params_in_out).cd_value_4 = atoi(val);
+    }
+    else if (strcmp(key, "CD_VALUES_5") == 0) {
+        (*params_in_out).cd_value_5 = atoi(val);
+    }
+    else if (strcmp(key, "CD_VALUES_6") == 0) {
+        (*params_in_out).cd_value_6 = atoi(val);
+    }
+    else if (strcmp(key, "CD_VALUES_7") == 0) {
+        (*params_in_out).cd_value_7 = atoi(val);
+    }
+    else if (strcmp(key, "CD_VALUES_8") == 0) {
+        (*params_in_out).cd_value_8 = atoi(val);
+    }
+    else if (strcmp(key, "CD_VALUES_9") == 0) {
+        (*params_in_out).cd_value_9 = atoi(val);
+    }
+    else if (strcmp(key, "CD_VALUES_10") == 0) {
+        (*params_in_out).cd_value_10 = atoi(val);
+    }
     else if (strcmp(key, "NUM_DIMS") == 0) {
         int num = atoi(val);
         if (num > 0)
@@ -1099,6 +1161,18 @@ bench_params_init(bench_params *params_out)
     (*params_out).align           = 0;
     (*params_out).align_threshold = 0;
     (*params_out).align_len       = 0;
+
+    (*params_out).cd_nelmts   = 0; // new
+    (*params_out).cd_value_1  = 0; // new
+    (*params_out).cd_value_2  = 0; // new
+    (*params_out).cd_value_3  = 0; // new
+    (*params_out).cd_value_4  = 0; // new
+    (*params_out).cd_value_5  = 0; // new
+    (*params_out).cd_value_6  = 0; // new
+    (*params_out).cd_value_7  = 0; // new
+    (*params_out).cd_value_8  = 0; // new
+    (*params_out).cd_value_9  = 0; // new
+    (*params_out).cd_value_10 = 0; // new
 }
 
 int
@@ -1272,6 +1346,24 @@ print_params(const bench_params *p)
 
     if (p->useCompress) {
         printf("Use compression: %d\n", p->useCompress);
+        printf("  Compression_filter_name: %s\n", compress_filter_names[p->compress_filter]);
+        printf("  Compression_filter_id: %d\n", compress_filter_ids[p->compress_filter]);
+        printf("  Number of compression filter parameters: %zu\n", p->cd_nelmts);
+        cd_values    = (unsigned int *)malloc(10 * sizeof(unsigned int));
+        cd_values[0] = p->cd_value_1;
+        cd_values[1] = p->cd_value_2;
+        cd_values[2] = p->cd_value_3;
+        cd_values[3] = p->cd_value_4;
+        cd_values[4] = p->cd_value_5;
+        cd_values[5] = p->cd_value_6;
+        cd_values[6] = p->cd_value_7;
+
cd_values[7] = p->cd_value_8; + cd_values[8] = p->cd_value_9; + cd_values[9] = p->cd_value_10; + for (int i = 0; i < p->cd_nelmts; ++i) { + printf(" Compression parameter %d: %d\n", i + 1, cd_values[i]); + } + free(cd_values); printf(" chunk_dim1: %lu\n", p->chunk_dim_1); if (p->num_dims >= 2) { printf(" chunk_dim2: %lu\n", p->chunk_dim_2); diff --git a/commons/h5bench_util.h b/commons/h5bench_util.h index 5fc8adb4..64098b4a 100644 --- a/commons/h5bench_util.h +++ b/commons/h5bench_util.h @@ -100,15 +100,26 @@ typedef enum read_option { CS } read_option; +typedef enum compress_filter { + COMPRESS_FILTER_INVALID, + N_BIT, + SZIP, + GZIP, + SZ3, + ZFP, + SNAPPY_CUDA +} compress_filter; + typedef struct bench_params { - io_operation io_op; - pattern mem_pattern; - pattern file_pattern; - read_option read_option; - int useCompress; - int useCSV; - async_mode asyncMode; - int subfiling; + io_operation io_op; + pattern mem_pattern; + pattern file_pattern; + read_option read_option; + compress_filter compress_filter; + int useCompress; + int useCSV; + async_mode asyncMode; + int subfiling; union access_pattern { read_pattern pattern_read; write_pattern pattern_write; @@ -148,6 +159,17 @@ typedef struct bench_params { unsigned long align_threshold; unsigned long align_len; unsigned long stdev_dim_1; + size_t cd_nelmts; + unsigned int cd_value_1; + unsigned int cd_value_2; + unsigned int cd_value_3; + unsigned int cd_value_4; + unsigned int cd_value_5; + unsigned int cd_value_6; + unsigned int cd_value_7; + unsigned int cd_value_8; + unsigned int cd_value_9; + unsigned int cd_value_10; } bench_params; typedef struct data_md { diff --git a/docker/ubuntu-18.04-hdf5-1.10.4/Dockerfile b/docker/ubuntu-18.04-hdf5-1.10.4/Dockerfile index 900789b5..41cb8602 100644 --- a/docker/ubuntu-18.04-hdf5-1.10.4/Dockerfile +++ b/docker/ubuntu-18.04-hdf5-1.10.4/Dockerfile @@ -1,6 +1,6 @@ -FROM ubuntu:bionic +FROM ubuntu:focal -LABEL Description="Ubuntu 18.04 environment with HDF5 1.10.4" +LABEL Description="Ubuntu 20.04 environment with HDF5 1.10.4" ENV DEBIAN_FRONTEND=noninteractive ENV HDF5_LIBTOOL=/usr/bin/libtoolize diff --git a/docker/ubuntu-18.04-hdf5-1.10.7/Dockerfile b/docker/ubuntu-18.04-hdf5-1.10.7/Dockerfile index 82b59850..ca118574 100644 --- a/docker/ubuntu-18.04-hdf5-1.10.7/Dockerfile +++ b/docker/ubuntu-18.04-hdf5-1.10.7/Dockerfile @@ -1,6 +1,6 @@ -FROM ubuntu:bionic +FROM ubuntu:focal -LABEL Description="Ubuntu 18.04 environment with HDF5 1.10.7" +LABEL Description="Ubuntu 20.04 environment with HDF5 1.10.7" ENV DEBIAN_FRONTEND=noninteractive ENV HDF5_LIBTOOL=/usr/bin/libtoolize diff --git a/docker/ubuntu-18.04-hdf5-1.10.8/Dockerfile b/docker/ubuntu-18.04-hdf5-1.10.8/Dockerfile index 8db8ae59..416f4dfd 100644 --- a/docker/ubuntu-18.04-hdf5-1.10.8/Dockerfile +++ b/docker/ubuntu-18.04-hdf5-1.10.8/Dockerfile @@ -1,6 +1,6 @@ -FROM ubuntu:bionic +FROM ubuntu:focal -LABEL Description="Ubuntu 18.04 environment with HDF5 1.10.8" +LABEL Description="Ubuntu 20.04 environment with HDF5 1.10.8" ENV DEBIAN_FRONTEND=noninteractive ENV HDF5_LIBTOOL=/usr/bin/libtoolize diff --git a/docker/ubuntu-20.04-hdf5-1.14.1-compression/Dockerfile b/docker/ubuntu-20.04-hdf5-1.14.1-compression/Dockerfile new file mode 100644 index 00000000..82636cf5 --- /dev/null +++ b/docker/ubuntu-20.04-hdf5-1.14.1-compression/Dockerfile @@ -0,0 +1,82 @@ +FROM ubuntu:focal + +LABEL Description="Ubuntu 20.04 environment with HDF5 1.14.1 (compression filters)" + +ENV DEBIAN_FRONTEND=noninteractive +ENV HDF5_LIBTOOL=/usr/bin/libtoolize + +RUN 
apt-get update \
+    && apt-get install -y \
+        git \
+        curl \
+        wget \
+        sudo \
+        gpg \
+        ca-certificates \
+        m4 \
+        autoconf \
+        automake \
+        libtool \
+        pkg-config \
+        cmake \
+        libtool \
+        zlib1g-dev \
+        libaec0 \
+        libaec-dev \
+        python3 \
+        python3-pip \
+        python3-dev \
+        python3-setuptools \
+        gcc \
+        g++ \
+        libopenmpi-dev \
+        software-properties-common \
+    && ldconfig
+RUN wget -O - https://apt.kitware.com/keys/kitware-archive-latest.asc 2>/dev/null | gpg --dearmor - | sudo tee /etc/apt/trusted.gpg.d/kitware.gpg >/dev/null \
+    && sudo apt-add-repository 'deb https://apt.kitware.com/ubuntu/ focal main' \
+    && apt-get update \
+    && apt-get install cmake -y \
+    && pip3 install psutil
+RUN wget https://github.com/HDFGroup/hdf5/archive/refs/tags/hdf5-1_14_1.tar.gz \
+    && tar zxvf hdf5-1_14_1.tar.gz \
+    && mv hdf5-hdf5-1_14_1 hdf5 \
+    && cd hdf5 \
+    && ./autogen.sh \
+    && CC=mpicc CXX=mpicxx ./configure --prefix=/opt/hdf5 --enable-parallel --enable-cxx --enable-threadsafe --enable-shared --disable-static --enable-unsupported --with-zlib=yes --with-szlib=yes \
+    && make -j 8 \
+    && make install
+RUN cd ~ \
+    && export PATH=/opt/hdf5/bin:$PATH \
+    && export LD_LIBRARY_PATH=/opt/hdf5/lib:$LD_LIBRARY_PATH \
+    && export HDF5_HOME=/opt/hdf5 \
+    && git clone https://github.com/szcompressor/SZ3 \
+    && cd SZ3 \
+    && git checkout v3.1.8 \
+    && mkdir build \
+    && cd build \
+    && cmake -DCMAKE_INSTALL_PREFIX=/opt/SZ3 -DBUILD_H5Z_FILTER=ON .. \
+    && make \
+    && make install
+RUN cd ~ \
+    && git clone https://github.com/LLNL/zfp \
+    && cd zfp \
+    && mkdir build \
+    && cd build \
+    && cmake -DCMAKE_INSTALL_PREFIX=/opt/zfp -DZFP_BIT_STREAM_WORD_SIZE=8 .. \
+    && make \
+    && make install
+RUN cd ~ \
+    && export HDF5_DIR=/opt/hdf5 \
+    && export PATH=/opt/hdf5/bin:$PATH \
+    && export LD_LIBRARY_PATH=/opt/hdf5/lib:$LD_LIBRARY_PATH \
+    && export ZFP_DIR=/opt/zfp/lib/cmake/zfp \
+    && git clone https://github.com/LLNL/H5Z-ZFP \
+    && cd H5Z-ZFP \
+    && mkdir build \
+    && cd build \
+    && CC=mpicc cmake -DCMAKE_INSTALL_PREFIX=/opt/H5Z-ZFP -DFORTRAN_INTERFACE=OFF .. \
+    && make \
+    && make install
+RUN rm -rf /var/lib/apt/lists/* \
+    && apt-get clean \
+    && apt-get autoclean
diff --git a/docs/source/macsio.rst b/docs/source/macsio.rst
index 84d75be6..f7989b44 100644
--- a/docs/source/macsio.rst
+++ b/docs/source/macsio.rst
@@ -5,7 +5,7 @@ MACSio (Multi-purpose, Application-Centric, Scalable I/O Proxy Application) is b
 MACSio in h5bench only supports the HDF5 interface. You need to have the `json-cwx `_ dependency library installed prior to compiling it in h5bench.
 
-You can find more information in MACSio `GitHub repository `_.
+You can find more information in MACSio `GitHub repository <https://github.com/LLNL/MACSio>`_.
 
 Configuration
 -------------
@@ -38,4 +38,4 @@ To run an instance of MACSio HDF5 benchmark you need to include the following in
             "parallel_file_mode": "MIF 8",
             "part_size": "1M"
         }
-    }
+    }
\ No newline at end of file
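(Editor's note: the Dockerfile above installs the SZ3 and H5Z-ZFP plugins under /opt, and the plugin.rst added below documents the build steps for end users. As a hedged, supplementary sketch that is not part of the PR, this is how a C program can confirm those plugins are loadable before running a benchmark; the /opt paths come from the Dockerfile and the sample JSON files, the numeric filter IDs from the #defines this PR adds to h5bench_write.c, and H5PLprepend/H5Zfilter_avail are standard HDF5 C APIs.)

```c
#include <stdio.h>
#include <hdf5.h>

#define H5Z_FILTER_SZ3 32024 /* same IDs this PR defines in h5bench_write.c */
#define H5Z_FILTER_ZFP 32013

int
main(void)
{
    /* Equivalent to exporting HDF5_PLUGIN_PATH; paths follow the Dockerfile above. */
    H5PLprepend("/opt/SZ3/lib");
    H5PLprepend("/opt/H5Z-ZFP/plugin");

    /* H5Zfilter_avail attempts a dynamic plugin load for unregistered filters. */
    printf("SZ3 available: %s\n", H5Zfilter_avail(H5Z_FILTER_SZ3) > 0 ? "yes" : "no");
    printf("ZFP available: %s\n", H5Zfilter_avail(H5Z_FILTER_ZFP) > 0 ? "yes" : "no");
    return 0;
}
```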
diff --git a/docs/source/plugin.rst b/docs/source/plugin.rst
new file mode 100644
index 00000000..85a9e2dc
--- /dev/null
+++ b/docs/source/plugin.rst
@@ -0,0 +1,85 @@
+Build and Run External Compression Plugins
+==========================================
+
+-----------------------------------
+SZ3
+-----------------------------------
+
+
+Build with CMake
+-----------------------------------
+
+.. code-block:: bash
+
+    git clone https://github.com/szcompressor/SZ3
+    cd SZ3
+    mkdir build installer
+    realpath installer
+    cd build
+    export CMAKE_INSTALL_PREFIX=.../SZ3/installer
+    export PATH=.../hdf5/installer/bin:$PATH
+    export HDF5_HOME=.../hdf5/installer
+    cmake -DCMAKE_INSTALL_PREFIX=$CMAKE_INSTALL_PREFIX -DBUILD_H5Z_FILTER=ON ..
+    make
+    make install
+
+
+Enable SZ3 in benchmark at runtime
+-----------------------------------
+To make sure HDF5 can find the installed plugin and apply it to the datasets, you **must** either define the environment variable ``HDF5_PLUGIN_PATH`` using ``export HDF5_PLUGIN_PATH=.../SZ3/installer/lib64`` in every session, or provide the path as an input in the configuration JSON file and h5bench will set it up for you:
+
+.. code-block:: json
+
+    "vol": {
+        "path": ".../SZ3/installer/lib64"
+    }
+
+
+-----------------------------------
+ZFP
+-----------------------------------
+
+
+Build with CMake
+-----------------------------------
+First, clone the ZFP GitHub repository and build ZFP
+
+.. code-block:: bash
+
+    git clone https://github.com/LLNL/zfp.git
+    cd zfp
+    mkdir build installer
+    realpath installer
+    cd build
+    export CMAKE_INSTALL_PREFIX=.../zfp/installer
+    cmake -DCMAKE_INSTALL_PREFIX=$CMAKE_INSTALL_PREFIX -DZFP_BIT_STREAM_WORD_SIZE=8 ..
+    make
+    make install
+
+Second, clone the H5Z-ZFP GitHub repository and build H5Z-ZFP
+
+.. code-block:: bash
+
+    git clone https://github.com/LLNL/H5Z-ZFP.git
+    cd H5Z-ZFP
+    mkdir build installer
+    realpath installer
+    cd build
+    export CMAKE_INSTALL_PREFIX=.../H5Z-ZFP/installer
+    export HDF5_DIR=.../hdf5/installer
+    export ZFP_DIR=.../zfp/installer/lib64/cmake/zfp
+    cmake -DCMAKE_INSTALL_PREFIX=$CMAKE_INSTALL_PREFIX ..
+    make
+    make install
+
+
+Enable ZFP in benchmark at runtime
+-----------------------------------
+You **must** either define the environment variable ``HDF5_PLUGIN_PATH`` using ``export HDF5_PLUGIN_PATH=.../H5Z-ZFP/installer/plugin`` in every session, or provide the path in the JSON file:
+
+.. code-block:: json
+
+    "vol": {
+        "path": ".../H5Z-ZFP/installer/plugin"
+    }
+
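(Editor's note: the vpic.rst changes below document the new compression knobs. For orientation, the built-in-filter path, COMPRESS: YES with N_BIT/SZIP/GZIP, reduces to a chunked dataset creation property list plus one filter call. A minimal, self-contained sketch, not PR code; the file and dataset names are placeholders and the sizes are copied from the 1-D sample configs.)

```c
#include <assert.h>
#include <hdf5.h>

int
main(void)
{
    hsize_t dims[1]  = {4194304}; /* matches DIM_1 in the 1-D samples */
    hsize_t chunk[1] = {4194304};

    hid_t space = H5Screate_simple(1, dims, NULL);
    hid_t dcpl  = H5Pcreate(H5P_DATASET_CREATE);

    /* Filters require a chunked layout; chunk rank must match the dataspace rank. */
    assert(H5Pset_chunk(dcpl, 1, chunk) >= 0);
    assert(H5Pset_deflate(dcpl, 9) >= 0); /* GZIP level 9, as set_globals() uses */

    hid_t file = H5Fcreate("test.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
    hid_t dset = H5Dcreate2(file, "x", H5T_NATIVE_FLOAT, space, H5P_DEFAULT, dcpl, H5P_DEFAULT);

    H5Dclose(dset);
    H5Pclose(dcpl);
    H5Sclose(space);
    H5Fclose(file);
    return 0;
}
```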
diff --git a/docs/source/vpic.rst b/docs/source/vpic.rst
index 93fc1055..de144934 100644
--- a/docs/source/vpic.rst
+++ b/docs/source/vpic.rst
@@ -69,12 +69,29 @@ Compression Settings
 **Parameter**                           **Description**
 ======================================= ==========================================================
 ``COMPRESS``                            `YES` or `NO` (optional) enables parralel compression
+``COMPRESS_FILTER``                     Options: ``N_BIT``, ``SZIP``, ``GZIP``, ``SZ3``, ``ZFP``, ``SNAPPY_CUDA``
+``CD_NELMTS``                           Number of auxiliary data to control the behavior of the compression
+``CD_VALUES_1``                         Auxiliary data
+``CD_VALUES_2``                         Auxiliary data
+``CD_VALUES_3``                         Auxiliary data
+``CD_VALUES_4``                         Auxiliary data
+``CD_VALUES_5``                         Auxiliary data
+``CD_VALUES_6``                         Auxiliary data
+``CD_VALUES_7``                         Auxiliary data
+``CD_VALUES_8``                         Auxiliary data
+``CD_VALUES_9``                         Auxiliary data
+``CD_VALUES_10``                        Auxiliary data
 ``CHUNK_DIM_1``                         Chunk dimension
 ``CHUNK_DIM_2``                         Chunk dimension
 ``CHUNK_DIM_3``                         Chunk dimension
 ======================================= ==========================================================
 
-Compression is only applicable for ``h5bench_write``. It has not effect for ``h5bench_read``. When enabled the chunk dimensions parameters (``CHUNK_DIM_1``, ``CHUNK_DIM_2``, ``CHUNK_DIM_3``) are required. The chunk dimension settings should be compatible with the data dimensions, i.e., they must have the same rank of dimensions, and chunk dimension size cannot be greater than data dimension size. Extra chunk dimensions have no effect and should be set to ``1``.
+Compression and decompression are applicable for both ``h5bench_write`` and ``h5bench_read``. When enabled, the chunk dimension parameters (``CHUNK_DIM_1``, ``CHUNK_DIM_2``, ``CHUNK_DIM_3``) are required. The chunk dimension settings should be compatible with the data dimensions, i.e., they must have the same rank of dimensions, and a chunk dimension size cannot be greater than the data dimension size. Extra chunk dimensions have no effect and should be set to ``1``.
+
+For the built-in compressions ``N_BIT``/``SZIP``/``GZIP`` to work on datasets in ``h5bench_write`` you only need to define ``COMPRESS_FILTER`` and the chunk dimensions. For the external compressions ``SZ3``/``ZFP``/``SNAPPY_CUDA`` to work you **must** also download and build the compression filter plugin on your system, and specify its installation path as ``path`` in ``vol`` so that HDF5 can apply it. See `Enable Plugins `_ for details on using an external compression filter. If ``h5bench_write`` executes successfully with the intended compression, ``h5bench_read`` will read the compressed data and apply decompression automatically.
+
+The ``CD_NELMTS`` and ``CD_VALUES_[1-10]`` parameters are optional and only applicable to external compressions. Their default values are ``0`` and they accept only ``unsigned int`` values; at most 10 auxiliary values can be provided. For information on how to define these values, see `H5Z-ZFP Interfaces `_ for an example of controlling the ZFP compression, and `SZ3 Usage Example `_ for the SZ3 compression.
+
 
 .. warning::
diff --git a/h5bench_patterns/h5bench_read.c b/h5bench_patterns/h5bench_read.c
index 59d45558..d1776aeb 100644
--- a/h5bench_patterns/h5bench_read.c
+++ b/h5bench_patterns/h5bench_read.c
@@ -65,6 +65,17 @@ herr_t ierr;
 data_contig_md *BUF_STRUCT;
 mem_monitor *   MEM_MONITOR;
 
+typedef struct filter_info {
+    int           USE_COMPRESS;
+    size_t *      cd_nelmts;
+    unsigned int *cd_values;
+    char *        name;
+    unsigned int *filter_config;
+    H5Z_filter_t  filter_id;
+} filter_info;
+
+filter_info FILTER_INFO;
+
 void
 print_data(int n)
 {
@@ -85,6 +96,72 @@ set_dspace_plist(hid_t *plist_id_out, int data_collective)
         H5Pset_dxpl_mpio(*plist_id_out, H5FD_MPIO_INDEPENDENT);
 }
 
+// Allocate memory for filter_info
+void
+filter_info_init()
+{
+    FILTER_INFO.USE_COMPRESS  = 0;
+    FILTER_INFO.cd_nelmts     = (size_t *)malloc(sizeof(size_t));
+    *(FILTER_INFO.cd_nelmts)  = 10;
+    FILTER_INFO.cd_values     = (unsigned int *)malloc(10 * sizeof(unsigned int));
+    FILTER_INFO.name          = (char *)malloc(255 * sizeof(char));
+    FILTER_INFO.filter_config = (unsigned int *)malloc(1 * sizeof(unsigned int));
+}
+
+// Free memory for filter_info
+void
+filter_info_free()
+{
+    free(FILTER_INFO.cd_nelmts);
+    free(FILTER_INFO.cd_values);
+    free(FILTER_INFO.name);
+    free(FILTER_INFO.filter_config);
+}
+
+// Retrieve information about a filter on a dataset
+int
+get_filter_info(hid_t dset_id)
+{
+    hid_t dcpl;
+    dcpl = H5Dget_create_plist(dset_id);
+
+    if (dcpl < 0) {
+        printf("Invalid dataset creation property list identifier.\n");
+        return -1;
+    }
+
+    // Check the number of filters in the pipeline, skip calling H5Pget_filter if 0 filter is detected in the
+    // pipeline
+    int num_filters = H5Pget_nfilters(dcpl);
+    if (num_filters <= 0) {
+        H5Pclose(dcpl);
+        return 0;
+    }
+    // The third argument of H5Pget_filter2 is an output pointer for the filter flags;
+    // the flags are not needed here, so pass NULL rather than the H5Z_FLAG_MANDATORY constant
+    FILTER_INFO.filter_id =
+        H5Pget_filter2(dcpl, 0, NULL, FILTER_INFO.cd_nelmts, FILTER_INFO.cd_values, 255,
+                       FILTER_INFO.name, FILTER_INFO.filter_config);
+    H5Pclose(dcpl);
+
+    if (FILTER_INFO.filter_id < 0) {
+        printf("Failed to retrieve filter information.\n");
+        return -1;
+    }
+
+    FILTER_INFO.USE_COMPRESS = 1;
+
+    if (MY_RANK == 0) {
+        printf("  Compression filter to decompress: %s\n", FILTER_INFO.name);
+        printf("  Filter ID: %d\n", FILTER_INFO.filter_id);
+        // printf("  Number of compression filter parameters: %ld\n", *FILTER_INFO.cd_nelmts);
+        // for (int i = 0; i < *(FILTER_INFO.cd_nelmts); ++i) {
+        //     printf("  Compression parameter %d: %lu\n", i, FILTER_INFO.cd_values[i]);
+        //}
+    }
+
+    return 0;
+}
+
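(Editor's note: get_filter_info() above routes its results through the FILTER_INFO globals. The same pipeline inspection in isolation looks like the following hedged sketch, assuming a dataset handle opened elsewhere; note that H5Pget_filter2's cd_nelmts argument is in/out: capacity on entry, actual count on return.)

```c
#include <stdio.h>
#include <hdf5.h>

/* Print the first filter in a dataset's pipeline, mirroring get_filter_info(). */
static void
print_first_filter(hid_t dset)
{
    hid_t dcpl = H5Dget_create_plist(dset);
    if (H5Pget_nfilters(dcpl) > 0) {
        unsigned int flags, cfg;
        unsigned int cd_values[10];
        size_t       cd_nelmts = 10; /* in: capacity; out: actual count */
        char         name[255];

        H5Z_filter_t id = H5Pget_filter2(dcpl, 0, &flags, &cd_nelmts, cd_values,
                                         sizeof(name), name, &cfg);
        if (id >= 0)
            printf("filter %d (%s), %zu client values\n", (int)id, name, cd_nelmts);
    }
    H5Pclose(dcpl);
}
```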
 // Create HDF5 file and read data
 void
 read_h5_data(time_step *ts, hid_t loc, hid_t *dset_ids, hid_t filespace, hid_t memspace, hid_t plist_id,
@@ -92,6 +169,7 @@ read_h5_data(time_step *ts, hid_t loc, hid_t *dset_ids, hid_t filespace, hid_t m
 {
     hid_t         dapl;
     unsigned long t1, t2, t3;
+    // Create a dataset access property list
     dapl = H5Pcreate(H5P_DATASET_ACCESS);
 #if H5_VERSION_GE(1, 10, 0)
     H5Pset_all_coll_metadata_ops(dapl, true);
@@ -108,6 +186,13 @@ read_h5_data(time_step *ts, hid_t loc, hid_t *dset_ids, hid_t filespace, hid_t m
     dset_ids[6] = H5Dopen_async(loc, "py", dapl, ts->es_meta_create);
     dset_ids[7] = H5Dopen_async(loc, "pz", dapl, ts->es_meta_create);
 
+    int ret = get_filter_info(dset_ids[0]);
+    if (ret < 0) {
+        if (MY_RANK == 0) {
+            printf("get_filter_info() failed\n");
+        }
+    }
+
     t2 = get_time_usec();
 
     ierr = H5Dread_async(dset_ids[0], H5T_NATIVE_FLOAT, memspace, filespace, plist_id, BUF_STRUCT->x,
@@ -514,6 +599,7 @@ _run_benchmark_read(hid_t file_id, hid_t fapl, hid_t gapl, hid_t filespace, benc
     unsigned long read_time_exp = 0, metadata_time_exp = 0;
     unsigned long read_time_imp = 0, metadata_time_imp = 0;
     int           dset_cnt = 8;
+    filter_info_init();
     for (int ts_index = 0; ts_index < nts; ts_index++) {
         meta_time1 = 0, meta_time2 = 0, meta_time3 = 0, meta_time4 = 0, meta_time5 = 0;
         sprintf(grp_name, "Timestep_%d", ts_index);
@@ -605,6 +691,7 @@ int
 main(int argc, char *argv[])
 {
     int mpi_thread_lvl_provided = -1;
+    // Initialize the MPI execution environment; multiple threads may make MPI calls
     MPI_Init_thread(&argc, &argv, MPI_THREAD_MULTIPLE, &mpi_thread_lvl_provided);
     assert(MPI_THREAD_MULTIPLE == mpi_thread_lvl_provided);
     MPI_Comm_rank(MPI_COMM_WORLD, &MY_RANK);
@@ -648,8 +735,9 @@ main(int argc, char *argv[])
     hid_t fapl, gapl;
     set_pl(&fapl, &gapl);
 
+    // Initialize array to store the size of each dimension, at most 64 dimensions
     hsize_t dims[64] = {0};
-
+    // Open an existing HDF5 file with read-only access
     hid_t file_id   = H5Fopen(file_name, H5F_ACC_RDONLY, fapl);
     hid_t filespace = get_filespace(file_id);
     int   dims_cnt  = H5Sget_simple_extent_dims(filespace, dims, NULL);
@@ -658,6 +746,7 @@ main(int argc, char *argv[])
         for (int i = 0; i < dims_cnt; i++) {
             if (MY_RANK == 0)
                 printf("dims[%d] = %llu (total number for the file)\n", i, dims[i]);
+            // Calculate the total size/area/volume
            total_particles *= dims[i];
         }
     }
@@ -692,7 +781,7 @@ main(int argc, char *argv[])
             goto error;
         }
     }
-
+    // NUM_RANKS here refers to the number of processes, not the number of dimensions
     NUM_PARTICLES = total_particles / NUM_RANKS;
 
     unsigned long long read_elem_cnt = params.try_num_particles;
@@ -714,6 +803,7 @@ main(int argc, char *argv[])
     MPI_Allreduce(&NUM_PARTICLES, &TOTAL_PARTICLES, 1, MPI_LONG_LONG, MPI_SUM, MPI_COMM_WORLD);
     MPI_Scan(&NUM_PARTICLES, &FILE_OFFSET, 1, MPI_LONG_LONG, MPI_SUM, MPI_COMM_WORLD);
     FILE_OFFSET -= NUM_PARTICLES;
+    // Allocate memory for each particle
     BUF_STRUCT = prepare_contig_memory_multi_dim(params.dim_1, params.dim_2, params.dim_3);
 
     unsigned long t1 = get_time_usec();
@@ -803,6 +893,17 @@ main(int argc, char *argv[])
"seconds"); value = format_human_readable(total_size_bytes); fprintf(params.csv_fs, "total size, %.3lf, %cB\n", value.value, value.unit); + + if (FILTER_INFO.USE_COMPRESS) { + fprintf(params.csv_fs, "compression filter name, %s\n", FILTER_INFO.name); + fprintf(params.csv_fs, "filter ID, %d\n", FILTER_INFO.filter_id); + // fprintf(params.csv_fs, "number of compression filter parameters, %ld\n", + // *FILTER_INFO.cd_nelmts); + // for (int i = 0; i < *(FILTER_INFO.cd_nelmts); ++i) { + // fprintf(params.csv_fs, "compression parameter %d, %lu\n", i, FILTER_INFO.cd_values[i]); + //} + } + fprintf(params.csv_fs, "raw time, %.3f, %s\n", rrt_s, "seconds"); value = format_human_readable(raw_rate); fprintf(params.csv_fs, "raw rate, %.3lf, %cB/s\n", value.value, value.unit); @@ -813,7 +911,7 @@ main(int argc, char *argv[]) fclose(params.csv_fs); } } - + filter_info_free(); error: H5E_BEGIN_TRY { diff --git a/h5bench_patterns/h5bench_write.c b/h5bench_patterns/h5bench_write.c index a7151702..f2e159f4 100644 --- a/h5bench_patterns/h5bench_write.c +++ b/h5bench_patterns/h5bench_write.c @@ -37,6 +37,7 @@ // 02/19/2019 --> Add option to write multiple timesteps of data - Tang // +#include #include #include #include @@ -48,11 +49,16 @@ #include #include "../commons/h5bench_util.h" #include "../commons/async_adaptor.h" + #ifdef HAVE_SUBFILING #include "H5FDsubfiling.h" #include "H5FDioc.h" #endif -#define DIM_MAX 3 + +#define DIM_MAX 3 +#define H5Z_FILTER_ZFP 32013 +#define H5Z_FILTER_SZ3 32024 +#define H5Z_FILTER_SNAPPY_CUDA 32003 herr_t ierr; @@ -60,8 +66,11 @@ typedef struct compress_info { int USE_COMPRESS; hid_t dcpl_id; hsize_t chunk_dims[DIM_MAX]; + hsize_t total_compressed_size; } compress_info; +static unsigned int *cd_values; + // Global Variables and dimensions async_mode ASYNC_MODE; compress_info COMPRESS_INFO; // Using parallel compressing: need to set chunk dimensions for dcpl. 
@@ -756,6 +765,7 @@ _run_benchmark_write(bench_params params, hid_t file_id, hid_t fapl, hid_t files
     unsigned long metadata_time_exp = 0, data_time_exp = 0, t0, t1, t2, t3, t4;
     unsigned long metadata_time_imp = 0, data_time_imp = 0;
     unsigned long meta_time1 = 0, meta_time2 = 0, meta_time3 = 0, meta_time4 = 0, meta_time5 = 0;
+    COMPRESS_INFO.total_compressed_size = 0;
     for (int ts_index = 0; ts_index < timestep_cnt; ts_index++) {
         meta_time1 = 0, meta_time2 = 0, meta_time3 = 0, meta_time4 = 0, meta_time5 = 0;
         time_step *ts = &(MEM_MONITOR->time_steps[ts_index]);
@@ -824,6 +834,11 @@ _run_benchmark_write(bench_params params, hid_t file_id, hid_t fapl, hid_t files
 
         for (int j = 0; j < dset_cnt; j++) {
             if (ts->dset_ids[j] != 0) {
+                // get the size of each dataset after compression before losing access
+                hsize_t dset_size = H5Dget_storage_size(ts->dset_ids[j]);
+                COMPRESS_INFO.total_compressed_size += dset_size;
+
+                // close the dataset
                 H5Dclose_async(ts->dset_ids[j], ts->es_meta_close);
             }
         }
@@ -882,16 +897,61 @@ set_globals(const bench_params *params)
     if (COMPRESS_INFO.USE_COMPRESS) { // set DCPL
         herr_t ret;
+
+        // Construct auxiliary data for the filter
+        cd_values    = (unsigned int *)malloc(10 * sizeof(unsigned int));
+        cd_values[0] = params->cd_value_1;
+        cd_values[1] = params->cd_value_2;
+        cd_values[2] = params->cd_value_3;
+        cd_values[3] = params->cd_value_4;
+        cd_values[4] = params->cd_value_5;
+        cd_values[5] = params->cd_value_6;
+        cd_values[6] = params->cd_value_7;
+        cd_values[7] = params->cd_value_8;
+        cd_values[8] = params->cd_value_9;
+        cd_values[9] = params->cd_value_10;
+
+        // Create a new property list instance
         COMPRESS_INFO.dcpl_id = H5Pcreate(H5P_DATASET_CREATE);
         assert(COMPRESS_INFO.dcpl_id > 0);
 
+        // Clear any possible residual filter settings
+        ret = H5Premove_filter(COMPRESS_INFO.dcpl_id, H5Z_FILTER_ALL);
+        assert(ret >= 0);
+
         /* Set chunked layout and chunk dimensions */
         ret = H5Pset_layout(COMPRESS_INFO.dcpl_id, H5D_CHUNKED);
         assert(ret >= 0);
         ret = H5Pset_chunk(COMPRESS_INFO.dcpl_id, params->num_dims, (const hsize_t *)COMPRESS_INFO.chunk_dims);
         assert(ret >= 0);
-        ret = H5Pset_deflate(COMPRESS_INFO.dcpl_id, 9);
+
+        // Add the specified filter to the pipeline
+        if (params->compress_filter == N_BIT) {
+            ret = H5Pset_nbit(COMPRESS_INFO.dcpl_id);
+        }
+        else if (params->compress_filter == SZIP) {
+            ret = H5Pset_szip(COMPRESS_INFO.dcpl_id, H5_SZIP_EC_OPTION_MASK, 8);
+        }
+        else if (params->compress_filter == GZIP) {
+            ret = H5Pset_deflate(COMPRESS_INFO.dcpl_id, 9);
+        }
+        else if (params->compress_filter == SZ3) {
+            ret = H5Pset_filter(COMPRESS_INFO.dcpl_id, H5Z_FILTER_SZ3, H5Z_FLAG_MANDATORY, params->cd_nelmts,
+                                cd_values);
+        }
+        else if (params->compress_filter == ZFP) {
+            ret = H5Pset_filter(COMPRESS_INFO.dcpl_id, H5Z_FILTER_ZFP, H5Z_FLAG_MANDATORY, params->cd_nelmts,
+                                cd_values);
+        }
+        else if (params->compress_filter == SNAPPY_CUDA) {
+            ret = H5Pset_filter(COMPRESS_INFO.dcpl_id, H5Z_FILTER_SNAPPY_CUDA, H5Z_FLAG_MANDATORY,
+                                params->cd_nelmts, cd_values);
+        }
+        else {
+            ret = -1;
+        }
+        free(cd_values);
         assert(ret >= 0);
     }
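(Editor's note: a hunk in the original diff here only re-added the assert/MPI_Comm_rank lines that already appear two lines earlier, an apparent merge artifact, and has been dropped. Separately, set_globals() above hands the JSON-supplied CD_VALUES_* straight to H5Pset_filter for the external filters; a trimmed sketch of just that path follows, with a hypothetical make_zfp_dcpl helper. The meaning of each cd_values slot is defined by the filter itself; see the H5Z-ZFP and SZ3 references in vpic.rst.)

```c
#include <assert.h>
#include <hdf5.h>

#define H5Z_FILTER_ZFP 32013 /* same registered ID the PR defines */

/* Attach an external filter to a dcpl; cd_values semantics are filter-defined. */
static hid_t
make_zfp_dcpl(int ndims, const hsize_t *chunk, size_t cd_nelmts, const unsigned int *cd_values)
{
    hid_t dcpl = H5Pcreate(H5P_DATASET_CREATE);
    assert(H5Pset_chunk(dcpl, ndims, chunk) >= 0);
    /* H5Z_FLAG_MANDATORY: dataset creation fails if the plugin cannot be loaded. */
    assert(H5Pset_filter(dcpl, H5Z_FILTER_ZFP, H5Z_FLAG_MANDATORY, cd_nelmts, cd_values) >= 0);
    return dcpl;
}
```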
@@ -999,6 +1059,17 @@ main(int argc, char *argv[])
     char *       output_file;
     bench_params params;
 
+    params.cd_value_1  = 0;
+    params.cd_value_2  = 0;
+    params.cd_value_3  = 0;
+    params.cd_value_4  = 0;
+    params.cd_value_5  = 0;
+    params.cd_value_6  = 0;
+    params.cd_value_7  = 0;
+    params.cd_value_8  = 0;
+    params.cd_value_9  = 0;
+    params.cd_value_10 = 0;
+
     char *cfg_file_path = argv[1];
     output_file         = argv[2];
     if (MY_RANK == 0) {
@@ -1104,7 +1175,9 @@ main(int argc, char *argv[])
     unsigned long t2 = get_time_usec(); // t2 - t1: metadata: creating/opening
 
     unsigned long raw_write_time, inner_metadata_time, local_data_size;
-    int           stat = _run_benchmark_write(params, file_id, fapl, filespace, memspace, data, data_size,
+
+    // Run write benchmark
+    int stat = _run_benchmark_write(params, file_id, fapl, filespace, memspace, data, data_size,
                                               &local_data_size, &raw_write_time, &inner_metadata_time);
 
     if (stat < 0) {
@@ -1125,6 +1198,8 @@ main(int argc, char *argv[])
 
     H5Fclose_async(file_id, 0);
 
+    // the compressed size of each dataset was collected with H5Dget_storage_size() before the file was closed
+
     unsigned long tfclose_end = get_time_usec();
     MPI_Barrier(MPI_COMM_WORLD);
     unsigned long t4 = get_time_usec();
@@ -1147,10 +1222,25 @@ main(int argc, char *argv[])
             read_time_val(params.compute_time, TIME_US) * (params.cnt_time_step - 1);
         printf("Total emulated compute time: %.3lf s\n", total_sleep_time_us / (1000.0 * 1000.0));
 
+        // Report total write size
         double total_size_bytes = NUM_RANKS * local_data_size;
         value                   = format_human_readable(total_size_bytes);
         printf("Total write size: %.3lf %cB\n", value.value, value.unit);
 
+        // Assume no compression
+        float compression_ratio = 1.0;
+
+        // Report compression ratio
+        if (COMPRESS_INFO.USE_COMPRESS) {
+            if (COMPRESS_INFO.total_compressed_size > 0) {
+                compression_ratio = total_size_bytes / COMPRESS_INFO.total_compressed_size;
+            }
+
+            value = format_human_readable(COMPRESS_INFO.total_compressed_size);
+            printf("Total compressed size: %.3lf %cB\n", value.value, value.unit);
+            printf("Compression ratio: %.3f\n", compression_ratio);
+        }
+
         float rwt_s    = (float)raw_write_time / (1000.0 * 1000.0);
         float raw_rate = (float)total_size_bytes / rwt_s;
         printf("Raw write time: %.3f s\n", rwt_s);
@@ -1184,12 +1274,17 @@ main(int argc, char *argv[])
         fprintf(params.csv_fs, "operation, %s, %s\n", "write", "");
         fprintf(params.csv_fs, "ranks, %d, %s\n", NUM_RANKS, "");
         fprintf(params.csv_fs, "collective data, %s, %s\n", params.data_coll == 1 ? "YES" : "NO", "");
-        fprintf(params.csv_fs, "collective meta, %s, %s\n", params.meta_coll == 1 ? "YES" : "NO", "");
-        fprintf(params.csv_fs, "subfiling, %s, %s\n", params.subfiling == 1 ?
"YES" : "NO", ""); fprintf(params.csv_fs, "total compute time, %.3lf, %s\n", total_sleep_time_us / (1000.0 * 1000.0), "seconds"); value = format_human_readable(total_size_bytes); fprintf(params.csv_fs, "total size, %.3lf, %cB\n", value.value, value.unit); + + if (COMPRESS_INFO.USE_COMPRESS) { + value = format_human_readable(COMPRESS_INFO.total_compressed_size); + fprintf(params.csv_fs, "total compressed size, %.3lf, %cB\n", value.value, value.unit); + fprintf(params.csv_fs, "compression ratio, %.3lf, %s\n", compression_ratio, ""); + } + fprintf(params.csv_fs, "raw time, %.3f, %s\n", rwt_s, "seconds"); value = format_human_readable(raw_rate); fprintf(params.csv_fs, "raw rate, %.3lf, %cB/s\n", value.value, value.unit); diff --git a/samples/metrics-perlmutter.json b/samples/metrics-perlmutter.json index 16b7dcba..92d77d34 100644 --- a/samples/metrics-perlmutter.json +++ b/samples/metrics-perlmutter.json @@ -1,7 +1,7 @@ { "mpi": { "command": "srun", - "configuration": "--tasks-per-node=64 -N 8 -n 512 -t 00:30:00" + "configuration": "-A m2621 --qos=debug --constraint=cpu --tasks-per-node=64 -N 8 -n 512 -t 00:30:00" }, "vol": { @@ -53,4 +53,4 @@ } } ] -} +} \ No newline at end of file diff --git a/samples/sync-write-read-chunked-1d-GZIP.json b/samples/sync-write-read-chunked-1d-GZIP.json new file mode 100644 index 00000000..fc5942f1 --- /dev/null +++ b/samples/sync-write-read-chunked-1d-GZIP.json @@ -0,0 +1,57 @@ +{ + "mpi": { + "command": "mpirun", + "ranks": "2", + "configuration": "--allow-run-as-root --oversubscribe" + }, + "vol": { + }, + "file-system": { + }, + "directory": "storage", + "benchmarks": [ + { + "benchmark": "write", + "file": "test.h5", + "configuration": { + "MEM_PATTERN": "CONTIG", + "FILE_PATTERN": "CONTIG", + "TIMESTEPS": "5", + "DELAYED_CLOSE_TIMESTEPS": "2", + "COLLECTIVE_DATA": "YES", + "COLLECTIVE_METADATA": "YES", + "EMULATED_COMPUTE_TIME_PER_TIMESTEP": "5 s", + "NUM_DIMS": "1", + "DIM_1": "4194304", + "DIM_2": "1", + "DIM_3": "1", + "CSV_FILE": "output.csv", + "MODE": "SYNC", + "COMPRESS": "YES", + "COMPRESS_FILTER": "GZIP", + "CHUNK_DIM_1": "4194304", + "CHUNK_DIM_2": "1", + "CHUNK_DIM_3": "1" + } + }, + { + "benchmark": "read", + "file": "test.h5", + "configuration": { + "MEM_PATTERN": "CONTIG", + "FILE_PATTERN": "CONTIG", + "TIMESTEPS": "5", + "DELAYED_CLOSE_TIMESTEPS": "2", + "COLLECTIVE_DATA": "YES", + "COLLECTIVE_METADATA": "YES", + "EMULATED_COMPUTE_TIME_PER_TIMESTEP": "5 s", + "NUM_DIMS": "1", + "DIM_1": "4194304", + "DIM_2": "1", + "DIM_3": "1", + "CSV_FILE": "output.csv", + "MODE": "SYNC" + } + } + ] +} diff --git a/samples/sync-write-read-chunked-1d-N_BIT.json b/samples/sync-write-read-chunked-1d-N_BIT.json new file mode 100644 index 00000000..41f4dce1 --- /dev/null +++ b/samples/sync-write-read-chunked-1d-N_BIT.json @@ -0,0 +1,57 @@ +{ + "mpi": { + "command": "mpirun", + "ranks": "2", + "configuration": "--allow-run-as-root --oversubscribe -n 2" + }, + "vol": { + }, + "file-system": { + }, + "directory": "storage", + "benchmarks": [ + { + "benchmark": "write", + "file": "test.h5", + "configuration": { + "MEM_PATTERN": "CONTIG", + "FILE_PATTERN": "CONTIG", + "TIMESTEPS": "5", + "DELAYED_CLOSE_TIMESTEPS": "2", + "COLLECTIVE_DATA": "YES", + "COLLECTIVE_METADATA": "YES", + "EMULATED_COMPUTE_TIME_PER_TIMESTEP": "5 s", + "NUM_DIMS": "1", + "DIM_1": "4194304", + "DIM_2": "1", + "DIM_3": "1", + "CSV_FILE": "output.csv", + "MODE": "SYNC", + "COMPRESS": "YES", + "COMPRESS_FILTER": "N_BIT", + "CHUNK_DIM_1": "4194304", + "CHUNK_DIM_2": "1", + "CHUNK_DIM_3": "1" + } 
+ }, + { + "benchmark": "read", + "file": "test.h5", + "configuration": { + "MEM_PATTERN": "CONTIG", + "FILE_PATTERN": "CONTIG", + "TIMESTEPS": "5", + "DELAYED_CLOSE_TIMESTEPS": "2", + "COLLECTIVE_DATA": "YES", + "COLLECTIVE_METADATA": "YES", + "EMULATED_COMPUTE_TIME_PER_TIMESTEP": "5 s", + "NUM_DIMS": "1", + "DIM_1": "4194304", + "DIM_2": "1", + "DIM_3": "1", + "CSV_FILE": "output.csv", + "MODE": "SYNC" + } + } + ] +} diff --git a/samples/sync-write-read-chunked-1d-SZ3.json b/samples/sync-write-read-chunked-1d-SZ3.json new file mode 100644 index 00000000..1f843720 --- /dev/null +++ b/samples/sync-write-read-chunked-1d-SZ3.json @@ -0,0 +1,58 @@ +{ + "mpi": { + "command": "mpirun", + "ranks": "2", + "configuration": "--allow-run-as-root --oversubscribe -n 2" + }, + "vol": { + "path": "/opt/SZ3/lib" + }, + "file-system": { + }, + "directory": "storage", + "benchmarks": [ + { + "benchmark": "write", + "file": "test.h5", + "configuration": { + "MEM_PATTERN": "CONTIG", + "FILE_PATTERN": "CONTIG", + "TIMESTEPS": "5", + "DELAYED_CLOSE_TIMESTEPS": "2", + "COLLECTIVE_DATA": "YES", + "COLLECTIVE_METADATA": "YES", + "EMULATED_COMPUTE_TIME_PER_TIMESTEP": "5 s", + "NUM_DIMS": "1", + "DIM_1": "4194304", + "DIM_2": "1", + "DIM_3": "1", + "CSV_FILE": "output.csv", + "MODE": "SYNC", + "COMPRESS": "YES", + "COMPRESS_FILTER": "SZ3", + "CHUNK_DIM_1": "4194304", + "CHUNK_DIM_2": "1", + "CHUNK_DIM_3": "1" + } + }, + { + "benchmark": "read", + "file": "test.h5", + "configuration": { + "MEM_PATTERN": "CONTIG", + "FILE_PATTERN": "CONTIG", + "TIMESTEPS": "5", + "DELAYED_CLOSE_TIMESTEPS": "2", + "COLLECTIVE_DATA": "YES", + "COLLECTIVE_METADATA": "YES", + "EMULATED_COMPUTE_TIME_PER_TIMESTEP": "5 s", + "NUM_DIMS": "1", + "DIM_1": "4194304", + "DIM_2": "1", + "DIM_3": "1", + "CSV_FILE": "output.csv", + "MODE": "SYNC" + } + } + ] +} diff --git a/samples/sync-write-read-chunked-1d-SZIP.json b/samples/sync-write-read-chunked-1d-SZIP.json new file mode 100644 index 00000000..03467832 --- /dev/null +++ b/samples/sync-write-read-chunked-1d-SZIP.json @@ -0,0 +1,57 @@ +{ + "mpi": { + "command": "mpirun", + "ranks": "2", + "configuration": "--allow-run-as-root --oversubscribe -n 2" + }, + "vol": { + }, + "file-system": { + }, + "directory": "storage", + "benchmarks": [ + { + "benchmark": "write", + "file": "test.h5", + "configuration": { + "MEM_PATTERN": "CONTIG", + "FILE_PATTERN": "CONTIG", + "TIMESTEPS": "5", + "DELAYED_CLOSE_TIMESTEPS": "2", + "COLLECTIVE_DATA": "YES", + "COLLECTIVE_METADATA": "YES", + "EMULATED_COMPUTE_TIME_PER_TIMESTEP": "5 s", + "NUM_DIMS": "1", + "DIM_1": "4194304", + "DIM_2": "1", + "DIM_3": "1", + "CSV_FILE": "output.csv", + "MODE": "SYNC", + "COMPRESS": "YES", + "COMPRESS_FILTER": "SZIP", + "CHUNK_DIM_1": "4194304", + "CHUNK_DIM_2": "1", + "CHUNK_DIM_3": "1" + } + }, + { + "benchmark": "read", + "file": "test.h5", + "configuration": { + "MEM_PATTERN": "CONTIG", + "FILE_PATTERN": "CONTIG", + "TIMESTEPS": "5", + "DELAYED_CLOSE_TIMESTEPS": "2", + "COLLECTIVE_DATA": "YES", + "COLLECTIVE_METADATA": "YES", + "EMULATED_COMPUTE_TIME_PER_TIMESTEP": "5 s", + "NUM_DIMS": "1", + "DIM_1": "4194304", + "DIM_2": "1", + "DIM_3": "1", + "CSV_FILE": "output.csv", + "MODE": "SYNC" + } + } + ] +} diff --git a/samples/sync-write-read-chunked-1d-ZFP.json b/samples/sync-write-read-chunked-1d-ZFP.json new file mode 100644 index 00000000..49f9f47d --- /dev/null +++ b/samples/sync-write-read-chunked-1d-ZFP.json @@ -0,0 +1,58 @@ +{ + "mpi": { + "command": "mpirun", + "ranks": "2", + "configuration": "--allow-run-as-root 
--oversubscribe -n 2" + }, + "vol": { + "path": "/opt/H5Z-ZFP/plugin" + }, + "file-system": { + }, + "directory": "storage", + "benchmarks": [ + { + "benchmark": "write", + "file": "test.h5", + "configuration": { + "MEM_PATTERN": "CONTIG", + "FILE_PATTERN": "CONTIG", + "TIMESTEPS": "5", + "DELAYED_CLOSE_TIMESTEPS": "2", + "COLLECTIVE_DATA": "YES", + "COLLECTIVE_METADATA": "YES", + "EMULATED_COMPUTE_TIME_PER_TIMESTEP": "5 s", + "NUM_DIMS": "1", + "DIM_1": "4194304", + "DIM_2": "1", + "DIM_3": "1", + "CSV_FILE": "output.csv", + "MODE": "SYNC", + "COMPRESS": "YES", + "COMPRESS_FILTER": "ZFP", + "CHUNK_DIM_1": "4194304", + "CHUNK_DIM_2": "1", + "CHUNK_DIM_3": "1" + } + }, + { + "benchmark": "read", + "file": "test.h5", + "configuration": { + "MEM_PATTERN": "CONTIG", + "FILE_PATTERN": "CONTIG", + "TIMESTEPS": "5", + "DELAYED_CLOSE_TIMESTEPS": "2", + "COLLECTIVE_DATA": "YES", + "COLLECTIVE_METADATA": "YES", + "EMULATED_COMPUTE_TIME_PER_TIMESTEP": "5 s", + "NUM_DIMS": "1", + "DIM_1": "4194304", + "DIM_2": "1", + "DIM_3": "1", + "CSV_FILE": "output.csv", + "MODE": "SYNC" + } + } + ] +} diff --git a/samples/sync-write-read-chunked-2d-GZIP.json b/samples/sync-write-read-chunked-2d-GZIP.json new file mode 100644 index 00000000..a52b43d0 --- /dev/null +++ b/samples/sync-write-read-chunked-2d-GZIP.json @@ -0,0 +1,57 @@ +{ + "mpi": { + "command": "mpirun", + "ranks": "4", + "configuration": "--allow-run-as-root --oversubscribe -n 2" + }, + "vol": { + }, + "file-system": { + }, + "directory": "storage", + "benchmarks": [ + { + "benchmark": "write", + "file": "test.h5", + "configuration": { + "MEM_PATTERN": "CONTIG", + "FILE_PATTERN": "CONTIG", + "TIMESTEPS": "5", + "DELAYED_CLOSE_TIMESTEPS": "2", + "COLLECTIVE_DATA": "YES", + "COLLECTIVE_METADATA": "YES", + "EMULATED_COMPUTE_TIME_PER_TIMESTEP": "5 s", + "NUM_DIMS": "2", + "DIM_1": "1024", + "DIM_2": "1024", + "DIM_3": "1", + "CSV_FILE": "output.csv", + "MODE": "SYNC", + "COMPRESS": "YES", + "COMPRESS_FILTER": "GZIP", + "CHUNK_DIM_1": "1024", + "CHUNK_DIM_2": "1024", + "CHUNK_DIM_3": "1" + } + }, + { + "benchmark": "read", + "file": "test.h5", + "configuration": { + "MEM_PATTERN": "CONTIG", + "FILE_PATTERN": "CONTIG", + "TIMESTEPS": "5", + "DELAYED_CLOSE_TIMESTEPS": "2", + "COLLECTIVE_DATA": "YES", + "COLLECTIVE_METADATA": "YES", + "EMULATED_COMPUTE_TIME_PER_TIMESTEP": "5 s", + "NUM_DIMS": "2", + "DIM_1": "1024", + "DIM_2": "1024", + "DIM_3": "1", + "CSV_FILE": "output.csv", + "MODE": "SYNC" + } + } + ] +} diff --git a/samples/sync-write-read-chunked-2d-N_BIT.json b/samples/sync-write-read-chunked-2d-N_BIT.json new file mode 100644 index 00000000..d79bc878 --- /dev/null +++ b/samples/sync-write-read-chunked-2d-N_BIT.json @@ -0,0 +1,57 @@ +{ + "mpi": { + "command": "mpirun", + "ranks": "4", + "configuration": "--allow-run-as-root --oversubscribe -n 2" + }, + "vol": { + }, + "file-system": { + }, + "directory": "storage", + "benchmarks": [ + { + "benchmark": "write", + "file": "test.h5", + "configuration": { + "MEM_PATTERN": "CONTIG", + "FILE_PATTERN": "CONTIG", + "TIMESTEPS": "5", + "DELAYED_CLOSE_TIMESTEPS": "2", + "COLLECTIVE_DATA": "YES", + "COLLECTIVE_METADATA": "YES", + "EMULATED_COMPUTE_TIME_PER_TIMESTEP": "5 s", + "NUM_DIMS": "2", + "DIM_1": "1024", + "DIM_2": "1024", + "DIM_3": "1", + "CSV_FILE": "output.csv", + "MODE": "SYNC", + "COMPRESS": "YES", + "COMPRESS_FILTER": "N_BIT", + "CHUNK_DIM_1": "1024", + "CHUNK_DIM_2": "1024", + "CHUNK_DIM_3": "1" + } + }, + { + "benchmark": "read", + "file": "test.h5", + "configuration": { + "MEM_PATTERN": 
"CONTIG", + "FILE_PATTERN": "CONTIG", + "TIMESTEPS": "5", + "DELAYED_CLOSE_TIMESTEPS": "2", + "COLLECTIVE_DATA": "YES", + "COLLECTIVE_METADATA": "YES", + "EMULATED_COMPUTE_TIME_PER_TIMESTEP": "5 s", + "NUM_DIMS": "2", + "DIM_1": "1024", + "DIM_2": "1024", + "DIM_3": "1", + "CSV_FILE": "output.csv", + "MODE": "SYNC" + } + } + ] +} diff --git a/samples/sync-write-read-chunked-2d-SZ3.json b/samples/sync-write-read-chunked-2d-SZ3.json new file mode 100644 index 00000000..f38dfd4e --- /dev/null +++ b/samples/sync-write-read-chunked-2d-SZ3.json @@ -0,0 +1,58 @@ +{ + "mpi": { + "command": "mpirun", + "ranks": "4", + "configuration": "--allow-run-as-root --oversubscribe -n 2" + }, + "vol": { + "path": "/opt/SZ3/lib" + }, + "file-system": { + }, + "directory": "storage", + "benchmarks": [ + { + "benchmark": "write", + "file": "test.h5", + "configuration": { + "MEM_PATTERN": "CONTIG", + "FILE_PATTERN": "CONTIG", + "TIMESTEPS": "5", + "DELAYED_CLOSE_TIMESTEPS": "2", + "COLLECTIVE_DATA": "YES", + "COLLECTIVE_METADATA": "YES", + "EMULATED_COMPUTE_TIME_PER_TIMESTEP": "5 s", + "NUM_DIMS": "2", + "DIM_1": "1024", + "DIM_2": "1024", + "DIM_3": "1", + "CSV_FILE": "output.csv", + "MODE": "SYNC", + "COMPRESS": "YES", + "COMPRESS_FILTER": "SZ3", + "CHUNK_DIM_1": "1024", + "CHUNK_DIM_2": "1024", + "CHUNK_DIM_3": "1" + } + }, + { + "benchmark": "read", + "file": "test.h5", + "configuration": { + "MEM_PATTERN": "CONTIG", + "FILE_PATTERN": "CONTIG", + "TIMESTEPS": "5", + "DELAYED_CLOSE_TIMESTEPS": "2", + "COLLECTIVE_DATA": "YES", + "COLLECTIVE_METADATA": "YES", + "EMULATED_COMPUTE_TIME_PER_TIMESTEP": "5 s", + "NUM_DIMS": "2", + "DIM_1": "1024", + "DIM_2": "1024", + "DIM_3": "1", + "CSV_FILE": "output.csv", + "MODE": "SYNC" + } + } + ] +} diff --git a/samples/sync-write-read-chunked-2d-SZIP.json b/samples/sync-write-read-chunked-2d-SZIP.json new file mode 100644 index 00000000..8e12554a --- /dev/null +++ b/samples/sync-write-read-chunked-2d-SZIP.json @@ -0,0 +1,57 @@ +{ + "mpi": { + "command": "mpirun", + "ranks": "4", + "configuration": "--allow-run-as-root --oversubscribe -n 2" + }, + "vol": { + }, + "file-system": { + }, + "directory": "storage", + "benchmarks": [ + { + "benchmark": "write", + "file": "test.h5", + "configuration": { + "MEM_PATTERN": "CONTIG", + "FILE_PATTERN": "CONTIG", + "TIMESTEPS": "5", + "DELAYED_CLOSE_TIMESTEPS": "2", + "COLLECTIVE_DATA": "YES", + "COLLECTIVE_METADATA": "YES", + "EMULATED_COMPUTE_TIME_PER_TIMESTEP": "5 s", + "NUM_DIMS": "2", + "DIM_1": "1024", + "DIM_2": "1024", + "DIM_3": "1", + "CSV_FILE": "output.csv", + "MODE": "SYNC", + "COMPRESS": "YES", + "COMPRESS_FILTER": "SZIP", + "CHUNK_DIM_1": "1024", + "CHUNK_DIM_2": "1024", + "CHUNK_DIM_3": "1" + } + }, + { + "benchmark": "read", + "file": "test.h5", + "configuration": { + "MEM_PATTERN": "CONTIG", + "FILE_PATTERN": "CONTIG", + "TIMESTEPS": "5", + "DELAYED_CLOSE_TIMESTEPS": "2", + "COLLECTIVE_DATA": "YES", + "COLLECTIVE_METADATA": "YES", + "EMULATED_COMPUTE_TIME_PER_TIMESTEP": "5 s", + "NUM_DIMS": "2", + "DIM_1": "1024", + "DIM_2": "1024", + "DIM_3": "1", + "CSV_FILE": "output.csv", + "MODE": "SYNC" + } + } + ] +} diff --git a/samples/sync-write-read-chunked-2d-ZFP.json b/samples/sync-write-read-chunked-2d-ZFP.json new file mode 100644 index 00000000..b746f81d --- /dev/null +++ b/samples/sync-write-read-chunked-2d-ZFP.json @@ -0,0 +1,58 @@ +{ + "mpi": { + "command": "mpirun", + "ranks": "4", + "configuration": "--allow-run-as-root --oversubscribe -n 2" + }, + "vol": { + "path": "/opt/H5Z-ZFP/plugin" + }, + "file-system": { + }, 
+ "directory": "storage", + "benchmarks": [ + { + "benchmark": "write", + "file": "test.h5", + "configuration": { + "MEM_PATTERN": "CONTIG", + "FILE_PATTERN": "CONTIG", + "TIMESTEPS": "5", + "DELAYED_CLOSE_TIMESTEPS": "2", + "COLLECTIVE_DATA": "YES", + "COLLECTIVE_METADATA": "YES", + "EMULATED_COMPUTE_TIME_PER_TIMESTEP": "5 s", + "NUM_DIMS": "2", + "DIM_1": "1024", + "DIM_2": "1024", + "DIM_3": "1", + "CSV_FILE": "output.csv", + "MODE": "SYNC", + "COMPRESS": "YES", + "COMPRESS_FILTER": "ZFP", + "CHUNK_DIM_1": "1024", + "CHUNK_DIM_2": "1024", + "CHUNK_DIM_3": "1" + } + }, + { + "benchmark": "read", + "file": "test.h5", + "configuration": { + "MEM_PATTERN": "CONTIG", + "FILE_PATTERN": "CONTIG", + "TIMESTEPS": "5", + "DELAYED_CLOSE_TIMESTEPS": "2", + "COLLECTIVE_DATA": "YES", + "COLLECTIVE_METADATA": "YES", + "EMULATED_COMPUTE_TIME_PER_TIMESTEP": "5 s", + "NUM_DIMS": "2", + "DIM_1": "1024", + "DIM_2": "1024", + "DIM_3": "1", + "CSV_FILE": "output.csv", + "MODE": "SYNC" + } + } + ] +} diff --git a/samples/sync-write-read-chunked-3d-GZIP.json b/samples/sync-write-read-chunked-3d-GZIP.json new file mode 100644 index 00000000..cd038e83 --- /dev/null +++ b/samples/sync-write-read-chunked-3d-GZIP.json @@ -0,0 +1,57 @@ +{ + "mpi": { + "command": "mpirun", + "ranks": "4", + "configuration": "--allow-run-as-root --oversubscribe -n 2" + }, + "vol": { + }, + "file-system": { + }, + "directory": "storage", + "benchmarks": [ + { + "benchmark": "write", + "file": "test.h5", + "configuration": { + "MEM_PATTERN": "CONTIG", + "FILE_PATTERN": "CONTIG", + "TIMESTEPS": "5", + "DELAYED_CLOSE_TIMESTEPS": "2", + "COLLECTIVE_DATA": "YES", + "COLLECTIVE_METADATA": "YES", + "EMULATED_COMPUTE_TIME_PER_TIMESTEP": "5 s", + "NUM_DIMS": "3", + "DIM_1": "64", + "DIM_2": "64", + "DIM_3": "64", + "CSV_FILE": "output.csv", + "MODE": "SYNC", + "COMPRESS": "YES", + "COMPRESS_FILTER": "GZIP", + "CHUNK_DIM_1": "64", + "CHUNK_DIM_2": "64", + "CHUNK_DIM_3": "64" + } + }, + { + "benchmark": "read", + "file": "test.h5", + "configuration": { + "MEM_PATTERN": "CONTIG", + "FILE_PATTERN": "CONTIG", + "TIMESTEPS": "5", + "DELAYED_CLOSE_TIMESTEPS": "2", + "COLLECTIVE_DATA": "YES", + "COLLECTIVE_METADATA": "YES", + "EMULATED_COMPUTE_TIME_PER_TIMESTEP": "5 s", + "NUM_DIMS": "3", + "DIM_1": "64", + "DIM_2": "64", + "DIM_3": "64", + "CSV_FILE": "output.csv", + "MODE": "SYNC" + } + } + ] +} diff --git a/samples/sync-write-read-chunked-3d-N_BIT.json b/samples/sync-write-read-chunked-3d-N_BIT.json new file mode 100644 index 00000000..54dc0097 --- /dev/null +++ b/samples/sync-write-read-chunked-3d-N_BIT.json @@ -0,0 +1,57 @@ +{ + "mpi": { + "command": "mpirun", + "ranks": "4", + "configuration": "--allow-run-as-root --oversubscribe -n 2" + }, + "vol": { + }, + "file-system": { + }, + "directory": "storage", + "benchmarks": [ + { + "benchmark": "write", + "file": "test.h5", + "configuration": { + "MEM_PATTERN": "CONTIG", + "FILE_PATTERN": "CONTIG", + "TIMESTEPS": "5", + "DELAYED_CLOSE_TIMESTEPS": "2", + "COLLECTIVE_DATA": "YES", + "COLLECTIVE_METADATA": "YES", + "EMULATED_COMPUTE_TIME_PER_TIMESTEP": "5 s", + "NUM_DIMS": "3", + "DIM_1": "64", + "DIM_2": "64", + "DIM_3": "64", + "CSV_FILE": "output.csv", + "MODE": "SYNC", + "COMPRESS": "YES", + "COMPRESS_FILTER": "N_BIT", + "CHUNK_DIM_1": "64", + "CHUNK_DIM_2": "64", + "CHUNK_DIM_3": "64" + } + }, + { + "benchmark": "read", + "file": "test.h5", + "configuration": { + "MEM_PATTERN": "CONTIG", + "FILE_PATTERN": "CONTIG", + "TIMESTEPS": "5", + "DELAYED_CLOSE_TIMESTEPS": "2", + "COLLECTIVE_DATA": "YES", + 
"COLLECTIVE_METADATA": "YES", + "EMULATED_COMPUTE_TIME_PER_TIMESTEP": "5 s", + "NUM_DIMS": "3", + "DIM_1": "64", + "DIM_2": "64", + "DIM_3": "64", + "CSV_FILE": "output.csv", + "MODE": "SYNC" + } + } + ] +} diff --git a/samples/sync-write-read-chunked-3d-SZ3.json b/samples/sync-write-read-chunked-3d-SZ3.json new file mode 100644 index 00000000..244d18e3 --- /dev/null +++ b/samples/sync-write-read-chunked-3d-SZ3.json @@ -0,0 +1,58 @@ +{ + "mpi": { + "command": "mpirun", + "ranks": "4", + "configuration": "--allow-run-as-root --oversubscribe -n 2" + }, + "vol": { + "path": "/opt/SZ3/lib" + }, + "file-system": { + }, + "directory": "storage", + "benchmarks": [ + { + "benchmark": "write", + "file": "test.h5", + "configuration": { + "MEM_PATTERN": "CONTIG", + "FILE_PATTERN": "CONTIG", + "TIMESTEPS": "5", + "DELAYED_CLOSE_TIMESTEPS": "2", + "COLLECTIVE_DATA": "YES", + "COLLECTIVE_METADATA": "YES", + "EMULATED_COMPUTE_TIME_PER_TIMESTEP": "5 s", + "NUM_DIMS": "3", + "DIM_1": "64", + "DIM_2": "64", + "DIM_3": "64", + "CSV_FILE": "output.csv", + "MODE": "SYNC", + "COMPRESS": "YES", + "COMPRESS_FILTER": "SZ3", + "CHUNK_DIM_1": "64", + "CHUNK_DIM_2": "64", + "CHUNK_DIM_3": "64" + } + }, + { + "benchmark": "read", + "file": "test.h5", + "configuration": { + "MEM_PATTERN": "CONTIG", + "FILE_PATTERN": "CONTIG", + "TIMESTEPS": "5", + "DELAYED_CLOSE_TIMESTEPS": "2", + "COLLECTIVE_DATA": "YES", + "COLLECTIVE_METADATA": "YES", + "EMULATED_COMPUTE_TIME_PER_TIMESTEP": "5 s", + "NUM_DIMS": "3", + "DIM_1": "64", + "DIM_2": "64", + "DIM_3": "64", + "CSV_FILE": "output.csv", + "MODE": "SYNC" + } + } + ] +} diff --git a/samples/sync-write-read-chunked-3d-SZIP.json b/samples/sync-write-read-chunked-3d-SZIP.json new file mode 100644 index 00000000..d4ebf7ce --- /dev/null +++ b/samples/sync-write-read-chunked-3d-SZIP.json @@ -0,0 +1,57 @@ +{ + "mpi": { + "command": "mpirun", + "ranks": "4", + "configuration": "--allow-run-as-root --oversubscribe -n 2" + }, + "vol": { + }, + "file-system": { + }, + "directory": "storage", + "benchmarks": [ + { + "benchmark": "write", + "file": "test.h5", + "configuration": { + "MEM_PATTERN": "CONTIG", + "FILE_PATTERN": "CONTIG", + "TIMESTEPS": "5", + "DELAYED_CLOSE_TIMESTEPS": "2", + "COLLECTIVE_DATA": "YES", + "COLLECTIVE_METADATA": "YES", + "EMULATED_COMPUTE_TIME_PER_TIMESTEP": "5 s", + "NUM_DIMS": "3", + "DIM_1": "64", + "DIM_2": "64", + "DIM_3": "64", + "CSV_FILE": "output.csv", + "MODE": "SYNC", + "COMPRESS": "YES", + "COMPRESS_FILTER": "SZIP", + "CHUNK_DIM_1": "64", + "CHUNK_DIM_2": "64", + "CHUNK_DIM_3": "64" + } + }, + { + "benchmark": "read", + "file": "test.h5", + "configuration": { + "MEM_PATTERN": "CONTIG", + "FILE_PATTERN": "CONTIG", + "TIMESTEPS": "5", + "DELAYED_CLOSE_TIMESTEPS": "2", + "COLLECTIVE_DATA": "YES", + "COLLECTIVE_METADATA": "YES", + "EMULATED_COMPUTE_TIME_PER_TIMESTEP": "5 s", + "NUM_DIMS": "3", + "DIM_1": "64", + "DIM_2": "64", + "DIM_3": "64", + "CSV_FILE": "output.csv", + "MODE": "SYNC" + } + } + ] +} diff --git a/samples/sync-write-read-chunked-3d-ZFP.json b/samples/sync-write-read-chunked-3d-ZFP.json new file mode 100644 index 00000000..95f79110 --- /dev/null +++ b/samples/sync-write-read-chunked-3d-ZFP.json @@ -0,0 +1,58 @@ +{ + "mpi": { + "command": "mpirun", + "ranks": "4", + "configuration": "--allow-run-as-root --oversubscribe -n 2" + }, + "vol": { + "path": "/opt/H5Z-ZFP/plugin" + }, + "file-system": { + }, + "directory": "storage", + "benchmarks": [ + { + "benchmark": "write", + "file": "test.h5", + "configuration": { + "MEM_PATTERN": "CONTIG", + 
"FILE_PATTERN": "CONTIG", + "TIMESTEPS": "5", + "DELAYED_CLOSE_TIMESTEPS": "2", + "COLLECTIVE_DATA": "YES", + "COLLECTIVE_METADATA": "YES", + "EMULATED_COMPUTE_TIME_PER_TIMESTEP": "5 s", + "NUM_DIMS": "3", + "DIM_1": "64", + "DIM_2": "64", + "DIM_3": "64", + "CSV_FILE": "output.csv", + "MODE": "SYNC", + "COMPRESS": "YES", + "COMPRESS_FILTER": "ZFP", + "CHUNK_DIM_1": "64", + "CHUNK_DIM_2": "64", + "CHUNK_DIM_3": "64" + } + }, + { + "benchmark": "read", + "file": "test.h5", + "configuration": { + "MEM_PATTERN": "CONTIG", + "FILE_PATTERN": "CONTIG", + "TIMESTEPS": "5", + "DELAYED_CLOSE_TIMESTEPS": "2", + "COLLECTIVE_DATA": "YES", + "COLLECTIVE_METADATA": "YES", + "EMULATED_COMPUTE_TIME_PER_TIMESTEP": "5 s", + "NUM_DIMS": "3", + "DIM_1": "64", + "DIM_2": "64", + "DIM_3": "64", + "CSV_FILE": "output.csv", + "MODE": "SYNC" + } + } + ] +} diff --git a/src/h5bench.py b/src/h5bench.py index b50b7850..f9e86296 100755 --- a/src/h5bench.py +++ b/src/h5bench.py @@ -203,6 +203,7 @@ def run(self): # Check if filters were enabled if self.filter: + # Skip the benchmark if no specified in the argument if name not in self.filter: self.logger.warning('Skipping "{}" due to active filters'.format(name)) @@ -383,21 +384,25 @@ def run_pattern(self, id, operation, setup, vol): if operation == 'write': benchmark_path = self.H5BENCH_PATTERNS_WRITE - if operation == 'write-unlimited': + elif operation == 'write-unlimited': benchmark_path = self.H5BENCH_PATTERNS_WRITE_UNLIMITED - if operation == 'write_var_normal_dist': + elif operation == 'write_var_normal_dist': benchmark_path = self.H5BENCH_PATTERNS_WRITE_VAR_NORMAL_DIST - if operation == 'overwrite': + elif operation == 'overwrite': benchmark_path = self.H5BENCH_PATTERNS_OVERWRITE - if operation == 'append': + elif operation == 'append': benchmark_path = self.H5BENCH_PATTERNS_APPEND - if operation == 'read': + elif operation == 'read': benchmark_path = self.H5BENCH_PATTERNS_READ + else: + self.logger.error('Invalid operation name provided. 
Check the documentation for valid write/read benchmark operations.')
+ sys.exit(os.EX_SOFTWARE)
+
 if self.prefix:
 benchmark_path = self.prefix + '/' + benchmark_path
 else:
diff --git a/tests/test_async_h5bench.py b/tests/test_async_h5bench.py
index 8519027f..0183f660 100644
--- a/tests/test_async_h5bench.py
+++ b/tests/test_async_h5bench.py
@@ -16,7 +16,7 @@ BINARY_UNLIMITED = 'h5bench_write_unlimited'
 samples = \
- glob.glob('async-write-*d-*.json') + \
+ glob.glob('async-write-*d-*[!SZIP,N_BIT,SZ3,GZIP,ZFP].json') + \
 glob.glob('async-append*.json') + \
 glob.glob('async-overwrite*.json') + \
 glob.glob('async-write-unlimited*.json')
diff --git a/tests/test_sync_h5bench.py b/tests/test_sync_h5bench.py
index e704a49b..b2c6f014 100644
--- a/tests/test_sync_h5bench.py
+++ b/tests/test_sync_h5bench.py
@@ -16,7 +16,7 @@ BINARY_UNLIMITED = 'h5bench_write_unlimited'
 samples = \
- glob.glob('sync-write-*d-*.json') + \
+ glob.glob('sync-write-*d-*[!SZIP,N_BIT,SZ3,GZIP,ZFP].json') + \
 glob.glob('sync-append*.json') + \
 glob.glob('sync-overwrite*.json') + \
 glob.glob('sync-write-unlimited*.json')
diff --git a/workflows/h5bench-hdf5-1.14.0.yml b/workflows/h5bench-hdf5-1.14.0.yml
deleted file mode 100644
index 4d26a71c..00000000
--- a/workflows/h5bench-hdf5-1.14.0.yml
+++ /dev/null
@@ -1,543 +0,0 @@
-name: h5bench (HDF5 1.14.0)
-
-on:
- pull_request:
-
- workflow_dispatch:
-
-jobs:
- h5bench:
- runs-on: ubuntu-20.04
- container:
- image: hpcio/hdf5-1.14.0
- timeout-minutes: 60
-
- steps:
- - uses: actions/checkout@v2
- with:
- submodules: true
-
- - name: Dependencies
- run: |
- # VOL-ASYNC
- git clone --recursive https://github.com/hpc-io/vol-async.git
-
- - name: Build Argobots
- run: |
- current="$PWD"
-
- export ABT_DIR=$current/vol-async/argobots
-
- cd $ABT_DIR
-
- ./autogen.sh
- ./configure --prefix=$ABT_DIR/install
-
- make -j 8
- make install
-
- - name: Build VOL-ASYNC
- run: |
- current="$PWD"
-
- export HDF5_DIR=/opt/hdf5
- export ABT_DIR=$current/vol-async/argobots/install
- export VOL_DIR=$current/vol-async
-
- cd $VOL_DIR
- mkdir build
- cd build
-
- cmake .. -DCMAKE_PREFIX_PATH=$HDF5_DIR
- make
-
- - name: Test VOL-ASYNC
- run: |
- current="$PWD"
-
- export HDF5_DIR=/opt/hdf5
- export HDF5_HOME=$HDF5_DIR
- export ABT_DIR=$current/vol-async/argobots/install
- export VOL_DIR=$current/vol-async
- export ASYNC_DIR=$current/vol-async/src
-
- export LD_LIBRARY_PATH=$VOL_DIR/src:$HDF5_DIR/lib:$ABT_DIR/lib:$LD_LIBRARY_PATH
- export HDF5_PLUGIN_PATH="$VOL_DIR/src"
- export HDF5_VOL_CONNECTOR="async under_vol=0;under_info={}"
-
- cd $VOL_DIR/build
-
- export LD_PRELOAD=$ASYNC_DIR/libh5async.so:$ABT_DIR/lib/libabt.so:$HDF5_DIR/lib/libhdf5.so
-
- ctest
-
- - name: Build h5bench SYNC
- run: |
- current="$PWD"
-
- export HDF5_HOME=/opt/hdf5
-
- mkdir build-sync
- cd build-sync
-
- cmake .. \
- -DCMAKE_C_COMPILER=$HDF5_HOME/bin/h5pcc \
- -DH5BENCH_ALL=ON
- make
-
- - name: Build h5bench ASYNC
- run: |
- current="$PWD"
-
- export HDF5_HOME=/opt/hdf5
- export ASYNC_HOME=$current/vol-async/src
-
- mkdir build-async
- cd build-async
-
- cmake .. 
\ - -DWITH_ASYNC_VOL:BOOL=ON \ - -DCMAKE_C_FLAGS="-I/$current/vol-async/src -L/$current/vol-async/src" \ - -DCMAKE_C_COMPILER=$HDF5_HOME/bin/h5pcc \ - -DH5BENCH_ALL=ON - make - - - name: Test h5bench ASYNC write/read - run: | - current="$PWD" - - export HDF5_DIR=/opt/hdf5 - export ABT_DIR=$current/vol-async/argobots/install - export VOL_DIR=$current/vol-async - export ASYNC_DIR=$VOL_DIR/src - - export LD_PRELOAD=$ASYNC_DIR/libh5async.so:$ABT_DIR/lib/libabt.so:$HDF5_DIR/lib/libhdf5.so - - cd build-async - - python3 ../samples/update.py ../samples/async-write-read-contig-1d-small.json - - cat ../samples/async-write-read-contig-1d-small.json - - ./h5bench --debug --abort-on-failure ../samples/async-write-read-contig-1d-small.json - - - name: Test h5bench SYNC write/read - run: | - cd build-sync - ./h5bench --debug --abort-on-failure --validate-mode ../samples/sync-write-read-contig-1d-small.json - - - name: Test h5bench SYNC write 1D contiguous (memory) strided (file) - run: | - cd build-sync - ./h5bench --debug --abort-on-failure --validate-mode ../samples/sync-write-1d-contig-strided.json - - - name: Test h5bench SYNC write 1D contiguous (memory) contiguous (file) - run: | - cd build-sync - ./h5bench --debug --abort-on-failure --validate-mode ../samples/sync-write-1d-contig-contig.json - - - name: Test h5bench SYNC write 1D contiguous (memory) interleaved (file) - run: | - cd build-sync - ./h5bench --debug --abort-on-failure --validate-mode ../samples/sync-write-1d-contig-interleaved.json - - - name: Test h5bench SYNC write 1D interleaved (memory) contiguous (file) - run: | - cd build-sync - ./h5bench --debug --abort-on-failure --validate-mode ../samples/sync-write-1d-interleaved-contig.json - - - name: Test h5bench SYNC write 1D interleaved (memory) interleaved (file) - run: | - cd build-sync - ./h5bench --debug --abort-on-failure --validate-mode ../samples/sync-write-1d-interleaved-interleaved.json - - - name: Test h5bench SYNC write 2D contiguous (memory) contiguous (file) - run: | - cd build-sync - ./h5bench --debug --abort-on-failure --validate-mode ../samples/sync-write-2d-contig-contig.json - - - name: Test h5bench SYNC write 2D contiguous (memory) interleaved (file) - run: | - cd build-sync - ./h5bench --debug --abort-on-failure --validate-mode ../samples/sync-write-2d-contig-interleaved.json - - - name: Test h5bench SYNC write 2D interleaved (memory) contiguous (file) - run: | - cd build-sync - ./h5bench --debug --abort-on-failure --validate-mode ../samples/sync-write-2d-interleaved-contig.json - - - name: Test h5bench SYNC write 2D interleaved (memory) interleaved (file) - run: | - cd build-sync - ./h5bench --debug --abort-on-failure --validate-mode ../samples/sync-write-2d-interleaved-interleaved.json - - - name: Test h5bench SYNC write 3D contiguous (memory) contiguous (file) - run: | - cd build-sync - ./h5bench --debug --abort-on-failure --validate-mode ../samples/sync-write-3d-contig-contig.json - - - name: Test h5bench SYNC read 1D contiguous (memory) contiguous (file) full - run: | - cd build-sync - ./h5bench --debug --abort-on-failure --validate-mode ../samples/sync-write-1d-contig-contig-read-full.json - - - name: Test h5bench SYNC read 1D contiguous (memory) contiguous (file) partial - run: | - cd build-sync - ./h5bench --debug --abort-on-failure --validate-mode ../samples/sync-write-1d-contig-contig-read-partial.json - - - name: Test h5bench SYNC read 1D contiguous (memory) contiguous (file) strided - run: | - cd build-sync - ./h5bench --debug --abort-on-failure 
--validate-mode ../samples/sync-write-1d-contig-contig-read-strided.json - - - name: Test h5bench SYNC read 2D contiguous (memory) contiguous (file) full - run: | - cd build-sync - ./h5bench --debug --abort-on-failure --validate-mode ../samples/sync-write-2d-contig-contig-read-full.json - - - name: Test h5bench SYNC read 3D contiguous (memory) contiguous (file) full - run: | - cd build-sync - ./h5bench --debug --abort-on-failure --validate-mode ../samples/sync-write-2d-contig-contig-read-full.json - - - name: Test h5bench SYNC write unlimited - run: | - cd build-sync - ./h5bench --debug --abort-on-failure ../samples/sync-write-unlimited.json - - - name: Test h5bench SYNC overwrite - run: | - cd build-sync - ./h5bench --debug --abort-on-failure ../samples/sync-overwrite.json - - - name: Test h5bench SYNC append - run: | - cd build-sync - ./h5bench --debug --abort-on-failure ../samples/sync-append.json - - - name: Test h5bench SYNC exerciser - run: | - cd build-sync - ./h5bench --debug --abort-on-failure ../samples/sync-exerciser.json - - - name: Test h5bench SYNC metadata - run: | - cd build-sync - ./h5bench --debug --abort-on-failure ../samples/sync-metadata.json - - - name: Test h5bench SYNC amrex - run: | - cd build-sync - ./h5bench --debug --abort-on-failure ../samples/sync-amrex.json - - - name: Test h5bench SYNC openpmd - run: | - cd build-sync - ./h5bench --debug --abort-on-failure ../samples/sync-openpmd.json - - - name: Test h5bench SYNC e3sm - run: | - export LD_LIBRARY_PATH=/opt/hdf5/lib:$LD_LIBRARY_PATH - - cd build-sync - ./h5bench --debug --abort-on-failure ../samples/sync-e3sm.json - - - name: Test h5bench ASYNC write/read - run: | - current="$PWD" - - export HDF5_DIR=/opt/hdf5 - export ABT_DIR=$current/vol-async/argobots/install - export VOL_DIR=$current/vol-async - export ASYNC_DIR=$VOL_DIR/src - - export LD_PRELOAD=$ABT_DIR/lib/libabt.so - - cd build-async - - python3 ../samples/update.py ../samples/async-write-read-contig-1d-small.json - - cat ../samples/async-write-read-contig-1d-small.json - - ./h5bench --debug --abort-on-failure ../samples/async-write-read-contig-1d-small.json - - - name: Test h5bench ASYNC write 1D contiguous (memory) strided (file) - run: | - current="$PWD" - - export HDF5_DIR=/opt/hdf5 - export ABT_DIR=$current/vol-async/argobots/install - export VOL_DIR=$current/vol-async - export ASYNC_DIR=$VOL_DIR/src - - export LD_PRELOAD=$ABT_DIR/lib/libabt.so - - cd build-async - - python3 ../samples/update.py ../samples/async-write-1d-contig-strided.json - - ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-1d-contig-strided.json - - - name: Test h5bench ASYNC write 1D contiguous (memory) contiguous (file) - run: | - current="$PWD" - - export HDF5_DIR=/opt/hdf5 - export ABT_DIR=$current/vol-async/argobots/install - export VOL_DIR=$current/vol-async - export ASYNC_DIR=$VOL_DIR/src - - export LD_PRELOAD=$ABT_DIR/lib/libabt.so - - cd build-async - - python3 ../samples/update.py ../samples/async-write-1d-contig-contig.json - - ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-1d-contig-contig.json - - - name: Test h5bench ASYNC write 1D contiguous (memory) interleaved (file) - run: | - current="$PWD" - - export HDF5_DIR=/opt/hdf5 - export ABT_DIR=$current/vol-async/argobots/install - export VOL_DIR=$current/vol-async - export ASYNC_DIR=$VOL_DIR/src - - export LD_PRELOAD=$ABT_DIR/lib/libabt.so - - cd build-async - - python3 ../samples/update.py ../samples/async-write-1d-contig-interleaved.json - - ./h5bench 
--debug --abort-on-failure --validate-mode ../samples/async-write-1d-contig-interleaved.json - - - name: Test h5bench ASYNC write 1D interleaved (memory) contiguous (file) - run: | - current="$PWD" - - export HDF5_DIR=/opt/hdf5 - export ABT_DIR=$current/vol-async/argobots/install - export VOL_DIR=$current/vol-async - export ASYNC_DIR=$VOL_DIR/src - - export LD_PRELOAD=$ABT_DIR/lib/libabt.so - - cd build-async - - python3 ../samples/update.py ../samples/async-write-1d-interleaved-contig.json - - ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-1d-interleaved-contig.json - - - name: Test h5bench ASYNC write 1D interleaved (memory) interleaved (file) - run: | - current="$PWD" - - export HDF5_DIR=/opt/hdf5 - export ABT_DIR=$current/vol-async/argobots/install - export VOL_DIR=$current/vol-async - export ASYNC_DIR=$VOL_DIR/src - - export LD_PRELOAD=$ABT_DIR/lib/libabt.so - - cd build-async - - python3 ../samples/update.py ../samples/async-write-1d-interleaved-interleaved.json - - ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-1d-interleaved-interleaved.json - - - name: Test h5bench ASYNC write 2D contiguous (memory) contiguous (file) - run: | - current="$PWD" - - export HDF5_DIR=/opt/hdf5 - export ABT_DIR=$current/vol-async/argobots/install - export VOL_DIR=$current/vol-async - export ASYNC_DIR=$VOL_DIR/src - - export LD_PRELOAD=$ABT_DIR/lib/libabt.so - - cd build-async - - python3 ../samples/update.py ../samples/async-write-2d-contig-contig.json - - ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-2d-contig-contig.json - - - name: Test h5bench ASYNC write 2D contiguous (memory) interleaved (file) - run: | - current="$PWD" - - export HDF5_DIR=/opt/hdf5 - export ABT_DIR=$current/vol-async/argobots/install - export VOL_DIR=$current/vol-async - export ASYNC_DIR=$VOL_DIR/src - - export LD_PRELOAD=$ABT_DIR/lib/libabt.so - - cd build-async - - python3 ../samples/update.py ../samples/async-write-2d-contig-interleaved.json - - ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-2d-contig-interleaved.json - - - name: Test h5bench ASYNC write 2D interleaved (memory) contiguous (file) - run: | - current="$PWD" - - export HDF5_DIR=/opt/hdf5 - export ABT_DIR=$current/vol-async/argobots/install - export VOL_DIR=$current/vol-async - export ASYNC_DIR=$VOL_DIR/src - - export LD_PRELOAD=$ABT_DIR/lib/libabt.so - - cd build-async - - python3 ../samples/update.py ../samples/async-write-2d-interleaved-contig.json - - ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-2d-interleaved-contig.json - - - name: Test h5bench ASYNC write 2D interleaved (memory) interleaved (file) - run: | - current="$PWD" - - export HDF5_DIR=/opt/hdf5 - export ABT_DIR=$current/vol-async/argobots/install - export VOL_DIR=$current/vol-async - export ASYNC_DIR=$VOL_DIR/src - - export LD_PRELOAD=$ABT_DIR/lib/libabt.so - - cd build-async - - python3 ../samples/update.py ../samples/async-write-2d-interleaved-interleaved.json - - ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-2d-interleaved-interleaved.json - - - name: Test h5bench ASYNC write 3D contiguous (memory) contiguous (file) - run: | - current="$PWD" - - export HDF5_DIR=/opt/hdf5 - export ABT_DIR=$current/vol-async/argobots/install - export VOL_DIR=$current/vol-async - export ASYNC_DIR=$VOL_DIR/src - - export LD_PRELOAD=$ABT_DIR/lib/libabt.so - - cd build-async - - python3 ../samples/update.py 
../samples/async-write-3d-contig-contig.json - - ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-3d-contig-contig.json - - - name: Test h5bench ASYNC read 1D contiguous (memory) contiguous (file) full - run: | - current="$PWD" - - export HDF5_DIR=/opt/hdf5 - export ABT_DIR=$current/vol-async/argobots/install - export VOL_DIR=$current/vol-async - export ASYNC_DIR=$VOL_DIR/src - - export LD_PRELOAD=$ABT_DIR/lib/libabt.so - - cd build-async - - python3 ../samples/update.py ../samples/async-write-1d-contig-contig-read-full.json - - ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-1d-contig-contig-read-full.json - - - name: Test h5bench ASYNC read 1D contiguous (memory) contiguous (file) partial - run: | - current="$PWD" - - export HDF5_DIR=/opt/hdf5 - export ABT_DIR=$current/vol-async/argobots/install - export VOL_DIR=$current/vol-async - export ASYNC_DIR=$VOL_DIR/src - - export LD_PRELOAD=$ABT_DIR/lib/libabt.so - - cd build-async - - python3 ../samples/update.py ../samples/async-write-1d-contig-contig-read-partial.json - - ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-1d-contig-contig-read-partial.json - - - name: Test h5bench ASYNC read 1D contiguous (memory) contiguous (file) strided - run: | - current="$PWD" - - export HDF5_DIR=/opt/hdf5 - export ABT_DIR=$current/vol-async/argobots/install - export VOL_DIR=$current/vol-async - export ASYNC_DIR=$VOL_DIR/src - - export LD_PRELOAD=$ABT_DIR/lib/libabt.so - - cd build-async - - python3 ../samples/update.py ../samples/async-write-1d-contig-contig-read-strided.json - - ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-1d-contig-contig-read-strided.json - - - name: Test h5bench ASYNC read 2D contiguous (memory) contiguous (file) full - run: | - current="$PWD" - - export HDF5_DIR=/opt/hdf5 - export ABT_DIR=$current/vol-async/argobots/install - export VOL_DIR=$current/vol-async - export ASYNC_DIR=$VOL_DIR/src - - export LD_PRELOAD=$ABT_DIR/lib/libabt.so - - cd build-async - - python3 ../samples/update.py ../samples/async-write-2d-contig-contig-read-full.json - - ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-2d-contig-contig-read-full.json - - - name: Test h5bench ASYNC read 3D contiguous (memory) contiguous (file) full - run: | - current="$PWD" - - export HDF5_DIR=/opt/hdf5 - export ABT_DIR=$current/vol-async/argobots/install - export VOL_DIR=$current/vol-async - export ASYNC_DIR=$VOL_DIR/src - - export LD_PRELOAD=$ABT_DIR/lib/libabt.so - - cd build-async - - python3 ../samples/update.py ../samples/async-write-2d-contig-contig-read-full.json - - ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-2d-contig-contig-read-full.json - - - name: Test h5bench ASYNC amrex - run: | - current="$PWD" - - export HDF5_DIR=/opt/hdf5 - export ABT_DIR=$current/vol-async/argobots/install - export VOL_DIR=$current/vol-async - export ASYNC_DIR=$VOL_DIR/src - - export LD_PRELOAD=$ABT_DIR/lib/libabt.so - - cd build-async - - python3 ../samples/update.py ../samples/async-amrex.json - - ./h5bench --debug --abort-on-failure ../samples/async-amrex.json - - - name: Upload artifact - if: always() - uses: actions/upload-artifact@v2 - with: - name: test - path: build*/storage/**/std* - retention-days: 1
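
Note on the sample-exclusion globs added to tests/test_sync_h5bench.py and tests/test_async_h5bench.py: in Python's glob/fnmatch syntax, [!...] is a single-character class, not a list of excluded words, so '*[!SZIP,N_BIT,SZ3,GZIP,ZFP].json' only constrains the one character immediately before '.json'. The pattern still skips the new compression samples because every filter suffix ends in a character from the class (P, T, or 3), while the plain sample names end in a lowercase letter outside it. A minimal sketch to verify this behavior (filenames taken from the samples added above; fnmatch.fnmatchcase is used to avoid platform case-folding):

    import fnmatch

    # Pattern copied from tests/test_sync_h5bench.py. [!...] is a character
    # class, so only the single character before '.json' is tested.
    pattern = 'sync-write-*d-*[!SZIP,N_BIT,SZ3,GZIP,ZFP].json'

    samples = [
        'sync-write-1d-contig-contig.json',       # kept: ends in 'g', not in the class
        'sync-write-read-chunked-3d-GZIP.json',   # skipped: ends in 'P'
        'sync-write-read-chunked-3d-N_BIT.json',  # skipped: ends in 'T'
        'sync-write-read-chunked-3d-SZ3.json',    # skipped: ends in '3'
        'sync-write-read-chunked-3d-SZIP.json',   # skipped: ends in 'P'
        'sync-write-read-chunked-3d-ZFP.json',    # skipped: ends in 'P'
    ]

    print([s for s in samples if fnmatch.fnmatchcase(s, pattern)])
    # ['sync-write-1d-contig-contig.json']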
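
For reference, the if/elif rewrite of run_pattern() in src/h5bench.py guarantees that exactly one benchmark binary is selected per operation, and an unknown operation name now produces a clear error and exits instead of leaving benchmark_path unassigned and crashing later. The same dispatch could also be written as a table lookup; a hypothetical standalone sketch (the operation names come from the patch, while the binary names and logger wiring are assumptions for illustration):

    import logging
    import os
    import sys

    logger = logging.getLogger('h5bench')

    # Hypothetical binary names; in h5bench these live in the
    # H5BENCH_PATTERNS_* constants referenced by the patch.
    BENCHMARK_PATHS = {
        'write': 'h5bench_write',
        'write-unlimited': 'h5bench_write_unlimited',
        'write_var_normal_dist': 'h5bench_write_var_normal_dist',
        'overwrite': 'h5bench_overwrite',
        'append': 'h5bench_append',
        'read': 'h5bench_read',
    }

    def resolve_benchmark_path(operation):
        """Return the benchmark binary for an operation; exit on unknown names."""
        if operation not in BENCHMARK_PATHS:
            logger.error('Invalid operation name provided. '
                         'Check the documentation for valid write/read operations.')
            sys.exit(os.EX_SOFTWARE)
        return BENCHMARK_PATHS[operation]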