diff --git a/.build_rtd_docs/conf.py b/.build_rtd_docs/conf.py
index d93113678be..6b90638e3bd 100644
--- a/.build_rtd_docs/conf.py
+++ b/.build_rtd_docs/conf.py
@@ -38,7 +38,7 @@
pth = os.path.join("..", "distribution")
args = (
"python",
- "make_release.py",
+ "update_version.py",
)
# run the command
proc = Popen(args, stdout=PIPE, stderr=PIPE, cwd=pth)
diff --git a/distribution/.fprettify.yaml b/.fprettify.yaml
similarity index 100%
rename from distribution/.fprettify.yaml
rename to .fprettify.yaml
diff --git a/.github/common/fortran-format-check.sh b/.github/common/fortran-format-check.sh
index 09992ba1184..e72b4bb399b 100755
--- a/.github/common/fortran-format-check.sh
+++ b/.github/common/fortran-format-check.sh
@@ -26,7 +26,7 @@ for path in "${SEARCHPATHS[@]}"; do
((checkcount++))
- if [[ ! -z $(fprettify -d -c ./distribution/.fprettify.yaml "${file}" 2>&1) ]]; then
+ if [[ ! -z $(fprettify -d -c .fprettify.yaml "${file}" 2>&1) ]]; then
fformatfails+=("${file}")
fi
done
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 27caa9bde0e..e25e23ffeea 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -5,10 +5,16 @@ on:
- master
- develop
- ci-diagnose*
+ paths-ignore:
+ - '**.md'
+ - 'doc/**'
pull_request:
branches:
- master
- develop
+ paths-ignore:
+ - '**.md'
+ - 'doc/**'
jobs:
lint:
name: Lint (fprettify)
@@ -57,11 +63,14 @@ jobs:
cache-downloads: true
cache-env: true
- - name: Meson setup/compile
+ - name: Meson setup
run: |
meson setup builddir -Ddebug=false -Dwerror=true
+
+ - name: Meson compile
+ run: |
meson compile -C builddir
-
+
- name: Meson test
run: |
meson test --verbose --no-rebuild -C builddir
@@ -94,7 +103,13 @@ jobs:
repository: MODFLOW-USGS/modflow6-testmodels
path: modflow6-testmodels
- - name: Setup gfortran ${{ env.GCC_V }}
+ - name: Checkout modflow6-examples
+ uses: actions/checkout@v3
+ with:
+ repository: MODFLOW-USGS/modflow6-examples
+ path: modflow6-examples
+
+ - name: Setup GNU Fortran ${{ env.GCC_V }}
uses: awvwgk/setup-fortran@main
with:
compiler: gcc
@@ -107,6 +122,26 @@ jobs:
cache-downloads: true
cache-env: true
+ - name: Cache modflow6 examples
+ id: cache-examples
+ uses: actions/cache@v3
+ with:
+ path: modflow6-examples/examples
+ key: modflow6-examples-${{ hashFiles('modflow6-examples/scripts/**') }}
+
+ - name: Install extra Python packages
+ if: steps.cache-examples.outputs.cache-hit != 'true'
+ working-directory: modflow6-examples/etc
+ run: |
+ pip install -r requirements.pip.txt
+
+ - name: Build example models
+ if: steps.cache-examples.outputs.cache-hit != 'true'
+ working-directory: modflow6-examples/etc
+ run: |
+ python ci_build_files.py
+ ls -lh ../examples/
+
- name: Build modflow6
working-directory: modflow6
run: |
@@ -124,11 +159,18 @@ jobs:
run: |
pytest -v --durations 0 get_exes.py
- - name: Run tests
+ - name: Test programs
working-directory: modflow6/autotest
run: |
pytest -v -n auto --durations 0
+ - name: Test scripts
+ working-directory: modflow6/distribution
+ env:
+ GITHUB_TOKEN: ${{ github.token }}
+ run: |
+ pytest -v --durations 0
+
test_gfortran_previous:
name: Test gfortran (${{ matrix.GCC_V }}, ${{ matrix.os }})
needs:
@@ -158,7 +200,7 @@ jobs:
repository: MODFLOW-USGS/modflow6-testmodels
path: modflow6-testmodels
- - name: Setup gfortran ${{ matrix.GCC_V }}
+ - name: Setup GNU Fortran ${{ matrix.GCC_V }}
uses: awvwgk/setup-fortran@main
with:
compiler: gcc
@@ -188,10 +230,10 @@ jobs:
run: |
pytest -v --durations 0 get_exes.py
- - name: Run tests
+ - name: Test modflow6
working-directory: modflow6/autotest
run: |
- pytest -v -n auto --durations 0
+ pytest -v -n auto --durations 0
test_ifort:
name: Test (ifort)
@@ -219,6 +261,12 @@ jobs:
repository: MODFLOW-USGS/modflow6-testmodels
path: modflow6-testmodels
+ - name: Checkout modflow6-examples
+ uses: actions/checkout@v3
+ with:
+ repository: MODFLOW-USGS/modflow6-examples
+ path: modflow6-examples
+
- name: Setup Micromamba
uses: mamba-org/provision-with-micromamba@main
with:
@@ -226,10 +274,10 @@ jobs:
cache-downloads: true
cache-env: true
- - name: Setup ifort
+ - name: Setup Intel Fortran
uses: modflowpy/install-intelfortran-action@v1
- - name: Add Micromamba Scripts dir to path
+ - name: Fix Micromamba path (Windows)
if: runner.os == 'Windows'
shell: pwsh
run: |
@@ -237,6 +285,30 @@ jobs:
$mamba_bin = "C:\Users\runneradmin\micromamba-root\envs\modflow6\Scripts"
echo $mamba_bin | Out-File -FilePath $env:GITHUB_PATH -Encoding utf8 -Append
+ - name: Cache modflow6 examples
+ id: cache-examples
+ uses: actions/cache@v3
+ with:
+ path: modflow6-examples/examples
+ key: modflow6-examples-${{ hashFiles('modflow6-examples/scripts/**') }}
+
+ - name: Install extra Python packages
+ if: steps.cache-examples.outputs.cache-hit != 'true'
+ working-directory: modflow6-examples/etc
+ run: |
+ pip install -r requirements.pip.txt
+
+ - name: Build example models
+ if: steps.cache-examples.outputs.cache-hit != 'true'
+ working-directory: modflow6-examples/etc
+ run: |
+ python ci_build_files.py
+ ls -lh ../examples/
+
+ - name: Update version files
+ working-directory: modflow6/distribution
+ run: python update_version.py
+
- name: Build modflow6
if: runner.os != 'Windows'
working-directory: modflow6
@@ -245,7 +317,7 @@ jobs:
meson install -C builddir
meson test --verbose --no-rebuild -C builddir
- - name: Build modflow6
+ - name: Build modflow6 (Windows)
if: runner.os == 'Windows'
working-directory: modflow6
shell: pwsh
@@ -265,175 +337,39 @@ jobs:
run: |
pytest -v --durations 0 get_exes.py
- - name: Get executables
+ - name: Get executables (Windows)
if: runner.os == 'Windows'
working-directory: modflow6/autotest
shell: pwsh
run: |
pytest -v --durations 0 get_exes.py
-
- - name: Run tests
+
+ - name: Test programs
if: runner.os != 'Windows'
working-directory: modflow6/autotest
run: |
pytest -v -n auto --durations 0
- - name: Run tests
+ - name: Test programs (Windows)
if: runner.os == 'Windows'
working-directory: modflow6/autotest
shell: pwsh
run: |
pytest -v -n auto --durations 0
-
- test_makefiles_gfortran:
- name: Test makefiles (gfortran ${{ matrix.gcc_v }}, ${{ matrix.os }})
- needs:
- - lint
- - build
- runs-on: ${{ matrix.os }}
- strategy:
- fail-fast: false
- matrix:
- os: [ ubuntu-22.04, macos-12, windows-2022]
- gcc_v: [ 12 ]
- defaults:
- run:
- shell: bash -l {0}
- env:
- FC: gfortran
- steps:
-
- - name: Checkout modflow6
- uses: actions/checkout@v3
- with:
- path: modflow6
-
- - name: Checkout modflow6-testmodels
- uses: actions/checkout@v3
- with:
- repository: MODFLOW-USGS/modflow6-testmodels
- path: modflow6-testmodels
-
- - name: Setup gfortran ${{ matrix.gcc_v }}
- uses: awvwgk/setup-fortran@main
- with:
- compiler: gcc
- version: ${{ matrix.gcc_v }}
-
- - name: Setup Micromamba
- uses: mamba-org/provision-with-micromamba@main
- with:
- environment-file: modflow6/environment.yml
- cache-downloads: true
- cache-env: true
- - name: Test makefiles
+ - name: Test scripts
+ if: runner.os != 'Windows'
working-directory: modflow6/distribution
+ env:
+ GITHUB_TOKEN: ${{ github.token }}
run: |
- pytest -v -n auto build_makefiles.py
-
- test_makefiles_ifort:
- name: Test makefiles (ifort, ${{ matrix.os }})
- needs:
- - lint
- - build
- runs-on: ${{ matrix.os }}
- strategy:
- fail-fast: false
- matrix:
- os: [ ubuntu-latest, macos-latest ]
- defaults:
- run:
- shell: bash -l {0}
- env:
- FC: ifort
- steps:
-
- - name: Checkout modflow6
- uses: actions/checkout@v3
- with:
- path: modflow6
-
- - name: Checkout modflow6-testmodels
- uses: actions/checkout@v3
- with:
- repository: MODFLOW-USGS/modflow6-testmodels
- path: modflow6-testmodels
-
- - name: Setup ifort
- uses: modflowpy/install-intelfortran-action@v1
-
- - name: Setup Micromamba
- uses: mamba-org/provision-with-micromamba@main
- with:
- environment-file: modflow6/environment.yml
- cache-downloads: true
- cache-env: true
+ pytest -v --durations 0
- - name: Test makefiles
+ - name: Test scripts (Windows)
+ if: runner.os == 'Windows'
working-directory: modflow6/distribution
+ shell: pwsh
+ env:
+ GITHUB_TOKEN: ${{ github.token }}
run: |
- pytest -v -n auto build_makefiles.py
-
- test_nightly_build_gfortran:
- name: Test nightly build (gfortran 12)
- needs:
- - lint
- - build
- runs-on: ${{ matrix.os }}
- strategy:
- fail-fast: false
- matrix:
- include:
- - os: ubuntu-22.04
- ostag: linux
- - os: macos-12
- ostag: mac
- - os: windows-2022
- ostag: win64
- env:
- GCC_V: 12
- defaults:
- run:
- shell: bash -l {0}
- steps:
-
- - name: Checkout modflow6
- uses: actions/checkout@v3
-
- - name: Setup gfortran ${{ env.GCC_V }}
- uses: awvwgk/setup-fortran@main
- with:
- compiler: gcc
- version: ${{ env.GCC_V }}
-
- - name: Setup Micromamba
- uses: mamba-org/provision-with-micromamba@main
- with:
- cache-downloads: true
- cache-env: true
-
- - name: Print Python package versions
- run: |
- pip list
-
- - name: Update flopy
- working-directory: autotest
- run: |
- python update_flopy.py
-
- - name: Run nightly build script
- working-directory: distribution
- run: |
- python build_nightly.py
-
- - name: Make sure zip file exists
- working-directory: distribution
- run: |
- path="temp_zip/${{ matrix.ostag }}.zip"
- if [ -e "$path" ]; then
- echo "Zipfile found: $path"
- else
- echo "Zipfile not found: $path"
- exit 1
- fi
\ No newline at end of file
+ pytest -v --durations 0
diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml
index 529b69cb588..d863a1e5b23 100644
--- a/.github/workflows/docs.yml
+++ b/.github/workflows/docs.yml
@@ -11,27 +11,45 @@ on:
- develop
jobs:
rtd_build:
- runs-on: ubuntu-latest
+ name: Build ReadTheDocs
+ runs-on: ubuntu-22.04
defaults:
run:
shell: bash -l {0}
env:
+ GCC_V: 12
working-directory: .build_rtd_docs
distribution-directory: distribution
steps:
- - uses: actions/checkout@v3
+ - name: Checkout modflow6
+ uses: actions/checkout@v3
+ with:
+ path: modflow6
+
+ - name: Checkout modflow6-examples
+ uses: actions/checkout@v3
+ with:
+ repository: MODFLOW-USGS/modflow6-examples
+ path: modflow6-examples
+
+ - name: Checkout usgslatex
+ uses: actions/checkout@v3
+ with:
+ repository: MODFLOW-USGS/usgslatex
+ path: usgslatex
- name: Install Conda environment from environment.yml
uses: mamba-org/provision-with-micromamba@main
with:
- cache-downloads: true
- cache-env: true
+ environment-file: modflow6/environment.yml
+ cache-downloads: true
+ cache-env: true
- name: Install additional packages for Sphinx using pip
+ working-directory: modflow6/.build_rtd_docs
run: |
pip install -r requirements.rtd.txt
- working-directory: ${{env.working-directory}}
- name: Print python package versions
run: |
@@ -42,70 +60,93 @@ jobs:
sudo apt-get update
sudo apt install texlive-latex-extra texlive-science texlive-font-utils
- - name: Clone USGS LaTeX repo
- run: |
- git clone https://github.com/MODFLOW-USGS/usgslatex.git usgslatex
-
- name: Install USGS LaTeX style files and Univers font
- working-directory: ./usgslatex/usgsLaTeX
+ working-directory: usgslatex/usgsLaTeX
run: |
sudo ./install.sh --all-users
- name: Test building files from dfn's for LaTeX
- working-directory: autotest
+ working-directory: modflow6/autotest
run: |
pytest -v build_mfio_tex.py
- - name: Install gfortran
- uses: modflowpy/install-gfortran-action@v1
+ - name: Setup GNU Fortran ${{ env.GCC_V }}
+ uses: awvwgk/setup-fortran@main
+ with:
+ compiler: gcc
+ version: ${{ env.GCC_V }}
+
+ - name: Cache modflow6 examples
+ id: cache-examples
+ uses: actions/cache@v3
+ with:
+ path: modflow6-examples/examples
+ key: modflow6-examples-${{ hashFiles('modflow6-examples/scripts/**') }}
- - name: Run-time comparison
+ - name: Install extra Python packages
+ if: steps.cache-examples.outputs.cache-hit != 'true'
+ working-directory: modflow6-examples/etc
run: |
- python evaluate_run_times.py
- working-directory: ${{env.distribution-directory}}
+ pip install -r requirements.pip.txt
+ pip install -r requirements.usgs.txt
+
+ - name: Build example models
+ if: steps.cache-examples.outputs.cache-hit != 'true'
+ working-directory: modflow6-examples/etc
+ run: |
+ python ci_build_files.py
+ ls -lh ../examples/
+
+ - name: Run benchmarks
+ working-directory: modflow6/distribution
+ run: python benchmark.py
- - name: run sphinx
+ - name: Run sphinx
+ working-directory: modflow6/.build_rtd_docs
run: |
make html
- working-directory: ${{env.working-directory}}
- - name: Output run-time file if not triggering rtd build
- if: github.repository_owner == 'MODFLOW-USGS' && github.event_name != 'push'
+ - name: Show results
+ working-directory: modflow6/distribution
run: |
cat run-time-comparison.md
- working-directory: ${{env.distribution-directory}}
- # Create an artifact of the processed files.
- - uses: actions/upload-artifact@v2.2.3
+ - name: Upload comparison
+ uses: actions/upload-artifact@v3
+ with:
+ name: run-time-comparison
+ path: modflow6/distribution/run-time-comparison.md
+
+ - name: Upload results
+ uses: actions/upload-artifact@v3
with:
name: rtd-files-for-${{ github.sha }}
path: |
- ${{env.working-directory}}/index.rst
- ${{env.working-directory}}/mf6io.rst
- ${{env.working-directory}}/_mf6run/
- ${{env.working-directory}}/_mf6io/
- ${{env.working-directory}}/_static/
+ modflow6/.build_rtd_docs/
+ modflow6/.build_rtd_docs/index.rst
+ modflow6/.build_rtd_docs/mf6io.rst
+ modflow6/.build_rtd_docs/_mf6run/
+ modflow6/.build_rtd_docs/_mf6io/
+ modflow6/.build_rtd_docs/_static/
- # trigger rtd if "rtd_build" job was successful
rtd_trigger:
name: rtd-trigger
needs: rtd_build
- runs-on: ubuntu-latest
-
+ runs-on: ubuntu-22.04
if: github.repository_owner == 'MODFLOW-USGS' && github.event_name == 'push'
steps:
- name: Checkout repo
- uses: actions/checkout@v2.3.4
+ uses: actions/checkout@v3
- name: Trigger RTDs build on push to repo branches
- uses: dfm/rtds-action@v1.0.3
+ uses: dfm/rtds-action@v1
with:
webhook_url: ${{ secrets.MF6_RTDS_WEBHOOK_URL }}
webhook_token: ${{ secrets.MF6_RTDS_WEBHOOK_TOKEN }}
commit_ref: ${{ github.ref }}
doxygen_build:
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
env:
working-directory: .build_rtd_docs
distribution-directory: distribution
@@ -116,7 +157,7 @@ jobs:
shell: bash -l {0}
steps:
- - uses: actions/checkout@v2.3.4
+ - uses: actions/checkout@v3
- name: Print branch name
run: |
@@ -139,7 +180,7 @@ jobs:
- name: update MODFLOW 6 version
run: |
- python make_release.py
+ python update_version.py
working-directory: ${{env.distribution-directory}}
- name: update MODFLOW 6 version in Doxyfile
diff --git a/.github/workflows/large.yml b/.github/workflows/large.yml
index ed23cc21e4f..ed253c76369 100644
--- a/.github/workflows/large.yml
+++ b/.github/workflows/large.yml
@@ -52,7 +52,7 @@ jobs:
uses: actions/cache@v3
with:
path: modflow6-examples/examples
- key: modflow6-examples-${{ hashFiles('modflow6-examples/data/**') }}
+ key: modflow6-examples-${{ hashFiles('modflow6-examples/scripts/**') }}
- name: Install extra Python packages
if: matrix.repo == 'examples' && steps.cache-examples.outputs.cache-hit != 'true'
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
new file mode 100644
index 00000000000..be852a50021
--- /dev/null
+++ b/.github/workflows/release.yml
@@ -0,0 +1,591 @@
+name: MODFLOW 6 release
+on:
+ push:
+ branches:
+ - master
+ - v*
+env:
+ FC: ifort
+jobs:
+ build:
+ name: Build binaries (${{ matrix.os }})
+ if: ${{ github.event_name != 'push' || github.ref_name != 'master' }}
+ runs-on: ${{ matrix.os }}
+ strategy:
+ fail-fast: false
+ matrix:
+ include:
+ - os: ubuntu-22.04
+ ostag: linux
+ - os: macos-12
+ ostag: mac
+ - os: windows-2022
+ ostag: win64
+ defaults:
+ run:
+ shell: bash -l {0}
+ steps:
+ - name: Checkout modflow6
+ uses: actions/checkout@v3
+ with:
+ path: modflow6
+
+ - name: Cache binaries
+ if: ${{ contains(github.ref_name, 'rc') }}
+ id: cache-bin
+ uses: actions/cache@v3
+ with:
+ key: bin-${{ runner.os }}
+ path: modflow6/bin
+
+ - name: Setup Micromamba
+ if: ${{ !(contains(github.ref_name, 'rc')) || steps.cache-bin.outputs.cache-hit != 'true' }}
+ uses: mamba-org/provision-with-micromamba@main
+ with:
+ environment-file: modflow6/environment.yml
+ cache-downloads: true
+ cache-env: true
+
+ - name: Setup Intel Fortran
+ if: ${{ !(contains(github.ref_name, 'rc')) || steps.cache-bin.outputs.cache-hit != 'true' }}
+ uses: modflowpy/install-intelfortran-action@v1
+
+ - name: Fix Micromamba path (Windows)
+ if: ${{ runner.os == 'Windows' && (!(contains(github.ref_name, 'rc')) || steps.cache-bin.outputs.cache-hit != 'true') }}
+ shell: pwsh
+ run: |
+ # https://github.com/modflowpy/install-intelfortran-action#conda-scripts
+ $mamba_bin = "C:\Users\runneradmin\micromamba-root\envs\modflow6\Scripts"
+ echo $mamba_bin | Out-File -FilePath $env:GITHUB_PATH -Encoding utf8 -Append
+
+ - name: Update version
+ if: ${{ !(contains(github.ref_name, 'rc')) || steps.cache-bin.outputs.cache-hit != 'true' }}
+ working-directory: modflow6/distribution
+ run: |
+ ref="${{ github.ref_name }}"
+ ver="${ref%"rc"}"
+ if [ "$ref" == "$ver" ]; then
+ python update_version.py -v "${ver#"v"}" --approve
+ else
+ python update_version.py -v "${ver#"v"}"
+ fi
+
+ # check src/Utilities/version.f90 IDEVELOPMODE setting
+ cat ../src/Utilities/version.f90
+
+ - name: Build binaries
+ if: ${{ runner.os != 'Windows' && (!(contains(github.ref_name, 'rc')) || steps.cache-bin.outputs.cache-hit != 'true') }}
+ working-directory: modflow6
+ run: |
+ meson setup builddir -Ddebug=false --prefix=$(pwd) --libdir=bin
+ meson install -C builddir
+ meson test --verbose --no-rebuild -C builddir
+
+ - name: Build binaries (Windows)
+ if: ${{ runner.os == 'Windows' && (!(contains(github.ref_name, 'rc')) || steps.cache-bin.outputs.cache-hit != 'true') }}
+ working-directory: modflow6
+ shell: pwsh
+ run: |
+ meson setup builddir -Ddebug=false --prefix=$(pwd) --libdir=bin
+ meson install -C builddir
+ meson test --verbose --no-rebuild -C builddir
+
+ - name: Upload binaries
+ uses: actions/upload-artifact@v3
+ with:
+ name: bin-${{ runner.os }}
+ path: modflow6/bin
+
+ docs:
+ name: Build docs
+ if: ${{ github.event_name != 'push' || github.ref_name != 'master' }}
+ needs: build
+ runs-on: ubuntu-22.04
+ defaults:
+ run:
+ shell: bash -l {0}
+ steps:
+
+ - name: Checkout modflow6
+ uses: actions/checkout@v3
+ with:
+ path: modflow6
+
+ - name: Checkout modflow6-examples
+ uses: actions/checkout@v3
+ with:
+ repository: MODFLOW-USGS/modflow6-examples
+ path: modflow6-examples
+
+ - name: Install TeX Live
+ run: |
+ sudo apt-get update
+ sudo apt install texlive-latex-extra texlive-science texlive-font-utils texlive-fonts-recommended texlive-fonts-extra
+
+ - name: Checkout usgslatex
+ uses: actions/checkout@v3
+ with:
+ repository: MODFLOW-USGS/usgslatex
+ path: usgslatex
+
+ - name: Install USGS LaTeX style files and Univers font
+ working-directory: usgslatex/usgsLaTeX
+ run: |
+ sudo ./install.sh --all-users
+
+ - name: Setup Micromamba
+ uses: mamba-org/provision-with-micromamba@main
+ with:
+ environment-file: modflow6/environment.yml
+ cache-downloads: true
+ cache-env: true
+
+ - name: Setup Intel Fortran
+ uses: modflowpy/install-intelfortran-action@v1
+
+ - name: Fix Micromamba path (Windows)
+ if: runner.os == 'Windows'
+ shell: pwsh
+ run: |
+ # https://github.com/modflowpy/install-intelfortran-action#conda-scripts
+ $mamba_bin = "C:\Users\runneradmin\micromamba-root\envs\modflow6\Scripts"
+ echo $mamba_bin | Out-File -FilePath $env:GITHUB_PATH -Encoding utf8 -Append
+
+ - name: Install extra Python packages
+ working-directory: modflow6-examples/etc
+ run: pip install -r requirements.pip.txt
+
+ - name: Build example models
+ working-directory: modflow6-examples/etc
+ run: |
+ pytest -v -n auto ci_build_files.py
+ ls -lh ../examples/
+
+ - name: Update version
+ working-directory: modflow6/distribution
+ run: |
+ ref="${{ github.ref_name }}"
+ ver="${ref%"rc"}"
+ # if tag doesn't end with 'rc' the release is approved
+ if [ "$ref" == "$ver" ]; then
+ python update_version.py -v "${ver#"v"}" --approve
+ else
+ python update_version.py -v "${ver#"v"}"
+ fi
+
+ echo "DISTNAME=mf${ref#"v"}" >> $GITHUB_ENV
+
+ - name: Create directory structure
+ run: |
+ # Create a skeleton of the distribution's folder structure to include in the docs
+ mkdir -p "$DISTNAME/doc"
+ mkdir "$DISTNAME/make"
+ mkdir "$DISTNAME/msvs"
+ mkdir "$DISTNAME/srcbmi"
+ cp modflow6/code.json "$DISTNAME/code.json"
+ cp modflow6/meson.build "$DISTNAME/meson.build"
+ cp -r modflow6-examples/examples "$DISTNAME"
+ cp -r modflow6/src "$DISTNAME"
+ cp -r modflow6/utils "$DISTNAME"
+
+ # create LaTeX file describing the folder structure
+ cd modflow6/doc/ReleaseNotes
+ python mk_folder_struct.py -dp "${{ github.workspace }}/$DISTNAME"
+
+ - name: Download artifacts
+ uses: actions/download-artifact@v3
+ with:
+ name: bin-${{ runner.os }}
+ path: bin
+
+ - name: Build documentation
+ env:
+ GITHUB_TOKEN: ${{ github.token }}
+ run: |
+ chmod +x bin/mf6
+ chmod +x bin/mf5to6
+ chmod +x bin/zbud6
+ python modflow6/distribution/build_docs.py -b bin -o doc
+
+ - name: Upload documentation
+ uses: actions/upload-artifact@v3
+ with:
+ name: doc
+ path: doc
+
+ dist:
+ name: Build distribution (${{ matrix.os }})
+ if: ${{ github.event_name != 'push' || github.ref_name != 'master' }}
+ needs: docs
+ runs-on: ${{ matrix.os }}
+ strategy:
+ fail-fast: false
+ matrix:
+ include:
+ - os: ubuntu-22.04
+ ostag: linux
+ - os: macos-12
+ ostag: mac
+ - os: windows-2022
+ ostag: win64
+ defaults:
+ run:
+ shell: bash -l {0}
+ steps:
+ - name: Checkout modflow6
+ uses: actions/checkout@v3
+ with:
+ path: modflow6
+
+ - name: Checkout modflow6-examples
+ uses: actions/checkout@v3
+ with:
+ repository: MODFLOW-USGS/modflow6-examples
+ path: modflow6-examples
+
+ - name: Setup Micromamba
+ uses: mamba-org/provision-with-micromamba@main
+ with:
+ environment-file: modflow6/environment.yml
+ cache-downloads: true
+ cache-env: true
+
+ - name: Setup Intel Fortran
+ uses: modflowpy/install-intelfortran-action@v1
+
+ - name: Fix Micromamba path (Windows)
+ if: runner.os == 'Windows'
+ shell: pwsh
+ run: |
+ # https://github.com/modflowpy/install-intelfortran-action#conda-scripts
+ $mamba_bin = "C:\Users\runneradmin\micromamba-root\envs\modflow6\Scripts"
+ echo $mamba_bin | Out-File -FilePath $env:GITHUB_PATH -Encoding utf8 -Append
+
+ - name: Install extra Python packages
+ working-directory: modflow6-examples/etc
+ run: |
+ pip install -r requirements.pip.txt
+
+ - name: Build example models
+ working-directory: modflow6-examples/etc
+ run: |
+ pytest -v -n auto ci_build_files.py
+ ls -lh ../examples/
+
+ - name: Update version
+ working-directory: modflow6/distribution
+ run: |
+ ref="${{ github.ref_name }}"
+ ver="${ref%"rc"}"
+ # if tag doesn't end with 'rc' the release is approved
+ if [ "$ref" == "$ver" ]; then
+ python update_version.py -v "${ver#"v"}" --approve
+ else
+ python update_version.py -v "${ver#"v"}"
+ fi
+
+ echo "DISTNAME=mf${ref#"v"}" >> $GITHUB_ENV
+
+ - name: Download artifacts
+ uses: actions/download-artifact@v3
+ with:
+ path: ${{ env.DISTNAME }}
+
+ - name: Select artifacts for OS
+ run: |
+ # move binaries for current OS to top level bin
+ # directory and remove executables for other OS
+ mv "$DISTNAME/bin-${{ runner.os }}" "$DISTNAME/bin"
+ rm -rf "$DISTNAME"/bin-*
+
+ - name: Build distribution
+ env:
+ GITHUB_TOKEN: ${{ github.token }}
+ run: |
+ if [ "${{ runner.os }}" == "Windows" ]; then
+ # fix MSVC linker path on Windows
+ export PATH="/C/Program Files/Microsoft Visual Studio/2022/Enterprise/VC/Tools/MSVC/14.33.31629/bin/Hostx64/x64":$PATH
+ fi
+
+ # build dist folder
+ python modflow6/distribution/build_dist.py -o "$DISTNAME" -e modflow6-examples
+
+ # rename PDF docs
+ mv "$DISTNAME/doc/ReleaseNotes.pdf" "$DISTNAME/doc/release.pdf"
+ mv "$DISTNAME/doc/converter_mf5to6.pdf" "$DISTNAME/doc/mf5to6.pdf"
+
+ # set zip name
+ if [ "${{ runner.os }}" == "Windows" ]; then
+ zip_name="${{ env.DISTNAME }}"
+ else
+ zip_name="${{ env.DISTNAME }}_${{ matrix.ostag }}"
+ fi
+ echo "ZIP_NAME=$zip_name" >> $GITHUB_ENV
+
+ # - name: Set execute permissions
+ # if: runner.os != 'Windows'
+ # run: |
+ # chmod +x "$DISTNAME/bin/mf6"
+ # chmod +x "$DISTNAME/bin/mf5to6"
+ # chmod +x "$DISTNAME/bin/zbud6"
+ # chmod +x "$DISTNAME/examples/runall.sh"
+ # for f in "$DISTNAME/examples"/*/run.sh; do
+ # chmod +x "$f"
+ # done
+
+ - name: Zip distribution
+ if: runner.os != 'Windows'
+ run: |
+ zip -r ${{ env.ZIP_NAME }}.zip \
+ ${{ env.DISTNAME }}/bin \
+ ${{ env.DISTNAME }}/src \
+ ${{ env.DISTNAME }}/srcbmi \
+ ${{ env.DISTNAME }}/doc \
+ ${{ env.DISTNAME }}/examples \
+ ${{ env.DISTNAME }}/make \
+ ${{ env.DISTNAME }}/msvs \
+ ${{ env.DISTNAME }}/utils \
+ ${{ env.DISTNAME }}/code.json \
+ ${{ env.DISTNAME }}/meson.build \
+ -x '*.DS_Store' \
+ -x '*libmf6.lib' \
+ -x '*idmloader*' \
+ -x '*pymake*' \
+ -x '*obj_temp*' \
+ -x '*mod_temp*'
+
+ - name: Zip distribution (Windows)
+ if: runner.os == 'Windows'
+ run: |
+ 7z a -tzip ${{ env.ZIP_NAME }}.zip \
+ ${{ env.DISTNAME }}/bin \
+ ${{ env.DISTNAME }}/src \
+ ${{ env.DISTNAME }}/srcbmi \
+ ${{ env.DISTNAME }}/doc \
+ ${{ env.DISTNAME }}/examples \
+ ${{ env.DISTNAME }}/make \
+ ${{ env.DISTNAME }}/msvs \
+ ${{ env.DISTNAME }}/utils \
+ ${{ env.DISTNAME }}/code.json \
+ ${{ env.DISTNAME }}/meson.build \
+ -xr!libmf6.lib \
+ -xr!idmloader \
+ -xr!pymake \
+ -xr!obj_temp \
+ -xr!mod_temp
+
+ # validate after zipping to avoid accidentally changing the distribution files
+ - name: Check distribution
+ run: pytest -v -s modflow6/distribution/check_dist.py -P ${{ env.DISTNAME }}
+
+ - name: Upload distribution
+ uses: actions/upload-artifact@v3
+ with:
+ name: ${{ env.ZIP_NAME }}
+ path: ${{ env.ZIP_NAME }}.zip
+
+ # actions/upload-artifact doesn't preserve execute permissions
+ # - name: Upload distribution (Windows)
+ # if: runner.os == 'Windows'
+ # uses: actions/upload-artifact@v3
+ # with:
+ # name: ${{ env.ZIP_NAME }}
+ # path: |
+ # ${{ env.DISTNAME }}/bin
+ # ${{ env.DISTNAME }}/src
+ # ${{ env.DISTNAME }}/srcbmi
+ # ${{ env.DISTNAME }}/doc
+ # ${{ env.DISTNAME }}/examples
+ # ${{ env.DISTNAME }}/make
+ # ${{ env.DISTNAME }}/msvs
+ # ${{ env.DISTNAME }}/utils
+ # ${{ env.DISTNAME }}/code.json
+ # ${{ env.DISTNAME }}/meson.build
+ # !${{ env.DISTNAME }}/utils/idmloader
+ # !${{ env.DISTNAME }}/bin/libmf6.lib
+ # !${{ env.DISTNAME }}/**/pymake
+ # !${{ env.DISTNAME }}/**/.DS_Store
+ # !${{ env.DISTNAME }}/**/obj_temp
+ # !${{ env.DISTNAME }}/**/mod_temp
+
+ - name: Upload release notes
+ if: runner.os == 'Linux'
+ uses: actions/upload-artifact@v3
+ with:
+ name: release_notes
+ path: ${{ env.DISTNAME }}/doc/release.pdf
+
+ pr:
+ name: Create release PR
+ if: ${{ github.event_name == 'push' && !(contains(github.ref_name, 'rc')) }}
+ needs: dist
+ runs-on: ubuntu-22.04
+ permissions:
+ contents: write
+ pull-requests: write
+ defaults:
+ run:
+ shell: bash -l {0}
+ steps:
+ - name: Checkout modflow6
+ uses: actions/checkout@v3
+
+ - name: Setup Micromamba
+ uses: mamba-org/provision-with-micromamba@main
+ with:
+ cache-downloads: true
+ cache-env: true
+
+ - name: Update version
+ working-directory: distribution
+ run: |
+ # update version files
+ ref="${{ github.ref_name }}"
+ ver="${ref#"v"}"
+ if [ "$ref" == "$ver" ]; then
+ python update_version.py -v "$ver" --approve
+ else
+ python update_version.py -v "$ver"
+ fi
+
+ # update formatting
+ fprettify -c ../.fprettify.yaml ../src/Utilities/version.f90
+
+ # commit and push
+ git config core.sharedRepository true
+ git config user.name "github-actions[bot]"
+ git config user.email "41898282+github-actions[bot]@users.noreply.github.com"
+ git add -A
+ git commit -m "ci(release): update version to $ver"
+ git push origin "$ref"
+
+ - name: Create pull request
+ env:
+ GITHUB_TOKEN: ${{ github.token }}
+ run: |
+ ref="${{ github.ref_name }}"
+ ver="${ref#"v"}"
+ body='
+ # MODFLOW '$ver' release
+
+ To approve this release, merge this pull request into `master`. This will trigger a final CI job to:
+ 1) create a tagged GitHub release and upload assets (OS-specific distributions and release notes)
+ 2) open a PR updating `develop` from `master`, resetting version files, and setting `IDEVELOPMODE=1`
+ '
+ gh pr create -B "master" -H "$ref" --title "Release $ver" --draft --body "$body"
+
+ release:
+ name: Release and reset
+ if: ${{ github.event_name == 'push' && github.ref_name == 'master' }}
+ runs-on: ubuntu-22.04
+ defaults:
+ run:
+ shell: bash -l {0}
+ steps:
+ - name: Checkout modflow6
+ uses: actions/checkout@v3
+ with:
+ path: modflow6
+
+ - name: Setup Micromamba
+ uses: mamba-org/provision-with-micromamba@main
+ with:
+ environment-file: modflow6/environment.yml
+ cache-downloads: true
+ cache-env: true
+
+ # this isn't necessary, GitHub creates "Source code (zip)" and "Source code (tar.gz)" assets automatically
+ # - name: Archive source code (zip)
+ # uses: thedoctor0/zip-release@main
+ # with:
+ # path: modflow6
+ # type: zip
+ # filename: source.zip
+ # exclusions: '*.git/'
+
+ # - name: Archive source code (tar)
+ # uses: thedoctor0/zip-release@main
+ # with:
+ # path: modflow6
+ # type: tar
+ # filename: source.tar.gz
+ # exclusions: '*.git/'
+
+ # - name: Rename source code
+ # run: |
+ # mv source.zip "Source code (zip)"
+ # mv source.tar.gz "Source code (tar.gz)"
+
+ - name: Download artifacts
+ uses: dawidd6/action-download-artifact@v2
+
+ - name: Detect version
+ working-directory: modflow6
+ env:
+ GITHUB_TOKEN: ${{ github.token }}
+ run: |
+ # get updated version
+ ver=$(python distribution/update_version.py --get)
+ echo "VERSION=$ver" >> $GITHUB_ENV
+
+ # - name: Create release
+ # uses: marvinpinto/action-automatic-releases@latest
+ # with:
+ # repo_token: ${{ github.token }}
+ # automatic_release_tag: ${{ env.VERSION }}
+ # prerelease: false
+ # title: "MODFLOW ${{ env.VERSION }}"
+ # files: |
+ # # double-nested because actions/upload-artifact has no way of
+ # # disabling compression or detecting files already compressed
+ # mf*/mf*.zip
+ # release_notes/release.pdf
+
+ - name: Create release
+ working-directory: modflow6
+ env:
+ GITHUB_TOKEN: ${{ github.token }}
+ run: |
+ # get updated version
+ ver=$(python distribution/update_version.py --get)
+ echo "VERSION=$ver" >> $GITHUB_ENV
+
+ # create release
+ title="MODFLOW $ver"
+ notes='
+ This is the approved USGS MODFLOW '$ver' release.
+
+ *Insert citation here*
+
+ Visit the USGS "MODFLOW and Related Programs" site for information on MODFLOW 6 and related software: https://doi.org/10.5066/F76Q1VQV
+ '
+ gh release create "$ver" ../mf*/mf*.zip ../release_notes/release.pdf --target master --title "$title" --notes "$notes" --draft --latest
+
+ - name: Reinitialize develop
+ working-directory: modflow6
+ env:
+ GITHUB_TOKEN: ${{ github.token }}
+ run: |
+ # create reset branch from master
+ reset_branch="post-release-$VERSION-reset"
+ git checkout master
+ git switch -c $reset_branch
+
+ # reset version files and IDEVELOPMODE
+ python distribution/update_version.py
+
+ # commit and push to reset branch
+ git config core.sharedRepository true
+ git config user.name "github-actions[bot]"
+ git config user.email "41898282+github-actions[bot]@users.noreply.github.com"
+ git add -A
+ git commit -m "ci(release): reinitialize develop for next release"
+ git push -u origin $reset_branch
+
+ # create PR into develop
+ body='
+ # Reinitialize MODFLOW '$VERSION' for development
+
+ Updates the `develop` branch from `master` following an approved release. Also resets version files, setting `IDEVELOPMODE` to `1`.
+ '
+ gh pr create -B "develop" -H "$reset_branch" --title "Reinitialize develop after release $VERSION" --draft --body "$body"
diff --git a/.gitignore b/.gitignore
index c15a4fa32c6..56b28c72ec8 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,3 +1,4 @@
+**.env
*.exe
*.lib
*.dll
@@ -73,6 +74,7 @@ autotest/notebooks/
distribution/temp/
distribution/temp_zip/
distribution/*.zip
+dist
mod_temp/
obj_temp/
@@ -128,3 +130,6 @@ modflow6.code-workspace
# unittests
unittests/
+
+**/__pycache__
+**/.benchmarks
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 573acd3d821..96e163a9268 100755
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -141,18 +141,13 @@ To ensure consistency throughout the source code, keep these rules in mind as yo
## Format Rules
Fortran souce code format rules are met by running the
-[fprettify formatter](https://github.com/pseewald/fprettify) while specifying the [MODFLOW 6
-fprettify configuration](https://github.com/MODFLOW-USGS/modflow6/blob/develop/distribution/.fprettify.yaml).
-The tool can be run from the command line or integrated into a
-[VSCode](https://github.com/MODFLOW-USGS/modflow6/blob/develop/.vscode/README.md) or Visual Studio environment.
+[fprettify formatter](https://github.com/pseewald/fprettify) while specifying the [MODFLOW 6 fprettify configuration](https://github.com/MODFLOW-USGS/modflow6/blob/develop/distribution/.fprettify.yaml). `fprettify` is included in the Conda `environment.yml` and can be run on the command line or integrated into a [VSCode](https://github.com/MODFLOW-USGS/modflow6/blob/develop/.vscode/README.md) or Visual Studio environment.
-The format configuration file reflects the current minimum standard for Fortran source
-formatting. The main goal, however, is consistent and readable Fortran source code and as such
-pay particular attention to consistency within and across files. As the formatting tool may at
-times shift code in unexpected ways, check for formatting consistency after running.
+The configuration file reflects the current minimum standard for Fortran source
+formatting. The main goal, however, is consistent and readable Fortran source code and as such pay particular attention to consistency within and across files. As the formatting tool may at times shift code in unexpected ways, check for formatting consistency after running.
An example run of the command line tool from the MODFLOW 6 root directory:
-`fprettify -c ./distribution/.fprettify.yaml ./utils/zonebudget/src/zbud6.f90`
+`fprettify -c .fprettify.yaml ./utils/zonebudget/src/zbud6.f90`
When run in this way, the tool will modify the file in place and generate no output if successful. The
tool will write stderr warnings when unable to complete formatting. In general, these warnings (e.g.
diff --git a/autotest/test_cli.py b/autotest/test_cli.py
new file mode 100644
index 00000000000..a8e8f02aca6
--- /dev/null
+++ b/autotest/test_cli.py
@@ -0,0 +1,19 @@
+import subprocess
+from pathlib import Path
+
+_project_root_path = Path(__file__).parent.parent
+_bin_path = _project_root_path / "bin"
+
+
+def test_cli_version():
+ output = ' '.join(subprocess.check_output([str(_bin_path / 'mf6'), "-v"]).decode().split())
+ assert output.startswith("mf6:")
+ assert output.lower().count("release") == 1
+ # assert output.lower().count("candidate") <= 1
+
+ print(output)
+
+ version = output.lower().rpartition(":")[2].rpartition("release")[0].strip()
+ v_split = version.split(".")
+ assert len(v_split) == 3
+ assert all(s.isdigit() for s in v_split)
diff --git a/distribution/README.md b/distribution/README.md
new file mode 100644
index 00000000000..ad36ec0a332
--- /dev/null
+++ b/distribution/README.md
@@ -0,0 +1,196 @@
+# Distributing MODFLOW 6
+
+This folder contains scripts to automate MODFLOW 6 distribution tasks.
+
+
+
+
+- [Overview](#overview)
+- [Requirements](#requirements)
+- [Testing](#testing)
+- [Release procedures](#release-procedures)
+ - [Preparing a nightly release](#preparing-a-nightly-release)
+ - [Preparing an official release](#preparing-an-official-release)
+ - [Updating version info](#updating-version-info)
+ - [Building makefiles](#building-makefiles)
+ - [Building example models](#building-example-models)
+ - [Benchmarking example models](#benchmarking-example-models)
+ - [Building documentation](#building-documentation)
+ - [Building the distribution archive](#building-the-distribution-archive)
+- [Release automation](#release-automation)
+ - [Nightly builds](#nightly-builds)
+ - [Official releases](#official-releases)
+
+
+
+## Overview
+
+This document describes release procedures for MODFLOW 6. Manually preparing a release involves running scripts in the `distribution` directory in a particular order:
+
+1. Update version info: `update_version.py`
+2. Run benchmarks: `benchmark.py`
+3. Build documentation: `build_docs.py`
+4. Build distribution: `build_dist.py`
+
+This process is automated in the `.github/workflows/release.yml` workflow.
+
+The `build_dist.py` script is lazy — benchmarks, example models and documentation artifacts are downloaded via the GitHub API if available, and only re-created if none exist or the `--force` (`-f`) flag is provided. This allows the release workflow to consume artifacts previously created by other workflow runs, reducing the time needed to create and publish a release.
+
+## Requirements
+
+This document assumes a MODFLOW 6 development environment has been configured as per the [developer documentation](../DEVELOPER.md), including a Fortran compiler (either `ifort` or `gfortran`) as well as a Conda environment as specified in `environment.yml`. Official distributions are currently prepared with Intel Fortran (`ifort`).
+
+## Testing
+
+Each script in `distribution/` contains its own tests. To run them, run `pytest` from the `distribution/` folder. The tests will not be discovered if `pytest` is run from a different location, as the scripts in this folder are not named `test_*.py` and are only discoverable by virtue of the patterns provided in `distribution/pytest.ini`. The tests use temporary directories where possible and revert modifications to tracked files on teardown.
+
+**Note:** the tests clean up by reverting changes to files in the following locations:
+
+- `doc/`
+- `make`
+- `utils/**/make/`
+
+Make sure you don't have any uncommitted changes in these locations before running the tests.
+
+**Note:** to avoid contested file access, the tests **should not be run in parallel** with `pytest-xdist`.
+
+There is a small additional suite of tests that can be used to validate a release distribution folder after it is built: `check_dist.py`. These tests are run as part of the release workflow.
+
+## Release procedures
+
+MODFLOW 6 releases come in two flavors:
+
+- nightly development builds
+- full/approved distributions
+
+Development builds are created nightly from the tip of the `develop` branch and released from the [`MODFLOW-USGS/modflow6-nightly-build` repository](https://github.com/MODFLOW-USGS/modflow6-nightly-build). Development distributions contain only MODFLOW 6 input/output documentation and core executables and libraries:
+
+- `mf6`: MODFLOW 6 executable
+- `zbud6`: Zonebudget executable
+- `mf5to6`: MODFLOW 5 to 6 converter executable
+- `libmf6`: MODFLOW 6 dynamic library
+
+Full distributions, on the other hand, contain the items listed above, as well as:
+
+- Meson build files
+- Fortran source code
+- MODFLOW 6 example models
+- MODFLOW 6 makefiles
+- MODFLOW 6 Visual Studio files
+- more extensive documentation, including:
+ - MODFLOW 6 input/output docs
+ - MODFLOW 6 example model docs
+ - MODFLOW 6 release notes
+ - MODFLOW 6 supplementary technical information
+ - docs for various MODFLOW 6 features and packages
+ - docs for `mf5to6` and `zbud6`
+
+### Preparing a nightly release
+
+Development releases are built and [posted nightly on the `MODFLOW-USGS/modflow6-nightly-build` repository](https://github.com/MODFLOW-USGS/modflow6-nightly-build/releases). Release assets include:
+
+- platform-specific distributions containing only executables `mf6`, `zbud6`, `mf5to6` and library `libmf6`
+- MODFLOW 6 input/output documentation
+
+The `build_dist.py` script is used to create both development and full distributions. To create a development distribution zipfile in the default location (`/distribution/`), run the script with the `--development` (short `-d`) flag:
+
+```shell
+python build_dist.py -d
+```
+
+The script has several optional command line arguments:
+
+- `--build-path`: path to the build workspace, defaults to `/builddir`
+- `--output-path (-o)`: path to create a distribution zipfile, defaults to `/distribution/`
+- `--examples-repo-path (-e)`: path to the [`MODFLOW-USGS/modflow6-examples`](https://github.com/MODFLOW-USGS/modflow6-examples) repository, defaults to `modflow6-examples` side-by-side with project root
+
+Default paths are resolved relative to the script's location on the filesystem, *not* the current working directory, so the script can be run from `distribution/`, from the project root, or from anywhere else. (This is true of all scripts in the `distribution/` directory.)
+
+### Preparing an official release
+
+To prepare an official release for distribution, the steps are as follows:
+
+#### Updating version info
+
+Version information is stored primarily in `version.txt` in the project root, as well as in several other files in the repository.
+
+The `update_version.py` script updates files containing version information. First a file lock is acquired, then `version.txt` is updated, and changes are propagated to other files in the repository, then the lock is released.
+
+The version can be specified with the `--version` (short `-v`) option. For instance, to set version to `6.4.1`, run from the `distribution/` folder:
+
+```shell
+python update_version.py -v 6.4.1
+```
+
+If no `--version` is provided, the version is not changed, only the build timestamp.
+
+#### Building makefiles
+
+The `build_makefiles.py` script is used to rewrite makefiles after Fortran source files have been added, removed, or renamed. Up-to-date makefiles must be generated for inclusion in a distribution. To build makefiles, run:
+
+```shell
+python build_makefiles.py
+```
+
+#### Building example models
+
+MODFLOW 6 [example models](https://github.com/MODFLOW-USGS/modflow6-examples) are bundled with official releases. Example models must be built and run to generate plots and tables before documentation can be generated. The `release.yml` workflow attempts to download the latest release from the examples repository, only re-building and re-running example models if no such release is available. See the examples repository for more information on preparing example models.
+
+#### Benchmarking example models
+
+MODFLOW 6 documentation includes a performance evaluation comparing the current version against the last official release. Benchmarks must run before a release can be prepared. Benchmarks run as a component of the `docs.yml` CI workflow — `release.yml` attempts to download benchmark results if available, only re-running them if necessary.
+
+The `benchmark.py` script benchmarks the current development version of MODFLOW 6 against the latest release rebuilt in development mode, using the models from the `MODFLOW-USGS/modflow6-examples` repository. Paths to pre-built binaries for both versions can be provided via the `--current-bin-path` (short `-c`) and `--previous-bin-path` (short `-p`) command line options. If bin paths are not provided, executables are rebuilt in the default locations:
+
+`/bin`: current development version
+`/bin/rebuilt`: previous version
+
+The examples repository must first be installed and prepared as described above. Its path may be explicitly provided with the `--examples-repo-path` (short `-e`) option. If no path is provided, the repository is assumed to be named `modflow6-examples` and live side-by-side with the `modflow6` repository on the filesystem.
+
+The directory to write benchmark results can be specified with `--output-path` (short `-o`). If no such option is provided, results are written to the current working directory.
+
+```shell
+python benchmark.py -e ../modflow6-examples -o .benchmarks
+```
+
+The above will write results to a markdown file `.benchmarks/run-time-comparison.md` relative to the project root.
+
+#### Building documentation
+
+Extensive documentation is bundled with official MODFLOW 6 releases. MODFLOW 6 documentation is written in LaTeX. Some LaTeX files (in particular for MODFLOW 6 input/output documentation) are automatically generated from DFN files. The `release.yml` workflow first runs `update_version.py` to update version strings to be substituted into the docs, then runs `build_docs.py` to regenerate LaTeX files where necessary, download benchmark results (and convert the Markdown results file to LaTeX), download publications hosted on the USGS website, and finally convert LaTeX to PDFs.
+
+Manually building MODFLOW 6 documentation requires additional Python dependencies specified in `build_rtd_docs/requirements.rtd.txt`. Styles defined in the [`MODFLOW-USGS/usgslatex`](https://github.com/MODFLOW-USGS/usgslatex) repository are also required. (See that repository's `README` for installation instructions or this repo's [`../.github/workflows/docs.yml`](../.github/workflows/docs.yml) CI workflow for an example.) A Docker image with documentation dependencies pre-installed is also available on Docker Hub: [`wbonelli/usgslatex`](https://hub.docker.com/r/wbonelli/usgslatex). This can be useful for building documentation on a system without a LaTeX installation.
+
+#### Building the distribution archive
+
+After each step above is complete, the `build_dist.py` script can be used (without the `--development` flag) to bundle MODFLOW 6 official release artifacts for distribution. See [the `release.yml` workflow](../.github/workflows/release.yml) for a complete example of how to build a distribution archive.
+
+## Release automation
+
+Both nightly builds and official distributions are built automatically with GitHub Actions.
+
+### Nightly builds
+
+As mentioned, development releases are automatically built and posted nightly on the [`MODFLOW-USGS/modflow6-nightly-build`](https://github.com/MODFLOW-USGS/modflow6-nightly-build) repository.
+
+### Official releases
+
+The procedure above to prepare an official release is reproduced in `.github/workflows/release.yml`. This workflow is configured to run whenever tags are pushed to the `MODFLOW-USGS/modflow6` repository.
+
+To release a new version of MODFLOW 6:
+
+1. Create a release candidate branch from the tip of `develop`. The branch's name must begin with `v` followed by the version number, and end with `rc`. The branch name must end with `rc` for release candidate branches, e.g. `v6.4.0rc`.
+2. Push the branch to the `MODFLOW-USGS/modflow6` repository. This will trigger a dry run build, in which the distribution archive is constructed with development mode binaries (`IDEVELOPMODE` set to 1) and no release is posted to the repository. Releases are cached for easier debugging on dry runs.
+3. If the release candidate build passes inspection, rename the branch (or create another) without the trailing `rc`, and push it. This will trigger another build, in which the distribution archive is constructed with production mode binaries. After binaries and docs are rebuilt, a draft PR will automatically be created on the `MODFLOW-USGS/modflow6` repository's `master` branch. To approve and finalize the release, this PR can be merged into `master`. This will trigger a final CI job to tag the revision to `master`, post a draft release to the `MODFLOW-USGS/modflow6` repository, and create another PR updating the `develop` branch from `master`, resetting version files, and setting `IDEVELOPMODE` back to 1.
+
+**Note**: version tags posted to the MODFLOW 6 repository follow [semver](https://semver.org/) formatting (`major.minor.patch`) *without* an initial `v`.
+
+Release notes use the following format convention:
+
+```
+This is the approved USGS MODFLOW release.
+
+<authors>, <year>, MODFLOW 6 Modular Hydrologic Model version <version>: U.S. Geological Survey Software Release, <date>, <doi>
+
+Visit the USGS "MODFLOW and Related Programs" site for information on MODFLOW 6 and related software: https://doi.org/10.5066/F76Q1VQV
+```
\ No newline at end of file
diff --git a/distribution/benchmark.py b/distribution/benchmark.py
new file mode 100644
index 00000000000..547de7a3c66
--- /dev/null
+++ b/distribution/benchmark.py
@@ -0,0 +1,454 @@
+import argparse
+import os
+import shutil
+import subprocess
+import sys
+import textwrap
+from multiprocessing import Pool
+from os import PathLike
+from pathlib import Path
+from typing import List, Tuple
+
+import flopy
+import pymake
+import pytest
+from modflow_devtools.build import meson_build
+from modflow_devtools.misc import get_model_paths
+
+from utils import get_project_root_path
+
+_verify = False
+_project_root_path = get_project_root_path()
+_examples_repo_path = _project_root_path.parent / "modflow6-examples"
+_build_path = _project_root_path / "builddir"
+_bin_path = _project_root_path / "bin"
+_github_repo = "MODFLOW-USGS/modflow6"
+_markdown_file_name = "run-time-comparison.md"
+_is_windows = sys.platform.lower() == "win32"
+_app_ext = ".exe" if _is_windows else ""
+_soext = ".dll" if _is_windows else ".so"
+
+
+def download_previous_version(output_path: PathLike) -> Tuple[str, Path]:
+ output_path = Path(output_path).expanduser().absolute()
+ version = pymake.repo_latest_version(github_repo=_github_repo, verify=_verify)
+ url = (
+ f"https://github.com/{_github_repo}"
+ + f"/releases/download/{version}/mf{version}.zip"
+ )
+ pymake.download_and_unzip(
+ url,
+ pth=str(output_path),
+ verbose=True,
+ verify=_verify,
+ )
+
+ return version, output_path / f"mf{version}"
+
+
+def get_mf6_cmdargs(app, argv, text="mf6:", verbose=False):
+ return_text = None
+ proc = subprocess.Popen(
+ argv,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE,
+ cwd=os.path.dirname(app),
+ )
+ result, error = proc.communicate()
+ if result is not None:
+ c = result.decode("utf-8")
+ c = c.rstrip("\r\n")
+ if verbose:
+ print(f"{c}")
+ if text in c:
+ idx0 = c.index(text) + len(text) + 1
+ return_text = c[idx0:].strip()
+ return return_text
+
+
+def get_mf6_version(app, verbose=False):
+ text = "mf6:"
+ if str(app).endswith(".exe"):
+ text = "mf6.exe:"
+ version = get_mf6_cmdargs(app, [app, "-v"], text=text, verbose=verbose)
+ if version is not None:
+ version = version.split()[0]
+ if verbose:
+ print(f"version: {version}")
+ return version
+
+
+def get_mf6_compiler(app, verbose=False):
+ text = "mf6:"
+ if str(app).endswith(".exe"):
+ text = "mf6.exe:"
+ compiler = get_mf6_cmdargs(app, [app, "-c"], text=text, verbose=verbose)
+ if verbose and compiler is not None:
+ print(f"compiler: {compiler}")
+ return compiler
+
+
+def revert_files(app, example):
+ replace_dict = {
+ ".ims": {
+ (6, 1, 1): ("dvclose", "hclose"),
+ }
+ }
+ extensions = list(replace_dict.keys())
+
+ # get current version
+ version = get_mf6_version(app)
+ if version is not None:
+ version = tuple([int(v) for v in version.split(".")])
+
+ # get a list of files in example directory
+ files = os.listdir(example)
+
+ for file in files:
+ _, extension = os.path.splitext(file)
+ if extension in extensions:
+ key = extension.lower()
+ for v, replace in replace_dict[key].items():
+ if version < v:
+ fpth = os.path.join(example, file)
+ with open(fpth, "r") as f:
+ lines = f.readlines()
+ with open(fpth, "w") as f:
+ for line in lines:
+ if replace[0] in line.lower():
+ line = line.lower().replace(replace[0], replace[1])
+ f.write(line)
+
+
+def get_elapsed_time(buff, tag="Elapsed run time:"):
+ elt_str = ""
+ for line in buff:
+ if tag in line:
+ i0 = line.index(":")
+ elt_str = line[i0 + 1 :].strip()
+ return elt_str
+
+
+def time_factor(time_unit):
+ if "hours" in time_unit:
+ factor = 60.0 * 60.0
+ elif "minutes" in time_unit:
+ factor = 60.0
+ else:
+ factor = 1.0
+ return factor
+
+
+def elapsed_string_to_real(elt_str):
+ time_sec = 0.0
+ t = elt_str.split()
+ for idx in range(0, len(t), 2):
+ t0 = float(t[idx])
+ time_sec += t0 * time_factor(t[idx + 1].lower())
+ return time_sec
+
+
+def elapsed_real_to_string(elt):
+ if elt > 60.0:
+ time_min = int(elt / 60.0)
+ time_sec = elt % 60.0
+ elt_str = f"{time_min} Minutes, "
+ else:
+ time_sec = elt
+ elt_str = ""
+ return elt_str + f"{time_sec:.3f} Seconds"
+
+
+def run_function(app, example):
+ return flopy.run_model(
+ app,
+ None,
+ model_ws=example,
+ silent=True,
+ report=True,
+ )
+
+
+def run_model(current_app: PathLike, previous_app: PathLike, model_path: PathLike):
+ current_app = Path(current_app).expanduser().absolute()
+ previous_app = Path(previous_app).expanduser().absolute()
+ model_path = Path(model_path).expanduser().absolute()
+
+ current_time = 0.0
+ previous_time = 0.0
+
+ generic_names = ["mf6gwf", "mf6gwt"]
+ name = f"{model_path.parent.name}/{model_path.name}" if model_path.name in generic_names else model_path.name
+ print(f"Running scenario: {name}")
+ line = f"| {name} |"
+
+ # copy directory for previous application
+ prev_dir = os.path.join(model_path, "previous")
+ if os.path.isdir(prev_dir):
+ shutil.rmtree(prev_dir)
+ print(f"Copying {model_path} ==> {prev_dir}")
+ shutil.copytree(model_path, prev_dir)
+
+ # modify input files to use deprecated keywords in directory
+ # used with the previous application
+ revert_files(previous_app, prev_dir)
+
+ # # run the current application
+ # success, buff = run_function(app, example)
+ #
+ # # run the previous application
+ # success0, buff0 = run_function(app0, prev_dir)
+
+ # processing options
+ args = (
+ (current_app, model_path),
+ (previous_app, prev_dir),
+ )
+
+ # Multi-processing using Pool
+ # initialize the pool
+ pool = Pool(processes=2)
+
+ # run the models
+ results = [pool.apply_async(run_function, args=arg) for arg in args]
+
+ # close the pool
+ pool.close()
+
+ # set variables for processing
+ success, buff = results[0].get()
+ success0, buff0 = results[1].get()
+
+ if success:
+ elt = get_elapsed_time(buff)
+ line += f" {elt} |"
+ else:
+ print(f"Failure for current app with example: {name}")
+ for b in buff:
+ print(b)
+ line += " -- |"
+
+ if success0:
+ elt0 = get_elapsed_time(buff0)
+ line += f" {elt0} |"
+ else:
+ print(f"Failure for previous app with example: {name}")
+ line += " -- |"
+
+ if success and success0:
+ t = elapsed_string_to_real(elt)
+ t0 = elapsed_string_to_real(elt0)
+ current_time += t
+ previous_time += t0
+ pd = (t - t0) / t0
+ line += f" {pd:.2%} |"
+ else:
+ line += " -- |"
+
+ # clean up previous directory
+ if os.path.isdir(prev_dir):
+ shutil.rmtree(prev_dir)
+
+ return success, current_time, previous_time, line
+
+
+def write_results(
+ current_exe: PathLike,
+ previous_exe: PathLike,
+ output_path: PathLike,
+ current_total,
+ previous_total,
+ lines: List[str],
+):
+ current_exe = Path(current_exe)
+ previous_exe = Path(previous_exe)
+ output_path = Path(output_path).expanduser().absolute()
+
+ current_v = get_mf6_version(current_exe)
+ previous_v = get_mf6_version(previous_exe)
+
+ # open markdown table
+ with open(output_path / _markdown_file_name, "w") as f:
+ # get version numbers and write header
+
+ line = "### Comparison of simulation run times\n\n"
+ line += (
+ "Comparison of run times of the current version of "
+ + f"MODFLOW 6 ({current_v}) "
+ + f"to the previous version ({previous_v}). "
+ + "The current example models available from the "
+ + "[MODFLOW 6 Examples GitHub Repository]"
+ + "(https://github.com/MODFLOW-USGS/modflow6-examples) are "
+ + "used to compare run times. Simulations that fail are "
+ + "indicated by '--'. The percent difference, where calculated, "
+ + "is relative to the simulation run time for the previous "
+ + "version. Percent differences for example problems with "
+ + "short run times (less than 30 seconds) may not be significant.\n\n"
+ + f"{get_mf6_compiler(current_exe, verbose=True)}.\n\n\n"
+ )
+ line += "| Example Problem "
+ line += f"| Current Version {current_v} "
+ line += f"| Previous Version {previous_v} "
+ line += "| Percent difference |\n"
+ line += "| :---------- | :----------: | :----------: | :----------: |\n"
+ f.write(line)
+
+ # write benchmark data
+ for line in lines:
+ f.write(f"{line}\n")
+ f.flush()
+
+ # add final (total) line
+ pd = (current_total - previous_total) / previous_total
+ line = f"| Total simulation time |"
+ line += f" {elapsed_real_to_string(current_total)} |"
+ line += f" {elapsed_real_to_string(previous_total)} |"
+ line += f" {pd:.2%} |"
+ f.write(f"{line}\n")
+
+
+def run_benchmarks(
+ build_path: PathLike,
+ current_bin_path: PathLike,
+ previous_bin_path: PathLike,
+ examples_path: PathLike,
+ output_path: PathLike,
+ excluded: List[str]=[]):
+ """Benchmark current development version against previous release with example models."""
+
+ build_path = Path(build_path).expanduser().absolute()
+ current_bin_path = Path(current_bin_path).expanduser().absolute()
+ previous_bin_path = Path(previous_bin_path).expanduser().absolute()
+ examples_path = Path(examples_path).expanduser().absolute()
+ output_path = Path(output_path).expanduser().absolute()
+
+ example_dirs = get_model_paths(examples_path, excluded=excluded)
+ assert any(example_dirs), f"No example model paths found, have models been built?"
+
+ # results_path = output_path / _markdown_file_name
+ # if results_path.is_file():
+ # print(f"Benchmark results already exist: {results_path}")
+ # return
+
+ exe_name = f"mf6{_app_ext}"
+ current_exe = current_bin_path / exe_name
+ previous_exe = previous_bin_path / exe_name
+
+ if not current_exe.is_file():
+ print(f"Building current MODFLOW 6 development version")
+ meson_build(project_path=_project_root_path, build_path=build_path, bin_path=current_bin_path)
+
+ if not previous_exe.is_file():
+ version, download_path = download_previous_version(output_path)
+ print(f"Rebuilding latest MODFLOW 6 release {version} in development mode")
+ meson_build(project_path=download_path, build_path=build_path, bin_path=previous_bin_path)
+
+ print(f"Benchmarking MODFLOW 6 versions:")
+ print(f" current: {current_exe}")
+ print(f" previous: {previous_exe}")
+
+ # benchmark models
+ current_total = 0.0
+ previous_total = 0.0
+ lines = []
+ for idx, example in enumerate(example_dirs):
+ success, t, t0, line = run_model(
+ current_exe,
+ previous_exe,
+ example,
+ )
+ assert success, f"{example} run failed"
+ current_total += t
+ previous_total += t0
+ lines.append(line)
+
+ # create markdown results file
+ write_results(
+ current_exe=current_exe,
+ previous_exe=previous_exe,
+ output_path=output_path,
+ current_total=current_total,
+ previous_total=previous_total,
+ lines=lines,
+ )
+
+
+@pytest.mark.skip(reason="for manual testing")
+def test_run_benchmarks(tmp_path):
+ run_benchmarks(
+ build_path=_build_path,
+ current_bin_path=_bin_path,
+ previous_bin_path=_bin_path / "rebuilt",
+ examples_path=_examples_repo_path / "examples",
+ output_path=tmp_path,
+ excluded=["previous"])
+ assert (tmp_path / _markdown_file_name).is_file()
+
+
+if __name__ == "__main__":
+ parser = argparse.ArgumentParser(
+ prog="Benchmark MODFLOW 6 versions on example models",
+ formatter_class=argparse.RawDescriptionHelpFormatter,
+ epilog=textwrap.dedent(
+ """\
+ Benchmarks the current version of MODFLOW 6 against the latest official release.
+ with the example models stored in the MODFLOW-USGS/modflow6-examples repository.
+ """
+ ),
+ )
+ parser.add_argument(
+ "--build-path",
+ required=False,
+ default=str(_build_path),
+ help="Path to the build workspace",
+ )
+ parser.add_argument(
+ "--current-bin-path",
+ required=False,
+ default=str(_bin_path),
+ help="Path to the directory to install current version executables",
+ )
+ parser.add_argument(
+ "--previous-bin-path",
+ required=False,
+ default=str(_bin_path / "rebuilt"),
+ help="Path to the directory to install previous version executables",
+ )
+ parser.add_argument(
+ "-o",
+ "--output-path",
+ required=False,
+ default=str(_project_root_path / "distribution" / ""),
+ help="Location to create the zip archive",
+ )
+ parser.add_argument(
+ "-e",
+ "--examples-repo-path",
+ required=False,
+ default=str(_project_root_path.parent / "modflow6-examples"),
+ help="Path to the directory with modflow6 examples",
+ )
+ args = parser.parse_args()
+ build_path = Path(args.build_path)
+ current_bin_path = Path(args.current_bin_path)
+ previous_bin_path = Path(args.previous_bin_path)
+ output_path = Path(args.output_path) if args.output_path else Path(os.getcwd())
+ examples_repo_path = (
+ Path(args.examples_repo_path)
+ if args.examples_repo_path
+ else _examples_repo_path
+ )
+
+ output_path.mkdir(parents=True, exist_ok=True)
+ assert (
+ examples_repo_path.is_dir()
+ ), f"Examples repo not found: {examples_repo_path}"
+
+ run_benchmarks(
+ build_path=build_path,
+ current_bin_path=current_bin_path,
+ previous_bin_path=previous_bin_path,
+ examples_path=examples_repo_path / "examples",
+ output_path=output_path,
+ excluded=["previous"]
+ )
diff --git a/distribution/build_dist.py b/distribution/build_dist.py
new file mode 100644
index 00000000000..b5bdd6d0541
--- /dev/null
+++ b/distribution/build_dist.py
@@ -0,0 +1,459 @@
+import argparse
+import os
+import platform
+import shutil
+import textwrap
+from collections import namedtuple
+from os import PathLike, environ
+from pathlib import Path
+from pprint import pprint
+from shutil import copytree
+
+import pytest
+from modflow_devtools.build import meson_build
+from modflow_devtools.download import download_and_unzip, get_release
+from modflow_devtools.markers import requires_exe
+from modflow_devtools.misc import get_model_paths
+
+from build_docs import build_documentation
+from build_makefiles import build_mf6_makefile, build_mf5to6_makefile, build_zbud6_makefile
+from utils import (get_project_root_path, run_command)
+
+_project_name = "MODFLOW 6"
+
+# default paths
+_project_root_path = get_project_root_path()
+_version_texf_path = _project_root_path / "doc" / "version.tex"
+_examples_repo_path = _project_root_path.parent / "modflow6-examples"
+_examples_path = _examples_repo_path / "examples"
+_build_path = _project_root_path / "builddir"
+_bin_path = _project_root_path / "bin"
+_docs_path = _project_root_path / "doc"
+_benchmarks_path = _project_root_path / "distribution" / ".benchmarks"
+
+# top-level directories included in distribution
+_included_dir_paths = [
+ "bin",
+ "doc",
+ "examples",
+ "src",
+ "srcbmi",
+ "msvs",
+ "make",
+ "utils",
+]
+
+# LaTex files included in distribution docs
+_default_tex_paths = [
+ _project_root_path / "doc" / "mf6io" / "mf6io.tex",
+ _project_root_path / "doc" / "ReleaseNotes" / "ReleaseNotes.tex",
+ _project_root_path / "doc" / "zonebudget" / "zonebudget.tex",
+ _project_root_path / "doc" / "ConverterGuide" / "converter_mf5to6.tex",
+ _project_root_path / "doc" / "SuppTechInfo" / "mf6suptechinfo.tex",
+]
+
+
+Makefile = namedtuple('Makefile', ['app', 'src_path', 'out_path'])
+
+
+# makefiles included in distribution
+_makefiles = [
+ Makefile(app="mf6",
+ src_path=_project_root_path / "src",
+ out_path=Path("make")),
+ Makefile(app="zbud6",
+ src_path=_project_root_path / "utils" / "zonebudget" / "src",
+ out_path=Path("utils") / "zonebudget" / "make"),
+ Makefile(app="mf5to6",
+ src_path=_project_root_path / "utils" / "mf5to6" / "src",
+ out_path=Path("utils") / "mf5to6" / "make")
+]
+
+# system-specific filenames, extensions, etc
+_system = platform.system()
+_eext = ".exe" if _system == "Windows" else ""
+_soext = ".dll" if _system == "Windows" else ".so" if _system == "Linux" else ".dylib"
+_scext = ".bat" if _system == "Windows" else ".sh"
+_executable = f"mf6{_eext}"
+
+# Fortran and C compilers
+FC = environ.get("FC", "gfortran")
+CC = environ.get("CC", "gcc")
+
+
+def copy_sources(output_path: PathLike):
+ output_path = Path(output_path).expanduser().absolute()
+
+ # make sure output directory exists
+ output_path.mkdir(exist_ok=True)
+
+ # copy code.json
+ shutil.copy(_project_root_path / "code.json", output_path)
+
+ # Copy Visual Studio sln and project files
+ print("Copying msvs files to output directory")
+ (output_path / "msvs").mkdir(exist_ok=True)
+ source_msvs_path = _project_root_path / "msvs"
+ for d in [
+ str(source_msvs_path / "mf6.sln"),
+ str(source_msvs_path / "mf6.vfproj"),
+ str(source_msvs_path / "mf6core.vfproj"),
+ str(source_msvs_path / "mf6bmi.sln"),
+ str(source_msvs_path / "mf6bmi.vfproj"),
+ ]:
+ shutil.copy(d, output_path / "msvs")
+
+ ignored = shutil.ignore_patterns(".DS_Store")
+
+ # copy top-level meson.build
+ shutil.copy(_project_root_path / "meson.build", output_path)
+
+ # copy source folder
+ src_path = _project_root_path / "src"
+ dst_path = output_path / "src"
+ print(f"Copying {src_path} to {dst_path}")
+ copytree(src_path, dst_path, ignore=ignored)
+
+ # copy srcbmi folder
+ src_path = _project_root_path / "srcbmi"
+ dst_path = output_path / "srcbmi"
+ print(f"Copying {src_path} to {dst_path}")
+ copytree(src_path, dst_path, ignore=ignored)
+
+ # copy utils folder
+ src_path = _project_root_path / "utils"
+ dst_path = output_path / "utils"
+ print(f"Copying {src_path} to {dst_path}")
+ copytree(src_path, dst_path, ignore=ignored)
+
+
+def test_copy_sources(tmp_path):
+ copy_sources(tmp_path)
+
+ assert (tmp_path / "src").is_dir()
+ assert (tmp_path / "srcbmi").is_dir()
+ assert (tmp_path / "utils").is_dir()
+ assert (tmp_path / "msvs").is_dir()
+
+ assert (tmp_path / "src" / "meson.build").is_file()
+ assert (tmp_path / "srcbmi" / "meson.build").is_file()
+ assert (tmp_path / "utils" / "meson.build").is_file()
+ assert (tmp_path / "msvs" / "mf6.sln").is_file()
+
+
+def build_examples(examples_repo_path: PathLike, overwrite: bool = False):
+ examples_repo_path = Path(examples_repo_path).expanduser().absolute()
+
+ # create examples, but don't run them
+ examples_path = examples_repo_path / "examples"
+ examples_path.mkdir(parents=True, exist_ok=True)
+ if not overwrite and any(get_model_paths(examples_path)):
+ print(f"Examples already built")
+ else:
+ print(f"Building examples")
+ scripts_folder = examples_repo_path / "scripts"
+ exclude_list = ["ex-gwf-capture.py"]
+ scripts = [
+ fname
+ for fname in scripts_folder.glob("*")
+ if fname.suffix == ".py"
+ and fname.stem.startswith("ex-")
+ and fname.stem not in exclude_list
+ ]
+ for script in scripts:
+ argv = [
+ "python",
+ script,
+ "--no_run",
+ "--no_plot",
+ "--destination",
+ examples_path,
+ ]
+ print(f"running {argv} in {scripts_folder}")
+ run_command(argv, scripts_folder)
+
+
+def setup_examples(bin_path: PathLike, examples_path: PathLike, overwrite: bool = False):
+ examples_path = Path(examples_path).expanduser().absolute()
+
+ # download example models zip asset
+ latest = get_release("MODFLOW-USGS/modflow6-examples", "latest")
+ assets = latest["assets"]
+ asset = next(iter([a for a in assets if a["name"] == "modflow6-examples.zip"]), None)
+ download_and_unzip(asset["browser_download_url"], examples_path, verbose=True)
+
+ # list folders with mfsim.nam (recursively)
+ # and add run.sh/bat script to each folder
+ model_paths = get_model_paths(examples_path)
+ for mp in model_paths:
+ script_path = mp / f"run{_scext}"
+ if not overwrite and script_path.is_file():
+ print(f"Script {script_path} already exists")
+ else:
+ print(f"Creating {script_path}")
+ with open(script_path, "w") as f:
+ if _system == "Windows":
+ f.write("@echo off" + "\n")
+ else:
+ f.write("#!/bin/sh" + "\n")
+ runbatloc = os.path.relpath(bin_path / _executable, start=mp)
+ f.write(runbatloc + "\n")
+ if _system == "Windows":
+ f.write("echo." + "\n")
+ f.write("echo Run complete. Press any key to continue" + "\n")
+ f.write("pause>nul" + "\n")
+
+ if _system != "Windows":
+ script_path.chmod(script_path.stat().st_mode | 0o111)
+ print(f"Execute permission set for {script_path}")
+
+ # add runall.sh/bat, which runs all examples
+ script_path = examples_path / f"runall{_scext}"
+ if not overwrite and script_path.is_file():
+ print(f"Script {script_path} already exists")
+ else:
+ print(f"Creating {script_path}")
+ with open(script_path, "w") as f:
+ if _system != "Windows":
+ f.write("#!/bin/sh" + "\n")
+ for mp in model_paths:
+ d = os.path.relpath(mp, start=examples_path)
+ s = f"cd {d}"
+ f.write(s + "\n")
+ runbatloc = os.path.relpath(bin_path / _executable, start=mp)
+ f.write(runbatloc + "\n")
+ d = os.path.relpath(examples_path, start=mp)
+ s = f"cd {d}"
+ f.write(s + "\n")
+ s = ""
+ f.write(s + "\n")
+ if _system == "Windows":
+ f.write("pause" + "\n")
+ else:
+ script_path.chmod(script_path.stat().st_mode | 0o111)
+ print(f"Execute permission set for {script_path}")
+
+
+def test_setup_examples():
+ pass
+
+
+def build_programs_meson(build_path: PathLike, bin_path: PathLike, overwrite: bool = False):
+ build_path = Path(build_path).expanduser().absolute()
+ bin_path = Path(bin_path).expanduser().absolute()
+
+ exe_paths = [
+ bin_path / f"mf6{_eext}",
+ bin_path / f"zbud6{_eext}",
+ bin_path / f"mf5to6{_eext}"
+ ]
+ lib_paths = [
+ bin_path / f"libmf6{_soext}"
+ ]
+
+ if not overwrite and all(p.is_file() for p in exe_paths) and all(p.is_file() for p in lib_paths):
+ print(f"Binaries already exist:")
+ pprint(exe_paths + lib_paths)
+ else:
+ print(f"Building binaries in {build_path}, installing to {bin_path}")
+ meson_build(project_path=_project_root_path, build_path=build_path, bin_path=bin_path)
+
+ for target in (exe_paths + lib_paths):
+ assert target.is_file(), f"Failed to build {target}"
+ target.chmod(target.stat().st_mode | 0o111)
+ print(f"Execute permission set for {target}")
+
+
+def test_build_programs_meson(tmp_path):
+ build_programs_meson(tmp_path / "builddir", tmp_path / "bin")
+
+
+def build_makefiles(output_path: PathLike):
+ output_path = Path(output_path).expanduser().absolute()
+
+ # create and copy mf6 makefile
+ build_mf6_makefile()
+ (output_path / "make").mkdir(parents=True, exist_ok=True)
+ shutil.copyfile(_project_root_path / "make" / "makefile", output_path / "make" / "makefile")
+ shutil.copyfile(_project_root_path / "make" / "makedefaults", output_path / "make" / "makedefaults")
+
+ # create and copy zbud6 makefile
+ build_zbud6_makefile()
+ rel_path = Path("utils") / "zonebudget" / "make"
+ (output_path / rel_path).mkdir(parents=True, exist_ok=True)
+ shutil.copyfile(_project_root_path / rel_path / "makefile", output_path / rel_path / "makefile")
+ shutil.copyfile(_project_root_path / rel_path / "makedefaults", output_path / rel_path / "makedefaults")
+
+ # create and copy mf5to6 makefile
+ build_mf5to6_makefile()
+ rel_path = Path("utils") / "mf5to6" / "make"
+ (output_path / rel_path).mkdir(parents=True, exist_ok=True)
+ shutil.copyfile(_project_root_path / rel_path / "makefile", output_path / rel_path / "makefile")
+ shutil.copyfile(_project_root_path / rel_path / "makedefaults", output_path / rel_path / "makedefaults")
+
+
+def test_build_makefiles(tmp_path):
+ build_makefiles(tmp_path)
+
+ assert (tmp_path / "make" / "makefile").is_file()
+ assert (tmp_path / "make" / "makedefaults").is_file()
+ assert (tmp_path / "utils" / "zonebudget" / "make" / "makefile").is_file()
+ assert (tmp_path / "utils" / "zonebudget" / "make" / "makedefaults").is_file()
+ assert (tmp_path / "utils" / "mf5to6" / "make" / "makefile").is_file()
+ assert (tmp_path / "utils" / "mf5to6" / "make" / "makedefaults").is_file()
+
+ os.system(f"cd {tmp_path} && make -f make/makefile")
+ os.system(f"cd {tmp_path} && make -f utils/zonebudget/make/makefile")
+ os.system(f"cd {tmp_path} && make -f utils/mf5to6/make/makefile")
+
+
+def build_distribution(
+ build_path: PathLike,
+ output_path: PathLike,
+ examples_repo_path: PathLike,
+ development: bool = False,
+ overwrite: bool = False):
+ print(f"Building {'development' if development else 'full'} distribution")
+
+ build_path = Path(build_path).expanduser().absolute()
+ output_path = Path(output_path).expanduser().absolute()
+ examples_repo_path = Path(examples_repo_path).expanduser().absolute()
+
+ # binaries
+ build_programs_meson(
+ build_path=build_path,
+ bin_path=output_path / "bin",
+ overwrite=overwrite)
+
+ # examples
+ setup_examples(
+ bin_path=output_path / "bin",
+ examples_path=output_path / "examples",
+ overwrite=overwrite)
+
+ # docs
+ build_documentation(
+ bin_path=output_path / "bin",
+ output_path=output_path / "doc",
+ examples_repo_path=examples_repo_path,
+ # benchmarks_path=_benchmarks_path / "run-time-comparison.md",
+ development=development,
+ overwrite=overwrite)
+
+ # full releases include source code and makefiles
+ if not development:
+ # copy source code files
+ copy_sources(output_path=output_path)
+
+ # build and copy makefiles
+ build_makefiles(output_path=output_path)
+
+
+@requires_exe("pdflatex")
+@pytest.mark.skip(reason="manual testing")
+@pytest.mark.parametrize("dev", [True, False])
+def test_build_distribution(tmp_path, dev):
+ output_path = tmp_path / "dist"
+ build_distribution(
+ build_path=tmp_path / "builddir",
+ output_path=output_path,
+ examples_repo_path=_examples_repo_path,
+ development=dev,
+ overwrite=True
+ )
+
+ if dev:
+ # check binaries and libs
+ system = platform.system()
+ ext = ".exe" if system == "Windows" else ""
+ for exe in ["mf6", "mf5to6", "zbud6"]:
+ assert (output_path / f"{exe}{ext}").is_file()
+ assert (
+ output_path
+ / (
+ "libmf6"
+ + (".so" if system == "Linux" else (".dylib" if system == "Darwin" else ".dll"))
+ )
+ ).is_file()
+
+ # check mf6io docs
+ assert (output_path / "mf6io.pdf").is_file()
+ else:
+ pass
+
+
+if __name__ == "__main__":
+ parser = argparse.ArgumentParser(
+ prog="Create a Modflow 6 distribution directory for release",
+ formatter_class=argparse.RawDescriptionHelpFormatter,
+ epilog=textwrap.dedent(
+ """\
+            Create a distribution folder. If no output path is provided,
+ distribution files are written to the distribution/ folder.
+ """
+ ),
+ )
+ parser.add_argument(
+ "--build-path",
+ required=False,
+ default=str(_build_path),
+ help="Path to the build workspace",
+ )
+ parser.add_argument(
+ "-o",
+ "--output-path",
+ required=False,
+ default=str(_project_root_path / "distribution"),
+ help="Path to create distribution artifacts",
+ )
+ parser.add_argument(
+ "-e",
+ "--examples-repo-path",
+ required=False,
+ default=str(_examples_repo_path),
+ help="Path to directory containing modflow6 example models"
+ )
+ # parser.add_argument(
+ # "-b",
+ # "--benchmarks-path",
+ # required=False,
+ # default=str(_project_root_path / "distribution" / ".benchmarks"),
+ # help="Path to directory containing benchmark results"
+ # )
+ parser.add_argument(
+ "-d",
+ "--development",
+ required=False,
+ default=False,
+ action="store_true",
+ help="Whether to build a development (e.g., nightly) rather than a full distribution"
+ )
+ parser.add_argument(
+ "-f",
+ "--force",
+ required=False,
+ default=False,
+ action="store_true",
+ help="Whether to recreate and overwrite existing artifacts"
+ )
+ args = parser.parse_args()
+
+ build_path = Path(args.build_path)
+ out_path = Path(args.output_path)
+ examples_repo_path = (
+ Path(args.examples_repo_path)
+ if args.examples_repo_path
+ else _examples_repo_path
+ )
+ assert (
+ examples_repo_path.is_dir()
+ ), f"Examples repo not found at path: {examples_repo_path}"
+ out_path.mkdir(parents=True, exist_ok=True)
+
+ build_distribution(
+ build_path=build_path,
+ output_path=out_path,
+ examples_repo_path=examples_repo_path,
+ development=args.development,
+ overwrite=args.force,
+ )
diff --git a/distribution/build_docs.py b/distribution/build_docs.py
new file mode 100644
index 00000000000..2a6b096e45d
--- /dev/null
+++ b/distribution/build_docs.py
@@ -0,0 +1,548 @@
+import argparse
+import os
+import platform
+import shutil
+import textwrap
+from _warnings import warn
+from datetime import datetime
+from os import PathLike
+from pathlib import Path
+from pprint import pprint
+from shutil import which
+from tempfile import TemporaryDirectory
+from typing import List, Optional
+from urllib.error import HTTPError
+from warnings import warn
+
+import pytest
+from flaky import flaky
+from modflow_devtools.build import meson_build
+from modflow_devtools.download import list_artifacts, download_artifact, get_release, download_and_unzip
+from modflow_devtools.markers import requires_exe, requires_github
+from modflow_devtools.misc import set_dir, run_cmd
+
+from benchmark import run_benchmarks
+from utils import convert_line_endings
+from utils import get_project_root_path
+
+_project_root_path = get_project_root_path()
+_version_texf_path = _project_root_path / "doc" / "version.tex"
+_examples_repo_path = _project_root_path.parent / "modflow6-examples"
+_release_notes_path = _project_root_path / "doc" / "ReleaseNotes"
+_distribution_path = _project_root_path / "distribution"
+_benchmarks_path = _project_root_path / "distribution" / ".benchmarks"
+_docs_path = _project_root_path / "doc"
+
+_default_tex_paths = [
+ _project_root_path / "doc" / "mf6io" / "mf6io.tex",
+ _project_root_path / "doc" / "ReleaseNotes" / "ReleaseNotes.tex",
+ _project_root_path / "doc" / "zonebudget" / "zonebudget.tex",
+ _project_root_path / "doc" / "ConverterGuide" / "converter_mf5to6.tex",
+ _project_root_path / "doc" / "SuppTechInfo" / "mf6suptechinfo.tex",
+]
+_system = platform.system()
+_eext = ".exe" if _system == "Windows" else ""
+_soext = ".dll" if _system == "Windows" else ".so" if _system == "Linux" else ".dylib"
+
+
+# publications included in distribution docs
+_publication_urls = [
+ "https://pubs.usgs.gov/tm/06/a55/tm6a55.pdf",
+ "https://pubs.usgs.gov/tm/06/a56/tm6a56.pdf",
+ "https://pubs.usgs.gov/tm/06/a57/tm6a57.pdf",
+ "https://pubs.usgs.gov/tm/06/a61/tm6a61.pdf",
+ "https://pubs.usgs.gov/tm/06/a62/tm6a62.pdf",
+]
+
+
+def clean_tex_files():
+ print("Cleaning latex files")
+ exts = ["pdf", "aux", "bbl", "idx", "lof", "out", "toc"]
+ pth = _project_root_path / "doc" / "mf6io"
+ files = [(pth / f"mf6io.{e}") for e in exts]
+ for file in files:
+ file.unlink(missing_ok=True)
+ assert not os.path.isfile(str(pth) + ".pdf")
+
+ pth = _project_root_path / "doc" / "ReleaseNotes"
+ files = [(pth / f"ReleaseNotes.{e}") for e in exts]
+ for file in files:
+ file.unlink(missing_ok=True)
+ assert not os.path.isfile(str(pth) + ".pdf")
+
+ pth = _project_root_path / "doc" / "zonebudget"
+ files = [(pth / f"zonebudget.{e}") for e in exts]
+ for file in files:
+ file.unlink(missing_ok=True)
+ assert not os.path.isfile(str(pth) + ".pdf")
+
+ pth = _project_root_path / "doc" / "ConverterGuide"
+ files = [(pth / f"converter_mf5to6.{e}") for e in exts]
+ for file in files:
+ file.unlink(missing_ok=True)
+ assert not os.path.isfile(str(pth) + ".pdf")
+
+ pth = _project_root_path.parent / "modflow6-docs.git" / "mf6suptechinfo"
+ files = [(pth / f"mf6suptechinfo.{e}") for e in exts]
+ if pth.is_dir():
+ for file in files:
+ file.unlink(missing_ok=True)
+ assert not os.path.isfile(str(pth) + ".pdf")
+
+ pth = _examples_repo_path / "doc"
+ files = [(pth / f"mf6examples.{e}") for e in exts]
+ for file in files:
+ file.unlink(missing_ok=True)
+ assert not os.path.isfile(str(pth) + ".pdf")
+
+
+def download_benchmarks(output_path: PathLike, quiet: bool = True) -> Optional[Path]:
+ output_path = Path(output_path).expanduser().absolute()
+ name = "run-time-comparison"
+ repo = "w-bonelli/modflow6"
+ artifacts = list_artifacts(repo, name=name, quiet=quiet)
+ artifacts = sorted(artifacts, key=lambda a: datetime.strptime(a['created_at'], '%Y-%m-%dT%H:%M:%SZ'), reverse=True)
+ most_recent = next(iter(artifacts), None)
+ print(f"Found most recent benchmarks (artifact {most_recent['id']})")
+ if most_recent:
+ print(f"Downloading benchmarks (artifact {most_recent['id']})")
+ download_artifact(repo, id=most_recent['id'], path=output_path, quiet=quiet)
+ print(f"Downloaded benchmarks to {output_path}")
+ path = output_path / f"{name}.md"
+ assert path.is_file()
+ return path
+ else:
+ print(f"No benchmarks found")
+ return None
+
+
+@flaky
+@requires_github
+def test_download_benchmarks(tmp_path):
+ path = download_benchmarks(tmp_path, quiet=False)
+ if path:
+ assert path.name == "run-time-comparison.md"
+
+
+def build_benchmark_tex(output_path: PathLike, overwrite: bool = False):
+ _benchmarks_path.mkdir(parents=True, exist_ok=True)
+ benchmarks_path = _benchmarks_path / "run-time-comparison.md"
+
+ # download benchmark artifacts if any exist on GitHub
+ if not benchmarks_path.is_file():
+ benchmarks_path = download_benchmarks(_benchmarks_path)
+
+ # run benchmarks again if no benchmarks found on GitHub or overwrite requested
+ if overwrite or not benchmarks_path.is_file():
+ run_benchmarks(
+ build_path=_project_root_path / "builddir",
+ current_bin_path=_project_root_path / "bin",
+ previous_bin_path=_project_root_path / "bin" / "rebuilt",
+ examples_path=_examples_repo_path / "examples",
+ output_path=output_path)
+
+ # convert markdown benchmark results to LaTeX
+ with set_dir(_release_notes_path):
+ tex_path = Path("run-time-comparison.tex")
+ tex_path.unlink(missing_ok=True)
+ out, err, ret = run_cmd("python", "mk_runtimecomp.py", benchmarks_path, verbose=True)
+ assert not ret, out + err
+ assert tex_path.is_file()
+
+ if (_distribution_path / f"{benchmarks_path.stem}.md").is_file():
+ assert (_docs_path / "ReleaseNotes" / f"{benchmarks_path.stem}.tex").is_file()
+
+
+@flaky
+@requires_github
+@pytest.mark.skipif(not (_benchmarks_path / "run-time-comparison.md").is_file(), reason="needs benchmarks")
+def test_build_benchmark_tex(tmp_path):
+ benchmarks_path = _benchmarks_path / "run-time-comparison.md"
+ tex_path = _distribution_path / f"{benchmarks_path.stem}.tex"
+
+ try:
+ build_benchmark_tex(tmp_path)
+ assert benchmarks_path.is_file()
+ finally:
+ tex_path.unlink(missing_ok=True)
+
+
+def build_mf6io_tex_from_dfn(overwrite: bool = False):
+ if overwrite:
+ clean_tex_files()
+
+ def files_match(tex_path, dfn_path, ignored):
+ dfn_names = [
+ f.stem
+ for f in dfn_path.glob("*")
+ if f.is_file()
+ and "dfn" in f.suffix
+ and not any(pattern in f.name for pattern in ignored)
+ ]
+ tex_names = [
+ f.stem.replace("-desc", "")
+ for f in tex_path.glob("*")
+ if f.is_file()
+ and "tex" in f.suffix
+ and not any(pattern in f.name for pattern in ignored)
+ ]
+
+ return set(tex_names) == set(dfn_names)
+
+ with set_dir(_project_root_path / "doc" / "mf6io" / "mf6ivar"):
+ ignored = ["appendix", "common"]
+ tex_pth = Path("tex")
+ dfn_pth = Path("dfn")
+ tex_files = [f for f in tex_pth.glob("*") if f.is_file()]
+ dfn_files = [f for f in dfn_pth.glob("*") if f.is_file()]
+
+ if not overwrite and any(tex_files) and any(dfn_files) and files_match(tex_pth, dfn_pth, ignored):
+ print(f"DFN files already exist:")
+ pprint(dfn_files)
+ else:
+ for f in tex_files:
+ f.unlink()
+
+ # run python script
+ out, err, ret = run_cmd("python", "mf6ivar.py")
+ assert not ret, out + err
+
+ # check that dfn and tex files match
+ assert files_match(tex_pth, dfn_pth, ignored)
+
+
+@pytest.mark.parametrize("overwrite", [True, False])
+def test_build_mf6io_tex_from_dfn(overwrite):
+ mf6ivar_path = _project_root_path / "doc" / "mf6io" / "mf6ivar"
+ file_paths = [p for p in (mf6ivar_path / "tex").glob("*.tex") if p.is_file()] + [
+ mf6ivar_path / "md" / "mf6ivar.md",
+ mf6ivar_path / "tex" / "gwf-disv-griddata.dat",
+ mf6ivar_path / "tex" / "gwf-npf-options.dat",
+ ]
+ file_mtimes = [p.stat().st_mtime for p in file_paths]
+
+ try:
+ build_mf6io_tex_from_dfn(overwrite=overwrite)
+
+ # files should have been modified if overwrite is true
+ for p, t in zip(file_paths, file_mtimes):
+ assert overwrite == (p.stat().st_mtime > t)
+ finally:
+ for p in (file_paths + [
+ # should these be under version control, since they're cleaned in fn above?
+ _project_root_path / "doc" / "ConverterGuide" / "converter_mf5to6.bbl",
+ _project_root_path / "doc" / "ReleaseNotes" / "ReleaseNotes.bbl",
+ _project_root_path / "doc" / "mf6io" / "mf6io.bbl",
+ _project_root_path / "doc" / "zonebudget" / "zonebudget.bbl"
+ ]):
+ os.system(f"git restore {p}")
+
+
+def build_tex_folder_structure(overwrite: bool = False):
+ path = _release_notes_path / "folder_struct.tex"
+
+ if overwrite:
+ path.unlink(missing_ok=True)
+ elif path.is_file():
+ print(f"Folder structure file already exists: {path}")
+ return
+
+ with set_dir(_release_notes_path):
+ out, err, ret = run_cmd("python", "mk_folder_struct.py", "-dp", _project_root_path)
+ assert not ret, out + err
+
+ assert path.is_file(), f"Failed to create {path}"
+
+
+def test_build_tex_folder_structure():
+ path = _project_root_path / "doc" / "ReleaseNotes" / "folder_struct.tex"
+ try:
+ build_tex_folder_structure()
+ finally:
+ os.system(f"git restore {path}")
+
+
+def build_mf6io_tex_example(workspace_path: PathLike, bin_path: PathLike, example_model_path: PathLike):
+ workspace_path = Path(workspace_path) / "workspace"
+ bin_path = Path(bin_path).expanduser().absolute()
+ mf6_exe_path = bin_path / f"mf6{_eext}"
+ example_model_path = Path(example_model_path).expanduser().absolute()
+
+ assert mf6_exe_path.is_file(), f"{mf6_exe_path} does not exist"
+ assert example_model_path.is_dir(), f"{example_model_path} does not exist"
+
+ tex_path = _project_root_path / "doc" / "mf6io"
+ fname1 = tex_path / "mf6output.tex"
+ fname2 = tex_path / "mf6noname.tex"
+ fname3 = tex_path / "mf6switches.tex"
+ cmd = str(mf6_exe_path)
+
+ if workspace_path.is_dir():
+ shutil.rmtree(workspace_path)
+ shutil.copytree(example_model_path, workspace_path)
+
+ # run example model
+
+ with set_dir(workspace_path):
+ out, err, ret = run_cmd(cmd)
+ buff = out + err
+ lines = buff.split("\r\n")
+ with open(fname1, "w") as f:
+ f.write("{\\small\n")
+ f.write("\\begin{lstlisting}[style=modeloutput]\n")
+ for line in lines:
+ f.write(line.rstrip() + "\n")
+ f.write("\\end{lstlisting}\n")
+ f.write("}\n")
+
+ if workspace_path.is_dir():
+ shutil.rmtree(workspace_path)
+ os.mkdir(workspace_path)
+
+ # run model without a namefile present
+ with set_dir(workspace_path):
+ out, err, ret = run_cmd(cmd)
+ buff = out + err
+ lines = buff.split("\r\n")
+ with open(fname2, "w") as f:
+ f.write("{\\small\n")
+ f.write("\\begin{lstlisting}[style=modeloutput]\n")
+ for line in lines:
+ f.write(line.rstrip() + "\n")
+ f.write("\\end{lstlisting}\n")
+ f.write("}\n")
+
+ with set_dir(workspace_path):
+ # run mf6 command with -h to show help
+ out, err, ret = run_cmd(str(mf6_exe_path), "-h")
+ buff = out + err
+ lines = buff.split("\r\n")
+ with open(fname3, "w") as f:
+ f.write("{\\small\n")
+ f.write("\\begin{lstlisting}[style=modeloutput]\n")
+ for line in lines:
+ f.write(line.rstrip() + "\n")
+ f.write("\\end{lstlisting}\n")
+ f.write("}\n")
+
+
+def test_build_mf6io_tex_example():
+ pass
+
+
+def build_pdfs_from_tex(tex_paths: List[PathLike], output_path: PathLike, passes: int = 3, overwrite: bool = False):
+ print(f"Building PDFs from LaTex:")
+ pprint(tex_paths)
+
+ output_path = Path(output_path).expanduser().absolute()
+ built_paths = set()
+ for tex_path in tex_paths:
+ tex_path = Path(tex_path).expanduser().absolute()
+ pdf_name = tex_path.stem + ".pdf"
+ pdf_path = tex_path.parent / pdf_name
+ tgt_path = output_path / pdf_name
+ if overwrite or not tgt_path.is_file():
+ print(f"Converting {tex_path} to PDF")
+ with set_dir(tex_path.parent):
+ first = True
+ for i in range(passes):
+ print(f"Pass {i + 1}/{passes}")
+ out, err, ret = run_cmd(
+ "pdflatex",
+ "-interaction=nonstopmode",
+ "-halt-on-error",
+ tex_path.name,
+ )
+ buff = out + err
+ assert not ret, buff
+ if first:
+ out, err, ret = run_cmd("bibtex", tex_path.stem + ".aux")
+ buff = out + err
+ assert not ret or "I found no" in buff, buff
+ first = False
+
+ if tgt_path.is_file():
+ print(f"Clobbering {tgt_path}")
+ tgt_path.unlink()
+
+ print(f"Moving {pdf_path} to {tgt_path}")
+ pdf_path.rename(tgt_path)
+ else:
+ print(f"{tgt_path} already exists, nothing to do")
+
+ assert tgt_path.is_file(), f"Failed to build {tgt_path} from {tex_path}"
+ assert tgt_path not in built_paths, f"Duplicate target: {tgt_path}"
+ built_paths.add(tgt_path)
+
+
+@requires_exe("pdflatex")
+def test_build_pdfs_from_tex(tmp_path):
+ tex_paths = [
+ _docs_path / "mf6io" / "mf6io.tex",
+ _docs_path / "ReleaseNotes" / "ReleaseNotes.tex",
+ _docs_path / "zonebudget" / "zonebudget.tex",
+ _docs_path / "ConverterGuide" / "converter_mf5to6.tex",
+ _docs_path / "SuppTechInfo" / "mf6suptechinfo.tex",
+ _examples_repo_path / "doc" / "mf6examples.tex",
+ ]
+ bbl_paths = [
+ _docs_path / "ConverterGuide" / "converter_mf5to6.bbl",
+ _docs_path / "ReleaseNotes" / "ReleaseNotes.tex",
+ _docs_path / "zonebudget" / "zonebudget.tex",
+ ]
+
+ try:
+ build_pdfs_from_tex(tex_paths, tmp_path)
+ finally:
+ for p in (tex_paths[:-1] + bbl_paths):
+ os.system(f"git restore {p}")
+
+
+def build_documentation(bin_path: PathLike,
+ output_path: PathLike,
+ examples_repo_path: PathLike,
+ development: bool = False,
+ overwrite: bool = False):
+ print(f"Building {'development' if development else 'candidate'} documentation")
+
+ bin_path = Path(bin_path).expanduser().absolute()
+ output_path = Path(output_path).expanduser().absolute()
+ examples_repo_path = Path(examples_repo_path).expanduser().absolute()
+
+ # make sure output directory exists
+ output_path.mkdir(parents=True, exist_ok=True)
+
+ # build LaTex input/output docs from DFN files
+ build_mf6io_tex_from_dfn(overwrite=True)
+
+ # build LaTeX input/output example model docs
+ with TemporaryDirectory() as temp:
+ temp_path = Path(temp)
+ build_mf6io_tex_example(
+ workspace_path=temp_path,
+ bin_path=bin_path,
+ example_model_path=examples_repo_path / "examples" / "ex-gwf-twri01",
+ )
+
+ # build LaTeX file describing distribution folder structure
+ # build_tex_folder_structure(overwrite=True)
+
+ if development:
+ # convert LaTeX to PDF
+ build_pdfs_from_tex(tex_paths=[_docs_path / "mf6io" / "mf6io.tex"], output_path=output_path)
+ else:
+ # convert benchmarks to LaTex, running them first if necessary
+ build_benchmark_tex(output_path=output_path, overwrite=overwrite)
+
+ # download example docs
+ latest = get_release("MODFLOW-USGS/modflow6-examples", "latest")
+ assets = latest["assets"]
+ asset = next(iter([a for a in assets if a["name"] == "mf6examples.pdf"]), None)
+ download_and_unzip(asset["browser_download_url"], output_path, verbose=True)
+
+ # download publications
+ for url in _publication_urls:
+ print(f"Downloading publication: {url}")
+ try:
+ download_and_unzip(url, path=output_path, delete_zip=False)
+ assert (output_path / url.rpartition("/")[2]).is_file()
+ except HTTPError as e:
+ if "404" in str(e):
+ warn(f"Publication not found: {url}")
+ else:
+ raise
+
+ # convert LaTex to PDF
+ build_pdfs_from_tex(tex_paths=_default_tex_paths, output_path=output_path, overwrite=overwrite)
+
+ # enforce os line endings on all text files
+ windows_line_endings = True
+ convert_line_endings(output_path, windows_line_endings)
+
+ # make sure we have expected PDFs
+ if development:
+ assert (output_path / "mf6io.pdf").is_file()
+ else:
+ assert (output_path / "mf6io.pdf").is_file()
+ assert (output_path / "ReleaseNotes.pdf").is_file()
+ assert (output_path / "zonebudget.pdf").is_file()
+ assert (output_path / "converter_mf5to6.pdf").is_file()
+ assert (output_path / "mf6suptechinfo.pdf").is_file()
+ assert (output_path / "mf6examples.pdf").is_file()
+
+
+@requires_exe("pdflatex")
+@pytest.mark.skip(reason="manual testing")
+@pytest.mark.skipif(not (_benchmarks_path / "run-time-comparison.md").is_file(), reason="needs benchmarks")
+def test_build_documentation(tmp_path):
+ bin_path = tmp_path / "bin"
+ dist_path = tmp_path / "dist"
+ meson_build(_project_root_path, tmp_path / "builddir", bin_path)
+ build_documentation(bin_path, dist_path, _examples_repo_path) #, _benchmarks_path / "run-time-comparison.md")
+
+
+if __name__ == "__main__":
+ parser = argparse.ArgumentParser(
+ prog="Convert LaTeX docs to PDFs",
+ formatter_class=argparse.RawDescriptionHelpFormatter,
+ epilog=textwrap.dedent(
+ """\
+ Create documentation for a distribution. This includes benchmarks, release notes, the
+ MODFLOW 6 input/output specification, example model documentation, supplemental info,
+ documentation for the MODFLOW 5 to 6 converter and Zonebudget 6, and several articles
+ downloaded from the USGS website. These are all written to a specified --output-path.
+ Additional LaTeX files may be included in the distribution by specifying --tex-paths.
+ """
+ ),
+ )
+ parser.add_argument("-t", "--tex-path", action="append", required=False, help="Extra LaTeX files to include")
+ parser.add_argument(
+ "-b",
+ "--bin-path",
+ required=False,
+ default=os.getcwd(),
+ help="Location of modflow6 executables",
+ )
+ parser.add_argument(
+ "-e",
+ "--examples-repo-path",
+ required=False,
+ default=str(_examples_repo_path),
+ help="Path to directory containing modflow6 example models"
+ )
+ parser.add_argument(
+ "-o",
+ "--output-path",
+ required=False,
+ default=os.getcwd(),
+ help="Location to create documentation artifacts",
+ )
+ parser.add_argument(
+ "-d",
+ "--development",
+ required=False,
+ default=False,
+ action="store_true",
+ help="Whether to build a development (e.g., nightly) rather than a full distribution"
+ )
+ parser.add_argument(
+ "-f",
+ "--force",
+ required=False,
+ default=False,
+ action="store_true",
+ help="Whether to recreate and overwrite existing artifacts"
+ )
+ args = parser.parse_args()
+ tex_paths = _default_tex_paths + ([Path(p) for p in args.tex_path] if args.tex_path else [])
+ output_path = Path(args.output_path).expanduser().absolute()
+ output_path.mkdir(parents=True, exist_ok=True)
+ bin_path = Path(args.bin_path).expanduser().absolute()
+ examples_repo_path = Path(args.examples_repo_path).expanduser().absolute()
+
+ build_documentation(
+ bin_path=bin_path,
+ output_path=output_path,
+ examples_repo_path=examples_repo_path,
+ development=args.development,
+ overwrite=args.force)
diff --git a/distribution/build_makefiles.py b/distribution/build_makefiles.py
index 113729661af..e4e86be9750 100644
--- a/distribution/build_makefiles.py
+++ b/distribution/build_makefiles.py
@@ -1,43 +1,26 @@
import os
import sys
-from contextlib import contextmanager
+from os import environ
+from pathlib import Path
import pymake
+import pytest
+from flaky import flaky
+from modflow_devtools.misc import set_dir
+from modflow_devtools.markers import requires_exe
-if sys.platform.lower() == "win32":
- ext = ".exe"
-else:
- ext = ""
-
-# check for command line arguments
-fc = None
-for idx, arg in enumerate(sys.argv):
- if arg in ("-fc",):
- fc = sys.argv[idx + 1]
-
-# if compiler not set by command line argument
-# use environmental variable or set to default compiler (gfortran)
-if fc is None:
- if "FC" in os.environ:
- fc = os.getenv("FC")
- else:
- fc = "gfortran"
-
-
-@contextmanager
-def cwd(path):
- oldpwd = os.getcwd()
- os.chdir(path)
- try:
- yield
- finally:
- os.chdir(oldpwd)
+from utils import get_modified_time, get_project_root_path
+
+_project_root_path = get_project_root_path()
+_is_windows = sys.platform.lower() == "win32"
+_ext = ".exe" if _is_windows else ""
+
+FC = environ.get("FC")
+_fc_reason = "make must be used with gfortran"
def run_makefile(target):
- assert os.path.isfile(
- "makefile"
- ), f"makefile does not exist in {os.getcwd()}"
+ assert Path("makefile").is_file(), f"makefile does not exist in {os.getcwd()}"
base_target = os.path.basename(target)
base_message = (
@@ -52,111 +35,192 @@ def run_makefile(target):
# build MODFLOW 6 with makefile
print(f"build {base_target} with makefile")
- return_code = os.system(f"make FC={fc}")
+ return_code = os.system(f"make FC={environ.get('FC', 'gfortran')}")
assert return_code == 0, f"could not make '{base_target}'." + base_message
-
- assert os.path.isfile(target), (
- f"{base_target} does not exist." + base_message
- )
-
- # clean after successful make
- print(f"clean {base_target} with makefile")
- os.system("make clean")
-
- return
+ assert os.path.isfile(target), f"{base_target} does not exist." + base_message
def build_mf6_makefile():
- with cwd(os.path.join("..", "make")):
- pm = pymake.Pymake()
- pm.target = "mf6"
- pm.srcdir = os.path.join("..", "src")
- pm.appdir = os.path.join("..", "bin")
- pm.include_subdirs = True
- pm.inplace = True
- pm.dryrun = True
- pm.makefile = True
- pm.networkx = True
-
- # build the application
- pm.build()
-
- msg = f"could not create makefile for '{pm.target}'."
- assert pm.returncode == 0, msg
-
- return
+ target = "mf6"
+ print(f"Creating makefile for {target}")
+ with set_dir(_project_root_path / "make"):
+ pymake.main(
+ srcdir=str(_project_root_path / "src"),
+ target=target,
+ appdir=str(_project_root_path / "bin"),
+ include_subdirs=True,
+ inplace=True,
+ dryrun=True,
+ makefile=True,
+ networkx=True,
+ )
def build_zbud6_makefile():
- with cwd(os.path.join("..", "utils", "zonebudget", "make")):
- pm = pymake.Pymake()
- pm.target = "zbud6"
- pm.srcdir = os.path.join("..", "src")
- pm.appdir = os.path.join("..", "..", "..", "bin")
- pm.extrafiles = os.path.join("..", "pymake", "extrafiles.txt")
- pm.inplace = True
- pm.makeclean = True
- pm.dryrun = True
- pm.makefile = True
- pm.networkx = True
-
- # build the application
- pm.build()
-
- msg = f"could not create makefile for '{pm.target}'."
- assert pm.returncode == 0, msg
+ target = "zbud6"
+ util_path = _project_root_path / "utils" / "zonebudget"
+ print(f"Creating makefile for {target}")
+ with set_dir(util_path / "make"):
+ returncode = pymake.main(
+ srcdir=str(util_path / "src"),
+ target=target,
+ appdir=str(_project_root_path / "bin"),
+ extrafiles=str(util_path / "pymake" / "extrafiles.txt"),
+ inplace=True,
+ include_subdirs=True,
+ makefile=True,
+ dryrun=True,
+ networkx=True,
+ )
- return
+ assert returncode == 0, f"Failed to create makefile for '{target}'"
def build_mf5to6_makefile():
- with cwd(os.path.join("..", "utils", "mf5to6", "make")):
- srcdir = os.path.join("..", "src")
- target = os.path.join("..", "..", "..", "bin", "mf5to6")
- extrafiles = os.path.join("..", "pymake", "extrafiles.txt")
+ target = "mf5to6"
+ util_path = _project_root_path / "utils" / "mf5to6"
+ print(f"Creating makefile for {target}")
+ with set_dir(util_path / "make"):
+ extrafiles = str(util_path / "pymake" / "extrafiles.txt")
# build modflow 5 to 6 converter
returncode = pymake.main(
- srcdir,
- target,
+ srcdir=str(util_path / "src"),
+ target=target,
+ appdir=str(_project_root_path / "bin"),
include_subdirs=True,
extrafiles=extrafiles,
inplace=True,
dryrun=True,
makefile=True,
networkx=True,
- fflags="-fall-intrinsics",
+ fflags=["-fall-intrinsics"],
)
- msg = f"could not create makefile for '{os.path.basename(target)}'."
- assert returncode == 0, msg
+ assert returncode == 0, f"Failed to create makefile for '{target}'"
- return
+@flaky
+@pytest.mark.skipif(FC == "ifort", reason=_fc_reason)
+def test_build_mf6_makefile():
+ makefile_paths = [
+ _project_root_path / "make" / "makefile",
+ _project_root_path / "make" / "makedefaults"
+ ]
+ makefile_mtimes = [p.stat().st_mtime for p in makefile_paths]
-def test_build_mf6_wmake():
- target = os.path.join("..", "bin", f"mf6{ext}")
- with cwd(os.path.join("..", "make")):
- run_makefile(target)
+ try:
+ build_mf6_makefile()
+
+ # check files were modified
+ for p, t in zip(makefile_paths, makefile_mtimes):
+ assert p.stat().st_mtime > t
+ finally:
+ for p in makefile_paths:
+ os.system(f"git restore {p}")
+
+
+@flaky
+@pytest.mark.skipif(FC == "ifort", reason=_fc_reason)
+def test_build_zbud6_makefile():
+ util_path = _project_root_path / "utils" / "zonebudget"
+ makefile_paths = [
+ util_path / "make" / "makefile",
+ util_path / "make" / "makedefaults",
+ ]
+ makefile_mtimes = [p.stat().st_mtime for p in makefile_paths]
+
+ try:
+ build_zbud6_makefile()
+
+ # check files were modified
+ for p, t in zip(makefile_paths, makefile_mtimes):
+ assert p.stat().st_mtime > t
+ finally:
+ for p in makefile_paths:
+ os.system(f"git restore {p}")
+
+
+@flaky
+@pytest.mark.skipif(FC == "ifort", reason=_fc_reason)
+def test_build_mf5to6_makefile():
+ util_path = _project_root_path / "utils" / "mf5to6"
+ makefile_paths = [
+ util_path / "make" / "makefile",
+ util_path / "make" / "makedefaults"
+ ]
+ makefile_mtimes = [p.stat().st_mtime for p in makefile_paths]
+
+ try:
+ build_mf5to6_makefile()
+
+ # check files were modified
+ for p, t in zip(makefile_paths, makefile_mtimes):
+ assert p.stat().st_mtime > t
+ finally:
+ for p in makefile_paths:
+ os.system(f"git restore {p}")
-def test_build_zbud6_wmake():
- target = os.path.join("..", "..", "..", "bin", f"zbud6{ext}")
- with cwd(os.path.join("..", "utils", "zonebudget", "make")):
- run_makefile(target)
+@flaky
+@requires_exe("make")
+@pytest.mark.skipif(FC == "ifort", reason=_fc_reason)
+def test_build_mf6_with_make():
+ target = _project_root_path / "bin" / f"mf6{_ext}"
+ mtime = get_modified_time(target)
+ try:
+ with set_dir(_project_root_path / "make"):
+ run_makefile(target)
+
+ # check executable was modified
+ assert target.stat().st_mtime > mtime
+ finally:
+ # clean after successful make
+ print(f"clean {target} with makefile")
+ os.system("make clean")
+
+
+@flaky
+@requires_exe("make")
+@pytest.mark.skipif(FC == "ifort", reason=_fc_reason)
+def test_build_zbud6_with_make():
+ target = _project_root_path / "bin" / f"zbud6{_ext}"
+ util_path = _project_root_path / "utils" / "zonebudget"
+ mtime = get_modified_time(target)
+
+ try:
+ with set_dir(util_path / "make"):
+ run_makefile(target)
-def test_build_mf5to6_wmake():
- target = os.path.join("..", "..", "..", "bin", f"mf5to6{ext}")
- with cwd(os.path.join("..", "utils", "mf5to6", "make")):
- run_makefile(target)
+ # check executable was modified
+ assert target.stat().st_mtime > mtime
+ finally:
+ print(f"clean {target} with makefile")
+ os.system("make clean")
+
+
+@flaky
+@requires_exe("make")
+@pytest.mark.skipif(FC == "ifort", reason=_fc_reason)
+def test_build_mf5to6_with_make():
+ target = _project_root_path / "bin" / f"mf5to6{_ext}"
+ util_path = _project_root_path / "utils" / "mf5to6"
+ mtime = get_modified_time(target)
+
+ try:
+ with set_dir(util_path / "make"):
+ run_makefile(target)
+
+ # check executable was modified
+ assert target.stat().st_mtime > mtime
+ finally:
+ print(f"clean {target} with makefile")
+ os.system("make clean")
if __name__ == "__main__":
build_mf6_makefile()
build_zbud6_makefile()
build_mf5to6_makefile()
- # test_build_mf6_wmake()
- # test_build_zbud6_wmake()
- # test_build_mf5to6_wmake()
diff --git a/distribution/build_nightly.py b/distribution/build_nightly.py
deleted file mode 100644
index a59749dba60..00000000000
--- a/distribution/build_nightly.py
+++ /dev/null
@@ -1,131 +0,0 @@
-import os
-import pathlib
-import platform
-import shutil
-import sys
-
-import flopy
-import pymake
-
-# add path to build script in autotest directory and reuse mf6 build scripts
-sys.path.append(os.path.join("..", "autotest"))
-from build_exes import meson_build
-
-# make sure exe extension is used on windows
-eext = ""
-soext = ".so"
-if sys.platform.lower() == "win32":
- eext = ".exe"
- soext = ".dll"
-
-bin_path = os.path.abspath(os.path.join("..", "bin"))
-example_path = os.path.abspath(os.path.join("temp"))
-zip_path = os.path.abspath(os.path.join("temp_zip"))
-
-
-def get_zipname():
- zipname = sys.platform.lower()
- if zipname == "linux2":
- zipname = "linux"
- elif zipname == "darwin":
- zipname = "mac"
- elif zipname == "win32":
- if platform.architecture()[0] == "64bit":
- zipname = "win64"
- return zipname
-
-
-def relpath_fallback(pth):
- try:
- # throws ValueError on Windows if pth is on a different drive
- return os.path.relpath(pth)
- except ValueError:
- return os.path.abspath(pth)
-
-
-def create_dir(pth):
- # remove pth directory if it exists
- if os.path.exists(pth):
- print(f"removing... {os.path.abspath(pth)}")
- shutil.rmtree(pth)
-
- # create pth directory
- print(f"creating... {os.path.abspath(pth)}")
- os.makedirs(pth)
-
- msg = f"could not create... {os.path.abspath(pth)}"
- assert os.path.exists(pth), msg
-
-
-def test_update_version():
- from make_release import update_version
-
- update_version()
-
-
-def test_create_dirs():
- for pth in (
- bin_path,
- zip_path,
- ):
- create_dir(pth)
-
-
-def test_nightly_build():
- meson_build()
-
- # test if there are any executable files to zip
- binpth_files = [
- os.path.join(bin_path, f)
- for f in os.listdir(bin_path)
- if os.path.isfile(os.path.join(bin_path, f))
- and shutil.which(os.path.join(bin_path, f), mode=os.X_OK)
- and pathlib.Path(os.path.join(bin_path, f)).suffix
- not in (".a", ".lib", ".pdb")
- ]
- if len(binpth_files) < 1:
- raise FileNotFoundError(
- f"No executable files present in {os.path.abspath(bin_path)}.\n"
- + f"Available files:\n [{', '.join(os.listdir(bin_path))}]"
- )
- else:
- print(f"Files to zip:\n [{', '.join(binpth_files)}]")
-
- zip_pth = os.path.abspath(os.path.join(zip_path, get_zipname() + ".zip"))
- print(f"Zipping files to '{zip_pth}'")
- success = pymake.zip_all(zip_pth, file_pths=binpth_files)
- assert success, f"Could not create '{zip_pth}'"
-
-
-def test_update_mf6io():
- from mkdist import update_mf6io_tex_files
-
- # build simple model
- name = "mymodel"
- ws = os.path.join(example_path, name)
- exe_name = "mf6"
- if sys.platform.lower() == "win32":
- exe_name += ".exe"
- exe_name = os.path.join(bin_path, exe_name)
- sim = flopy.mf6.MFSimulation(sim_name=name, sim_ws=ws, exe_name=exe_name)
- tdis = flopy.mf6.ModflowTdis(sim)
- ims = flopy.mf6.ModflowIms(sim)
- gwf = flopy.mf6.ModflowGwf(sim, modelname=name, save_flows=True)
- dis = flopy.mf6.ModflowGwfdis(gwf, nrow=10, ncol=10)
- ic = flopy.mf6.ModflowGwfic(gwf)
- npf = flopy.mf6.ModflowGwfnpf(gwf, save_specific_discharge=True)
- chd = flopy.mf6.ModflowGwfchd(
- gwf, stress_period_data=[[(0, 0, 0), 1.0], [(0, 9, 9), 0.0]]
- )
- oc = flopy.mf6.ModflowGwfoc(gwf, printrecord=[("BUDGET", "ALL")])
- sim.write_simulation()
-
- # update the mf6io simulation output for LaTeX
- update_mf6io_tex_files(None, exe_name, expth=ws)
-
-
-if __name__ == "__main__":
- test_update_version()
- test_create_dirs()
- test_nightly_build()
- test_update_mf6io()
diff --git a/distribution/check_dist.py b/distribution/check_dist.py
new file mode 100644
index 00000000000..aed5d3ac9de
--- /dev/null
+++ b/distribution/check_dist.py
@@ -0,0 +1,103 @@
+import platform
+import subprocess
+from os import environ
+from pathlib import Path
+
+import pytest
+
+_system = platform.system()
+_eext = ".exe" if _system == "Windows" else ""
+_soext = ".dll" if _system == "Windows" else ".so" if _system == "Linux" else ".dylib"
+_scext = ".bat" if _system == "Windows" else ".sh"
+_fc = environ.get("FC", None)
+
+
+@pytest.fixture
+def dist_dir_path(request):
+ def skip():
+ pytest.skip(f"no distribution directory found at {path}")
+
+ path = request.config.getoption("--path")
+ if not path:
+ skip()
+
+ path = Path(path).expanduser().absolute()
+ if not path.is_dir():
+ skip()
+
+ return path
+
+
+def test_sources(dist_dir_path):
+ assert (dist_dir_path / "src").is_dir()
+ assert (dist_dir_path / "src" / "mf6.f90").is_file()
+
+
+@pytest.mark.skipif(not _fc, reason="needs Fortran compiler")
+def test_makefiles(dist_dir_path):
+ assert (dist_dir_path / "make" / "makefile").is_file()
+ assert (dist_dir_path / "make" / "makedefaults").is_file()
+ assert (dist_dir_path / "utils" / "zonebudget" / "make" / "makefile").is_file()
+ assert (dist_dir_path / "utils" / "zonebudget" / "make" / "makedefaults").is_file()
+ assert (dist_dir_path / "utils" / "mf5to6" / "make" / "makefile").is_file()
+ assert (dist_dir_path / "utils" / "mf5to6" / "make" / "makedefaults").is_file()
+
+ # makefiles can't be used on Windows with ifort compiler
+ if _system != 'Windows' or _fc != 'ifort':
+ print(subprocess.check_output("make", cwd=dist_dir_path / "make", shell=True))
+ print(subprocess.check_output("make", cwd=dist_dir_path / "utils" / "zonebudget" / "make", shell=True))
+ print(subprocess.check_output("make", cwd=dist_dir_path / "utils" / "mf5to6" / "make", shell=True))
+
+
+def test_msvs(dist_dir_path):
+ assert (dist_dir_path / "msvs" / "mf6.sln").is_file()
+ assert (dist_dir_path / "msvs" / "mf6.vfproj").is_file()
+ assert (dist_dir_path / "msvs" / "mf6bmi.sln").is_file()
+ assert (dist_dir_path / "msvs" / "mf6bmi.vfproj").is_file()
+ assert (dist_dir_path / "msvs" / "mf6core.vfproj").is_file()
+
+
+def test_docs(dist_dir_path):
+ assert (dist_dir_path / "doc" / "mf6io.pdf").is_file()
+ assert (dist_dir_path / "doc" / "release.pdf").is_file()
+ assert (dist_dir_path / "doc" / "mf5to6.pdf").is_file()
+ assert (dist_dir_path / "doc" / "zonebudget.pdf").is_file()
+ assert (dist_dir_path / "doc" / "mf6suptechinfo.pdf").is_file()
+ assert (dist_dir_path / "doc" / "mf6examples.pdf").is_file()
+
+ for pub in [
+ "tm6a55",
+ "tm6a56",
+ "tm6a57",
+ "tm6a61",
+ "tm6a62",
+ ]:
+ assert (dist_dir_path / "doc" / f"{pub}.pdf").is_file()
+
+
+def test_examples(dist_dir_path):
+ examples_path = dist_dir_path / "examples"
+ example_path = next(examples_path.iterdir(), None)
+ assert example_path
+ output = ' '.join(subprocess.check_output([str(example_path / f"run{_scext}")], cwd=example_path).decode().split())
+ print(output)
+
+
+def test_binaries(dist_dir_path):
+ bin_path = dist_dir_path / "bin"
+ assert (bin_path / f"mf6{_eext}").is_file()
+ assert (bin_path / f"zbud6{_eext}").is_file()
+ assert (bin_path / f"mf5to6{_eext}").is_file()
+ assert (bin_path / f"libmf6{_soext}").is_file()
+
+ output = ' '.join(subprocess.check_output([str(bin_path / f"mf6{_eext}"), "-v"]).decode().split())
+ assert output.startswith("mf6")
+ assert output.lower().count("release") == 1
+ assert output.lower().count("candidate") <= 1
+
+ version = output.lower().rpartition(":")[2].rpartition("release")[0].strip()
+ v_split = version.split(".")
+ assert len(v_split) == 3
+ assert all(s.isdigit() for s in v_split)
+
+ # TODO check utils
diff --git a/distribution/conftest.py b/distribution/conftest.py
new file mode 100644
index 00000000000..b9979c03ae1
--- /dev/null
+++ b/distribution/conftest.py
@@ -0,0 +1,10 @@
+from pathlib import Path
+
+from update_version import Version
+
+_project_root_path = Path(__file__).parent.parent
+_dist_dir_path = Path(__file__).parent.parent.parent / f"mf{str(Version.from_file(_project_root_path / 'version.txt'))}"
+
+
+def pytest_addoption(parser):
+ parser.addoption("-P", "--path", action="store", default=str(_dist_dir_path))
diff --git a/distribution/evaluate_run_times.py b/distribution/evaluate_run_times.py
deleted file mode 100644
index acc3d397401..00000000000
--- a/distribution/evaluate_run_times.py
+++ /dev/null
@@ -1,401 +0,0 @@
-import os
-import shutil
-import subprocess
-import sys
-from multiprocessing import Pool
-
-import flopy
-import pymake
-
-# Set VERIFY
-VERIFY = False
-
-# add path to build script in autotest directory and reuse mf6 build scripts
-sys.path.append(os.path.join("..", "autotest"))
-from build_exes import meson_build
-
-github_repo = "MODFLOW-USGS/modflow6"
-working_dir = "./temp/"
-base_build_dir = os.path.join("..", "bin")
-examples_dir = "examples"
-app_ext = ""
-if sys.platform == "win32":
- app_ext = ".exe"
-
-
-def _get_version():
- version = None
- for idx, arg in enumerate(sys.argv):
- if arg == "--version":
- version = sys.argv[idx + 1]
- break
- if version is None:
- version = pymake.repo_latest_version(
- github_repo=github_repo, verify=VERIFY
- )
- return version
-
-
-def _del_version():
- i0 = None
- for idx, arg in enumerate(sys.argv):
- if arg == "--version":
- i0 = idx
- break
- if i0 is not None:
- del sys.argv[i0 : i0 + 2]
-
-
-def _is_dryrun():
- dryrun = False
- for idx, arg in enumerate(sys.argv):
- if arg == "--dryrun":
- dryrun = True
- break
- return dryrun
-
-
-def _get_download_dir():
- return f"mf{_get_version()}"
-
-
-def _get_previous_version():
- version = _get_version()
- url = (
- f"https://github.com/{github_repo}"
- + f"/releases/download/{version}/mf{version}.zip"
- )
- if not _is_dryrun():
- pymake.download_and_unzip(
- url,
- pth=working_dir,
- verbose=True,
- verify=VERIFY,
- )
-
- return version, f"mf{version}"
-
-
-def build_previous_version(pth):
- _del_version()
- appdir = os.path.abspath(os.path.join(base_build_dir, "rebuilt"))
- if not _is_dryrun():
- meson_build(dir_path=pth, libdir=appdir)
-
- return os.path.abspath(os.path.join(appdir, f"mf6{app_ext}"))
-
-
-def build_current_version():
- if not _is_dryrun():
- meson_build()
- return os.path.abspath(os.path.join(base_build_dir, f"mf6{app_ext}"))
-
-
-def get_mf6_cmdargs(app, argv, text="mf6:", verbose=False):
- return_text = None
- proc = subprocess.Popen(
- argv,
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE,
- cwd=os.path.dirname(app),
- )
- result, error = proc.communicate()
- if result is not None:
- c = result.decode("utf-8")
- c = c.rstrip("\r\n")
- if verbose:
- print(f"{c}")
- if text in c:
- idx0 = c.index(text) + len(text) + 1
- return_text = c[idx0:].strip()
- return return_text
-
-
-def get_mf6_version(app, verbose=False):
- text = "mf6:"
- if app.endswith(".exe"):
- text = "mf6.exe:"
- version = get_mf6_cmdargs(app, [app, "-v"], text=text, verbose=verbose)
- if version is not None:
- version = version.split()[0]
- if verbose:
- print(f"version: {version}")
- return version
-
-
-def get_mf6_compiler(app, verbose=False):
- text = "mf6:"
- if app.endswith(".exe"):
- text = "mf6.exe:"
- compiler = get_mf6_cmdargs(app, [app, "-c"], text=text, verbose=verbose)
- if verbose and compiler is not None:
- print(f"compiler: {compiler}")
- return compiler
-
-
-def revert_files(app, example):
- replace_dict = {
- ".ims": {
- (6, 1, 1): ("dvclose", "hclose"),
- }
- }
- extensions = list(replace_dict.keys())
-
- # get current version
- version = get_mf6_version(app)
- if version is not None:
- version = tuple([int(v) for v in version.split(".")])
-
- # get a list of files in example directory
- files = os.listdir(example)
-
- for file in files:
- _, extension = os.path.splitext(file)
- if extension in extensions:
- key = extension.lower()
- for v, replace in replace_dict[key].items():
- if version < v:
- fpth = os.path.join(example, file)
- with open(fpth, "r") as f:
- lines = f.readlines()
- with open(fpth, "w") as f:
- for line in lines:
- if replace[0] in line.lower():
- line = line.lower().replace(
- replace[0], replace[1]
- )
- f.write(line)
- return
-
-
-def get_elapsed_time(buff, tag="Elapsed run time:"):
- elt_str = ""
- for line in buff:
- if tag in line:
- i0 = line.index(":")
- elt_str = line[i0 + 1 :].strip()
- return elt_str
-
-
-def time_factor(time_unit):
- if "hours" in time_unit:
- factor = 60.0 * 60.0
- elif "minutes" in time_unit:
- factor = 60.0
- else:
- factor = 1.0
- return factor
-
-
-def elapsed_string_to_real(elt_str):
- time_sec = 0.0
- t = elt_str.split()
- for idx in range(0, len(t), 2):
- t0 = float(t[idx])
- time_sec += t0 * time_factor(t[idx + 1].lower())
- return time_sec
-
-
-def elapsed_real_to_string(elt):
- if elt > 60.0:
- time_min = int(elt / 60.0)
- time_sec = elt % 60.0
- elt_str = f"{time_min} Minutes, "
- else:
- time_sec = elt
- elt_str = ""
- return elt_str + f"{time_sec:.3f} Seconds"
-
-
-def get_examples():
- examples_repo = "MODFLOW-USGS/modflow6-examples"
- version = pymake.repo_latest_version(
- github_repo=examples_repo, verify=VERIFY
- )
- print(f"current examples version: {version}")
- url = (
- f"https://github.com/{examples_repo}"
- + f"/releases/download/{version}/modflow6-examples.zip"
- )
- pth = os.path.join(working_dir, examples_dir)
- if not _is_dryrun():
- pymake.download_and_unzip(url, pth=pth, verbose=True, verify=VERIFY)
- example_files = []
- for root, dirs, files in os.walk(pth):
- fpth = os.path.join(root, "mfsim.nam")
- if os.path.exists(fpth):
- example_files.append(os.path.abspath(root))
- return sorted(example_files)
-
-
-def run_function(app, example):
- return flopy.run_model(
- app,
- None,
- model_ws=example,
- silent=True,
- report=True,
- )
-
-
-def run_model(app, app0, example, fmd, silent=True, pool=False):
- t_out = 0.0
- t0_out = 0.0
-
- id0 = example.index(examples_dir) + len(examples_dir) + 1
- test = example[id0:]
- print(f"Running simulation: {test}")
- line = f"| {test} |"
-
- # copy directory for previous application
- prev_dir = os.path.join(example, "previous")
- if os.path.isdir(prev_dir):
- shutil.rmtree(prev_dir)
- print(f"Copying {example} ==> {prev_dir}")
- shutil.copytree(example, prev_dir)
-
- # modify input files to use deprecated keywords in directory
- # used with the previous application
- revert_files(app0, prev_dir)
-
- # # run the current application
- # success, buff = run_function(app, example)
- #
- # # run the previous application
- # success0, buff0 = run_function(app0, prev_dir)
-
- # processing options
- args = (
- (app, example),
- (app0, prev_dir),
- )
-
- # Multi-processing using Pool
- # initialize the pool
- pool = Pool(processes=2)
-
- # run the models
- results = [pool.apply_async(run_function, args=arg) for arg in args]
-
- # close the pool
- pool.close()
-
- # set variables for processing
- success, buff = results[0].get()
- success0, buff0 = results[1].get()
-
- if success:
- elt = get_elapsed_time(buff)
- line += f" {elt} |"
- else:
- print(f"Failure for current app with example: {test}")
- for b in buff:
- print(b)
- line += " -- |"
-
- if success0:
- elt0 = get_elapsed_time(buff0)
- line += f" {elt0} |"
- else:
- print(f"Failure for previous app with example: {test}")
- line += " -- |"
-
- if success and success0:
- t = elapsed_string_to_real(elt)
- t0 = elapsed_string_to_real(elt0)
- t_out += t
- t0_out += t0
- pd = (t - t0) / t0
- line += f" {pd:.2%} |"
- else:
- line += " -- |"
-
- fmd.write(f"{line}\n")
- fmd.flush()
-
- # clean up previous directory
- if os.path.isdir(prev_dir):
- shutil.rmtree(prev_dir)
-
- return success, t_out, t0_out
-
-
-def cleanup():
- b = None
- if not _is_dryrun():
- b = True
- return
-
-
-if __name__ == "__main__":
- _get_previous_version()
-
- # compile the previous version
- pth = os.path.join(working_dir, _get_download_dir())
- previous_app = build_previous_version(pth)
-
- # compile the current version
- current_app = build_current_version()
- print(f"previous app: {previous_app}\ncurrent app: {current_app}")
-
- # open markdown table
- f = open("run-time-comparison.md", "w")
-
- # get version numbers and write header
- v = get_mf6_version(current_app)
- v0 = get_mf6_version(previous_app)
- line = "### Comparison of simulation run times\n\n"
- line += (
- "Comparison of run times of the current version of "
- + f"MODFLOW 6 ({v}) "
- + f"to the previous version ({v0}). "
- + "The current example models available from the "
- + "[MODFLOW 6 Examples GitHub Repository]"
- + "(https://github.com/MODFLOW-USGS/modflow6-examples) are "
- + "used to compare run times. Simulations that fail are "
- + "indicated by '--'. The percent difference, where calculated, "
- + "is relative to the simulation run time for the previous "
- + "version. Percent differences for example problems with "
- + "short run times (less than 30 seconds) may not be significant.\n\n"
- + f"{get_mf6_compiler(current_app, verbose=True)}.\n\n\n"
- )
- line += "| Example Problem "
- line += f"| Current Version {v} "
- line += f"| Previous Version {v0} "
- line += "| Percent difference |\n"
- line += "| :---------- | :----------: | :----------: | :----------: |\n"
- f.write(line)
-
- #
- total_t = 0.0
- total_t0 = 0.0
-
- # get examples
- example_dirs = get_examples()
-
- # run models
- for idx, example in enumerate(example_dirs):
- success, t, t0 = run_model(
- current_app,
- previous_app,
- example,
- f,
- silent=False,
- )
- assert success, f"{example} run failed"
- total_t += t
- total_t0 += t0
-
- # add total
- pd = (total_t - total_t0) / total_t0
-
- # add final line
- line = f"| Total simulation time |"
- line += f" {elapsed_real_to_string(total_t)} |"
- line += f" {elapsed_real_to_string(total_t0)} |"
- line += f" {pd:.2%} |"
- f.write(f"{line}\n")
- f.flush()
-
- # close the markdown file
- f.close()
diff --git a/distribution/make_release.py b/distribution/make_release.py
deleted file mode 100644
index aa2645e965e..00000000000
--- a/distribution/make_release.py
+++ /dev/null
@@ -1,510 +0,0 @@
-#!/usr/bin/python
-
-"""
-make_release.py: Update files in this modflow6 repository according to relase information.
-
-This script is used to update several files in the modflow6 repository, including:
-
- ../version.txt
- ../doc/version.tex
- ../README.md
- ../DISCLAIMER.md
- ../code.json
- ../src/Utiliteis/version.f90
-
-Command line switches for overriding settings include:
-
- --version
- --developMode
- --isApproved
- --releaseCandidate
-
-Information in these files include version number (major.minor.micro), build date, whether or not
-the release is a release candidate or an actual release, whether the source code should be compiled
-in develop mode or in release mode, and the approval status.
-
-This information is determined using the following logic:
-
- If the branch name is master or release or the --isApproved argument is specified,
- then it assumes this version is approved, which will result in use of the approved disclaimer.
- Otherwise it is assumed to be provisional.
-
- If the version is approved (as determined by the previous logic) then the distribution is
- not marked as a release candidate unless it is forced to be so using the --releaseCandidate
- command line argument.
-
- The version number is read in from ../version.txt, which contains major, minor, and micro version
- numbers. These numbers will be propagated through the source code, latex files, markdown files,
- etc. The version numbers can be overridden using the command line argument --version major.minor.macro.
-
- Develop mode is set to 0 if the distribution is approved or it can be explicitly set using
- the --developMode command line argument.
-
-Once this script is run, these updated files will be used in compilation, latex documents, and
-other parts of the repo to mark the overall status.
-
-"""
-
-
-import datetime
-import json
-import os
-import shutil
-import subprocess
-import sys
-from collections import OrderedDict
-
-# update files and paths so that there are the same number of
-# path and file entries in the paths and files list. Enter '.'
-# as the path if the file is in the root repository directory
-paths = ["../", "../doc", "../", "../", "../", "../src/Utilities"]
-files = [
- "version.txt",
- "version.tex",
- "README.md",
- "DISCLAIMER.md",
- "code.json",
- "version.f90",
-]
-
-# check that there are the same number of entries in files and paths
-if len(paths) != len(files):
- msg = (
- "The number of entries in paths "
- + f"({len(paths)}) must equal "
- + f"the number of entries in files ({len(files)})"
- )
- assert False, msg
-
-prod = "MODFLOW 6"
-repo = "MODFLOW-USGS/modflow6.git"
-
-now = datetime.datetime.now()
-
-approved = """Disclaimer
-----------
-
-This software has been approved for release by the U.S. Geological Survey
-(USGS). Although the software has been subjected to rigorous review, the USGS
-reserves the right to update the software as needed pursuant to further analysis
-and review. No warranty, expressed or implied, is made by the USGS or the U.S.
-Government as to the functionality of the software and related material nor
-shall the fact of release constitute any such warranty. Furthermore, the
-software is released on condition that neither the USGS nor the U.S. Government
-shall be held liable for any damages resulting from its authorized or
-unauthorized use.
-"""
-
-preliminary = """Disclaimer
-----------
-
-This software is preliminary or provisional and is subject to revision. It is
-being provided to meet the need for timely best science. The software has not
-received final approval by the U.S. Geological Survey (USGS). No warranty,
-expressed or implied, is made by the USGS or the U.S. Government as to the
-functionality of the software and related material nor shall the fact of release
-constitute any such warranty. The software is provided on the condition that
-neither the USGS nor the U.S. Government shall be held liable for any damages
-resulting from the authorized or unauthorized use of the software.
-"""
-
-approvedfmt = ''' character(len=*), parameter :: FMTDISCLAIMER = &
- "(/,&
- &'This software has been approved for release by the U.S. Geological ',/,&
- &'Survey (USGS). Although the software has been subjected to rigorous ',/,&
- &'review, the USGS reserves the right to update the software as needed ',/,&
- &'pursuant to further analysis and review. No warranty, expressed or ',/,&
- &'implied, is made by the USGS or the U.S. Government as to the ',/,&
- &'functionality of the software and related material nor shall the ',/,&
- &'fact of release constitute any such warranty. Furthermore, the ',/,&
- &'software is released on condition that neither the USGS nor the U.S. ',/,&
- &'Government shall be held liable for any damages resulting from its ',/,&
- &'authorized or unauthorized use. Also refer to the USGS Water ',/,&
- &'Resources Software User Rights Notice for complete use, copyright, ',/,&
- &'and distribution information.',/)"'''
-
-preliminaryfmt = ''' character(len=*), parameter :: FMTDISCLAIMER = &
- "(/,&
- &'This software is preliminary or provisional and is subject to ',/,&
- &'revision. It is being provided to meet the need for timely best ',/,&
- &'science. The software has not received final approval by the U.S. ',/,&
- &'Geological Survey (USGS). No warranty, expressed or implied, is made ',/,&
- &'by the USGS or the U.S. Government as to the functionality of the ',/,&
- &'software and related material nor shall the fact of release ',/,&
- &'constitute any such warranty. The software is provided on the ',/,&
- &'condition that neither the USGS nor the U.S. Government shall be held ',/,&
- &'liable for any damages resulting from the authorized or unauthorized ',/,&
- &'use of the software.',/)"'''
-
-
-def get_disclaimer():
- is_approved = get_is_approved()
- if is_approved:
- disclaimer = approved
- else:
- disclaimer = preliminary
- return is_approved, disclaimer
-
-
-def get_is_approved():
-
- is_approved = None
-
- # override if --isApproved argument was set
- for idx, arg in enumerate(sys.argv):
- if arg == "--isApproved":
- is_approved = True
-
- if is_approved is None:
- # get current branch
- branch = get_branch()
- if "release" in branch.lower() or "master" in branch.lower():
- is_approved = True
- else:
- is_approved = False
-
- return is_approved
-
-
-def get_disclaimerfmt():
-
- is_approved = get_is_approved()
-
- if is_approved:
- disclaimer = approvedfmt
- else:
- disclaimer = preliminaryfmt
-
- return is_approved, disclaimer
-
-
-def get_branch(verbose=False):
- branch = None
-
- # determine if branch defined on command line
- for argv in sys.argv:
- if "master" in argv:
- branch = "master"
- elif "develop" in argv.lower():
- branch = "develop"
-
- if branch is None:
- try:
- # determine current branch
- proc = subprocess.Popen(
- ("git", "branch"),
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE,
- cwd=".",
- )
- stdout, stderr = proc.communicate()
- if stdout:
- for line in stdout.decode("utf-8").splitlines():
- if "* " in line:
- branch = line.replace("* ", "")
- if verbose:
- print(line)
- if verbose:
- print(f"On Branch: {branch}\n")
- if stderr:
- print(f"Errors:\n{stderr.decode('utf-8')}")
-
- if branch is not None:
- if "master" in branch or "release" in branch:
- branch = "master"
- else:
- branch = "develop"
-
- except:
- msg = "Could not determine current branch. Is git installed?"
- raise ValueError(msg)
-
- return branch
-
-
-def get_version_str(v0, v1, v2):
- version_type = (f"{v0}", f"{v1}", f"{v2}")
- version = ".".join(version_type)
- return version
-
-
-def get_tag(v0, v1, v2):
- tag_type = (f"{v0}", f"{v1}", f"{v2}")
- tag = ".".join(tag_type)
- return tag
-
-
-def update_version():
- vmajor = None
- vminor = None
- vmicro = None
-
- # override if --version argument was set
- for idx, arg in enumerate(sys.argv):
- if arg == "--version":
- t = sys.argv[idx + 1]
- t = t.split(".")
- vmajor = int(t[0])
- vminor = int(t[1])
- vmicro = int(t[2])
-
- try:
- fpth = os.path.join(paths[0], files[0])
- lines = [line.rstrip("\n") for line in open(fpth, "r")]
- if vmajor is None:
- for line in lines:
- t = line.split()
- if "major =" in line:
- vmajor = int(t[2])
- elif "minor =" in line:
- vminor = int(t[2])
- elif "micro =" in line:
- vmicro = int(t[2])
- except:
- msg = "There was a problem updating the version file"
- raise IOError(msg)
-
- try:
- # write new version file
- f = open(fpth, "w")
- f.write(
- f"# {prod} version file automatically "
- + f"created using...{os.path.basename(__file__)}\n"
- )
- f.write("# created on..." + f"{now.strftime('%B %d, %Y %H:%M:%S')}\n")
- f.write("\n")
- f.write(f"major = {vmajor}\n")
- f.write(f"minor = {vminor}\n")
- f.write(f"micro = {vmicro}\n")
- f.write("__version__ = '{:d}.{:d}.{:d}'.format(major, minor, micro)\n")
- f.close()
- print("Successfully updated version.py")
-
- # update version.py in doc directory
- shutil.copyfile(
- os.path.abspath(fpth),
- os.path.join(
- "..", "doc", os.path.basename(fpth.replace(".txt", ".py"))
- ),
- )
-
- # update latex version file
- version = get_version_str(vmajor, vminor, vmicro)
- version_type = get_version_type(get_branch()).strip()
- if len(version_type) > 0:
- version += f"---{version_type}"
- pth = os.path.join(paths[1], files[1])
- f = open(pth, "w")
- line = "\\newcommand{\\modflowversion}{mf" + f"{version}" + "}"
- f.write(f"{line}\n")
- line = (
- "\\newcommand{\\modflowdate}{"
- + f"{now.strftime('%B %d, %Y')}"
- + "}"
- )
- f.write(f"{line}\n")
- line = (
- "\\newcommand{\\currentmodflowversion}"
- + "{Version \\modflowversion---\\modflowdate}"
- )
- f.write(f"{line}\n")
- f.close()
- print(f"Succesfully updated {files[1]}")
- except:
- msg = "There was a problem updating the version file"
- raise IOError(msg)
-
- # update version.f90
- update_mf6_version(vmajor, vminor, vmicro)
-
- # update README.md with new version information
- update_readme_markdown(vmajor, vminor, vmicro)
-
- # update code.json
- update_codejson(vmajor, vminor, vmicro)
-
-
-def get_version_type(branch):
-
- # override if --releaseCandidate argument was set
- version_type = None
- for idx, arg in enumerate(sys.argv):
- if arg == "--releaseCandidate":
- version_type = " release candidate "
-
- if version_type is None:
- is_approved = get_is_approved()
- version_type = " "
- if not is_approved:
- version_type = " release candidate "
-
- return version_type
-
-
-def get_develop_mode(branch):
-
- # override if --releaseCandidate argument was set
- idevelop = None
- for idx, arg in enumerate(sys.argv):
- if arg == "--developMode":
- ival = sys.argv[idx + 1]
- ival = int(ival)
- idevelop = ival
-
- if idevelop is None:
- idevelop = 0
- is_approved = get_is_approved()
- if not is_approved:
- idevelop = 1
-
- return idevelop
-
-
-def update_mf6_version(vmajor, vminor, vmicro):
-
- # get branch
- branch = get_branch()
-
- # create version
- version = get_tag(vmajor, vminor, vmicro)
-
- # get develop mode
- idevelopmode = get_develop_mode(branch)
-
- # get version type
- version_type = get_version_type(branch)
-
- # develop date text
- sdate = now.strftime("%m/%d/%Y")
-
- # create disclaimer text
- is_approved, disclaimerfmt = get_disclaimerfmt()
-
- # read version.f90 into memory
- fpth = os.path.join(paths[5], files[5])
- with open(fpth, "r") as file:
- lines = [line.rstrip() for line in file]
-
- # rewrite version.f90
- skip = False
- f = open(fpth, "w")
- for line in lines:
- # skip all of the disclaimer text
- if skip:
- if ',/)"' in line:
- skip = False
- continue
- elif ":: IDEVELOPMODE =" in line:
- line = (
- " integer(I4B), parameter :: "
- + f"IDEVELOPMODE = {idevelopmode}"
- )
- elif ":: VERSION =" in line:
- line = (
- " character(len=40), parameter :: "
- + "VERSION = '{}{}{}'".format(version, version_type, sdate)
- )
- elif ":: FMTDISCLAIMER =" in line:
- line = disclaimerfmt
- skip = True
- f.write(f"{line}\n")
- f.close()
-
- return
-
-
-def update_readme_markdown(vmajor, vminor, vmicro):
- # get branch
- branch = get_branch()
-
- # create version
- version = get_tag(vmajor, vminor, vmicro)
-
- # create disclaimer text
- is_approved, disclaimer = get_disclaimer()
-
- if is_approved:
- sb = ""
- else:
- sb = " release candidate"
-
- # read README.md into memory
- fpth = os.path.join(paths[2], files[2])
- with open(fpth, "r") as file:
- lines = [line.rstrip() for line in file]
-
- # rewrite README.md
- terminate = False
- f = open(fpth, "w")
- for line in lines:
- if "## Version " in line:
- line = f"### Version {version}"
- if "develop" in branch:
- line += sb
- # This has been commented out as we've generalized this reference.
- # elif "https://doi.org/10.5066/F76Q1VQV" in line:
- # line = (
- # "[Langevin, C.D., Hughes, J.D., "
- # + "Banta, E.R., Provost, A.M., "
- # + "Niswonger, R.G., and Panday, Sorab, "
- # + "{}, ".format(now.year)
- # + "MODFLOW 6 Modular Hydrologic Model "
- # + "version {}{}: ".format(version, sb)
- # + "U.S. Geological Survey Software Release, "
- # + "{}, ".format(now.strftime("%d %B %Y"))
- # + "https://doi.org/10.5066/F76Q1VQV]"
- # + "(https://doi.org/10.5066/F76Q1VQV)"
- # )
- elif "Disclaimer" in line:
- line = disclaimer
- terminate = True
- f.write(f"{line}\n")
- if terminate:
- break
- f.close()
-
- # write disclaimer markdown file
- fpth = os.path.join(paths[3], files[3])
- f = open(fpth, "w")
- f.write(disclaimer)
- f.close()
-
- return
-
-
-def update_codejson(vmajor, vminor, vmicro):
- # define json filename
- json_fname = os.path.join(paths[4], files[4])
-
- # get branch
- branch = get_branch()
-
- # create version
- version = get_tag(vmajor, vminor, vmicro)
-
- is_approved = get_is_approved()
-
- # load and modify json file
- with open(json_fname, "r") as f:
- data = json.load(f, object_pairs_hook=OrderedDict)
-
- # modify the json file data
- sdate = now.strftime("%Y-%m-%d")
- data[0]["date"]["metadataLastUpdated"] = sdate
- if is_approved:
- data[0]["version"] = version
- data[0]["status"] = "Production"
- else:
- data[0]["version"] = version
- data[0]["status"] = "Release Candidate"
-
- # rewrite the json file
- with open(json_fname, "w") as f:
- json.dump(data, f, indent=4)
- f.write("\n")
-
- return
-
-
-if __name__ == "__main__":
- update_version()
diff --git a/distribution/mkdist.py b/distribution/mkdist.py
deleted file mode 100644
index a6d25399f0d..00000000000
--- a/distribution/mkdist.py
+++ /dev/null
@@ -1,930 +0,0 @@
-"""
-Python code to create a MODFLOW 6 distribution. This has been used mostly
-on Windows and requires that Latex be installed, and Python with the
-pymake package.
-
-To make a distribution:
- 1. Install/update pymake, mf6examples, flopy, unix2dos/dos2unix,
- fortran compiler, jupytext, bmipy, xmipy, modflowapi
- 2. Run update_flopy.py in modflow6/autotest
- 3. Put fresh executables (including mf6.exe and libmf6.dll) into
- mf6examples/bin
- 4. Run python scripts in mf6examples/scripts (run process-scripts.py last)
- 5. Create a release branch
- 6. Update version.txt with the correct minor and micro numbers
- 7. Run the make_release.py script, which will create the proper dist name
- 8. Run this mkdist.py script
- 9. Post the distribution zip file
- 10. Commit the release changes, but no need to push
- 11. Merge the release changes into the master branch
- 12. Tag the master branch with the correct version
- 13. Merge master into develop
-
-"""
-
-
-import os
-import shutil
-import subprocess
-import sys
-import zipfile
-from contextlib import contextmanager
-
-import pymake
-from pymake import download_and_unzip
-
-
-@contextmanager
-def cwd(path):
- oldpwd = os.getcwd()
- os.chdir(path)
- try:
- yield
- finally:
- os.chdir(oldpwd)
-
-
-def get_distribution_info(versiontexname):
- vername = None
- verdate = None
- fname = versiontexname
- with open(fname) as f:
- lines = f.readlines()
- f.close()
- for line in lines:
- # \newcommand{\modflowversion}{mf6beta0.9.00}
- srchtxt = "modflowversion"
- if srchtxt in line:
- istart = line.rfind("{") + 1
- istop = line.rfind("}")
- if 0 < istart < istop:
- vername = line[istart:istop]
- srchtxt = "modflowdate"
- if srchtxt in line:
- istart = line.rfind("{") + 1
- istop = line.rfind("}")
- if 0 < istart < istop:
- verdate = line[istart:istop]
- if verdate is not None:
- break
- return vername, verdate
-
-
-def zipdir(dirname, zipname):
- print(f"Zipping directory: {dirname}")
- zipf = zipfile.ZipFile(zipname, "w", zipfile.ZIP_DEFLATED)
- for root, dirs, files in os.walk(dirname):
- for file in files:
- if ".DS_Store" not in file:
- fname = os.path.join(root, file)
- print(" Adding to zip: ==> ", fname)
- zipf.write(fname, arcname=fname)
- zipf.close()
- print("\n")
- return
-
-
-def setup(name, destpath, version, subdirs):
- """
- Setup the folder structure, and return a dictionary of subfolder name
- and the full path in destpath.
-
- """
- print(2 * "\n")
- print(f"Setting up {name} distribution: {version}")
- print("\n")
-
- dest = os.path.join(destpath, version)
- if os.path.exists(dest):
- # Raise Exception('Destination path exists. Kill it first.')
- print(f"Clobbering destination directory: {dest}")
- print("\n")
- shutil.rmtree(dest)
- os.mkdir(dest)
-
- print("Creating subdirectories")
- folderdict = {}
- for sd in subdirs:
- fullpath = os.path.join(dest, sd)
- print(f" creating ==> {fullpath}")
- os.mkdir(fullpath)
- folderdict[sd] = fullpath
- print("\n")
-
- return folderdict
-
-
-def copytree(src, dst, symlinks=False, ignore=None):
- """
- Copy a folder from src to dst. If dst does not exist, then create it.
-
- """
- for item in os.listdir(src):
- s = os.path.join(src, item)
- d = os.path.join(dst, item)
- if os.path.isdir(s):
- print(f" copying {s} ===> {d}")
- shutil.copytree(s, d, symlinks, ignore)
- else:
- print(f" copying {s} ===> {d}")
- shutil.copy2(s, d)
- return
-
-
-def convert_line_endings(folder, windows=True):
- """
- Convert all of the line endings to windows or unix
-
- """
- # Prior to zipping, enforce os line endings on all text files
- print("Converting line endings...")
- platform = sys.platform
- cmd = None
- if platform.lower() == "darwin":
- if windows:
- cmd = "find . -name '*' | xargs unix2dos"
- else:
- cmd = "find . -name '*' | xargs dos2unix"
- else:
- if windows:
- cmd = 'for /R %G in (*) do unix2dos "%G"'
- else:
- cmd = 'for /R %G in (*) do dos2unix "%G"'
- p = subprocess.Popen(cmd, cwd=folder, shell=True)
- print(p.communicate())
- print("\n")
- return
-
-
-def change_version_module(fname, version):
- """
- Update the version.f90 source code with the updated version number
- and turn develop mode off.
-
- """
- with open(fname) as f:
- lines = f.readlines()
- newlines = []
- found1 = False
- found2 = False
- for line in lines:
- newline = line
- srchtxt = "character(len=40), parameter :: VERSION"
- if srchtxt in line:
- newline = f"{srchtxt} = '{version}'"
- found1 = True
- srchtxt = "integer(I4B), parameter :: IDEVELOPMODE"
- if srchtxt in line:
- newline = f"{srchtxt} = {0}"
- found2 = True
- newlines.append(newline)
- if not found1 or not found2:
- raise Exception(
- "could not replace version or developmode in source code"
- )
- with open(fname, "w") as f:
- for line in newlines:
- f.write(line.strip() + "\n")
- return
-
-
-def make_zonebudget(srcpath, destpath, win_target_os, exepath):
- """
- Add zone budget to the distribution
-
- srcpath should be '../utils/zonebudget'
- destpath should be 'utils'
- sourcepath
-
-
- """
-
- # setup the folder structure
- name = "zonebudget"
- version = "zonebudget"
- subdirs = ["src", "make", "msvs"]
- fd = setup(name, destpath, version, subdirs)
-
- # copy source folder
- sourcepath = os.path.join(srcpath, "src")
- copytree(sourcepath, fd["src"], ignore=shutil.ignore_patterns(".DS_Store"))
-
- # Create makefile in the utils/zonebudget/pymake folder
- print("Creating zonebudget makefile")
- with cwd(os.path.join(srcpath, "pymake")):
- pymake.main(
- os.path.join("..", "src"),
- "zbud6",
- "gfortran",
- "gcc",
- makeclean=True,
- dryrun=True,
- include_subdirs=True,
- makefile=True,
- extrafiles="extrafiles.txt",
- )
- os.path.isfile("makefile")
- os.path.isfile("makedefaults")
-
- # Copy makefile to utils/zonebudget/make folder
- shutil.copyfile(
- os.path.join(srcpath, "pymake", "makefile"),
- os.path.join(srcpath, "make", "makefile"),
- )
- shutil.copyfile(
- os.path.join(srcpath, "pymake", "makedefaults"),
- os.path.join(srcpath, "make", "makedefaults"),
- )
-
- # Copy makefile to distribution/xxx/utils/zonebudget/make folder
- shutil.copyfile(
- os.path.join(srcpath, "pymake", "makefile"),
- os.path.join(fd["make"], "makefile"),
- )
- shutil.copyfile(
- os.path.join(srcpath, "pymake", "makedefaults"),
- os.path.join(fd["make"], "makedefaults"),
- )
-
- # Remove the makefile from the pymake folder
- os.remove(os.path.join(srcpath, "pymake", "makefile"))
- os.remove(os.path.join(srcpath, "pymake", "makedefaults"))
-
- # Copy the Visual Studio project file
- flist = [os.path.join(srcpath, "msvs", "zonebudget.vfproj")]
- print("Copying zonebudget msvs files")
- for d in flist:
- print(f" {d} ===> {fd['msvs']}")
- shutil.copy(d, fd["msvs"])
- print("\n")
-
- # build the executable
- exename = "zbud6"
- target = os.path.join(exepath, exename)
- if win_target_os:
- fc = "ifort"
- cc = "cl"
- exename += ".exe"
- else:
- fc = "gfortran"
- cc = "gcc"
- extrafiles = os.path.join(srcpath, "pymake", "extrafiles.txt")
- pymake.main(
- fd["src"],
- target,
- fc,
- cc,
- makeclean=True,
- include_subdirs=True,
- extrafiles=extrafiles,
- )
- if win_target_os:
- target += ".exe"
- if not os.path.isfile(target):
- raise Exception(f"Did not build target: {target}")
-
- return
-
-
-def make_mf5to6(srcpath, destpath, win_target_os, exepath):
- """
- Add mf5to6 to the distribution
-
- srcpath should be '../utils/mf5to6'
- destpath should be 'utils'
- sourcepath
-
-
- """
-
- # setup the folder structure
- name = "mf5to6"
- version = "mf5to6"
- subdirs = ["src", "make", "msvs"]
- fd = setup(name, destpath, version, subdirs)
-
- # copy source folder
- sourcepath = os.path.join(srcpath, "src")
- copytree(sourcepath, fd["src"], ignore=shutil.ignore_patterns(".DS_Store"))
-
- # Create makefile in the utils/mf5to6/pymake folder
- print("Creating mf5to6 makefile")
- with cwd(os.path.join(srcpath, "pymake")):
- pymake.main(
- os.path.join("..", "src"),
- name,
- "gfortran",
- "gcc",
- makeclean=True,
- dryrun=True,
- include_subdirs=True,
- makefile=True,
- extrafiles="extrafiles.txt",
- )
- os.path.isfile("makefile")
- os.path.isfile("makedefaults")
-
- # Copy makefile to utils/mf5to6/make folder
- print("Copying mf5to6 makefile")
- for fname in ["makefile", "makedefaults"]:
- fpath = os.path.join(srcpath, "pymake", fname)
- d = os.path.join(srcpath, "make", fname)
- print(f" {fpath} ===> {d}")
- shutil.copyfile(fpath, d)
-
- # Copy makefile to distribution/xxx/utils/mf5to6/make folder
- for fname in ["makefile", "makedefaults"]:
- fpath = os.path.join(srcpath, "pymake", fname)
- d = os.path.join(fd["make"], fname)
- print(f" {fpath} ===> {d}")
- shutil.copyfile(fpath, d)
-
- # Remove makefile and makedefaults from the pymake folder
- for fname in ["makefile", "makedefaults"]:
- fpath = os.path.join(srcpath, "pymake", fname)
- os.remove(fpath)
-
- # Copy the Visual Studio project file
- flist = [os.path.join(srcpath, "msvs", "mf5to6.vfproj")]
- print("Copying mf5to6 msvs files")
- for d in flist:
- print(f" {d} ===> {fd['msvs']}")
- shutil.copy(d, fd["msvs"])
- print("\n")
-
- # build the executable
- exename = "mf5to6"
- target = os.path.join(exepath, exename)
- if win_target_os:
- fc = "ifort"
- cc = "cl"
- exename += ".exe"
- else:
- fc = "gfortran"
- cc = "gcc"
- extrafiles = os.path.join(srcpath, "pymake", "extrafiles.txt")
- pymake.main(
- fd["src"],
- target,
- fc,
- cc,
- makeclean=True,
- include_subdirs=True,
- extrafiles=extrafiles,
- )
- if win_target_os:
- target += ".exe"
- if not os.path.isfile(target):
- raise Exception(f"Did not build target: {target}")
-
- return
-
-
-def delete_files(files, pth, allow_failure=False):
- for file in files:
- fpth = os.path.join(pth, file)
- try:
- print(f"removing...{file}")
- os.remove(fpth)
- except:
- print(f"could not remove...{file}")
- if not allow_failure:
- return False
- return True
-
-
-def run_command(argv, pth, timeout=None):
- with subprocess.Popen(
- argv, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, cwd=pth
- ) as process:
- try:
- output, unused_err = process.communicate(timeout=timeout)
- buff = output.decode("utf-8")
- ierr = process.returncode
- except subprocess.TimeoutExpired:
- process.kill()
- output, unused_err = process.communicate()
- buff = output.decode("utf-8")
- ierr = 100
- except:
- output, unused_err = process.communicate()
- buff = output.decode("utf-8")
- ierr = 101
-
- return buff, ierr
-
-
-def clean_latex_files():
-
- print("Cleaning latex files")
- exts = ["pdf", "aux", "bbl", "idx", "lof", "out", "toc"]
- pth = os.path.join("..", "doc", "mf6io")
- files = [f"mf6io.{e}" for e in exts]
- delete_files(files, pth, allow_failure=True)
- assert not os.path.isfile(pth + ".pdf")
-
- pth = os.path.join("..", "doc", "ReleaseNotes")
- files = [f"ReleaseNotes.{e}" for e in exts]
- delete_files(files, pth, allow_failure=True)
- assert not os.path.isfile(pth + ".pdf")
-
- pth = os.path.join("..", "doc", "zonebudget")
- files = [f"zonebudget.{e}" for e in exts]
- delete_files(files, pth, allow_failure=True)
- assert not os.path.isfile(pth + ".pdf")
-
- pth = os.path.join("..", "doc", "ConverterGuide")
- files = [f"converter_mf5to6.{e}" for e in exts]
- delete_files(files, pth, allow_failure=True)
- assert not os.path.isfile(pth + ".pdf")
-
- pth = os.path.join("..", "..", "modflow6-docs.git", "mf6suptechinfo")
- files = [f"mf6suptechinfo.{e}" for e in exts]
- delete_files(files, pth, allow_failure=True)
- assert not os.path.isfile(pth + ".pdf")
-
- pth = os.path.join("..", "..", "modflow6-examples.git", "doc")
- files = [f"mf6examples.{e}" for e in exts]
- delete_files(files, pth, allow_failure=True)
- assert not os.path.isfile(pth + ".pdf")
-
- return
-
-
-def rebuild_tex_from_dfn():
-
- npth = os.path.join("..", "doc", "mf6io", "mf6ivar")
- pth = "./"
-
- with cwd(npth):
-
- # get list of TeX files
- files = [
- f
- for f in os.listdir("tex")
- if os.path.isfile(os.path.join("tex", f))
- ]
- for f in files:
- fpth = os.path.join("tex", f)
- os.remove(fpth)
-
- # run python
- argv = ["python", "mf6ivar.py"]
- buff, ierr = run_command(argv, pth)
- msg = f"\nERROR {ierr}: could not run {argv[0]} with {argv[1]}"
- assert ierr == 0, buff + msg
-
- # get list for dfn files
- dfnfiles = [
- os.path.splitext(f)[0]
- for f in os.listdir("dfn")
- if os.path.isfile(os.path.join("dfn", f))
- and "dfn" in os.path.splitext(f)[1]
- ]
- texfiles = [
- os.path.splitext(f)[0]
- for f in os.listdir("tex")
- if os.path.isfile(os.path.join("tex", f))
- and "tex" in os.path.splitext(f)[1]
- ]
- missing = ""
- icnt = 0
- for f in dfnfiles:
- if "common" in f:
- continue
- fpth = f"{f}-desc"
- if fpth not in texfiles:
- icnt += 1
- missing += f" {icnt:3d} {fpth}.tex\n"
- msg = (
- "\n{} TeX file(s) are missing. ".format(icnt)
- + f"Missing files:\n{missing}"
- )
- assert icnt == 0, msg
-
- return
-
-
-def update_mf6io_tex_files(distfolder, mf6pth, expth=None):
-
- texpth = "../doc/mf6io"
- fname1 = os.path.join(texpth, "mf6output.tex")
- fname2 = os.path.join(texpth, "mf6noname.tex")
- fname3 = os.path.join(texpth, "mf6switches.tex")
- local = False
- if expth is None:
- local = True
- expth = os.path.join(distfolder, "examples", "ex-gwf-twri01")
- expth = os.path.abspath(expth)
-
- assert os.path.isfile(mf6pth), f"{mf6pth} does not exist"
- assert os.path.isdir(expth), f"{expth} does not exist"
-
- # run an example model
- if local:
- if os.path.isdir("./temp"):
- shutil.rmtree("./temp")
- shutil.copytree(expth, "./temp")
- cmd = [os.path.abspath(mf6pth)]
- if local:
- simpth = "./temp"
- else:
- simpth = expth
- buff, ierr = run_command(cmd, simpth)
- lines = buff.split("\r\n")
- with open(fname1, "w") as f:
- f.write("{\\small\n")
- f.write("\\begin{lstlisting}[style=modeloutput]\n")
- for line in lines:
- f.write(line.rstrip() + "\n")
- f.write("\\end{lstlisting}\n")
- f.write("}\n")
-
- # run model without a namefile present
- if os.path.isdir("./temp"):
- shutil.rmtree("./temp")
- os.mkdir("./temp")
- cmd = [os.path.abspath(mf6pth)]
- buff, ierr = run_command(cmd, "./temp")
- lines = buff.split("\r\n")
- with open(fname2, "w") as f:
- f.write("{\\small\n")
- f.write("\\begin{lstlisting}[style=modeloutput]\n")
- for line in lines:
- f.write(line.rstrip() + "\n")
- f.write("\\end{lstlisting}\n")
- f.write("}\n")
-
- # run mf6 command with -h to show help
- cmd = [os.path.abspath(mf6pth), "-h"]
- buff, ierr = run_command(cmd, "./temp")
- lines = buff.split("\r\n")
- with open(fname3, "w") as f:
- f.write("{\\small\n")
- f.write("\\begin{lstlisting}[style=modeloutput]\n")
- for line in lines:
- f.write(line.rstrip() + "\n")
- f.write("\\end{lstlisting}\n")
- f.write("}\n")
-
- # clean up
- if os.path.isdir("./temp"):
- shutil.rmtree("./temp")
-
- return
-
-
-def build_latex_docs():
- print("Building latex files")
- pth1 = os.path.join("..", "doc")
- pth2 = os.path.join("..", "..", "modflow6-docs.git")
- pth3 = os.path.join("..", "..", "modflow6-examples.git")
- doclist = [
- (pth1, "mf6io", "mf6io.tex"),
- (pth1, "ReleaseNotes", "ReleaseNotes.tex"),
- (pth1, "zonebudget", "zonebudget.tex"),
- (pth1, "ConverterGuide", "converter_mf5to6.tex"),
- (pth2, "mf6suptechinfo", "mf6suptechinfo.tex"),
- (pth3, "doc", "mf6examples.tex"),
- ]
-
- # copy version.tex from doc to modflow6-docs
- shutil.copy(os.path.join(pth1, "version.tex"), pth2)
-
- for p, d, t in doclist:
- print(f"Building latex document: {t}")
- dirname = os.path.join(p, d)
- with cwd(dirname):
-
- pdflatexcmd = [
- "pdflatex",
- "-interaction=nonstopmode",
- "-halt-on-error",
- t,
- ]
-
- print(" Pass 1/4...")
- cmd = pdflatexcmd
- buff, ierr = run_command(cmd, "./")
- msg = f"\nERROR {ierr}: could not run {cmd[0]} on {cmd[1]}"
- assert ierr == 0, buff + msg
-
- cmd = ["bibtex", os.path.splitext(t)[0] + ".aux"]
- print(" Pass 2/4...")
- buff, ierr = run_command(cmd, "./")
- msg = f"\nERROR {ierr}: could not run {cmd[0]} on {cmd[1]}"
- assert ierr == 0, buff + msg
-
- print(" Pass 3/4...")
- cmd = pdflatexcmd
- buff, ierr = run_command(cmd, "./")
- msg = f"\nERROR {ierr}: could not run {cmd[0]} on {cmd[1]}"
- assert ierr == 0, buff + msg
-
- print(" Pass 4/4...")
- cmd = pdflatexcmd
- buff, ierr = run_command(cmd, "./")
- msg = f"\nERROR {ierr}: could not run {cmd[0]} on {cmd[1]}"
- assert ierr == 0, buff + msg
-
- fname = os.path.splitext(t)[0] + ".pdf"
- assert os.path.isfile(fname), "Could not find " + fname
-
- return
-
-
-def update_latex_releaseinfo(examples_folder):
-
- pth = os.path.join("..", "doc", "ReleaseNotes")
- files = ["folder_struct.tex"]
- delete_files(files, pth, allow_failure=True)
-
- cmd = ["python", "mk_folder_struct.py"]
- buff, ierr = run_command(cmd, pth)
- msg = f"\nERROR {ierr}: could not run {cmd[0]} on {cmd[1]}"
- assert ierr == 0, buff + msg
-
- cmd = ["python", "mk_runtimecomp.py"]
- buff, ierr = run_command(cmd, pth)
- msg = f"\nERROR {ierr}: could not run {cmd[0]} on {cmd[1]}"
- assert ierr == 0, buff + msg
-
- for f in files:
- assert os.path.isfile(os.path.join(pth, f)), (
- "File does not exist: " + f
- )
-
- return
-
-
-def setup_examples(examples_repo, exdestpath, mf6path):
-
- # trap
- assert os.path.isdir(examples_repo)
- assert os.path.isdir(exdestpath)
-
- # next create all examples, but don't run them
- scripts_folder = os.path.join(examples_repo, "scripts")
- scripts_folder = os.path.abspath(scripts_folder)
- exclude_list = ["ex-gwf-capture.py"]
- scripts = [
- fname
- for fname in os.listdir(scripts_folder)
- if fname.endswith(".py")
- and fname.startswith("ex-")
- and fname not in exclude_list
- ]
- for script in scripts:
- dest = os.path.abspath(exdestpath)
- argv = [
- "python",
- script,
- "--no_run",
- "--no_plot",
- "--destination",
- dest,
- ] # no run no plot
- print(f"running {argv} in {scripts_folder}")
- run_command(argv, scripts_folder)
-
- # create list of folders with mfsim.nam
- simulation_folders = []
- for root, dirs, files in os.walk(exdestpath):
- for d in dirs:
- dwpath = os.path.join(root, d)
- if "mfsim.nam" in os.listdir(dwpath):
- simulation_folders.append(dwpath)
- simulation_folders = sorted(simulation_folders)
-
- # go through each simulation folder and add a run.bat file
- for dwpath in simulation_folders:
- fname = os.path.join(dwpath, "run.bat")
- print(f"Adding {fname}")
- with open(fname, "w") as f:
- f.write("@echo off" + "\n")
- runbatloc = os.path.relpath(mf6path, start=dwpath)
- f.write(runbatloc + "\n")
- f.write("echo." + "\n")
- f.write("echo Run complete. Press any key to continue" + "\n")
- f.write("pause>nul" + "\n")
-
- # add runall.bat, which runs all examples
- fname = os.path.join(exdestpath, "runall.bat")
- with open(fname, "w") as f:
- for dwpath in simulation_folders:
- d = os.path.relpath(dwpath, start=exdestpath)
- s = f"cd {d}"
- f.write(s + "\n")
- runbatloc = os.path.relpath(mf6path, start=dwpath)
- f.write(runbatloc + "\n")
- d = os.path.relpath(exdestpath, start=dwpath)
- s = f"cd {d}"
- f.write(s + "\n")
- s = ""
- f.write(s + "\n")
- f.write("pause" + "\n")
-
- return
-
-
-if __name__ == "__main__":
-
- # setup paths and folder structure
- win_target_os = False
- if sys.platform.lower() == "win32":
- win_target_os = True
-
- name = "MODFLOW 6"
- exename = "mf6"
- destpath = "."
- versiontexname = os.path.join("..", "doc", "version.tex")
- version, versiondate = get_distribution_info(versiontexname)
- distfolder = os.path.join(destpath, version)
- subdirs = [
- "bin",
- "doc",
- "examples",
- "src",
- "srcbmi",
- "msvs",
- "make",
- "utils",
- ]
- fd = setup(name, destpath, version, subdirs)
-
- # Copy the Visual Studio solution and project files
- flist = [
- os.path.join("..", "msvs", "mf6.sln"),
- os.path.join("..", "msvs", "mf6.vfproj"),
- os.path.join("..", "msvs", "mf6core.vfproj"),
- os.path.join("..", "msvs", "mf6bmi.sln"),
- os.path.join("..", "msvs", "mf6bmi.vfproj"),
- ]
- print("Copying msvs files")
- for d in flist:
- print(f" {d} ===> {fd['msvs']}")
- shutil.copy(d, fd["msvs"])
- print("\n")
-
- # copy source folder
- copytree(
- os.path.join("..", "src"),
- fd["src"],
- ignore=shutil.ignore_patterns(".DS_Store"),
- )
-
- # copy srcbmi folder
- copytree(
- os.path.join("..", "srcbmi"),
- fd["srcbmi"],
- ignore=shutil.ignore_patterns(".DS_Store"),
- )
-
- # Remove existing makefile and makedefaults
- print("Creating makefile")
- makedir = os.path.join("..", "make")
- for fname in ["makefile", "makedefaults"]:
- fpath = os.path.join(makedir, fname)
- if os.path.isfile(fpath):
- os.remove(fpath)
-
- # Create makefile in the make folder
- with cwd(makedir):
- pymake.main(
- os.path.join("..", "src"),
- "mf6",
- "gfortran",
- "gcc",
- makeclean=True,
- dryrun=True,
- include_subdirs=True,
- makefile=True,
- extrafiles=None,
- )
-
- # Copy makefile to the distribution
- for fname in ["makefile", "makedefaults"]:
- fpath = os.path.join(makedir, fname)
- print(f" {fpath} ===> {fd['make']}")
- shutil.copy(fpath, fd["make"])
-
- # build MODFLOW 6 executable
- srcdir = fd["src"]
- target = os.path.join(fd["bin"], exename)
- if win_target_os:
- fc = "ifort"
- cc = "cl"
- else:
- fc = "gfortran"
- cc = "gcc"
- pymake.main(srcdir, target, fc, cc, makeclean=True, include_subdirs=True)
- if win_target_os:
- target += ".exe"
- if not os.path.isfile(target):
- raise Exception(f"Did not build target: {target}")
-
- # setup zone budget
- make_zonebudget(
- os.path.join("..", "utils", "zonebudget"),
- fd["utils"],
- win_target_os,
- fd["bin"],
- )
-
- # setup mf5to6
- make_mf5to6(
- os.path.join("..", "utils", "mf5to6"),
- fd["utils"],
- win_target_os,
- fd["bin"],
- )
-
- # setup the examples
- exdstpath = fd["examples"]
- examples_repo = os.path.join("..", "..", "modflow6-examples.git")
- setup_examples(examples_repo, exdstpath, target)
-
- # run the comparison tests so the run time comparison table can be
- # created for the release notes
- cmd = ["python", "evaluate_run_times.py"]
- pth = "."
- buff, ierr = run_command(cmd, pth)
- msg = f"\nERROR {ierr}: could not run {cmd[0]} on {cmd[1]}"
- assert ierr == 0, buff + msg
-
- # Clean and then remake latex docs
- clean_latex_files()
- rebuild_tex_from_dfn()
- update_mf6io_tex_files(distfolder, target)
- update_latex_releaseinfo(fd["examples"])
- build_latex_docs()
-
- # docs
- docsrc = os.path.join("..", "doc")
- doclist = [
- [
- os.path.join(docsrc, "ReleaseNotes", "ReleaseNotes.pdf"),
- "release.pdf",
- ],
- [os.path.join(docsrc, "mf6io", "mf6io.pdf"), "mf6io.pdf"],
- [
- os.path.join(docsrc, "ConverterGuide", "converter_mf5to6.pdf"),
- "mf5to6.pdf",
- ],
- [
- os.path.join("..", "doc", "zonebudget", "zonebudget.pdf"),
- "zonebudget.pdf",
- ],
- [
- os.path.join(
- "..",
- "..",
- "modflow6-docs.git",
- "mf6suptechinfo",
- "mf6suptechinfo.pdf",
- ),
- "mf6suptechinfo.pdf",
- ],
- [
- os.path.join(
- "..", "..", "modflow6-examples.git", "doc", "mf6examples.pdf"
- ),
- "mf6examples.pdf",
- ],
- ]
-
- print("Copying documentation")
- for din, dout in doclist:
- dst = os.path.join(fd["doc"], dout)
- print(f" copying {din} ===> {dst}")
- shutil.copy(din, dst)
- print("\n")
-
- print("Downloading published reports for inclusion in distribution")
- for url in [
- "https://pubs.usgs.gov/tm/06/a57/tm6a57.pdf",
- "https://pubs.usgs.gov/tm/06/a55/tm6a55.pdf",
- "https://pubs.usgs.gov/tm/06/a56/tm6a56.pdf",
- ]:
- print(f" downloading {url}")
- download_and_unzip(url, pth=fd["doc"], delete_zip=False, verify=False)
- print("\n")
-
- # Prior to zipping, enforce os line endings on all text files
- windows_line_endings = True
- convert_line_endings(distfolder, windows_line_endings)
-
- # Zip the distribution
- uflag = "u"
- if win_target_os:
- uflag = ""
- zipname = version + uflag + ".zip"
- if os.path.exists(zipname):
- print(f"Removing existing file: {zipname}")
- os.remove(zipname)
- print(f"Creating zipped file: {zipname}")
- zipdir(distfolder, zipname)
- print("\n")
-
- print("Done...")
- print("\n")
diff --git a/distribution/pytest.ini b/distribution/pytest.ini
new file mode 100644
index 00000000000..9936b24dfc0
--- /dev/null
+++ b/distribution/pytest.ini
@@ -0,0 +1,8 @@
+[pytest]
+addopts = -ra
+python_files =
+ benchmark_*.py
+ build_*.py
+ update_*.py
+markers =
+ dist
\ No newline at end of file
diff --git a/distribution/update_version.py b/distribution/update_version.py
new file mode 100644
index 00000000000..7a6d89dded4
--- /dev/null
+++ b/distribution/update_version.py
@@ -0,0 +1,415 @@
+#!/usr/bin/python
+
+"""
+Update files in this modflow6 repository according to release information.
+
+This script is used to update several files in the modflow6 repository, including:
+
+ ../version.txt
+ ../doc/version.tex
+ ../README.md
+ ../DISCLAIMER.md
+ ../code.json
+ ../src/Utilities/version.f90
+
+Information in these files includes version number (major.minor.patch), build date, whether or not
+the release is a release candidate or an actual release, whether the source code should be compiled
+in develop mode or in release mode, and the approval status.
+
+The version number is read in from ../version.txt, which contains major, minor, and patch version
+numbers. These numbers will be propagated through the source code, latex files, markdown files,
+etc. The version numbers can be overridden using the command line argument --version major.minor.patch.
+
+Develop mode is set to 0 if the distribution is approved.
+
+Once this script is run, these updated files will be used in compilation, latex documents, and
+other parts of the repo to mark the overall status.
+
+"""
+import argparse
+import json
+import os
+import shutil
+import textwrap
+from collections import OrderedDict
+from datetime import datetime
+from enum import Enum
+from os import PathLike
+from pathlib import Path
+from typing import NamedTuple, Optional
+
+import pytest
+from filelock import FileLock
+
+from utils import get_modified_time
+
+project_name = "MODFLOW 6"
+project_root_path = Path(__file__).parent.parent
+version_file_path = project_root_path / "version.txt"
+touched_file_paths = [
+ version_file_path,
+ project_root_path / "doc" / "version.tex",
+ project_root_path / "doc" / "version.py",
+ project_root_path / "README.md",
+ project_root_path / "DISCLAIMER.md",
+ project_root_path / "code.json",
+ project_root_path / "src" / "Utilities" / "version.f90",
+]
+
+
+class Version(NamedTuple):
+ """Semantic version number, not including extensions (e.g., 'Release Candidate')"""
+
+ major: int = 0
+ minor: int = 0
+ patch: int = 0
+
+ def __repr__(self):
+ return f"{self.major}.{self.minor}.{self.patch}"
+
+ @classmethod
+ def from_string(cls, version: str) -> "Version":
+ t = version.split(".")
+
+ vmajor = int(t[0])
+ vminor = int(t[1])
+ vpatch = int(t[2])
+
+ return cls(major=vmajor, minor=vminor, patch=vpatch)
+
+ @classmethod
+ def from_file(cls, path: PathLike) -> "Version":
+ path = Path(path).expanduser().absolute()
+ lines = [line.rstrip("\n") for line in open(Path(path), "r")]
+
+ vmajor = vminor = vpatch = None
+ for line in lines:
+ t = line.split()
+ if "major =" in line:
+ vmajor = int(t[2])
+ elif "minor =" in line:
+ vminor = int(t[2])
+ elif "micro =" in line:
+ vpatch = int(t[2])
+
+ msg = "version string must follow semantic version format: major.minor.patch"
+ assert vmajor is not None, f"Missing major number, {msg}"
+ assert vminor is not None, f"Missing minor number, {msg}"
+ assert vpatch is not None, f"Missing patch number, {msg}"
+
+ return cls(major=vmajor, minor=vminor, patch=vpatch)
+
+
+class ReleaseType(Enum):
+ CANDIDATE = "Release Candidate"
+ APPROVED = "Release"
+
+
+
+_approved_fmtdisclaimer = ''' character(len=*), parameter :: FMTDISCLAIMER = &
+ "(/, &
+ &'This software has been approved for release by the U.S. Geological ',/, &
+ &'Survey (USGS). Although the software has been subjected to rigorous ',/, &
+ &'review, the USGS reserves the right to update the software as needed ',/, &
+ &'pursuant to further analysis and review. No warranty, expressed or ',/, &
+ &'implied, is made by the USGS or the U.S. Government as to the ',/, &
+ &'functionality of the software and related material nor shall the ',/, &
+ &'fact of release constitute any such warranty. Furthermore, the ',/, &
+ &'software is released on condition that neither the USGS nor the U.S. ',/, &
+ &'Government shall be held liable for any damages resulting from its ',/, &
+ &'authorized or unauthorized use. Also refer to the USGS Water ',/, &
+ &'Resources Software User Rights Notice for complete use, copyright, ',/, &
+ &'and distribution information.',/)"'''
+
+_preliminary_fmtdisclaimer = ''' character(len=*), parameter :: FMTDISCLAIMER = &
+ "(/, &
+ &'This software is preliminary or provisional and is subject to ',/, &
+ &'revision. It is being provided to meet the need for timely best ',/, &
+ &'science. The software has not received final approval by the U.S. ',/, &
+ &'Geological Survey (USGS). No warranty, expressed or implied, is made ',/, &
+ &'by the USGS or the U.S. Government as to the functionality of the ',/, &
+ &'software and related material nor shall the fact of release ',/, &
+ &'constitute any such warranty. The software is provided on the ',/, &
+ &'condition that neither the USGS nor the U.S. Government shall be held ',/,&
+ &'liable for any damages resulting from the authorized or unauthorized ',/, &
+ &'use of the software.',/)"'''
+
+_approved_disclaimer = """Disclaimer
+----------
+
+This software has been approved for release by the U.S. Geological Survey
+(USGS). Although the software has been subjected to rigorous review, the USGS
+reserves the right to update the software as needed pursuant to further analysis
+and review. No warranty, expressed or implied, is made by the USGS or the U.S.
+Government as to the functionality of the software and related material nor
+shall the fact of release constitute any such warranty. Furthermore, the
+software is released on condition that neither the USGS nor the U.S. Government
+shall be held liable for any damages resulting from its authorized or
+unauthorized use.
+"""
+
+_preliminary_disclaimer = """Disclaimer
+----------
+
+This software is preliminary or provisional and is subject to revision. It is
+being provided to meet the need for timely best science. The software has not
+received final approval by the U.S. Geological Survey (USGS). No warranty,
+expressed or implied, is made by the USGS or the U.S. Government as to the
+functionality of the software and related material nor shall the fact of release
+constitute any such warranty. The software is provided on the condition that
+neither the USGS nor the U.S. Government shall be held liable for any damages
+resulting from the authorized or unauthorized use of the software.
+"""
+
+
+def get_disclaimer(release_type: ReleaseType, formatted: bool = False) -> str:
+ approved = _approved_fmtdisclaimer if formatted else _approved_disclaimer
+ preliminary = _preliminary_fmtdisclaimer if formatted else _preliminary_disclaimer
+ return approved if release_type == ReleaseType.APPROVED else preliminary
+
+
+def log_update(path, release_type: ReleaseType, version: Version):
+ print(f"Updated {path} with version {version}" + (f" {release_type.value}" if release_type != ReleaseType.APPROVED else ""))
+
+
+def update_version_txt_and_py(
+ release_type: ReleaseType, timestamp: datetime, version: Version
+):
+ with open(version_file_path, "w") as f:
+ f.write(
+ f"# {project_name} version file automatically "
+ + f"created using...{os.path.basename(__file__)}\n"
+ )
+ f.write("# created on..." + f"{timestamp.strftime('%B %d, %Y %H:%M:%S')}\n")
+ f.write("\n")
+ f.write(f"major = {version.major}\n")
+ f.write(f"minor = {version.minor}\n")
+ f.write(f"micro = {version.patch}\n")
+ f.write("__version__ = '{:d}.{:d}.{:d}'.format(major, minor, micro)\n")
+ f.close()
+ log_update(version_file_path, release_type, version)
+
+ py_path = project_root_path / "doc" / version_file_path.name.replace(".txt", ".py")
+ shutil.copyfile(version_file_path, py_path)
+ log_update(py_path, release_type, version)
+
+
+def update_version_tex(
+ release_type: ReleaseType, timestamp: datetime, version: Version
+):
+ path = project_root_path / "doc" / "version.tex"
+
+ version_str = str(version)
+ if release_type != ReleaseType.APPROVED:
+ version_str += "rc"
+
+ with open(path, "w") as f:
+ line = "\\newcommand{\\modflowversion}{mf" + f"{version_str}" + "}"
+ f.write(f"{line}\n")
+ line = (
+ "\\newcommand{\\modflowdate}{" + f"{timestamp.strftime('%B %d, %Y')}" + "}"
+ )
+ f.write(f"{line}\n")
+ line = (
+ "\\newcommand{\\currentmodflowversion}"
+ + "{Version \\modflowversion---\\modflowdate}"
+ )
+ f.write(f"{line}\n")
+ f.close()
+
+ log_update(path, release_type, version)
+
+
+def update_version_f90(
+ release_type: ReleaseType, timestamp: datetime, version: Optional[Version]
+):
+ path = project_root_path / "src" / "Utilities" / "version.f90"
+ lines = open(path, "r").read().splitlines()
+ with open(path, "w") as f:
+ skip = False
+ for line in lines:
+ # skip all of the disclaimer text
+ if skip:
+ if ',/)"' in line:
+ skip = False
+ continue
+ elif ":: IDEVELOPMODE =" in line:
+ line = (
+ " integer(I4B), parameter :: "
+ + f"IDEVELOPMODE = {1 if release_type == ReleaseType.CANDIDATE else 0}"
+ )
+ elif ":: VERSIONNUMBER =" in line:
+ line = line.rpartition("::")[0] + f":: VERSIONNUMBER = '{version}'"
+ elif ":: VERSIONTAG =" in line:
+ fmat_tstmp = timestamp.strftime("%m/%d/%Y")
+ line = line.rpartition("::")[0] + f":: VERSIONTAG = ' {release_type.value} {fmat_tstmp}'"
+ elif ":: FMTDISCLAIMER =" in line:
+ line = get_disclaimer(release_type, formatted=True)
+ skip = True
+ f.write(f"{line}\n")
+
+ log_update(path, release_type, version)
+
+
+def update_readme_and_disclaimer(
+ release_type: ReleaseType, timestamp: datetime, version: Version
+):
+ disclaimer = get_disclaimer(release_type, formatted=False)
+ readme_path = str(project_root_path / "README.md")
+ readme_lines = open(readme_path, "r").read().splitlines()
+ with open(readme_path, "w") as f:
+ for line in readme_lines:
+ if "## Version " in line:
+ version_line = f"### Version {version}"
+ if release_type != ReleaseType.APPROVED:
+ version_line += f" {release_type.value}"
+ f.write(f"{version_line}\n")
+ elif "Disclaimer" in line:
+ f.write(f"{disclaimer}\n")
+ break
+ else:
+ f.write(f"{line}\n")
+ log_update(readme_path, release_type, version)
+
+ disclaimer_path = project_root_path / "DISCLAIMER.md"
+ with open(disclaimer_path, "w") as f:
+ f.write(disclaimer)
+ log_update(disclaimer_path, release_type, version)
+
+
+def update_codejson(release_type: ReleaseType, timestamp: datetime, version: Version):
+ path = project_root_path / "code.json"
+ with open(path, "r") as f:
+ data = json.load(f, object_pairs_hook=OrderedDict)
+
+ data[0]["date"]["metadataLastUpdated"] = timestamp.strftime("%Y-%m-%d")
+ data[0]["version"] = str(version)
+ data[0]["status"] = release_type.value
+ with open(path, "w") as f:
+ json.dump(data, f, indent=4)
+ f.write("\n")
+
+ log_update(path, release_type, version)
+
+
+def update_version(
+ release_type: ReleaseType = ReleaseType.CANDIDATE,
+ timestamp: datetime = datetime.now(),
+ version: Version = None,
+):
+ """
+ Update version information stored in version.txt in the project root,
+ as well as several other files in the repository. Version updates are
+ performed by explicitly providing a version argument to this function
+ and a lock is held on the version file to make sure that the state of
+ the multiple files containing version information stays synchronized.
+ If no version argument is provided, the version number isn't changed.
+ """
+
+ lock_path = Path(version_file_path.name + ".lock")
+ try:
+ lock = FileLock(lock_path)
+ previous = Version.from_file(version_file_path)
+ version = (
+ version
+ if version
+ else Version(previous.major, previous.minor, previous.patch)
+ )
+
+ with lock:
+ update_version_txt_and_py(release_type, timestamp, version)
+ update_version_tex(release_type, timestamp, version)
+ update_version_f90(release_type, timestamp, version)
+ update_readme_and_disclaimer(release_type, timestamp, version)
+ update_codejson(release_type, timestamp, version)
+ finally:
+ lock_path.unlink(missing_ok=True)
+
+
+_initial_version = Version(0, 0, 1)
+_current_version = Version.from_file(version_file_path)
+
+
+@pytest.mark.skip(reason="reverts repo files on cleanup, tread carefully")
+@pytest.mark.parametrize(
+ "release_type", [ReleaseType.CANDIDATE, ReleaseType.APPROVED]
+)
+@pytest.mark.parametrize(
+ "version",
+ [None, Version(major=_initial_version.major, minor=_initial_version.minor, patch=_initial_version.patch)],
+)
+def test_update_version(tmp_path, release_type, version):
+ m_times = [get_modified_time(file) for file in touched_file_paths]
+ timestamp = datetime.now()
+
+ try:
+ update_version(release_type=release_type, timestamp=timestamp, version=version)
+ updated = Version.from_file(version_file_path)
+
+ # check files containing version info were modified
+ for p, t in zip(touched_file_paths, m_times):
+ assert p.stat().st_mtime > t
+
+ if version:
+            # version should be set to the version explicitly provided
+ assert updated.major == _initial_version.major
+ assert updated.minor == _initial_version.minor
+ assert updated.patch == _initial_version.patch
+ else:
+ # version should not have changed
+ assert updated.major == _current_version.major
+ assert updated.minor == _current_version.minor
+ assert updated.patch == _current_version.patch
+ finally:
+ for p in touched_file_paths:
+ os.system(f"git restore {p}")
+
+
+if __name__ == "__main__":
+ parser = argparse.ArgumentParser(
+ prog="Update Modflow 6 version",
+ formatter_class=argparse.RawDescriptionHelpFormatter,
+ epilog=textwrap.dedent(
+ """\
+ Update version information stored in version.txt in the project root,
+ as well as several other files in the repository. If --version is not
+ provided, the version number will not be changed. A file lock is held
+ to synchronize file access. To indicate a version is production-ready
+ use --approve. This will change the disclaimer and version tag label,
+ removing 'Release Candidate' from the latter and modifying the former
+            to reflect approval. The IDEVELOPMODE flag is set to 1 for preliminary
+ versions and 0 for approved versions. The version tag must follow the
+            'major.minor.patch' format conventions for semantic versioning.
+ """
+ )
+ )
+ parser.add_argument(
+ "-v",
+ "--version",
+ required=False,
+ help="Specify the release version",
+ )
+ parser.add_argument(
+ "-a",
+ "--approve",
+ required=False,
+ action="store_true",
+ help="Indicate release is approved (defaults to false for preliminary/candidate distributions)",
+ )
+ parser.add_argument(
+ "-g",
+ "--get",
+ required=False,
+ action="store_true",
+ help="Just get the current version number, don't update anything (defaults to false)",
+ )
+ args = parser.parse_args()
+
+ if args.get:
+ print(Version.from_file(project_root_path / "version.txt"))
+ else:
+ update_version(
+ release_type=ReleaseType.APPROVED if args.approve else ReleaseType.CANDIDATE,
+ timestamp=datetime.now(),
+ version=Version.from_string(args.version) if args.version else _current_version,
+ )
diff --git a/distribution/utils.py b/distribution/utils.py
new file mode 100644
index 00000000000..c477a73cfc2
--- /dev/null
+++ b/distribution/utils.py
@@ -0,0 +1,110 @@
+import platform
+import shutil
+import subprocess
+import sys
+from datetime import datetime
+from os import PathLike, environ
+from pathlib import Path
+from warnings import warn
+
+from modflow_devtools.markers import requires_exe
+
+_project_root_path = Path(__file__).parent.parent
+
+
+def get_project_root_path():
+ return _project_root_path
+
+
+def get_modified_time(path: Path) -> float:
+ return path.stat().st_mtime if path.is_file() else datetime.today().timestamp()
+
+
+def get_ostag():
+ zipname = sys.platform.lower()
+ if zipname == "linux2":
+ zipname = "linux"
+ elif zipname == "darwin":
+ zipname = "mac"
+ elif zipname == "win32":
+ if platform.architecture()[0] == "64bit":
+ zipname = "win64"
+ return zipname
+
+
+def get_repo_path() -> Path:
+ """
+ Returns the path to the folder containing example/test model repositories.
+ """
+ repo_path = environ.get("REPOS_PATH", None)
+ if not repo_path:
+ warn(
+ f"REPOS_PATH environment variable missing, defaulting to parent of project root"
+ )
+ return Path(repo_path) if repo_path else Path(__file__).parent.parent.parent
+
+
+def copytree(src: PathLike, dst: PathLike, symlinks=False, ignore=None):
+ """
+ Copy a folder from src to dst. If dst does not exist, then create it.
+ """
+ src = Path(src).expanduser().absolute()
+ dst = Path(dst).expanduser().absolute()
+
+ for s in src.glob("*"):
+ d = dst / s.name
+ if s.is_dir():
+ print(f" copying {s} ===> {d}")
+ shutil.copytree(s, d, symlinks, ignore)
+ else:
+ print(f" copying {s} ===> {d}")
+ shutil.copy2(s, d)
+
+
+def run_command(argv, pth, timeout=None):
+ with subprocess.Popen(
+ argv, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, cwd=pth
+ ) as process:
+ try:
+ output, unused_err = process.communicate(timeout=timeout)
+ buff = output.decode("utf-8")
+ ierr = process.returncode
+ except subprocess.TimeoutExpired:
+ process.kill()
+ output, unused_err = process.communicate()
+ buff = output.decode("utf-8")
+ ierr = 100
+ except:
+ output, unused_err = process.communicate()
+ buff = output.decode("utf-8")
+ ierr = 101
+
+ return buff, ierr
+
+
+def convert_line_endings(folder, windows=True):
+ """
+ Convert all of the line endings to windows or unix
+
+ """
+ # Prior to zipping, enforce os line endings on all text files
+ print("Converting line endings...")
+ platform = sys.platform
+ cmd = None
+ if platform.lower() == "darwin":
+ if windows:
+ cmd = "find . -name '*' | xargs unix2dos"
+ else:
+ cmd = "find . -name '*' | xargs dos2unix"
+ else:
+ if windows:
+ cmd = 'for /R %G in (*) do unix2dos "%G"'
+ else:
+ cmd = 'for /R %G in (*) do dos2unix "%G"'
+ p = subprocess.Popen(cmd, cwd=folder, shell=True)
+ print(p.communicate())
+
+
+@requires_exe("dos2unix", "unix2dos")
+def test_convert_line_endings():
+ pass
diff --git a/doc/ReleaseNotes/mk_runtimecomp.py b/doc/ReleaseNotes/mk_runtimecomp.py
index 820d4b9c682..782b151be22 100644
--- a/doc/ReleaseNotes/mk_runtimecomp.py
+++ b/doc/ReleaseNotes/mk_runtimecomp.py
@@ -1,65 +1,63 @@
# Simple script to convert the run time comparison markdown table in the
# distribution folder into a latex table that can be included in the
# release notes
+import argparse
+from pathlib import Path
+from warnings import warn
-import os
-header = r"""
-\section{Run-Time Comparison}
-
-Comparison of run times of the current version of MODFLOW 6 to the
-previous version. The distribution example models are used to compare run
-times. Simulations that fail are indicated by '--'. The percent difference,
-where calculated, is relative to the simulation run time for the previous
-version. Percent differences for example problems with short run times
-(less than 30 seconds) may not be significant.
+if __name__ == "__main__":
+ parser = argparse.ArgumentParser()
+ parser.add_argument("path")
+ args = parser.parse_args()
-\small
-\begin{longtable}[!htbp]{p{5cm} p{3cm} p{3cm} p{1.5cm}}
-\caption{List of run time comparsons}
-\label{table:run-time-comparisons}
-\tabularnewline
+ header = r"""
+ \section{Run-Time Comparison}
-\hline
-\hline
-\textbf{Example Problem} & \textbf{Current Version} & \textbf{Previous Version} & \textbf{Percent difference} \\
-\hline
-\endfirsthead
+ Comparison of run times of the current version of MODFLOW 6 to the
+ previous version. The distribution example models are used to compare run
+ times. Simulations that fail are indicated by '--'. The percent difference,
+ where calculated, is relative to the simulation run time for the previous
+ version. Percent differences for example problems with short run times
+ (less than 30 seconds) may not be significant.
-\hline
-\hline
-\textbf{Example Problem} & \textbf{Current Version} & \textbf{Previous Version} & \textbf{Percent difference} \\
-\hline
-\endhead
+ \small
+ \begin{longtable}[!htbp]{p{5cm} p{3cm} p{3cm} p{1.5cm}}
+    \caption{List of run time comparisons}
+ \label{table:run-time-comparisons}
+ \tabularnewline
-"""
+ \hline
+ \hline
+ \textbf{Example Problem} & \textbf{Current Version} & \textbf{Previous Version} & \textbf{Percent difference} \\
+ \hline
+ \endfirsthead
-data = r"""
-ex-gwf-advtidal & 0.592 Seconds & 0.623 Seconds & -4.98 \\
-\hline
-ex-gwf-bcf2ss-p01a & 0.020 Seconds & 0.040 Seconds & -50.00 \\
-\hline
-"""
+ \hline
+ \hline
+ \textbf{Example Problem} & \textbf{Current Version} & \textbf{Previous Version} & \textbf{Percent difference} \\
+ \hline
+ \endhead
-footer = r"""
-\hline
-\end{longtable}
-\normalsize
-"""
+ """
-if __name__ == "__main__":
+ footer = r"""
+ \hline
+ \end{longtable}
+ \normalsize
+ """
- fname = "../../distribution/run-time-comparison.md"
- fnametex = "run-time-comparison.tex"
- if os.path.isfile(fnametex):
- os.remove(fnametex)
+ fname = "run-time-comparison"
+ fpath = Path(args.path).expanduser().absolute()
+ fnametex = Path(f"{fname}.tex").absolute()
+ fnametex.unlink(missing_ok=True)
- # if the markdown table exists, then convert it to latex
- if os.path.isfile(fname):
+ # if the markdown file exists, convert it to latex
+ if fpath.is_file():
ftex = open(fnametex, 'w')
ftex.write(header)
skipline = True
- with open(fname) as fmd:
+ with open(fpath) as fmd:
for line in fmd:
if not skipline:
ll = line.strip().split('|')
@@ -74,3 +72,6 @@
skipline = False
ftex.write(footer)
ftex.close()
+ print(f"Created LaTex file {fnametex} from markdown benchmark results file {fpath}")
+ else:
+ warn(f"Benchmark results not found: {fpath}")
diff --git a/environment.yml b/environment.yml
index acdb3fc6451..2933f2dc9ac 100644
--- a/environment.yml
+++ b/environment.yml
@@ -5,19 +5,23 @@ channels:
- defaults
dependencies:
- - appdirs
+ - appdirs
+ - filelock
- fprettify
- - numpy
+ - jupytext
- matplotlib
- meson!=0.63.0
- ninja
+ - numpy
- pip
- pip:
- git+https://github.com/modflowpy/flopy.git
- git+https://github.com/modflowpy/pymake.git
- git+https://github.com/Deltares/xmipy.git
- git+https://github.com/MODFLOW-USGS/modflowapi.git
+ - modflow-devtools
- pytest
+ - pytest-cases
+ - pytest-dotenv
- pytest-xdist
- flaky
- - requests
diff --git a/src/Utilities/comarg.f90 b/src/Utilities/comarg.f90
index f9d328dee06..4fba5b32e3d 100644
--- a/src/Utilities/comarg.f90
+++ b/src/Utilities/comarg.f90
@@ -38,7 +38,6 @@ subroutine GetCommandLineArguments()
character(len=LINELENGTH) :: cexe
character(len=LENBIGLINE) :: compiler
character(len=20) :: cdate
- character(len=17) :: ctyp
character(len=LENBIGLINE) :: coptions
logical :: ltyp
logical :: lexist
@@ -75,15 +74,6 @@ subroutine GetCommandLineArguments()
trim(adjustl(cexe)), '- MODFLOW', &
trim(adjustl(VERSION)), '(compiled', trim(adjustl(cdate)), ')'
!
- ! -- set ctyp
- if (IDEVELOPMODE == 1) then
- ctyp = 'Release Candidate'
- ltyp = .TRUE.
- else
- ctyp = 'Release'
- ltyp = .FALSE.
- end if
- !
! -- check for silent option
do iarg = 1, icountcmd
call get_command_argument(iarg, uctag)
@@ -153,7 +143,7 @@ subroutine GetCommandLineArguments()
case ('-V', '--VERSION')
lstop = .TRUE.
write (line, '(2a,2(1x,a))') &
- trim(adjustl(cexe)), ':', trim(adjustl(VERSION)), ctyp
+ trim(adjustl(cexe)), ':', trim(adjustl(VERSION))
call write_message(line, skipbefore=1, skipafter=1)
case ('-DEV', '--DEVELOP')
lstop = .TRUE.
diff --git a/src/Utilities/version.f90 b/src/Utilities/version.f90
index b8af4080bdc..872b6ebbbab 100644
--- a/src/Utilities/version.f90
+++ b/src/Utilities/version.f90
@@ -16,8 +16,8 @@ module VersionModule
! -- modflow 6 version
integer(I4B), parameter :: IDEVELOPMODE = 1
character(len=*), parameter :: VERSIONNUMBER = '6.4.0'
- character(len=*), parameter :: VERSIONTAG = ' release candidate 11/30/2022'
- character(len=40), parameter :: VERSION = '6.4.1 release candidate 12/02/2022'
+ character(len=*), parameter :: VERSIONTAG = ' Release Candidate 11/30/2022'
+ character(len=40), parameter :: VERSION = VERSIONNUMBER//VERSIONTAG
character(len=10), parameter :: MFVNAM = ' 6'
character(len=*), parameter :: MFTITLE = &
&'U.S. GEOLOGICAL SURVEY MODULAR HYDROLOGIC MODEL'