diff --git a/.github/workflows/build_conda.yml b/.github/workflows/build_conda.yml index d9ba9162..94acc688 100644 --- a/.github/workflows/build_conda.yml +++ b/.github/workflows/build_conda.yml @@ -7,10 +7,15 @@ jobs: build: runs-on: ubuntu-latest container: - image: continuumio/miniconda3:latest + image: ghcr.io/noaa-gfdl/fre-cli:miniconda24.7.1_gcc14.2.0 steps: - name: Checkout Files uses: actions/checkout@v4 + with: + submodules: 'recursive' + - name: Add mkmf to PATH + run: | + echo $PWD/mkmf/bin >> $GITHUB_PATH - name: Run Conda to Build run: | conda config --append channels conda-forge diff --git a/.github/workflows/create_test_conda_env.yml b/.github/workflows/create_test_conda_env.yml index ce5de814..25d382a5 100644 --- a/.github/workflows/create_test_conda_env.yml +++ b/.github/workflows/create_test_conda_env.yml @@ -5,66 +5,54 @@ on: [push] jobs: build-linux: runs-on: ubuntu-latest + container: + image: ghcr.io/noaa-gfdl/fre-cli:miniconda24.7.1_gcc14.2.0 steps: - uses: actions/checkout@v4 - - name: Set up Python - uses: actions/setup-python@v5 with: - python-version: '>=3.9' - - - name: Add conda to system path - run: | - # $CONDA is an env var pointing to root of miniconda dir - echo $CONDA/bin >> $GITHUB_PATH - + submodules: 'recursive' - name: Create fre-cli environment run: | # create environment containing all dependencies # the env cannot be explicitly activated in github CI/CD conda env create -f environment.yml --name fre-cli - # add conda env's executables to github's PATH equiv. + # sets CONDA to wherever it may be on the image + source /root/.bashrc + + # add conda env's executables and mkmf to github's PATH equiv. echo $CONDA/envs/fre-cli/bin >> $GITHUB_PATH - + echo $PWD/mkmf/bin >> $GITHUB_PATH + # use *conda environment's pip* to install fre-cli # called w/ full path to conda's python for explicitness # called as a module (-m pip) for explicitness - $CONDA/envs/fre-cli/bin/python -m pip install --prefix $CONDA/envs/fre-cli . + $CONDA/envs/fre-cli/bin/python -m pip install --prefix $CONDA/envs/fre-cli . - name: Run pytest in fre-cli environment run: | - # try to make sure the right things are in GITHUB_PATH - echo $CONDA/envs/fre-cli/bin >> $GITHUB_PATH - - # are we talking to the right python? - which python - python --version - $CONDA/envs/fre-cli/bin/python --version - + # add spack installed binaries to front of path so that + # conda's netcdf/hdf5 installs don't break compilation tests + export path_save=$PATH + export PATH="/opt/views/view/bin:$PATH" + # run pytest pytest --junit-xml=pytest_results.xml --config-file=fre/pytest.ini --cov-config=fre/coveragerc --cov-report=xml --cov=fre fre/ - - # install genbadge to generate coverage badge based on xml + + # restore original path and install genbadge to generate coverage badge based on xml + export PATH="$path_save" pip install genbadge genbadge coverage -v -i coverage.xml -o docs/cov_badge.svg genbadge tests -v -i pytest_results.xml -o docs/pytest_badge.svg - + - name: Run pylint in fre-cli environment run: | - # try to make sure the right things are in GITHUB_PATH - echo $CONDA/envs/fre-cli/bin >> $GITHUB_PATH - - # are we talking to the right python? 
- which python - python --version - $CONDA/envs/fre-cli/bin/python --version - - # run pylint, ignored modules avoid warnings arising from code internal to those modules + # run pylint, ignored modules avoid warnings arising from code internal to those modules pylint --max-args 6 -ry --ignored-modules netCDF4,cmor fre/ || echo "pylint returned non-zero exit code. preventing workflow from dying with this echo." - + - name: Install Sphinx and Build Documentation run: | - pip install sphinx renku-sphinx-theme sphinx-rtd-theme + pip install sphinx renku-sphinx-theme sphinx-rtd-theme pip install --upgrade sphinx-rtd-theme sphinx-apidoc --output-dir docs fre/ --separate sphinx-build docs build diff --git a/.github/workflows/publish_conda.yml b/.github/workflows/publish_conda.yml index e36a72ea..26b1825a 100644 --- a/.github/workflows/publish_conda.yml +++ b/.github/workflows/publish_conda.yml @@ -7,10 +7,15 @@ jobs: publish: runs-on: ubuntu-latest container: - image: continuumio/miniconda3:latest + image: ghcr.io/noaa-gfdl/fre-cli:miniconda24.7.1_gcc14.2.0 steps: - name: Checkout Files uses: actions/checkout@v4 + with: + submodules: 'recursive' + - name: Add mkmf to PATH + run: | + echo $PWD/mkmf/bin >> $GITHUB_PATH - name: Run Conda to Build and Publish run: | conda config --append channels conda-forge diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml deleted file mode 100644 index fef13ef2..00000000 --- a/.gitlab-ci.yml +++ /dev/null @@ -1,13 +0,0 @@ -# You can override the included template(s) by including variable overrides -# SAST customization: https://docs.gitlab.com/ee/user/application_security/sast/#customizing-the-sast-settings -# Secret Detection customization: https://docs.gitlab.com/ee/user/application_security/secret_detection/#customizing-settings -# Dependency Scanning customization: https://docs.gitlab.com/ee/user/application_security/dependency_scanning/#customizing-the-dependency-scanning-settings -# Container Scanning customization: https://docs.gitlab.com/ee/user/application_security/container_scanning/#customizing-the-container-scanning-settings -# Note that environment variables can be set in several places -# See https://docs.gitlab.com/ee/ci/variables/#cicd-variable-precedence -stages: -- test -sast: - stage: test -include: -- template: Security/SAST.gitlab-ci.yml diff --git a/.gitmodules b/.gitmodules new file mode 100644 index 00000000..b95f9e27 --- /dev/null +++ b/.gitmodules @@ -0,0 +1,6 @@ +[submodule "mkmf"] + path = mkmf + url = https://github.com/NOAA-GFDL/mkmf +[submodule "fre/gfdl_msd_schemas"] + path = fre/gfdl_msd_schemas + url = https://github.com/NOAA-GFDL/gfdl_msd_schemas diff --git a/.public/.nojekyll b/.public/.nojekyll deleted file mode 100644 index e69de29b..00000000 diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index a7bd7b6c..c37953c9 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -1,7 +1,7 @@ ## **For Developers** * Developers are free to use this repository's `README.md` to familiarize with the CLI and save time from having to install any dependencies, but development within a Conda environment is heavily recommended regardless -* Gain access to the repository with `git clone git@github.com:NOAA-GFDL/fre-cli.git` or your fork's link (recommended) and an SSH RSA key +* Gain access to the repository with `git clone --recursive git@github.com:NOAA-GFDL/fre-cli.git` or your fork's link (recommended) and an SSH RSA key - Once inside the repository, developers can test local changes by running a `pip install .` inside of the root directory to install 
the fre-cli package locally with the newest local changes on top of the installed Conda fre-cli dependencies - Test as a normal user would use the CLI * Create a GitHub issue to reflect your contribution's background and reference it with Git commits diff --git a/docs/FAQ.rst b/docs/FAQ.rst deleted file mode 100644 index cde88d85..00000000 --- a/docs/FAQ.rst +++ /dev/null @@ -1,2 +0,0 @@ -FAQ -=== diff --git a/docs/api.rst b/docs/api.rst new file mode 100644 index 00000000..a3655f99 --- /dev/null +++ b/docs/api.rst @@ -0,0 +1,4 @@ +============= +API +============= +Auto-harvested goodness, coming soon. diff --git a/docs/badges.rst b/docs/badges.rst index 542793d4..3fbc5d66 100644 --- a/docs/badges.rst +++ b/docs/badges.rst @@ -1,3 +1,5 @@ +.. this file is explicitly for the hyperlinkage in the base README.md to the badge image files +====== Badges ====== diff --git a/docs/contributing_to_doc.rst b/docs/contributing_to_doc.rst new file mode 100644 index 00000000..15721886 --- /dev/null +++ b/docs/contributing_to_doc.rst @@ -0,0 +1,62 @@ +.. last updated early Nov 2024. + could use some refinement +=========================== +Documentation-Documentation +=========================== + +Welcome to ``fre-cli``'s Documentation-documentation- where we document how the documentation is +documented + +How to Contribute to ``fre-cli``'s documentation +================================================ + + + +fork and poke at the settings +----------------------------- + +* Fork ``fre-cli`` on github + +* On github, navigate to your ``fre-cli`` fork, and click “settings” + +* In “settings”, click “pages” + +* In “pages”, under “build and deployment”, make sure “source” is set to “Deploy from a branch” + +* Under that, find “Branch”, make sure the branch selected is ``gh-pages`` + +* The branch ``gh-pages`` is "automagic”- i.e. do not change anything about it nor create a new one, + nor interact with anything in that branch directly + + +enable workflows for your fork +------------------------------ + +note: this step may depend on user-specific settings! 
+* Back on top where you found “settings”, find and click “actions” to the left + +* Enable running the workflow actions associated with the ``fre-cli`` repo under ``.github/workflows`` + + +run your fork's first workflow +------------------------------ + +* The documentation builds as the last steps to ``create_test_conda_env.yml`` when there's a push to ``main`` + +* To get your first workflow run on your fork, comment out the ``github.ref == ‘refs/heads/main’`` bit + so that it runs when you push to any branch, and make a small, trivial commit somewhere to your + remote fork + +* You should be able to find the deployed webpage from a successful workflow at + https://your_username.github.io/fre-cli (if you did not change the fork’s name from ``fre-cli``, that is) + +* If you’re only editing docs, you can make the turn-around time on your workflow ~3 min faster by + commenting-out the ``pylint`` and ``pytest`` steps in ``create_test_conda_env.yml``, and disabling the + ``build_conda.yml`` workflow + + + +Other Helpful Things +==================== +`restructured text cheat-sheet `_ diff --git a/docs/for-developers.rst b/docs/for-developers.rst new file mode 100644 index 00000000..92feada5 --- /dev/null +++ b/docs/for-developers.rst @@ -0,0 +1,118 @@ +=============== +For developers +=============== + +Developers are free to use the user guide above to familiarize with the CLI and save time from +having to install any dependencies, but development within a Conda environment is heavily +recommended regardless. + +Gain access to the repository with ``git clone --recursive git@github.com:NOAA-GFDL/fre-cli.git`` or your fork's +link (recommended) and an SSH RSA key. Once inside the repository, developers can test local changes +by running a ``pip install .`` inside of the root directory to install the ``fre-cli`` package locally +with the newest local changes. Test as a normal user would use the CLI. + + +Adding New Tools +================ + + +From Other Repositories +----------------------- + +Currently, the solution to this task is to approach it using Conda packages. The tool that is being +added must reside within a repository that contains a ``meta.yaml`` that includes Conda dependencies +like the one in this repository and ideally a ``setup.py`` (may be subject to change due to deprecation) +that may include any potentially needed pip dependencies + +* Once published as a Conda package, ideally on the `NOAA-GFDL conda channel `_, + an addition can be made to the ``run`` section under ``requirements`` in ``meta.yaml`` of the ``fre-cli`` + following the syntax ``channel::package`` + +* On pushes to the main branch, the package located at https://anaconda.org/NOAA-GFDL/fre-cli will automatically + be updated by the workflow defined in ``.github/workflows/publish_conda.yml`` + + +Checklist +--------- + +For the new tool you are trying to develop, there are a few criteria to satisfy + +1. Create a subdirectory for the tool group inside the ``fre/`` directory; i.e. ``fre/`` + +2. Add an ``__init__.py`` inside of ``fre/`` + +* typically this file should be empty, but it depends on the ````'s needs +* even if empty, the file facilitates module importability and must be present + +3. Add a file named ``fre//fre.py``. This will serve as the main entry point for ``fre`` + into the ````'s functionality + +4. Add a ``click`` group named after ```` within ``fre//fre.py`` + +* This ``click`` group will contain all the functionality under the ```` + +5.
Create separate files as needed for different commands; do not code out the full + implementation of ```` inside of a ``click`` command within ``fre//fre.py``. + +* better yet, consider what structure your tool may need in the future for maintainability's sake +* if you need, specify a ```` like ``fre//``. ``fre/app`` currently has + this structure + +6. Be sure to import the contents of the needed subcommand scripts inside of ``fre.py`` + +* i.e. from ``fre..toolCommandScript import *`` + +7. At this point, you can copy and paste the parts of your main ``click`` command from its script + into ``fre.py`` when implementing the function reflective of the command function + +* Everything will remain the same; i.e. arguments, options, etc. + +* However, this new function within ``fre.py`` must add a new line after the arguments, options, + and other command components; ``@click.pass_context`` + +* Along with this, a new argument ``context`` must now be added to the parameters of the command + (preferably at the beginning, but it won't break it if it's not) + +8. From here, all that needs to be added after defining the command with a name is + ``context.forward(mainFunctionOfToolCommand)``, and done! + +9. The last step is to replicate the command in the same way as done in ``fre.py`` + inside of ``fre.py``, but make sure to add ``from fre import `` and + ``from fre. import *`` + +Please refer to this issue when encountering naming issues: +`NOAA-GFDL#31 `_ + + +Example ``fre/`` Directory Structure +------------------------------------ + +``fre/`` +├── ``__init__.py`` +├── ``fre.py`` +├── ``fre`` +│ ├── ``__init__.py`` +│ ├── ``toolCommandScript.py`` +│ └── ``fre.py`` + + +``MANIFEST.in`` +--------------- + +In the case where non-python files like templates, examples, and outputs are to be included in the ``fre-cli`` package, +``MANIFEST.in`` can provide the solution. Ensure that the file exists within the correct folder, and add a line to the +``MANIFEST.in`` file saying something like ``include fre/fre/fileName.fileExtension`` + +* For more efficiency, if there are multiple files of the same type needed, the ``MANIFEST.in`` addition can be something + like ``recursive-include fre/fre *.fileExtension`` which would recursively include every file matching that + ``fileExtension`` within the specified directory and its respective subdirectories. + + +Adding Documentation +-------------------- + +see section "Documentation-Documentation" + + + + diff --git a/docs/index.rst b/docs/index.rst index 77c46de6..9c65e98a 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -1,23 +1,28 @@ -.. Fre-Cli documentation master file, created by - sphinx-quickstart on Wed Mar 6 22:28:21 2024. - You can adapt this file completely to your liking, but it should at least - contain the root `toctree` directive. +.. Fre-Cli documentation master file, created by sphinx-quickstart on Wed Mar 6 22:28:21 2024. + You can adapt this file completely to your liking, but it should at least contain the root + \`toctree\` directive (no backslashes) + Some sphinx markdown examples: + https://gist.github.com/SMotaal/24006b13b354e6edad0c486749171a70 -Welcome to Fre-Cli's documentation! -=================================== +======================================= +Welcome to ``fre-cli``'s documentation! +======================================= +.. the entry in the toc must be the .rst filename. + what shows in the webpage is the first header or title ..
toctree:: - :maxdepth: 2 + :maxdepth: 1 :caption: Contents: + what-is-fre setup usage - subtools - FAQ - badges + tools + api + for-developers -Indices and tables -================== +Indices +======= * :ref:`genindex` * :ref:`modindex` diff --git a/docs/setup.rst b/docs/setup.rst index cf06bb99..7fd26b14 100644 --- a/docs/setup.rst +++ b/docs/setup.rst @@ -1,32 +1,35 @@ +===== Setup ===== +fre-cli is conda-installable from the “noaa-gfdl” anaconda channel (https://anaconda.org/NOAA-GFDL/fre-cli) +and is deployed on GFDL systems as Environment Modules. + +On GFDL systems +======================== +If you are at GFDL (gaea, PP/AN, workstations), you may skip installation:: -Need to set up Conda environment first and foremost + module load fre/2024.01 -If on workstation: -module load conda + fre --help -Create new Conda environment -conda create -n [environmentName] +Generic +======================= +If you are outside GFDL or are a FRE developer, install with conda. If you're at GFDL, bring conda into your PATH:: -Append necessary channels -conda config --append channels noaa-gfdl -conda config --append channels conda-forge + module load miniforge -Run conda install on needed dependencies -conda install noaa-gfdl::fre-cli should install the CLI package located at https://anaconda.org/NOAA-GFDL/fre-cli created from the meta.yaml file +If you are outside GFDL, install the miniconda tool with the standard instructions (https://docs.anaconda.com/miniconda/miniconda-install/). -All other dependencies used by the tools are installed along with this install (configured inside the meta.yaml), with the exception of local modules -setup.py file allows fre.py to be ran with fre as the entry point on the command line instead of python fre.py +Once you have conda available, install the latest fre-cli from the NOAA-GFDL anaconda channel:: -Enter commands and follow --help messages for guidance (brief rundown of commands also provided below) + conda create --name fre --channel noaa-gfdl --channel conda-forge fre-cli -If the user just runs fre, it will list all the command groups following fre, such as run, make, pp, etc. and once the user specifies a command group, the list of available subcommands for that group will be shown +To install a specific version:: -Commands that require arguments to run will alert user about missing arguments, and will also list the rest of the optional parameters if --help is executed + conda create --name fre-202401 --channel noaa-gfdl --channel conda-forge fre-cli::2024.01 -Argument flags are not positional, can be specified in any order as long as they are specified +and activate it:: -Can run directly from any directory, no need to clone repository + conda activate fre -May need to deactivate environment and reactivate it in order for changes to apply + fre --help diff --git a/docs/subtools.rst b/docs/subtools.rst deleted file mode 100644 index ee6f663f..00000000 --- a/docs/subtools.rst +++ /dev/null @@ -1,55 +0,0 @@ -Subtools -======== - -fre app --------- - -fre catalog --------- - -The fre catalog tool brings the functionality of the `GFDL catalog builder `_ to fre users. The catalog builder is a python community package ecosystem that allows you to generate data catalogs compatible with intake-esm. 
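Since the catalogs described here (and in the new ``docs/tools/catalog.rst`` further below) are meant to be compatible with ``intake-esm``, a downstream consumer might open and query one roughly as follows. This is a minimal sketch, assuming the ``intake`` and ``intake-esm`` packages are installed and that ``~/output.json`` (with its companion csv) is the catalog written by the builder; the search keys depend on the columns your catalog actually defines.

.. code-block:: python

    # Minimal sketch: open an intake-esm-compatible catalog produced by `fre catalog`.
    # Assumes intake and intake-esm are installed; ~/output.json is a hypothetical output path.
    from pathlib import Path

    import intake

    catalog_path = Path("~/output.json").expanduser()
    catalog = intake.open_esm_datastore(str(catalog_path))

    print(catalog.df.head())                       # the catalog rows as a pandas DataFrame
    subset = catalog.search(frequency="monthly")   # "frequency" is a hypothetical column name
    print(len(subset.df))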
- -**Subtools** - -Buildcatalog - Generate a data catalog - -Validate - Validate the catalog - -**Flags** - -Overwrite - Overwrite an existing catalog at the given output path - -Apend - Append (without headerlist) to an existing catalog at the given output path - -**Quickstart** - -Catalogs are generated by the following command: fre catalog buildcatalog - -(OUTPUT_PATH should end with the desired output filename WITHOUT a file ending) See example below. - -.. code-block:: console -fre catalog buildcatalog --overwrite /archive/path_to_data_dir ~/output - -fre check (not yet implemented) --------- - -fre cmor --------- - -fre list (not yet implemented) --------- - -fre make --------- - -fre pp --------- - -fre run (not yet implemented) --------- - -fre test (not yet implemented) --------- - -fre yamltools (not yet implemented) --------- diff --git a/docs/tool_guides.rst b/docs/tool_guides.rst new file mode 100644 index 00000000..9a6459db --- /dev/null +++ b/docs/tool_guides.rst @@ -0,0 +1,156 @@ +.. NEEDS UPDATING #TODO +============= +Tool Guides +============= + +Guides for the process in which subtools are used with tools. + + +``fre app`` +============ + +``fre catalog`` +============ + +``fre cmor`` +============ + +.. _fre-make-guide: + +``fre make guide`` +============ + +1. Bare-metal Build: + +.. code-block:: + + # Create checkout script + fre make create-checkout -y [model yaml file] -p [platform] -t [target] + + # Create and run checkout script + fre make create-checkout -y [model yaml file] -p [platform] -t [target] --execute + + # Create Makefile + fre make create-makefile -y [model yaml file] -p [platform] -t [target] + + # Creat the compile script + fre make create-compile -y [model yaml file] -p [platform] -t [target] + + # Create and run the compile script + fre make create-compile -y [model yaml file] -p [platform] -t [target] --execute + + # Run all of fremake + fre make run-fremake -y [model yaml file] -p [platform] -t [target] [other options...] + +2. Container Build: + +For the container build, parallel checkouts are not supported, so the `-npc` options must be used for the checkout script. In addition the platform must be a container platform. + +Users will not be able to create containers unless they have podman access on gaea. + +.. code-block:: + + # Create checkout script + fre make create-checkout -y [model yaml file] -p [CONTAINER PLATFORM] -t [target] -npc + + # Create and run checkout script + fre make create-checkout -y [model yaml file] -p [CONTAINER PLATFORM] -t [target] --execute + + # Create Makefile + fre make create-makefile -y [model yaml file] -p [CONTAINER PLATFORM] -t [target] + + # Create a Dockerfile + fre make create-dockerfile -y [model yaml file] -p [CONTAINER PLATFORM] -t [target] + + # Create and run the Dockerfile + fre make create-dockerfile -y [model yaml file] -p [CONTAINER PLATFORM] -t [target] --execute + + +**Quickstart** + +1. Bare-metal Build: + +.. code-block:: + + # Create checkout script + fre make create-checkout -y am5.yaml -p ncrc5.intel23 -t prod + + # Create and run checkout script + fre make create-checkout -y am5.yaml -p ncrc5.intel23 -t prod --execute + + # Create Makefile + fre make create-makefile -y am5.yaml -p ncrc5.intel23 -t prod + + # Create the compile script + fre make create-compile -y am5.yaml -p ncrc5.intel23 -t prod + + # Create and run the compile script + fre make create-compile -y am5.yaml -p ncrc5.intel23 -t prod --execute + +2. Bare-metal Build Multi-target: + +.. 
code-block:: + + # Create checkout script + fre make create-checkout -y am5.yaml -p ncrc5.intel23 -t prod -t debug + + # Create and run checkout script + fre make create-checkout -y am5.yaml -p ncrc5.intel23 -t prod -t debug --execute + + # Create Makefile + fre make create-makefile -y am5.yaml -p ncrc5.intel23 -t prod -t debug + + # Create the compile script + fre make create-compile -y am5.yaml -p ncrc5.intel23 -t prod -t debug + + # Create and run the compile script + fre make create-compile -y am5.yaml -p ncrc5.intel23 -t prod -t debug --execute + +3. Container Build: + +In order for the container to build successfully, a `-npc`, or `--no-parallel-checkout` is needed. + +.. code-block:: + + # Create checkout script + fre make create-checkout -y am5.yaml -p hpcme.2023 -t prod -npc + + # Create and run checkout script + fre make create-checkout -y am5.yaml -p hpcme.2023 -t prod -npc --execute + + # Create Makefile + fre make create-makefile -y am5.yaml -p hpcme.2023 -t prod + + # Create Dockerfile + fre make create-dockerfile -y am5.yaml -p hpcme.2023 -t prod + + # Create and run the Dockerfile + fre make create-dockerfile -y am5.yaml -p hpcme.2023 -t prod --execute + +4. Run all of fremake: + +.. code-block:: + + # Bare-metal + fre make run-fremake -y am5.yaml -p ncrc5.intel23 -t prod + + # Container + fre make run-fremake -y am5.yaml -p hpcme.2023 -t prod -npc + +``fre pp`` +============ + +``fre yamltools`` +============ + +``fre check`` +============ + +``fre list`` +============ + +``fre run`` +============ + +``fre test`` +============ diff --git a/docs/tools.rst b/docs/tools.rst new file mode 100644 index 00000000..c658b211 --- /dev/null +++ b/docs/tools.rst @@ -0,0 +1,65 @@ +============= +Tools +============= + +Notes on command-line interface +======================================== +The “cli” in fre-cli derives from the shell “fre SUBCOMMAND COMMAND” structure inspired by git, cylc, and other modern Linux command-line tools. This enables discovery of the tooling capability, useful for complex tools with multiple options. + +To discover subcommands, use ``--help``, e.g.:: + + fre --help + + fre make --help + + fre pp --help + +Commands that require arguments to run will alert user about missing arguments, and will also list +the rest of the optional parameters if ``--help`` is executed. e.g.:: + + fre pp configure-yaml --help + +Argument flags are not positional, can be specified in any order. Some arguments expect sub-arguments. + +fre app +=========== + +.. include:: tools/app.rst + + +fre catalog +=============== + +.. include:: tools/catalog.rst + + +fre cmor +============ + +* See also, ``fre cmor``'s `README `_ +* See also, ``fre cmor``'s `project board `_ + +This set of tools leverages the external ``cmor`` python package within the ``fre`` ecosystem. ``cmor`` is an +acronym for "climate model output rewriter". The process of rewriting model-specific output files for model +intercomparisons (MIPs) using the ``cmor`` module is, quite cleverly, referred to as "CMORizing". + + +.. include:: tools/cmor.rst + + +fre make +============ + +.. include:: tools/make.rst + + +fre pp +========== + +.. include:: tools/pp.rst + + +fre yamltools +================= + +.. include:: tools/yamltools.rst diff --git a/docs/tools/app.rst b/docs/tools/app.rst new file mode 100644 index 00000000..252871a3 --- /dev/null +++ b/docs/tools/app.rst @@ -0,0 +1 @@ +`fre app` tools are intended to be a collection of single-purpose tools. 
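The nested ``fre SUBCOMMAND COMMAND`` layout noted above, together with the ``click`` group and ``context.forward`` pattern from the developer checklist earlier in this changeset, can be pictured with a small self-contained sketch. The group and command names below (``mytool``, ``status``) are invented for illustration and are not actual ``fre-cli`` modules.

.. code-block:: python

    # Illustrative sketch of a nested click CLI in the "fre SUBCOMMAND COMMAND" style.
    # All names here (mytool, status) are hypothetical, not real fre-cli code.
    import click

    # --- as the command might be defined in a tool's own module ---
    @click.group()
    def mytool_cli():
        """Click group holding every `mytool` command."""

    @mytool_cli.command()
    @click.option("--verbose", is_flag=True, help="print extra detail")
    def status(verbose):
        """The command's actual implementation."""
        click.echo(f"mytool status (verbose={verbose})")

    # --- as the same command might be replicated in the top-level entry point ---
    @click.group()
    def fre():
        """Top level: `fre --help` lists tool groups such as `mytool`."""

    @fre.group(name="mytool")
    def mytool():
        """`fre mytool --help` lists the commands in this group."""

    @mytool.command(name="status")
    @click.option("--verbose", is_flag=True, help="print extra detail")
    @click.pass_context
    def status_wrapper(context, verbose):
        """Same arguments/options as the real command; forwards to it."""
        context.forward(status)

    if __name__ == "__main__":
        fre()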
diff --git a/docs/tools/catalog.rst b/docs/tools/catalog.rst new file mode 100644 index 00000000..03eeca22 --- /dev/null +++ b/docs/tools/catalog.rst @@ -0,0 +1,12 @@ +.. NEEDS UPDATING #TODO +``builder`` +----------- + Generate a catalog +* Builds json and csv format catalogs from user input directory path +* Minimal Syntax: ``fre catalog builder -i [input path] -o [output path]`` +* Module(s) needed: n/a +* Example: ``fre catalog builder -i /archive/am5/am5/am5f3b1r0/c96L65_am5f3b1r0_pdclim1850F/gfdl.ncrc5-deploy-prod-openmp/pp -o ~/output --overwrite`` + +``validate`` +------------ +Validate the catalog diff --git a/docs/tools/cmor.rst b/docs/tools/cmor.rst new file mode 100644 index 00000000..ee55cf43 --- /dev/null +++ b/docs/tools/cmor.rst @@ -0,0 +1,91 @@ +.. last updated Nov 2024 + +``run`` +------- + +``fre cmor run`` rewrites climate model output files in a target directory in a CMIP-compliant manner +for downstream publishing. It accepts 6 arguments, only one being optional. A brief description of each: + + +arguments +~~~~~~~~~ + +* (required) ``-d, --indir TEXT``, input directory containing netCDF files to CMORize. + + - all netCDF files within ``indir`` will have their filename checked for local variables + specified in ``varlist`` as keys, and ISO datetime strings extracted and kept in a list + for later iteration over target files + + - a debugging-oriented boolean flag constant at the top of ``cmor_mixer.py``, if ``True`` + will process one file of all files found within ``indir`` and cease processing for that + variable after succeeding on one file + +* (required) ``-l, --varlist TEXT``, path to variable list dictionary. + + - each entry in the variable list dictionary corresponds to a key/value pair + + - the key (local variable) is used for ID'ing files within ``indir`` to be processed + + - associated with the key (local variable) is the value (target variable), which should + be the label attached to the data within the targeted file(s) + +* (required) ``-r, --table_config TEXT``, path to MIP json configuration holding variable + metadata. + + - typically, this is to be provided by a data-request associated with the MIP and + participating experiments + +* (required) ``-p, --exp_config TEXT``, path to json configuration holding experiment/model + metadata + + - contains e.g. ``grid_label``, and points to other important configuration files + associated with the MIP + + - the other configuration files are typically housing metadata associated with ``coordinates``, + ``formula_terms``, and controlled-vocabulary (``CV``). + +* (required) ``-o, --outdir TEXT``, path-prefix in which the output directory structure is created. + + - further output-directories and structure/template information is specified in ``exp_config`` + + - in addition to the output-structure/template used, an additional directory corresponding to the + date the CMORizing was done is created near the end of the directory tree structure + +* (optional) ``-v, --opt_var_name TEXT``, a specific variable to target for processing + + - largely a debugging convenience functionality, this can be helpful for targeting more specific + input files as desired. + + +examples +~~~~~~~~ +with a local clone of ``fre-cli``, the following call should work out-of-the-box from +the root directory of the repository. + +..
code-block:: python + + fre cmor run \ + -d fre/tests/test_files/ocean_sos_var_file \ + -l fre/tests/test_files/varlist \ + -r fre/tests/test_files/cmip6-cmor-tables/Tables/CMIP6_Omon.json \ + -p fre/tests/test_files/CMOR_input_example.json \ + -o fre/tests/test_files/outdir + +background +~~~~~~~~~~ + +The bulk of this routine is housed in ``fre/cmor/cmor_mixer.py``, which is a rewritten version of +Sergey Malyshev's original ``CMORcommander.py`` script, utilized during GFDL's CMIP6 publishing run. + +This code is dependent on two primary json configuration files: a MIP +variable table and another containing experiment (i.e. model) specific metadata (e.g. grid) to append +to the output netCDF file headers, in addition to other configuration options such as output directory +name specification, output path templates, and specification of other json configuration files containing +controlled-vocabulary (CV), coordinate, and formula term conventions for rewriting the output metadata. + + + + + + + diff --git a/docs/tools/make.rst b/docs/tools/make.rst new file mode 100644 index 00000000..5a3e557b --- /dev/null +++ b/docs/tools/make.rst @@ -0,0 +1,59 @@ +``create-checkout`` +------------- + +``fre make create-checkout [options]`` + - Purpose: Creates the checkout script and can check out source code (with execute option) + - Options: + - `-y, --yamlfile [experiment yaml] (required)` + - `-p, --platform [platform] (required)` + - `-t, --target [target] (required)` + - `-j, --jobs [number of jobs to run simultaneously]` + - `-npc, --no-parallel-checkout (for container build)` + - `-e, --execute` + +``create-makefile`` +------------- + +``fre make create-makefile [options]`` + - Purpose: Creates the makefile + - Options: + - `-y, --yamlfile [experiment yaml] (required)` + - `-p, --platform [platform] (required)` + - `-t, --target [target] (required)` + +``create-compile`` +------------- + +``fre make create-compile [options]`` + - Purpose: Creates the compile script and compiles the model (with execute option) + - Options: + - `-y, --yamlfile [experiment yaml] (required)` + - `-p, --platform [platform] (required)` + - `-t, --target [target] (required)` + - `-j, --jobs [number of jobs to run simultaneously]` + - `-n, --parallel [number of concurrent model compiles]` + - `-e, --execute` + +``create-dockerfile`` +------------- + +``fre make create-dockerfile [options]`` + - Purpose: Creates the dockerfile and creates the container (with execute option) + - With the creation of the dockerfile, the Makefile, checkout script, and any other necessary script is copied into the container from a temporary location + - Options: + - `-y, --yamlfile [experiment yaml] (required)` + - `-p, --platform [platform] (required)` + - `-t, --target [target] (required)` + +``run-fremake`` +------------- + +``fre make run-fremake [options]`` + - Purpose: Create the checkout script, Makefile, compile script, and dockerfile (platform dependent) for the compilation of the model + - Options: + - `-y, --yamlfile [experiment yaml] (required)` + - `-p, --platform [platform] (required)` + - `-t, --target [target] (required)` + - `-npc, --no-parallel-checkout (for container build)` + - `-j, --jobs [number of jobs to run simultaneously]` + - `-n, --parallel [number of concurrent model compiles]` diff --git a/docs/tools/pp.rst b/docs/tools/pp.rst new file mode 100644 index 00000000..6160a1e5 --- /dev/null +++ b/docs/tools/pp.rst @@ -0,0 +1,16 @@ +..
NEEDS UPDATING #TODO +``configure`` +------------- + +* Postprocessing yaml configuration +* Minimal Syntax: ``fre pp configure -y [user-edit yaml file]`` +* Module(s) needed: n/a +* Example: ``fre pp configure -y /home/$user/pp/ue2/user-edits/edits.yaml`` + +``checkout`` +------------ + +* Checkout template file and clone gitlab.gfdl.noaa.gov/fre2/workflows/postprocessing.git repository +* Minimal Syntax: ``fre pp checkout -e [experiment name] -p [platform name] -t [target name]`` +* Module(s) needed: n/a +* Example: ``fre pp checkout -e c96L65_am5f4b4r0_amip -p gfdl.ncrc5-deploy -t prod-openmp`` diff --git a/docs/tools/yamltools.rst b/docs/tools/yamltools.rst new file mode 100644 index 00000000..0bf3868a --- /dev/null +++ b/docs/tools/yamltools.rst @@ -0,0 +1,6 @@ +.. NEEDS UPDATING #TODO + +``combine-yamls`` +----------------- + +* placehold diff --git a/docs/usage.rst b/docs/usage.rst index 95afbe5a..bf5be2a6 100644 --- a/docs/usage.rst +++ b/docs/usage.rst @@ -1,156 +1,24 @@ +============= Usage -===== +============= +Using a set of YAML configuration files, ``fre make`` compiles a FMS-based model, and ``fre pp`` postprocesses the history output and runs diagnostic analysis scripts. Please note that model running is not yet supported in FRE 2024; continue to use FRE Bronx frerun. -User Usage ----------- +Build FMS model +======================= +.. include:: usage/compile.rst -**Conda environment set up** +Run FMS model +======================= +Check back in the latter half of 2025 or so. -Load Conda +Postprocess FMS history output +============================== +.. include:: usage/postprocess.rst -.. code-block::console - module load conda +CMORize postprocessed output +============================ +.. include:: usage/cmor.rst -Create new Conda environment - -.. code-block::console - conda create -n [environmentName] - -Append necessary channels - -.. code-block::console - conda config --append channels noaa-gfdl - conda config --append channels conda-forge - -Install needed dependencies - -.. code-block::console - conda install noaa-gfdl::fre-cli - -setup.py file allows fre.py to be ran with fre as the entry point on the command line instead of python fre.py - -Enter commands and follow *--help* messages for guidance (brief rundown of commands also provided below) - -If the user just runs *fre*, it will list all the command groups following *fre*, such as *run*, *make*, *pp*, etc. and once the user specifies a command group, the list of available subcommands for that group will be shown - -Commands that require arguments to run will alert user about missing arguments, and will also list the rest of the optional parameters if *--help* is executed - -Argument flags are not positional, can be specified in any order as long as they are specified - -Can run directly from any directory, no need to clone repository - -May need to deactivate environment and reactivate it in order for changes to apply - - -Tools ------ - -A few subtools are currently in development: - -**fre pp** - -1. configure - -* Postprocessing yaml configuration -* Minimal Syntax: *fre pp configure -y [user-edit yaml file]* -* Module(s) needed: n/a -* Example: *fre pp configure -y /home/$user/pp/ue2/user-edits/edits.yaml* - -2. 
checkout - -* Checkout template file and clone gitlab.gfdl.noaa.gov/fre2/workflows/postprocessing.git repository -* Minimal Syntax: *fre pp checkout -e [experiment name] -p [platform name] -t [target name]* -* Module(s) needed: n/a -* Example: *fre pp checkout -e c96L65_am5f4b4r0_amip -p gfdl.ncrc5-deploy -t prod-openmp* - - -**fre catalog** - -1. buildCatalog1 -* Builds json and csv format catalogs from user input directory path -* Minimal Syntax: *fre catalog buildCatalog -i [input path] -o [output path]* -* Module(s) needed: n/a -* Example: *fre catalog buildCatalog -i /archive/am5/am5/am5f3b1r0/c96L65_am5f3b1r0_pdclim1850F/gfdl.ncrc5-deploy-prod-openmp/pp -o ~/output --overwrite* - -**To be developed:** - -#. fre check -#. fre list -#. fre make -#. fre run -#. fre test -#. fre yamltools - - -Usage (Developers) ------------------- - -Developers are free to use the user guide above to familiarize with the CLI and save time from having to install any dependencies, but development within a Conda environment is heavily recommended regardless - -Gain access to the repository with *git clone git@github.com:NOAA-GFDL/fre-cli.git* or your fork's link (recommended) and an SSH RSA key - -Once inside the repository, developers can test local changes by running a *pip install .* inside of the root directory to install the fre-cli package locally with the newest local changes - -Test as a normal user would use the CLI - -**Adding New Tools - Checklist** - -If there is *no* subdirectory created for the new tool you are trying to develop, there are a few steps to follow: - -1. Create a subdirectory for the tool group inside the /fre folder; i.e. /fre/fre(subTool) - -2. Add an *__init__.py* inside of the new subdirectory - -* This will contain one line, *from fre.fre(subTool) import ** -* The purpose of this line is to allow the subTool module to include all the scripts and functions within it when invoked by fre - -3. Add a file named *fre(subTool).py*. This will serve as the main file to house all of the tool's related subcommands - -4. Add a Click group named after the subTool within *fre(subTool).py* - -* This group will contain all of the subcommands - -5. Create separate files to house the code for each different subcommand; do not code out the full implemetation of a function inside of a Click command within *fre(subTool).py* - -6. Be sure to import the contents of the needed subcommand scripts inside of fre(subTool).py - -* i.e. from fre.fre(subTool).subCommandScript import * - -7. At this point, you can copy and paste the parts of your main Click subcommand from its script into *fre(subTool).py* when implementing the function reflective of the subcommand function - -* Everything will remain the same; i.e. arguments, options, etc. - -* However, this new function within *fre(subTool).py* must a new line after the arguments, options, and other command components; *@click.pass_context* - -* Along with this, a new argument "context" must now be added to the parameters of the command (preferably at the beginning, but it won't break it if it's not) - -8. From here, all that needs to be added after defining the command with a name is *context.forward(mainFunctionOfSubcommand)*, and done! - -9. After this step, it is important to add *from fre.fre(subTool) import* to the *__init__.py* within the /fre folder - -10. 
The last step is to replicate the subcommand in the same way as done in *fre(subTool).py* inside of *fre.py*, but make sure to add *from fre import fre(subTool)* and *from fre.fre(subTool).fre(subTool) import ** - -Please refer to this issue when encountering naming issues: `NOAA-GFDL#31 `_ - -**Adding Tools From Other Repositories** - -Currently, the solution to this task is to approach it using Conda packages. The tool that is being added must reside within a repository that contains a meta.yaml that includes Conda dependencies like the one in this repository and ideally a setup.py (may be subject to change due to deprecation) that may include any potentially needed pip dependencies - -* Once published as a Conda package, ideally on the NOAA-GFDL channel at https://anaconda.org/NOAA-GFDL, an addition can be made to the "run" section under the "requirements" category in the meta.yaml of the fre-cli following the syntax channel::package - -* On pushes to the main branch, the package located at https://anaconda.org/NOAA-GFDL/fre-cli will automatically be updated using the workflow file - -**MANIFEST.in** - -In the case where non-python files like templates, examples, and outputs are to be included in the fre-cli package, MANIFEST.in can provide the solution. Ensure that the file exists within the correct folder, and add a line to the MANIFEST.in file saying something like *include fre/fre(subTool)/fileName.fileExtension* - -* For more efficiency, if there are multiple files of the same type needed, the MANIFEST.in addition can be something like *recursive-include fre/fre(subTool) *.fileExtension* which would recursively include every file matching that fileExtension within the specified directory and its respective subdirectories. - -**Example /fre Directory Structure** -. -├── __init__.py -├── fre.py -├── fre(subTool) -│ ├── __init__.py -│ ├── subCommandScript.py -│ └── fre(subTool).py +Generate data catalogs +====================== +.. include:: usage/catalogs.rst diff --git a/docs/usage/cmor.rst b/docs/usage/cmor.rst new file mode 100644 index 00000000..d886bf4b --- /dev/null +++ b/docs/usage/cmor.rst @@ -0,0 +1,20 @@ +Brief rundown of commands also provided below: + +* Enter commands and follow ``--help`` messages for guidance +* If the user just runs ``fre``, it will list all the command groups following ``fre``, such as + ``run``, ``make``, ``pp``, etc. and once the user specifies a command group, the list of available + subcommands for that group will be shown +* Commands that require arguments to run will alert user about missing arguments, and will also list + the rest of the optional parameters if ``--help`` is executed +* Argument flags are not positional, can be specified in any order as long as they are specified +* Can run directly from any directory, no need to clone repository +* May need to deactivate environment and reactivate it in order for changes to apply +* ``fre/setup.py`` allows ``fre/fre.py`` to be ran as ``fre`` on the command line by defining it as an + *entry point*. Without it, the call would be instead, something like ``python fre/fre.py`` + +* See also, ``fre cmor``'s `README `_ +* See also, ``fre cmor``'s `project board `_ + +This set of tools leverages the external ``cmor`` python package within the ``fre`` ecosystem. ``cmor`` is an +acronym for "climate model output rewriter". The process of rewriting model-specific output files for model +intercomparisons (MIPs) using the ``cmor`` module is, quite cleverly, referred to as "CMORizing". 
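For the ``--varlist`` argument covered in the ``fre cmor run`` notes above, the key/value idea can be made concrete with a short sketch. It assumes the variable list is stored as a JSON dictionary, like the tool's other configuration inputs, and the variable names below are invented examples rather than the contents of the repository's test files.

.. code-block:: python

    # Sketch: write a `fre cmor run` variable list ("varlist") with the standard library.
    # Assumes the varlist is a JSON dictionary; the names below are invented examples.
    import json

    varlist = {
        # key:   "local" variable label, matched against filenames under --indir
        # value: "target" variable label attached to the data inside those files
        "sos_local": "sos",
        "tos_local": "tos",
    }

    with open("my_varlist.json", "w", encoding="utf-8") as handle:
        json.dump(varlist, handle, indent=2)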
diff --git a/docs/usage/compile.rst b/docs/usage/compile.rst new file mode 100644 index 00000000..8f9dfbe5 --- /dev/null +++ b/docs/usage/compile.rst @@ -0,0 +1,19 @@ +``fre make`` can compile a traditional "bare metal" executable or a containerized executable using a set of YAML configuration files. + +Through the fre-cli, `fre make` can be used to create and run a checkout script, create a makefile, and compile a model. + +Fremake Canopy Supports: + - multiple targets; use `-t` flag to define each target + - bare-metal build + - container creation + - parallel checkouts for bare-metal build** + +** **Note: Users will not be able to create containers without access to podman** + +.. include:: fre_make.rst + +Guide and quickstart to `fre make` subtools: + +:ref:`fre-make-guide` + +https://github.com/NOAA-GFDL/fre-cli/blob/main/fre/make/README.md diff --git a/docs/usage/postprocess.rst b/docs/usage/postprocess.rst new file mode 100644 index 00000000..994a4f1b --- /dev/null +++ b/docs/usage/postprocess.rst @@ -0,0 +1,88 @@ +``fre pp`` regrids FMS history files and generates timeseries, climatologies, and static postprocessed files, with instructions specified in YAML. + +Bronx plug-in refineDiag and analysis scripts can also be used, and a reimagined analysis script ecosystem is being developed and is available now (for adventurous users). The new analysis script framework is independent of and compatible with FRE (https://github.com/NOAA-GFDL/analysis-scripts). The goal is to combine the ease-of-use of legacy FRE analysis scripts with the standardization of model output data catalogs and python virtual environments. + +In the future, output NetCDF files will be rewritten by CMOR by default, ready for publication to community archives (e.g. ESGF). Presently, standalone CMOR tooling is available as ``fre cmor``. + +By default, an intake-esm-compatible data catalog is generated and updated, containing a programmatic metadata-enriched searchable interface to the postprocessed output. The catalog tooling can be independently accessed as ``fre catalog``. + +FMS history files +----------------- +FRE experiments are run in segments of simulated time. The FMS diagnostic manager, as configured in +experiment configuration files (diag yamls), saves a set of diagnostic output files, or "history files." +The history files are organized by label and can contain one or more temporal or static diagnostics. +FRE (Bronx frerun) renames and combines the raw model output (that is usually on a distributed grid), +and saves the history files in one tarfile per segment, date-stamped with the date of the beginning of the segment. +The FMS diagnostic manager requires +that variables within one history file be the same temporal frequency (e.g. daily, monthly, annual), +but statics are allowed in any history file. Usually, variables in a history file +share a horizontal and vertical grid. + +Each history tarfile, again, is date-stamped with the date of the beginning of the segment, in YYYYMMDD format. +For example, for a 5-year experiment with 6-month segments, there will be 10 history tarfiles containing the +raw model output. Each history tarfile contains a segment's worth of time (in this case 6 months).:: + + 19790101.nc.tar 19800101.nc.tar 19810101.nc.tar 19820101.nc.tar 19830101.nc.tar + 19790701.nc.tar 19800701.nc.tar 19810701.nc.tar 19820701.nc.tar 19830701.nc.tar + +Each history file within the history tarfiles is also similarly date-stamped.
Atmosphere and land history files +are on the native cubed-sphere grid, which have 6 tiles that represent the global domain. Ocean, ice, and +global scalar output have just one file that covers the global domain. + +For example, if the diagnostic manager were configured to save atmospheric and ocean annual and monthly history files, +the 19790101.nc.tar tarfile might contain:: + + tar -tf 19790101.nc.tar | sort + + ./19790101.atmos_month.tile1.nc + ./19790101.atmos_month.tile2.nc + ./19790101.atmos_month.tile3.nc + ./19790101.atmos_month.tile4.nc + ./19790101.atmos_month.tile5.nc + ./19790101.atmos_month.tile6.nc + ./19790101.atmos_annual.tile1.nc + ./19790101.atmos_annual.tile2.nc + ./19790101.atmos_annual.tile3.nc + ./19790101.atmos_annual.tile4.nc + ./19790101.atmos_annual.tile5.nc + ./19790101.atmos_annual.tile6.nc + ./19790101.ocean_month.nc + ./19790101.ocean_annual.nc + +The name of the history file, while often predictably named, are arbitrary labels within the Diagnostic Manager configuration +(diag yamls). Each history file is a CF-standard NetCDF file that can be inspected with common NetCDF tools such as the NCO or CDO tools, or even ``ncdump``. + +Postprocess components +---------------------- +History files are not immediately convenient for analysis. +On native grid, named in a single namespace. +Desire: regridded, renamed, ts + +Timeseries +---------- +Set chunk_a, and chunk_b if desired. + +XY-regridding +------------- +blahblah + +Climatologies +------------- +annual and monthly climatologies +less fine-grained than bronx +per-component switch coming +now it's one switch for entire pp + +Statics +------- +underbaked, known deficiency +currently, takes statics from "source" history files + +Analysis scripts +---------------- + +Surface masking for FMS pressure-level history +---------------------------------------------- + +Legacy refineDiag scripts +------------------------- diff --git a/docs/what-is-fre.rst b/docs/what-is-fre.rst new file mode 100644 index 00000000..6e289ff6 --- /dev/null +++ b/docs/what-is-fre.rst @@ -0,0 +1,13 @@ +============ +What is FRE? +============ + +FRE, the FMS Runtime Environment, is the companion runtime workflow for FMS-based climate and earth system models, and contains scripts and batch job handlers to compile models, run experiments, and postprocess and analyze the output. Developed around 2004 by GFDL's Modeling System Division, FRE was developed primarily in one repository ("fre-commands", https://github.com/NOAA-GFDL/FRE), used subtools in another repository (FRE-NCtools, https://github.com/NOAA-GFDL/fre-nctools), and was deployed using a set of Environment Modules (https://gitlab.gfdl.noaa.gov/fre/modulefiles). Originally, the major releases of FRE were rivers (Arkansas, Bronx) and the minor releases were numbers. In practice, though, the "Bronx" release name was retained and the number has been incremented over the years. e.g. Bronx-23 is the latest release. + +Over the last couple years, MSD's workflow team has reengineered the compiling and postprocessing parts of FRE, in a modern python and Cylc-based ecosystem (running experiments is not yet possible with this new FRE; stay tuned). Following a semantic versioning adopted in other FMS repositories, the reengineered FRE is versioned with a year and incrementing two-digit number. e.g. the first release of 2024 is 2024.01, the second 2024.02, and the first release next year will be 2025.01. (Optional minor releases are also available in the scheme; e.g. 
2024.01.01 would be the first minor/patch release after 2024.01.) This version is used as tags in FRE repositories and in the corresponding conda (and in the future, container) release, and can be retrieved from ``fre --version``. + +fre-cli (this repository) can be considered a successor to the FRE Bronx “fre-commands” repository, which primarily contains user-facing tools and subtools. fre-workflows (https://github.com/NOAA-GFDL/fre-workflows) is a companion repository containing workflow definitions that can be run by the Cylc workflow engine. It contains workflow-specific elements previously in FRE Bronx, and allows flexibility to support multiple and more complex workflows. The two new FRE repositories are versioned with the same approach, and updates will be released together for some time to ensure compatibility. + +The “cli” in fre-cli derives from the shell “fre SUBCOMMAND COMMAND” structure inspired by git, cylc, and other modern Linux command-line tools. This enables discovery of the tooling capability, useful for complex tools with multiple options. e.g. ``fre --help``, ``fre make --help``, ``fre pp --help``. + +Underneath, fre-cli is python, and the workflows and tooling can be run through a Jupyter notebook, or through other python scripts. fre-cli is conda-installable from the “noaa-gfdl” channel (``conda install --channel noaa-gfdl fre-cli``). diff --git a/fre/app/generate_time_averages/tests/.unused_tests/test_multiply_duration.py b/fre/app/generate_time_averages/tests/.unused_tests/test_multiply_duration.py deleted file mode 100644 index 704907b5..00000000 --- a/fre/app/generate_time_averages/tests/.unused_tests/test_multiply_duration.py +++ /dev/null @@ -1,17 +0,0 @@ -from fre_python_tools.utilities.multiply_duration import multiply_duration -import metomi.isodatetime.parsers as parse - -def test_month(): - '''1 month x 2 = 2 months''' - two_months = parse.DurationParser().parse('P2M') - assert multiply_duration('P1M', 2) == two_months - -def test_minutes(): - '''12 minutes x 5 = 1 hour''' - hour = parse.DurationParser().parse('PT1H') - assert multiply_duration('PT12M', 5) == hour - -def test_fail(): - '''10 minutes x 5 != 1 hour''' - hour = parse.DurationParser().parse('PT1H') - assert multiply_duration('PT10M', 5) != hour diff --git a/fre/app/generate_time_averages/tests/.unused_tests/test_subtract_durations.py b/fre/app/generate_time_averages/tests/.unused_tests/test_subtract_durations.py deleted file mode 100644 index f495745d..00000000 --- a/fre/app/generate_time_averages/tests/.unused_tests/test_subtract_durations.py +++ /dev/null @@ -1,17 +0,0 @@ -from fre_python_tools.utilities.subtract_durations import subtract_durations -import metomi.isodatetime.parsers as parse - -def test_months(): - '''13 months - 3 months = 10 months''' - ten_months = parse.DurationParser().parse('P10M') - assert subtract_durations('P13M', 'P3M') == ten_months - -def test_hour(): - '''2 hours minus 30 minutes = 90 minutes''' - ninety_mins = parse.DurationParser().parse('PT90M') - assert subtract_durations('PT2H', 'PT30M') == ninety_mins - -def test_fail(): - '''2 hours minus 60 minutes != 90 minutes''' - ninety_mins = parse.DurationParser().parse('PT90M') - assert subtract_durations('PT2H', 'PT60M') != ninety_mins diff --git a/fre/app/generate_time_averages/tests/test_generate_time_averages.py b/fre/app/generate_time_averages/tests/test_generate_time_averages.py index 4ce3963d..7e1345d9 100644 --- a/fre/app/generate_time_averages/tests/test_generate_time_averages.py +++ 
b/fre/app/generate_time_averages/tests/test_generate_time_averages.py @@ -95,32 +95,32 @@ def test_cdo_time_unwgt_stddevs(): #def test_cdo_time_stddevs(): ## frepythontools avgs+stddevs, weighted+unweighted, all ------------------------ -def test_fre_python_tools_time_avgs(): - ''' generates a time averaged file using fre_python_tools's version ''' +def test_fre_cli_time_avgs(): + ''' generates a time averaged file using fre_cli's version ''' ''' weighted average, no std deviation ''' assert run_avgtype_pkg_calculations( infile = (time_avg_file_dir+test_file_name), outfile = (time_avg_file_dir+'frepytools_timavg_'+test_file_name), pkg='fre-python-tools',avg_type='all', unwgt=False ) -def test_fre_python_tools_time_unwgt_avgs(): - ''' generates a time averaged file using fre_python_tools's version ''' +def test_fre_cli_time_unwgt_avgs(): + ''' generates a time averaged file using fre_cli's version ''' ''' weighted average, no std deviation ''' assert run_avgtype_pkg_calculations( infile = (time_avg_file_dir+test_file_name), outfile = (time_avg_file_dir+'frepytools_unwgt_timavg_'+test_file_name), pkg='fre-python-tools',avg_type='all', unwgt=True ) -def test_fre_python_tools_time_avgs_stddevs(): - ''' generates a time averaged file using fre_python_tools's version ''' +def test_fre_cli_time_avgs_stddevs(): + ''' generates a time averaged file using fre_cli's version ''' ''' weighted average, no std deviation ''' assert run_avgtype_pkg_calculations( infile = (time_avg_file_dir+test_file_name), outfile = (time_avg_file_dir+'frepytools_stddev_'+test_file_name), pkg='fre-python-tools',avg_type='all', stddev_type='samp', unwgt=False ) -def test_fre_python_tools_time_unwgt_avgs_stddevs(): - ''' generates a time averaged file using fre_python_tools's version ''' +def test_fre_cli_time_unwgt_avgs_stddevs(): + ''' generates a time averaged file using fre_cli's version ''' ''' weighted average, no std deviation ''' assert run_avgtype_pkg_calculations( infile = (time_avg_file_dir+test_file_name), @@ -128,15 +128,15 @@ def test_fre_python_tools_time_unwgt_avgs_stddevs(): pkg='fre-python-tools',avg_type='all', stddev_type='samp', unwgt=True ) ## (TODO) WRITE THESE VERSIONS FOR FREPYTOOLSTIMEAVERAGER CLASS THEN MAKE THESE TESTS -#def test_monthly_fre_python_tools_time_avgs(): -#def test_monthly_fre_python_tools_time_unwgt_avgs(): -#def test_monthly_fre_python_tools_time_avgs_stddevs(): -#def test_monthly_fre_python_tools_time_unwgt_avgs_stddevs(): +#def test_monthly_fre_cli_time_avgs(): +#def test_monthly_fre_cli_time_unwgt_avgs(): +#def test_monthly_fre_cli_time_avgs_stddevs(): +#def test_monthly_fre_cli_time_unwgt_avgs_stddevs(): # -#def test_seasonal_fre_python_tools_time_avgs(): -#def test_seasonal_fre_python_tools_time_unwgt_avgs(): -#def test_seasonal_fre_python_tools_time_avgs_stddevs(): -#def test_seasonal_fre_python_tools_time_unwgt_avgs_stddevs(:) +#def test_seasonal_fre_cli_time_avgs(): +#def test_seasonal_fre_cli_time_unwgt_avgs(): +#def test_seasonal_fre_cli_time_avgs_stddevs(): +#def test_seasonal_fre_cli_time_unwgt_avgs_stddevs(:) @@ -144,7 +144,7 @@ def test_fre_python_tools_time_unwgt_avgs_stddevs(): #alt_str_fre_nctools_inf= \ # 'tests/time_avg_test_files/fre_nctools_timavg_CLI_test_r8_b_atmos_LWP_1979_5y.nc' #def test_fre_nctools_time_avgs(): -# ''' generates a time averaged file using fre_python_tools's version ''' +# ''' generates a time averaged file using fre_cli's version ''' # ''' weighted average, no std deviation ''' # infile =time_avg_file_dir+test_file_name # 
all_outfile=time_avg_file_dir+'frenctools_timavg_'+test_file_name @@ -153,7 +153,7 @@ def test_fre_python_tools_time_unwgt_avgs_stddevs(): # print('output test file exists. deleting before remaking.') # pl.Path(all_outfile).unlink() #delete file so we check that it can be recreated # -# from fre_python_tools.generate_time_averages import generate_time_averages as gtas +# from fre_cli.generate_time_averages import generate_time_averages as gtas # gtas.generate_time_average(infile = infile, outfile = all_outfile, # pkg='fre-nctools', unwgt=False, avg_type='all') # assert pl.Path(all_outfile).exists() @@ -169,8 +169,8 @@ def test_fre_python_tools_time_unwgt_avgs_stddevs(): str_unwgt_cdo_inf=time_avg_file_dir+'timmean_unwgt_'+test_file_name -def test_compare_fre_python_tools_to_fre_nctools(): - ''' compares fre_python_tools pkg answer to fre_nctools pkg answer ''' +def test_compare_fre_cli_to_fre_nctools(): + ''' compares fre_cli pkg answer to fre_nctools pkg answer ''' import numpy as np import netCDF4 as nc fre_pytools_inf=nc.Dataset(str_fre_pytools_inf,'r') @@ -208,8 +208,8 @@ def test_compare_fre_python_tools_to_fre_nctools(): assert not( (non_zero_count > 0.) or (non_zero_count < 0.) ) @pytest.mark.skip(reason='test fails b.c. cdo cannot bitwise-reproduce fre-nctools answer') -def test_compare_fre_python_tools_to_cdo(): - ''' compares fre_python_tools pkg answer to cdo pkg answer ''' +def test_compare_fre_cli_to_cdo(): + ''' compares fre_cli pkg answer to cdo pkg answer ''' import numpy as np import netCDF4 as nc fre_pytools_inf=nc.Dataset(str_fre_pytools_inf,'r') @@ -239,8 +239,8 @@ def test_compare_fre_python_tools_to_cdo(): assert not( (non_zero_count > 0.) or (non_zero_count < 0.) ) -def test_compare_unwgt_fre_python_tools_to_unwgt_cdo(): - ''' compares fre_python_tools pkg answer to cdo pkg answer ''' +def test_compare_unwgt_fre_cli_to_unwgt_cdo(): + ''' compares fre_cli pkg answer to cdo pkg answer ''' import numpy as np import netCDF4 as nc fre_pytools_inf=nc.Dataset(str_unwgt_fre_pytools_inf,'r') diff --git a/fre/coveragerc b/fre/coveragerc index f95c3c41..0e936b95 100644 --- a/fre/coveragerc +++ b/fre/coveragerc @@ -1,3 +1,15 @@ +# https://pytest-cov.readthedocs.io/en/latest/config.html [run] omit = - */test_*py + fre/tests/* + fre/app/generate_time_averages/tests/* + fre/app/regrid_xy/tests/* + fre/catalog/tests/* + fre/check + fre/cmor/tests/* + fre/list + fre/make/tests/* + fre/pp/tests/* + fre/run + fre/test + fre/yamltools/tests/* diff --git a/fre/gfdl_msd_schemas b/fre/gfdl_msd_schemas new file mode 160000 index 00000000..04c8150b --- /dev/null +++ b/fre/gfdl_msd_schemas @@ -0,0 +1 @@ +Subproject commit 04c8150bc362304d82e60e765405135460b69f06 diff --git a/fre/make/fremake.py b/fre/make/fremake.py index b2053e5f..f39a6be0 100644 --- a/fre/make/fremake.py +++ b/fre/make/fremake.py @@ -73,7 +73,7 @@ def make_cli(): @click.pass_context def run_fremake(context, yamlfile, platform, target, parallel, jobs, no_parallel_checkout, verbose): """ - Perform all fremake functions to run checkout and compile model""" - context.forward(runfremake._fremake_run) + context.forward(runFremake._fremake_run) #### @make_cli.command() diff --git a/fre/make/gfdlfremake/.gitignore b/fre/make/gfdlfremake/.gitignore deleted file mode 100644 index a623caf0..00000000 --- a/fre/make/gfdlfremake/.gitignore +++ /dev/null @@ -1,5 +0,0 @@ -__pycache__/ -Dockerfile -checkout.sh -compile.sh -Makefile diff --git a/fre/make/gfdlfremake/.gitlab-ci.yml b/fre/make/gfdlfremake/.gitlab-ci.yml deleted file mode 
100644 index 1e1b666c..00000000 --- a/fre/make/gfdlfremake/.gitlab-ci.yml +++ /dev/null @@ -1,12 +0,0 @@ -stages: - - test - -test_build_am5: - stage: test - script: -# conda env -# - /ncrc/sw/gaea-c5/python/3.9/anaconda-base/envs/noaa_py3.9 - - cd yamls/ - - ../fremake -y am5.yaml -p ncrc5.intel -t prod - tags: - - ncrc5 diff --git a/fre/make/gfdlfremake/checkout.py b/fre/make/gfdlfremake/checkout.py index 9afd0ef9..d3fa5d4e 100644 --- a/fre/make/gfdlfremake/checkout.py +++ b/fre/make/gfdlfremake/checkout.py @@ -111,7 +111,7 @@ def finish (self,pc): ## TODO: batch script building def run (self): """ - Brief: Changes the permission on the checkout script and runs it + Brief: Runs the checkout script Param: - self The checkout script object """ diff --git a/fre/make/gfdlfremake/fremake b/fre/make/gfdlfremake/fremake deleted file mode 100755 index 6c245424..00000000 --- a/fre/make/gfdlfremake/fremake +++ /dev/null @@ -1,249 +0,0 @@ -#!/usr/bin/python3 -## \date 2023 -## \author Tom Robinson -## \author Dana Singh -## \description fremake is used to create and run a code checkout script and compile a model. - -import subprocess -import os -import yaml -import argparse -import logging -from . import targetfre, varsfre, yamlfre, checkout, makefilefre, buildDocker, buildBaremetal -from multiprocessing.dummy import Pool - -## Add in cli options -if __name__ == "__main__": - parser = argparse.ArgumentParser(description='Fremake is used to create a code checkout script to compile models for FRE experiments.') - parser.add_argument("-y", - "--yamlfile", - type=str, help="Experiment yaml compile FILE",required=True) - parser.add_argument("-p", - "--platform", - nargs='*', - type=str, help="Hardware and software FRE platform space separated list of STRING(s). This sets platform-specific data and instructions",required=True) - parser.add_argument("-t", - "--target", - nargs='*', - type=str, help="FRE target space separated list of STRING(s) that defines compilation settings and linkage directives for experiments.\n\nPredefined targets refer to groups of directives that exist in the mkmf template file (referenced in buildDocker.py). Possible predefined targets include 'prod', 'openmp', 'repro', 'debug, 'hdf5'; however 'prod', 'repro', and 'debug' are mutually exclusive (cannot not use more than one of these in the target list). Any number of targets can be used.",required=True) - parser.add_argument("-f", - "--force-checkout", - action="store_true", - help="Force checkout to get a fresh checkout to source directory in case the source directory exists") - parser.add_argument("-F", - "--force-compile", - action="store_true", - help="Force compile to compile a fresh executable in case the executable directory exists") - parser.add_argument("-K", - "--keep-compiled", - action="store_true", - help="Keep compiled files in the executable directory for future use") - parser.add_argument("--no-link", - action="store_true", - help="Do not link the executable") - parser.add_argument("-E", - "--execute", - action="store_true", - help="Execute all the created scripts in the current session") - parser.add_argument("-n", - "--parallel", - type=int, - metavar='', default=1, - help="Number of concurrent model compiles (default 1)") - parser.add_argument("-j", - "--jobs", - type=int, - metavar='', default=4, - help="Number of jobs to run simultaneously. 
Used for make -jJOBS and git clone recursive --jobs=JOBS") - parser.add_argument("-npc", - "--no-parallel-checkout", - action="store_true", - help="Use this option if you do not want a parallel checkout. The default is to have parallel checkouts.") - parser.add_argument("-s", - "--submit", - action="store_true", - help="Submit all the created scripts as batch jobs") - parser.add_argument("-v", - "--verbose", - action="store_true", - help="Get verbose messages (repeat the option to increase verbosity level)") - parser.add_argument("-w NUM", - "--walltime=NUM", - type=int, metavar='', - help="Maximum wall time NUM (in minutes) to use") - parser.add_argument("--mail-list=STRING", - action="store_true", - help="Email the comma=separated STRING list of emails rather than \$USER\@noaa.gov") - - ## Parse the arguments - args = parser.parse_args() - - ## Define arguments as variables - yml = args.yamlfile - ps = args.platform - ts = args.target - nparallel = args.parallel - jobs = str(args.jobs) - pcheck = args.no_parallel_checkout - - ## Define parallelism addition for checkouts - # If pcheck is defined, no parallel checkouts - # If pcheck is not defined, default is to have parallel checkouts - if pcheck: - pc = "" - else: - pc = " &" - - ## Define operation of option(s) above - if args.verbose: - logging.basicCOnfig(level=logging.INFO) - else: - logging.basicConfig(level=logging.ERROR) - -#### Main -srcDir="src" -checkoutScriptName = "checkout.sh" -baremetalRun = False # This is needed if there are no bare metal runs - -## Split and store the platforms and targets in a list -plist = args.platform -tlist = args.target - -## Get the variables in the model yaml -freVars = varsfre.frevars(yml) - -## Open the yaml file and parse as fremakeYaml -modelYaml = yamlfre.freyaml(yml,freVars) -fremakeYaml = modelYaml.getCompileYaml() - -## Error checking the targets -for targetName in tlist: - target = targetfre.fretarget(targetName) - -## Loop through the platforms specified on the command line -## If the platform is a baremetal platform, write the checkout script and run it once -## This should be done separately and serially because bare metal platforms should all be using -## the same source code. -for platformName in plist: - if modelYaml.platforms.hasPlatform(platformName): - pass - else: - raise SystemExit (platformName + " does not exist in " + modelYaml.platformsfile) - (compiler,modules,modulesInit,fc,cc,modelRoot,iscontainer,mkTemplate,containerBuild,containerRun,RUNenv)=modelYaml.platforms.getPlatformFromName(platformName) - - ## Create the checkout script - if iscontainer == False: - ## Create the source directory for the platform - srcDir = modelRoot + "/" + fremakeYaml["experiment"] + "/src" - if not os.path.exists(srcDir): - os.system("mkdir -p " + srcDir) - if not os.path.exists(srcDir+"/checkout.sh"): - freCheckout = checkout.checkout("checkout.sh",srcDir) - freCheckout.writeCheckout(modelYaml.compile.getCompileYaml(),jobs,pc) - freCheckout.finish(pc) - -## TODO: Options for running on login cluster? 
- freCheckout.run() - -fremakeBuildList = [] -## Loop through platforms and targets -for platformName in plist: - for targetName in tlist: - target = targetfre.fretarget(targetName) - if modelYaml.platforms.hasPlatform(platformName): - pass - else: - raise SystemExit (platformName + " does not exist in " + modelYaml.platformsfile) - (compiler,modules,modulesInit,fc,cc,modelRoot,iscontainer,mkTemplate,containerBuild,containerRun,RUNenv)=modelYaml.platforms.getPlatformFromName(platformName) - - ## Make the source directory based on the modelRoot and platform - srcDir = modelRoot + "/" + fremakeYaml["experiment"] + "/src" - - ## Check for type of build - if iscontainer == False: - baremetalRun = True - ## Make the build directory based on the modelRoot, the platform, and the target - bldDir = modelRoot + "/" + fremakeYaml["experiment"] + "/" + platformName + "-" + target.gettargetName() + "/exec" - os.system("mkdir -p " + bldDir) - - ## Create the Makefile - freMakefile = makefilefre.makefile(exp = fremakeYaml["experiment"], - libs = fremakeYaml["baremetal_linkerflags"], - srcDir = srcDir, - bldDir = bldDir, - mkTemplatePath = mkTemplate) - - - # Loop through components and send the component name, requires, and overrides for the Makefile - for c in fremakeYaml['src']: - freMakefile.addComponent(c['component'],c['requires'],c['makeOverrides']) - freMakefile.writeMakefile() - -## Create a list of compile scripts to run in parallel - fremakeBuild = buildBaremetal.buildBaremetal(exp = fremakeYaml["experiment"], - mkTemplatePath = mkTemplate, - srcDir = srcDir, - bldDir = bldDir, - target = target, - modules = modules, - modulesInit = modulesInit, - jobs = jobs) - - for c in fremakeYaml['src']: - fremakeBuild.writeBuildComponents(c) - fremakeBuild.writeScript() - fremakeBuildList.append(fremakeBuild) - ## Run the build - fremakeBuild.run() - else: -#################################### container stuff below ########################################################### - ## Run the checkout script -# image="hpc-me-intel:2021.1.1" - image="ecpe4s/noaa-intel-prototype:2023.09.25" - bldDir = modelRoot + "/" + fremakeYaml["experiment"] + "/exec" - tmpDir = "tmp/"+platformName - - ## Create the checkout script - freCheckout = checkout.checkoutForContainer("checkout.sh", srcDir, tmpDir) - freCheckout.writeCheckout(modelYaml.compile.getCompileYaml(),jobs,pc) - freCheckout.finish(pc) - - ## Create the makefile -### Should this even be a separate class from "makefile" in makefilefre? 
~ ejs - freMakefile = makefilefre.makefileContainer(exp = fremakeYaml["experiment"], - libs = fremakeYaml["container_addlibs"], - srcDir = srcDir, - bldDir = bldDir, - mkTemplatePath = mkTemplate, - tmpDir = tmpDir) - - # Loop through components and send the component name and requires for the Makefile - for c in fremakeYaml['src']: - freMakefile.addComponent(c['component'],c['requires'],c['makeOverrides']) - freMakefile.writeMakefile() - - ## Build the dockerfile - dockerBuild = buildDocker.container(base = image, - exp = fremakeYaml["experiment"], - libs = fremakeYaml["container_addlibs"], - RUNenv = RUNenv, - target = target) - - dockerBuild.writeDockerfileCheckout("checkout.sh", tmpDir+"/checkout.sh") - dockerBuild.writeDockerfileMakefile(freMakefile.getTmpDir() + "/Makefile", freMakefile.getTmpDir()+"/linkline.sh") - - for c in fremakeYaml['src']: - dockerBuild.writeDockerfileMkmf(c) - - dockerBuild.writeRunscript(RUNenv,containerRun,tmpDir+"/execrunscript.sh") - - ## Run the dockerfile; build the container - dockerBuild.build(containerBuild,containerRun) - - #freCheckout.cleanup() - #buildDockerfile(fremakeYaml,image) - -if baremetalRun: - if __name__ == '__main__': - pool = Pool(processes=nparallel) # Create a multiprocessing Pool - pool.map(buildBaremetal.fremake_parallel,fremakeBuildList) # process data_inputs iterable with pool diff --git a/fre/make/gfdlfremake/platformfre.py b/fre/make/gfdlfremake/platformfre.py index fe8924f9..4f8d0eed 100644 --- a/fre/make/gfdlfremake/platformfre.py +++ b/fre/make/gfdlfremake/platformfre.py @@ -52,7 +52,7 @@ def __init__(self,platforminfo): p["container"] except: p["container"] = False - p["RUNenv"] = "" + p["RUNenv"] = [""] p["containerBuild"] = "" p["containerRun"] = "" if p["container"]: diff --git a/fre/make/gfdlfremake/schema.json b/fre/make/gfdlfremake/schema.json deleted file mode 100644 index 751bb9db..00000000 --- a/fre/make/gfdlfremake/schema.json +++ /dev/null @@ -1,201 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-06/schema#", - "type": "object", - "additionalProperties": false, - "properties": { - "name": { - "description": "The name of the experiment", - "type": "string" - }, - "platform": { - "description": "The platforms listed in the command", - "type": "string" - }, - "target": { - "description": "The targets listed in the command", - "type": "string" - }, - "build": { - "type": "object", - "additionalProperties": false, - "properties": { - "compileYaml": { - "description": "Path to the compile yaml.", - "type": "string" - }, - "platformYaml": { - "description": "Path to the platform yaml.", - "type": "string" - } - } - }, - "compile": { - "description": "The source code descriptions", - "$ref": "#/definitions/Compile" - }, - "platforms": { - "description": "FRE platforms", - "type": "array", - "items": {"$ref": "#/definitions/Platform"} - } - }, - "definitions": { - "Compile": { - "type": "object", - "properties": { - "experiment": { - "description": "The name of the model", - "type": "string" - }, - "container_addlibs": { - "description": "Libraries and packages needed for linking in the container", - "type": ["array","string","null"] - }, - "baremetal_linkerflags": { - "description": "Linker flags of libraries and packages needed for linking in the bare-metal build", - "type": ["array","string","null"] - }, - "src": { - "type": "array", - "items": {"$ref": "#/definitions/Src"} - } - } - }, - "Src": { - "type": "object", - "properties": { - "component": { - "description": "The name of the model component", - "type": 
"string" - }, - "repo": { - "anyOf": [ - { - "description": "The URL of the code repository", - "type": "array", - "items": { - "type": "string", - "format": "uri", - "qt-uri-protocols": [ - "https" - ], - "qt-uri-extensions": [ - ".git" - ] - } - }, - { - "description": "The URL of the code repository", - "type": "string", - "format": "uri", - "qt-uri-protocols": [ - "https" - ], - "qt-uri-extensions": [ - ".git" - ] - } - ] - }, - "cppdefs": { - "description": "String of CPPDEFs to include in compiling the component", - "type": "string" - }, - "branch": { - "anyOf": [ - { - "description": "The version of code to clone", - "type": "array", - "items": { - "type": "string" - } - }, - { - "description": "The version of code to clone", - "type": "string" - } - ] - }, - "otherFlags": { - "description": "String of Include flags necessary to retrieve other code needed", - "type": "string" - }, - "requires": { - "description": "list of componets that this component depends on", - "type": "array", - "items": {"type": "string"} - }, - "paths": { - "description": "A list of the paths in the component to compile", - "type": "array", - "items": {"type": "string"} - }, - "doF90Cpp": { - "description": "True if the preprocessor needs to be run", - "type": "boolean" - }, - "makeOverrides": { - "description": "Overrides openmp target for MOM6", - "type": "string" - } - } - }, - "Platform": { - "type": "object", - "properties": { - "name": { - "description": "The name of the platform", - "type": "string" - }, - "compiler": { - "description": "The compiler used to build the model", - "type": "string" - }, - "modulesInit": { - "description": "Array of commands to run before loading modules", - "type": "array", - "items": {"type": "string"} - }, - "modules": { - "description": "List (array) of modules to load", - "type": "array", - "items": { - "type": "string" - } - }, - "fc": { - "description": "The Fortran compiler", - "type": "string" - }, - "cc": { - "description": "The C compiler", - "type": "string" - }, - "mkTemplate": { - "description": "Path to the mk template file", - "type": "string" - }, - "modelRoot": { - "description": "Path to the root for all model install files", - "type": "string" - }, - "RUNenv": { - "description": "Commands needed at the beginning of a RUN in dockerfile", - "type": ["array","string"] - }, - "container": { - "description": "True/False if using container to compile", - "type": "boolean" - }, - "containerBuild": { - "description": "Program used to build the container", - "type": "string" - }, - "containerRun": { - "description": "Program used to run the container", - "type": "string" - } - } - } - } -} diff --git a/fre/make/gfdlfremake/yamlfre.py b/fre/make/gfdlfremake/yamlfre.py index 6f638bbb..72458a8c 100644 --- a/fre/make/gfdlfremake/yamlfre.py +++ b/fre/make/gfdlfremake/yamlfre.py @@ -1,5 +1,6 @@ import os import json +from pathlib import Path import yaml from jsonschema import validate, ValidationError, SchemaError from . 
import platformfre @@ -176,8 +177,8 @@ def __init__(self,combinedyaml,v): #self.freyaml.update(self.platformsyaml) ## VALIDATION OF COMBINED YAML FOR COMPILATION - fremake_package_dir = os.path.dirname(os.path.abspath(__file__)) - schema_path = os.path.join(fremake_package_dir, 'schema.json') + fremake_package_dir = Path(__file__).resolve().parents[2] + schema_path = os.path.join(fremake_package_dir, 'gfdl_msd_schemas', 'FRE', 'fre_make.json') with open(schema_path, 'r') as f: s = f.read() schema = json.loads(s) diff --git a/fre/make/runFremake.py b/fre/make/runFremake.py index 14fa5ec1..ffe2ec96 100644 --- a/fre/make/runFremake.py +++ b/fre/make/runFremake.py @@ -15,7 +15,7 @@ targetfre, varsfre, yamlfre, checkout, makefilefre, buildDocker, buildBaremetal ) -def _fremake_run(yamlfile,platform,target,parallel,jobs,no_parallel_checkout,verbose): +def fremake_run(yamlfile,platform,target,parallel,jobs,no_parallel_checkout,verbose): ''' run fremake via click''' yml = yamlfile name = yamlfile.split(".")[0] @@ -86,6 +86,7 @@ def _fremake_run(yamlfile,platform,target,parallel,jobs,no_parallel_checkout,ver freCheckout = checkout.checkout("checkout.sh",srcDir) freCheckout.writeCheckout(modelYaml.compile.getCompileYaml(),jobs,pc) freCheckout.finish(pc) + os.chmod(srcDir+"/checkout.sh", 0o744) ## TODO: Options for running on login cluster? freCheckout.run() @@ -200,12 +201,12 @@ def _fremake_run(yamlfile,platform,target,parallel,jobs,no_parallel_checkout,ver pool.map(buildBaremetal.fremake_parallel,fremakeBuildList) @click.command() -def fremake_run(yamlfile,platform,target,parallel,jobs,no_parallel_checkout,verbose): +def _fremake_run(yamlfile,platform,target,parallel,jobs,no_parallel_checkout,verbose): ''' Decorator for calling _fremake_run - allows the decorated version of the function to be separate from the undecorated version ''' - return _fremake_run(yamlfile,platform,target,parallel,jobs,no_parallel_checkout,verbose) + return fremake_run(yamlfile,platform,target,parallel,jobs,no_parallel_checkout,verbose) if __name__ == "__main__": fremake_run() diff --git a/fre/make/tests/compilation/test_fre_make_run_fremake.py b/fre/make/tests/compilation/test_fre_make_run_fremake.py new file mode 100644 index 00000000..be91a547 --- /dev/null +++ b/fre/make/tests/compilation/test_fre_make_run_fremake.py @@ -0,0 +1,21 @@ +''' test "fre make run-fremake" calls ''' + +import os +from fre.make import runFremake +from pathlib import Path + +# command options +YAMLFILE = "fre/make/tests/null_example/null_model.yaml" +PLATFORM = [ "ci.gnu" ] +CONTAINER_PLATFORM = ["hpcme.2023"] +TARGET = ["debug"] +EXPERIMENT = "null_model_full" + +# get HOME dir to check output +HOME_DIR = os.environ["HOME"] + +def test_fre_make_run_fremake_null_model_serial_compile(): + ''' run fre make with run-fremake subcommand and build the null model experiment with gnu''' + runFremake.fremake_run(YAMLFILE, PLATFORM, TARGET, False, 1, False, False) + assert Path(f"{HOME_DIR}/fremake_canopy/test/{EXPERIMENT}/{PLATFORM[0]}-{TARGET[0]}/exec/{EXPERIMENT}.x").exists() + diff --git a/fre/make/tests/null_example/platforms.yaml b/fre/make/tests/null_example/platforms.yaml index 60d1aad2..fdfa9d4f 100644 --- a/fre/make/tests/null_example/platforms.yaml +++ b/fre/make/tests/null_example/platforms.yaml @@ -24,3 +24,9 @@ platforms: container: True containerBuild: "podman" containerRun: "apptainer" + - name: ci.gnu + compiler: gnu + fc: mpifort + cc: mpicc + mkTemplate: /__w/fre-cli/fre-cli/mkmf/templates/linux-ubuntu-xenial-gnu.mk + modelRoot: 
${HOME}/fremake_canopy/test diff --git a/fre/make/tests/test_create_makefile.py b/fre/make/tests/test_create_makefile.py new file mode 100644 index 00000000..36188b33 --- /dev/null +++ b/fre/make/tests/test_create_makefile.py @@ -0,0 +1,72 @@ +""" +Test fre make create-makefile +""" +import os +import shutil +from pathlib import Path +from fre.make import createMakefile + +# SET-UP +test_dir = Path("fre/make/tests") +NM_EXAMPLE = Path("null_example") +YAMLFILE = "null_model.yaml" +BM_PLATFORM = ["ncrc5.intel23"] +CONTAINER_PLATFORM = ["hpcme.2023"] +TARGET = ["debug"] +EXPERIMENT = "null_model_full" + +# Create output location +out = f"{test_dir}/makefile_out" +if Path(out).exists(): + # remove + shutil.rmtree(out) + # create output directory + Path(out).mkdir(parents=True,exist_ok=True) +else: + Path(out).mkdir(parents=True,exist_ok=True) + +# Set output directory as home for fre make output +#os.environ["HOME"]=str(Path(out)) + +def test_modelyaml_exists(): + """ + Check the model yaml exists + """ + assert Path(f"{test_dir}/{NM_EXAMPLE}/{YAMLFILE}").exists() + +def test_compileyaml_exists(): + """ + Check the compile yaml exists + """ + assert Path(f"{test_dir}/{NM_EXAMPLE}/compile.yaml").exists() + +def test_platformyaml_exists(): + """ + Check the platform yaml exists + """ + assert Path(f"{test_dir}/{NM_EXAMPLE}/platforms.yaml").exists() + +def test_bm_makefile_creation(): + """ + Check the makefile is created when a bare-metal platform is used + """ + # Set output directory as home for fre make output + os.environ["HOME"]=str(Path(out)) + + bm_plat = BM_PLATFORM[0] + targ = TARGET[0] + yamlfile_path = f"{test_dir}/{NM_EXAMPLE}/{YAMLFILE}" + + createMakefile.makefile_create(yamlfile_path,BM_PLATFORM,TARGET) + + assert Path(f"{out}/fremake_canopy/test/{EXPERIMENT}/{bm_plat}-{targ}/exec/Makefile").exists() + +def test_container_makefile_creation(): + """ + Check the makefile is created when the container platform is used + """ + container_plat = CONTAINER_PLATFORM[0] + yamlfile_path = f"{test_dir}/{NM_EXAMPLE}/{YAMLFILE}" + createMakefile.makefile_create(yamlfile_path,CONTAINER_PLATFORM,TARGET) + + assert Path(f"tmp/{container_plat}/Makefile").exists() diff --git a/fre/pp/configure_script_yaml.py b/fre/pp/configure_script_yaml.py index 443d6e00..b782e3de 100644 --- a/fre/pp/configure_script_yaml.py +++ b/fre/pp/configure_script_yaml.py @@ -147,7 +147,7 @@ def set_rose_apps(yamlfile,rose_regrid,rose_remap): value=f'{interp_split[0]}_{interp_split[1]}.{interp_method}') #################### -def _yamlInfo(yamlfile,experiment,platform,target): +def yamlInfo(yamlfile,experiment,platform,target): """ Using a valid pp.yaml, the rose-app and rose-suite configuration files are created in the cylc-src @@ -200,12 +200,12 @@ def _yamlInfo(yamlfile,experiment,platform,target): print(" " + outfile) @click.command() -def yamlInfo(yamlfile,experiment,platform,target): +def _yamlInfo(yamlfile,experiment,platform,target): ''' Wrapper script for calling yamlInfo - allows the decorated version of the function to be separate from the undecorated version ''' - return _yamlInfo(yamlfile,experiment,platform,target) + return yamlInfo(yamlfile,experiment,platform,target) # Use parseyaml function to parse created edits.yaml if __name__ == '__main__': diff --git a/fre/pp/tests/test_configure_script_yaml.py b/fre/pp/tests/test_configure_script_yaml.py index 1f61efd4..eaf1fc2e 100644 --- a/fre/pp/tests/test_configure_script_yaml.py +++ b/fre/pp/tests/test_configure_script_yaml.py @@ -1,25 +1,24 @@ +""" +Test 
configure_script_yaml +""" import os from pathlib import Path from fre.pp import configure_script_yaml as csy # Set what would be click options -experiment = "c96L65_am5f7b12r1_amip" -platform = "gfdl.ncrc5-intel22-classic" -target = "prod-openmp" +EXPERIMENT = "c96L65_am5f7b12r1_amip" +PLATFORM = "gfdl.ncrc5-intel22-classic" +TARGET = "prod-openmp" # Set example yaml paths, input directory -CWD = Path.cwd() test_dir = Path("fre/pp/tests") -test_yaml = Path(f"AM5_example/am5.yaml") - -# Set home for ~/cylc-src location in script -os.environ["HOME"]=str(Path(f"{CWD}/{test_dir}/configure_yaml_out")) +test_yaml = Path("AM5_example/am5.yaml") def test_combinedyaml_exists(): """ Make sure combined yaml file exists """ - assert Path(f"{CWD}/{test_dir}/{test_yaml}").exists() + assert Path(f"{test_dir}/{test_yaml}").exists() def test_configure_script(): """ @@ -27,23 +26,21 @@ def test_configure_script(): Creates rose-suite, regrid rose-app, remap rose-app TO-DO: will break this up for better tests """ - os.chdir(f"{CWD}/{test_dir}/AM5_example") + # Set home for ~/cylc-src location in script + os.environ["HOME"]=str(Path(f"{test_dir}/configure_yaml_out")) # Set output directory - out_dir = Path(f"{os.getenv('HOME')}/cylc-src/{experiment}__{platform}__{target}") + out_dir = Path(f"{os.getenv('HOME')}/cylc-src/{EXPERIMENT}__{PLATFORM}__{TARGET}") Path(out_dir).mkdir(parents=True,exist_ok=True) # Define combined yaml - model_yaml = str(Path(f"{CWD}/{test_dir}/{test_yaml}")) + model_yaml = str(Path(f"{test_dir}/{test_yaml}")) # Invoke configure_yaml_script.py - csy._yamlInfo(model_yaml,experiment,platform,target) + csy.yamlInfo(model_yaml,EXPERIMENT,PLATFORM,TARGET) # Check for configuration creation and final combined yaml - assert all([Path(f"{out_dir}/{experiment}.yaml").exists(), + assert all([Path(f"{out_dir}/{EXPERIMENT}.yaml").exists(), Path(f"{out_dir}/rose-suite.conf").exists(), Path(f"{out_dir}/app/regrid-xy/rose-app.conf").exists(), Path(f"{out_dir}/app/remap-pp-components/rose-app.conf").exists()]) - - # Go back to original directory - os.chdir(CWD) diff --git a/fre/pytest.ini b/fre/pytest.ini index 131a2523..eb793231 100644 --- a/fre/pytest.ini +++ b/fre/pytest.ini @@ -9,6 +9,7 @@ testpaths = fre/pp/tests # fre/run/tests # fre/test/tests -# fre/yamltools/tests + fre/yamltools/tests # fre/app/tests fre/app/generate_time_averages/tests + fre/app/regrid_xy/tests diff --git a/fre/yamltools/combine_yamls.py b/fre/yamltools/combine_yamls.py index 4584c115..b2b6540f 100755 --- a/fre/yamltools/combine_yamls.py +++ b/fre/yamltools/combine_yamls.py @@ -90,7 +90,7 @@ def experiment_check(mainyaml_dir,comb,experiment): if expyaml is not None: ey_path=[] for e in expyaml: - if Path(e).exists(): + if Path(os.path.join(mainyaml_dir,e)).exists(): ey=Path(os.path.join(mainyaml_dir,e)) ey_path.append(ey) else: @@ -115,7 +115,7 @@ def experiment_check(mainyaml_dir,comb,experiment): class init_compile_yaml(): def __init__(self,yamlfile,platform,target): """ - Process to combine yamls appllicable to compilation + Process to combine yamls applicable to compilation """ self.yml = yamlfile self.name = yamlfile.split(".")[0] diff --git a/fre/yamltools/freyamltools.py b/fre/yamltools/freyamltools.py index 3ca3ba93..55817472 100644 --- a/fre/yamltools/freyamltools.py +++ b/fre/yamltools/freyamltools.py @@ -1,21 +1,12 @@ ''' fre yamltools ''' import click -from .freyamltoolsexample import yamltools_test_function from .combine_yamls import _consolidate_yamls @click.group(help=click.style(" - access fre yamltools 
subcommands", fg=(202,177,95))) def yamltools_cli(): ''' entry point to fre yamltools click commands ''' -@yamltools_cli.command() -@click.option('--uppercase', '-u', is_flag=True, help = 'Print statement in uppercase.') -@click.pass_context -def function(context, uppercase): - # pylint: disable=unused-argument - """ - Execute fre yamltools test """ - context.forward(yamltools_test_function) - @yamltools_cli.command() @click.option("-y", "--yamlfile", diff --git a/fre/yamltools/freyamltoolsexample.py b/fre/yamltools/freyamltoolsexample.py deleted file mode 100644 index e86fb206..00000000 --- a/fre/yamltools/freyamltoolsexample.py +++ /dev/null @@ -1,18 +0,0 @@ -""" -experimentation file for integrating one file's functions into main prototype fre file -authored by Bennett.Chang@noaa.gov | bcc2761 -NOAA | GFDL -""" - -import click - -@click.command() -def yamltools_test_function(uppercase=None): - """Execute fre list testfunction2.""" - statement = "testingtestingtestingtesting" - if uppercase: - statement = statement.upper() - click.echo(statement) - -if __name__ == '__main__': - yamltools_test_function() diff --git a/fre/yamltools/tests/test_combine_yamls.py b/fre/yamltools/tests/test_combine_yamls.py index f9e95fa2..7df6eb36 100644 --- a/fre/yamltools/tests/test_combine_yamls.py +++ b/fre/yamltools/tests/test_combine_yamls.py @@ -13,13 +13,14 @@ ## SET-UP # Set example yaml paths, input directory, output directory -CWD = Path.cwd() +#CWD = Path.cwd() TEST_DIR = Path("fre/yamltools/tests") -IN_DIR = Path(f"{CWD}/{TEST_DIR}/AM5_example") +IN_DIR = Path(f"{TEST_DIR}/AM5_example") +SCHEMA_DIR = Path("fre/gfdl_msd_schemas/FRE") # Create output directories -COMP_OUT_DIR = Path(f"{CWD}/{TEST_DIR}/combine_yamls_out/compile") -PP_OUT_DIR = Path(f"{CWD}/{TEST_DIR}/combine_yamls_out/pp") +COMP_OUT_DIR = Path(f"{TEST_DIR}/combine_yamls_out/compile") +PP_OUT_DIR = Path(f"{TEST_DIR}/combine_yamls_out/pp") # If output directory exists, remove and create again for out in [COMP_OUT_DIR, PP_OUT_DIR]: @@ -63,32 +64,26 @@ def test_merged_compile_yamls(): Check for the creation of the combined-[experiment] yaml Check that the model yaml was merged into the combined yaml """ - # Go into the input directory - os.chdir(IN_DIR) - # Model yaml path - modelyaml = "am5.yaml" + modelyaml = str(Path(f"{IN_DIR}/am5.yaml")) use = "compile" # Merge the yamls cy.consolidate_yamls(modelyaml, COMP_EXPERIMENT, COMP_PLATFORM, COMP_TARGET, use) # Move combined yaml to output location - shutil.move("combined-am5.yaml", COMP_OUT_DIR) + shutil.move(f"{IN_DIR}/combined-am5.yaml", COMP_OUT_DIR) # Check that the combined yaml exists assert Path(f"{COMP_OUT_DIR}/combined-{COMP_EXPERIMENT}.yaml").exists() - # Go back to original directory - os.chdir(CWD) - def test_combined_compileyaml_validation(): """ Validate the combined compile yaml """ combined_yamlfile =f"{COMP_OUT_DIR}/combined-{COMP_EXPERIMENT}.yaml" - schema_file = os.path.join(f"{IN_DIR}","compile_yamls","schema.json") - + schema_file = os.path.join(SCHEMA_DIR, "fre_make.json") + with open(combined_yamlfile,'r') as cf: yml = yaml.safe_load(cf) @@ -108,48 +103,39 @@ def test_combined_compileyaml_combinefail(): Check to test if compile yaml is incorrect/does not exist, the combine fails. 
(compile yaml path misspelled) """ - # Go into the input directory - os.chdir(f"{IN_DIR}/compile_yamls/compile_fail") - # Model yaml path - modelyaml = "am5-wrong_compilefile.yaml" + modelyaml = str(Path(f"{IN_DIR}/compile_yamls/compile_fail/am5-wrong_compilefile.yaml")) use = "compile" # Merge the yamls - should fail since there is no compile yaml specified in the model yaml try: cy.consolidate_yamls(modelyaml, COMP_EXPERIMENT, COMP_PLATFORM, COMP_TARGET, use) # Move combined yaml to output location - shutil.move("combined-am5-wrong_compilefile.yaml", COMP_OUT_DIR) + shutil.move(f"{IN_DIR}/compile_yamls/compile_fail/combined-am5-wrong_compilefile.yaml", COMP_OUT_DIR) except: print("EXPECTED FAILURE") # Move combined yaml to output location - shutil.move("combined-am5-wrong_compilefile.yaml", COMP_OUT_DIR) + shutil.move(f"{IN_DIR}/compile_yamls/compile_fail/combined-am5-wrong_compilefile.yaml", COMP_OUT_DIR) assert True - # Go back to original directory - os.chdir(CWD) - def test_combined_compileyaml_validatefail(): """ Check if the schema is validating correctly Branch should be string """ - # Go into the input directory - os.chdir(f"{IN_DIR}/compile_yamls/compile_fail") - # Model yaml path - modelyaml = "am5-wrong_datatype.yaml" + modelyaml = str(Path(f"{IN_DIR}/compile_yamls/compile_fail/am5-wrong_datatype.yaml")) use = "compile" # Merge the yamls cy.consolidate_yamls(modelyaml, COMP_EXPERIMENT, COMP_PLATFORM, COMP_TARGET, use) # Move combined yaml to output location - shutil.move("combined-am5-wrong_datatype.yaml", COMP_OUT_DIR) + shutil.move(f"{IN_DIR}/compile_yamls/compile_fail/combined-am5-wrong_datatype.yaml", COMP_OUT_DIR) # Validate against schema; should fail wrong_combined = Path(f"{COMP_OUT_DIR}/combined-am5-wrong_datatype.yaml") - schema_file = os.path.join(f"{IN_DIR}","compile_yamls","schema.json") + schema_file = os.path.join(SCHEMA_DIR, "fre_make.json") # Open/load combined yaml file with open(wrong_combined,'r') as cf: @@ -166,9 +152,6 @@ def test_combined_compileyaml_validatefail(): except: assert True - # Go back to original directory - os.chdir(CWD) - ############ PP ############ def test_expyaml_exists(): """ @@ -188,9 +171,6 @@ def test_merged_pp_yamls(): Check for the creation of the combined-[experiment] yaml Check that the model yaml was merged into the combined yaml """ - # Go into the input directory - os.chdir(IN_DIR) - # Model yaml path modelyaml = Path(f"{IN_DIR}/am5.yaml") use = "pp" @@ -204,9 +184,6 @@ def test_merged_pp_yamls(): # Check that the combined yaml exists assert Path(f"{PP_OUT_DIR}/combined-{PP_EXPERIMENT}.yaml").exists() - # Go back to original directory - os.chdir(CWD) - def test_combined_ppyaml_validation(): """ Validate the combined compile yaml diff --git a/ideas.md b/ideas.md deleted file mode 100644 index 5862d04b..00000000 --- a/ideas.md +++ /dev/null @@ -1,34 +0,0 @@ -# Ideas for Implementation: - -## Helpful Click Decorators & Utilities -* click's `--help` option will be ideal for users -* `click.option()`: this will be very useful for flags that may be used - - will be able to use commands like `is_flag`, `flag_value`, `count`, `help`, etc. 
-* `click.group()`: this will allow FRE to be broken up into parts and subparts for each part - - will be able to use commands like `add_command` -* `click.progressbar()`: potential for the user to see progress while something runs like `fre run` -* `click.confirm()`: potential for the user to verify actions and proceed -* `click.style()`: can style text with wanted configurations if needed (can use click.secho()) -* `click.pause()`: stops executing current command and waits for user input to continue -* `click.pass_context`: allows use of `context.forward(command)` and `context.invoke(command, args)` for discouraged yet possible invocation of commands from another command, probably what is going to be the solution to running all of something like `fre make` at once - -## Potential Errors -* `click.confirm()` actions will be hard for users to script - -## Questions for Users/Devs -* do we want to use flags (`click.option()`), confirmations (`click.confirm()`), or a mix of both to allow users to run what they want, how they want? - - this means that users can either use certain flags (i.e `--execute`), which will be included and explained in the `--help` feature, or they will just be prompted for what features they want and can decide if they want it with [y/N] - -## Things to Consider/Implement -* use of classes, arguments (necessary) vs. flags (optional) - - arguments can be used for specific cases; i.e need to parse specific file -* NOAA GFDL Conda channel to get this into userspace (Conda > pip/venv) - -## Required Changes to Make -* `fre pp configure -y file.yaml` only works when inside folder containing schema at the moment -* want to polish up .gitignore file -* deployment via GitLab -* is there a way to check that all python dependencies needed by fre-cli are available in the current python envioronment? Like "python fre.py" or something? - -## Potential Additional Uses for Click -* program using BeautifulSoup to scrape GFDL pages for immediate tutorial guidance after prompting for GFDL login \ No newline at end of file diff --git a/meta.yaml b/meta.yaml index 07f76686..1e5d65ad 100644 --- a/meta.yaml +++ b/meta.yaml @@ -5,13 +5,10 @@ package: version: '{{ environ.get("GIT_DESCRIBE_TAG", data.get("version")) }}' source: - path: . -# ideally we want this git_url path, but it messes with conda publish -# where it builds only the sourced url and not local/branch changes -# git_url: https://github.com/NOAA-GFDL/fre-cli + git_url: https://github.com/NOAA-GFDL/fre-cli.git build: - script: + script: - {{ PYTHON }} -m pip install . -vv number: 1 noarch: python @@ -45,47 +42,41 @@ requirements: - conda-forge::cdo>=2.0.0 test: - source_files: + requires: + - pip + - pylint + - pytest + - pytest-cov + source_files: - fre/ imports: - - click - - pytest - - pylint - fre - - fre.pp - - fre.pp.install - - fre.pp.status - - fre.pp.run - - fre.pp.validate - - fre.make - fre.app - - fre.cmor - fre.catalog + # - fre.check + - fre.cmor + # - fre.list + - fre.make + - fre.pp + # - fre.run + # - fre.test + - fre.yamltools commands: - pylint --max-args 6 -ry --ignored-modules netCDF4,cmor fre/ || echo "pylint returned non-zero exit code and will kill the workflow. guarding against this now." 
- - pip install GitPython && pytest --config-file=fre/pytest.ini --cov-config=fre/coveragerc --cov=fre fre/ + # run pytest but ignore any tests that require compilation + - pip install GitPython && pytest --ignore=fre/make/tests/compilation --config-file=fre/pytest.ini --cov-report term-missing --cov-config=fre/coveragerc --cov=fre fre/ - fre --help - - fre pp --help - - fre pp install --help - - fre pp status --help - - fre pp run --help - - fre pp validate --help - - fre make --help - - fre make create-checkout --help - - fre make create-compile --help - - fre make create-dockerfile --help - - fre make create-makefile --help - - fre make run-fremake --help - fre app --help - - fre app mask-atmos-plevel --help + - fre catalog --help + # - fre check --help - fre cmor --help - - fre cmor run --help + # - fre list --help + - fre make --help + - fre pp --help + # - fre run --help + # - fre test --help + - fre yamltools --help - requires: - - pip - - pylint - - pytest - - pytest-cov about: home: https://github.com/NOAA-GFDL/fre-cli diff --git a/mkmf b/mkmf new file mode 160000 index 00000000..9830f1ac --- /dev/null +++ b/mkmf @@ -0,0 +1 @@ +Subproject commit 9830f1ac08566ec94e6b28555c921df28b6d0fea
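The README text added near the top of this diff describes the ``fre SUBCOMMAND COMMAND`` structure built on click groups. The following is a minimal, hypothetical sketch of that pattern, not actual fre-cli code; the names ``fre_cli``, ``demo_cli``, ``demo_status``, and the ``--experiment`` option are invented for illustration:

```python
import click

@click.group(help="illustrative top-level entry point; `--help` lists subcommand groups")
def fre_cli():
    ''' entry point to the example's click commands '''

@fre_cli.group(name="demo")
def demo_cli():
    ''' a subcommand group, analogous to `fre make` or `fre pp` '''

@demo_cli.command(name="status")
@click.option("-e", "--experiment", required=True, help="Experiment name (hypothetical option).")
def demo_status(experiment):
    ''' a leaf command, discoverable via `demo status --help` '''
    click.echo(f"status requested for {experiment}")

if __name__ == "__main__":
    fre_cli()
```

Running this sketch with ``--help`` lists ``demo`` as a subcommand, and ``demo status --help`` documents its options, mirroring how ``fre --help`` advertises ``fre make``, ``fre pp``, and the other groups wired together in this repository.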