diff --git a/Externals.cfg b/Externals.cfg index 9acd326b6..0cf851b4a 100644 --- a/Externals.cfg +++ b/Externals.cfg @@ -12,7 +12,7 @@ protocol = git repo_url = https://github.com/ufs-community/ufs-weather-model # Specify either a branch name or a hash but not both. #branch = develop -hash = 38a29a6 +hash = 6b0f516 local_path = sorc/ufs-weather-model required = True @@ -21,7 +21,7 @@ protocol = git repo_url = https://github.com/NOAA-EMC/UPP # Specify either a branch name or a hash but not both. #branch = develop -hash = 81b38a8 +hash = 6f5dd62 local_path = sorc/UPP required = True diff --git a/doc/ContribGuide/contributing.rst b/doc/ContribGuide/contributing.rst index 0f7231e26..7576d62cf 100644 --- a/doc/ContribGuide/contributing.rst +++ b/doc/ContribGuide/contributing.rst @@ -12,7 +12,7 @@ Fork and PR Overview Contributions to the ``ufs-srweather-app`` project are made via a :github-docs:`Fork` and :github-docs:`Pull Request (PR)` model. GitHub provides a thorough description of this contribution model in their `Contributing to a project` :github-docs:`Quickstart`, but the steps, with respect to ``ufs-srweather-app`` contributions, can be summarized as: #. :github-docs:`Create an issue ` to document proposed changes. -#. :github-docs:`Fork` the :srw-repo:`ufs-srweather-app repository<>` into your personal GitHub account. +#. :github-docs:`Fork` the :srw-repo:`ufs-srweather-app repository<>` into your personal GitHub account. #. :github-docs:`Clone` your fork onto your development system. #. :github-docs:`Create a branch` in your clone for your changes. All development should take place on a branch, *not* on ``develop``. #. :github-docs:`Make, commit, and push changes` in your clone / to your fork. @@ -25,7 +25,7 @@ Development and Testing Process ================================= #. **Create issue:** Open an :srw-repo:`issue ` in the ``ufs-srweather-app`` to document proposed changes. See :ref:`Opening an Issue ` for detailed instructions. -#. **Fork & Clone the SRW App:** :github-docs:`Fork` the :srw-repo:`ufs-srweather-app repository<>` into your personal GitHub account and :github-docs:`clone` your fork onto your development system if you have not already done so. +#. **Fork & Clone the SRW App:** :github-docs:`Fork` the :srw-repo:`ufs-srweather-app repository<>` into your personal GitHub account and :github-docs:`clone` your fork onto your development system if you have not already done so. #. **Create a branch:** in your clone for your changes. All development should take place on a branch, not on ``develop``. Branches should be named as follows, where ``[name]`` is a one-word description of the branch: * ``bugfix/[name]``: Fixes a demonstrably incorrect portion of code diff --git a/doc/UsersGuide/BackgroundInfo/Introduction.rst b/doc/UsersGuide/BackgroundInfo/Introduction.rst index 4b0978ef2..e0295d022 100644 --- a/doc/UsersGuide/BackgroundInfo/Introduction.rst +++ b/doc/UsersGuide/BackgroundInfo/Introduction.rst @@ -58,6 +58,7 @@ Building, Running, and Testing the SRW App * :numref:`Section %s: Tutorials ` walks users through different SRW App experiment cases and analysis of results. * :numref:`Section %s: METplus Verification Sample Cases ` explains how to run METplus verification as part of the workflow. * :numref:`Section %s: Air Quality Modeling ` provides information specific to air quality modeling (AQM). This feature is currently unsupported, so documentation may be behind the current state of development, which is progressing rapidly. 
However, this section is a starting point for those interested in AQM. + * :numref:`Section %s: SRW Smoke & Dust ` provides information specific to smoke and dust modeling. This feature is currently only supported on Hera and Orion/Hercules, but this section is a starting point for those interested in regional smoke & dust modeling. .. hint:: * To get started with the SRW App, it is recommended that users try one of the following options: diff --git a/doc/UsersGuide/BuildingRunningTesting/SRW-SD.rst b/doc/UsersGuide/BuildingRunningTesting/SRW-SD.rst new file mode 100644 index 000000000..1d990a0bf --- /dev/null +++ b/doc/UsersGuide/BuildingRunningTesting/SRW-SD.rst @@ -0,0 +1,242 @@ +.. _srw-sd: + +===================================== +SRW Smoke & Dust (SRW-SD) Features +===================================== + +.. attention:: + + SRW-SD capabilities are a new SRW App feature supported on Hera and Orion/Hercules; on other systems, users can expect only limited support. + +This chapter provides instructions for running a simple example six-hour forecast for July 22, 2019, at 0z using SRW Smoke & Dust (SRW-SD) features. These features have been merged into an SRW App feature branch from a UFS WM Rapid Refresh Forecast System (RRFS) production branch. This forecast uses RAP data for :term:`ICs` and :term:`LBCs`, the ``RRFS_CONUS_3km`` predefined grid, and the ``FV3_HRRR_gf`` physics suite. This physics suite is similar to the NOAA operational HRRR v4 suite (Dowell et al., 2022), with the addition of the Grell-Freitas deep convective parameterization. `Scientific documentation for the HRRR_gf suite `_ and `technical documentation `_ are available with the CCPP v7.0.0 release but may differ slightly from the version available in the SRW App. + +.. note:: + + Although this chapter is the primary documentation resource for running the SRW-SD configuration, users may need to refer to :numref:`Chapter %s ` and :numref:`Chapter %s ` for additional information on building and running the SRW App, respectively. + +Quick Start Guide (SRW-SD) +============================= + +.. attention:: + + These instructions should work smoothly on Hera and Orion/Hercules, but users on other systems may need to make additional adjustments. + +Download the Code +------------------- + +Clone the |branch| branch of the authoritative SRW App repository: + +.. code-block:: console + + git clone -b main_aqm https://github.com/ufs-community/ufs-srweather-app + cd ufs-srweather-app/sorc + +Checkout Externals +--------------------- + +Users must run the ``checkout_externals`` script to collect (or "check out") the individual components of the SRW App (SRW-SD version) from their respective GitHub repositories. + +.. code-block:: console + + ./manage_externals/checkout_externals -e Externals_smoke_dust.cfg + +Build the SRW App +------------------ + +.. code-block:: console + + ./app_build.sh -p=<machine> + +where ``<machine>`` is ``hera``, ``orion``, or ``hercules``. + +Building the SRW App with SRW-SD on other machines, including other :srw-wiki:`Level 1 ` platforms, is not currently guaranteed to work, and users may have to make adjustments to the modulefiles for their system. + +If SRW-SD builds correctly, users should see the standard executables listed in :numref:`Table %s ` in the ``ufs-srweather-app/exec`` directory. + +Load the |wflow_env| Environment +-------------------------------------------- + +Load the workflow environment: + +..
code-block:: console + + module purge + source /path/to/ufs-srweather-app/versions/run.ver_<machine> + module use /path/to/ufs-srweather-app/modulefiles + module load wflow_<machine> + +where ``<machine>`` is ``hera``, ``orion``, or ``hercules``. The workflow should load on other platforms listed under the ``MACHINE`` variable in :numref:`Section %s `, but users may need to adjust other elements of the process when running on those platforms. + +.. _srw-sd-config: + +Configure an Experiment +--------------------------- + +Users will need to configure their experiment by setting parameters in the ``config.yaml`` file. To start, users can copy a default experiment setting into ``config.yaml``: + +.. code-block:: console + + cd /path/to/ufs-srweather-app/parm + cp config.smoke_dust.yaml config.yaml + +Users will need to change the ``ACCOUNT`` variable in ``config.yaml`` to an account that they have access to. They will also need to indicate which ``MACHINE`` they are working on. Users may also wish to adjust other experiment settings. For more information on each task and variable, see :numref:`Section %s `. + +If running on Orion or Hercules, users will need to change the data paths to :term:`ICs/LBCs` on the following lines in the ``task_get_extrn_*:`` sections of ``config.yaml`` by commenting out the Hera lines and uncommenting the Orion/Hercules lines: + +.. code-block:: console + + task_get_extrn_ics: + # EXTRN_MDL_SOURCE_BASEDIR_ICS: /scratch2/NAGAPE/epic/SRW-AQM_DATA/data_smoke_dust/RAP_DATA_SD/${yyyymmddhh} # hera + EXTRN_MDL_SOURCE_BASEDIR_ICS: /work/noaa/epic/SRW-AQM_DATA/input_model_data/RAP/${yyyymmddhh} # orion/hercules + task_get_extrn_lbcs: + # EXTRN_MDL_SOURCE_BASEDIR_LBCS: /scratch2/NAGAPE/epic/SRW-AQM_DATA/data_smoke_dust/RAP_DATA_SD/${yyyymmddhh} # hera + EXTRN_MDL_SOURCE_BASEDIR_LBCS: /work/noaa/epic/SRW-AQM_DATA/input_model_data/RAP/${yyyymmddhh} # orion/hercules + +In addition to the standard UFS SRW fixed files, the following data files are required to run the smoke and dust experiment: + + * ``fix_smoke``: Contains analysis grids, regridding weights, a vegetation map, and dummy emissions (used when no in situ emission files are available). + * ``data_smoke_dust/RAVE_fire``: Emission estimates and Fire Radiative Power (FRP) observations derived from `RAVE `_ satellite observations. + +.. note:: + Smoke and dust fixed file data has not been added to the `SRW App data bucket `_. Users and developers who would like access to the fixed file data necessary to run the application should reach out to the UFS SRW team in a :srw-repo:`GitHub Discussion `. + +Users may also wish to change :term:`cron`-related parameters in ``config.yaml``. In the ``config.smoke_dust.yaml`` file, which was copied into ``config.yaml``, cron can be used for automatic submission and resubmission of the workflow by setting the following variables: + +.. code-block:: console + + workflow: + USE_CRON_TO_RELAUNCH: true + CRON_RELAUNCH_INTVL_MNTS: 3 + +This means that cron will submit the launch script every 3 minutes. Users may choose not to submit using cron or to submit at a different frequency. Note that users should create a crontab by running ``crontab -e`` the first time they use cron. + +When using the basic ``config.smoke_dust.yaml`` experiment, the usual pre-processing and coldstart forecast tasks are used, because ``"parm/wflow/prep.yaml"`` appears in the list of workflow files in the ``rocoto: tasks: taskgroups:`` section of ``config.yaml`` (see :numref:`Section %s ` for task descriptions).
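+For reference, the ``rocoto: tasks: taskgroups:`` entry in ``config.yaml`` is a Jinja-rendered list of workflow definition files. The sketch below illustrates the general form of this entry; the exact set of files is defined in ``config.smoke_dust.yaml`` and may differ from this illustration:
+
+.. code-block:: console
+
+   rocoto:
+     tasks:
+       taskgroups: '{{ ["parm/wflow/prep.yaml", "parm/wflow/smoke_dust.yaml", "parm/wflow/coldstart.yaml", "parm/wflow/upp_post.yaml"]|include }}'
+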
To turn on AQM *post*-processing tasks in the workflow, include ``"parm/wflow/aqm_post.yaml"`` in the ``rocoto: tasks: taskgroups:`` section, too (see :numref:`Section %s ` for task descriptions). + +.. _srw-sd-more-tasks: + +Additional SRW-SD Tasks +-------------------------- + +.. COMMENT: Add workflow diagram in the future. + +Compared to the typical SRW App workflow, the SRW-SD workflow has slightly different tasks for pre- and post-processing. As in the SRW App default workflow, the SRW-SD workflow uses the preprocessing tasks from ``prep.yaml``, but it adds smoke-and-dust-specific tasks from ``smoke_dust.yaml``. For post-processing, it uses the NCO-compliant ``upp_post.yaml`` instead of the usual ``post.yaml``. + +The new tasks for SRW-SD are shown in :numref:`Table %s `. + +.. _pre-srw-sd: + +.. list-table:: *Tasks for SRW-SD Pre- and Post-Processing* + :widths: 20 50 30 + :header-rows: 1 + + * - Task Name + - Description + - File + * - smoke_dust + - Generates the input data file for smoke and dust to be used in the UFS Weather Model. + - ``parm/wflow/smoke_dust.yaml`` + * - prepstart + - Adds the smoke and dust fields to the ICs file from the restart file in the previous cycle. + - ``parm/wflow/smoke_dust.yaml`` + * - upp_post + - Performs post-processing with UPP. + - ``parm/wflow/upp_post.yaml`` + +The Python scripts listed in :numref:`Table %s ` are used to perform data processing and calculations required for the SRW-SD forecast. + +.. _sd-scripts: + +.. list-table:: *Python Scripts Used by Smoke and Dust Tasks* + :widths: 20 50 + :header-rows: 1 + + * - Script + - Description + * - ``ush/smoke_dust_add_smoke.py`` + - Transfers smoke- and dust-related variables from FV3 tracer outputs to GFS initial conditions. + * - ``ush/smoke_dust_fire_emiss_tools.py`` + - Calculates fire behavior and emission variables and creates input for the smoke and dust tracers. + * - ``ush/smoke_dust_generate_fire_emissions.py`` + - Entry point for generating the smoke and dust fire-related initial conditions during the ``smoke_dust`` task. + * - ``ush/smoke_dust_hwp_tools.py`` + - Utilities for calculating Hourly Wildfire Potential (HWP). + * - ``ush/smoke_dust_interp_tools.py`` + - Regridding utilities using `esmpy `_ that interpolate data from the RAVE observational grid to the RRFS grid. + +Generate the Workflow +------------------------ + +Generate the workflow: + +.. code-block:: console + + ./generate_FV3LAM_wflow.py + +Run the Workflow +------------------ + +If ``USE_CRON_TO_RELAUNCH`` is set to true in ``config.yaml`` (see :numref:`Section %s `), the workflow will run automatically. If it is set to false, users must submit the workflow manually from the experiment directory: + +.. code-block:: console + + cd ../../expt_dirs/smoke_dust_conus3km + ./launch_FV3LAM_wflow.sh + +Repeat the launch command regularly until a SUCCESS or FAILURE message appears in the terminal window. + +Users may check experiment status from the experiment directory with either of the following commands: + +.. code-block:: console + + # Check the experiment status (for cron jobs) + rocotostat -w FV3LAM_wflow.xml -d FV3LAM_wflow.db -v 10 + + # Check the experiment status and relaunch the workflow (for manual jobs) + ./launch_FV3LAM_wflow.sh; tail -n 40 log.launch_FV3LAM_wflow + +.. _srw-sd-success: + +Experiment Output +-------------------- + +The workflow run is complete when all tasks display a "SUCCEEDED" message. If everything goes smoothly, users will eventually see a workflow status table similar to the following: + +..
code-block:: console + + [orion-login smoke_dust_conus3km]$ rocotostat -w FV3LAM_wflow.xml -d FV3LAM_wflow.db -v 10 + CYCLE TASK JOBID STATE EXIT STATUS TRIES DURATION + ============================================================================================== + 201907220000 make_grid 18984137 SUCCEEDED 0 1 29.0 + 201907220000 make_orog 18984148 SUCCEEDED 0 1 419.0 + 201907220000 make_sfc_climo 18984184 SUCCEEDED 0 1 82.0 + 201907220000 smoke_dust 18984186 SUCCEEDED 0 1 243.0 + 201907220000 prepstart 18984324 SUCCEEDED 0 1 24.0 + 201907220000 get_extrn_ics 18984138 SUCCEEDED 0 1 11.0 + 201907220000 get_extrn_lbcs 18984149 SUCCEEDED 0 1 12.0 + 201907220000 make_ics_mem000 18984185 SUCCEEDED 0 1 157.0 + 201907220000 make_lbcs_mem000 18984187 SUCCEEDED 0 1 85.0 + 201907220000 forecast_mem000 18984328 SUCCEEDED 0 1 6199.0 + 201907220000 upp_post_mem000_f000 18988282 SUCCEEDED 0 1 212.0 + 201907220000 upp_post_mem000_f001 18988283 SUCCEEDED 0 1 247.0 + 201907220000 upp_post_mem000_f002 18988284 SUCCEEDED 0 1 258.0 + 201907220000 upp_post_mem000_f003 18988285 SUCCEEDED 0 1 271.0 + 201907220000 upp_post_mem000_f004 18988286 SUCCEEDED 0 1 284.0 + 201907220000 upp_post_mem000_f005 18988287 SUCCEEDED 0 1 286.0 + 201907220000 upp_post_mem000_f006 18988288 SUCCEEDED 0 1 292.0 + ============================================================================================== + 201907220600 smoke_dust 18988289 SUCCEEDED 0 1 225.0 + 201907220600 prepstart 18988302 SUCCEEDED 0 1 112.0 + 201907220600 get_extrn_ics 18984150 SUCCEEDED 0 1 10.0 + 201907220600 get_extrn_lbcs 18984151 SUCCEEDED 0 1 14.0 + 201907220600 make_ics_mem000 18984188 SUCCEEDED 0 1 152.0 + 201907220600 make_lbcs_mem000 18984189 SUCCEEDED 0 1 79.0 + 201907220600 forecast_mem000 18988311 SUCCEEDED 0 1 6191.0 + 201907220600 upp_post_mem000_f000 18989105 SUCCEEDED 0 1 212.0 + 201907220600 upp_post_mem000_f001 18989106 SUCCEEDED 0 1 283.0 + 201907220600 upp_post_mem000_f002 18989107 SUCCEEDED 0 1 287.0 + 201907220600 upp_post_mem000_f003 18989108 SUCCEEDED 0 1 284.0 + 201907220600 upp_post_mem000_f004 18989109 SUCCEEDED 0 1 289.0 + 201907220600 upp_post_mem000_f005 18989110 SUCCEEDED 0 1 294.0 + 201907220600 upp_post_mem000_f006 18989111 SUCCEEDED 0 1 294.0 + +If something goes wrong, users can check the log files, which are located by default in ``expt_dirs/smoke_dust_conus3km/nco_logs/20190722``. diff --git a/doc/UsersGuide/BuildingRunningTesting/index.rst b/doc/UsersGuide/BuildingRunningTesting/index.rst index a0aa69c85..a370d9d03 100644 --- a/doc/UsersGuide/BuildingRunningTesting/index.rst +++ b/doc/UsersGuide/BuildingRunningTesting/index.rst @@ -3,7 +3,6 @@ Building, Running, and Testing the SRW App .. toctree:: :maxdepth: 3 - Quickstart ContainerQuickstart @@ -13,4 +12,5 @@ Building, Running, and Testing the SRW App Tutorial VXCases AQM + SRW-SD FIRE diff --git a/doc/UsersGuide/CustomizingTheWorkflow/DefineWorkflow.rst b/doc/UsersGuide/CustomizingTheWorkflow/DefineWorkflow.rst index b5d587969..c07238fa2 100644 --- a/doc/UsersGuide/CustomizingTheWorkflow/DefineWorkflow.rst +++ b/doc/UsersGuide/CustomizingTheWorkflow/DefineWorkflow.rst @@ -6,7 +6,7 @@ Defining an SRW App Workflow Many predefined workflows with optional variants exist within the Short-Range Weather Application, but the Application also includes the ability to define a new workflow from scratch. This functionality allows users to add tasks to the workflow to meet their scientific exploration needs. -Rocoto is the primary workflow manager software used by the UFS SRW App. 
Rocoto workflows are defined in an XML file (``FV3LAM_wflow.xml``) based on parameters set during experiment generation. This section explains how the Rocoto XML is built using a Jinja2 template (`Jinja docs here `_) and structured YAML files. The YAML follows the requirements in the `Rocoto documentation `__ with a few exceptions or additions outlined in this documentation. +Rocoto is the primary workflow manager software used by the UFS SRW App. Rocoto workflows are defined in an XML file (``FV3LAM_wflow.xml``) based on parameters set during experiment generation. This section explains how the Rocoto XML is built using a Jinja2 template (`Jinja docs here `_) and structured YAML files. The YAML follows the requirements in the `Rocoto documentation `_ with a few exceptions or additions outlined in this documentation. The Jinja2 Template =================== diff --git a/modulefiles/build_derecho_intel.lua b/modulefiles/build_derecho_intel.lua index 1356fdb3e..ba24823e8 100644 --- a/modulefiles/build_derecho_intel.lua +++ b/modulefiles/build_derecho_intel.lua @@ -6,7 +6,7 @@ the CISL machine Derecho (Cray) using Intel@2021.10.0 whatis([===[Loads libraries needed for building the UFS SRW App on Derecho ]===]) prepend_path("MODULEPATH","/lustre/desc1/scratch/epicufsrt/contrib/modulefiles_extra") -prepend_path("MODULEPATH", "/glade/work/epicufsrt/contrib/spack-stack/derecho/spack-stack-1.6.0/envs/upp-addon-env/install/modulefiles/Core") +prepend_path("MODULEPATH", "/glade/work/epicufsrt/contrib/spack-stack/derecho/spack-stack-1.6.0/envs/fms-2024.01/install/modulefiles/Core") load(pathJoin("stack-intel", os.getenv("stack_intel_ver") or "2021.10.0")) load(pathJoin("stack-cray-mpich", os.getenv("stack_cray_mpich_ver") or "8.1.25")) diff --git a/modulefiles/build_gaea_intel.lua b/modulefiles/build_gaea_intel.lua index 2a53acf15..9c627a5b1 100644 --- a/modulefiles/build_gaea_intel.lua +++ b/modulefiles/build_gaea_intel.lua @@ -5,7 +5,7 @@ the NOAA RDHPC machine Gaea C5 using Intel-2023.1.0 whatis([===[Loads libraries needed for building the UFS SRW App on Gaea C5 ]===]) -prepend_path("MODULEPATH","/ncrc/proj/epic/spack-stack/spack-stack-1.6.0/envs/upp-addon-env/install/modulefiles/Core") +prepend_path("MODULEPATH","/ncrc/proj/epic/spack-stack/spack-stack-1.6.0/envs/fms-2024.01/install/modulefiles/Core") stack_intel_ver=os.getenv("stack_intel_ver") or "2023.2.0" load(pathJoin("stack-intel", stack_intel_ver)) diff --git a/modulefiles/build_hera_gnu.lua b/modulefiles/build_hera_gnu.lua index 621c7581a..adf4fa828 100644 --- a/modulefiles/build_hera_gnu.lua +++ b/modulefiles/build_hera_gnu.lua @@ -7,7 +7,7 @@ whatis([===[Loads libraries needed for building the UFS SRW App on Hera using GN prepend_path("MODULEPATH", "/scratch2/NCEPDEV/stmp1/role.epic/installs/gnu/modulefiles") prepend_path("MODULEPATH", "/scratch2/NCEPDEV/stmp1/role.epic/installs/openmpi/modulefiles") -prepend_path("MODULEPATH", "/scratch2/NCEPDEV/stmp1/role.epic/spack-stack/spack-stack-1.6.0_gnu13/envs/upp-addon-env/install/modulefiles/Core") +prepend_path("MODULEPATH", "/scratch2/NCEPDEV/stmp1/role.epic/spack-stack/spack-stack-1.6.0_gnu13/envs/fms-2024.01/install/modulefiles/Core") load("stack-gcc/13.3.0") load("stack-openmpi/4.1.6") diff --git a/modulefiles/build_hera_intel.lua b/modulefiles/build_hera_intel.lua index d7ef51b1e..a4e3d434b 100644 --- a/modulefiles/build_hera_intel.lua +++ b/modulefiles/build_hera_intel.lua @@ -8,7 +8,7 @@ whatis([===[Loads libraries needed for building the UFS SRW App on Hera ]===]) 
prepend_path("MODULEPATH","/contrib/sutils/modulefiles") load("sutils") -prepend_path("MODULEPATH", "/scratch1/NCEPDEV/nems/role.epic/spack-stack/spack-stack-1.6.0/envs/upp-addon-env/install/modulefiles/Core") +prepend_path("MODULEPATH", "/scratch1/NCEPDEV/nems/role.epic/spack-stack/spack-stack-1.6.0/envs/fms-2024.01/install/modulefiles/Core") stack_intel_ver=os.getenv("stack_intel_ver") or "2021.5.0" load(pathJoin("stack-intel", stack_intel_ver)) diff --git a/modulefiles/build_hercules_intel.lua b/modulefiles/build_hercules_intel.lua index 54c82569e..944546dc9 100644 --- a/modulefiles/build_hercules_intel.lua +++ b/modulefiles/build_hercules_intel.lua @@ -5,7 +5,7 @@ the MSU machine Hercules using intel-oneapi-compilers/2022.2.1 whatis([===[Loads libraries needed for building the UFS SRW App on Hercules ]===]) -prepend_path("MODULEPATH", "/work/noaa/epic/role-epic/spack-stack/hercules/spack-stack-1.6.0/envs/upp-addon-env/install/modulefiles/Core") +prepend_path("MODULEPATH", "/work/noaa/epic/role-epic/spack-stack/hercules/spack-stack-1.6.0/envs/fms-2024.01/install/modulefiles/Core") load("stack-intel/2021.9.0") load("stack-intel-oneapi-mpi/2021.9.0") diff --git a/modulefiles/build_jet_intel.lua b/modulefiles/build_jet_intel.lua index 04124e4bf..ef80750d6 100644 --- a/modulefiles/build_jet_intel.lua +++ b/modulefiles/build_jet_intel.lua @@ -5,7 +5,7 @@ the NOAA RDHPC machine Jet using Intel-2021.5.0 whatis([===[Loads libraries needed for building the UFS SRW App on Jet ]===]) -prepend_path("MODULEPATH","/contrib/spack-stack/spack-stack-1.6.0/envs/upp-addon-env/install/modulefiles/Core") +prepend_path("MODULEPATH","/contrib/spack-stack/spack-stack-1.6.0/envs/fms-2024.01/install/modulefiles/Core") load("stack-intel/2021.5.0") load("stack-intel-oneapi-mpi/2021.5.1") diff --git a/modulefiles/build_orion_intel.lua b/modulefiles/build_orion_intel.lua index 31efcb57c..1b2d492e7 100644 --- a/modulefiles/build_orion_intel.lua +++ b/modulefiles/build_orion_intel.lua @@ -5,7 +5,7 @@ the MSU machine Orion using intel-oneapi-compilers/2021.9.0 whatis([===[Loads libraries needed for building the UFS SRW App on Orion ]===]) -prepend_path("MODULEPATH", "/work/noaa/epic/role-epic/spack-stack/orion/spack-stack-1.6.0/envs/upp-addon-env/install/modulefiles/Core") +prepend_path("MODULEPATH", "/work/noaa/epic/role-epic/spack-stack/orion/spack-stack-1.6.0/envs/fms-2024.01/install/modulefiles/Core") load("stack-intel/2021.9.0") load("stack-intel-oneapi-mpi/2021.9.0") diff --git a/modulefiles/srw_common.lua b/modulefiles/srw_common.lua index d2bdbe679..b002b8dca 100644 --- a/modulefiles/srw_common.lua +++ b/modulefiles/srw_common.lua @@ -6,7 +6,7 @@ load("netcdf-c/4.9.2") load("netcdf-fortran/4.6.1") load("parallelio/2.5.10") load("esmf/8.6.0") -load("fms/2023.04") +load("fms/2024.01") load("bacio/2.4.1") load("crtm/2.4.0.1") diff --git a/parm/metplus/STATAnalysisConfig_skill_score b/parm/metplus/STATAnalysisConfig_skill_score index fba1106d6..2aa0f97df 100644 --- a/parm/metplus/STATAnalysisConfig_skill_score +++ b/parm/metplus/STATAnalysisConfig_skill_score @@ -12,7 +12,6 @@ model = ["FV3_WoFS_v0_SUBCONUS_3km_test_mem000", "FV3_GFS_v16_SUBCONUS_3km"]; fcst_lead = [ "6", "12", - "6", "12", "6", "12", "6", "12", "12", @@ -51,24 +50,22 @@ obs_init_inc = []; obs_init_exc = []; obs_init_hour = []; -fcst_var = [ "PRMSL", "PRMSL", - "WIND", "WIND", - "DPT", "DPT", - "TMP", "TMP", - "WIND", - "WIND", - "WIND", - "TMP", - "TMP", - "TMP", - "SPFH", - "SPFH", - "SPFH" +fcst_var = [ "WIND", "WIND", + "DPT", "DPT", + "TMP", 
"TMP", + "WIND", + "WIND", + "WIND", + "TMP", + "TMP", + "TMP", + "SPFH", + "SPFH", + "SPFH" ]; obs_var = []; -fcst_lev = [ "Z0", "Z0", - "Z10", "Z10", +fcst_lev = [ "Z10", "Z10", "Z2", "Z2", "Z2", "Z2", "P250", @@ -102,7 +99,6 @@ line_type = [ "SL1L2" ]; column = [ "RMSE" ]; weight = [ 10.0, 8.0, - 10.0, 8.0, 10.0, 8.0, 10.0, 8.0, 4.0, diff --git a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_AK_13km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16_plot.yaml b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_AK_13km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16_plot.yaml index 95b57b0aa..9b3784cd9 100644 --- a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_AK_13km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16_plot.yaml +++ b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_AK_13km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16_plot.yaml @@ -22,5 +22,7 @@ task_get_extrn_lbcs: EXTRN_MDL_NAME_LBCS: FV3GFS LBC_SPEC_INTVL_HRS: 6 USE_USER_STAGED_EXTRN_FILES: true +task_run_fcst: + OMP_NUM_THREADS_RUN_FCST: 1 task_plot_allvars: PLOT_DOMAINS: ["regional"]