From c377164582ee071ce8b3921e10b2d0f100141887 Mon Sep 17 00:00:00 2001 From: Christina Holt <56881914+christinaholtNOAA@users.noreply.github.com> Date: Fri, 26 Jul 2024 11:26:41 -0600 Subject: [PATCH] [develop] Transition the var_defns bash file to YAML. (#1098) Use YAML for the configuration language at run time. --------- Co-authored-by: Michael Lueken <63728921+MichaelLueken@users.noreply.github.com> Co-authored-by: Michael Kavulich Co-authored-by: michael.lueken --- .cicd/scripts/wrapper_srw_ftest.sh | 3 +- aqm_environment.yml | 2 +- .../CustomizingTheWorkflow/ConfigWorkflow.rst | 6 +- environment.yml | 2 +- jobs/JREGIONAL_CHECK_POST_OUTPUT | 19 +- jobs/JREGIONAL_GET_EXTRN_MDL_FILES | 64 +++-- jobs/JREGIONAL_GET_VERIF_OBS | 18 +- jobs/JREGIONAL_INTEGRATION_TEST | 31 ++- jobs/JREGIONAL_MAKE_GRID | 114 ++------ jobs/JREGIONAL_MAKE_ICS | 30 +- jobs/JREGIONAL_MAKE_LBCS | 28 +- jobs/JREGIONAL_MAKE_OROG | 26 +- jobs/JREGIONAL_MAKE_SFC_CLIMO | 29 +- jobs/JREGIONAL_PLOT_ALLVARS | 49 +++- jobs/JREGIONAL_RUN_FCST | 25 +- ...EGIONAL_RUN_MET_GENENSPROD_OR_ENSEMBLESTAT | 15 +- ...JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX | 18 +- ...L_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX_ENSMEAN | 16 +- ...L_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX_ENSPROB | 16 +- jobs/JREGIONAL_RUN_MET_PB2NC_OBS | 16 +- jobs/JREGIONAL_RUN_MET_PCPCOMBINE | 17 +- jobs/JREGIONAL_RUN_POST | 42 ++- jobs/JREGIONAL_RUN_PRDGEN | 33 ++- jobs/JSRW_AQM_ICS | 6 +- jobs/JSRW_AQM_LBCS | 7 +- jobs/JSRW_BIAS_CORRECTION_O3 | 7 +- jobs/JSRW_BIAS_CORRECTION_PM25 | 7 +- jobs/JSRW_FIRE_EMISSION | 6 +- jobs/JSRW_NEXUS_EMISSION | 6 +- jobs/JSRW_NEXUS_GFS_SFC | 8 +- jobs/JSRW_NEXUS_POST_SPLIT | 6 +- jobs/JSRW_POINT_SOURCE | 7 +- jobs/JSRW_POST_STAT_O3 | 7 +- jobs/JSRW_POST_STAT_PM25 | 7 +- jobs/JSRW_PRE_POST_STAT | 6 +- .../tasks/cheyenne/plot_allvars.local.lua | 2 +- .../tasks/derecho/plot_allvars.local.lua | 2 +- modulefiles/tasks/gaea/plot_allvars.local.lua | 2 +- modulefiles/tasks/hera/plot_allvars.local.lua | 2 +- 
.../tasks/hercules/plot_allvars.local.lua | 2 +- modulefiles/tasks/jet/plot_allvars.local.lua | 2 +- .../tasks/noaacloud/plot_allvars.local.lua | 7 +- .../tasks/orion/plot_allvars.local.lua | 2 +- parm/wflow/aqm_post.yaml | 10 +- parm/wflow/aqm_prep.yaml | 16 +- parm/wflow/coldstart.yaml | 10 +- parm/wflow/default_workflow.yaml | 2 +- parm/wflow/plot.yaml | 2 +- parm/wflow/post.yaml | 2 +- parm/wflow/prdgen.yaml | 2 +- parm/wflow/prep.yaml | 6 +- parm/wflow/test.yaml | 2 +- parm/wflow/verify_det.yaml | 8 +- parm/wflow/verify_ens.yaml | 16 +- parm/wflow/verify_pre.yaml | 18 +- scripts/exregional_check_post_output.sh | 44 ++- scripts/exregional_get_extrn_mdl_files.sh | 72 ++++- scripts/exregional_get_verif_obs.sh | 27 +- scripts/exregional_integration_test.py | 6 +- scripts/exregional_make_grid.sh | 111 +++++++- scripts/exregional_make_ics.sh | 92 +++++- scripts/exregional_make_lbcs.sh | 88 +++++- scripts/exregional_make_orog.sh | 262 ++++++++++-------- scripts/exregional_make_sfc_climo.sh | 51 +++- scripts/exregional_run_fcst.sh | 169 +++++++++-- ...onal_run_met_genensprod_or_ensemblestat.sh | 6 +- ...gional_run_met_gridstat_or_pointstat_vx.sh | 10 +- ...un_met_gridstat_or_pointstat_vx_ensmean.sh | 6 +- ...un_met_gridstat_or_pointstat_vx_ensprob.sh | 6 +- scripts/exregional_run_met_pb2nc_obs.sh | 6 +- scripts/exregional_run_met_pcpcombine.sh | 10 +- scripts/exregional_run_post.sh | 76 ++++- scripts/exregional_run_prdgen.sh | 8 +- scripts/exsrw_aqm_ics.sh | 5 +- scripts/exsrw_aqm_lbcs.sh | 12 +- scripts/exsrw_bias_correction_o3.sh | 8 +- scripts/exsrw_bias_correction_pm25.sh | 8 +- scripts/exsrw_fire_emission.sh | 5 +- scripts/exsrw_nexus_emission.sh | 6 +- scripts/exsrw_nexus_gfs_sfc.sh | 7 +- scripts/exsrw_nexus_post_split.sh | 5 +- scripts/exsrw_point_source.sh | 6 +- scripts/exsrw_post_stat_o3.sh | 6 +- scripts/exsrw_post_stat_pm25.sh | 6 +- scripts/exsrw_pre_post_stat.sh | 6 +- tests/WE2E/utils.py | 6 +- tests/test_python/test_retrieve_data.py | 58 ---- 
ush/bash_utils/check_var_valid_value.sh | 2 +- ush/bash_utils/create_symlink_to_file.sh | 1 + ush/bash_utils/print_msg.sh | 2 +- ush/bash_utils/source_config.sh | 53 ---- ush/bash_utils/source_yaml.sh | 36 +++ ush/config_defaults.yaml | 16 +- ush/create_aqm_rc_file.py | 4 +- ush/create_diag_table_file.py | 4 +- ush/create_model_configure_file.py | 4 +- ush/create_ufs_configure_file.py | 4 +- ush/generate_FV3LAM_wflow.py | 26 +- ush/job_preamble.sh | 7 +- ush/launch_FV3LAM_wflow.sh | 50 +--- ush/link_fix.py | 4 +- ush/load_modules_run_task.sh | 129 +++++---- ush/machine/hera.yaml | 4 +- ush/set_fv3nml_ens_stoch_seeds.py | 8 +- ush/set_fv3nml_sfc_climo_filenames.py | 8 +- ush/setup.py | 11 +- ush/source_util_funcs.sh | 6 +- ush/update_input_nml.py | 4 +- ush/wrappers/run_fcst.sh | 7 +- ush/wrappers/run_get_ics.sh | 7 +- ush/wrappers/run_get_lbcs.sh | 7 +- ush/wrappers/run_make_grid.sh | 7 +- ush/wrappers/run_make_ics.sh | 7 +- ush/wrappers/run_make_lbcs.sh | 7 +- ush/wrappers/run_make_orog.sh | 7 +- ush/wrappers/run_make_sfc_climo.sh | 7 +- ush/wrappers/run_post.sh | 7 +- 117 files changed, 1748 insertions(+), 706 deletions(-) delete mode 100644 ush/bash_utils/source_config.sh create mode 100644 ush/bash_utils/source_yaml.sh mode change 100755 => 100644 ush/launch_FV3LAM_wflow.sh diff --git a/.cicd/scripts/wrapper_srw_ftest.sh b/.cicd/scripts/wrapper_srw_ftest.sh index 950ceb7a34..ee26edadaf 100755 --- a/.cicd/scripts/wrapper_srw_ftest.sh +++ b/.cicd/scripts/wrapper_srw_ftest.sh @@ -24,7 +24,8 @@ fi if [[ "${SRW_PLATFORM}" == gaea ]]; then sed -i '15i #SBATCH --clusters=c5' ${WORKSPACE}/${SRW_PLATFORM}/.cicd/scripts/${workflow_cmd}_srw_ftest.sh sed -i 's|qos=batch|qos=normal|g' ${WORKSPACE}/${SRW_PLATFORM}/.cicd/scripts/${workflow_cmd}_srw_ftest.sh - sed -i 's|${JOBSdir}/JREGIONAL_RUN_POST|$USHdir/load_modules_run_task.sh "run_post" ${JOBSdir}/JREGIONAL_RUN_POST|g' ${WORKSPACE}/${SRW_PLATFORM}/ush/wrappers/run_post.sh + sed -i 's|00:30:00|00:45:00|g' 
${WORKSPACE}/${SRW_PLATFORM}/.cicd/scripts/${workflow_cmd}_srw_ftest.sh + sed -i 's|${JOBSdir}/JREGIONAL_RUN_POST|$USHdir/load_modules_run_task.sh "gaea" "run_post" ${JOBSdir}/JREGIONAL_RUN_POST|g' ${WORKSPACE}/${SRW_PLATFORM}/ush/wrappers/run_post.sh fi if [[ "${SRW_PLATFORM}" == hera ]]; then diff --git a/aqm_environment.yml b/aqm_environment.yml index afd8a7b634..11bf9e57e3 100644 --- a/aqm_environment.yml +++ b/aqm_environment.yml @@ -9,5 +9,5 @@ dependencies: - pylint=2.17* - pytest=7.2* - scipy=1.10.* - - uwtools=2.1* + - uwtools=2.3* - xarray=2022.11.* diff --git a/doc/UsersGuide/CustomizingTheWorkflow/ConfigWorkflow.rst b/doc/UsersGuide/CustomizingTheWorkflow/ConfigWorkflow.rst index 52cce90c2c..5161268980 100644 --- a/doc/UsersGuide/CustomizingTheWorkflow/ConfigWorkflow.rst +++ b/doc/UsersGuide/CustomizingTheWorkflow/ConfigWorkflow.rst @@ -293,7 +293,7 @@ WORKFLOW Configuration Parameters If non-default parameters are selected for the variables in this section, they should be added to the ``workflow:`` section of the ``config.yaml`` file. -``WORKFLOW_ID``: (Default: ``!nowtimestamp ''``) +``WORKFLOW_ID``: (Default: ``''``) Unique ID for the workflow run that will be set in ``setup.py``. ``RELATIVE_LINK_FLAG``: (Default: "--relative") @@ -458,8 +458,8 @@ This section contains files and paths to files that are staged in the experiment ``WFLOW_XML_FN``: (Default: "FV3LAM_wflow.xml") Name of the Rocoto workflow XML file that the experiment generation script creates. This file defines the workflow for the experiment. -``GLOBAL_VAR_DEFNS_FN``: (Default: "var_defns.sh") - Name of the file (a shell script) containing definitions of the primary and secondary experiment variables (parameters). This file is sourced by many scripts (e.g., the J-job scripts corresponding to each workflow task) in order to make all the experiment variables available in those scripts. 
The primary variables are defined in the default configuration file (``config_defaults.yaml``) and in the user configuration file (``config.yaml``). The secondary experiment variables are generated by the experiment generation script. +``GLOBAL_VAR_DEFNS_FN``: (Default: "var_defns.yaml") + Name of the auto-generated experiment configuration file. It contains the primary experiment variables defined in this default configuration script and in the user-specified configuration as well as secondary experiment variables generated by the experiment generation script from machine files and other settings. This file is the primary source of information used in the scripts at run time. ``ROCOTO_YAML_FN``: (Default: "rocoto_defns.yaml") Name of the YAML file containing the YAML workflow definition from which the Rocoto XML file is created. diff --git a/environment.yml b/environment.yml index e2dd6b8300..a735213198 100644 --- a/environment.yml +++ b/environment.yml @@ -5,4 +5,4 @@ channels: dependencies: - pylint=2.17* - pytest=7.2* - - uwtools=2.2* + - uwtools=2.3* diff --git a/jobs/JREGIONAL_CHECK_POST_OUTPUT b/jobs/JREGIONAL_CHECK_POST_OUTPUT index f55f730cf4..358b1fad72 100755 --- a/jobs/JREGIONAL_CHECK_POST_OUTPUT +++ b/jobs/JREGIONAL_CHECK_POST_OUTPUT @@ -3,7 +3,22 @@ # #----------------------------------------------------------------------- # +# The J-Job script for checking the post output. # +# Run-time environment variables: +# +# CDATE +# ENSMEM_INDX +# GLOBAL_VAR_DEFNS_FP +# +# Experiment variables +# +# user: +# SCRIPTSdir +# USHdir +# +# workflow: +# EXPTDIR # #----------------------------------------------------------------------- # @@ -16,7 +31,9 @@ #----------------------------------------------------------------------- # . $USHdir/source_util_funcs.sh -source_config_for_task "" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco workflow ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done . 
$USHdir/job_preamble.sh # #----------------------------------------------------------------------- diff --git a/jobs/JREGIONAL_GET_EXTRN_MDL_FILES b/jobs/JREGIONAL_GET_EXTRN_MDL_FILES index 80366f0ddc..fbd582201a 100755 --- a/jobs/JREGIONAL_GET_EXTRN_MDL_FILES +++ b/jobs/JREGIONAL_GET_EXTRN_MDL_FILES @@ -3,20 +3,48 @@ # #----------------------------------------------------------------------- # -# This script gets either from the system directory or from mass store -# (HPSS) the files generated by the external model (specified by the -# variable EXTRN_MDL_NAME) for either the initial conditions (ICs) or the -# lateral boundary conditions (LBCs). Which of these we are considering -# depends on the value of the variable ICS_OR_LBCS, which should be defined -# in the environment (when calling this script from a rocoto workflow, -# the workflow should define this variable, e.g. using rocoto's -# tag). -# -# Note that when we refer to ICs, we are referring to not only the atmospheric -# fields at the initial time but also various surface fields (which are -# for now time-independent) as well as the 0-th forecast hour LBCs. Also, -# when we refer to LBCs, we are referring to the LBCs excluding the one -# at the 0-th hour. +# The J-Job script for getting the model files that will be used for +# either initial conditions or lateral boundary conditions for the +# experiment. 
+# +# Run-time environment variables: +# +# CDATE +# COMIN +# cyc +# DATA +# EXTRN_MDL_STAGING_DIR +# GLOBAL_VAR_DEFNS_FP +# ICS_OR_LBCS +# PDY +# +# Experiment variables +# +# user: +# SCRIPTSdir +# USHdir +# +# workflow: +# EXPTDIR +# +# task_get_extrn_lbcs: +# EXTRN_MDL_FILES_LBCS +# EXTRN_MDL_LBCS_OFFSET_HRS +# EXTRN_MDL_NAME_LBCS +# EXTRN_MDL_SOURCE_BASEDIR_LBCS +# EXTRN_MDL_SYSBASEDIR_LBCS +# FV3GFS_FILE_FMT_LBCS +# LBC_SPEC_INTVL_HRS +# USE_USER_STAGED_EXTRN_FILES +# +# task_get_extrn_ics: +# EXTRN_MDL_FILES_ICS +# EXTRN_MDL_ICS_OFFSET_HRS +# EXTRN_MDL_NAME_ICS +# EXTRN_MDL_SOURCE_BASEDIR_ICS +# EXTRN_MDL_SYSBASEDIR_ICS +# FV3GFS_FILE_FMT_ICS +# USE_USER_STAGED_EXTRN_FILES # #----------------------------------------------------------------------- # @@ -29,8 +57,12 @@ #----------------------------------------------------------------------- # . $USHdir/source_util_funcs.sh -source_config_for_task "task_get_extrn_ics|task_get_extrn_lbcs" ${GLOBAL_VAR_DEFNS_FP} -. $USHdir/job_preamble.sh "TRUE" +for sect in user nco workflow task_get_extrn_lbcs task_get_extrn_ics ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done +. $USHdir/job_preamble.sh + + # #----------------------------------------------------------------------- # diff --git a/jobs/JREGIONAL_GET_VERIF_OBS b/jobs/JREGIONAL_GET_VERIF_OBS index 3820a739db..7c083e96c6 100755 --- a/jobs/JREGIONAL_GET_VERIF_OBS +++ b/jobs/JREGIONAL_GET_VERIF_OBS @@ -3,7 +3,19 @@ # #----------------------------------------------------------------------- # -# This script checks, pulls, and stages observation data for model verification. +# The J-Job script that checks, pulls, and stages observation data for +# model verification. 
+# +# Run-time environment variables: +# +# CDATE +# GLOBAL_VAR_DEFNS_FP +# +# Experiment variables +# +# user: +# SCRIPTSdir +# USHdir # #----------------------------------------------------------------------- # @@ -16,7 +28,9 @@ #----------------------------------------------------------------------- # . $USHdir/source_util_funcs.sh -source_config_for_task " " ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco workflow ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done . $USHdir/job_preamble.sh # #----------------------------------------------------------------------- diff --git a/jobs/JREGIONAL_INTEGRATION_TEST b/jobs/JREGIONAL_INTEGRATION_TEST index cbb93e86cf..983981ecf3 100755 --- a/jobs/JREGIONAL_INTEGRATION_TEST +++ b/jobs/JREGIONAL_INTEGRATION_TEST @@ -1,5 +1,31 @@ #!/bin/bash + +# +#----------------------------------------------------------------------- +# +# This J-Job script runs a set of tests at the end of WE2E tests. +# +# Run-time environment variables: +# +# GLOBAL_VAR_DEFNS_FP +# CDATE +# FCST_DIR +# SLASH_ENSMEM_SUBDIR +# +# Experiment variables +# +# user: +# RUN_ENV +# SCRIPTSdir +# USHdir +# +# workflow: +# FCST_LEN_HRS +# +#----------------------------------------------------------------------- +# + # #----------------------------------------------------------------------- # @@ -8,8 +34,11 @@ #----------------------------------------------------------------------- # . $USHdir/source_util_funcs.sh -source_config_for_task "task_integration_test|task_run_fcst" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco workflow ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done . 
$USHdir/job_preamble.sh + # #----------------------------------------------------------------------- # diff --git a/jobs/JREGIONAL_MAKE_GRID b/jobs/JREGIONAL_MAKE_GRID index 8d65540d1c..01484041e9 100755 --- a/jobs/JREGIONAL_MAKE_GRID +++ b/jobs/JREGIONAL_MAKE_GRID @@ -3,97 +3,25 @@ # #----------------------------------------------------------------------- # -# This script generates grid and orography files in NetCDF format that -# are required as inputs for running the FV3-LAM model (i.e. the FV3 mo- -# del on a regional domain). It in turn calls three other scripts whose -# file names are specified in the variables grid_gen_scr, orog_gen_scr, -# and orog_fltr_scr and then calls the executable defined in the varia- -# ble shave_exec. These scripts/executable perform the following tasks: -# -# 1) grid_gen_scr: -# -# This script generates grid files that will be used by subsequent -# preprocessing steps. It places its output in the directory defined -# by GRID_DIR. Note that: -# -# a) This script creates grid files for each of the 7 tiles of the -# cubed sphere grid (where tiles 1 through 6 cover the globe, and -# tile 7 is the regional grid located somewhere within tile 6) -# even though the forecast will be performed only on tile 7. -# -# b) The tile 7 grid file that this script creates includes a halo, -# i.e. a layer of cells beyond the boundary of tile 7). The width -# of this halo (i.e. the number of cells in the halo in the direc- -# tion perpendicular to the boundary of the tile) must be made -# large enough such that the "shave" steps later below (which take -# this file as input and generate grid files with thinner halos) -# have a wide enough starting halo to work with. More specifical- -# ly, the FV3-LAM model needs as inputs two grid files: one with a -# halo that is 3 cells and another with a halo that is 4 cells -# wide. 
Thus, the halo in the grid file that the grid_gen_scr -# script generates must be greater than 4 since otherwise, the -# shave steps would shave off cells from within the interior of -# tile 7. We will let NHW denote the width of the halo in the -# grid file generated by grid_gen_scr. The "n" in this variable -# name denotes number of cells, the "h" is used to indicate that -# it refers to a halo region, the "w" is used to indicate that it -# refers to a wide halo (i.e. wider than the 3-cell and 4-cell ha- -# los that the FV3-LAM model requires as inputs, and the "T7" is -# used to indicate that the cell count is on tile 7. -# -# 2) orog_gen_scr: -# -# This script generates the orography file. It places its output in -# the directory defined by OROG_DIR. Note that: -# -# a) This script generates an orography file only on tile 7. -# -# b) This orography file contains a halo of the same width (NHW) -# as the grid file for tile 7 generated by the grid_gen_scr script -# in the previous step. -# -# 3) orog_fltr_scr: -# -# This script generates a filtered version of the orography file ge- -# nerated by the script orog_gen_scr. This script places its output -# in the temporary directory defined in WORKDIR_FLTR. Note that: -# -# a) The filtered orography file generated by this script contains a -# halo of the same width (NHW) as the (unfiltered) orography file -# generated by script orog_gen_scr (and the grid file generated by -# grid_gen_scr). -# -# b) In analogy with the input grid files, the FV3-LAM model needs as -# input two (filtered) orography files -- one with no halo cells -# and another with 3. These are obtained later below by "shaving" -# off layers of halo cells from the (filtered) orography file ge- -# nerated in this step. 
-# -# 4) shave_exec: -# -# This "shave" executable is called 4 times to generate 4 files from -# the tile 7 grid file generated by grid_gen_scr and the tile 7 fil- -# tered orography file generated by orog_fltr_scr (both of which have -# a halo of width NHW cells). The 4 output files are placed in the -# temporary directory defined in WORKDIR_SHVE. More specifically: -# -# a) shave_exec is called to shave the halo in the tile 7 grid file -# generated by grid_gen_scr down to a width of 3 cells and store -# the result in a new grid file in WORKDIR_SHVE. -# -# b) shave_exec is called to shave the halo in the tile 7 grid file -# generated by grid_gen_scr down to a width of 4 cells and store -# the result in a new grid file in WORKDIR_SHVE. -# -# c) shave_exec is called to shave the halo in the tile 7 filtered -# orography file generated by orog_fltr_scr down to a width of 0 -# cells (i.e. no halo) and store the result in a new filtered oro- -# graphy file in WORKDIR_SHVE. -# -# d) shave_exec is called to shave the halo in the tile 7 filtered -# orography file generated by orog_fltr_scr down to a width of 4 -# cells and store the result in a new filtered orography file in -# WORKDIR_SHVE. +# The J-Job that generates input NetCDF grid files for running the +# regional configuration of FV3 +# +# Run-time environment variables: +# +# GLOBAL_VAR_DEFNS_FP +# DATA +# +# Experiment variables +# +# user: +# USHdir +# SCRIPTSdir +# +# workflow: +# PREEXISTING_DIR_METHOD +# +# task_make_grid: +# GRID_DIR # #----------------------------------------------------------------------- # @@ -106,7 +34,9 @@ #----------------------------------------------------------------------- # . $USHdir/source_util_funcs.sh -source_config_for_task "task_make_grid" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco workflow task_make_grid ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done . 
$USHdir/job_preamble.sh # #----------------------------------------------------------------------- diff --git a/jobs/JREGIONAL_MAKE_ICS b/jobs/JREGIONAL_MAKE_ICS index c4fb429f1b..10a3b36fb7 100755 --- a/jobs/JREGIONAL_MAKE_ICS +++ b/jobs/JREGIONAL_MAKE_ICS @@ -1,5 +1,31 @@ #!/usr/bin/env bash +# +#----------------------------------------------------------------------- +# +# The J-job to run chgres_cube for preparing initial conditions for the +# FV3 forecast +# +# Run-time environment variables: +# +# COMIN +# DATA +# GLOBAL_VAR_DEFNS_FP +# SLASH_ENSMEM_SUBDIR +# +# Experiment variables +# +# user: +# RUN_ENVIR +# SCRIPTSdir +# USHdir +# +# workflow: +# EXPTDIR +# +#----------------------------------------------------------------------- +# + # #----------------------------------------------------------------------- # @@ -8,7 +34,9 @@ #----------------------------------------------------------------------- # . $USHdir/source_util_funcs.sh -source_config_for_task "task_make_ics" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco workflow ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done . 
$USHdir/job_preamble.sh # #----------------------------------------------------------------------- diff --git a/jobs/JREGIONAL_MAKE_LBCS b/jobs/JREGIONAL_MAKE_LBCS index 81e2578fd4..91d9d3edbe 100755 --- a/jobs/JREGIONAL_MAKE_LBCS +++ b/jobs/JREGIONAL_MAKE_LBCS @@ -1,5 +1,29 @@ #!/usr/bin/env bash +# +#----------------------------------------------------------------------- +# +# The J-job to run chgres_cube for preparing lateral boundary conditions +# for the FV3 forecast +# +# Run-time environment variables: +# +# CDATE +# COMIN +# DATA +# GLOBAL_VAR_DEFNS_FP +# SLASH_ENSMEM_SUBDIR +# +# Experiment variables +# +# user: +# RUN_ENVIR +# SCRIPTSdir +# USHdir +# +#----------------------------------------------------------------------- +# + # #----------------------------------------------------------------------- # @@ -8,7 +32,9 @@ #----------------------------------------------------------------------- # . $USHdir/source_util_funcs.sh -source_config_for_task "task_make_lbcs" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco workflow ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done . $USHdir/job_preamble.sh # #----------------------------------------------------------------------- diff --git a/jobs/JREGIONAL_MAKE_OROG b/jobs/JREGIONAL_MAKE_OROG index b6f674e5ee..28e2f965a5 100755 --- a/jobs/JREGIONAL_MAKE_OROG +++ b/jobs/JREGIONAL_MAKE_OROG @@ -1,5 +1,27 @@ #!/usr/bin/env bash +# +#----------------------------------------------------------------------- +# +# The J-Job that generates input NetCDF orography files for running the +# regional configuration of FV3 +# +# Run-time environment variables: +# +# GLOBAL_VAR_DEFNS_FP +# +# Experiment variables +# +# user: +# SCRIPTSdir +# USHdir +# +# task_make_orog: +# OROG_DIR +# +#----------------------------------------------------------------------- +# + # #----------------------------------------------------------------------- # @@ -8,7 +30,9 @@ #----------------------------------------------------------------------- # . 
$USHdir/source_util_funcs.sh -source_config_for_task "task_make_orog" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco workflow task_make_orog ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done . $USHdir/job_preamble.sh # #----------------------------------------------------------------------- diff --git a/jobs/JREGIONAL_MAKE_SFC_CLIMO b/jobs/JREGIONAL_MAKE_SFC_CLIMO index 7cbd0cc23e..30b2d2c346 100755 --- a/jobs/JREGIONAL_MAKE_SFC_CLIMO +++ b/jobs/JREGIONAL_MAKE_SFC_CLIMO @@ -1,5 +1,30 @@ #!/usr/bin/env bash +# +#----------------------------------------------------------------------- +# +# The J-job to run chgres_cube for preparing lateral boundary conditions +# for the FV3 forecast +# +# Run-time environment variables: +# +# GLOBAL_VAR_DEFNS_FP +# +# Experiment variables +# +# user: +# SCRIPTSdir +# USHdir +# +# workflow: +# PREEXISTING_DIR_METHOD +# +# task_make_sfc_climo: +# SFC_CLIMO_DIR +# +#----------------------------------------------------------------------- +# + # #----------------------------------------------------------------------- # @@ -8,7 +33,9 @@ #----------------------------------------------------------------------- # . $USHdir/source_util_funcs.sh -source_config_for_task "task_make_sfc_climo" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco workflow task_make_sfc_climo ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done . 
$USHdir/job_preamble.sh # #----------------------------------------------------------------------- diff --git a/jobs/JREGIONAL_PLOT_ALLVARS b/jobs/JREGIONAL_PLOT_ALLVARS index 5e59abd93d..be5ee10f82 100755 --- a/jobs/JREGIONAL_PLOT_ALLVARS +++ b/jobs/JREGIONAL_PLOT_ALLVARS @@ -1,5 +1,45 @@ #!/usr/bin/env bash +# +#----------------------------------------------------------------------- +# +# The J-job to plot the forecast output +# +# Run-time environment variables: +# +# CDATE +# COMOUT +# GLOBAL_VAR_DEFNS_FP +# SLASH_ENSMEM_SUBDIR +# +# Experiment variables +# +# user: +# RUN_ENVIR +# SCRIPTSdir +# USHdir +# +# platform: +# FIXshp +# +# workflow: +# EXPT_SUBDIR +# PREEXISTING_DIR_METHOD +# PREDEF_GRID_NAME +# +# task_plot_allvars: +# COMOUT_REF +# PLOT_DOMAINS +# PLOT_FCST_END +# PLOT_FCST_INC +# PLOT_FCST_START +# +# task_run_fcst: +# FCST_LEN_HRS +# +#----------------------------------------------------------------------- +# + # #----------------------------------------------------------------------- # @@ -8,7 +48,9 @@ #----------------------------------------------------------------------- # . $USHdir/source_util_funcs.sh -source_config_for_task "task_plot_allvars|task_run_fcst" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow task_plot_allvars task_run_fcst ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done . 
$USHdir/job_preamble.sh # #----------------------------------------------------------------------- @@ -67,6 +109,11 @@ COMOUT_REF=$(eval echo ${COMOUT_REF}) #----------------------------------------------------------------------- # +if [ -n "${SRW_GRAPHICS_ENV:-}" ] ; then + set +u + conda activate ${SRW_GRAPHICS_ENV} + set -u +fi # plot all variables $SCRIPTSdir/exregional_plot_allvars.py \ --cycle ${CDATE} \ diff --git a/jobs/JREGIONAL_RUN_FCST b/jobs/JREGIONAL_RUN_FCST index 45f826c0d7..2542ab32f8 100755 --- a/jobs/JREGIONAL_RUN_FCST +++ b/jobs/JREGIONAL_RUN_FCST @@ -3,9 +3,24 @@ # #----------------------------------------------------------------------- # -# This script copies files from various directories into the experiment -# directory, creates links to some of them, and modifies others (e.g. -# templates) to customize them for the current experiment setup. +# The J-Job that runs the forecast +# +# Run-time environment variables: +# +# CDATE +# COMIN +# DATA +# GLOBAL_VAR_DEFNS_FP +# SLASH_ENSMEM_SUBDIR +# +# Experiment variables +# +# user: +# SCRIPTSdir +# USHdir +# +# workflow: +# RUN_ENVIR # #----------------------------------------------------------------------- # @@ -18,7 +33,9 @@ #----------------------------------------------------------------------- # . $USHdir/source_util_funcs.sh -source_config_for_task "task_run_fcst" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco workflow ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done . 
$USHdir/job_preamble.sh "TRUE" # #----------------------------------------------------------------------- diff --git a/jobs/JREGIONAL_RUN_MET_GENENSPROD_OR_ENSEMBLESTAT b/jobs/JREGIONAL_RUN_MET_GENENSPROD_OR_ENSEMBLESTAT index 707697b5ab..c7aee12df1 100755 --- a/jobs/JREGIONAL_RUN_MET_GENENSPROD_OR_ENSEMBLESTAT +++ b/jobs/JREGIONAL_RUN_MET_GENENSPROD_OR_ENSEMBLESTAT @@ -3,7 +3,18 @@ # #----------------------------------------------------------------------- # +# The J-Job that runs that runs either METplus's gen_ens_prod tool or its +# ensemble_stat tool for ensemble verification. # +# Run-time environment variables: +# +# GLOBAL_VAR_DEFNS_FP +# +# Experiment variables +# +# user: +# SCRIPTSdir +# USHdir # #----------------------------------------------------------------------- # @@ -16,7 +27,9 @@ #----------------------------------------------------------------------- # . $USHdir/source_util_funcs.sh -source_config_for_task "task_run_vx_ensgrid|task_run_vx_enspoint" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco workflow ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done . $USHdir/job_preamble.sh # #----------------------------------------------------------------------- diff --git a/jobs/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX b/jobs/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX index 0301e9946a..e1207e0a81 100755 --- a/jobs/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX +++ b/jobs/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX @@ -3,8 +3,18 @@ # #----------------------------------------------------------------------- # -# This script runs the METplus GridStat or PointStat tool for deterministic -# verification. +# This script runs the METplus GridStat or PointStat tool for +# deterministic verification. 
+# +# Run-time environment variables: +# +# GLOBAL_VAR_DEFNS_FP +# +# Experiment variables +# +# user: +# SCRIPTSdir +# USHdir # #----------------------------------------------------------------------- # @@ -17,7 +27,9 @@ #----------------------------------------------------------------------- # . $USHdir/source_util_funcs.sh -source_config_for_task "task_run_vx_gridstat" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco workflow ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done . $USHdir/job_preamble.sh # #----------------------------------------------------------------------- diff --git a/jobs/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX_ENSMEAN b/jobs/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX_ENSMEAN index ab08320f33..29b22502a4 100755 --- a/jobs/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX_ENSMEAN +++ b/jobs/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX_ENSMEAN @@ -3,7 +3,19 @@ # #----------------------------------------------------------------------- # +# The J-Job that runs MET/METplus's GridStat or PointStat tool to +# perform verification on the ensemble mean of a specified field (or +# group of fields). # +# Run-time environment variables: +# +# GLOBAL_VAR_DEFNS_FP +# +# Experiment variables +# +# user: +# SCRIPTSdir +# USHdir # #----------------------------------------------------------------------- # @@ -16,7 +28,9 @@ #----------------------------------------------------------------------- # . $USHdir/source_util_funcs.sh -source_config_for_task "task_run_vx_ensgrid_mean" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco workflow ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done . 
$USHdir/job_preamble.sh # #----------------------------------------------------------------------- diff --git a/jobs/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX_ENSPROB b/jobs/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX_ENSPROB index 7da98212ac..731cf575a5 100755 --- a/jobs/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX_ENSPROB +++ b/jobs/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX_ENSPROB @@ -3,7 +3,19 @@ # #----------------------------------------------------------------------- # +# The J-Job that runs METplus's GridStat or PointStat tool to perform +# verification on the ensemble frequencies/ probabilities of a specified +# field (or group of fields). # +# Run-time environment variables: +# +# GLOBAL_VAR_DEFNS_FP +# +# Experiment variables +# +# user: +# SCRIPTSdir +# USHdir # #----------------------------------------------------------------------- # @@ -16,7 +28,9 @@ #----------------------------------------------------------------------- # . $USHdir/source_util_funcs.sh -source_config_for_task "task_run_vx_ensgrid_prob" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco workflow ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done . $USHdir/job_preamble.sh # #----------------------------------------------------------------------- diff --git a/jobs/JREGIONAL_RUN_MET_PB2NC_OBS b/jobs/JREGIONAL_RUN_MET_PB2NC_OBS index 2767ae1146..89c9bb73f4 100755 --- a/jobs/JREGIONAL_RUN_MET_PB2NC_OBS +++ b/jobs/JREGIONAL_RUN_MET_PB2NC_OBS @@ -4,6 +4,18 @@ #----------------------------------------------------------------------- # # +# The J-Job that runs METplus for point-stat by initialization time for +# all forecast hours. +# +# Run-time environment variables: +# +# GLOBAL_VAR_DEFNS_FP +# +# Experiment variables +# +# user: +# SCRIPTSdir +# USHdir # #----------------------------------------------------------------------- # @@ -16,7 +28,9 @@ #----------------------------------------------------------------------- # . 
$USHdir/source_util_funcs.sh -source_config_for_task "task_run_met_pb2nc_obs" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco workflow ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done . $USHdir/job_preamble.sh # #----------------------------------------------------------------------- diff --git a/jobs/JREGIONAL_RUN_MET_PCPCOMBINE b/jobs/JREGIONAL_RUN_MET_PCPCOMBINE index 7364ed96c9..8ac29887e8 100755 --- a/jobs/JREGIONAL_RUN_MET_PCPCOMBINE +++ b/jobs/JREGIONAL_RUN_MET_PCPCOMBINE @@ -3,7 +3,20 @@ # #----------------------------------------------------------------------- # +# The J-job that runs the MET/METplus PcpCombine tool on hourly +# accumulated precipitation (APCP) data to obtain APCP for multi-hour +# accumulation periods. The data can be from CCPA observations or a +# forecast. # +# Run-time environment variables: +# +# GLOBAL_VAR_DEFNS_FP +# +# Experiment variables +# +# user: +# SCRIPTSdir +# USHdir # #----------------------------------------------------------------------- # @@ -16,7 +29,9 @@ #----------------------------------------------------------------------- # . $USHdir/source_util_funcs.sh -source_config_for_task "task_run_met_pcpcombine" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco workflow ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done . $USHdir/job_preamble.sh # #----------------------------------------------------------------------- diff --git a/jobs/JREGIONAL_RUN_POST b/jobs/JREGIONAL_RUN_POST index 692b3ae65d..58c469fc6d 100755 --- a/jobs/JREGIONAL_RUN_POST +++ b/jobs/JREGIONAL_RUN_POST @@ -3,8 +3,38 @@ # #----------------------------------------------------------------------- # -# This script runs the post-processor (UPP) on the NetCDF output files -# of the write component of the FV3-LAM model. +# The J-Job that runs the Unified Post-processor (UPP) on the NetCDF +# output from FV3. 
+# +# Run-time environment variables: +# +# COMIN +# COMOUT +# cyc +# DATA +# DATAROOT +# GLOBAL_VAR_DEFNS_FP +# PDY +# SLASH_ENSMEM_SUBDIR +# +# Experiment variables +# +# user: +# SCRIPTSdir +# USHdir +# +# platform: +# WORKFLOW_MANAGER +# +# workflow: +# DATE_FIRST_CYCL +# FCST_LEN_CYCL +# FCST_LEN_HRS +# INCR_CYCL_FREQ +# RUN_ENVIR +# +# task_run_post: +# SUB_HOURLY_POST # #----------------------------------------------------------------------- # @@ -17,7 +47,9 @@ #----------------------------------------------------------------------- # . $USHdir/source_util_funcs.sh -source_config_for_task "task_run_post|task_run_fcst" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow task_run_post ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done . $USHdir/job_preamble.sh # #----------------------------------------------------------------------- @@ -60,7 +92,7 @@ on the output files corresponding to a specified forecast hour. # minutes (fmn) are set to "00". This is necessary in order to pass # "fmn" into the post ex-script for the calculation of post_time. 
# -if [ "${SUB_HOURLY_POST}" != "TRUE" ]; then +if [ $(boolify "${SUB_HOURLY_POST}") != "TRUE" ]; then export fmn="00" fi # @@ -88,7 +120,7 @@ if [ "${RUN_ENVIR}" = "community" ]; then mkdir -p "${COMOUT}" fi -if [ "${SUB_HOURLY_POST}" = "TRUE" ]; then +if [ $(boolify "${SUB_HOURLY_POST}") = "TRUE" ]; then export DATA_FHR="${DATA:-$COMOUT}/$fhr$fmn" else export DATA_FHR="${DATA:-$COMOUT}/$fhr" diff --git a/jobs/JREGIONAL_RUN_PRDGEN b/jobs/JREGIONAL_RUN_PRDGEN index 24479cb62d..1cf933b666 100755 --- a/jobs/JREGIONAL_RUN_PRDGEN +++ b/jobs/JREGIONAL_RUN_PRDGEN @@ -3,10 +3,33 @@ # #----------------------------------------------------------------------- # -# This script runs wgrib2 to create various subdomain GRIB2 files from -# the raw UPP-generated GRIB2 output from the run_post task of the +# The J-Job that runs wgrib2 to create various subdomain GRIB2 files +# from the raw UPP-generated GRIB2 output from the run_post task of the # FV3-LAM model. # +# Run-time environment variables: +# +# COMIN +# COMOUT +# DATA +# GLOBAL_VAR_DEFNS_FP +# SLASH_ENSMEM_SUBDIR +# +# Experiment variables +# +# user: +# SCRIPTSdir +# USHdir +# +# platform: +# WORKFLOW_MANAGER +# +# workflow: +# RUN_ENVIR +# +# task_run_post: +# SUB_HOURLY_POST +# #----------------------------------------------------------------------- # @@ -18,7 +41,9 @@ #----------------------------------------------------------------------- # . $USHdir/source_util_funcs.sh -source_config_for_task "task_run_prdgen|task_run_post" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow task_run_post ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done . 
$USHdir/job_preamble.sh # #----------------------------------------------------------------------- @@ -87,7 +112,7 @@ fi mkdir -p "${COMOUT}" # subhourly post -if [ "${SUB_HOURLY_POST}" = "TRUE" ]; then +if [ $(boolify "${SUB_HOURLY_POST}") = "TRUE" ]; then export DATA_FHR="${DATA:-$COMOUT}/$fhr$fmn" else export DATA_FHR="${DATA:-$COMOUT}/$fhr" diff --git a/jobs/JSRW_AQM_ICS b/jobs/JSRW_AQM_ICS index 0c4df8aa5b..5d5f6d970e 100755 --- a/jobs/JSRW_AQM_ICS +++ b/jobs/JSRW_AQM_ICS @@ -31,7 +31,9 @@ export SCRIPTSsrw="${HOMEdir}/scripts" # export USHdir="${USHsrw}" # should be removed later . ${USHsrw}/source_util_funcs.sh -source_config_for_task "task_aqm_ics" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow nco global cpl_aqm_parm cpl_aqm_parm ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # @@ -130,7 +132,7 @@ setpdy.sh if [ ${subcyc} -ne 0 ]; then export cycle="t${cyc}${subcyc}z" fi -if [ "${DO_ENSEMBLE}" = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then +if [ $(boolify "${DO_ENSEMBLE}") = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then export dot_ensmem=".mem${ENSMEM_INDX}" else export dot_ensmem= diff --git a/jobs/JSRW_AQM_LBCS b/jobs/JSRW_AQM_LBCS index 11a1420d5e..9279dbe190 100755 --- a/jobs/JSRW_AQM_LBCS +++ b/jobs/JSRW_AQM_LBCS @@ -31,7 +31,10 @@ export SCRIPTSsrw="${HOMEdir}/scripts" # export USHdir="${USHsrw}" # should be removed later . 
${USHsrw}/source_util_funcs.sh -source_config_for_task "task_get_extrn_lbcs|task_make_orog|task_make_lbcs|cpl_aqm_parm|task_aqm_lbcs" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow nco global cpl_aqm_parm cpl_aqm_parm task_get_extrn_lbcs \ + task_make_orog task_make_lbcs ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # @@ -131,7 +134,7 @@ setpdy.sh if [ ${subcyc} -ne 0 ]; then export cycle="t${cyc}${subcyc}z" fi -if [ "${DO_ENSEMBLE}" = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then +if [ $(boolify "${DO_ENSEMBLE}") = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then export dot_ensmem=".mem${ENSMEM_INDX}" else export dot_ensmem= diff --git a/jobs/JSRW_BIAS_CORRECTION_O3 b/jobs/JSRW_BIAS_CORRECTION_O3 index 3ab2f2d40f..0849614840 100755 --- a/jobs/JSRW_BIAS_CORRECTION_O3 +++ b/jobs/JSRW_BIAS_CORRECTION_O3 @@ -30,7 +30,10 @@ export SCRIPTSsrw="${HOMEdir}/scripts" # export USHdir="${USHsrw}" # should be removed later . ${USHsrw}/source_util_funcs.sh -source_config_for_task "cpl_aqm_parm|task_run_post|task_bias_correction_o3" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow nco global cpl_aqm_parm cpl_aqm_parm task_run_post \ + task_bias_correction_o3 ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # @@ -123,7 +126,7 @@ setpdy.sh if [ ${subcyc} -ne 0 ]; then export cycle="t${cyc}${subcyc}z" fi -if [ "${DO_ENSEMBLE}" = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then +if [ $(boolify "${DO_ENSEMBLE}") = "TRUE" ] && [ ! 
-z ${ENSMEM_INDX} ]; then export dot_ensmem=".mem${ENSMEM_INDX}" else export dot_ensmem= diff --git a/jobs/JSRW_BIAS_CORRECTION_PM25 b/jobs/JSRW_BIAS_CORRECTION_PM25 index 42210e7f29..a0a7f76dad 100755 --- a/jobs/JSRW_BIAS_CORRECTION_PM25 +++ b/jobs/JSRW_BIAS_CORRECTION_PM25 @@ -30,7 +30,10 @@ export SCRIPTSsrw="${HOMEdir}/scripts" # export USHdir="${USHsrw}" # should be removed later . ${USHsrw}/source_util_funcs.sh -source_config_for_task "cpl_aqm_parm|task_run_post|task_bias_correction_pm25" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow nco global cpl_aqm_parm cpl_aqm_parm task_run_post \ + task_bias_correction_pm25 ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # @@ -123,7 +126,7 @@ setpdy.sh if [ ${subcyc} -ne 0 ]; then export cycle="t${cyc}${subcyc}z" fi -if [ "${DO_ENSEMBLE}" = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then +if [ $(boolify "${DO_ENSEMBLE}") = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then export dot_ensmem=".mem${ENSMEM_INDX}" else export dot_ensmem= diff --git a/jobs/JSRW_FIRE_EMISSION b/jobs/JSRW_FIRE_EMISSION index ae0343e60e..8a2b581274 100755 --- a/jobs/JSRW_FIRE_EMISSION +++ b/jobs/JSRW_FIRE_EMISSION @@ -30,7 +30,9 @@ export SCRIPTSsrw="${HOMEdir}/scripts" # export USHdir="${USHsrw}" # should be removed later . ${USHsrw}/source_util_funcs.sh -source_config_for_task "cpl_aqm_parm|task_fire_emission" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow nco global cpl_aqm_parm cpl_aqm_parm ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # @@ -126,7 +128,7 @@ setpdy.sh if [ ${subcyc} -ne 0 ]; then export cycle="t${cyc}${subcyc}z" fi -if [ "${DO_ENSEMBLE}" = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then +if [ $(boolify "${DO_ENSEMBLE}") = "TRUE" ] && [ ! 
-z ${ENSMEM_INDX} ]; then export dot_ensmem=".mem${ENSMEM_INDX}" else export dot_ensmem= diff --git a/jobs/JSRW_NEXUS_EMISSION b/jobs/JSRW_NEXUS_EMISSION index 33f1aca757..aab5869cff 100755 --- a/jobs/JSRW_NEXUS_EMISSION +++ b/jobs/JSRW_NEXUS_EMISSION @@ -30,7 +30,9 @@ export SCRIPTSsrw="${HOMEdir}/scripts" # export USHdir="${USHsrw}" # should be removed later . ${USHsrw}/source_util_funcs.sh -source_config_for_task "task_run_fcst|cpl_aqm_parm|task_nexus_emission" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow nco global cpl_aqm_parm cpl_aqm_parm task_nexus_emission ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # @@ -128,7 +130,7 @@ setpdy.sh if [ ${subcyc} -ne 0 ]; then export cycle="t${cyc}${subcyc}z" fi -if [ "${DO_ENSEMBLE}" = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then +if [ $(boolify "${DO_ENSEMBLE}") = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then export dot_ensmem=".mem${ENSMEM_INDX}" else export dot_ensmem= diff --git a/jobs/JSRW_NEXUS_GFS_SFC b/jobs/JSRW_NEXUS_GFS_SFC index 89d84c740d..ceed6be32a 100755 --- a/jobs/JSRW_NEXUS_GFS_SFC +++ b/jobs/JSRW_NEXUS_GFS_SFC @@ -30,7 +30,9 @@ export SCRIPTSsrw="${HOMEdir}/scripts" # export USHdir="${USHsrw}" # should be removed later . ${USHsrw}/source_util_funcs.sh -source_config_for_task "cpl_aqm_parm|task_nexus_gfs_sfc" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow nco global cpl_aqm_parm cpl_aqm_parm ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # @@ -128,7 +130,7 @@ setpdy.sh if [ ${subcyc} -ne 0 ]; then export cycle="t${cyc}${subcyc}z" fi -if [ "${DO_ENSEMBLE}" = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then +if [ $(boolify "${DO_ENSEMBLE}") = "TRUE" ] && [ ! 
-z ${ENSMEM_INDX} ]; then export dot_ensmem=".mem${ENSMEM_INDX}" else export dot_ensmem= @@ -143,7 +145,7 @@ fi if [ ${subcyc} -ne 0 ]; then export cycle="t${cyc}${subcyc}z" fi -if [ "${DO_ENSEMBLE}" = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then +if [ $(boolify "${DO_ENSEMBLE}") = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then export dot_ensmem=".mem${ENSMEM_INDX}" else export dot_ensmem= diff --git a/jobs/JSRW_NEXUS_POST_SPLIT b/jobs/JSRW_NEXUS_POST_SPLIT index 6e5a0a259a..10f4101d5c 100755 --- a/jobs/JSRW_NEXUS_POST_SPLIT +++ b/jobs/JSRW_NEXUS_POST_SPLIT @@ -30,7 +30,9 @@ export SCRIPTSsrw="${HOMEdir}/scripts" # export USHdir="${USHsrw}" # should be removed later . ${USHsrw}/source_util_funcs.sh -source_config_for_task "cpl_aqm_parm|task_nexus_post_split" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow nco global cpl_aqm_parm cpl_aqm_parm ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # @@ -128,7 +130,7 @@ setpdy.sh if [ ${subcyc} -ne 0 ]; then export cycle="t${cyc}${subcyc}z" fi -if [ "${DO_ENSEMBLE}" = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then +if [ $(boolify "${DO_ENSEMBLE}") = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then export dot_ensmem=".mem${ENSMEM_INDX}" else export dot_ensmem= diff --git a/jobs/JSRW_POINT_SOURCE b/jobs/JSRW_POINT_SOURCE index a112a2d275..6218acaa99 100755 --- a/jobs/JSRW_POINT_SOURCE +++ b/jobs/JSRW_POINT_SOURCE @@ -30,7 +30,10 @@ export SCRIPTSsrw="${HOMEdir}/scripts" # export USHdir="${USHsrw}" # should be removed later . 
${USHsrw}/source_util_funcs.sh -source_config_for_task "task_run_fcst|cpl_aqm_parm|task_point_source" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow nco global cpl_aqm_parm cpl_aqm_parm task_point_source \ + task_run_fcst ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # @@ -123,7 +126,7 @@ setpdy.sh if [ ${subcyc} -ne 0 ]; then export cycle="t${cyc}${subcyc}z" fi -if [ "${DO_ENSEMBLE}" = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then +if [ $(boolify "${DO_ENSEMBLE}") = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then export dot_ensmem=".mem${ENSMEM_INDX}" else export dot_ensmem= diff --git a/jobs/JSRW_POST_STAT_O3 b/jobs/JSRW_POST_STAT_O3 index 8924cba9e5..5fadd70d30 100755 --- a/jobs/JSRW_POST_STAT_O3 +++ b/jobs/JSRW_POST_STAT_O3 @@ -30,7 +30,10 @@ export SCRIPTSsrw="${HOMEdir}/scripts" # export USHdir="${USHsrw}" # should be removed later . ${USHsrw}/source_util_funcs.sh -source_config_for_task "cpl_aqm_parm|task_run_post|task_post_stat_o3" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow nco global cpl_aqm_parm cpl_aqm_parm \ + task_run_post ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done . $USHdir/job_preamble.sh # #----------------------------------------------------------------------- @@ -124,7 +127,7 @@ setpdy.sh if [ ${subcyc} -ne 0 ]; then export cycle="t${cyc}${subcyc}z" fi -if [ "${DO_ENSEMBLE}" = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then +if [ $(boolify "${DO_ENSEMBLE}") = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then export dot_ensmem=".mem${ENSMEM_INDX}" else export dot_ensmem= diff --git a/jobs/JSRW_POST_STAT_PM25 b/jobs/JSRW_POST_STAT_PM25 index 83434fa8c7..2d7d6e9e88 100755 --- a/jobs/JSRW_POST_STAT_PM25 +++ b/jobs/JSRW_POST_STAT_PM25 @@ -30,7 +30,10 @@ export SCRIPTSsrw="${HOMEdir}/scripts" # export USHdir="${USHsrw}" # should be removed later . 
${USHsrw}/source_util_funcs.sh -source_config_for_task "cpl_aqm_parm|task_run_post|task_post_stat_pm25" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow nco global cpl_aqm_parm cpl_aqm_parm \ + task_run_post ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # @@ -123,7 +126,7 @@ setpdy.sh if [ ${subcyc} -ne 0 ]; then export cycle="t${cyc}${subcyc}z" fi -if [ "${DO_ENSEMBLE}" = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then +if [ $(boolify "${DO_ENSEMBLE}") = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then export dot_ensmem=".mem${ENSMEM_INDX}" else export dot_ensmem= diff --git a/jobs/JSRW_PRE_POST_STAT b/jobs/JSRW_PRE_POST_STAT index 12561085c2..8c51e18510 100755 --- a/jobs/JSRW_PRE_POST_STAT +++ b/jobs/JSRW_PRE_POST_STAT @@ -30,7 +30,9 @@ export SCRIPTSsrw="${HOMEdir}/scripts" # export USHdir="${USHsrw}" # should be removed later . ${USHsrw}/source_util_funcs.sh -source_config_for_task "task_pre_post_stat" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow nco global cpl_aqm_parm cpl_aqm_parm ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # @@ -127,7 +129,7 @@ setpdy.sh if [ ${subcyc} -ne 0 ]; then export cycle="t${cyc}${subcyc}z" fi -if [ "${DO_ENSEMBLE}" = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then +if [ $(boolify "${DO_ENSEMBLE}") = "TRUE" ] && [ ! 
-z ${ENSMEM_INDX} ]; then export dot_ensmem=".mem${ENSMEM_INDX}" else export dot_ensmem= diff --git a/modulefiles/tasks/cheyenne/plot_allvars.local.lua b/modulefiles/tasks/cheyenne/plot_allvars.local.lua index b49b8bb863..7cee04231e 100644 --- a/modulefiles/tasks/cheyenne/plot_allvars.local.lua +++ b/modulefiles/tasks/cheyenne/plot_allvars.local.lua @@ -1,3 +1,3 @@ unload("python") load("conda") -setenv("SRW_ENV", "srw_graphics") +setenv("SRW_GRAPHICS_ENV", "srw_graphics") diff --git a/modulefiles/tasks/derecho/plot_allvars.local.lua b/modulefiles/tasks/derecho/plot_allvars.local.lua index b49b8bb863..7cee04231e 100644 --- a/modulefiles/tasks/derecho/plot_allvars.local.lua +++ b/modulefiles/tasks/derecho/plot_allvars.local.lua @@ -1,3 +1,3 @@ unload("python") load("conda") -setenv("SRW_ENV", "srw_graphics") +setenv("SRW_GRAPHICS_ENV", "srw_graphics") diff --git a/modulefiles/tasks/gaea/plot_allvars.local.lua b/modulefiles/tasks/gaea/plot_allvars.local.lua index 104da06f5c..41da34ecca 100644 --- a/modulefiles/tasks/gaea/plot_allvars.local.lua +++ b/modulefiles/tasks/gaea/plot_allvars.local.lua @@ -1,4 +1,4 @@ unload("python") load("conda") -setenv("SRW_ENV", "srw_graphics") +setenv("SRW_GRAPHICS_ENV", "srw_graphics") diff --git a/modulefiles/tasks/hera/plot_allvars.local.lua b/modulefiles/tasks/hera/plot_allvars.local.lua index b7e9528710..85291013c7 100644 --- a/modulefiles/tasks/hera/plot_allvars.local.lua +++ b/modulefiles/tasks/hera/plot_allvars.local.lua @@ -1,2 +1,2 @@ load("conda") -setenv("SRW_ENV", "srw_graphics") +setenv("SRW_GRAPHICS_ENV", "srw_graphics") diff --git a/modulefiles/tasks/hercules/plot_allvars.local.lua b/modulefiles/tasks/hercules/plot_allvars.local.lua index b49b8bb863..7cee04231e 100644 --- a/modulefiles/tasks/hercules/plot_allvars.local.lua +++ b/modulefiles/tasks/hercules/plot_allvars.local.lua @@ -1,3 +1,3 @@ unload("python") load("conda") -setenv("SRW_ENV", "srw_graphics") +setenv("SRW_GRAPHICS_ENV", "srw_graphics") diff --git 
a/modulefiles/tasks/jet/plot_allvars.local.lua b/modulefiles/tasks/jet/plot_allvars.local.lua index b7e9528710..85291013c7 100644 --- a/modulefiles/tasks/jet/plot_allvars.local.lua +++ b/modulefiles/tasks/jet/plot_allvars.local.lua @@ -1,2 +1,2 @@ load("conda") -setenv("SRW_ENV", "srw_graphics") +setenv("SRW_GRAPHICS_ENV", "srw_graphics") diff --git a/modulefiles/tasks/noaacloud/plot_allvars.local.lua b/modulefiles/tasks/noaacloud/plot_allvars.local.lua index b7e9528710..2fd9b41eb5 100644 --- a/modulefiles/tasks/noaacloud/plot_allvars.local.lua +++ b/modulefiles/tasks/noaacloud/plot_allvars.local.lua @@ -1,2 +1,5 @@ -load("conda") -setenv("SRW_ENV", "srw_graphics") +unload("python") +append_path("MODULEPATH","/contrib/EPIC/miniconda3/modulefiles") +load(pathJoin("miniconda3", os.getenv("miniconda3_ver") or "4.12.0")) + +setenv("SRW_GRAPHICS_ENV", "regional_workflow") diff --git a/modulefiles/tasks/orion/plot_allvars.local.lua b/modulefiles/tasks/orion/plot_allvars.local.lua index b49b8bb863..7cee04231e 100644 --- a/modulefiles/tasks/orion/plot_allvars.local.lua +++ b/modulefiles/tasks/orion/plot_allvars.local.lua @@ -1,3 +1,3 @@ unload("python") load("conda") -setenv("SRW_ENV", "srw_graphics") +setenv("SRW_GRAPHICS_ENV", "srw_graphics") diff --git a/parm/wflow/aqm_post.yaml b/parm/wflow/aqm_post.yaml index 5f307184d3..48a0761fef 100644 --- a/parm/wflow/aqm_post.yaml +++ b/parm/wflow/aqm_post.yaml @@ -22,7 +22,7 @@ default_aqm_task: &default_aqm task_pre_post_stat: <<: *default_aqm - command: '&LOAD_MODULES_RUN_TASK_FP; "pre_post_stat" "&HOMEdir;/jobs/JSRW_PRE_POST_STAT"' + command: '&LOAD_MODULES_RUN_TASK; "pre_post_stat" "&HOMEdir;/jobs/JSRW_PRE_POST_STAT"' join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' dependency: or: @@ -36,7 +36,7 @@ task_pre_post_stat: task_post_stat_o3: <<: *default_aqm - command: '&LOAD_MODULES_RUN_TASK_FP; "post_stat_o3" "&HOMEdir;/jobs/JSRW_POST_STAT_O3"' + command: '&LOAD_MODULES_RUN_TASK; "post_stat_o3" 
"&HOMEdir;/jobs/JSRW_POST_STAT_O3"' join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' memory: 120G dependency: @@ -46,7 +46,7 @@ task_post_stat_o3: task_post_stat_pm25: <<: *default_aqm - command: '&LOAD_MODULES_RUN_TASK_FP; "post_stat_pm25" "&HOMEdir;/jobs/JSRW_POST_STAT_PM25"' + command: '&LOAD_MODULES_RUN_TASK; "post_stat_pm25" "&HOMEdir;/jobs/JSRW_POST_STAT_PM25"' join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' memory: 120G dependency: @@ -56,7 +56,7 @@ task_post_stat_pm25: task_bias_correction_o3: <<: *default_aqm - command: '&LOAD_MODULES_RUN_TASK_FP; "bias_correction_o3" "&HOMEdir;/jobs/JSRW_BIAS_CORRECTION_O3"' + command: '&LOAD_MODULES_RUN_TASK; "bias_correction_o3" "&HOMEdir;/jobs/JSRW_BIAS_CORRECTION_O3"' join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' memory: 120G dependency: @@ -66,7 +66,7 @@ task_bias_correction_o3: task_bias_correction_pm25: <<: *default_aqm - command: '&LOAD_MODULES_RUN_TASK_FP; "bias_correction_pm25" "&HOMEdir;/jobs/JSRW_BIAS_CORRECTION_PM25"' + command: '&LOAD_MODULES_RUN_TASK; "bias_correction_pm25" "&HOMEdir;/jobs/JSRW_BIAS_CORRECTION_PM25"' join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' memory: 120G dependency: diff --git a/parm/wflow/aqm_prep.yaml b/parm/wflow/aqm_prep.yaml index c57d2198f0..d90bbde60f 100644 --- a/parm/wflow/aqm_prep.yaml +++ b/parm/wflow/aqm_prep.yaml @@ -29,7 +29,7 @@ default_aqm_task: &default_aqm task_nexus_gfs_sfc: <<: *default_aqm - command: '&LOAD_MODULES_RUN_TASK_FP; "nexus_gfs_sfc" "&HOMEdir;/jobs/JSRW_NEXUS_GFS_SFC"' + command: '&LOAD_MODULES_RUN_TASK; "nexus_gfs_sfc" "&HOMEdir;/jobs/JSRW_NEXUS_GFS_SFC"' native: '{% if platform.get("SCHED_NATIVE_CMD_HPSS") %}{{ platform.SCHED_NATIVE_CMD_HPSS }}{% else %}{{ platform.SCHED_NATIVE_CMD}}{% endif %}' partition: '{% if platform.get("PARTITION_HPSS") %}&PARTITION_HPSS;{% else %}None{% endif %}' join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' @@ -53,7 +53,7 @@ metatask_nexus_emission: nspt: '{% for h in range(0, 
cpl_aqm_parm.NUM_SPLIT_NEXUS) %}{{ " %02d" % h }}{% endfor %}' task_nexus_emission_#nspt#: <<: *default_aqm - command: '&LOAD_MODULES_RUN_TASK_FP; "nexus_emission" "&HOMEdir;/jobs/JSRW_NEXUS_EMISSION"' + command: '&LOAD_MODULES_RUN_TASK; "nexus_emission" "&HOMEdir;/jobs/JSRW_NEXUS_EMISSION"' join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' nnodes: '{{ task_nexus_emission.NNODES_NEXUS_EMISSION }}' ppn: '{{ task_nexus_emission.PPN_NEXUS_EMISSION // 1 }}' @@ -68,7 +68,7 @@ metatask_nexus_emission: task_nexus_post_split: <<: *default_aqm - command: '&LOAD_MODULES_RUN_TASK_FP; "nexus_post_split" "&HOMEdir;/jobs/JSRW_NEXUS_POST_SPLIT"' + command: '&LOAD_MODULES_RUN_TASK; "nexus_post_split" "&HOMEdir;/jobs/JSRW_NEXUS_POST_SPLIT"' join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' dependency: metataskdep: @@ -77,13 +77,13 @@ task_nexus_post_split: task_fire_emission: <<: *default_aqm - command: '&LOAD_MODULES_RUN_TASK_FP; "fire_emission" "&HOMEdir;/jobs/JSRW_FIRE_EMISSION"' + command: '&LOAD_MODULES_RUN_TASK; "fire_emission" "&HOMEdir;/jobs/JSRW_FIRE_EMISSION"' join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' memory: 2G task_point_source: <<: *default_aqm - command: '&LOAD_MODULES_RUN_TASK_FP; "point_source" "&HOMEdir;/jobs/JSRW_POINT_SOURCE"' + command: '&LOAD_MODULES_RUN_TASK; "point_source" "&HOMEdir;/jobs/JSRW_POINT_SOURCE"' join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' walltime: 01:00:00 dependency: @@ -101,7 +101,7 @@ task_aqm_ics_ext: attrs: cycledefs: at_start maxtries: '2' - command: '&LOAD_MODULES_RUN_TASK_FP; "aqm_ics" "&HOMEdir;/jobs/JSRW_AQM_ICS"' + command: '&LOAD_MODULES_RUN_TASK; "aqm_ics" "&HOMEdir;/jobs/JSRW_AQM_ICS"' envars: <<: *default_vars PREV_CYCLE_DIR: '&WARMSTART_CYCLE_DIR;' @@ -127,7 +127,7 @@ task_aqm_ics: attrs: cycledefs: cycled_from_second maxtries: '2' - command: '&LOAD_MODULES_RUN_TASK_FP; "aqm_ics" "&HOMEdir;/jobs/JSRW_AQM_ICS"' + command: '&LOAD_MODULES_RUN_TASK; "aqm_ics" "&HOMEdir;/jobs/JSRW_AQM_ICS"' 
envars: <<: *default_vars PREV_CYCLE_DIR: '&COMIN_DIR;' @@ -150,7 +150,7 @@ task_aqm_ics: task_aqm_lbcs: <<: *default_aqm - command: '&LOAD_MODULES_RUN_TASK_FP; "aqm_lbcs" "&HOMEdir;/jobs/JSRW_AQM_LBCS"' + command: '&LOAD_MODULES_RUN_TASK; "aqm_lbcs" "&HOMEdir;/jobs/JSRW_AQM_LBCS"' join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' ppn: 24 dependency: diff --git a/parm/wflow/coldstart.yaml b/parm/wflow/coldstart.yaml index ceefe865e6..6fad0b8d83 100644 --- a/parm/wflow/coldstart.yaml +++ b/parm/wflow/coldstart.yaml @@ -20,7 +20,7 @@ default_task: &default_task task_get_extrn_ics: <<: *default_task - command: '&LOAD_MODULES_RUN_TASK_FP; "get_extrn_ics" "&JOBSdir;/JREGIONAL_GET_EXTRN_MDL_FILES"' + command: '&LOAD_MODULES_RUN_TASK; "get_extrn_ics" "&JOBSdir;/JREGIONAL_GET_EXTRN_MDL_FILES"' attrs: cycledefs: forecast maxtries: '2' @@ -51,7 +51,7 @@ task_get_extrn_ics: task_get_extrn_lbcs: <<: *default_task - command: '&LOAD_MODULES_RUN_TASK_FP; "get_extrn_lbcs" "&JOBSdir;/JREGIONAL_GET_EXTRN_MDL_FILES"' + command: '&LOAD_MODULES_RUN_TASK; "get_extrn_lbcs" "&JOBSdir;/JREGIONAL_GET_EXTRN_MDL_FILES"' attrs: cycledefs: forecast maxtries: '2' @@ -85,7 +85,7 @@ metatask_run_ensemble: mem: '{% if global.DO_ENSEMBLE %}{%- for m in range(1, global.NUM_ENS_MEMBERS+1) -%}{{ "%03d "%m }}{%- endfor -%} {% else %}{{ "000"|string }}{% endif %}' task_make_ics_mem#mem#: <<: *default_task - command: '&LOAD_MODULES_RUN_TASK_FP; "make_ics" "&JOBSdir;/JREGIONAL_MAKE_ICS"' + command: '&LOAD_MODULES_RUN_TASK; "make_ics" "&JOBSdir;/JREGIONAL_MAKE_ICS"' envars: <<: *default_vars SLASH_ENSMEM_SUBDIR: '&SLASH_ENSMEM_SUBDIR;' @@ -124,7 +124,7 @@ metatask_run_ensemble: task_make_lbcs_mem#mem#: <<: *default_task - command: '&LOAD_MODULES_RUN_TASK_FP; "make_lbcs" "&JOBSdir;/JREGIONAL_MAKE_LBCS"' + command: '&LOAD_MODULES_RUN_TASK; "make_lbcs" "&JOBSdir;/JREGIONAL_MAKE_LBCS"' envars: <<: *default_vars SLASH_ENSMEM_SUBDIR: '&SLASH_ENSMEM_SUBDIR;' @@ -142,7 +142,7 @@ metatask_run_ensemble: 
task_run_fcst_mem#mem#: <<: *default_task - command: '&LOAD_MODULES_RUN_TASK_FP; "run_fcst" "&JOBSdir;/JREGIONAL_RUN_FCST"' + command: '&LOAD_MODULES_RUN_TASK; "run_fcst" "&JOBSdir;/JREGIONAL_RUN_FCST"' envars: <<: *default_vars SLASH_ENSMEM_SUBDIR: '&SLASH_ENSMEM_SUBDIR;' diff --git a/parm/wflow/default_workflow.yaml b/parm/wflow/default_workflow.yaml index c79415b3be..e37fdae1ea 100644 --- a/parm/wflow/default_workflow.yaml +++ b/parm/wflow/default_workflow.yaml @@ -11,7 +11,7 @@ rocoto: HOMEdir: '{{ user.HOMEdir }}' JOBSdir: '{{ user.JOBSdir }}' KEEPDATA: '{{ nco.KEEPDATA_default }}' - LOAD_MODULES_RUN_TASK_FP: '{{ workflow.LOAD_MODULES_RUN_TASK_FP }}' + LOAD_MODULES_RUN_TASK: '{{ workflow.LOAD_MODULES_RUN_TASK_FP }} {{ user.MACHINE }}' LOGEXT: ".log" NET: '{{ nco.NET_default }}' MRMS_OBS_DIR: '{{ platform.MRMS_OBS_DIR }}' diff --git a/parm/wflow/plot.yaml b/parm/wflow/plot.yaml index 6dad3e0dfa..445d238c15 100644 --- a/parm/wflow/plot.yaml +++ b/parm/wflow/plot.yaml @@ -26,7 +26,7 @@ default_task_plot: &default_task task_plot_allvars: <<: *default_task - command: '&LOAD_MODULES_RUN_TASK_FP; "plot_allvars" "&JOBSdir;/JREGIONAL_PLOT_ALLVARS"' + command: '&LOAD_MODULES_RUN_TASK; "plot_allvars" "&JOBSdir;/JREGIONAL_PLOT_ALLVARS"' join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' dependency: or_do_post: &post_files_exist diff --git a/parm/wflow/post.yaml b/parm/wflow/post.yaml index 5672e7343f..114e5de377 100644 --- a/parm/wflow/post.yaml +++ b/parm/wflow/post.yaml @@ -3,7 +3,7 @@ default_task_post: &default_task attrs: cycledefs: '#cycledef#' maxtries: '2' - command: '&LOAD_MODULES_RUN_TASK_FP; "run_post" "&JOBSdir;/JREGIONAL_RUN_POST"' + command: '&LOAD_MODULES_RUN_TASK; "run_post" "&JOBSdir;/JREGIONAL_RUN_POST"' envars: &default_vars GLOBAL_VAR_DEFNS_FP: '&GLOBAL_VAR_DEFNS_FP;' USHdir: '&USHdir;' diff --git a/parm/wflow/prdgen.yaml b/parm/wflow/prdgen.yaml index 6b9f7cd4f6..3f2026a45f 100644 --- a/parm/wflow/prdgen.yaml +++ b/parm/wflow/prdgen.yaml @@ -10,7 
+10,7 @@ metatask_run_prdgen: attrs: cycledefs: '#cycledef#' maxtries: 1 - command: '&LOAD_MODULES_RUN_TASK_FP; "run_prdgen" "&JOBSdir;/JREGIONAL_RUN_PRDGEN"' + command: '&LOAD_MODULES_RUN_TASK; "run_prdgen" "&JOBSdir;/JREGIONAL_RUN_PRDGEN"' envars: GLOBAL_VAR_DEFNS_FP: '&GLOBAL_VAR_DEFNS_FP;' USHdir: '&USHdir;' diff --git a/parm/wflow/prep.yaml b/parm/wflow/prep.yaml index c9d5549909..a0c6e3119a 100644 --- a/parm/wflow/prep.yaml +++ b/parm/wflow/prep.yaml @@ -24,12 +24,12 @@ default_task_prep: &default_task task_make_grid: <<: *default_task - command: '&LOAD_MODULES_RUN_TASK_FP; "make_grid" "&JOBSdir;/JREGIONAL_MAKE_GRID"' + command: '&LOAD_MODULES_RUN_TASK; "make_grid" "&JOBSdir;/JREGIONAL_MAKE_GRID"' join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' task_make_orog: <<: *default_task - command: '&LOAD_MODULES_RUN_TASK_FP; "make_orog" "&JOBSdir;/JREGIONAL_MAKE_OROG"' + command: '&LOAD_MODULES_RUN_TASK; "make_orog" "&JOBSdir;/JREGIONAL_MAKE_OROG"' join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' dependency: or: &make_grid_satisfied @@ -47,7 +47,7 @@ task_make_orog: task_make_sfc_climo: <<: *default_task - command: '&LOAD_MODULES_RUN_TASK_FP; "make_sfc_climo" "&JOBSdir;/JREGIONAL_MAKE_SFC_CLIMO"' + command: '&LOAD_MODULES_RUN_TASK; "make_sfc_climo" "&JOBSdir;/JREGIONAL_MAKE_SFC_CLIMO"' envars: <<: *default_envars join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' diff --git a/parm/wflow/test.yaml b/parm/wflow/test.yaml index 716665b228..9c084d6875 100644 --- a/parm/wflow/test.yaml +++ b/parm/wflow/test.yaml @@ -29,7 +29,7 @@ metatask_integration_test: mem: '{% if global.DO_ENSEMBLE %}{%- for m in range(1, global.NUM_ENS_MEMBERS+1) -%}{{ "%03d "%m }}{%- endfor -%} {% else %}{{ "000"|string }}{% endif %}' task_integration_test_mem#mem#: <<: *default_task - command: '&LOAD_MODULES_RUN_TASK_FP; "integration_test" "&JOBSdir;/JREGIONAL_INTEGRATION_TEST"' + command: '&LOAD_MODULES_RUN_TASK; "integration_test" "&JOBSdir;/JREGIONAL_INTEGRATION_TEST"' join: 
!cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' dependency: and_run_fcst: diff --git a/parm/wflow/verify_det.yaml b/parm/wflow/verify_det.yaml index e82d7c61e1..a62adb4481 100644 --- a/parm/wflow/verify_det.yaml +++ b/parm/wflow/verify_det.yaml @@ -31,7 +31,7 @@ metatask_GridStat_CCPA_all_accums_all_mems: <<: *default_task_verify_det attrs: maxtries: '2' - command: '&LOAD_MODULES_RUN_TASK_FP; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX"' + command: '&LOAD_MODULES_RUN_TASK; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX"' envars: <<: *default_vars OBS_DIR: '&CCPA_OBS_DIR;' @@ -63,7 +63,7 @@ metatask_GridStat_NOHRSC_all_accums_all_mems: <<: *default_task_verify_det attrs: maxtries: '2' - command: '&LOAD_MODULES_RUN_TASK_FP; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX"' + command: '&LOAD_MODULES_RUN_TASK; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX"' envars: <<: *default_vars OBS_DIR: '&NOHRSC_OBS_DIR;' @@ -93,7 +93,7 @@ metatask_GridStat_MRMS_all_mems: VAR: '{% for var in verification.VX_FIELDS %}{% if var in ["REFC", "RETOP"] %}{{ "%s " % var }}{% endif %}{% endfor %}' task_run_MET_GridStat_vx_#VAR#_mem#mem#: <<: *default_task_verify_det - command: '&LOAD_MODULES_RUN_TASK_FP; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX"' + command: '&LOAD_MODULES_RUN_TASK; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX"' envars: <<: *default_vars OBS_DIR: '&MRMS_OBS_DIR;' @@ -124,7 +124,7 @@ metatask_PointStat_NDAS_all_mems: VAR: '{% for var in verification.VX_FIELDS %}{% if var in ["ADPSFC", "ADPUPA"] %}{{ "%s " % var }}{% endif %}{% endfor %}' task_run_MET_PointStat_vx_#VAR#_mem#mem#: <<: *default_task_verify_det - command: '&LOAD_MODULES_RUN_TASK_FP; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX"' + command: '&LOAD_MODULES_RUN_TASK; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX"' envars: <<: *default_vars OBS_DIR: '&NDAS_OBS_DIR;' diff 
--git a/parm/wflow/verify_ens.yaml b/parm/wflow/verify_ens.yaml index 18b23a1eb0..71bc20b3b0 100644 --- a/parm/wflow/verify_ens.yaml +++ b/parm/wflow/verify_ens.yaml @@ -26,7 +26,7 @@ metatask_GenEnsProd_EnsembleStat_CCPA: ACCUM_HH: '{% for ah in verification.VX_APCP_ACCUMS_HRS %}{% if workflow.FCST_LEN_HRS >= ah %}{{ "%02d " % ah }}{% endif %}{% endfor %}' task_run_MET_GenEnsProd_vx_APCP#ACCUM_HH#h: &task_GenEnsProd_CCPA <<: *default_task_verify_ens - command: '&LOAD_MODULES_RUN_TASK_FP; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GENENSPROD_OR_ENSEMBLESTAT"' + command: '&LOAD_MODULES_RUN_TASK; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GENENSPROD_OR_ENSEMBLESTAT"' envars: &envars_GenEnsProd_CCPA <<: *default_vars ACCUM_HH: '#ACCUM_HH#' @@ -63,7 +63,7 @@ metatask_GenEnsProd_EnsembleStat_NOHRSC: ACCUM_HH: '{% for ah in verification.VX_ASNOW_ACCUMS_HRS %}{% if workflow.FCST_LEN_HRS >= ah %}{{ "%02d " % ah }}{% endif %}{% endfor %}' task_run_MET_GenEnsProd_vx_ASNOW#ACCUM_HH#h: &task_GenEnsProd_NOHRSC <<: *default_task_verify_ens - command: '&LOAD_MODULES_RUN_TASK_FP; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GENENSPROD_OR_ENSEMBLESTAT"' + command: '&LOAD_MODULES_RUN_TASK; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GENENSPROD_OR_ENSEMBLESTAT"' envars: &envars_GenEnsProd_NOHRSC <<: *default_vars ACCUM_HH: '#ACCUM_HH#' @@ -101,7 +101,7 @@ metatask_GenEnsProd_EnsembleStat_MRMS: VAR: '{% for var in verification.VX_FIELDS %}{% if var in ["REFC", "RETOP"] %}{{ "%s " % var }}{% endif %}{% endfor %}' task_run_MET_GenEnsProd_vx_#VAR#: &task_GenEnsProd_MRMS <<: *default_task_verify_ens - command: '&LOAD_MODULES_RUN_TASK_FP; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GENENSPROD_OR_ENSEMBLESTAT"' + command: '&LOAD_MODULES_RUN_TASK; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GENENSPROD_OR_ENSEMBLESTAT"' envars: &envars_GenEnsProd_MRMS <<: *default_vars ACCUM_HH: '01' @@ -137,7 +137,7 @@ metatask_GenEnsProd_EnsembleStat_NDAS: VAR: '{% for var in verification.VX_FIELDS %}{% if var in ["ADPSFC", "ADPUPA"] %}{{ 
"%s " % var }}{% endif %}{% endfor %}' task_run_MET_GenEnsProd_vx_#VAR#: &task_GenEnsProd_NDAS <<: *default_task_verify_ens - command: '&LOAD_MODULES_RUN_TASK_FP; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GENENSPROD_OR_ENSEMBLESTAT"' + command: '&LOAD_MODULES_RUN_TASK; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GENENSPROD_OR_ENSEMBLESTAT"' envars: &envars_GenEnsProd_NDAS <<: *default_vars OBS_DIR: '&NDAS_OBS_DIR;' @@ -178,7 +178,7 @@ metatask_GridStat_CCPA_ensmeanprob_all_accums: ACCUM_HH: '{% for ah in verification.VX_APCP_ACCUMS_HRS %}{% if workflow.FCST_LEN_HRS >= ah %}{{ "%02d " % ah }}{% endif %}{% endfor %}' task_run_MET_GridStat_vx_ens#statlc#_APCP#ACCUM_HH#h: <<: *default_task_verify_ens - command: '&LOAD_MODULES_RUN_TASK_FP; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX_ENS#stat#"' + command: '&LOAD_MODULES_RUN_TASK; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX_ENS#stat#"' envars: <<: *default_vars OBS_DIR: '&CCPA_OBS_DIR;' @@ -202,7 +202,7 @@ metatask_GridStat_NOHRSC_ensmeanprob_all_accums: ACCUM_HH: '{% for ah in verification.VX_ASNOW_ACCUMS_HRS %}{% if workflow.FCST_LEN_HRS >= ah %}{{ "%02d " % ah }}{% endif %}{% endfor %}' task_run_MET_GridStat_vx_ens#statlc#_ASNOW#ACCUM_HH#h: <<: *default_task_verify_ens - command: '&LOAD_MODULES_RUN_TASK_FP; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX_ENS#stat#"' + command: '&LOAD_MODULES_RUN_TASK; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX_ENS#stat#"' envars: <<: *default_vars OBS_DIR: '&NOHRSC_OBS_DIR;' @@ -222,7 +222,7 @@ metatask_GridStat_MRMS_ensprob: VAR: '{% for var in verification.VX_FIELDS %}{% if var in ["REFC", "RETOP"] %}{{ "%s " % var }}{% endif %}{% endfor %}' task_run_MET_GridStat_vx_ensprob_#VAR#: <<: *default_task_verify_ens - command: '&LOAD_MODULES_RUN_TASK_FP; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX_ENSPROB"' + command: '&LOAD_MODULES_RUN_TASK; "run_vx" 
"&JOBSdir;/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX_ENSPROB"' envars: <<: *default_vars ACCUM_HH: '01' @@ -246,7 +246,7 @@ metatask_PointStat_NDAS_ensmeanprob: VAR: '{% for var in verification.VX_FIELDS %}{% if var in ["ADPSFC", "ADPUPA"] %}{{ "%s " % var }}{% endif %}{% endfor %}' task_run_MET_PointStat_vx_ens#statlc#_#VAR#: <<: *default_task_verify_ens - command: '&LOAD_MODULES_RUN_TASK_FP; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX_ENS#stat#"' + command: '&LOAD_MODULES_RUN_TASK; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX_ENS#stat#"' envars: <<: *default_vars OBS_DIR: '&NDAS_OBS_DIR;' diff --git a/parm/wflow/verify_pre.yaml b/parm/wflow/verify_pre.yaml index b7511bf63f..0d4e1c2448 100644 --- a/parm/wflow/verify_pre.yaml +++ b/parm/wflow/verify_pre.yaml @@ -23,7 +23,7 @@ default_task_verify_pre: &default_task_verify_pre task_get_obs_ccpa: <<: *default_task_verify_pre - command: '&LOAD_MODULES_RUN_TASK_FP; "get_obs" "&JOBSdir;/JREGIONAL_GET_VERIF_OBS"' + command: '&LOAD_MODULES_RUN_TASK; "get_obs" "&JOBSdir;/JREGIONAL_GET_VERIF_OBS"' envars: <<: *default_vars ACCUM_HH: '01' @@ -37,7 +37,7 @@ task_get_obs_ccpa: task_get_obs_nohrsc: <<: *default_task_verify_pre - command: '&LOAD_MODULES_RUN_TASK_FP; "get_obs" "&JOBSdir;/JREGIONAL_GET_VERIF_OBS"' + command: '&LOAD_MODULES_RUN_TASK; "get_obs" "&JOBSdir;/JREGIONAL_GET_VERIF_OBS"' envars: <<: *default_vars OBS_DIR: '&NOHRSC_OBS_DIR;' @@ -50,7 +50,7 @@ task_get_obs_nohrsc: task_get_obs_mrms: <<: *default_task_verify_pre - command: '&LOAD_MODULES_RUN_TASK_FP; "get_obs" "&JOBSdir;/JREGIONAL_GET_VERIF_OBS"' + command: '&LOAD_MODULES_RUN_TASK; "get_obs" "&JOBSdir;/JREGIONAL_GET_VERIF_OBS"' envars: <<: *default_vars OBS_DIR: '&MRMS_OBS_DIR;' @@ -69,7 +69,7 @@ task_get_obs_ndas: OBS_DIR: '&NDAS_OBS_DIR;' OBTYPE: 'NDAS' FHR: '{% for h in range(0, workflow.FCST_LEN_HRS+1) %}{{ " %02d" % h }}{% endfor %}' - command: '&LOAD_MODULES_RUN_TASK_FP; "get_obs" "&JOBSdir;/JREGIONAL_GET_VERIF_OBS"' 
+ command: '&LOAD_MODULES_RUN_TASK; "get_obs" "&JOBSdir;/JREGIONAL_GET_VERIF_OBS"' queue: "&QUEUE_HPSS;" native: '{% if platform.get("SCHED_NATIVE_CMD_HPSS") %}{{ platform.SCHED_NATIVE_CMD_HPSS }}{% else %}{{ platform.SCHED_NATIVE_CMD}}{% endif %}' partition: '{% if platform.get("PARTITION_HPSS") %}&PARTITION_HPSS;{% else %}None{% endif %}' @@ -80,7 +80,7 @@ task_run_MET_Pb2nc_obs: attrs: cycledefs: forecast maxtries: '2' - command: '&LOAD_MODULES_RUN_TASK_FP; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_PB2NC_OBS"' + command: '&LOAD_MODULES_RUN_TASK; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_PB2NC_OBS"' envars: <<: *default_vars VAR: ADPSFC @@ -110,7 +110,7 @@ metatask_PcpCombine_obs: attrs: cycledefs: forecast maxtries: '2' - command: '&LOAD_MODULES_RUN_TASK_FP; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_PCPCOMBINE"' + command: '&LOAD_MODULES_RUN_TASK; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_PCPCOMBINE"' envars: <<: *default_vars VAR: APCP @@ -140,7 +140,7 @@ metatask_check_post_output_all_mems: attrs: cycledefs: forecast maxtries: '1' - command: '&LOAD_MODULES_RUN_TASK_FP; "run_vx" "&JOBSdir;/JREGIONAL_CHECK_POST_OUTPUT"' + command: '&LOAD_MODULES_RUN_TASK; "run_vx" "&JOBSdir;/JREGIONAL_CHECK_POST_OUTPUT"' envars: <<: *default_vars VAR: APCP @@ -221,7 +221,7 @@ metatask_PcpCombine_fcst_APCP_all_accums_all_mems: attrs: cycledefs: forecast maxtries: '2' - command: '&LOAD_MODULES_RUN_TASK_FP; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_PCPCOMBINE"' + command: '&LOAD_MODULES_RUN_TASK; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_PCPCOMBINE"' envars: <<: *default_vars VAR: APCP @@ -249,7 +249,7 @@ metatask_PcpCombine_fcst_ASNOW_all_accums_all_mems: attrs: cycledefs: forecast maxtries: '2' - command: '&LOAD_MODULES_RUN_TASK_FP; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_PCPCOMBINE"' + command: '&LOAD_MODULES_RUN_TASK; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_PCPCOMBINE"' envars: <<: *default_vars VAR: ASNOW diff --git a/scripts/exregional_check_post_output.sh b/scripts/exregional_check_post_output.sh index 
ba0d141c5d..320311cc94 100755 --- a/scripts/exregional_check_post_output.sh +++ b/scripts/exregional_check_post_output.sh @@ -1,5 +1,43 @@ #!/usr/bin/env bash +# +#----------------------------------------------------------------------- +# +# The ex-script for checking the post output. +# +# Run-time environment variables: +# +# ACCUM_HH +# CDATE +# ENSMEM_INDX +# GLOBAL_VAR_DEFNS_FP +# VAR +# +# Experiment variables +# +# user: +# USHdir +# +# workflow: +# FCST_LEN_HRS +# +# global: +# DO_ENSEMBLE +# ENS_TIME_LAG_HRS +# +# verification: +# FCST_FN_TEMPLATE +# FCST_SUBDIR_TEMPLATE +# NUM_MISSING_FCST_FILES_MAX +# VX_FCST_INPUT_BASEDIR +# VX_NDIGITS_ENSMEM_NAMES +# +# constants: +# SECS_PER_HOUR +# +#----------------------------------------------------------------------- +# + # #----------------------------------------------------------------------- # @@ -8,7 +46,9 @@ #----------------------------------------------------------------------- # . $USHdir/source_util_funcs.sh -source_config_for_task "task_run_met_pcpcombine|task_run_post" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco workflow global verification constants task_run_post ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # @@ -64,7 +104,7 @@ user-staged. 
#----------------------------------------------------------------------- # i="0" -if [ "${DO_ENSEMBLE}" = "TRUE" ]; then +if [ $(boolify "${DO_ENSEMBLE}") = "TRUE" ]; then i=$( bc -l <<< "${ENSMEM_INDX}-1" ) fi time_lag=$( bc -l <<< "${ENS_TIME_LAG_HRS[$i]}*${SECS_PER_HOUR}" ) diff --git a/scripts/exregional_get_extrn_mdl_files.sh b/scripts/exregional_get_extrn_mdl_files.sh index 018a30c285..96c3136e33 100755 --- a/scripts/exregional_get_extrn_mdl_files.sh +++ b/scripts/exregional_get_extrn_mdl_files.sh @@ -1,5 +1,65 @@ #!/usr/bin/env bash + +# +#----------------------------------------------------------------------- +# +# The ex-script for getting the model files that will be used for either +# initial conditions or lateral boundary conditions for the experiment. +# +# Run-time environment variables: +# +# CDATE +# COMIN +# cyc +# DATA +# EXTRN_MDL_CDATE +# EXTRN_MDL_NAME +# EXTRN_MDL_STAGING_DIR +# GLOBAL_VAR_DEFNS_FP +# ICS_OR_LBCS +# NET +# PDY +# TIME_OFFSET_HRS +# +# Experiment variables +# +# user: +# MACHINE +# PARMdir +# RUN_ENVIR +# USHdir +# +# platform: +# EXTRN_MDL_DATA_STORES +# +# workflow: +# DATE_FIRST_CYCL +# EXTRN_MDL_VAR_DEFNS_FN +# FCST_LEN_CYCL +# INCR_CYCL_FREQ +# SYMLINK_FIX_FILES +# +# task_get_extrn_lbcs: +# EXTRN_MDL_FILES_LBCS +# EXTRN_MDL_SOURCE_BASEDIR_LBCS +# EXTRN_MDL_SYSBASEDIR_LBCS +# FV3GFS_FILE_FMT_LBCS +# LBC_SPEC_INTVL_HRS +# +# task_get_extrn_ics: +# EXTRN_MDL_FILES_ICS +# EXTRN_MDL_SOURCE_BASEDIR_ICS +# EXTRN_MDL_SYSBASEDIR_ICS +# FV3GFS_FILE_FMT_ICS +# +# global: +# DO_ENSEMBLE +# NUM_ENS_MEMBERS +# +#----------------------------------------------------------------------- +# + # #----------------------------------------------------------------------- # @@ -8,7 +68,11 @@ #----------------------------------------------------------------------- # . 
$USHdir/source_util_funcs.sh -source_config_for_task "task_get_extrn_ics|task_get_extrn_lbcs" ${GLOBAL_VAR_DEFNS_FP} + +for sect in user nco platform workflow global task_get_extrn_lbcs \ + task_get_extrn_ics ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # @@ -147,12 +211,12 @@ if [ -n "${input_file_path:-}" ] ; then --input_file_path ${input_file_path}" fi -if [ $SYMLINK_FIX_FILES = "TRUE" ]; then +if [ $(boolify $SYMLINK_FIX_FILES) = "TRUE" ]; then additional_flags="$additional_flags \ --symlink" fi -if [ $DO_ENSEMBLE == "TRUE" ] ; then +if [ $(boolify $DO_ENSEMBLE) = "TRUE" ] ; then mem_dir="/mem{mem:03d}" member_list=(1 ${NUM_ENS_MEMBERS}) additional_flags="$additional_flags \ @@ -222,7 +286,7 @@ if [ "${EXTRN_MDL_NAME}" = "GEFS" ]; then for num in $(seq -f "%02g" ${NUM_ENS_MEMBERS}); do sorted_fn=( ) for fcst_hr in "${all_fcst_hrs_array[@]}"; do - # Read in filenames from $EXTRN_MDL_FNS and sort them + # Read in filenames from EXTRN_MDL_FNS and sort them base_path="${EXTRN_MDL_STAGING_DIR}/mem`printf %03d $num`" filenames_array=`awk -F= '/EXTRN_MDL_FNS/{print $2}' $base_path/${EXTRN_DEFNS}` for filename in ${filenames_array[@]}; do diff --git a/scripts/exregional_get_verif_obs.sh b/scripts/exregional_get_verif_obs.sh index a74f11cd3a..6ad6aaed0e 100755 --- a/scripts/exregional_get_verif_obs.sh +++ b/scripts/exregional_get_verif_obs.sh @@ -1,5 +1,28 @@ #!/usr/bin/env bash +# +#----------------------------------------------------------------------- +# +# The ex-script that checks, pulls, and stages observation data for +# model verification. 
+# +# Run-time environment variables: +# +# FHR +# GLOBAL_VAR_DEFNS_FP +# OBS_DIR +# OBTYPE +# PDY +# VAR +# +# Experiment variables +# +# user: +# USHdir +# PARMdir +# +#----------------------------------------------------------------------- + # #----------------------------------------------------------------------- # @@ -8,7 +31,9 @@ #----------------------------------------------------------------------- # . $USHdir/source_util_funcs.sh -source_config_for_task " " ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # diff --git a/scripts/exregional_integration_test.py b/scripts/exregional_integration_test.py index f0ac3d9af6..996cf6320e 100755 --- a/scripts/exregional_integration_test.py +++ b/scripts/exregional_integration_test.py @@ -4,16 +4,16 @@ #### Python Script Documentation Block # # Script name: exregional_integration_test.py -# Script description: Ensures the correct number of netcdf files are generated +# Script description: Ensures the correct number of netcdf files are generated # for each experiment # # Author: Eddie Snyder Org: NOAA EPIC Date: 2024-02-05 -# +# # Instructions: 1. Pass the appropriate info for the required arguments: # --fcst_dir=/path/to/forecast/files # --fcst_len= # 2. Run script with arguments -# +# # Notes/future work: - Currently SRW App only accepts netcdf as the UFS WM # output file format. If that changes, then additional # logic is needed to address the other file formats. diff --git a/scripts/exregional_make_grid.sh b/scripts/exregional_make_grid.sh index c1876651d8..104875f8dc 100755 --- a/scripts/exregional_make_grid.sh +++ b/scripts/exregional_make_grid.sh @@ -1,5 +1,99 @@ #!/usr/bin/env bash + +# +#----------------------------------------------------------------------- +# +# This script generates NetCDF-formatted grid files required as input +# the FV3 model configured for the regional domain. 
+# +# The output of this script is placed in a directory defined by GRID_DIR. +# +# More about the grid for regional configurations of FV3: +# +# a) This script creates grid files for tile 7 (reserved for the +# regional grid located somewhere within tile 6 of the 6 global +# tiles). +# +# b) Regional configurations of FV3 need two grid files, one with 3 +# halo cells and one with 4 halo cells. The width of the halo is +# the number of cells in the direction perpendicular to the +# boundary. +# +# c) The tile 7 grid file that this script creates includes a halo, +# with at least 4 cells to accommodate this requirement. The halo +# is made thinner in a subsequent step called "shave". +# +# d) We will let NHW denote the width of the wide halo that is wider +# than the required 3- or 4-cell halos. (NHW; N=number of cells, +# H=halo, W=wide halo) +# +# e) T7 indicates the cell count on tile 7. +# +# +# This script does the following: +# +# - Create the grid, either an ESGgrid with the regional_esg_grid +# executable or a GFDL-type grid with the hgrid executable +# - Calculate the regional grid's global uniform cubed-sphere grid +# equivalent resolution with the global_equiv_resol executable +# - Use the shave executable to reduce the halo to 3 and 4 cells +# - Call an ush script that runs the make_solo_mosaic executable +# +# Run-time environment variables: +# +# DATA +# GLOBAL_VAR_DEFNS_FP +# REDIRECT_OUT_ERR +# +# Experiment variables +# +# user: +# EXECdir +# USHdir +# +# platform: +# PRE_TASK_CMDS +# RUN_CMD_SERIAL + +# workflow: +# DOT_OR_USCORE +# GRID_GEN_METHOD +# RES_IN_FIXLAM_FILENAMES +# RGNL_GRID_NML_FN +# VERBOSE +# +# task_make_grid: +# GFDLgrid_NUM_CELLS +# GFDLgrid_USE_NUM_CELLS_IN_FILENAMES +# GRID_DIR +# +# constants: +# NH3 +# NH4 +# TILE_RGNL +# +# grid_params: +# DEL_ANGLE_X_SG +# DEL_ANGLE_Y_SG +# GFDLgrid_REFINE_RATIO +# IEND_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG +# ISTART_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG +# JEND_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG
+# JSTART_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG +# LAT_CTR +# LON_CTR +# NEG_NX_OF_DOM_WITH_WIDE_HALO +# NEG_NY_OF_DOM_WITH_WIDE_HALO +# NHW +# NX +# NY +# PAZI +# STRETCH_FAC +# +#----------------------------------------------------------------------- +# + # #----------------------------------------------------------------------- # @@ -8,7 +102,9 @@ #----------------------------------------------------------------------- # . $USHdir/source_util_funcs.sh -source_config_for_task "task_make_grid" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow constants grid_params task_make_grid ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # @@ -276,6 +372,7 @@ generation executable (exec_fp): 'pazi': ${PAZI} " + # UW takes input from stdin when no -i/--input-config flag is provided (cat << EOF $settings EOF @@ -372,7 +469,7 @@ res_equiv=${res_equiv//$'\n'/} #----------------------------------------------------------------------- # if [ "${GRID_GEN_METHOD}" = "GFDLgrid" ]; then - if [ "${GFDLgrid_USE_NUM_CELLS_IN_FILENAMES}" = "TRUE" ]; then + if [ $(boolify "${GFDLgrid_USE_NUM_CELLS_IN_FILENAMES}") = "TRUE" ]; then CRES="C${GFDLgrid_NUM_CELLS}" else CRES="C${res_equiv}" @@ -380,7 +477,15 @@ if [ "${GRID_GEN_METHOD}" = "GFDLgrid" ]; then elif [ "${GRID_GEN_METHOD}" = "ESGgrid" ]; then CRES="C${res_equiv}" fi -set_file_param "${GLOBAL_VAR_DEFNS_FP}" "CRES" "'$CRES'" + + # UW takes the update values from stdin when no --update-file flag is + # provided. It needs --update-format to do it correctly, though. 
+echo "workflow: {CRES: ${CRES}}" | uw config realize \ + --input-file $GLOBAL_VAR_DEFNS_FP \ + --update-format yaml \ + --output-file $GLOBAL_VAR_DEFNS_FP \ + --verbose + # #----------------------------------------------------------------------- # diff --git a/scripts/exregional_make_ics.sh b/scripts/exregional_make_ics.sh index 875249b107..8cd49076b0 100755 --- a/scripts/exregional_make_ics.sh +++ b/scripts/exregional_make_ics.sh @@ -1,5 +1,83 @@ #!/usr/bin/env bash +# +#----------------------------------------------------------------------- +# +# The ex-scrtipt that sets up and runs chgres_cube for preparing initial +# conditions for the FV3 forecast +# +# Run-time environment variables: +# +# COMIN +# COMOUT +# COMROOT +# DATA +# DATAROOT +# DATA_SHARE +# EXTRN_MDL_CDATE +# GLOBAL_VAR_DEFNS_FP +# INPUT_DATA +# NET +# PDY +# REDIRECT_OUT_ERR +# SLASH_ENSMEM_SUBDIR +# +# Experiment variables +# +# user: +# EXECdir +# MACHINE +# PARMdir +# RUN_ENVIR +# USHdir +# +# platform: +# FIXgsm +# PRE_TASK_CMDS +# RUN_CMD_UTILS +# +# workflow: +# CCPP_PHYS_SUITE +# COLDSTART +# CRES +# DATE_FIRST_CYCL +# DOT_OR_USCORE +# EXTRN_MDL_VAR_DEFNS_FN +# FIXlam +# SDF_USES_RUC_LSM +# SDF_USES_THOMPSON_MP +# THOMPSON_MP_CLIMO_FP +# VERBOSE +# +# task_make_ics: +# FVCOM_DIR +# FVCOM_FILE +# FVCOM_WCSTART +# KMP_AFFINITY_MAKE_ICS +# OMP_NUM_THREADS_MAKE_ICS +# OMP_STACKSIZE_MAKE_ICS +# USE_FVCOM +# VCOORD_FILE +# +# task_get_extrn_ics: +# EXTRN_MDL_NAME_ICS +# FV3GFS_FILE_FMT_ICS +# +# global: +# HALO_BLEND +# +# cpl_aqm_parm: +# CPL_AQM +# +# constants: +# NH0 +# NH4 +# TILE_RGNL +# +#----------------------------------------------------------------------- +# + + # #----------------------------------------------------------------------- # @@ -8,7 +86,9 @@ #----------------------------------------------------------------------- # . 
$USHdir/source_util_funcs.sh -source_config_for_task "task_make_ics|task_get_extrn_ics" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow global cpl_aqm_parm constants task_get_extrn_ics task_make_ics ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # @@ -306,7 +386,7 @@ convert_nst="" nsoill_out="4" if [ "${EXTRN_MDL_NAME_ICS}" = "HRRR" -o \ "${EXTRN_MDL_NAME_ICS}" = "RAP" ] && \ - [ "${SDF_USES_RUC_LSM}" = "TRUE" ]; then + [ $(boolify "${SDF_USES_RUC_LSM}") = "TRUE" ]; then nsoill_out="9" fi # @@ -326,7 +406,7 @@ fi thomp_mp_climo_file="" if [ "${EXTRN_MDL_NAME_ICS}" != "HRRR" -a \ "${EXTRN_MDL_NAME_ICS}" != "RAP" ] && \ - [ "${SDF_USES_THOMPSON_MP}" = "TRUE" ]; then + [ $(boolify "${SDF_USES_THOMPSON_MP}") = "TRUE" ]; then thomp_mp_climo_file="${THOMPSON_MP_CLIMO_FP}" fi # @@ -643,9 +723,9 @@ POST_STEP # #----------------------------------------------------------------------- # -if [ "${CPL_AQM}" = "TRUE" ]; then +if [ $(boolify "${CPL_AQM}") = "TRUE" ]; then COMOUT="${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}" #temporary path, should be removed later - if [ "${COLDSTART}" = "TRUE" ] && [ "${PDY}${cyc}" = "${DATE_FIRST_CYCL:0:10}" ]; then + if [ $(boolify "${COLDSTART}") = "TRUE" ] && [ "${PDY}${cyc}" = "${DATE_FIRST_CYCL:0:10}" ]; then data_trans_path="${COMOUT}" else data_trans_path="${DATA_SHARE}" @@ -667,7 +747,7 @@ fi # #----------------------------------------------------------------------- # -if [ "${USE_FVCOM}" = "TRUE" ]; then +if [ $(boolify "${USE_FVCOM}") = "TRUE" ]; then #Format for fvcom_time: YYYY-MM-DDTHH:00:00.000000 fvcom_exec_fn="fvcom_to_FV3" diff --git a/scripts/exregional_make_lbcs.sh b/scripts/exregional_make_lbcs.sh index 5a2d24bcea..35b4da388a 100755 --- a/scripts/exregional_make_lbcs.sh +++ b/scripts/exregional_make_lbcs.sh @@ -1,5 +1,83 @@ #!/usr/bin/env bash +# 
+#----------------------------------------------------------------------- +# +# The ex-script that sets up and runs chgres_cube for preparing lateral +# boundary conditions for the FV3 forecast +# +# Run-time environment variables: +# +# COMIN +# COMOUT +# COMROOT +# DATA +# DATAROOT +# DATA_SHARE +# EXTRN_MDL_CDATE +# INPUT_DATA +# GLOBAL_VAR_DEFNS_FP +# NET +# PDY +# REDIRECT_OUT_ERR +# SLASH_ENSMEM_SUBDIR +# +# Experiment variables +# +# user: +# EXECdir +# MACHINE +# PARMdir +# RUN_ENVIR +# USHdir +# +# platform: +# FIXgsm +# PRE_TASK_CMDS +# RUN_CMD_UTILS +# +# workflow: +# CCPP_PHYS_SUITE +# COLDSTART +# CRES +# DATE_FIRST_CYCL +# DOT_OR_USCORE +# EXTRN_MDL_VAR_DEFNS_FN +# FIXlam +# SDF_USES_RUC_LSM +# SDF_USES_THOMPSON_MP +# THOMPSON_MP_CLIMO_FP +# VERBOSE +# +# task_get_extrn_lbcs: +# EXTRN_MDL_NAME_LBCS +# FV3GFS_FILE_FMT_LBCS +# +# task_make_lbcs: +# FVCOM_DIR +# FVCOM_FILE +# FVCOM_WCSTART +# KMP_AFFINITY_MAKE_LBCS +# OMP_NUM_THREADS_MAKE_LBCS +# OMP_STACKSIZE_MAKE_LBCS +# USE_FVCOM +# VCOORD_FILE +# +# global: +# HALO_BLEND +# +# cpl_aqm_parm: +# CPL_AQM +# +# constants: +# NH0 +# NH4 +# TILE_RGNL +# +#----------------------------------------------------------------------- +# + + # #----------------------------------------------------------------------- # @@ -8,7 +86,10 @@ #----------------------------------------------------------------------- # .
$USHdir/source_util_funcs.sh -source_config_for_task "task_make_lbcs|task_get_extrn_lbcs" ${GLOBAL_VAR_DEFNS_FP} +set -x +for sect in user nco platform workflow global cpl_aqm_parm constants task_get_extrn_lbcs task_make_lbcs ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # @@ -250,7 +331,7 @@ tracers="\"\"" thomp_mp_climo_file="" if [ "${EXTRN_MDL_NAME_LBCS}" != "HRRR" -a \ "${EXTRN_MDL_NAME_LBCS}" != "RAP" ] && \ - [ "${SDF_USES_THOMPSON_MP}" = "TRUE" ]; then + [ $(boolify "${SDF_USES_THOMPSON_MP}") = "TRUE" ]; then thomp_mp_climo_file="${THOMPSON_MP_CLIMO_FP}" fi # @@ -495,6 +576,7 @@ FORTRAN namelist file has not specified for this external LBC model (EXTRN_MDL_N " nml_fn="fort.41" + # UW takes input from stdin when no -i/--input-config flag is provided (cat << EOF $settings EOF @@ -559,7 +641,7 @@ located in the following directory: lbc_spec_fhrs=( "${EXTRN_MDL_FHRS[$i]}" ) fcst_hhh=$(( ${lbc_spec_fhrs} - ${EXTRN_MDL_LBCS_OFFSET_HRS} )) fcst_hhh_FV3LAM=$( printf "%03d" "$fcst_hhh" ) - if [ "${CPL_AQM}" = "TRUE" ]; then + if [ $(boolify "${CPL_AQM}") = "TRUE" ]; then cp -p gfs.bndy.nc ${DATA_SHARE}/${NET}.${cycle}${dot_ensmem}.gfs_bndy.tile7.f${fcst_hhh_FV3LAM}.nc else mv gfs.bndy.nc ${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.gfs_bndy.tile7.f${fcst_hhh_FV3LAM}.nc diff --git a/scripts/exregional_make_orog.sh b/scripts/exregional_make_orog.sh index 9a3d5da7fc..34b1675d8c 100755 --- a/scripts/exregional_make_orog.sh +++ b/scripts/exregional_make_orog.sh @@ -1,5 +1,86 @@ #!/usr/bin/env bash +# +#----------------------------------------------------------------------- +# +# This ex-script is responsible for creating orography files for the FV3 +# forecast. +# +# The output of this script is placed in a directory defined by OROG_DIR +# +# More about the orog for the regional configuration of the FV3: +# +# a) Only the tile 7 orography file is created. 
+# +# b) This orography file contains a halo of the same width (NHW) +# as the grid file for tile 7 generated by the make_grid script +# +# c) Filtered versions of the orography files are created with the +# same width (NHW) as the unfiltered orography file and the grid +# file. FV3 requires two filtered orography files, one with no +# halo cells and one with 4 halo cells. +# +# This script does the following: +# +# - Create the raw orography files by running the orog executable. +# - Run the orog_gsl executable if any of several GSL-developed +# physics suites is chosen by the user. +# - Run the filter_topo executable on the raw orography files +# - Run the shave executable for the 0- and 4-cell halo orography +# files +# +# Run-time environment variables: +# +# DATA +# GLOBAL_VAR_DEFNS_FP +# REDIRECT_OUT_ERR +# +# Experiment variables +# +# user: +# EXECdir +# USHdir +# +# platform: +# FIXorg +# PRE_TASK_CMDS +# RUN_CMD_SERIAL +# +# workflow: +# CCPP_PHYS_SUITE +# CRES +# DOT_OR_USCORE +# FIXam +# FIXlam +# GRID_GEN_METHOD +# PREEXISTING_DIR_METHOD +# VERBOSE +# +# task_make_orog: +# KMP_AFFINITY_MAKE_OROG +# OMP_NUM_THREADS_MAKE_OROG +# OMP_STACKSIZE_MAKE_OROG +# OROG_DIR +# +# task_make_grid: +# GFDLgrid_NUM_CELLS +# GFDLgrid_STRETCH_FAC +# GFDLgrid_REFINE_RATIO +# +# constants: +# NH0 +# NH4 +# TILE_RGNL +# +# grid_params: +# NHW +# NX +# NY +# STRETCH_FAC +# +#----------------------------------------------------------------------- +# + # #----------------------------------------------------------------------- # @@ -8,7 +89,10 @@ #----------------------------------------------------------------------- # .
$USHdir/source_util_funcs.sh -source_config_for_task "task_make_orog|task_make_grid" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow constants grid_params task_make_grid task_make_orog task_make_grid ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done + # #----------------------------------------------------------------------- # @@ -30,13 +114,7 @@ source_config_for_task "task_make_orog|task_make_grid" ${GLOBAL_VAR_DEFNS_FP} scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" ) scrfunc_fn=$( basename "${scrfunc_fp}" ) scrfunc_dir=$( dirname "${scrfunc_fp}" ) -# -#----------------------------------------------------------------------- -# -# Print message indicating entry into script. -# -#----------------------------------------------------------------------- -# + print_info_msg " ======================================================================== Entering script: \"${scrfunc_fn}\" @@ -54,17 +132,7 @@ This is the ex-script for the task that generates orography files. export KMP_AFFINITY=${KMP_AFFINITY_MAKE_OROG} export OMP_NUM_THREADS=${OMP_NUM_THREADS_MAKE_OROG} export OMP_STACKSIZE=${OMP_STACKSIZE_MAKE_OROG} -# -#----------------------------------------------------------------------- -# -# Load modules and set various computational parameters and directories. -# -# Note: -# These module loads should all be moved to modulefiles. This has been -# done for Hera but must still be done for other machines. -# -#----------------------------------------------------------------------- -# + eval ${PRE_TASK_CMDS} if [ -z "${RUN_CMD_SERIAL:-}" ] ; then @@ -103,9 +171,6 @@ mkdir -p "${shave_dir}" # #----------------------------------------------------------------------- # -# Set the name and path to the executable that generates the raw orography -# file and make sure that it exists. -# exec_fn="orog" exec_fp="$EXECdir/${exec_fn}" if [ ! 
-f "${exec_fp}" ]; then @@ -114,10 +179,7 @@ The executable (exec_fp) for generating the orography file does not exist: exec_fp = \"${exec_fp}\" Please ensure that you've built this executable." fi -# -# Create a temporary (work) directory in which to generate the raw orography -# file and change location to it. -# + DATA="${DATA:-${raw_dir}/tmp}" mkdir -p "${DATA}" cd "${DATA}" @@ -131,15 +193,7 @@ cp ${FIXorg}/gmted2010.30sec.int fort.235 # #----------------------------------------------------------------------- # -# The orography filtering code reads in from the grid mosaic file the -# the number of tiles, the name of the grid file for each tile, and the -# dimensions (nx and ny) of each tile. Next, set the name of the grid -# mosaic file and create a symlink to it in filter_dir. -# -# Note that in the namelist file for the orography filtering code (created -# later below), the mosaic file name is saved in a variable called -# "grid_file". It would have been better to call this "mosaic_file" -# instead so it doesn't get confused with the grid file for a given tile... +# Get the grid file info from the mosaic file # #----------------------------------------------------------------------- # @@ -152,21 +206,15 @@ grid_fp="${FIXlam}/${grid_fn}" # #----------------------------------------------------------------------- # -# Set input parameters for the orography generation executable and write -# them to a text file. +# Set input parameters for the orog executable in a formatted text file. +# The executable takes its parameters via the command line. # -# Note that it doesn't matter what lonb and latb are set to below because -# if we specify an input grid file to the executable read in (which is -# what we do below), then if lonb and latb are not set to the dimensions -# of the grid specified in that file (divided by 2 since the grid file -# specifies a "supergrid"), then lonb and latb effectively get reset to -# the dimensions specified in the grid file. 
+# Note: lonb and latb are placeholders in this case since the program +# uses the ones obtained from the grid file. # #----------------------------------------------------------------------- # mtnres=1 -#lonb=$res -#latb=$res lonb=0 latb=0 jcap=0 @@ -195,15 +243,13 @@ cat "${input_redirect_fn}" # Call the executable to generate the raw orography file corresponding # to tile 7 (the regional domain) only. # -# The following will create an orography file named +# The script moves the output file from its temporary directory to the +# OROG_DIR and names it: # -# oro.${CRES}.tile7.nc +# ${CRES}_raw_orog.tile7.halo${NHW}.nc # -# and will place it in OROG_DIR. Note that this file will include -# orography for a halo of width NHW cells around tile 7. The follow- -# ing will also create a work directory called tile7 under OROG_DIR. -# This work directory can be removed after the orography file has been -# created (it is currently not deleted). +# Note that this file will include orography for a halo of width NHW +# cells around tile 7. # #----------------------------------------------------------------------- # @@ -225,9 +271,7 @@ cd - # #----------------------------------------------------------------------- # -# Move the raw orography file from the temporary directory to raw_dir. -# In the process, rename it such that its name includes CRES and the halo -# width. +# Move the raw orography file and rename it. # #----------------------------------------------------------------------- # @@ -240,9 +284,9 @@ mv "${raw_orog_fp_orig}" "${raw_orog_fp}" # #----------------------------------------------------------------------- # -# Call the code to generate the two orography statistics files (large- -# and small-scale) needed for the drag suite in the FV3_HRRR physics -# suite. +# Call the orog_gsl executable to generate the two orography statistics +# files (large- and small-scale) needed for the drag suite in certain +# GSL physics suites. 
# #----------------------------------------------------------------------- # @@ -321,14 +365,14 @@ fi # resolution of res_regional. These interpolated/extrapolated values are # then used to perform the orography filtering. # -# The above approach works for a GFDLgrid type of grid. To handle ESGgrid -# type grids, we set res in the namelist to the orography filtering code -# the equivalent global uniform cubed-sphere resolution of the regional -# grid, we set stretch_fac to 1 (since the equivalent resolution assumes -# a uniform global grid), and we set refine_ratio to 1. This will cause -# res_regional above to be set to the equivalent global uniform cubed- -# sphere resolution, so the filtering parameter values will be interpolated/ -# extrapolated to that resolution value. +# To handle ESGgrid type grids, we set res in the namelist to the +# orography filtering code the equivalent global uniform cubed-sphere +# resolution of the regional grid, we set stretch_fac to 1 (since the +# equivalent resolution assumes a uniform global grid), and we set +# refine_ratio to 1. This will cause res_regional above to be set to +# the equivalent global uniform cubed-sphere resolution, so the +# filtering parameter values will be interpolated/extrapolated to that +# resolution value. # #----------------------------------------------------------------------- # @@ -346,13 +390,11 @@ if [ "${GRID_GEN_METHOD}" = "GFDLgrid" ]; then # Really depends on what EMC wants to do. res="${GFDLgrid_NUM_CELLS}" -# stretch_fac="${GFDLgrid_STRETCH_FAC}" refine_ratio="${GFDLgrid_REFINE_RATIO}" elif [ "${GRID_GEN_METHOD}" = "ESGgrid" ]; then res="${CRES:1}" -# stretch_fac="${STRETCH_FAC}" refine_ratio="1" fi @@ -368,17 +410,12 @@ The executable (exec_fp) for filtering the raw orography does not exist: Please ensure that you've built this executable." fi # -# The orography filtering executable replaces the contents of the given -# raw orography file with a file containing the filtered orography. 
The -# name of the input raw orography file is in effect specified by the -# namelist variable topo_file; the orography filtering code assumes that -# this name is constructed by taking the value of topo_file and appending -# to it the string ".tile${N}.nc", where N is the tile number (which for -# a regional grid, is always 7). (Note that topo_file may start with a -# a path to the orography file that the filtering code will read in and -# replace.) Thus, we now copy the raw orography file (whose full path is -# specified by raw_orog_fp) to filter_dir and in the process rename it -# such that its new name: +# The filter_topo program overwrites its input file with filtered +# output, which is specified by topo_file in the namelist, but with a +# suffix ".tile7.nc" for the regional configuration. To avoid +# overwriting the output of the orog program, copy its output file to +# the filter_topo working directory and rename it. Here, the name is +# chosen such that it: # # (1) indicates that it contains filtered orography data (because that # is what it will contain once the orography filtering executable @@ -392,21 +429,20 @@ filtered_orog_fp_prefix="${filter_dir}/${filtered_orog_fn_prefix}" filtered_orog_fp="${filtered_orog_fp_prefix}.${fn_suffix_without_halo}" cp "${raw_orog_fp}" "${filtered_orog_fp}" # -# The orography filtering executable looks for the grid file specified -# in the grid mosaic file (more specifically, specified by the gridfiles -# variable in the mosaic file) in the directory in which the executable -# is running. Recall that above, we already extracted the name of the -# grid file from the mosaic file and saved it in the variable grid_fn, -# and we saved the full path to this grid file in the variable grid_fp. -# Thus, we now create a symlink in the filter_dir directory (where the -# filtering executable will run) with the same name as the grid file and -# point it to the actual grid file specified by grid_fp. 
+# The filter_topo program looks for the grid file specified +# in the mosaic file (more specifically, specified by the gridfiles +# variable in the mosaic file) in its own run directory. Make a symlink +# to it. # create_symlink_to_file ${grid_fp} ${filter_dir}/${grid_fn} TRUE # # Create the namelist file (in the filter_dir directory) that the orography # filtering executable will read in. # +# Note that in the namelist file for the orography filtering code (created +# later below), the mosaic file name is saved in a variable called +# "grid_file". It would have been better to call this "mosaic_file" +# instead so it doesn't get confused with the grid file for a given tile. cat > "${filter_dir}/input.nml" < "${filter_dir}/input.nml" < ${nml_fn} + > ${ascii_fn} PREP_STEP -eval ${RUN_CMD_SERIAL} ${exec_fp} < ${nml_fn} ${REDIRECT_OUT_ERR} || \ +eval ${RUN_CMD_SERIAL} ${exec_fp} < ${ascii_fn} ${REDIRECT_OUT_ERR} || \ print_err_msg_exit "\ Call to executable (exec_fp) to generate a (filtered) orography file with a ${NH0}-cell-wide halo from the orography file with a {NHW}-cell-wide halo returned with nonzero exit code: exec_fp = \"${exec_fp}\" -The namelist file (nml_fn) used in this call is in directory shave_dir: - nml_fn = \"${nml_fn}\" +The config file (ascii_fn) used in this call is in directory shave_dir: + ascii_fn = \"${ascii_fn}\" shave_dir = \"${shave_dir}\"" POST_STEP mv ${shaved_fp} ${OROG_DIR} # -# Create an input namelist file for the shave executable to generate an +# Create an input config file for the shave executable to generate an # orography file with a 4-cell-wide halo from the one with a wide halo. # Then call the shave executable. Finally, move the resultant file to # the OROG_DIR directory. @@ -524,21 +554,21 @@ print_info_msg "$VERBOSE" " \"Shaving\" filtered orography file with a ${NHW}-cell-wide halo to obtain a filtered orography file with a ${NH4}-cell-wide halo..." 
-nml_fn="input.shave.orog.halo${NH4}"
+ascii_fn="input.shave.orog.halo${NH4}"
shaved_fp="${shave_dir}/${CRES}${DOT_OR_USCORE}oro_data.tile${TILE_RGNL}.halo${NH4}.nc"
printf "%s %s %s %s %s\n" \
$NX $NY ${NH4} \"${unshaved_fp}\" \"${shaved_fp}\" \
- > ${nml_fn}
+ > ${ascii_fn}
PREP_STEP
-eval ${RUN_CMD_SERIAL} ${exec_fp} < ${nml_fn} ${REDIRECT_OUT_ERR} || \
+eval ${RUN_CMD_SERIAL} ${exec_fp} < ${ascii_fn} ${REDIRECT_OUT_ERR} || \
print_err_msg_exit "\
Call to executable (exec_fp) to generate a (filtered) orography file
with a ${NH4}-cell-wide halo from the orography file with a {NHW}-cell-wide
halo returned with nonzero exit code:
exec_fp = \"${exec_fp}\"
-The namelist file (nml_fn) used in this call is in directory shave_dir:
- nml_fn = \"${nml_fn}\"
+The config file (ascii_fn) used in this call is in directory shave_dir:
+ ascii_fn = \"${ascii_fn}\"
shave_dir = \"${shave_dir}\""
POST_STEP
mv "${shaved_fp}" "${OROG_DIR}"
@@ -549,8 +579,8 @@ cd -
#
#-----------------------------------------------------------------------
#
-# Add link in ORIG_DIR directory to the orography file with a 4-cell-wide
-# halo such that the link name do not contain the halo width. These links
+# Add link in OROG_DIR directory to the orography file with a 4-cell-wide
+# halo such that the link name does not contain the halo width. These links
# are needed by the make_sfc_climo task.
#
# NOTE: It would be nice to modify the sfc_climo_gen_code to read in
@@ -563,13 +593,7 @@ python3 $USHdir/link_fix.py \
--file-group "orog" || \
print_err_msg_exit "\
Call to function to create links to orography files failed."
-#
-#-----------------------------------------------------------------------
-#
-# Print message indicating successful completion of script.
-#
-#-----------------------------------------------------------------------
-#
+
print_info_msg "
========================================================================
Orography files with various halo widths generated successfully!!!
diff --git a/scripts/exregional_make_sfc_climo.sh b/scripts/exregional_make_sfc_climo.sh
index c4ee8f25b1..a916228b1f 100755
--- a/scripts/exregional_make_sfc_climo.sh
+++ b/scripts/exregional_make_sfc_climo.sh
@@ -1,5 +1,52 @@
#!/usr/bin/env bash
+#
+#-----------------------------------------------------------------------
+#
+# This ex-script generates surface climatology files needed to run FV3
+# forecasts.
+#
+# The script runs the sfc_climo_gen UFS Utils program, and links the
+# output to the SFC_CLIMO_DIR directory
+#
+# Run-time environment variables:
+#
+# DATA
+# GLOBAL_VAR_DEFNS_FP
+# REDIRECT_OUT_ERR
+#
+# Experiment variables
+#
+# user:
+# EXECdir
+# USHdir
+#
+# platform:
+# FIXsfc
+# PRE_TASK_CMDS
+# RUN_CMD_UTILS
+#
+# workflow:
+# CRES
+# DOT_OR_USCORE
+# FIXlam
+# VERBOSE
+#
+# task_make_sfc_climo:
+# KMP_AFFINITY_MAKE_SFC_CLIMO
+# OMP_NUM_THREADS_MAKE_SFC_CLIMO
+# OMP_STACKSIZE_MAKE_SFC_CLIMO
+# SFC_CLIMO_DIR
+#
+# constants:
+# GTYPE
+# NH0
+# NH4
+# TILE_RGNL
+#
+#-----------------------------------------------------------------------
+#
+
#
#-----------------------------------------------------------------------
#
@@ -8,7 +55,9 @@
#-----------------------------------------------------------------------
#
. $USHdir/source_util_funcs.sh
-source_config_for_task "task_make_sfc_climo" ${GLOBAL_VAR_DEFNS_FP}
+for sect in user nco platform workflow constants task_make_sfc_climo ; do
+ source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
#
#-----------------------------------------------------------------------
#
diff --git a/scripts/exregional_run_fcst.sh b/scripts/exregional_run_fcst.sh
index f769d4e225..0241dbd728 100755
--- a/scripts/exregional_run_fcst.sh
+++ b/scripts/exregional_run_fcst.sh
@@ -1,5 +1,113 @@
#!/usr/bin/env bash
+
+#
+#-----------------------------------------------------------------------
+#
+# This ex-script is responsible for running the FV3 regional forecast.
+# +# Run-time environment variables: +# +# CDATE +# COMIN +# COMOUT +# COMROOT +# DATA +# DBNROOT +# GLOBAL_VAR_DEFNS_FP +# INPUT_DATA +# NET +# PDY +# REDIRECT_OUT_ERR +# RUN +# SENDDBN +# SLASH_ENSMEM_SUBDIR +# +# Experiment variables +# +# user: +# MACHINE +# PARMdir +# RUN_ENVIR +# USHdir +# +# platform: +# PRE_TASK_CMDS +# RUN_CMD_FCST +# +# workflow: +# CCPP_PHYS_DIR +# CCPP_PHYS_SUITE +# COLDSTART +# CRES +# DATA_TABLE_FN +# DATA_TABLE_FP +# DATE_FIRST_CYCL +# DOT_OR_USCORE +# EXPTDIR +# FCST_LEN_CYCL +# FCST_LEN_HRS +# FIELD_DICT_FP +# FIELD_DICT_FN +# FIELD_TABLE_FN +# FIELD_TABLE_FP +# FIXam +# FIXclim +# FIXlam +# FV3_NML_FN +# FV3_NML_FP +# FV3_NML_STOCH_FP +# INCR_CYCL_FREQ +# PREDEF_GRID_NAME +# SYMLINK_FIX_FILES +# VERBOSE +# +# task_get_extrn_lbcs: +# LBC_SPEC_INTVL_HRS +# +# task_run_fcst: +# DO_FCST_RESTART +# DT_ATMOS +# FV3_EXEC_FP +# KMP_AFFINITY_RUN_FCST +# OMP_NUM_THREADS_RUN_FCST +# OMP_STACKSIZE_RUN_FCST +# PRINT_ESMF +# RESTART_INTERVAL +# USE_MERRA_CLIMO +# WRITE_DOPOST +# +# task_run_post: +# CUSTOM_POST_CONFIG_FP +# DT_SUBHOURLY_POST_MNTS +# POST_OUTPUT_DOMAIN_NAME +# SUB_HOURLY_POST +# USE_CUSTOM_POST_CONFIG_FILE +# +# global: +# DO_ENSEMBLE +# DO_LSM_SPP +# DO_SHUM +# DO_SKEB +# DO_SPP +# DO_SPPT +# +# cpl_aqm_parm: +# AQM_RC_PRODUCT_FN +# CPL_AQM +# +# constants: +# NH0 +# NH3 +# NH4 +# TILE_RGNL +# +# fixed_files: +# CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING +# +#----------------------------------------------------------------------- +# + # #----------------------------------------------------------------------- # @@ -8,7 +116,11 @@ #----------------------------------------------------------------------- # . 
$USHdir/source_util_funcs.sh -source_config_for_task "task_run_fcst|task_run_post|task_get_extrn_ics|task_get_extrn_lbcs" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow global cpl_aqm_parm constants fixed_files \ + task_get_extrn_lbcs task_run_fcst task_run_post ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done + # #----------------------------------------------------------------------- # @@ -57,7 +169,7 @@ export OMP_NUM_THREADS=${OMP_NUM_THREADS_RUN_FCST} export OMP_STACKSIZE=${OMP_STACKSIZE_RUN_FCST} export MPI_TYPE_DEPTH=20 export ESMF_RUNTIME_COMPLIANCECHECK=OFF:depth=4 -if [ "${PRINT_ESMF}" = "TRUE" ]; then +if [ $(boolify "${PRINT_ESMF}") = "TRUE" ]; then export ESMF_RUNTIME_PROFILE=ON export ESMF_RUNTIME_PROFILE_OUTPUT="SUMMARY" fi @@ -227,7 +339,7 @@ cd ${DATA}/INPUT # relative_link_flag="FALSE" -if [ "${CPL_AQM}" = "TRUE" ]; then +if [ $(boolify "${CPL_AQM}") = "TRUE" ]; then COMIN="${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}" #temporary path, should be removed later target="${COMIN}/${NET}.${cycle}${dot_ensmem}.gfs_data.tile${TILE_RGNL}.halo${NH0}.nc" @@ -307,7 +419,7 @@ static) files in the FIXam directory: # isn't really an advantage to using relative symlinks, so we use symlinks # with absolute paths. 
# -if [ "${SYMLINK_FIX_FILES}" == "FALSE" ]; then +if [ $(boolify "${SYMLINK_FIX_FILES}") = "FALSE" ]; then relative_link_flag="TRUE" else relative_link_flag="FALSE" @@ -336,7 +448,7 @@ done # #----------------------------------------------------------------------- # -if [ "${USE_MERRA_CLIMO}" = "TRUE" ]; then +if [ $(boolify "${USE_MERRA_CLIMO}") = "TRUE" ]; then for f_nm_path in ${FIXclim}/*; do f_nm=$( basename "${f_nm_path}" ) pre_f="${f_nm%%.*}" @@ -397,16 +509,16 @@ create_symlink_to_file ${FIELD_TABLE_FP} ${DATA}/${FIELD_TABLE_FN} ${relative_li create_symlink_to_file ${FIELD_DICT_FP} ${DATA}/${FIELD_DICT_FN} ${relative_link_flag} -if [ ${WRITE_DOPOST} = "TRUE" ]; then +if [ $(boolify ${WRITE_DOPOST}) = "TRUE" ]; then cp ${PARMdir}/upp/nam_micro_lookup.dat ./eta_micro_lookup.dat - if [ ${USE_CUSTOM_POST_CONFIG_FILE} = "TRUE" ]; then + if [ $(boolify ${USE_CUSTOM_POST_CONFIG_FILE}) = "TRUE" ]; then post_config_fp="${CUSTOM_POST_CONFIG_FP}" print_info_msg " ==================================================================== CUSTOM_POST_CONFIG_FP = \"${CUSTOM_POST_CONFIG_FP}\" ====================================================================" else - if [ "${CPL_AQM}" = "TRUE" ]; then + if [ $(boolify "${CPL_AQM}") = "TRUE" ]; then post_config_fp="${PARMdir}/upp/postxconfig-NT-AQM.txt" else post_config_fp="${PARMdir}/upp/postxconfig-NT-fv3lam.txt" @@ -420,7 +532,7 @@ if [ ${WRITE_DOPOST} = "TRUE" ]; then cp ${post_config_fp} ./postxconfig-NT.txt cp ${PARMdir}/upp/params_grib2_tbl_new . # Set itag for inline-post: - if [ "${CPL_AQM}" = "TRUE" ]; then + if [ $(boolify "${CPL_AQM}") = "TRUE" ]; then post_itag_add="aqf_on=.true.," else post_itag_add="" @@ -455,11 +567,14 @@ cp ${CCPP_PHYS_DIR}/noahmptable.tbl . 
#----------------------------------------------------------------------- # STOCH="FALSE" -if ([ "${DO_SPP}" = "TRUE" ] || [ "${DO_SPPT}" = "TRUE" ] || [ "${DO_SHUM}" = "TRUE" ] || \ - [ "${DO_SKEB}" = "TRUE" ] || [ "${DO_LSM_SPP}" = "TRUE" ]); then +if ([ $(boolify "${DO_SPP}") = "TRUE" ] || \ + [ $(boolify "${DO_SPPT}") = "TRUE" ] || \ + [ $(boolify "${DO_SHUM}") = "TRUE" ] || \ + [ $(boolify "${DO_SKEB}") = "TRUE" ] || \ + [ $(boolify "${DO_LSM_SPP}") = "TRUE" ]); then STOCH="TRUE" fi -if [ "${STOCH}" == "TRUE" ]; then +if [ "${STOCH}" = "TRUE" ]; then cp ${FV3_NML_STOCH_FP} ${DATA}/${FV3_NML_FN} else ln -sf ${FV3_NML_FP} ${DATA}/${FV3_NML_FN} @@ -472,7 +587,7 @@ fi # #----------------------------------------------------------------------- # -if ([ "$STOCH" == "TRUE" ] && [ "${DO_ENSEMBLE}" = "TRUE" ]); then +if ([ "$STOCH" == "TRUE" ] && [ $(boolify "${DO_ENSEMBLE}") = "TRUE" ]); then python3 $USHdir/set_fv3nml_ens_stoch_seeds.py \ --path-to-defns ${GLOBAL_VAR_DEFNS_FP} \ --cdate "$CDATE" || print_err_msg_exit "\ @@ -489,7 +604,7 @@ fi # #----------------------------------------------------------------------- # -if [ "${CPL_AQM}" = "TRUE" ] && [ "${PREDEF_GRID_NAME}" = "AQM_NA_13km" ]; then +if [ $(boolify "${CPL_AQM}") = "TRUE" ] && [ "${PREDEF_GRID_NAME}" = "AQM_NA_13km" ]; then python3 $USHdir/update_input_nml.py \ --namelist "${DATA}/${FV3_NML_FN}" \ --aqm_na_13km || print_err_msg_exit "\ @@ -507,10 +622,10 @@ fi #----------------------------------------------------------------------- # flag_fcst_restart="FALSE" -if [ "${DO_FCST_RESTART}" = "TRUE" ] && [ "$(ls -A ${DATA}/RESTART )" ]; then +if [ $(boolify "${DO_FCST_RESTART}") = "TRUE" ] && [ "$(ls -A ${DATA}/RESTART )" ]; then cp input.nml input.nml_orig cp model_configure model_configure_orig - if [ "${CPL_AQM}" = "TRUE" ]; then + if [ $(boolify "${CPL_AQM}") = "TRUE" ]; then cp aqm.rc aqm.rc_orig fi relative_link_flag="FALSE" @@ -574,8 +689,10 @@ fi # 
#----------------------------------------------------------------------- # -if [ "${CPL_AQM}" = "TRUE" ]; then - if [ "${COLDSTART}" = "TRUE" ] && [ "${PDY}${cyc}" = "${DATE_FIRST_CYCL:0:10}" ] && [ "${flag_fcst_restart}" = "FALSE" ]; then +if [ $(boolify "${CPL_AQM}") = "TRUE" ]; then + if [ $(boolify "${COLDSTART}") = "TRUE" ] && \ + [ "${PDY}${cyc}" = "${DATE_FIRST_CYCL:0:10}" ] && \ + [ $(boolify "${flag_fcst_restart}") = "FALSE" ]; then init_concentrations="true" else init_concentrations="false" @@ -666,7 +783,7 @@ fi # #----------------------------------------------------------------------- # -if [ "${RUN_ENVIR}" = "nco" ] && [ "${CPL_AQM}" = "TRUE" ]; then +if [ "${RUN_ENVIR}" = "nco" ] && [ $(boolify "${CPL_AQM}") = "TRUE" ]; then # create an intermediate symlink to RESTART ln -sf "${DATA}/RESTART" "${COMIN}/RESTART" fi @@ -725,7 +842,7 @@ POST_STEP # #----------------------------------------------------------------------- # -if [ "${CPL_AQM}" = "TRUE" ]; then +if [ $(boolify "${CPL_AQM}") = "TRUE" ]; then if [ "${RUN_ENVIR}" = "nco" ]; then if [ -d "${COMIN}/RESTART" ] && [ "$(ls -A ${DATA}/RESTART)" ]; then rm -rf "${COMIN}/RESTART" @@ -758,8 +875,8 @@ fi # #----------------------------------------------------------------------- # -if [ ${WRITE_DOPOST} = "TRUE" ]; then - +if [ $(boolify ${WRITE_DOPOST}) = "TRUE" ]; then + yyyymmdd=${PDY} hh=${cyc} fmn="00" @@ -785,7 +902,7 @@ if [ ${WRITE_DOPOST} = "TRUE" ]; then post_fn_suffix="GrbF${fhr_d}" post_renamed_fn_suffix="f${fhr}${post_mn_or_null}.${POST_OUTPUT_DOMAIN_NAME}.grib2" - if [ "${CPL_AQM}" = "TRUE" ]; then + if [ $(boolify "${CPL_AQM}") = "TRUE" ]; then fids=( "cmaq" ) else fids=( "prslev" "natlev" ) @@ -800,15 +917,15 @@ if [ ${WRITE_DOPOST} = "TRUE" ]; then if [ $RUN_ENVIR != "nco" ]; then basetime=$( $DATE_UTIL --date "$yyyymmdd $hh" +%y%j%H%M ) symlink_suffix="_${basetime}f${fhr}${post_mn}" - create_symlink_to_file ${post_renamed_fn} ${FID}${symlink_suffix} TRUE + create_symlink_to_file 
${post_renamed_fn} ${FID}${symlink_suffix} TRUE fi # DBN alert - if [ $SENDDBN = "TRUE" ]; then + if [ "$SENDDBN" = "TRUE" ]; then $DBNROOT/bin/dbn_alert MODEL rrfs_post ${job} ${COMOUT}/${post_renamed_fn} fi done - if [ "${CPL_AQM}" = "TRUE" ]; then + if [ $(boolify "${CPL_AQM}") = "TRUE" ]; then mv ${DATA}/dynf${fhr}.nc ${COMIN}/${NET}.${cycle}${dot_ensmem}.dyn.f${fhr}.nc mv ${DATA}/phyf${fhr}.nc ${COMIN}/${NET}.${cycle}${dot_ensmem}.phy.f${fhr}.nc fi diff --git a/scripts/exregional_run_met_genensprod_or_ensemblestat.sh b/scripts/exregional_run_met_genensprod_or_ensemblestat.sh index 93caeaa7f2..05503bb963 100755 --- a/scripts/exregional_run_met_genensprod_or_ensemblestat.sh +++ b/scripts/exregional_run_met_genensprod_or_ensemblestat.sh @@ -8,7 +8,11 @@ #----------------------------------------------------------------------- # . $USHdir/source_util_funcs.sh -source_config_for_task "task_run_vx_ensgrid|task_run_post" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow nco global verification cpl_aqm_parm \ + constants fixed_files grid_params \ + task_run_post task_run_vx_ensgrid ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # diff --git a/scripts/exregional_run_met_gridstat_or_pointstat_vx.sh b/scripts/exregional_run_met_gridstat_or_pointstat_vx.sh index 4f871e6e1b..03c6093943 100755 --- a/scripts/exregional_run_met_gridstat_or_pointstat_vx.sh +++ b/scripts/exregional_run_met_gridstat_or_pointstat_vx.sh @@ -8,7 +8,11 @@ #----------------------------------------------------------------------- # . 
$USHdir/source_util_funcs.sh -source_config_for_task "task_run_vx_gridstat|task_run_vx_pointstat|task_run_post" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow nco global verification cpl_aqm_parm \ + constants fixed_files grid_params \ + task_run_post task_run_vx_gridstat task_run_vx_pointstat ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # @@ -118,7 +122,7 @@ set_vx_params \ #----------------------------------------------------------------------- # i="0" -if [ "${DO_ENSEMBLE}" = "TRUE" ]; then +if [ $(boolify "${DO_ENSEMBLE}") = "TRUE" ]; then i=$( bc -l <<< "${ENSMEM_INDX}-1" ) fi time_lag=$( bc -l <<< "${ENS_TIME_LAG_HRS[$i]}*${SECS_PER_HOUR}" ) @@ -151,7 +155,7 @@ else # or, better, just remove this variale and code "/${ensmem_name}" where # slash_ensmem_subdir_or_null currently appears below. # - if [ "${DO_ENSEMBLE}" = "TRUE" ]; then + if [ $(boolify "${DO_ENSEMBLE}") = "TRUE" ]; then slash_ensmem_subdir_or_null="/${ensmem_name}" else slash_ensmem_subdir_or_null="" diff --git a/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensmean.sh b/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensmean.sh index 6e4a4ff33f..12a54dc21b 100755 --- a/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensmean.sh +++ b/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensmean.sh @@ -8,7 +8,11 @@ #----------------------------------------------------------------------- # . 
$USHdir/source_util_funcs.sh -source_config_for_task "task_run_vx_ensgrid_mean|task_run_vx_enspoint_mean|task_run_post" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow nco global verification cpl_aqm_parm \ + constants fixed_files grid_params \ + task_run_post task_run_vx_ensgrid_mean task_run_vx_enspoint_mean ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # diff --git a/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensprob.sh b/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensprob.sh index 924d321ec3..8fd4a59dfe 100755 --- a/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensprob.sh +++ b/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensprob.sh @@ -8,7 +8,11 @@ #----------------------------------------------------------------------- # . $USHdir/source_util_funcs.sh -source_config_for_task "task_run_vx_ensgrid_prob|task_run_vx_enspoint_prob|task_run_post" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow nco global verification cpl_aqm_parm \ + constants fixed_files grid_params \ + task_run_vx_ensgrid_prob task_run_vx_enspoint_prob task_run_post ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # diff --git a/scripts/exregional_run_met_pb2nc_obs.sh b/scripts/exregional_run_met_pb2nc_obs.sh index 985cd33c7f..5281021f01 100755 --- a/scripts/exregional_run_met_pb2nc_obs.sh +++ b/scripts/exregional_run_met_pb2nc_obs.sh @@ -8,7 +8,11 @@ #----------------------------------------------------------------------- # . 
$USHdir/source_util_funcs.sh -source_config_for_task "task_run_met_pb2nc_obs" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow nco global verification cpl_aqm_parm \ + constants fixed_files grid_params \ + task_run_met_pb2nc_obs ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # diff --git a/scripts/exregional_run_met_pcpcombine.sh b/scripts/exregional_run_met_pcpcombine.sh index 6e64d102e6..ce9e78ab17 100755 --- a/scripts/exregional_run_met_pcpcombine.sh +++ b/scripts/exregional_run_met_pcpcombine.sh @@ -8,7 +8,11 @@ #----------------------------------------------------------------------- # . $USHdir/source_util_funcs.sh -source_config_for_task "task_run_met_pcpcombine|task_run_post" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow nco global verification cpl_aqm_parm \ + constants fixed_files grid_params \ + task_run_met_pcpcombine task_run_post ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # @@ -122,7 +126,7 @@ set_vx_params \ time_lag="0" if [ "${FCST_OR_OBS}" = "FCST" ]; then i="0" - if [ "${DO_ENSEMBLE}" = "TRUE" ]; then + if [ $(boolify "${DO_ENSEMBLE}") = "TRUE" ]; then i=$( bc -l <<< "${ENSMEM_INDX}-1" ) fi time_lag=$( bc -l <<< "${ENS_TIME_LAG_HRS[$i]}*${SECS_PER_HOUR}" ) @@ -157,7 +161,7 @@ if [ "${FCST_OR_OBS}" = "FCST" ]; then # or, better, just remove this variale and code "/${ensmem_name}" where # slash_ensmem_subdir_or_null currently appears below. 
# - if [ "${DO_ENSEMBLE}" = "TRUE" ]; then + if [ $(boolify "${DO_ENSEMBLE}") = "TRUE" ]; then slash_ensmem_subdir_or_null="/${ensmem_name}" else slash_ensmem_subdir_or_null="" diff --git a/scripts/exregional_run_post.sh b/scripts/exregional_run_post.sh index 1bf45bd965..3f0ca93df9 100755 --- a/scripts/exregional_run_post.sh +++ b/scripts/exregional_run_post.sh @@ -1,5 +1,62 @@ #!/usr/bin/env bash +# +#----------------------------------------------------------------------- +# +# The ex-script that runs UPP. +# +# Run-time environment variables: +# +# CDATE +# COMOUT +# DATA_FHR +# DBNROOT +# ENSMEM_INDX +# GLOBAL_VAR_DEFNS_FP +# NET +# PDY +# REDIRECT_OUT_ERR +# SENDDBN +# +# Experiment variables +# +# user: +# EXECdir +# MACHINE +# PARMdir +# RUN_ENVIR +# USHdir +# +# platform: +# PRE_TASK_CMDS +# RUN_CMD_POST +# +# workflow: +# VERBOSE +# +# task_run_fcst: +# DT_ATMOS +# +# task_run_post: +# CUSTOM_POST_CONFIG_FP +# KMP_AFFINITY_RUN_POST +# OMP_NUM_THREADS_RUN_POST +# OMP_STACKSIZE_RUN_POST +# NUMX +# POST_OUTPUT_DOMAIN_NAME +# SUB_HOURLY_POST +# USE_CUSTOM_POST_CONFIG_FILE +# +# global: +# CRTM_DIR +# USE_CRTM +# +# cpl_aqm_parm: +# CPL_AQM +# +#----------------------------------------------------------------------- +# + # #----------------------------------------------------------------------- # @@ -8,7 +65,10 @@ #----------------------------------------------------------------------- # . 
$USHdir/source_util_funcs.sh -source_config_for_task "task_run_post" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow global cpl_aqm_parm \ + task_run_fcst task_run_post ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # @@ -82,7 +142,7 @@ fi # rm -f fort.* cp ${PARMdir}/upp/nam_micro_lookup.dat ./eta_micro_lookup.dat -if [ ${USE_CUSTOM_POST_CONFIG_FILE} = "TRUE" ]; then +if [ $(boolify ${USE_CUSTOM_POST_CONFIG_FILE}) = "TRUE" ]; then post_config_fp="${CUSTOM_POST_CONFIG_FP}" print_info_msg " ==================================================================== @@ -92,7 +152,7 @@ to the temporary work directory (DATA_FHR): DATA_FHR = \"${DATA_FHR}\" ====================================================================" else - if [ "${CPL_AQM}" = "TRUE" ]; then + if [ $(boolify "${CPL_AQM}") = "TRUE" ]; then post_config_fp="${PARMdir}/upp/postxconfig-NT-AQM.txt" else post_config_fp="${PARMdir}/upp/postxconfig-NT-fv3lam.txt" @@ -107,7 +167,7 @@ temporary work directory (DATA_FHR): fi cp ${post_config_fp} ./postxconfig-NT.txt cp ${PARMdir}/upp/params_grib2_tbl_new . -if [ ${USE_CRTM} = "TRUE" ]; then +if [ $(boolify ${USE_CRTM}) = "TRUE" ]; then cp ${CRTM_DIR}/Nalli.IRwater.EmisCoeff.bin ./ cp ${CRTM_DIR}/FAST*.bin ./ cp ${CRTM_DIR}/NPOESS.IRland.EmisCoeff.bin ./ @@ -155,7 +215,7 @@ hh=${cyc} # must be set to a null string. # mnts_secs_str="" -if [ "${SUB_HOURLY_POST}" = "TRUE" ]; then +if [ $(boolify "${SUB_HOURLY_POST}") = "TRUE" ]; then if [ ${fhr}${fmn} = "00000" ]; then mnts_secs_str=":"$( $DATE_UTIL --utc --date "${yyyymmdd} ${hh} UTC + ${DT_ATMOS} seconds" "+%M:%S" ) else @@ -185,7 +245,7 @@ post_mn=${post_time:10:2} # # Create the input namelist file to the post-processor executable. 
# -if [ "${CPL_AQM}" = "TRUE" ]; then +if [ $(boolify "${CPL_AQM}") = "TRUE" ]; then post_itag_add="aqf_on=.true.," else post_itag_add="" @@ -273,7 +333,7 @@ post_renamed_fn_suffix="f${fhr}${post_mn_or_null}.${POST_OUTPUT_DOMAIN_NAME}.gri cd "${COMOUT}" basetime=$( $DATE_UTIL --date "$yyyymmdd $hh" +%y%j%H%M ) symlink_suffix="${dot_ensmem}.${basetime}f${fhr}${post_mn}" -if [ "${CPL_AQM}" = "TRUE" ]; then +if [ $(boolify "${CPL_AQM}") = "TRUE" ]; then fids=( "cmaq" ) else fids=( "prslev" "natlev" ) @@ -287,7 +347,7 @@ for fid in "${fids[@]}"; do create_symlink_to_file ${post_renamed_fn} ${FID}${symlink_suffix} TRUE fi # DBN alert - if [ $SENDDBN = "TRUE" ]; then + if [ "$SENDDBN" = "TRUE" ]; then $DBNROOT/bin/dbn_alert MODEL rrfs_post ${job} ${COMOUT}/${post_renamed_fn} fi done diff --git a/scripts/exregional_run_prdgen.sh b/scripts/exregional_run_prdgen.sh index 5d1bfbf447..5baa779821 100755 --- a/scripts/exregional_run_prdgen.sh +++ b/scripts/exregional_run_prdgen.sh @@ -8,7 +8,11 @@ #----------------------------------------------------------------------- # . $USHdir/source_util_funcs.sh -source_config_for_task "task_run_prdgen|task_run_post" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow nco global verification cpl_aqm_parm \ + constants fixed_files grid_params \ + task_run_post task_run_prdgen ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # @@ -175,7 +179,7 @@ done # Remap to additional output grids if requested #----------------------------------------------- -if [ ${DO_PARALLEL_PRDGEN} == "TRUE" ]; then +if [ $(boolify ${DO_PARALLEL_PRDGEN}) = "TRUE" ]; then # # parallel run wgrib2 for product generation # diff --git a/scripts/exsrw_aqm_ics.sh b/scripts/exsrw_aqm_ics.sh index efd833b092..4fd040e597 100755 --- a/scripts/exsrw_aqm_ics.sh +++ b/scripts/exsrw_aqm_ics.sh @@ -8,7 +8,10 @@ #----------------------------------------------------------------------- # . 
${USHsrw}/source_util_funcs.sh -source_config_for_task "task_aqm_ics" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow nco global verification cpl_aqm_parm \ + constants fixed_files grid_params ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # diff --git a/scripts/exsrw_aqm_lbcs.sh b/scripts/exsrw_aqm_lbcs.sh index 93dc119ec2..7b3058ef34 100755 --- a/scripts/exsrw_aqm_lbcs.sh +++ b/scripts/exsrw_aqm_lbcs.sh @@ -8,7 +8,11 @@ #----------------------------------------------------------------------- # . ${USHsrw}/source_util_funcs.sh -source_config_for_task "task_get_extrn_lbcs|task_make_orog|task_make_lbcs|cpl_aqm_parm|task_aqm_lbcs" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow nco global verification cpl_aqm_parm \ + constants fixed_files grid_params \ + task_get_extrn_lbcs task_make_lbcs task_make_orog ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # @@ -100,7 +104,7 @@ for hr in 0 ${LBC_SPEC_FCST_HRS[@]}; do cp -p "${DATA_SHARE}/${aqm_lbcs_fn}" ${DATA} done -if [ "${DO_AQM_CHEM_LBCS}" = "TRUE" ]; then +if [ $(boolify "${DO_AQM_CHEM_LBCS}") = "TRUE" ]; then ext_lbcs_file="${AQM_LBCS_FILES}" chem_lbcs_fn=${ext_lbcs_file///${MM}} chem_lbcs_fp="${FIXaqm}/chemlbc/${chem_lbcs_fn}" @@ -141,7 +145,7 @@ fi # #----------------------------------------------------------------------- # -if [ "${DO_AQM_GEFS_LBCS}" = "TRUE" ]; then +if [ $(boolify "${DO_AQM_GEFS_LBCS}") = "TRUE" ]; then AQM_GEFS_FILE_CYC=${AQM_GEFS_FILE_CYC:-"${HH}"} AQM_GEFS_FILE_CYC=$( printf "%02d" "${AQM_GEFS_FILE_CYC}" ) @@ -153,7 +157,7 @@ if [ "${DO_AQM_GEFS_LBCS}" = "TRUE" ]; then fi aqm_mofile_fn="${AQM_GEFS_FILE_PREFIX}.t${AQM_GEFS_FILE_CYC}z.atmf" - if [ "${DO_REAL_TIME}" = "TRUE" ]; then + if [ $(boolify "${DO_REAL_TIME}") = "TRUE" ]; then 
aqm_mofile_fp="${COMINgefs}/gefs.${YYYYMMDD}/${AQM_GEFS_FILE_CYC}/chem/sfcsig/${aqm_mofile_fn}" else aqm_mofile_fp="${COMINgefs}/${YYYYMMDD}/${AQM_GEFS_FILE_CYC}/${aqm_mofile_fn}" diff --git a/scripts/exsrw_bias_correction_o3.sh b/scripts/exsrw_bias_correction_o3.sh index 1ef4012528..343e7e6f2b 100755 --- a/scripts/exsrw_bias_correction_o3.sh +++ b/scripts/exsrw_bias_correction_o3.sh @@ -8,7 +8,11 @@ #----------------------------------------------------------------------- # . ${USHsrw}/source_util_funcs.sh -source_config_for_task "cpl_aqm_parm|task_bias_correction_o3" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow nco global verification cpl_aqm_parm \ + constants fixed_files grid_params \ + task_bias_correction_o3 ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # @@ -199,7 +203,7 @@ POST_STEP cp ${DATA}/out/ozone/${yyyy}/*nc ${DATA}/data/bcdata.${yyyymm}/interpolated/ozone/${yyyy} -if [ "${DO_AQM_SAVE_AIRNOW_HIST}" = "TRUE" ]; then +if [ $(boolify "${DO_AQM_SAVE_AIRNOW_HIST}") = "TRUE" ]; then mkdir -p ${COMOUTbicor}/bcdata.${yyyymm}/interpolated/ozone/${yyyy} cp ${DATA}/out/ozone/${yyyy}/*nc ${COMOUTbicor}/bcdata.${yyyymm}/interpolated/ozone/${yyyy} diff --git a/scripts/exsrw_bias_correction_pm25.sh b/scripts/exsrw_bias_correction_pm25.sh index ae1a2d6f65..70cf512589 100755 --- a/scripts/exsrw_bias_correction_pm25.sh +++ b/scripts/exsrw_bias_correction_pm25.sh @@ -8,7 +8,11 @@ #----------------------------------------------------------------------- # . 
${USHsrw}/source_util_funcs.sh -source_config_for_task "cpl_aqm_parm|task_bias_correction_pm25" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow nco global verification cpl_aqm_parm \ + constants fixed_files grid_params \ + task_bias_correction_pm25 ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # @@ -198,7 +202,7 @@ POST_STEP cp ${DATA}/out/pm25/${yyyy}/*nc ${DATA}/data/bcdata.${yyyymm}/interpolated/pm25/${yyyy} -if [ "${DO_AQM_SAVE_AIRNOW_HIST}" = "TRUE" ]; then +if [ $(boolify "${DO_AQM_SAVE_AIRNOW_HIST}") = "TRUE" ]; then mkdir -p ${COMOUTbicor}/bcdata.${yyyymm}/interpolated/pm25/${yyyy} cp ${DATA}/out/pm25/${yyyy}/*nc ${COMOUTbicor}/bcdata.${yyyymm}/interpolated/pm25/${yyyy} fi diff --git a/scripts/exsrw_fire_emission.sh b/scripts/exsrw_fire_emission.sh index cb44c99d8d..3ae78422f5 100755 --- a/scripts/exsrw_fire_emission.sh +++ b/scripts/exsrw_fire_emission.sh @@ -8,7 +8,10 @@ #----------------------------------------------------------------------- # . ${USHsrw}/source_util_funcs.sh -source_config_for_task "cpl_aqm_parm|task_fire_emission" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow nco global verification cpl_aqm_parm \ + constants fixed_files grid_params ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # diff --git a/scripts/exsrw_nexus_emission.sh b/scripts/exsrw_nexus_emission.sh index a5769a6483..0fa8c48754 100755 --- a/scripts/exsrw_nexus_emission.sh +++ b/scripts/exsrw_nexus_emission.sh @@ -8,7 +8,11 @@ #----------------------------------------------------------------------- # . 
${USHsrw}/source_util_funcs.sh -source_config_for_task "cpl_aqm_parm|task_nexus_emission" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow nco global verification cpl_aqm_parm \ + constants fixed_files grid_params \ + task_nexus_emission ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # diff --git a/scripts/exsrw_nexus_gfs_sfc.sh b/scripts/exsrw_nexus_gfs_sfc.sh index 103842d46f..cadc27b89c 100755 --- a/scripts/exsrw_nexus_gfs_sfc.sh +++ b/scripts/exsrw_nexus_gfs_sfc.sh @@ -8,7 +8,10 @@ #----------------------------------------------------------------------- # . ${USHsrw}/source_util_funcs.sh -source_config_for_task "cpl_aqm_parm|task_nexus_gfs_sfc" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow nco global verification cpl_aqm_parm \ + constants fixed_files grid_params ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # @@ -73,7 +76,7 @@ fcst_len_hrs_offset=$(( FCST_LEN_HRS + TIME_OFFSET_HRS )) GFS_SFC_TAR_DIR="${NEXUS_GFS_SFC_ARCHV_DIR}/rh${YYYY}/${YYYYMM}/${YYYYMMDD}" GFS_SFC_TAR_SUB_DIR="gfs.${YYYYMMDD}/${HH}/atmos" -if [ "${DO_REAL_TIME}" = "TRUE" ]; then +if [ $(boolify "${DO_REAL_TIME}") = "TRUE" ]; then GFS_SFC_LOCAL_DIR="${COMINgfs}/${GFS_SFC_TAR_SUB_DIR}" else GFS_SFC_LOCAL_DIR="${NEXUS_GFS_SFC_DIR}/${GFS_SFC_TAR_SUB_DIR}" diff --git a/scripts/exsrw_nexus_post_split.sh b/scripts/exsrw_nexus_post_split.sh index 517893b5e5..151e0a2ea5 100755 --- a/scripts/exsrw_nexus_post_split.sh +++ b/scripts/exsrw_nexus_post_split.sh @@ -8,7 +8,10 @@ #----------------------------------------------------------------------- # . 
${USHsrw}/source_util_funcs.sh -source_config_for_task "cpl_aqm_parm|task_nexus_post_split" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow nco global verification cpl_aqm_parm \ + constants fixed_files grid_params ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # diff --git a/scripts/exsrw_point_source.sh b/scripts/exsrw_point_source.sh index 7acbc946f7..4cd693506c 100755 --- a/scripts/exsrw_point_source.sh +++ b/scripts/exsrw_point_source.sh @@ -8,7 +8,11 @@ #----------------------------------------------------------------------- # . ${USHsrw}/source_util_funcs.sh -source_config_for_task "task_run_fcst|cpl_aqm_parm|task_point_source" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow nco global verification cpl_aqm_parm \ + constants fixed_files grid_params \ + task_point_source task_run_fcst ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # diff --git a/scripts/exsrw_post_stat_o3.sh b/scripts/exsrw_post_stat_o3.sh index 6fa1db7f8f..dfcdd24ffa 100755 --- a/scripts/exsrw_post_stat_o3.sh +++ b/scripts/exsrw_post_stat_o3.sh @@ -8,7 +8,11 @@ #----------------------------------------------------------------------- # . 
${USHsrw}/source_util_funcs.sh -source_config_for_task "cpl_aqm_parm|task_run_post|task_post_stat_o3" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow nco global verification cpl_aqm_parm \ + constants fixed_files grid_params \ + task_run_post ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # diff --git a/scripts/exsrw_post_stat_pm25.sh b/scripts/exsrw_post_stat_pm25.sh index ea7c1717c3..bdbf1fcbc5 100755 --- a/scripts/exsrw_post_stat_pm25.sh +++ b/scripts/exsrw_post_stat_pm25.sh @@ -8,7 +8,11 @@ #----------------------------------------------------------------------- # . ${USHsrw}/source_util_funcs.sh -source_config_for_task "cpl_aqm_parm|task_run_post|task_post_stat_pm25" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow nco global verification cpl_aqm_parm \ + constants fixed_files grid_params \ + task_run_post ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # diff --git a/scripts/exsrw_pre_post_stat.sh b/scripts/exsrw_pre_post_stat.sh index dfb4c2cf9e..f6ec6a9a7d 100755 --- a/scripts/exsrw_pre_post_stat.sh +++ b/scripts/exsrw_pre_post_stat.sh @@ -8,7 +8,11 @@ #----------------------------------------------------------------------- # . 
${USHsrw}/source_util_funcs.sh -source_config_for_task "task_pre_post|task_run_post" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow nco global verification cpl_aqm_parm \ + constants fixed_files grid_params \ + task_run_post ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # diff --git a/tests/WE2E/utils.py b/tests/WE2E/utils.py index fb96dab004..eb3c49fcba 100755 --- a/tests/WE2E/utils.py +++ b/tests/WE2E/utils.py @@ -21,7 +21,7 @@ cfg_to_yaml_str, flatten_dict, load_config_file, - load_shell_config + load_yaml_config ) REPORT_WIDTH = 100 @@ -154,13 +154,13 @@ def calculate_core_hours(expts_dict: dict) -> dict: for expt in expts_dict: # Read variable definitions file - vardefs_file = os.path.join(expts_dict[expt]["expt_dir"],"var_defns.sh") + vardefs_file = os.path.join(expts_dict[expt]["expt_dir"],"var_defns.yaml") if not os.path.isfile(vardefs_file): logging.warning(f"\nWARNING: For experiment {expt}, variable definitions file") logging.warning(f"{vardefs_file}\ndoes not exist!\n\nDropping experiment from summary") continue logging.debug(f'Reading variable definitions file {vardefs_file}') - vardefs = load_shell_config(vardefs_file) + vardefs = load_yaml_config(vardefs_file) vdf = flatten_dict(vardefs) cores_per_node = vdf["NCORES_PER_NODE"] for task in expts_dict[expt]: diff --git a/tests/test_python/test_retrieve_data.py b/tests/test_python/test_retrieve_data.py index 1d54e0904c..2c749c97ac 100644 --- a/tests/test_python/test_retrieve_data.py +++ b/tests/test_python/test_retrieve_data.py @@ -493,61 +493,3 @@ def test_ufs_lbcs_from_aws(self): # Testing that there is no failure retrieve_data.main(args) - - @unittest.skipIf(os.environ.get("CI") == "true", "Skipping HPSS tests") - def test_rap_obs_from_hpss(self): - - """Get RAP observations from hpss for a 06z time""" - - with tempfile.TemporaryDirectory(dir=self.path) as tmp_dir: - os.chdir(tmp_dir) - - # fmt: off - 
args = [ - '--file_set', 'obs', - '--config', self.config, - '--cycle_date', '2023032106', - '--data_stores', 'hpss', - '--data_type', 'RAP_obs', - '--output_path', tmp_dir, - '--debug', - ] - # fmt: on - - retrieve_data.main(args) - - # Verify files exist in temp dir - - path = os.path.join(tmp_dir, "*") - files_on_disk = glob.glob(path) - self.assertEqual(len(files_on_disk), 30) - - @unittest.skipIf(os.environ.get("CI") == "true", "Skipping HPSS tests") - def test_rap_e_obs_from_hpss(self): - - """Get RAP observations from hpss for a 12z time; - at 00z and 12z we expect to see additional files - with the 'rap_e' naming convention""" - - with tempfile.TemporaryDirectory(dir=self.path) as tmp_dir: - os.chdir(tmp_dir) - - # fmt: off - args = [ - '--file_set', 'obs', - '--config', self.config, - '--cycle_date', '2023032112', - '--data_stores', 'hpss', - '--data_type', 'RAP_obs', - '--output_path', tmp_dir, - '--debug', - ] - # fmt: on - - retrieve_data.main(args) - - # Verify files exist in temp dir - - path = os.path.join(tmp_dir, "*") - files_on_disk = glob.glob(path) - self.assertEqual(len(files_on_disk), 37) diff --git a/ush/bash_utils/check_var_valid_value.sh b/ush/bash_utils/check_var_valid_value.sh index 21288184db..5b942c1f73 100644 --- a/ush/bash_utils/check_var_valid_value.sh +++ b/ush/bash_utils/check_var_valid_value.sh @@ -96,7 +96,7 @@ where the arguments are defined as follows: var_value=${!var_name} valid_var_values_at="$valid_var_values_array_name[@]" - valid_var_values=("${!valid_var_values_at}") + valid_var_values=("${!valid_var_values_at:-}") if [ "$#" -eq 3 ]; then err_msg="$3" diff --git a/ush/bash_utils/create_symlink_to_file.sh b/ush/bash_utils/create_symlink_to_file.sh index c6a5213326..0cfcdc9fdf 100644 --- a/ush/bash_utils/create_symlink_to_file.sh +++ b/ush/bash_utils/create_symlink_to_file.sh @@ -30,6 +30,7 @@ fi target=$1 symlink=$2 relative=${3:-TRUE} +relative=$(boolify $relative) # 
#----------------------------------------------------------------------- # diff --git a/ush/bash_utils/print_msg.sh b/ush/bash_utils/print_msg.sh index 28a70d1431..8b032f9698 100644 --- a/ush/bash_utils/print_msg.sh +++ b/ush/bash_utils/print_msg.sh @@ -68,7 +68,7 @@ function print_info_msg() { elif [ "$#" -eq 2 ]; then - verbose="$1" + verbose=$(boolify "$1") info_msg="$2" # #----------------------------------------------------------------------- diff --git a/ush/bash_utils/source_config.sh b/ush/bash_utils/source_config.sh deleted file mode 100644 index df5a79a0df..0000000000 --- a/ush/bash_utils/source_config.sh +++ /dev/null @@ -1,53 +0,0 @@ -# -#----------------------------------------------------------------------- -# This file defines function that sources a config file (yaml/json etc) -# into the calling shell script -#----------------------------------------------------------------------- -# - -function config_to_str() { - $USHdir/config_utils.py -o $1 -c $2 "${@:3}" -} - -# -#----------------------------------------------------------------------- -# Define functions for different file formats -#----------------------------------------------------------------------- -# -function config_to_shell_str() { - config_to_str shell "$@" -} -function config_to_ini_str() { - config_to_str ini "$@" -} -function config_to_yaml_str() { - config_to_str yaml "$@" -} -function config_to_json_str() { - config_to_str json "$@" -} -function config_to_xml_str() { - config_to_str xml "$@" -} - -# -#----------------------------------------------------------------------- -# Source contents of a config file to shell script -#----------------------------------------------------------------------- -# -function source_config() { - - source <( config_to_shell_str "$@" ) - -} -# -#----------------------------------------------------------------------- -# Source partial contents of a config file to shell script. 
-# Only those variables needed by the task are sourced -#----------------------------------------------------------------------- -# -function source_config_for_task() { - - source <( config_to_shell_str "${@:2}" -k "(^(?!task_)|$1).*" ) - -} diff --git a/ush/bash_utils/source_yaml.sh b/ush/bash_utils/source_yaml.sh new file mode 100644 index 0000000000..669408416e --- /dev/null +++ b/ush/bash_utils/source_yaml.sh @@ -0,0 +1,36 @@ + + +function source_yaml () { + + local func_name="${FUNCNAME[0]}" + + if [ "$#" -lt 1 ] ; then + print_err_msg_exit " +Incorrect number of arguments specified: + + Function name: ${func_name} + Number of args specified: $# + +Usage: + + ${func_name} yaml_file [section] + + yaml_file: path to the YAML file to source + section: optional subsection of yaml +" + fi + local section + yaml_file=$1 + section=$2 + + while read -r line ; do + + + # A regex to match list representations + line=$(echo "$line" | sed -E "s/='\[(.*)\]'/=(\1)/") + line=${line//,/} + line=${line//\"/} + line=${line/None/} + source <( echo "${line}" ) + done < <(uw config realize -i "${yaml_file}" --output-format sh --key-path $section) +} diff --git a/ush/config_defaults.yaml b/ush/config_defaults.yaml index c9c0fc7cb8..90651c1b7f 100644 --- a/ush/config_defaults.yaml +++ b/ush/config_defaults.yaml @@ -468,7 +468,7 @@ workflow: # #----------------------------------------------------------------------- # - WORKFLOW_ID: !nowtimestamp '' + WORKFLOW_ID: "" # #----------------------------------------------------------------------- # @@ -718,13 +718,11 @@ workflow: # script creates and that defines the workflow for the experiment. # # GLOBAL_VAR_DEFNS_FN: - # Name of file (a shell script) containing the definitions of the primary - # experiment variables (parameters) defined in this default configuration - # script and in the user-specified configuration as well as secondary - # experiment variables generated by the experiment generation script. 
- # This file is sourced by many scripts (e.g. the J-job scripts corresponding - # to each workflow task) in order to make all the experiment variables - # available in those scripts. + # Name of the experiment configuration file. It contains the primary + # experiment variables defined in this default configuration script and in the + # user-specified configuration as well as secondary experiment variables + # generated by the experiment generation script. This file is the primary + # source of information used in the scripts at run time. # # ROCOTO_YAML_FN: # Name of the YAML file containing the YAML workflow definition from @@ -772,7 +770,7 @@ workflow: FCST_MODEL: "ufs-weather-model" WFLOW_XML_FN: "FV3LAM_wflow.xml" - GLOBAL_VAR_DEFNS_FN: "var_defns.sh" + GLOBAL_VAR_DEFNS_FN: "var_defns.yaml" ROCOTO_YAML_FN: "rocoto_defns.yaml" EXTRN_MDL_VAR_DEFNS_FN: "extrn_mdl_var_defns" WFLOW_LAUNCH_SCRIPT_FN: "launch_FV3LAM_wflow.sh" diff --git a/ush/create_aqm_rc_file.py b/ush/create_aqm_rc_file.py index 739a4d9f18..c37ed05d29 100644 --- a/ush/create_aqm_rc_file.py +++ b/ush/create_aqm_rc_file.py @@ -13,7 +13,7 @@ cfg_to_yaml_str, flatten_dict, import_vars, - load_shell_config, + load_yaml_config, print_info_msg, print_input_args, str_to_type, @@ -158,7 +158,7 @@ def parse_args(argv): if __name__ == "__main__": args = parse_args(sys.argv[1:]) - cfg = load_shell_config(args.path_to_defns) + cfg = load_yaml_config(args.path_to_defns) cfg = flatten_dict(cfg) import_vars(dictionary=cfg) create_aqm_rc_file( diff --git a/ush/create_diag_table_file.py b/ush/create_diag_table_file.py index 975165dfe5..113953172d 100644 --- a/ush/create_diag_table_file.py +++ b/ush/create_diag_table_file.py @@ -14,7 +14,7 @@ cfg_to_yaml_str, flatten_dict, import_vars, - load_shell_config, + load_yaml_config, print_info_msg, print_input_args, ) @@ -102,7 +102,7 @@ def parse_args(argv): if __name__ == "__main__": args = parse_args(sys.argv[1:]) - cfg = load_shell_config(args.path_to_defns) + cfg = 
load_yaml_config(args.path_to_defns) cfg = flatten_dict(cfg) import_vars(dictionary=cfg) create_diag_table_file(args.run_dir) diff --git a/ush/create_model_configure_file.py b/ush/create_model_configure_file.py index cd39087688..b8767f635a 100644 --- a/ush/create_model_configure_file.py +++ b/ush/create_model_configure_file.py @@ -13,7 +13,7 @@ cfg_to_yaml_str, flatten_dict, import_vars, - load_shell_config, + load_yaml_config, lowercase, print_info_msg, print_input_args, @@ -296,7 +296,7 @@ def parse_args(argv): if __name__ == "__main__": args = parse_args(sys.argv[1:]) - cfg = load_shell_config(args.path_to_defns) + cfg = load_yaml_config(args.path_to_defns) cfg = flatten_dict(cfg) import_vars(dictionary=cfg) create_model_configure_file( diff --git a/ush/create_ufs_configure_file.py b/ush/create_ufs_configure_file.py index 9d4ea8afa4..3fd82f488b 100644 --- a/ush/create_ufs_configure_file.py +++ b/ush/create_ufs_configure_file.py @@ -15,7 +15,7 @@ cfg_to_yaml_str, flatten_dict, import_vars, - load_shell_config, + load_yaml_config, print_info_msg, print_input_args, ) @@ -113,7 +113,7 @@ def parse_args(argv): if __name__ == "__main__": args = parse_args(sys.argv[1:]) - cfg = load_shell_config(args.path_to_defns) + cfg = load_yaml_config(args.path_to_defns) cfg = flatten_dict(cfg) import_vars(dictionary=cfg) create_ufs_configure_file( diff --git a/ush/generate_FV3LAM_wflow.py b/ush/generate_FV3LAM_wflow.py index ba0e9f3a2b..c671a69da8 100755 --- a/ush/generate_FV3LAM_wflow.py +++ b/ush/generate_FV3LAM_wflow.py @@ -11,12 +11,15 @@ import logging import os import sys +from stat import S_IXUSR +from string import Template from textwrap import dedent from uwtools.api.config import get_nml_config, get_yaml_config, realize from uwtools.api.template import render from python_utils import ( + list_to_str, log_info, import_vars, export_vars, @@ -24,7 +27,6 @@ ln_vrfy, mkdir_vrfy, mv_vrfy, - create_symlink_to_file, check_for_preexist_dir_file, cfg_to_yaml_str, 
find_pattern_in_str, @@ -137,9 +139,23 @@ def generate_FV3LAM_wflow( verbose=debug, ) - create_symlink_to_file( - wflow_launch_script_fp, os.path.join(exptdir, wflow_launch_script_fn), False - ) + with open(wflow_launch_script_fp, "r", encoding='utf-8') as launch_script_file: + launch_script_content = launch_script_file.read() + + # Stage an experiment-specific launch file in the experiment directory + template = Template(launch_script_content) + + # The script needs several variables from the workflow and user sections + template_variables = {**expt_config["user"], **expt_config["workflow"], + "valid_vals_BOOLEAN": list_to_str(expt_config["constants"]["valid_vals_BOOLEAN"])} + launch_content = template.safe_substitute(template_variables) + + launch_fp = os.path.join(exptdir, wflow_launch_script_fn) + with open(launch_fp, "w", encoding='utf-8') as expt_launch_fn: + expt_launch_fn.write(launch_content) + + os.chmod(launch_fp, os.stat(launch_fp).st_mode|S_IXUSR) + # # ----------------------------------------------------------------------- # @@ -639,7 +655,7 @@ def generate_FV3LAM_wflow( input_format="nml", output_file=FV3_NML_STOCH_FP, output_format="nml", - supplemental_configs=[settings], + update_config=get_nml_config(settings), ) # diff --git a/ush/job_preamble.sh b/ush/job_preamble.sh index 16b99393a2..ecfb94fb50 100644 --- a/ush/job_preamble.sh +++ b/ush/job_preamble.sh @@ -1,5 +1,7 @@ #!/bin/bash +set +u + # #----------------------------------------------------------------------- # @@ -67,13 +69,13 @@ export COMOUTwmo="${COMOUTwmo:-${COMOUT}/wmo}" # #----------------------------------------------------------------------- # -if [ ${subcyc} -ne 0 ]; then +if [ ${subcyc:-0} -ne 0 ]; then export cycle="t${cyc}${subcyc}z" else export cycle="t${cyc}z" fi -if [ "${RUN_ENVIR}" = "nco" ] && [ "${DO_ENSEMBLE}" = "TRUE" ] && [ ! -z $ENSMEM_INDX ]; then +if [ "${RUN_ENVIR}" = "nco" ] && [ $(boolify "${DO_ENSEMBLE}") = "TRUE" ] && [ ! 
-z $ENSMEM_INDX ]; then export dot_ensmem=".mem${ENSMEM_INDX}" else export dot_ensmem= @@ -215,4 +217,3 @@ In directory: \"${scrfunc_dir}\" ========================================================================" } - diff --git a/ush/launch_FV3LAM_wflow.sh b/ush/launch_FV3LAM_wflow.sh old mode 100755 new mode 100644 index 92dd24aee6..7c26511f4f --- a/ush/launch_FV3LAM_wflow.sh +++ b/ush/launch_FV3LAM_wflow.sh @@ -34,43 +34,10 @@ scrfunc_dir=$( dirname "${scrfunc_fp}" ) # #----------------------------------------------------------------------- # -# Get the experiment directory. We assume that there is a symlink to -# this script in the experiment directory, and this script is called via -# that symlink. Thus, finding the directory in which the symlink is -# located will give us the experiment directory. We find this by first -# obtaining the directory portion (i.e. the portion without the name of -# this script) of the command that was used to called this script (i.e. -# "$0") and then use the "readlink -f" command to obtain the corresponding -# absolute path. This will work for all four of the following ways in -# which the symlink in the experiment directory pointing to this script -# may be called: -# -# 1) Call this script from the experiment directory: -# > cd /path/to/experiment/directory -# > launch_FV3LAM_wflow.sh -# -# 2) Call this script from the experiment directory but using "./" before -# the script name: -# > cd /path/to/experiment/directory -# > ./launch_FV3LAM_wflow.sh -# -# 3) Call this script from any directory using the absolute path to the -# symlink in the experiment directory: -# > /path/to/experiment/directory/launch_FV3LAM_wflow.sh -# -# 4) Call this script from a directory that is several levels up from the -# experiment directory (but not necessarily at the root directory): -# > cd /path/to -# > experiment/directory/launch_FV3LAM_wflow.sh -# -# Note that given just a file name, e.g. 
the name of this script without -# any path before it, the "dirname" command will return a ".", e.g. in -# bash, -# -# > exptdir=$( dirname "launch_FV3LAM_wflow.sh" ) -# > echo $exptdir -# -# will print out ".". +# This script will be configured for a specific experiment when +# generate_FV3LAM_wflow.py. That process fills in what is necessary so +# this configured script in the experiment directory will need no +# additional information at run time. # #----------------------------------------------------------------------- # @@ -94,7 +61,12 @@ fi # #----------------------------------------------------------------------- # -. $exptdir/var_defns.sh + +# These variables are assumed to exist in the global environment by the +# bash_utils, which is a Very Bad (TM) thing. +export USHdir=$USHdir +export valid_vals_BOOLEAN=${valid_vals_BOOLEAN} + . $USHdir/source_util_funcs.sh # #----------------------------------------------------------------------- @@ -369,7 +341,7 @@ by expt_name has completed with the following workflow status (wflow_status): # Thus, there is no need to try to relaunch it. We also append a message # to the completion message above to indicate this. # - if [ "${USE_CRON_TO_RELAUNCH}" = "TRUE" ]; then + if [ $(boolify "${USE_CRON_TO_RELAUNCH}") = "TRUE" ]; then msg="${msg}\ Thus, there is no need to relaunch the workflow via a cron job. 
Removing diff --git a/ush/link_fix.py b/ush/link_fix.py index fdd9a65f28..f0d103d8ea 100755 --- a/ush/link_fix.py +++ b/ush/link_fix.py @@ -18,7 +18,7 @@ cd_vrfy, mkdir_vrfy, find_pattern_in_str, - load_shell_config, + load_yaml_config, ) @@ -403,7 +403,7 @@ def parse_args(argv): if __name__ == "__main__": args = parse_args(sys.argv[1:]) - cfg = load_shell_config(args.path_to_defns) + cfg = load_yaml_config(args.path_to_defns) link_fix( verbose=cfg["workflow"]["VERBOSE"], file_group=args.file_group, diff --git a/ush/load_modules_run_task.sh b/ush/load_modules_run_task.sh index 89f3addf41..5ede278bfd 100755 --- a/ush/load_modules_run_task.sh +++ b/ush/load_modules_run_task.sh @@ -3,33 +3,43 @@ # #----------------------------------------------------------------------- # -# Source necessary files. +# This script loads the appropriate modules for a given task in an +# experiment. # -#----------------------------------------------------------------------- +# It requires the following global environment variables: # -. ${GLOBAL_VAR_DEFNS_FP} -. $USHdir/source_util_funcs.sh +# GLOBAL_VAR_DEFNS_FP # -#----------------------------------------------------------------------- +# And uses these variables from the GLOBAL_VAR_DEFNS_FP file # -# Save current shell options (in a global array). Then set new options -# for this script/function. +# platform: +# BUILD_MOD_FN +# RUN_VER_FN +# +# workflow: +# VERBOSE # #----------------------------------------------------------------------- # -{ save_shell_opts; . 
$USHdir/preamble.sh; } > /dev/null 2>&1 + +# Get the location of this file -- it's the USHdir +scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) +scrfunc_fn=$( basename "${scrfunc_fp}" ) +USHdir=$( dirname "${scrfunc_fp}" ) +HOMEdir=$( dirname $USHdir ) + +source $USHdir/source_util_funcs.sh + # #----------------------------------------------------------------------- # -# Get the full path to the file in which this script/function is located -# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in -# which the file is located (scrfunc_dir). +# Save current shell options (in a global array). Then set new options +# for this script/function. # #----------------------------------------------------------------------- # -scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" ) -scrfunc_fn=$( basename "${scrfunc_fp}" ) -scrfunc_dir=$( dirname "${scrfunc_fp}" ) +{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1 + # #----------------------------------------------------------------------- # @@ -37,7 +47,7 @@ scrfunc_dir=$( dirname "${scrfunc_fp}" ) # #----------------------------------------------------------------------- # -if [ "$#" -ne 2 ]; then +if [ "$#" -ne 3 ]; then print_err_msg_exit " Incorrect number of arguments specified: @@ -46,15 +56,17 @@ Incorrect number of arguments specified: Usage: - ${scrfunc_fn} task_name jjob_fp + ${scrfunc_fn} machine task_name jjob_fp where the arguments are defined as follows: + machine: The name of the supported platform + task_name: The name of the rocoto task for which this script will load modules and launch the J-job. - jjob_fp + jjob_fp: The full path to the J-job script corresponding to task_name. This script will launch this J-job using the \"exec\" command (which will first terminate this script and then launch the j-job; see man page of @@ -65,12 +77,13 @@ fi # #----------------------------------------------------------------------- # -# Get the task name and the name of the J-job script. 
+# Save arguments # #----------------------------------------------------------------------- # -task_name="$1" -jjob_fp="$2" +machine=$(echo_lowercase $1) +task_name="$2" +jjob_fp="$3" # #----------------------------------------------------------------------- # @@ -99,12 +112,38 @@ set -u #----------------------------------------------------------------------- # default_modules_dir="$HOMEdir/modulefiles" -machine=$(echo_lowercase $MACHINE) -if [ "${WORKFLOW_MANAGER}" != "ecflow" ]; then +test ! $(module is-loaded ecflow > /dev/null 2>&1) && ecflow_loaded=false + +if [ "$ecflow_loaded" = "false" ] ; then source "${HOMEdir}/etc/lmod-setup.sh" ${machine} fi module use "${default_modules_dir}" +# Load workflow environment + +if [ -f ${default_modules_dir}/python_srw.lua ] ; then + module load python_srw || print_err_msg_exit "\ + Loading SRW common python module failed. Expected python_srw.lua + in the modules directory here: + modules_dir = \"${default_modules_dir}\"" +fi + +# Modules that use conda and need an environment activated will set the +# SRW_ENV variable to the name of the environment to be activated. That +# must be done within the script, and not inside the module. Do that +# now. +if [ -n "${SRW_ENV:-}" ] ; then + set +u + conda deactivate + conda activate ${SRW_ENV} + set -u +fi + +# Source the necessary blocks of the experiment config YAML +for sect in platform workflow ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done + if [ "${machine}" != "wcoss2" ]; then module load "${BUILD_MOD_FN}" || print_err_msg_exit "\ Loading of platform- and compiler-specific module file (BUILD_MOD_FN) @@ -116,26 +155,15 @@ fi # #----------------------------------------------------------------------- # -# Set the directory (modules_dir) in which the module files for the va- -# rious workflow tasks are located. Also, set the name of the module -# file for the specified task. -# -# A module file is a file whose first line is the "magic cookie" string -# '#%Module'. 
It is interpreted by the "module load ..." command. It -# sets environment variables (including prepending/appending to paths) -# and loads modules. -# -# The UFS SRW App repository contains module files for the -# workflow tasks in the template rocoto XML file for the FV3-LAM work- -# flow that need modules not loaded in the BUILD_MOD_FN above. +# Set the directory for the modulefiles included with SRW and the +# specific module for the requested task. # # The full path to a module file for a given task is # # $HOMEdir/modulefiles/$machine/${task_name}.local # -# where HOMEdir is the base directory of the workflow, machine is the -# name of the machine that we're running on (in lowercase), and task_- -# name is the name of the current task (an input to this script). +# where HOMEdir is the SRW clone, machine is the name of the platform +# being used, and task_name is the current task to run. # #----------------------------------------------------------------------- # @@ -154,10 +182,10 @@ Loading modules for task \"${task_name}\" ..." module use "${modules_dir}" || print_err_msg_exit "\ Call to \"module use\" command failed." -# source version file (run) only if it is specified in versions directory -VERSION_FILE="${HOMEdir}/versions/${RUN_VER_FN}" -if [ -f ${VERSION_FILE} ]; then - . ${VERSION_FILE} +# source version file only if it exists in the versions directory +version_file="${HOMEdir}/versions/${RUN_VER_FN}" +if [ -f ${version_file} ]; then + source ${version_file} fi # # Load the .local module file if available for the given task @@ -170,20 +198,11 @@ specified task (task_name) failed: task_name = \"${task_name}\" modulefile_local = \"${modulefile_local}\" modules_dir = \"${modules_dir}\"" -elif [ -f ${default_modules_dir}/python_srw.lua ] ; then - module load python_srw || print_err_msg_exit "\ - Loading SRW common python module failed. 
Expected python_srw.lua - in the modules directory here: - modules_dir = \"${default_modules_dir}\"" fi - module list -# Modules that use conda and need an environment activated will set the -# SRW_ENV variable to the name of the environment to be activated. That -# must be done within the script, and not inside the module. Do that -# now. - +# Reactivate the workflow environment to ensure the correct Python +# environment is available first in the environment. if [ -n "${SRW_ENV:-}" ] ; then set +u conda deactivate @@ -204,11 +223,7 @@ Launching J-job (jjob_fp) for task \"${task_name}\" ... jjob_fp = \"${jjob_fp}\" " -if [ "${WORKFLOW_MANAGER}" = "ecflow" ]; then - /bin/bash "${jjob_fp}" -else - exec "${jjob_fp}" -fi +source "${jjob_fp}" # #----------------------------------------------------------------------- diff --git a/ush/machine/hera.yaml b/ush/machine/hera.yaml index 4d836af317..80fbb8fc98 100644 --- a/ush/machine/hera.yaml +++ b/ush/machine/hera.yaml @@ -20,8 +20,8 @@ platform: RUN_CMD_UTILS: srun --export=ALL RUN_CMD_NEXUS: srun -n ${nprocs} --export=ALL RUN_CMD_AQMLBC: srun --export=ALL -n ${numts} - SCHED_NATIVE_CMD: --export=NONE - SCHED_NATIVE_CMD_HPSS: -n 1 --export=NONE + SCHED_NATIVE_CMD: "--export=NONE" + SCHED_NATIVE_CMD_HPSS: "-n 1 --export=NONE" PRE_TASK_CMDS: '{ ulimit -s unlimited; ulimit -a; }' TEST_EXTRN_MDL_SOURCE_BASEDIR: /scratch1/NCEPDEV/nems/role.epic/UFS_SRW_data/develop/input_model_data TEST_AQM_INPUT_BASEDIR: /scratch1/NCEPDEV/nems/role.epic/UFS_SRW_data/develop/aqm_data diff --git a/ush/set_fv3nml_ens_stoch_seeds.py b/ush/set_fv3nml_ens_stoch_seeds.py index 3459fa8707..0b9b186210 100644 --- a/ush/set_fv3nml_ens_stoch_seeds.py +++ b/ush/set_fv3nml_ens_stoch_seeds.py @@ -10,12 +10,12 @@ import sys from textwrap import dedent -from uwtools.api.config import realize +from uwtools.api.config import get_nml_config, realize from python_utils import ( cfg_to_yaml_str, import_vars, - load_shell_config, + load_yaml_config, print_input_args, 
print_info_msg, ) @@ -112,7 +112,7 @@ def set_fv3nml_ens_stoch_seeds(cdate, expt_config): input_format="nml", output_file=fv3_nml_ensmem_fp, output_format="nml", - supplemental_configs=[settings], + update_config=get_nml_config(settings), ) def parse_args(argv): @@ -142,5 +142,5 @@ def parse_args(argv): if __name__ == "__main__": args = parse_args(sys.argv[1:]) - cfg = load_shell_config(args.path_to_defns) + cfg = load_yaml_config(args.path_to_defns) set_fv3nml_ens_stoch_seeds(args.cdate, cfg) diff --git a/ush/set_fv3nml_sfc_climo_filenames.py b/ush/set_fv3nml_sfc_climo_filenames.py index 417aa0b5ee..7251a5b0e6 100644 --- a/ush/set_fv3nml_sfc_climo_filenames.py +++ b/ush/set_fv3nml_sfc_climo_filenames.py @@ -10,14 +10,14 @@ import sys from textwrap import dedent -from uwtools.api.config import get_yaml_config, realize +from uwtools.api.config import get_nml_config, get_yaml_config, realize from python_utils import ( cfg_to_yaml_str, check_var_valid_value, flatten_dict, import_vars, - load_shell_config, + load_yaml_config, print_info_msg, ) @@ -105,7 +105,7 @@ def set_fv3nml_sfc_climo_filenames(config, debug=False): input_format="nml", output_file=FV3_NML_FP, output_format="nml", - supplemental_configs=[settings], + update_config=get_nml_config(settings), ) def parse_args(argv): @@ -127,6 +127,6 @@ def parse_args(argv): if __name__ == "__main__": args = parse_args(sys.argv[1:]) - cfg = load_shell_config(args.path_to_defns) + cfg = load_yaml_config(args.path_to_defns) cfg = flatten_dict(cfg) set_fv3nml_sfc_climo_filenames(cfg, args.debug) diff --git a/ush/setup.py b/ush/setup.py index 0511653fa2..51d5b2a084 100644 --- a/ush/setup.py +++ b/ush/setup.py @@ -10,10 +10,12 @@ from textwrap import dedent import yaml +from uwtools.api.config import get_yaml_config from python_utils import ( log_info, cd_vrfy, + date_to_str, mkdir_vrfy, rm_vrfy, check_var_valid_value, @@ -1499,10 +1501,13 @@ def dict_find(user_dict, substring): yaml.Dumper.ignore_aliases = lambda *args : 
True yaml.dump(expt_config.get("rocoto"), f, sort_keys=False) - var_defns_cfg = copy.deepcopy(expt_config) + var_defns_cfg = get_yaml_config(config=expt_config) del var_defns_cfg["rocoto"] - with open(global_var_defns_fp, "a") as f: - f.write(cfg_to_shell_str(var_defns_cfg)) + + # Fixup a couple of data types: + for dates in ("DATE_FIRST_CYCL", "DATE_LAST_CYCL"): + var_defns_cfg["workflow"][dates] = date_to_str(var_defns_cfg["workflow"][dates]) + var_defns_cfg.dump(global_var_defns_fp) # diff --git a/ush/source_util_funcs.sh b/ush/source_util_funcs.sh index 7fe3025d6a..9feceaf68e 100644 --- a/ush/source_util_funcs.sh +++ b/ush/source_util_funcs.sh @@ -220,15 +220,15 @@ function source_util_funcs() { #----------------------------------------------------------------------- # . ${bashutils_dir}/eval_METplus_timestr_tmpl.sh + # #----------------------------------------------------------------------- # -# Source the file containing the function that sources config files. +# Source the file that sources YAML files as if they were bash # #----------------------------------------------------------------------- # - . ${bashutils_dir}/source_config.sh - + . 
${bashutils_dir}/source_yaml.sh } source_util_funcs diff --git a/ush/update_input_nml.py b/ush/update_input_nml.py index e975d9bc08..b85bbacd4a 100644 --- a/ush/update_input_nml.py +++ b/ush/update_input_nml.py @@ -9,7 +9,7 @@ import sys from textwrap import dedent -from uwtools.api.config import realize +from uwtools.api.config import get_nml_config, realize from python_utils import ( print_input_args, @@ -77,7 +77,7 @@ def update_input_nml(namelist, restart, aqm_na_13km): input_format="nml", output_file=namelist, output_format="nml", - supplemental_configs=[settings], + update_config=get_nml_config(settings), ) def parse_args(argv): diff --git a/ush/wrappers/run_fcst.sh b/ush/wrappers/run_fcst.sh index 7450de7cc5..c875cb16c0 100755 --- a/ush/wrappers/run_fcst.sh +++ b/ush/wrappers/run_fcst.sh @@ -1,7 +1,10 @@ #!/usr/bin/env bash -export GLOBAL_VAR_DEFNS_FP="${EXPTDIR}/var_defns.sh" +export GLOBAL_VAR_DEFNS_FP="${EXPTDIR}/var_defns.yaml" +. $USHdir/source_util_funcs.sh +for sect in workflow ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done set -xa -source ${GLOBAL_VAR_DEFNS_FP} export CDATE=${DATE_FIRST_CYCL} export CYCLE_DIR=${EXPTDIR}/${CDATE} export cyc=${DATE_FIRST_CYCL:8:2} diff --git a/ush/wrappers/run_get_ics.sh b/ush/wrappers/run_get_ics.sh index 0ee521a67d..494eab6850 100755 --- a/ush/wrappers/run_get_ics.sh +++ b/ush/wrappers/run_get_ics.sh @@ -1,7 +1,10 @@ #!/usr/bin/env bash -export GLOBAL_VAR_DEFNS_FP="${EXPTDIR}/var_defns.sh" +export GLOBAL_VAR_DEFNS_FP="${EXPTDIR}/var_defns.yaml" +. 
$USHdir/source_util_funcs.sh +for sect in workflow task_get_extrn_ics ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done set -xa -source ${GLOBAL_VAR_DEFNS_FP} export CDATE=${DATE_FIRST_CYCL} export CYCLE_DIR=${EXPTDIR}/${CDATE} export cyc=${DATE_FIRST_CYCL:8:2} diff --git a/ush/wrappers/run_get_lbcs.sh b/ush/wrappers/run_get_lbcs.sh index 543ab6e47d..ec6fa23892 100755 --- a/ush/wrappers/run_get_lbcs.sh +++ b/ush/wrappers/run_get_lbcs.sh @@ -1,7 +1,10 @@ #!/usr/bin/env bash -export GLOBAL_VAR_DEFNS_FP="${EXPTDIR}/var_defns.sh" +export GLOBAL_VAR_DEFNS_FP="${EXPTDIR}/var_defns.yaml" +. $USHdir/source_util_funcs.sh +for sect in workflow task_get_extrn_lbcs ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done set -xa -source ${GLOBAL_VAR_DEFNS_FP} export CDATE=${DATE_FIRST_CYCL} export CYCLE_DIR=${EXPTDIR}/${CDATE} export cyc=${DATE_FIRST_CYCL:8:2} diff --git a/ush/wrappers/run_make_grid.sh b/ush/wrappers/run_make_grid.sh index 2d55beaf94..f7a6f8aeed 100755 --- a/ush/wrappers/run_make_grid.sh +++ b/ush/wrappers/run_make_grid.sh @@ -1,7 +1,10 @@ #!/usr/bin/env bash -export GLOBAL_VAR_DEFNS_FP="${EXPTDIR}/var_defns.sh" +export GLOBAL_VAR_DEFNS_FP="${EXPTDIR}/var_defns.yaml" +. $USHdir/source_util_funcs.sh +for sect in workflow ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done set -xa -source ${GLOBAL_VAR_DEFNS_FP} export CDATE=${DATE_FIRST_CYCL} export CYCLE_DIR=${EXPTDIR}/${CDATE} diff --git a/ush/wrappers/run_make_ics.sh b/ush/wrappers/run_make_ics.sh index 5c629722fc..adcdc16180 100755 --- a/ush/wrappers/run_make_ics.sh +++ b/ush/wrappers/run_make_ics.sh @@ -1,7 +1,10 @@ #!/usr/bin/env bash -export GLOBAL_VAR_DEFNS_FP="${EXPTDIR}/var_defns.sh" +export GLOBAL_VAR_DEFNS_FP="${EXPTDIR}/var_defns.yaml" +. 
$USHdir/source_util_funcs.sh +for sect in workflow ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done set -xa -source ${GLOBAL_VAR_DEFNS_FP} export CDATE=${DATE_FIRST_CYCL} export CYCLE_DIR=${EXPTDIR}/${CDATE} export cyc=${DATE_FIRST_CYCL:8:2} diff --git a/ush/wrappers/run_make_lbcs.sh b/ush/wrappers/run_make_lbcs.sh index 27c94c127f..f9fe35d9da 100755 --- a/ush/wrappers/run_make_lbcs.sh +++ b/ush/wrappers/run_make_lbcs.sh @@ -1,7 +1,10 @@ #!/usr/bin/env bash -export GLOBAL_VAR_DEFNS_FP="${EXPTDIR}/var_defns.sh" +export GLOBAL_VAR_DEFNS_FP="${EXPTDIR}/var_defns.yaml" +. $USHdir/source_util_funcs.sh +for sect in workflow ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done set -xa -source ${GLOBAL_VAR_DEFNS_FP} export CDATE=${DATE_FIRST_CYCL} export CYCLE_DIR=${EXPTDIR}/${CDATE} export cyc=${DATE_FIRST_CYCL:8:2} diff --git a/ush/wrappers/run_make_orog.sh b/ush/wrappers/run_make_orog.sh index 5f02ff9599..ebc5259ec1 100755 --- a/ush/wrappers/run_make_orog.sh +++ b/ush/wrappers/run_make_orog.sh @@ -1,7 +1,10 @@ #!/usr/bin/env bash -export GLOBAL_VAR_DEFNS_FP="${EXPTDIR}/var_defns.sh" +export GLOBAL_VAR_DEFNS_FP="${EXPTDIR}/var_defns.yaml" +. $USHdir/source_util_funcs.sh +for sect in workflow ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done set -xa -source ${GLOBAL_VAR_DEFNS_FP} export CDATE=${DATE_FIRST_CYCL} export CYCLE_DIR=${EXPTDIR}/${CDATE} diff --git a/ush/wrappers/run_make_sfc_climo.sh b/ush/wrappers/run_make_sfc_climo.sh index fab33f75d6..8024f529fc 100755 --- a/ush/wrappers/run_make_sfc_climo.sh +++ b/ush/wrappers/run_make_sfc_climo.sh @@ -1,7 +1,10 @@ #!/usr/bin/env bash -export GLOBAL_VAR_DEFNS_FP="${EXPTDIR}/var_defns.sh" +export GLOBAL_VAR_DEFNS_FP="${EXPTDIR}/var_defns.yaml" +. 
$USHdir/source_util_funcs.sh +for sect in workflow ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done set -xa -source ${GLOBAL_VAR_DEFNS_FP} export CDATE=${DATE_FIRST_CYCL} export CYCLE_DIR=${EXPTDIR}/${CDATE} diff --git a/ush/wrappers/run_post.sh b/ush/wrappers/run_post.sh index 46ef104365..ca060acb1f 100755 --- a/ush/wrappers/run_post.sh +++ b/ush/wrappers/run_post.sh @@ -1,7 +1,10 @@ #!/usr/bin/env bash -export GLOBAL_VAR_DEFNS_FP="${EXPTDIR}/var_defns.sh" +export GLOBAL_VAR_DEFNS_FP="${EXPTDIR}/var_defns.yaml" +. $USHdir/source_util_funcs.sh +for sect in workflow ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done set -xa -source ${GLOBAL_VAR_DEFNS_FP} export CDATE=${DATE_FIRST_CYCL} export CYCLE_DIR=${EXPTDIR}/${CDATE} export cyc=${DATE_FIRST_CYCL:8:2}