From bf7b49058709b7bae3c02d49300127203b5266ff Mon Sep 17 00:00:00 2001
From: "Chan-Hoo.Jeon-NOAA" <60152248+chan-hoo@users.noreply.github.com>
Date: Mon, 8 Apr 2024 09:01:43 -0400
Subject: [PATCH 01/39] [develop] Port SRW-AQM to Orion and Hercules (#1067)
* Port SRW-AQM to Orion and Hercules
---
modulefiles/build_derecho_intel.lua | 2 ++
modulefiles/build_hercules_intel.lua | 1 +
modulefiles/build_orion_intel.lua | 1 +
modulefiles/tasks/derecho/nexus_post_split.local.lua | 3 ++-
modulefiles/tasks/derecho/python_srw.lua | 3 ---
modulefiles/tasks/hercules/aqm_ics.local.lua | 1 -
modulefiles/tasks/hercules/aqm_lbcs.local.lua | 3 +--
modulefiles/tasks/hercules/fire_emission.local.lua | 1 -
modulefiles/tasks/hercules/nexus_emission.local.lua | 1 -
modulefiles/tasks/hercules/nexus_post_split.local.lua | 1 -
modulefiles/tasks/hercules/python_srw.lua | 3 ---
modulefiles/tasks/orion/aqm_ics.local.lua | 1 -
modulefiles/tasks/orion/aqm_lbcs.local.lua | 3 +--
modulefiles/tasks/orion/fire_emission.local.lua | 1 -
modulefiles/tasks/orion/nexus_emission.local.lua | 1 -
modulefiles/tasks/orion/nexus_post_split.local.lua | 1 -
modulefiles/tasks/orion/python_srw.lua | 3 ---
parm/wflow/aqm_prep.yaml | 2 ++
.../aqm/config.aqm_grid_AQM_NA13km_suite_GFS_v16.yaml | 2 +-
ush/config.aqm.yaml | 10 ++++++----
ush/machine/derecho.yaml | 2 +-
ush/machine/hercules.yaml | 9 ++++++++-
ush/machine/orion.yaml | 9 ++++++++-
23 files changed, 34 insertions(+), 30 deletions(-)
delete mode 100644 modulefiles/tasks/derecho/python_srw.lua
delete mode 100644 modulefiles/tasks/hercules/python_srw.lua
delete mode 100644 modulefiles/tasks/orion/python_srw.lua
diff --git a/modulefiles/build_derecho_intel.lua b/modulefiles/build_derecho_intel.lua
index ac98c39e53..e057c9e5dc 100644
--- a/modulefiles/build_derecho_intel.lua
+++ b/modulefiles/build_derecho_intel.lua
@@ -14,5 +14,7 @@ load(pathJoin("cmake", os.getenv("cmake_ver") or "3.26.3"))
load("srw_common")
+load(pathJoin("prod_util", os.getenv("prod_util_ver") or "1.2.2"))
+
setenv("CMAKE_Platform","derecho.intel")
diff --git a/modulefiles/build_hercules_intel.lua b/modulefiles/build_hercules_intel.lua
index cec2a3a30e..531f48a080 100644
--- a/modulefiles/build_hercules_intel.lua
+++ b/modulefiles/build_hercules_intel.lua
@@ -17,6 +17,7 @@ load("srw_common")
load("nccmp/1.9.0.1")
load("nco/5.0.6")
+load(pathJoin("prod_util", os.getenv("prod_util_ver") or "1.2.2"))
setenv("CFLAGS","-diag-disable=10441")
setenv("FFLAGS","-diag-disable=10441")
diff --git a/modulefiles/build_orion_intel.lua b/modulefiles/build_orion_intel.lua
index d3e777d1dc..8e895c5bee 100644
--- a/modulefiles/build_orion_intel.lua
+++ b/modulefiles/build_orion_intel.lua
@@ -18,6 +18,7 @@ load("srw_common")
load("nccmp/1.9.0.1")
load("nco/5.0.6")
load("wget")
+load(pathJoin("prod_util", os.getenv("prod_util_ver") or "1.2.2"))
setenv("CMAKE_C_COMPILER","mpiicc")
setenv("CMAKE_CXX_COMPILER","mpiicpc")
diff --git a/modulefiles/tasks/derecho/nexus_post_split.local.lua b/modulefiles/tasks/derecho/nexus_post_split.local.lua
index 07d126ff0b..e7f216375c 100644
--- a/modulefiles/tasks/derecho/nexus_post_split.local.lua
+++ b/modulefiles/tasks/derecho/nexus_post_split.local.lua
@@ -1,3 +1,4 @@
-load(pathJoin("nco", os.getenv("nco_ver") or "5.0.6"))
+load("nco/5.0.6")
+
load("ncarenv")
load("python_srw_aqm")
diff --git a/modulefiles/tasks/derecho/python_srw.lua b/modulefiles/tasks/derecho/python_srw.lua
deleted file mode 100644
index fe6c73a7d5..0000000000
--- a/modulefiles/tasks/derecho/python_srw.lua
+++ /dev/null
@@ -1,3 +0,0 @@
-unload("python")
-load("conda")
-setenv("SRW_ENV", "srw_app")
diff --git a/modulefiles/tasks/hercules/aqm_ics.local.lua b/modulefiles/tasks/hercules/aqm_ics.local.lua
index 2aac950d8d..df0e35d5da 100644
--- a/modulefiles/tasks/hercules/aqm_ics.local.lua
+++ b/modulefiles/tasks/hercules/aqm_ics.local.lua
@@ -1,2 +1 @@
-load(pathJoin("nco", os.getenv("nco_ver") or "4.9.3"))
load("python_srw_aqm")
diff --git a/modulefiles/tasks/hercules/aqm_lbcs.local.lua b/modulefiles/tasks/hercules/aqm_lbcs.local.lua
index 5a7b0cece6..df0e35d5da 100644
--- a/modulefiles/tasks/hercules/aqm_lbcs.local.lua
+++ b/modulefiles/tasks/hercules/aqm_lbcs.local.lua
@@ -1,2 +1 @@
-load(pathJoin("nco", os.getenv("nco_ver") or "4.9.3"))
-load("miniconda_regional_workflow_cmaq")
+load("python_srw_aqm")
diff --git a/modulefiles/tasks/hercules/fire_emission.local.lua b/modulefiles/tasks/hercules/fire_emission.local.lua
index 2aac950d8d..df0e35d5da 100644
--- a/modulefiles/tasks/hercules/fire_emission.local.lua
+++ b/modulefiles/tasks/hercules/fire_emission.local.lua
@@ -1,2 +1 @@
-load(pathJoin("nco", os.getenv("nco_ver") or "4.9.3"))
load("python_srw_aqm")
diff --git a/modulefiles/tasks/hercules/nexus_emission.local.lua b/modulefiles/tasks/hercules/nexus_emission.local.lua
index 2aac950d8d..df0e35d5da 100644
--- a/modulefiles/tasks/hercules/nexus_emission.local.lua
+++ b/modulefiles/tasks/hercules/nexus_emission.local.lua
@@ -1,2 +1 @@
-load(pathJoin("nco", os.getenv("nco_ver") or "4.9.3"))
load("python_srw_aqm")
diff --git a/modulefiles/tasks/hercules/nexus_post_split.local.lua b/modulefiles/tasks/hercules/nexus_post_split.local.lua
index 2aac950d8d..df0e35d5da 100644
--- a/modulefiles/tasks/hercules/nexus_post_split.local.lua
+++ b/modulefiles/tasks/hercules/nexus_post_split.local.lua
@@ -1,2 +1 @@
-load(pathJoin("nco", os.getenv("nco_ver") or "4.9.3"))
load("python_srw_aqm")
diff --git a/modulefiles/tasks/hercules/python_srw.lua b/modulefiles/tasks/hercules/python_srw.lua
deleted file mode 100644
index fe6c73a7d5..0000000000
--- a/modulefiles/tasks/hercules/python_srw.lua
+++ /dev/null
@@ -1,3 +0,0 @@
-unload("python")
-load("conda")
-setenv("SRW_ENV", "srw_app")
diff --git a/modulefiles/tasks/orion/aqm_ics.local.lua b/modulefiles/tasks/orion/aqm_ics.local.lua
index 2aac950d8d..df0e35d5da 100644
--- a/modulefiles/tasks/orion/aqm_ics.local.lua
+++ b/modulefiles/tasks/orion/aqm_ics.local.lua
@@ -1,2 +1 @@
-load(pathJoin("nco", os.getenv("nco_ver") or "4.9.3"))
load("python_srw_aqm")
diff --git a/modulefiles/tasks/orion/aqm_lbcs.local.lua b/modulefiles/tasks/orion/aqm_lbcs.local.lua
index 5a7b0cece6..df0e35d5da 100644
--- a/modulefiles/tasks/orion/aqm_lbcs.local.lua
+++ b/modulefiles/tasks/orion/aqm_lbcs.local.lua
@@ -1,2 +1 @@
-load(pathJoin("nco", os.getenv("nco_ver") or "4.9.3"))
-load("miniconda_regional_workflow_cmaq")
+load("python_srw_aqm")
diff --git a/modulefiles/tasks/orion/fire_emission.local.lua b/modulefiles/tasks/orion/fire_emission.local.lua
index 2aac950d8d..df0e35d5da 100644
--- a/modulefiles/tasks/orion/fire_emission.local.lua
+++ b/modulefiles/tasks/orion/fire_emission.local.lua
@@ -1,2 +1 @@
-load(pathJoin("nco", os.getenv("nco_ver") or "4.9.3"))
load("python_srw_aqm")
diff --git a/modulefiles/tasks/orion/nexus_emission.local.lua b/modulefiles/tasks/orion/nexus_emission.local.lua
index 2aac950d8d..df0e35d5da 100644
--- a/modulefiles/tasks/orion/nexus_emission.local.lua
+++ b/modulefiles/tasks/orion/nexus_emission.local.lua
@@ -1,2 +1 @@
-load(pathJoin("nco", os.getenv("nco_ver") or "4.9.3"))
load("python_srw_aqm")
diff --git a/modulefiles/tasks/orion/nexus_post_split.local.lua b/modulefiles/tasks/orion/nexus_post_split.local.lua
index 2aac950d8d..df0e35d5da 100644
--- a/modulefiles/tasks/orion/nexus_post_split.local.lua
+++ b/modulefiles/tasks/orion/nexus_post_split.local.lua
@@ -1,2 +1 @@
-load(pathJoin("nco", os.getenv("nco_ver") or "4.9.3"))
load("python_srw_aqm")
diff --git a/modulefiles/tasks/orion/python_srw.lua b/modulefiles/tasks/orion/python_srw.lua
deleted file mode 100644
index fe6c73a7d5..0000000000
--- a/modulefiles/tasks/orion/python_srw.lua
+++ /dev/null
@@ -1,3 +0,0 @@
-unload("python")
-load("conda")
-setenv("SRW_ENV", "srw_app")
diff --git a/parm/wflow/aqm_prep.yaml b/parm/wflow/aqm_prep.yaml
index d8f01d2c82..c57d2198f0 100644
--- a/parm/wflow/aqm_prep.yaml
+++ b/parm/wflow/aqm_prep.yaml
@@ -106,6 +106,7 @@ task_aqm_ics_ext:
<<: *default_vars
PREV_CYCLE_DIR: '&WARMSTART_CYCLE_DIR;'
join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;'
+ memory: 64G
dependency:
and:
taskdep:
@@ -131,6 +132,7 @@ task_aqm_ics:
<<: *default_vars
PREV_CYCLE_DIR: '&COMIN_DIR;'
join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;'
+ memory: 64G
dependency:
and:
taskdep:
diff --git a/tests/WE2E/test_configs/aqm/config.aqm_grid_AQM_NA13km_suite_GFS_v16.yaml b/tests/WE2E/test_configs/aqm/config.aqm_grid_AQM_NA13km_suite_GFS_v16.yaml
index 1587fadcc1..2901d1ebf1 100644
--- a/tests/WE2E/test_configs/aqm/config.aqm_grid_AQM_NA13km_suite_GFS_v16.yaml
+++ b/tests/WE2E/test_configs/aqm/config.aqm_grid_AQM_NA13km_suite_GFS_v16.yaml
@@ -21,7 +21,7 @@ rocoto:
task_aqm_ics_ext:
metatask_run_ensemble:
task_run_fcst_mem#mem#:
- walltime: 01:00:00
+ walltime: 01:20:00
task_get_extrn_ics:
EXTRN_MDL_NAME_ICS: FV3GFS
FV3GFS_FILE_FMT_ICS: netcdf
diff --git a/ush/config.aqm.yaml b/ush/config.aqm.yaml
index 2718eafbbf..155f846add 100644
--- a/ush/config.aqm.yaml
+++ b/ush/config.aqm.yaml
@@ -1,8 +1,8 @@
metadata:
- description: config for Online-CMAQ, AQM_NA_13km, warm-start
+ description: config for SRW-AQM, AQM_NA_13km, warm-start
user:
RUN_ENVIR: community
- MACHINE: hera
+ MACHINE: [hera/orion/hercules/derecho]
ACCOUNT: [account name]
workflow:
USE_CRON_TO_RELAUNCH: true
@@ -22,7 +22,9 @@ workflow:
FIELD_TABLE_TMPL_FN: field_table_aqm.FV3_GFS_v16
DO_REAL_TIME: false
COLDSTART: false # set to true for cold start
- WARMSTART_CYCLE_DIR: '/scratch2/NAGAPE/epic/SRW-AQM_DATA/aqm_data/restart/2023111000'
+ WARMSTART_CYCLE_DIR: '/scratch2/NAGAPE/epic/SRW-AQM_DATA/aqm_data/restart/2023111000' # for hera
+# WARMSTART_CYCLE_DIR: '/work/noaa/epic/SRW-AQM_DATA/aqm_data/restart/2023111000' # for orion/hercules
+# WARMSTART_CYCLE_DIR: '' # for derecho
nco:
envir_default: test_aqm_warmstart
NET_default: aqm
@@ -33,7 +35,7 @@ rocoto:
# task_aqm_ics_ext: # uncomment this in case of COLDSTART: true
metatask_run_ensemble:
task_run_fcst_mem#mem#:
- walltime: 01:00:00
+ walltime: 01:20:00
task_get_extrn_ics:
EXTRN_MDL_NAME_ICS: FV3GFS
FV3GFS_FILE_FMT_ICS: netcdf
diff --git a/ush/machine/derecho.yaml b/ush/machine/derecho.yaml
index 511ccc2784..b12e65513c 100644
--- a/ush/machine/derecho.yaml
+++ b/ush/machine/derecho.yaml
@@ -16,7 +16,7 @@ platform:
RUN_CMD_SERIAL: time
RUN_CMD_UTILS: mpiexec -n $nprocs
RUN_CMD_NEXUS: mpiexec -n $nprocs
- RUN_CMD_AQMLBC: mpiexec -n ${NUMTS}
+ RUN_CMD_AQMLBC: mpiexec -n ${numts}
PRE_TASK_CMDS: '{ ulimit -s unlimited; ulimit -a; }'
TEST_EXTRN_MDL_SOURCE_BASEDIR: /glade/work/epicufsrt/contrib/UFS_SRW_data/develop/input_model_data
TEST_AQM_INPUT_BASEDIR: /glade/work/epicufsrt/contrib/UFS_SRW_data/develop/aqm_data
diff --git a/ush/machine/hercules.yaml b/ush/machine/hercules.yaml
index e300cf3d6d..e29801dd49 100644
--- a/ush/machine/hercules.yaml
+++ b/ush/machine/hercules.yaml
@@ -19,7 +19,7 @@ platform:
RUN_CMD_SERIAL: time
RUN_CMD_UTILS: srun --export=ALL -n $nprocs
RUN_CMD_NEXUS: srun --export=ALL
- RUN_CMD_AQMLBC: srun --export=ALL -n ${NUMTS}
+ RUN_CMD_AQMLBC: srun --export=ALL -n ${numts}
SCHED_NATIVE_CMD: --export=NONE
PRE_TASK_CMDS: '{ ulimit -s unlimited; ulimit -a; }'
TEST_EXTRN_MDL_SOURCE_BASEDIR: /work/noaa/epic/role-epic/contrib/UFS_SRW_data/develop/input_model_data
@@ -33,6 +33,8 @@ platform:
FIXorg: /work/noaa/epic/role-epic/contrib/UFS_SRW_data/develop/fix/fix_orog
FIXsfc: /work/noaa/epic/role-epic/contrib/UFS_SRW_data/develop/fix/fix_sfc_climo
FIXshp: /work/noaa/epic/role-epic/contrib/UFS_SRW_data/develop/NaturalEarth
+ FIXaqm: /work/noaa/epic/SRW-AQM_DATA/fix_aqm
+ FIXemis: /work/noaa/epic/SRW-AQM_DATA/fix_emis
EXTRN_MDL_DATA_STORES: aws
data:
ics_lbcs:
@@ -44,3 +46,8 @@ data:
HRRR: /work/noaa/epic/role-epic/contrib/UFS_SRW_data/develop/input_model_data/HRRR/${yyyymmdd}${hh}
RAP: /work/noaa/epic/role-epic/contrib/UFS_SRW_data/develop/input_model_data/RAP/${yyyymmdd}${hh}
GSMGFS: /work/noaa/epic/role-epic/contrib/UFS_SRW_data/develop/input_model_data/GSMGFS/${yyyymmdd}${hh}
+
+cpl_aqm_parm:
+ COMINfire_default: /work/noaa/epic/SRW-AQM_DATA/aqm_data/RAVE_fire
+ COMINgefs_default: /work/noaa/epic/SRW-AQM_DATA/aqm_data/GEFS_DATA
+ NEXUS_GFS_SFC_DIR: /work/noaa/epic/SRW-AQM_DATA/aqm_data/GFS_SFC_DATA
diff --git a/ush/machine/orion.yaml b/ush/machine/orion.yaml
index 3f74905c8f..3f756e2836 100644
--- a/ush/machine/orion.yaml
+++ b/ush/machine/orion.yaml
@@ -19,7 +19,7 @@ platform:
RUN_CMD_SERIAL: time
RUN_CMD_UTILS: srun --export=ALL
RUN_CMD_NEXUS: srun --export=ALL
- RUN_CMD_AQMLBC: srun --export=ALL -n ${NUMTS}
+ RUN_CMD_AQMLBC: srun --export=ALL -n ${numts}
SCHED_NATIVE_CMD: --export=NONE
PRE_TASK_CMDS: '{ ulimit -s unlimited; ulimit -a; }'
TEST_EXTRN_MDL_SOURCE_BASEDIR: /work/noaa/epic/role-epic/contrib/UFS_SRW_data/develop/input_model_data
@@ -32,6 +32,8 @@ platform:
FIXorg: /work/noaa/epic/role-epic/contrib/UFS_SRW_data/develop/fix/fix_orog
FIXsfc: /work/noaa/epic/role-epic/contrib/UFS_SRW_data/develop/fix/fix_sfc_climo
FIXshp: /work/noaa/epic/role-epic/contrib/UFS_SRW_data/develop/NaturalEarth
+ FIXaqm: /work/noaa/epic/SRW-AQM_DATA/fix_aqm
+ FIXemis: /work/noaa/epic/SRW-AQM_DATA/fix_emis
EXTRN_MDL_DATA_STORES: aws nomads
data:
ics_lbcs:
@@ -43,3 +45,8 @@ data:
HRRR: /work/noaa/epic/role-epic/contrib/UFS_SRW_data/develop/input_model_data/HRRR/${yyyymmdd}${hh}
RAP: /work/noaa/epic/role-epic/contrib/UFS_SRW_data/develop/input_model_data/RAP/${yyyymmdd}${hh}
GSMGFS: /work/noaa/epic/role-epic/contrib/UFS_SRW_data/develop/input_model_data/GSMGFS/${yyyymmdd}${hh}
+
+cpl_aqm_parm:
+ COMINfire_default: /work/noaa/epic/SRW-AQM_DATA/aqm_data/RAVE_fire
+ COMINgefs_default: /work/noaa/epic/SRW-AQM_DATA/aqm_data/GEFS_DATA
+ NEXUS_GFS_SFC_DIR: /work/noaa/epic/SRW-AQM_DATA/aqm_data/GFS_SFC_DATA
From aa1678b490e34a4bb0cd87960fa968d2eb082d25 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Fri, 12 Apr 2024 12:30:02 -0400
Subject: [PATCH 02/39] Bump idna from 3.6 to 3.7 in /doc (#1071)
Bumps [idna](https://github.com/kjd/idna) from 3.6 to 3.7.
- [Release notes](https://github.com/kjd/idna/releases)
- [Changelog](https://github.com/kjd/idna/blob/master/HISTORY.rst)
- [Commits](https://github.com/kjd/idna/compare/v3.6...v3.7)
---
updated-dependencies:
- dependency-name: idna
dependency-type: indirect
...
Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
doc/requirements.txt | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/doc/requirements.txt b/doc/requirements.txt
index 0671225d72..eadc94dcaf 100644
--- a/doc/requirements.txt
+++ b/doc/requirements.txt
@@ -18,7 +18,7 @@ docutils==0.20.1
# sphinx
# sphinx-rtd-theme
# sphinxcontrib-bibtex
-idna==3.6
+idna==3.7
# via requests
imagesize==1.4.1
# via sphinx
From a609196be0d84cb4775d79373275717e389dde77 Mon Sep 17 00:00:00 2001
From: Bruce Kropp - Raytheon
<104453151+BruceKropp-Raytheon@users.noreply.github.com>
Date: Mon, 15 Apr 2024 08:42:57 -0700
Subject: [PATCH 03/39] [develop] Feature/cicd metrics adds methods to collect
resource usage data from major stages of the SRW pipeline build job (#1058)
Updates the SRW Jenkinsfile with run-time stats collection and adds a final stage that triggers the ufs-srw-metrics stats-collection job for reporting metrics.
The SRW pipeline job that uses this Jenkinsfile will now use the 'time' command when executing the major stages: init, build, test. This collects CPU, memory, and disk-usage measurements that can later be used in trend plots on a metrics dashboard.
Additionally, this adds options to the pipeline job that let the operator select a single test or no test suite (the default is still the 'coverage' suite), and an option to select the depth of wrapper-script tasks to execute during functional testing (the default is still all 9 scripts). A sketch of the timing wrapper appears below.
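As a minimal sketch of the timing-wrapper pattern the stage scripts use (the JSON format string is abbreviated here relative to the scripts; the output name follows the ${SRW_PLATFORM}-${SRW_COMPILER}-time-<stage>.json convention):

    # Wrap the stage command in GNU time and emit a JSON timing report
    # that later pipeline steps upload for trend plots.
    /usr/bin/time -p \
      -f '{ "cpu": "%P", "memMax": "%M", "time": {"real": "%e", "user": "%U", "sys": "%S"} }' \
      -o "${WORKSPACE}/${SRW_PLATFORM}-${SRW_COMPILER}-time-srw_build.json" \
      ./build.sh "${platform}" "${SRW_COMPILER}"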
---
.cicd/Jenkinsfile | 60 +++++++++++++++++++++++++-----
.cicd/scripts/disk_usage.sh | 48 ++++++++++++++++++++++++
.cicd/scripts/srw_build.sh | 3 +-
.cicd/scripts/srw_init.sh | 38 +++++++++++++++++++
.cicd/scripts/srw_test.sh | 12 +++++-
.cicd/scripts/wrapper_srw_ftest.sh | 4 ++
tests/WE2E/setup_WE2E_tests.sh | 1 +
7 files changed, 153 insertions(+), 13 deletions(-)
create mode 100755 .cicd/scripts/disk_usage.sh
create mode 100755 .cicd/scripts/srw_init.sh
diff --git a/.cicd/Jenkinsfile b/.cicd/Jenkinsfile
index 8cc95c6b00..ea87029408 100644
--- a/.cicd/Jenkinsfile
+++ b/.cicd/Jenkinsfile
@@ -16,6 +16,10 @@ pipeline {
choice(name: 'SRW_PLATFORM_FILTER', choices: ['all', 'derecho', 'gaea', 'hera', 'jet', 'orion', 'hercules'], description: 'Specify the platform(s) to use')
// Allow job runner to filter based on compiler
choice(name: 'SRW_COMPILER_FILTER', choices: ['all', 'gnu', 'intel'], description: 'Specify the compiler(s) to use to build')
+ // Workflow Wrapper test depth {0..9}, 0=none, 1=simple, 9=all [default]
+ choice(name: 'SRW_WRAPPER_TASK_DEPTH', choices: ['9', '1', '0'], description: '0=none, 1=simple, 9=all [default]')
+ // WE2E Tests ?
+ choice(name: 'SRW_WE2E_SINGLE_TEST', choices: ['coverage', 'none', 'skill-score', 'grid_SUBCONUS_Ind_3km_ics_FV3GFS_lbcs_FV3GFS_suite_WoFS_v0'], description: 'Specify the WE2E test to use')
booleanParam name: 'SRW_WE2E_COMPREHENSIVE_TESTS', defaultValue: false, description: 'Whether to execute the comprehensive end-to-end tests'
}
@@ -126,10 +130,17 @@ pipeline {
stage('Initialize') {
steps {
dir ("${env.SRW_PLATFORM}") {
- echo "Initializing SRW (${env.SRW_COMPILER}) build environment on ${env.SRW_PLATFORM} (using ${env.WORKSPACE}/${env.SRW_PLATFORM})"
+ echo "${env.STAGE_NAME} SRW (${env.SRW_COMPILER}) build environment on ${env.SRW_PLATFORM} (using ${env.WORKSPACE}/${env.SRW_PLATFORM})"
cleanWs()
checkout scm
- sh '"${WORKSPACE}/${SRW_PLATFORM}/manage_externals/checkout_externals"'
+ sh '"${WORKSPACE}/${SRW_PLATFORM}/.cicd/scripts/srw_init.sh"'
+ sh "STAGE_NAME=${env.STAGE_NAME} " + 'bash --login "${WORKSPACE}/${SRW_PLATFORM}/.cicd/scripts/disk_usage.sh"'
+ }
+ }
+ post {
+ always {
+ s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.SRW_PLATFORM}-*-time-srw_init.json", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: []
+ s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.SRW_PLATFORM}-*-disk-usage${env.STAGE_NAME}.csv", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: []
}
}
}
@@ -138,8 +149,9 @@ pipeline {
stage('Build') {
steps {
dir ("${env.SRW_PLATFORM}") {
- echo "Building SRW (${env.SRW_COMPILER}) on ${env.SRW_PLATFORM} (using ${env.WORKSPACE}/${env.SRW_PLATFORM})"
+ echo "${env.STAGE_NAME} SRW (${env.SRW_COMPILER}) on ${env.SRW_PLATFORM} (using ${env.WORKSPACE}/${env.SRW_PLATFORM})"
sh 'bash --login "${WORKSPACE}/${SRW_PLATFORM}/.cicd/scripts/srw_build.sh"'
+ sh "STAGE_NAME=${env.STAGE_NAME} " + 'bash --login "${WORKSPACE}/${SRW_PLATFORM}/.cicd/scripts/disk_usage.sh"'
}
}
@@ -148,6 +160,11 @@ pipeline {
sh 'cd "${WORKSPACE}/${SRW_PLATFORM}/${INSTALL_NAME}" && tar --create --gzip --verbose --file "${WORKSPACE}/${SRW_PLATFORM}/${BUILD_NAME}.tgz" *'
s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: true, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.SRW_PLATFORM}/${env.BUILD_NAME}.tgz", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false], [bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: true, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.SRW_PLATFORM}/build_${env.SRW_COMPILER}/srw_build-${env.SRW_PLATFORM}-${env.SRW_COMPILER}.txt", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: []
}
+ always {
+ s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.SRW_PLATFORM}-*-env.txt", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: []
+ s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.SRW_PLATFORM}-*-time-srw_build.json", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: []
+ s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.SRW_PLATFORM}-*-disk-usage${env.STAGE_NAME}.csv", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: []
+ }
}
}
@@ -163,9 +180,12 @@ pipeline {
// Try a few Workflow Task scripts to make sure E2E tests can be launched in a follow-on 'Test' stage
stage('Functional WorkflowTaskTests') {
+ environment {
+ TASK_DEPTH = "${env.SRW_WRAPPER_TASK_DEPTH}"
+ }
steps {
dir ("${env.SRW_PLATFORM}") {
- echo "Running simple workflow script task tests on ${env.SRW_PLATFORM} (using ${env.WORKSPACE}/${env.SRW_PLATFORM})"
+ echo "Running ${TASK_DEPTH} simple workflow script task tests on ${env.SRW_PLATFORM} (using ${env.WORKSPACE}/${env.SRW_PLATFORM})"
sh 'bash --login "${WORKSPACE}/${SRW_PLATFORM}/.cicd/scripts/wrapper_srw_ftest.sh"'
}
}
@@ -179,11 +199,12 @@ pipeline {
steps {
dir ("${env.SRW_PLATFORM}") {
- echo "Testing SRW (${env.SRW_COMPILER}) on ${env.SRW_PLATFORM} (using ${env.WORKSPACE}/${env.SRW_PLATFORM})"
+ echo "${env.STAGE_NAME} SRW (${env.SRW_COMPILER}) on ${env.SRW_PLATFORM} (using ${env.WORKSPACE}/${env.SRW_PLATFORM})"
// If executing for a Pull Request, check for the run_we2e_comprehensive_tests. If set,
// override the value of the SRW_WE2E_COMPREHENSIVE_TESTS parameter
script {
+ def single_test = params.SRW_WE2E_SINGLE_TEST
def run_we2e_comprehensive_tests = params.SRW_WE2E_COMPREHENSIVE_TESTS
def run_we2e_comprehensive_tests_label = 'run_we2e_comprehensive_tests'
@@ -195,18 +216,37 @@ pipeline {
}
}
- sh "SRW_WE2E_COMPREHENSIVE_TESTS=${run_we2e_comprehensive_tests}" + ' bash --login "${WORKSPACE}/${SRW_PLATFORM}/.cicd/scripts/srw_test.sh"'
- }
+ sh "SRW_WE2E_COMPREHENSIVE_TESTS=${run_we2e_comprehensive_tests} SRW_WE2E_SINGLE_TEST=${single_test}" + ' bash --login "${WORKSPACE}/${SRW_PLATFORM}/.cicd/scripts/srw_test.sh"'
+
+ // Archive the test log files
+ sh "[[ -d ${SRW_WE2E_EXPERIMENT_BASE_DIR} ]] && cd ${SRW_WE2E_EXPERIMENT_BASE_DIR} && tar --create --gzip --verbose --dereference --file ${WORKSPACE}/${SRW_PLATFORM}/we2e_test_logs-${SRW_PLATFORM}-${SRW_COMPILER}.tgz */log.generate_FV3LAM_wflow */log/* ${WORKSPACE}/${SRW_PLATFORM}/tests/WE2E/WE2E_tests_*yaml WE2E_summary*txt ${WORKSPACE}/${SRW_PLATFORM}/tests/WE2E/log.* || cat /dev/null > ${WORKSPACE}/${SRW_PLATFORM}/we2e_test_logs-${SRW_PLATFORM}-${SRW_COMPILER}.tgz"
+ }
+ sh "STAGE_NAME=${env.STAGE_NAME} " + 'bash --login "${WORKSPACE}/${SRW_PLATFORM}/.cicd/scripts/disk_usage.sh"'
}
}
post {
+ success {
+ s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.SRW_PLATFORM}/*_test_results-*-*.txt", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false], [bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.SRW_PLATFORM}/we2e_test_logs-${env.SRW_PLATFORM}-${env.SRW_COMPILER}.tgz", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: []
+ }
always {
- // Archive the test log files
- sh 'cd "${SRW_WE2E_EXPERIMENT_BASE_DIR}" && tar --create --gzip --verbose --dereference --file "${WORKSPACE}/${SRW_PLATFORM}/we2e_test_logs-${SRW_PLATFORM}-${SRW_COMPILER}.tgz" */log.generate_FV3LAM_wflow */log/* ${WORKSPACE}/${SRW_PLATFORM}/tests/WE2E/WE2E_tests_*yaml WE2E_summary*txt ${WORKSPACE}/${SRW_PLATFORM}/tests/WE2E/log.*'
+ s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.SRW_PLATFORM}-*-time-srw_test.json", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: []
+ s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.SRW_PLATFORM}-*-disk-usage${env.STAGE_NAME}.csv", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: []
// Remove the data sets from the experiments directory to conserve disk space
sh 'find "${SRW_WE2E_EXPERIMENT_BASE_DIR}" -regextype posix-extended -regex "^.*(orog|[0-9]{10})$" -type d | xargs rm -rf'
- s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.SRW_PLATFORM}/*_test_results-*-*.txt", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false], [bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.SRW_PLATFORM}/we2e_test_logs-${env.SRW_PLATFORM}-${env.SRW_COMPILER}.tgz", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: []
+ }
+ }
+ }
+
+ stage('Metrics') {
+ steps {
+ script {
+ CI_BRANCH_NAME=env.JOB_BASE_NAME.replace("%2F","%252F")
+ echo "Triggering job for branch ${CI_BRANCH_NAME}/${env.BUILD_NUMBER} ..."
+ build job: '/ufs-srweather-app/ufs-srw-metrics', parameters: [
+ string(name: 'CI_JOB_NAME', value: "ufs-srweather-app/metrics"),
+ string(name: 'CI_BUILD_NUMBER', value: "${CI_BRANCH_NAME}/${env.BUILD_NUMBER}")
+ ], wait: false
}
}
}
diff --git a/.cicd/scripts/disk_usage.sh b/.cicd/scripts/disk_usage.sh
new file mode 100755
index 0000000000..08a482d70f
--- /dev/null
+++ b/.cicd/scripts/disk_usage.sh
@@ -0,0 +1,48 @@
+#!/usr/bin/env bash
+
+# Output a CSV report of disk usage on subdirs of some path
+# Usage:
+# [JOB_NAME=] [BUILD_NUMBER=] [SRW_COMPILER=] [SRW_PLATFORM=] disk_usage path depth size outfile.csv
+#
+# args:
+# directory=$1
+# depth=$2
+# size=$3
+# outfile=$4
+
+[[ -n ${WORKSPACE} ]] || WORKSPACE=$(pwd)
+[[ -n ${SRW_PLATFORM} ]] || SRW_PLATFORM=$(hostname -s 2>/dev/null) || SRW_PLATFORM=$(hostname 2>/dev/null)
+[[ -n ${SRW_COMPILER} ]] || SRW_COMPILER=compiler
+
+script_dir="$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")" > /dev/null 2>&1 && pwd)"
+
+# Get repository root from Jenkins WORKSPACE variable if set, otherwise, set
+# relative to script directory.
+declare workspace
+if [[ -n "${WORKSPACE}/${SRW_PLATFORM}" ]]; then
+ workspace="${WORKSPACE}/${SRW_PLATFORM}"
+else
+ workspace="$(cd -- "${script_dir}/../.." && pwd)"
+fi
+
+echo "STAGE_NAME=${STAGE_NAME}" # from pipeline
+outfile="${4:-${workspace}-${SRW_COMPILER}-disk-usage${STAGE_NAME}.csv}"
+
+function disk_usage() {
+ local directory=${1:-${PWD}}
+ local depth=${2:-1}
+ local size=${3:-k}
+ echo "Disk usage: ${JOB_NAME:-ci}/${SRW_PLATFORM}/$(basename $directory)"
+ (
+ cd $directory || exit 1
+ echo "Platform,Build,Owner,Group,Inodes,${size:-k}bytes,Access Time,Filename"
+ du -Px -d ${depth:-1} --inode --exclude='./workspace' | \
+ while read line ; do
+ arr=($line); inode=${arr[0]}; filename=${arr[1]};
+ echo "${SRW_PLATFORM}-${SRW_COMPILER:-compiler},${JOB_NAME:-ci}/${BUILD_NUMBER:-0},$(stat -c '%U,%G' $filename),${inode:-0},$(du -Px -s -${size:-k} --time $filename)" | tr '\t' ',' ;
+ done | sort -t, -k5 -n #-r
+ )
+ echo ""
+}
+
+disk_usage $1 $2 $3 | tee ${outfile}
diff --git a/.cicd/scripts/srw_build.sh b/.cicd/scripts/srw_build.sh
index 196d984a05..4733c4a4ca 100755
--- a/.cicd/scripts/srw_build.sh
+++ b/.cicd/scripts/srw_build.sh
@@ -27,7 +27,8 @@ fi
# Build and install
cd ${workspace}/tests
set +e
-./build.sh ${platform} ${SRW_COMPILER}
+/usr/bin/time -p -f '{\n "cpu": "%P"\n, "memMax": "%M"\n, "mem": {"text": "%X", "data": "%D", "swaps": "%W", "context": "%c", "waits": "%w"}\n, "pagefaults": {"major": "%F", "minor": "%R"}\n, "filesystem": {"inputs": "%I", "outputs": "%O"}\n, "time": {"real": "%e", "user": "%U", "sys": "%S"}\n}' -o ${WORKSPACE}/${SRW_PLATFORM}-${SRW_COMPILER}-time-srw_build.json \
+ ./build.sh ${platform} ${SRW_COMPILER}
build_exit=$?
set -e
cd -
diff --git a/.cicd/scripts/srw_init.sh b/.cicd/scripts/srw_init.sh
new file mode 100755
index 0000000000..688255ac98
--- /dev/null
+++ b/.cicd/scripts/srw_init.sh
@@ -0,0 +1,38 @@
+#!/usr/bin/env bash
+#
+# A unified init script for the SRW application. This script is expected to
+# fetch initial source for the SRW application for all supported platforms.
+#
+set -e -u -x
+
+script_dir="$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")" > /dev/null 2>&1 && pwd)"
+
+# Get repository root from Jenkins WORKSPACE variable if set, otherwise, set
+# relative to script directory.
+declare workspace
+if [[ -n "${WORKSPACE}/${SRW_PLATFORM}" ]]; then
+ workspace="${WORKSPACE}/${SRW_PLATFORM}"
+else
+ workspace="$(cd -- "${script_dir}/../.." && pwd)"
+fi
+
+# Normalize Parallel Works cluster platform value.
+declare platform
+if [[ "${SRW_PLATFORM}" =~ ^(az|g|p)clusternoaa ]]; then
+ platform='noaacloud'
+else
+ platform="${SRW_PLATFORM}"
+fi
+
+# Build and install
+cd ${workspace}
+set +e
+/usr/bin/time -p -f '{\n "cpu": "%P"\n, "memMax": "%M"\n, "mem": {"text": "%X", "data": "%D", "swaps": "%W", "context": "%c", "waits": "%w"}\n, "pagefaults": {"major": "%F", "minor": "%R"}\n, "filesystem": {"inputs": "%I", "outputs": "%O"}\n, "time": {"real": "%e", "user": "%U", "sys": "%S"}\n}' -o ${WORKSPACE}/${SRW_PLATFORM}-${SRW_COMPILER}-time-srw_init.json \
+ ./manage_externals/checkout_externals
+init_exit=$?
+echo "STAGE_NAME=${STAGE_NAME}"
+env | grep = | sort > ${WORKSPACE}/${SRW_PLATFORM}-${SRW_COMPILER}-env.txt
+set -e
+cd -
+
+exit $init_exit
diff --git a/.cicd/scripts/srw_test.sh b/.cicd/scripts/srw_test.sh
index 76ddf020df..49db0945a9 100755
--- a/.cicd/scripts/srw_test.sh
+++ b/.cicd/scripts/srw_test.sh
@@ -29,17 +29,25 @@ fi
we2e_experiment_base_dir="${workspace}/expt_dirs"
we2e_test_dir="${workspace}/tests/WE2E"
+# Clean any stale test logs
+rm -f ${workspace}/tests/WE2E/log.*
+rm -f ${we2e_experiment_base_dir}/*/log.generate_FV3LAM_wflow ${we2e_experiment_base_dir}/*/log/* WE2E_summary*txt
+
# Run the end-to-end tests.
if "${SRW_WE2E_COMPREHENSIVE_TESTS}"; then
test_type="comprehensive"
else
- test_type="coverage"
+ test_type=${SRW_WE2E_SINGLE_TEST:-"coverage"}
+ if [[ "${test_type}" = skill-score ]]; then
+ test_type="grid_SUBCONUS_Ind_3km_ics_FV3GFS_lbcs_FV3GFS_suite_WoFS_v0"
+ fi
fi
cd ${we2e_test_dir}
# Progress file
progress_file="${workspace}/we2e_test_results-${platform}-${SRW_COMPILER}.txt"
-./setup_WE2E_tests.sh ${platform} ${SRW_PROJECT} ${SRW_COMPILER} ${test_type} \
+/usr/bin/time -p -f '{\n "cpu": "%P"\n, "memMax": "%M"\n, "mem": {"text": "%X", "data": "%D", "swaps": "%W", "context": "%c", "waits": "%w"}\n, "pagefaults": {"major": "%F", "minor": "%R"}\n, "filesystem": {"inputs": "%I", "outputs": "%O"}\n, "time": {"real": "%e", "user": "%U", "sys": "%S"}\n}' -o ${WORKSPACE}/${SRW_PLATFORM}-${SRW_COMPILER}-time-srw_test.json \
+ ./setup_WE2E_tests.sh ${platform} ${SRW_PROJECT} ${SRW_COMPILER} ${test_type} \
--expt_basedir=${we2e_experiment_base_dir} | tee ${progress_file}
# Set exit code to number of failures
diff --git a/.cicd/scripts/wrapper_srw_ftest.sh b/.cicd/scripts/wrapper_srw_ftest.sh
index c6a4d19568..2ac31016e3 100755
--- a/.cicd/scripts/wrapper_srw_ftest.sh
+++ b/.cicd/scripts/wrapper_srw_ftest.sh
@@ -38,6 +38,10 @@ if [[ "${SRW_PLATFORM}" == jet ]]; then
sed -i '15i #SBATCH --partition=xjet' ${WORKSPACE}/${SRW_PLATFORM}/.cicd/scripts/${workflow_cmd}_srw_ftest.sh
fi
+if [[ "${TASK_DEPTH}" == 0 ]] ; then
+ exit 0
+fi
+
# Call job card and return job_id
echo "Running: ${workflow_cmd} -A ${SRW_PROJECT} ${arg_1} ${WORKSPACE}/${SRW_PLATFORM}/.cicd/scripts/${workflow_cmd}_srw_ftest.sh"
job_id=$(${workflow_cmd} -A ${SRW_PROJECT} ${arg_1} ${WORKSPACE}/${SRW_PLATFORM}/.cicd/scripts/${workflow_cmd}_srw_ftest.sh)
diff --git a/tests/WE2E/setup_WE2E_tests.sh b/tests/WE2E/setup_WE2E_tests.sh
index 309c755966..8fa0977af7 100755
--- a/tests/WE2E/setup_WE2E_tests.sh
+++ b/tests/WE2E/setup_WE2E_tests.sh
@@ -80,6 +80,7 @@ export HOME=$homedir
source ../../ush/load_modules_wflow.sh ${machine}
# Run the E2E Workflow tests
+[[ ${tests} = none ]] && echo "none" || \
./run_WE2E_tests.py \
--machine=${machine} \
--account=${account} \
From c7e093d7133cb9368059039374f9f28245a2d9f2 Mon Sep 17 00:00:00 2001
From: Michael Lueken <63728921+MichaelLueken@users.noreply.github.com>
Date: Mon, 15 Apr 2024 12:17:41 -0400
Subject: [PATCH 04/39] [develop] Update weather model hash and correct
behavior in Functional WorkflowTaskTests Jenkins stage (#1068)
* The ufs-weather-model hash has been updated to 1411b90 (April 1, 2024).
* Updated the build_hera_gnu.lua file so that it works with the updated ufs-weather-model.
* Updated the behavior of the Functional WorkflowTaskTests Jenkins stage so the test finishes properly, rather than waiting in the queue for all jobs associated with the EPIC role account to finish first (modification to .cicd/scripts/wrapper_srw_ftest.sh).
* Corrected the hang encountered while running the Functional WorkflowTaskTests stage on Gaea.
* Applied Mike Kavulich's modification to ush/bash_utils/create_symlink_to_file.sh and converted calls to the create_symlink_to_file function from named arguments to positional arguments (Issue #1066); see the sketch below.
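A minimal before/after sketch of the create_symlink_to_file calling convention, taken from the exregional_make_orog.sh hunk in this patch:

    # Old: named arguments parsed by process_args
    create_symlink_to_file target="${grid_fp_gwd}" symlink="${DATA}/${grid_fn_gwd}" \
                           relative="TRUE"
    # New: positional arguments -- target, symlink, and an optional
    # relative flag that defaults to TRUE when omitted
    create_symlink_to_file ${grid_fp_gwd} ${DATA}/${grid_fn_gwd} TRUE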
---
.cicd/scripts/wrapper_srw_ftest.sh | 8 ++-
Externals.cfg | 2 +-
modulefiles/build_hera_gnu.lua | 8 +--
scripts/exregional_make_orog.sh | 12 ++--
scripts/exregional_run_fcst.sh | 66 +++++++-----------
scripts/exregional_run_post.sh | 4 +-
ush/bash_utils/create_symlink_to_file.sh | 85 +++---------------------
7 files changed, 47 insertions(+), 138 deletions(-)
diff --git a/.cicd/scripts/wrapper_srw_ftest.sh b/.cicd/scripts/wrapper_srw_ftest.sh
index 2ac31016e3..950ceb7a34 100755
--- a/.cicd/scripts/wrapper_srw_ftest.sh
+++ b/.cicd/scripts/wrapper_srw_ftest.sh
@@ -15,17 +15,16 @@ declare arg_1
if [[ "${SRW_PLATFORM}" == cheyenne ]] || [[ "${SRW_PLATFORM}" == derecho ]]; then
workflow_cmd=qsub
arg_1=""
- check_job="qstat -u ${USER} -r ${job_id}"
else
workflow_cmd=sbatch
arg_1="--parsable"
- check_job="squeue -u ${USER} -j ${job_id} --noheader"
fi
# Customize wrapper scripts
if [[ "${SRW_PLATFORM}" == gaea ]]; then
sed -i '15i #SBATCH --clusters=c5' ${WORKSPACE}/${SRW_PLATFORM}/.cicd/scripts/${workflow_cmd}_srw_ftest.sh
sed -i 's|qos=batch|qos=normal|g' ${WORKSPACE}/${SRW_PLATFORM}/.cicd/scripts/${workflow_cmd}_srw_ftest.sh
+ sed -i 's|${JOBSdir}/JREGIONAL_RUN_POST|$USHdir/load_modules_run_task.sh "run_post" ${JOBSdir}/JREGIONAL_RUN_POST|g' ${WORKSPACE}/${SRW_PLATFORM}/ush/wrappers/run_post.sh
fi
if [[ "${SRW_PLATFORM}" == hera ]]; then
@@ -52,6 +51,11 @@ sleep 10
# Check for job and exit when done
while true
do
+ if [[ "${SRW_PLATFORM}" == derecho ]]; then
+ check_job="qstat -u ${USER} -r ${job_id}"
+ else
+ check_job="squeue -u ${USER} -j ${job_id} --noheader"
+ fi
job_id_info=$($check_job)
if [ ! -z "$job_id_info" ]; then
echo "Job is still running. Check again in two minutes"
diff --git a/Externals.cfg b/Externals.cfg
index 49ea5ffc38..9ed03cd285 100644
--- a/Externals.cfg
+++ b/Externals.cfg
@@ -12,7 +12,7 @@ protocol = git
repo_url = https://github.com/ufs-community/ufs-weather-model
# Specify either a branch name or a hash but not both.
#branch = develop
-hash = 8518c2c
+hash = 1411b90
local_path = sorc/ufs-weather-model
required = True
diff --git a/modulefiles/build_hera_gnu.lua b/modulefiles/build_hera_gnu.lua
index 5355895da9..7defa36bbf 100644
--- a/modulefiles/build_hera_gnu.lua
+++ b/modulefiles/build_hera_gnu.lua
@@ -19,7 +19,7 @@ load(pathJoin("nccmp", os.getenv("nccmp_ver") or "1.9.0.1"))
load(pathJoin("nco", os.getenv("nco_ver") or "5.0.6"))
load(pathJoin("openblas", os.getenv("openblas_ver") or "0.3.19"))
-setenv("CMAKE_C_COMPILER","mpicc")
-setenv("CMAKE_CXX_COMPILER","mpicxx")
-setenv("CMAKE_Fortran_COMPILER","mpif90")
-setenv("CMAKE_Platform","hera.gnu")
+setenv("CC", "mpicc")
+setenv("CXX", "mpic++")
+setenv("FC", "mpif90")
+setenv("CMAKE_Platform", "hera.gnu")
diff --git a/scripts/exregional_make_orog.sh b/scripts/exregional_make_orog.sh
index 47430a802d..0deac84d49 100755
--- a/scripts/exregional_make_orog.sh
+++ b/scripts/exregional_make_orog.sh
@@ -258,12 +258,9 @@ if [[ ${suites[@]} =~ "${CCPP_PHYS_SUITE}" ]] ; then
grid_fp_gwd="${FIXlam}/${grid_fn_gwd}"
ls_fn="geo_em.d01.lat-lon.2.5m.HGT_M.nc"
ss_fn="HGT.Beljaars_filtered.lat-lon.30s_res.nc"
- create_symlink_to_file target="${grid_fp_gwd}" symlink="${DATA}/${grid_fn_gwd}" \
- relative="TRUE"
- create_symlink_to_file target="${FIXam}/${ls_fn}" symlink="${DATA}/${ls_fn}" \
- relative="TRUE"
- create_symlink_to_file target="${FIXam}/${ss_fn}" symlink="${DATA}/${ss_fn}" \
- relative="TRUE"
+ create_symlink_to_file ${grid_fp_gwd} ${DATA}/${grid_fn_gwd} TRUE
+ create_symlink_to_file ${FIXam}/${ls_fn} ${DATA}/${ls_fn} TRUE
+ create_symlink_to_file ${FIXam}/${ss_fn} ${DATA}/${ss_fn} TRUE
input_redirect_fn="grid_info.dat"
cat > "${input_redirect_fn}" <<EOF
diff --git a/ush/bash_utils/create_symlink_to_file.sh b/ush/bash_utils/create_symlink_to_file.sh
--- a/ush/bash_utils/create_symlink_to_file.sh
+++ b/ush/bash_utils/create_symlink_to_file.sh
-  { save_shell_opts; set -u +x; } > /dev/null 2>&1
-#
-#-----------------------------------------------------------------------
-#
-# Get the full path to the file in which this script/function is located
-# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in
-# which the file is located (scrfunc_dir).
-#
-#-----------------------------------------------------------------------
-#
- local scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" )
- local scrfunc_fn=$( basename "${scrfunc_fp}" )
- local scrfunc_dir=$( dirname "${scrfunc_fp}" )
-#
-#-----------------------------------------------------------------------
-#
-# Get the name of this function.
-#
-#-----------------------------------------------------------------------
-#
- local func_name="${FUNCNAME[0]}"
-#
-#-----------------------------------------------------------------------
-#
# Specify the set of valid argument names for this script/function. Then
# process the arguments provided to this script/function (which should
# consist of a set of name-value pairs of the form arg1="value1", etc).
#
#-----------------------------------------------------------------------
#
- local valid_args=( \
-"target" \
-"symlink" \
-"relative" \
- )
- process_args valid_args "$@"
-#
-#-----------------------------------------------------------------------
-#
-# For debugging purposes, print out values of arguments passed to this
-# script. Note that these will be printed out only if VERBOSE is set to
-# TRUE.
-#
-#-----------------------------------------------------------------------
-#
- print_input_args valid_args
-#
-#-----------------------------------------------------------------------
-#
-# Verify that the required arguments to this function have been specified.
-# If not, print out an error message and exit.
-#
-#-----------------------------------------------------------------------
-#
- if [ -z "${target}" ]; then
- print_err_msg_exit "\
-The argument \"target\" specifying the target of the symbolic link that
-this function will create was not specified in the call to this function:
- target = \"$target\""
- fi
+if [[ $# -lt 2 ]]; then
+ usage
+ print_err_msg_exit "Function create_symlink_to_file() requires at least two arguments"
+fi
- if [ -z "${symlink}" ]; then
- print_err_msg_exit "\
-The argument \"symlink\" specifying the symbolic link that this function
-will create was not specified in the call to this function:
- symlink = \"$symlink\""
- fi
+target=$1
+symlink=$2
+relative=${3:-TRUE}
#
#-----------------------------------------------------------------------
#
@@ -106,8 +48,6 @@ will create was not specified in the call to this function:
#
#-----------------------------------------------------------------------
#
- relative=${relative:-"TRUE"}
-
valid_vals_relative=("TRUE" "true" "YES" "yes" "FALSE" "false" "NO" "no")
check_var_valid_value "relative" "valid_vals_relative"
#
@@ -148,16 +88,7 @@ not exist or is not a file:
#
#-----------------------------------------------------------------------
#
- ln_vrfy -sf ${relative_flag} "$target" "$symlink"
-#
-#-----------------------------------------------------------------------
-#
-# Restore the shell options saved at the beginning of this script/func-
-# tion.
-#
-#-----------------------------------------------------------------------
-#
- { restore_shell_opts; } > /dev/null 2>&1
+ln -sf ${relative_flag} "$target" "$symlink"
}
From 744bf17108b4ef7c48f60c819f941393f9bed1a2 Mon Sep 17 00:00:00 2001
From: RatkoVasic-NOAA <37597874+RatkoVasic-NOAA@users.noreply.github.com>
Date: Tue, 23 Apr 2024 14:05:38 -0400
Subject: [PATCH 05/39] [develop] Update nco version (#1077)
Hera with the Intel compiler was using the system-installed NCO library (version 4.9.3). This went unnoticed until the system administrators removed read permissions from the 4.9.3 installation and installed a new version (5.1.6).
Hera will now use the spack-stack-installed NCO (version 5.0.6), like all other machines/compilers.
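In the modulefile, the version comes from the nco_ver environment variable with a 5.0.6 fallback; a rough shell equivalent of that default-with-override idiom (illustrative only, not from the patch) is:

    # Load NCO at the version in $nco_ver, defaulting to 5.0.6.
    module load "nco/${nco_ver:-5.0.6}"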
---
modulefiles/build_hera_intel.lua | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/modulefiles/build_hera_intel.lua b/modulefiles/build_hera_intel.lua
index ee11e4a386..72a90d9f47 100644
--- a/modulefiles/build_hera_intel.lua
+++ b/modulefiles/build_hera_intel.lua
@@ -26,7 +26,7 @@ load(pathJoin("cmake", cmake_ver))
load("srw_common")
load(pathJoin("nccmp", os.getenv("nccmp_ver") or "1.9.0.1"))
-load(pathJoin("nco", os.getenv("nco_ver") or "4.9.3"))
+load(pathJoin("nco", os.getenv("nco_ver") or "5.0.6"))
load(pathJoin("prod_util", os.getenv("prod_util_ver") or "1.2.2"))
setenv("CMAKE_C_COMPILER","mpiicc")
From 527b242748ebe1d8924470388061a2810b69e1fb Mon Sep 17 00:00:00 2001
From: Bruce Kropp - Raytheon
<104453151+BruceKropp-Raytheon@users.noreply.github.com>
Date: Thu, 25 Apr 2024 09:43:40 -0700
Subject: [PATCH 06/39] [develop] Feature cicd scorecard metric (#1079)
* Update CI/CD scripts to include skill-score metric output so that follow-on metrics collection can display it on the metrics dashboard (see the extraction sketch after this list).
* Update Jenkinsfile to fix the post() section that calls the follow-on metrics collection job, so that it is called only once at the end, regardless of whether any platform's builds or tests fail independently.
* Update the Jenkinsfile to skip platform nodes that appear to be offline, rather than put them in the launch queue. This also means we can re-add the NOAAcloud platforms to the list of possible nodes to attempt; they will be skipped if they are not online.
* Update Jenkinsfile to include timeout limits on Build stage and Test stage, so they don't run forever.
* Update Jenkinsfile to allow seeing timestamps in the Jenkins console log.
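A short sketch of how the per-platform skill-score file is consumed, mirroring the stat_analysis post-processing in the renamed .cicd/scripts/srw_metric.sh:

    # Read the skill-score index (SS_INDEX) from the stat_analysis output;
    # values below 1.0 mean the reference outperforms the forecast.
    tmp_string=$( tail -2 "${platform,,}-${srw_compiler}-skill-score.txt" | head -1 )
    SS_INDEX=$( echo "${tmp_string}" | awk '{print $NF}' )
    echo "Skill Score: ${SS_INDEX}"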
---------
Co-authored-by: EdwardSnyder-NOAA
---
.cicd/Jenkinsfile | 49 ++++++++++++-------
.../{srw_metric_example.sh => srw_metric.sh} | 20 ++++----
.cicd/scripts/srw_test.sh | 15 +++---
3 files changed, 50 insertions(+), 34 deletions(-)
rename .cicd/scripts/{srw_metric_example.sh => srw_metric.sh} (87%)
diff --git a/.cicd/Jenkinsfile b/.cicd/Jenkinsfile
index ea87029408..1c92a1bd65 100644
--- a/.cicd/Jenkinsfile
+++ b/.cicd/Jenkinsfile
@@ -5,6 +5,8 @@ pipeline {
disableConcurrentBuilds()
overrideIndexTriggers(false)
skipDefaultCheckout(true)
+ timestamps()
+ timeout(time: 12, unit: 'HOURS')
}
parameters {
@@ -74,6 +76,11 @@ pipeline {
// Run on all platform/compiler combinations by default or build and test only on the platform(s) and
// compiler(s) specified by SRW_PLATFORM_FILTER and SRW_COMPILER_FILTER
when {
+ beforeAgent true
+ expression {
+ return nodesByLabel(env.SRW_PLATFORM).size() > 0
+ }
+
allOf {
anyOf {
expression { params.SRW_PLATFORM_FILTER == 'all' }
@@ -137,6 +144,7 @@ pipeline {
sh "STAGE_NAME=${env.STAGE_NAME} " + 'bash --login "${WORKSPACE}/${SRW_PLATFORM}/.cicd/scripts/disk_usage.sh"'
}
}
+
post {
always {
s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.SRW_PLATFORM}-*-time-srw_init.json", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: []
@@ -147,6 +155,10 @@ pipeline {
// Run the unified build script; if successful create a tarball of the build and upload to S3
stage('Build') {
+ options {
+ timeout(time: 4, unit: 'HOURS')
+ }
+
steps {
dir ("${env.SRW_PLATFORM}") {
echo "${env.STAGE_NAME} SRW (${env.SRW_COMPILER}) on ${env.SRW_PLATFORM} (using ${env.WORKSPACE}/${env.SRW_PLATFORM})"
@@ -183,6 +195,7 @@ pipeline {
environment {
TASK_DEPTH = "${env.SRW_WRAPPER_TASK_DEPTH}"
}
+
steps {
dir ("${env.SRW_PLATFORM}") {
echo "Running ${TASK_DEPTH} simple workflow script task tests on ${env.SRW_PLATFORM} (using ${env.WORKSPACE}/${env.SRW_PLATFORM})"
@@ -193,6 +206,10 @@ pipeline {
// Run the unified test script
stage('Test') {
+ options {
+ timeout(time: 8, unit: 'HOURS')
+ }
+
environment {
SRW_WE2E_EXPERIMENT_BASE_DIR = "${env.WORKSPACE}/${env.SRW_PLATFORM}/expt_dirs"
}
@@ -228,25 +245,13 @@ pipeline {
post {
success {
s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.SRW_PLATFORM}/*_test_results-*-*.txt", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false], [bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.SRW_PLATFORM}/we2e_test_logs-${env.SRW_PLATFORM}-${env.SRW_COMPILER}.tgz", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: []
+ s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.SRW_PLATFORM}/*-skill-score.txt", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: []
}
always {
s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.SRW_PLATFORM}-*-time-srw_test.json", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: []
s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.SRW_PLATFORM}-*-disk-usage${env.STAGE_NAME}.csv", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: []
// Remove the data sets from the experiments directory to conserve disk space
sh 'find "${SRW_WE2E_EXPERIMENT_BASE_DIR}" -regextype posix-extended -regex "^.*(orog|[0-9]{10})$" -type d | xargs rm -rf'
- }
- }
- }
-
- stage('Metrics') {
- steps {
- script {
- CI_BRANCH_NAME=env.JOB_BASE_NAME.replace("%2F","%252F")
- echo "Triggering job for branch ${CI_BRANCH_NAME}/${env.BUILD_NUMBER} ..."
- build job: '/ufs-srweather-app/ufs-srw-metrics', parameters: [
- string(name: 'CI_JOB_NAME', value: "ufs-srweather-app/metrics"),
- string(name: 'CI_BUILD_NUMBER', value: "${CI_BRANCH_NAME}/${env.BUILD_NUMBER}")
- ], wait: false
}
}
}
@@ -254,13 +259,23 @@ pipeline {
}
}
}
+ // end of stages{}
- // Uncomment the following block to re-enable PW clusters
- /*
post {
always {
- // Stop any Parallel Works clusters that were started during the pipeline execution
script {
+ // Trigger another job to collect all build statistics
+ CI_JOB_NAME=env.JOB_NAME.replace("/${env.JOB_BASE_NAME}","")
+ CI_BRANCH_NAME=env.JOB_BASE_NAME.replace("%2F","%252F")
+ echo "post: Triggering ufs-srweather-app/ufs-srw-metrics job for ${CI_JOB_NAME} on branch build ${CI_BRANCH_NAME}/${env.BUILD_NUMBER} ..."
+ build job: '/ufs-srweather-app/ufs-srw-metrics', parameters: [
+ string(name: 'CI_JOB_NAME', value: "${CI_JOB_NAME}"),
+ string(name: 'CI_BUILD_NUMBER', value: "${CI_BRANCH_NAME}/${env.BUILD_NUMBER}")
+ ], wait: false
+
+ // Uncomment the following block to re-enable PW clusters
+ /*
+ // Stop any Parallel Works clusters that were started during the pipeline execution
// def pw_clusters = ['pclusternoaav2use1', 'azclusternoaav2eus1', 'gclusternoaav2usc1']
def pw_clusters = ['pclusternoaav2use1']
def clusters = []
@@ -279,8 +294,8 @@ pipeline {
// PW_CLUSTER_NAME parameter
build job: 'parallel-works-jenkins-client/stop-cluster', parameters: [string(name: 'PW_CLUSTER_NAME', value: clusters[i])]
}
+ */
}
}
}
- */
}
diff --git a/.cicd/scripts/srw_metric_example.sh b/.cicd/scripts/srw_metric.sh
similarity index 87%
rename from .cicd/scripts/srw_metric_example.sh
rename to .cicd/scripts/srw_metric.sh
index 45dd30c299..cbb216c959 100755
--- a/.cicd/scripts/srw_metric_example.sh
+++ b/.cicd/scripts/srw_metric.sh
@@ -56,17 +56,17 @@ else
fi
# Test directories
-we2e_experiment_base_dir="${workspace}/../expt_dirs/metric_test"
-we2e_test_dir="${workspace}/tests/WE2E"
-we2e_test_name="grid_SUBCONUS_Ind_3km_ics_FV3GFS_lbcs_FV3GFS_suite_WoFS_v0"
+we2e_experiment_base_dir="${we2e_experiment_base_dir:=${workspace}/../expt_dirs/metric_test}"
+we2e_test_dir="${we2e_test_dir:=${workspace}/tests/WE2E}"
+we2e_test_name="${test_type:=grid_SUBCONUS_Ind_3km_ics_FV3GFS_lbcs_FV3GFS_suite_WoFS_v0}"
pwd
# Setup the build environment
declare srw_compiler
srw_compiler=${SRW_COMPILER}
-source etc/lmod-setup.sh ${platform,,}
-module use modulefiles
+source ${workspace}/etc/lmod-setup.sh ${platform,,}
+module use ${workspace}/modulefiles
module load build_${platform,,}_${srw_compiler}
# Build srw
@@ -99,7 +99,7 @@ if [[ ${RUN_STAT_ANLY_OPT} == true ]]; then
rm -rf ${workspace}/Indy-Severe-Weather/
# Check if metprd data exists locally otherwise get it from S3
TEST_EXTRN_MDL_SOURCE_BASEDIR=$(grep TEST_EXTRN_MDL_SOURCE_BASEDIR ${workspace}/ush/machine/${SRW_PLATFORM}.yaml | awk '{print $NF}')
- if [[ ! -d $(dirname ${TEST_EXTRN_MDL_SOURCE_BASEDIR})/metprd/point_stat ]] ; then
+ if [[ -d $(dirname ${TEST_EXTRN_MDL_SOURCE_BASEDIR})/metprd/point_stat ]] ; then
mkdir -p Indy-Severe-Weather/metprd/point_stat
cp -rp $(dirname ${TEST_EXTRN_MDL_SOURCE_BASEDIR})/metprd/point_stat Indy-Severe-Weather/metprd
elif [[ -f Indy-Severe-Weather.tgz ]]; then
@@ -108,7 +108,7 @@ if [[ ${RUN_STAT_ANLY_OPT} == true ]]; then
wget https://noaa-ufs-srw-pds.s3.amazonaws.com/sample_cases/release-public-v2.1.0/Indy-Severe-Weather.tgz
tar xvfz Indy-Severe-Weather.tgz
fi
- [[ -f skill-score.txt ]] && rm skill-score.txt
+ [[ -f ${platform,,}-${srw_compiler}-skill-score.txt ]] && rm ${platform,,}-${srw_compiler}-skill-score.txt
# Skill score index is computed over several terms that are defined in parm/metplus/STATAnalysisConfig_skill_score.
# It is computed by aggregating the output from earlier runs of the Point-Stat and/or Grid-Stat tools over one or more cases.
# In this example, skill score index is a weighted average of 4 skill scores of RMSE statistics for wind speed, dew point temperature,
@@ -126,15 +126,15 @@ if [[ ${RUN_STAT_ANLY_OPT} == true ]]; then
sed -i 's|--load("conda")|load("conda")|g' ${workspace}/modulefiles/tasks/${platform,,}/run_vx.local.lua
fi
# Run stat_analysis
- stat_analysis -config parm/metplus/STATAnalysisConfig_skill_score -lookin ${workspace}/Indy-Severe-Weather/metprd/point_stat -v 2 -out skill-score.txt
+ stat_analysis -config parm/metplus/STATAnalysisConfig_skill_score -lookin ${workspace}/Indy-Severe-Weather/metprd/point_stat -v 2 -out ${platform,,}-${srw_compiler}-skill-score.txt
# check skill-score.txt
- cat skill-score.txt
+ cat ${platform,,}-${srw_compiler}-skill-score.txt
# get skill-score (SS_INDEX) and check if it is significantly smaller than 1.0
# A value greater than 1.0 indicates that the forecast model outperforms the reference,
# while a value less than 1.0 indicates that the reference outperforms the forecast.
- tmp_string=$( tail -2 skill-score.txt | head -1 )
+ tmp_string=$( tail -2 ${platform,,}-${srw_compiler}-skill-score.txt | head -1 )
SS_INDEX=$(echo $tmp_string | awk -F " " '{print $NF}')
echo "Skill Score: ${SS_INDEX}"
if [[ ${SS_INDEX} < "0.700" ]]; then
diff --git a/.cicd/scripts/srw_test.sh b/.cicd/scripts/srw_test.sh
index 49db0945a9..8ed4756987 100755
--- a/.cicd/scripts/srw_test.sh
+++ b/.cicd/scripts/srw_test.sh
@@ -11,7 +11,7 @@ script_dir="$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")" > /dev/null 2>&1 && pwd)
# Get repository root from Jenkins WORKSPACE variable if set, otherwise, set
# relative to script directory.
declare workspace
-if [[ -n "${WORKSPACE}/${SRW_PLATFORM}" ]]; then
+if [[ -d "${WORKSPACE}/${SRW_PLATFORM}" ]]; then
workspace="${WORKSPACE}/${SRW_PLATFORM}"
else
workspace="$(cd -- "${script_dir}/../.." && pwd)"
@@ -26,8 +26,8 @@ else
fi
# Test directories
-we2e_experiment_base_dir="${workspace}/expt_dirs"
-we2e_test_dir="${workspace}/tests/WE2E"
+export we2e_experiment_base_dir="${workspace}/expt_dirs"
+export we2e_test_dir="${workspace}/tests/WE2E"
# Clean any stale test logs
rm -f ${workspace}/tests/WE2E/log.*
@@ -35,11 +35,11 @@ rm -f ${we2e_experiment_base_dir}/*/log.generate_FV3LAM_wflow ${we2e_experiment_
# Run the end-to-end tests.
if "${SRW_WE2E_COMPREHENSIVE_TESTS}"; then
- test_type="comprehensive"
+ export test_type="comprehensive"
else
- test_type=${SRW_WE2E_SINGLE_TEST:-"coverage"}
+ export test_type=${SRW_WE2E_SINGLE_TEST:-"coverage"}
if [[ "${test_type}" = skill-score ]]; then
- test_type="grid_SUBCONUS_Ind_3km_ics_FV3GFS_lbcs_FV3GFS_suite_WoFS_v0"
+ export test_type="grid_SUBCONUS_Ind_3km_ics_FV3GFS_lbcs_FV3GFS_suite_WoFS_v0"
fi
fi
@@ -48,7 +48,8 @@ cd ${we2e_test_dir}
progress_file="${workspace}/we2e_test_results-${platform}-${SRW_COMPILER}.txt"
/usr/bin/time -p -f '{\n "cpu": "%P"\n, "memMax": "%M"\n, "mem": {"text": "%X", "data": "%D", "swaps": "%W", "context": "%c", "waits": "%w"}\n, "pagefaults": {"major": "%F", "minor": "%R"}\n, "filesystem": {"inputs": "%I", "outputs": "%O"}\n, "time": {"real": "%e", "user": "%U", "sys": "%S"}\n}' -o ${WORKSPACE}/${SRW_PLATFORM}-${SRW_COMPILER}-time-srw_test.json \
./setup_WE2E_tests.sh ${platform} ${SRW_PROJECT} ${SRW_COMPILER} ${test_type} \
- --expt_basedir=${we2e_experiment_base_dir} | tee ${progress_file}
+ --expt_basedir=${we2e_experiment_base_dir} | tee ${progress_file}; \
+ [[ -f ${we2e_experiment_base_dir}/grid_SUBCONUS_Ind_3km_ics_FV3GFS_lbcs_FV3GFS_suite_WoFS_v0/log.generate_FV3LAM_wflow ]] && ${workspace}/.cicd/scripts/srw_metric.sh run_stat_anly
# Set exit code to number of failures
set +e
From 08537b0969d0a2aec9f1ecaf8d0d57afa7fa3d0b Mon Sep 17 00:00:00 2001
From: Emily Carpenter <137525341+elcarpenterNOAA@users.noreply.github.com>
Date: Fri, 26 Apr 2024 10:09:16 -0400
Subject: [PATCH 07/39] [develop] Replace existing UW CLI with UW API calls to
template (#1078)
This work continues the integration of the uwtools package by replacing the current use of the UW CLI with UW API calls in the Python scripts. These changes are limited to the UW template tool.
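For reference, the resulting call pattern looks like the sketch below. The paths and values here are illustrative placeholders; only the render() call with input_file, output_file, and values_src is taken from this patch:

    from uwtools.api.template import render

    # values_src accepts the in-memory dict of template values that the
    # ush scripts already build as `settings`; generate_FV3LAM_wflow.py
    # instead passes the path to a YAML values file.
    settings = {"run_dir": "/path/to/run"}  # hypothetical example values

    render(
        input_file="parm/some_template.jinja2",  # hypothetical template path
        output_file="rendered_file",             # file to write
        values_src=settings,
    )

This removes the previous round trip of writing the settings to a temporary YAML file and shelling out to "uw template render" through subprocess.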
---
environment.yml | 2 +-
ush/create_aqm_rc_file.py | 38 +++++-------------------------
ush/create_diag_table_file.py | 32 ++++---------------------
ush/create_model_configure_file.py | 32 ++++---------------------
ush/create_ufs_configure_file.py | 37 +++++------------------------
ush/generate_FV3LAM_wflow.py | 30 +++++------------------
6 files changed, 29 insertions(+), 142 deletions(-)
diff --git a/environment.yml b/environment.yml
index faeb19d466..e2dd6b8300 100644
--- a/environment.yml
+++ b/environment.yml
@@ -5,4 +5,4 @@ channels:
dependencies:
- pylint=2.17*
- pytest=7.2*
- - uwtools=2.1*
+ - uwtools=2.2*
diff --git a/ush/create_aqm_rc_file.py b/ush/create_aqm_rc_file.py
index 726e8eb0f3..739a4d9f18 100644
--- a/ush/create_aqm_rc_file.py
+++ b/ush/create_aqm_rc_file.py
@@ -6,9 +6,8 @@
import argparse
import os
import sys
-import tempfile
-from subprocess import STDOUT, CalledProcessError, check_output
from textwrap import dedent
+from uwtools.api.template import render
from python_utils import (
cfg_to_yaml_str,
@@ -124,36 +123,11 @@ def create_aqm_rc_file(cdate, run_dir, init_concentrations):
#
#-----------------------------------------------------------------------
#
- with tempfile.NamedTemporaryFile(
- dir="./",
- mode="w+t",
- prefix="aqm_rc_settings",
- suffix=".yaml") as tmpfile:
- tmpfile.write(settings_str)
- tmpfile.seek(0)
- cmd = " ".join(["uw template render",
- "-i",
- AQM_RC_TMPL_FP,
- "-o",
- aqm_rc_fp,
- "-v",
- "--values-file",
- tmpfile.name,
- ]
- )
- indent = " "
- output = ""
- try:
- output = check_output(cmd, encoding="utf=8", shell=True,
- stderr=STDOUT, text=True)
- except CalledProcessError as e:
- output = e.output
- print(f"Failed with status: {e.returncode}")
- sys.exit(1)
- finally:
- print("Output:")
- for line in output.split("\n"):
- print(f"{indent * 2}{line}")
+ render(
+ input_file = AQM_RC_TMPL_FP,
+ output_file = aqm_rc_fp,
+ values_src = settings,
+ )
return True
def parse_args(argv):
diff --git a/ush/create_diag_table_file.py b/ush/create_diag_table_file.py
index 40f5e0deee..975165dfe5 100644
--- a/ush/create_diag_table_file.py
+++ b/ush/create_diag_table_file.py
@@ -7,9 +7,8 @@
import argparse
import os
import sys
-import tempfile
-from subprocess import STDOUT, CalledProcessError, check_output
from textwrap import dedent
+from uwtools.api.template import render
from python_utils import (
cfg_to_yaml_str,
@@ -74,32 +73,11 @@ def create_diag_table_file(run_dir):
verbose=VERBOSE,
)
- with tempfile.NamedTemporaryFile(dir="./",
- mode="w+t",
- prefix="aqm_rc_settings",
- suffix=".yaml") as tmpfile:
- tmpfile.write(settings_str)
- tmpfile.seek(0)
- cmd = " ".join(["uw template render",
- "-i", DIAG_TABLE_TMPL_FP,
- "-o", diag_table_fp,
- "-v",
- "--values-file", tmpfile.name,
- ]
+ render(
+ input_file = DIAG_TABLE_TMPL_FP,
+ output_file = diag_table_fp,
+ values_src = settings,
)
- indent = " "
- output = ""
- try:
- output = check_output(cmd, encoding="utf=8", shell=True,
- stderr=STDOUT, text=True)
- except CalledProcessError as e:
- output = e.output
- print(f"Failed with status: {e.returncode}")
- sys.exit(1)
- finally:
- print("Output:")
- for line in output.split("\n"):
- print(f"{indent * 2}{line}")
return True
diff --git a/ush/create_model_configure_file.py b/ush/create_model_configure_file.py
index cd10ac404e..cd39087688 100644
--- a/ush/create_model_configure_file.py
+++ b/ush/create_model_configure_file.py
@@ -6,9 +6,8 @@
import argparse
import os
import sys
-import tempfile
from textwrap import dedent
-from subprocess import STDOUT, CalledProcessError, check_output
+from uwtools.api.template import render
from python_utils import (
cfg_to_yaml_str,
@@ -220,32 +219,11 @@ def create_model_configure_file(
#
model_config_fp = os.path.join(run_dir, MODEL_CONFIG_FN)
- with tempfile.NamedTemporaryFile(dir="./",
- mode="w+t",
- suffix=".yaml",
- prefix="model_config_settings.") as tmpfile:
- tmpfile.write(settings_str)
- tmpfile.seek(0)
- cmd = " ".join(["uw template render",
- "-i", MODEL_CONFIG_TMPL_FP,
- "-o", model_config_fp,
- "-v",
- "--values-file", tmpfile.name,
- ]
+ render(
+ input_file = MODEL_CONFIG_TMPL_FP,
+ output_file = model_config_fp,
+ values_src = settings
)
- indent = " "
- output = ""
- try:
- output = check_output(cmd, encoding="utf=8", shell=True,
- stderr=STDOUT, text=True)
- except CalledProcessError as e:
- output = e.output
- print(f"Failed with status: {e.returncode}")
- sys.exit(1)
- finally:
- print("Output:")
- for line in output.split("\n"):
- print(f"{indent * 2}{line}")
return True
diff --git a/ush/create_ufs_configure_file.py b/ush/create_ufs_configure_file.py
index 03de3e24c7..9d4ea8afa4 100644
--- a/ush/create_ufs_configure_file.py
+++ b/ush/create_ufs_configure_file.py
@@ -8,9 +8,8 @@
import argparse
import os
import sys
-import tempfile
-from subprocess import STDOUT, CalledProcessError, check_output
from textwrap import dedent
+from uwtools.api.template import render
from python_utils import (
cfg_to_yaml_str,
@@ -46,7 +45,7 @@ def create_ufs_configure_file(run_dir):
#-----------------------------------------------------------------------
#
print_info_msg(f'''
- Creating a ufs.configure file (\"{UFS_CONFIG_FN}\") in the specified
+ Creating a ufs.configure file (\"{UFS_CONFIG_FN}\") in the specified
run directory (run_dir):
run_dir = \"{run_dir}\"''', verbose=VERBOSE)
#
@@ -87,35 +86,11 @@ def create_ufs_configure_file(run_dir):
#
#-----------------------------------------------------------------------
#
- # Store the settings in a temporary file
- with tempfile.NamedTemporaryFile(dir="./",
- mode="w+t",
- prefix="ufs_config_settings",
- suffix=".yaml") as tmpfile:
- tmpfile.write(settings_str)
- tmpfile.seek(0)
-
- cmd = " ".join(["uw template render",
- "-i", UFS_CONFIG_TMPL_FP,
- "-o", ufs_config_fp,
- "-v",
- "--values-file", tmpfile.name,
- ]
+ render(
+ input_file = UFS_CONFIG_TMPL_FP,
+ output_file = ufs_config_fp,
+ values_src = settings,
)
-
- indent = " "
- output = ""
- try:
- output = check_output(cmd, encoding="utf=8", shell=True,
- stderr=STDOUT, text=True)
- except CalledProcessError as e:
- output = e.output
- print(f"Failed with status: {e.returncode}")
- sys.exit(1)
- finally:
- print("Output:")
- for line in output.split("\n"):
- print(f"{indent * 2}{line}")
return True
def parse_args(argv):
diff --git a/ush/generate_FV3LAM_wflow.py b/ush/generate_FV3LAM_wflow.py
index ec2b95c3f3..ba0e9f3a2b 100755
--- a/ush/generate_FV3LAM_wflow.py
+++ b/ush/generate_FV3LAM_wflow.py
@@ -11,10 +11,10 @@
import logging
import os
import sys
-from subprocess import STDOUT, CalledProcessError, check_output
from textwrap import dedent
from uwtools.api.config import get_nml_config, get_yaml_config, realize
+from uwtools.api.template import render
from python_utils import (
log_info,
@@ -112,29 +112,11 @@ def generate_FV3LAM_wflow(
# Call the python script to generate the experiment's XML file
#
rocoto_yaml_fp = expt_config["workflow"]["ROCOTO_YAML_FP"]
- cmd = " ".join(["uw template render",
- "-i", template_xml_fp,
- "-o", wflow_xml_fp,
- "-v",
- "--values-file", rocoto_yaml_fp,
- ]
- )
-
- indent = " "
- output = ""
- logfunc = logging.info
- try:
- output = check_output(cmd, encoding="utf=8", shell=True,
- stderr=STDOUT, text=True)
- except CalledProcessError as e:
- logfunc = logging.error
- output = e.output
- logging.exception(("Failed with status: %s", e.returncode))
- raise
- finally:
- logfunc("Output:")
- for line in output.split("\n"):
- logfunc("%s%s", indent * 2, line)
+ render(
+ input_file = template_xml_fp,
+ output_file = wflow_xml_fp,
+ values_src = rocoto_yaml_fp,
+ )
#
# -----------------------------------------------------------------------
#
From eea4c29e8ffea4daa487a675fc70d22668414cc7 Mon Sep 17 00:00:00 2001
From: Michael Lueken <63728921+MichaelLueken@users.noreply.github.com>
Date: Tue, 30 Apr 2024 08:53:57 -0400
Subject: [PATCH 08/39] [develop] Update weather model hash and remove "_vrfy"
from bash commands (#1074)
The weather model hash has been updated to 4f32a4b (April 15).
Additionally, the _vrfy suffix has been removed from the cd, cp, ln, mkdir, mv, and rm bash commands in jobs, scripts, ush, and ush/bash_utils. These wrapper commands don't function as intended (issue #861) and aren't accepted by NCO (issue #1021).
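As a minimal before/after sketch of the pattern applied throughout this patch (${DATA} is a placeholder directory):

    # Before: wrapper functions that ran the command and verified its exit status
    mkdir_vrfy -p "${DATA}"
    cd_vrfy "${DATA}"

    # After: plain commands; failures are left to the scripts' existing
    # error handling rather than the removed filesys_cmd_vrfy machinery
    mkdir -p "${DATA}"
    cd "${DATA}"

The wrapper definitions in ush/bash_utils/filesys_cmds_vrfy.sh are deleted, and the file is no longer sourced from ush/source_util_funcs.sh.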
---
.cicd/scripts/srw_metric.sh | 2 +-
Externals.cfg | 2 +-
jobs/JREGIONAL_CHECK_POST_OUTPUT | 2 +-
jobs/JREGIONAL_GET_EXTRN_MDL_FILES | 4 +-
jobs/JREGIONAL_MAKE_GRID | 4 +-
jobs/JREGIONAL_MAKE_ICS | 6 +-
jobs/JREGIONAL_MAKE_LBCS | 6 +-
jobs/JREGIONAL_MAKE_SFC_CLIMO | 4 +-
jobs/JREGIONAL_RUN_FCST | 4 +-
jobs/JREGIONAL_RUN_POST | 6 +-
jobs/JREGIONAL_RUN_PRDGEN | 6 +-
scripts/exregional_make_grid.sh | 16 +-
scripts/exregional_make_ics.sh | 14 +-
scripts/exregional_make_lbcs.sh | 2 +-
scripts/exregional_make_orog.sh | 48 +--
scripts/exregional_make_sfc_climo.sh | 8 +-
scripts/exregional_run_fcst.sh | 58 ++--
...onal_run_met_genensprod_or_ensemblestat.sh | 2 +-
...gional_run_met_gridstat_or_pointstat_vx.sh | 2 +-
...un_met_gridstat_or_pointstat_vx_ensmean.sh | 2 +-
...un_met_gridstat_or_pointstat_vx_ensprob.sh | 2 +-
scripts/exregional_run_met_pb2nc_obs.sh | 2 +-
scripts/exregional_run_met_pcpcombine.sh | 2 +-
scripts/exregional_run_post.sh | 32 +-
scripts/exregional_run_prdgen.sh | 18 +-
ush/bash_utils/check_for_preexist_dir_file.sh | 4 +-
ush/bash_utils/create_symlink_to_file.sh | 6 -
ush/bash_utils/filesys_cmds_vrfy.sh | 280 ------------------
ush/get_mrms_files.sh | 2 +-
ush/job_preamble.sh | 6 +-
ush/launch_FV3LAM_wflow.sh | 2 +-
ush/source_util_funcs.sh | 10 -
32 files changed, 134 insertions(+), 430 deletions(-)
delete mode 100644 ush/bash_utils/filesys_cmds_vrfy.sh
diff --git a/.cicd/scripts/srw_metric.sh b/.cicd/scripts/srw_metric.sh
index cbb216c959..e645a2c916 100755
--- a/.cicd/scripts/srw_metric.sh
+++ b/.cicd/scripts/srw_metric.sh
@@ -58,7 +58,7 @@ fi
# Test directories
we2e_experiment_base_dir="${we2e_experiment_base_dir:=${workspace}/../expt_dirs/metric_test}"
we2e_test_dir="${we2e_test_dir:=${workspace}/tests/WE2E}"
-we2e_test_name="${test_type:=grid_SUBCONUS_Ind_3km_ics_FV3GFS_lbcs_FV3GFS_suite_WoFS_v0}"
+we2e_test_name="grid_SUBCONUS_Ind_3km_ics_FV3GFS_lbcs_FV3GFS_suite_WoFS_v0"
pwd
diff --git a/Externals.cfg b/Externals.cfg
index 9ed03cd285..c76f7d8845 100644
--- a/Externals.cfg
+++ b/Externals.cfg
@@ -12,7 +12,7 @@ protocol = git
repo_url = https://github.com/ufs-community/ufs-weather-model
# Specify either a branch name or a hash but not both.
#branch = develop
-hash = 1411b90
+hash = 4f32a4b
local_path = sorc/ufs-weather-model
required = True
diff --git a/jobs/JREGIONAL_CHECK_POST_OUTPUT b/jobs/JREGIONAL_CHECK_POST_OUTPUT
index 2b1fe69bbb..a6403ebe1f 100755
--- a/jobs/JREGIONAL_CHECK_POST_OUTPUT
+++ b/jobs/JREGIONAL_CHECK_POST_OUTPUT
@@ -78,7 +78,7 @@ Call to ex-script corresponding to J-job \"${scrfunc_fn}\" failed."
#
ensmem_name="mem${ENSMEM_INDX}"
cycle_dir="$EXPTDIR/$CDATE"
-mkdir_vrfy -p "${cycle_dir}"
+mkdir -p "${cycle_dir}"
touch "${cycle_dir}/post_files_exist_${ensmem_name}.txt"
#
#-----------------------------------------------------------------------
diff --git a/jobs/JREGIONAL_GET_EXTRN_MDL_FILES b/jobs/JREGIONAL_GET_EXTRN_MDL_FILES
index 8efd332dd9..80366f0ddc 100755
--- a/jobs/JREGIONAL_GET_EXTRN_MDL_FILES
+++ b/jobs/JREGIONAL_GET_EXTRN_MDL_FILES
@@ -222,8 +222,8 @@ if [ $RUN_ENVIR = "nco" ]; then
export EXTRN_MDL_STAGING_DIR="${EXTRN_MDL_STAGING_DIR:-${DATA}}"
else
export EXTRN_MDL_STAGING_DIR="${COMIN}/${EXTRN_MDL_NAME}/for_${ICS_OR_LBCS}"
- mkdir_vrfy -p "${EXTRN_MDL_STAGING_DIR}"
- cd_vrfy "${EXTRN_MDL_STAGING_DIR}"
+ mkdir -p "${EXTRN_MDL_STAGING_DIR}"
+ cd "${EXTRN_MDL_STAGING_DIR}"
fi
#
#-----------------------------------------------------------------------
diff --git a/jobs/JREGIONAL_MAKE_GRID b/jobs/JREGIONAL_MAKE_GRID
index 844d782bc7..8d65540d1c 100755
--- a/jobs/JREGIONAL_MAKE_GRID
+++ b/jobs/JREGIONAL_MAKE_GRID
@@ -153,7 +153,7 @@ This is the J-job script for the task that generates grid files.
#-----------------------------------------------------------------------
#
check_for_preexist_dir_file "${GRID_DIR}" "${PREEXISTING_DIR_METHOD}"
-mkdir_vrfy -p "${GRID_DIR}"
+mkdir -p "${GRID_DIR}"
#
#-----------------------------------------------------------------------
#
@@ -162,7 +162,7 @@ mkdir_vrfy -p "${GRID_DIR}"
#-----------------------------------------------------------------------
#
DATA="${DATA:-${GRID_DIR}/tmp}"
-mkdir_vrfy -p "$DATA"
+mkdir -p "$DATA"
#
#-----------------------------------------------------------------------
#
diff --git a/jobs/JREGIONAL_MAKE_ICS b/jobs/JREGIONAL_MAKE_ICS
index 70306c0a87..c4fb429f1b 100755
--- a/jobs/JREGIONAL_MAKE_ICS
+++ b/jobs/JREGIONAL_MAKE_ICS
@@ -60,7 +60,7 @@ if [ $RUN_ENVIR = "nco" ]; then
else
export INPUT_DATA="${COMIN}${SLASH_ENSMEM_SUBDIR}/INPUT"
fi
-mkdir_vrfy -p "${INPUT_DATA}"
+mkdir -p "${INPUT_DATA}"
#
#
#-----------------------------------------------------------------------
@@ -72,8 +72,8 @@ mkdir_vrfy -p "${INPUT_DATA}"
if [ $RUN_ENVIR = "community" ]; then
DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}/tmp_MAKE_ICS}"
check_for_preexist_dir_file "$DATA" "delete"
- mkdir_vrfy -p $DATA
- cd_vrfy $DATA
+ mkdir -p $DATA
+ cd $DATA
fi
#
#-----------------------------------------------------------------------
diff --git a/jobs/JREGIONAL_MAKE_LBCS b/jobs/JREGIONAL_MAKE_LBCS
index 16ac382fee..81e2578fd4 100755
--- a/jobs/JREGIONAL_MAKE_LBCS
+++ b/jobs/JREGIONAL_MAKE_LBCS
@@ -60,7 +60,7 @@ if [ $RUN_ENVIR = "nco" ]; then
else
export INPUT_DATA="${COMIN}${SLASH_ENSMEM_SUBDIR}/INPUT"
fi
-mkdir_vrfy -p "${INPUT_DATA}"
+mkdir -p "${INPUT_DATA}"
#
#-----------------------------------------------------------------------
#
@@ -71,8 +71,8 @@ mkdir_vrfy -p "${INPUT_DATA}"
if [ "${RUN_ENVIR}" = "community" ]; then
DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}/tmp_MAKE_LBCS}"
check_for_preexist_dir_file "$DATA" "delete"
- mkdir_vrfy -p $DATA
- cd_vrfy $DATA
+ mkdir -p $DATA
+ cd $DATA
fi
#
#-----------------------------------------------------------------------
diff --git a/jobs/JREGIONAL_MAKE_SFC_CLIMO b/jobs/JREGIONAL_MAKE_SFC_CLIMO
index eee25b193a..7cbd0cc23e 100755
--- a/jobs/JREGIONAL_MAKE_SFC_CLIMO
+++ b/jobs/JREGIONAL_MAKE_SFC_CLIMO
@@ -55,7 +55,7 @@ climatology.
#-----------------------------------------------------------------------
#
check_for_preexist_dir_file "${SFC_CLIMO_DIR}" "${PREEXISTING_DIR_METHOD}"
-mkdir_vrfy -p "${SFC_CLIMO_DIR}"
+mkdir -p "${SFC_CLIMO_DIR}"
#
#-----------------------------------------------------------------------
#
@@ -66,7 +66,7 @@ mkdir_vrfy -p "${SFC_CLIMO_DIR}"
DATA="${DATA:-${SFC_CLIMO_DIR}/tmp}"
if [ $RUN_ENVIR != "nco" ]; then
check_for_preexist_dir_file "$DATA" "delete"
- mkdir_vrfy $DATA
+ mkdir $DATA
fi
#
#-----------------------------------------------------------------------
diff --git a/jobs/JREGIONAL_RUN_FCST b/jobs/JREGIONAL_RUN_FCST
index a7f7c96031..45f826c0d7 100755
--- a/jobs/JREGIONAL_RUN_FCST
+++ b/jobs/JREGIONAL_RUN_FCST
@@ -76,8 +76,8 @@ fi
#
#-----------------------------------------------------------------------
#
-mkdir_vrfy -p ${DATA}/INPUT
-mkdir_vrfy -p ${DATA}/RESTART
+mkdir -p ${DATA}/INPUT
+mkdir -p ${DATA}/RESTART
#
#-----------------------------------------------------------------------
#
diff --git a/jobs/JREGIONAL_RUN_POST b/jobs/JREGIONAL_RUN_POST
index 97b100967c..692b3ae65d 100755
--- a/jobs/JREGIONAL_RUN_POST
+++ b/jobs/JREGIONAL_RUN_POST
@@ -85,7 +85,7 @@ fi
if [ "${RUN_ENVIR}" = "community" ]; then
DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}}"
export COMOUT="${DATA}/postprd"
- mkdir_vrfy -p "${COMOUT}"
+ mkdir -p "${COMOUT}"
fi
if [ "${SUB_HOURLY_POST}" = "TRUE" ]; then
@@ -94,9 +94,9 @@ else
export DATA_FHR="${DATA:-$COMOUT}/$fhr"
fi
check_for_preexist_dir_file "${DATA_FHR}" "delete"
-mkdir_vrfy -p "${DATA_FHR}"
+mkdir -p "${DATA_FHR}"
-cd_vrfy "${DATA_FHR}"
+cd "${DATA_FHR}"
#
#-----------------------------------------------------------------------
#
diff --git a/jobs/JREGIONAL_RUN_PRDGEN b/jobs/JREGIONAL_RUN_PRDGEN
index 2d30ced9db..24479cb62d 100755
--- a/jobs/JREGIONAL_RUN_PRDGEN
+++ b/jobs/JREGIONAL_RUN_PRDGEN
@@ -84,7 +84,7 @@ DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}}"
if [ "${RUN_ENVIR}" = "community" ]; then
export COMOUT="${DATA}/postprd"
fi
-mkdir_vrfy -p "${COMOUT}"
+mkdir -p "${COMOUT}"
# subhourly post
if [ "${SUB_HOURLY_POST}" = "TRUE" ]; then
@@ -93,9 +93,9 @@ else
export DATA_FHR="${DATA:-$COMOUT}/$fhr"
fi
check_for_preexist_dir_file "${DATA_FHR}" "delete"
-mkdir_vrfy -p "${DATA_FHR}"
+mkdir -p "${DATA_FHR}"
-cd_vrfy "${DATA_FHR}"
+cd "${DATA_FHR}"
#
#-----------------------------------------------------------------------
#
diff --git a/scripts/exregional_make_grid.sh b/scripts/exregional_make_grid.sh
index 1f95ea8f91..c1876651d8 100755
--- a/scripts/exregional_make_grid.sh
+++ b/scripts/exregional_make_grid.sh
@@ -196,7 +196,7 @@ fi
#
# Change location to the temporary (work) directory.
#
-cd_vrfy "$DATA"
+cd "$DATA"
print_info_msg "$VERBOSE" "
Starting grid file generation..."
@@ -313,7 +313,7 @@ fi
# to the original directory.
#
grid_fp="$DATA/${grid_fn}"
-cd_vrfy -
+cd -
print_info_msg "$VERBOSE" "
Grid file generation completed successfully."
@@ -392,7 +392,7 @@ set_file_param "${GLOBAL_VAR_DEFNS_FP}" "CRES" "'$CRES'"
grid_fp_orig="${grid_fp}"
grid_fn="${CRES}${DOT_OR_USCORE}grid.tile${TILE_RGNL}.halo${NHW}.nc"
grid_fp="${GRID_DIR}/${grid_fn}"
-mv_vrfy "${grid_fp_orig}" "${grid_fp}"
+mv "${grid_fp_orig}" "${grid_fp}"
#
#-----------------------------------------------------------------------
#
@@ -449,7 +449,7 @@ unshaved_fp="${grid_fp}"
# Once it is complete, we will move the resultant file from DATA to
# GRID_DIR.
#
-cd_vrfy "$DATA"
+cd "$DATA"
#
# Create an input namelist file for the shave executable to generate a
# grid file with a 3-cell-wide halo from the one with a wide halo. Then
@@ -477,7 +477,7 @@ The namelist file (nml_fn) used in this call is in directory DATA:
nml_fn = \"${nml_fn}\"
DATA = \"${DATA}\""
POST_STEP
-mv_vrfy ${shaved_fp} ${GRID_DIR}
+mv ${shaved_fp} ${GRID_DIR}
#
# Create an input namelist file for the shave executable to generate a
# grid file with a 4-cell-wide halo from the one with a wide halo. Then
@@ -505,7 +505,7 @@ The namelist file (nml_fn) used in this call is in directory DATA:
nml_fn = \"${nml_fn}\"
DATA = \"${DATA}\""
POST_STEP
-mv_vrfy ${shaved_fp} ${GRID_DIR}
+mv ${shaved_fp} ${GRID_DIR}
#
# Create an input namelist file for the shave executable to generate a
# grid file without halo from the one with a wide halo. Then
@@ -532,11 +532,11 @@ The namelist file (nml_fn) used in this call is in directory DATA:
nml_fn = \"${nml_fn}\"
DATA = \"${DATA}\""
POST_STEP
-mv_vrfy ${shaved_fp} ${GRID_DIR}
+mv ${shaved_fp} ${GRID_DIR}
#
# Change location to the original directory.
#
-cd_vrfy -
+cd -
#
#-----------------------------------------------------------------------
#
diff --git a/scripts/exregional_make_ics.sh b/scripts/exregional_make_ics.sh
index 84d73696eb..875249b107 100755
--- a/scripts/exregional_make_ics.sh
+++ b/scripts/exregional_make_ics.sh
@@ -655,10 +655,10 @@ if [ "${CPL_AQM}" = "TRUE" ]; then
cp -p gfs_ctrl.nc "${COMOUT}/${NET}.${cycle}${dot_ensmem}.gfs_ctrl.nc"
cp -p gfs.bndy.nc "${DATA_SHARE}/${NET}.${cycle}${dot_ensmem}.gfs_bndy.tile${TILE_RGNL}.f000.nc"
else
- mv_vrfy out.atm.tile${TILE_RGNL}.nc ${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.gfs_data.tile${TILE_RGNL}.halo${NH0}.nc
- mv_vrfy out.sfc.tile${TILE_RGNL}.nc ${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.sfc_data.tile${TILE_RGNL}.halo${NH0}.nc
- mv_vrfy gfs_ctrl.nc ${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.gfs_ctrl.nc
- mv_vrfy gfs.bndy.nc ${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.gfs_bndy.tile${TILE_RGNL}.f000.nc
+ mv out.atm.tile${TILE_RGNL}.nc ${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.gfs_data.tile${TILE_RGNL}.halo${NH0}.nc
+ mv out.sfc.tile${TILE_RGNL}.nc ${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.sfc_data.tile${TILE_RGNL}.halo${NH0}.nc
+ mv gfs_ctrl.nc ${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.gfs_ctrl.nc
+ mv gfs.bndy.nc ${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.gfs_bndy.tile${TILE_RGNL}.f000.nc
fi
#
#-----------------------------------------------------------------------
@@ -684,7 +684,7 @@ Please ensure that you've built this executable."
print_err_msg_exit "${message_txt}"
fi
fi
- cp_vrfy ${fvcom_exec_fp} ${INPUT_DATA}/.
+ cp ${fvcom_exec_fp} ${INPUT_DATA}/.
fvcom_data_fp="${FVCOM_DIR}/${FVCOM_FILE}"
if [ ! -f "${fvcom_data_fp}" ]; then
message_txt="The file or path (fvcom_data_fp) does not exist:
@@ -699,8 +699,8 @@ Please check the following user defined variables:
fi
fi
- cp_vrfy ${fvcom_data_fp} ${INPUT_DATA}/fvcom.nc
- cd_vrfy ${INPUT_DATA}
+ cp ${fvcom_data_fp} ${INPUT_DATA}/fvcom.nc
+ cd ${INPUT_DATA}
PREP_STEP
eval ${RUN_CMD_UTILS} ${fvcom_exec_fn} \
${NET}.${cycle}${dot_ensmem}.sfc_data.tile${TILE_RGNL}.halo${NH0}.nc fvcom.nc ${FVCOM_WCSTART} ${fvcom_time} \
diff --git a/scripts/exregional_make_lbcs.sh b/scripts/exregional_make_lbcs.sh
index ca3f6401cb..5a2d24bcea 100755
--- a/scripts/exregional_make_lbcs.sh
+++ b/scripts/exregional_make_lbcs.sh
@@ -562,7 +562,7 @@ located in the following directory:
if [ "${CPL_AQM}" = "TRUE" ]; then
cp -p gfs.bndy.nc ${DATA_SHARE}/${NET}.${cycle}${dot_ensmem}.gfs_bndy.tile7.f${fcst_hhh_FV3LAM}.nc
else
- mv_vrfy gfs.bndy.nc ${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.gfs_bndy.tile7.f${fcst_hhh_FV3LAM}.nc
+ mv gfs.bndy.nc ${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.gfs_bndy.tile7.f${fcst_hhh_FV3LAM}.nc
fi
fi
diff --git a/scripts/exregional_make_orog.sh b/scripts/exregional_make_orog.sh
index 0deac84d49..9a3d5da7fc 100755
--- a/scripts/exregional_make_orog.sh
+++ b/scripts/exregional_make_orog.sh
@@ -85,16 +85,16 @@ fi
#-----------------------------------------------------------------------
#
check_for_preexist_dir_file "${OROG_DIR}" "${PREEXISTING_DIR_METHOD}"
-mkdir_vrfy -p "${OROG_DIR}"
+mkdir -p "${OROG_DIR}"
raw_dir="${OROG_DIR}/raw_topo"
-mkdir_vrfy -p "${raw_dir}"
+mkdir -p "${raw_dir}"
filter_dir="${OROG_DIR}/filtered_topo"
-mkdir_vrfy -p "${filter_dir}"
+mkdir -p "${filter_dir}"
shave_dir="${OROG_DIR}/shave_tmp"
-mkdir_vrfy -p "${shave_dir}"
+mkdir -p "${shave_dir}"
#
#
#-----------------------------------------------------------------------
@@ -119,15 +119,15 @@ fi
# file and change location to it.
#
DATA="${DATA:-${raw_dir}/tmp}"
-mkdir_vrfy -p "${DATA}"
-cd_vrfy "${DATA}"
+mkdir -p "${DATA}"
+cd "${DATA}"
#
# Copy topography and related data files from the system directory (FIXorg)
# to the temporary directory.
#
-cp_vrfy ${FIXorg}/thirty.second.antarctic.new.bin fort.15
-cp_vrfy ${FIXorg}/landcover30.fixed .
-cp_vrfy ${FIXorg}/gmted2010.30sec.int fort.235
+cp ${FIXorg}/thirty.second.antarctic.new.bin fort.15
+cp ${FIXorg}/landcover30.fixed .
+cp ${FIXorg}/gmted2010.30sec.int fort.235
#
#-----------------------------------------------------------------------
#
@@ -221,7 +221,7 @@ POST_STEP
#
# Change location to the original directory.
#
-cd_vrfy -
+cd -
#
#-----------------------------------------------------------------------
#
@@ -236,7 +236,7 @@ raw_orog_fn_prefix="${CRES}${DOT_OR_USCORE}raw_orog"
fn_suffix_with_halo="tile${TILE_RGNL}.halo${NHW}.nc"
raw_orog_fn="${raw_orog_fn_prefix}.${fn_suffix_with_halo}"
raw_orog_fp="${raw_dir}/${raw_orog_fn}"
-mv_vrfy "${raw_orog_fp_orig}" "${raw_orog_fp}"
+mv "${raw_orog_fp_orig}" "${raw_orog_fp}"
#
#-----------------------------------------------------------------------
#
@@ -249,8 +249,8 @@ mv_vrfy "${raw_orog_fp_orig}" "${raw_orog_fp}"
suites=( "FV3_RAP" "FV3_HRRR" "FV3_GFS_v15_thompson_mynn_lam3km" "FV3_GFS_v17_p8" )
if [[ ${suites[@]} =~ "${CCPP_PHYS_SUITE}" ]] ; then
DATA="${DATA:-${OROG_DIR}/temp_orog_data}"
- mkdir_vrfy -p ${DATA}
- cd_vrfy ${DATA}
+ mkdir -p ${DATA}
+ cd ${DATA}
mosaic_fn_gwd="${CRES}${DOT_OR_USCORE}mosaic.halo${NH4}.nc"
mosaic_fp_gwd="${FIXlam}/${mosaic_fn_gwd}"
grid_fn_gwd=$( get_charvar_from_netcdf "${mosaic_fp_gwd}" "gridfiles" ) || \
@@ -290,9 +290,9 @@ returned with nonzero exit code:
exec_fp = \"${exec_fp}\""
POST_STEP
- mv_vrfy "${CRES}${DOT_OR_USCORE}oro_data_ss.tile${TILE_RGNL}.halo${NH0}.nc" \
- "${CRES}${DOT_OR_USCORE}oro_data_ls.tile${TILE_RGNL}.halo${NH0}.nc" \
- "${OROG_DIR}"
+ mv "${CRES}${DOT_OR_USCORE}oro_data_ss.tile${TILE_RGNL}.halo${NH0}.nc" \
+ "${CRES}${DOT_OR_USCORE}oro_data_ls.tile${TILE_RGNL}.halo${NH0}.nc" \
+ "${OROG_DIR}"
fi
#
@@ -390,7 +390,7 @@ fn_suffix_without_halo="tile${TILE_RGNL}.nc"
filtered_orog_fn_prefix="${CRES}${DOT_OR_USCORE}filtered_orog"
filtered_orog_fp_prefix="${filter_dir}/${filtered_orog_fn_prefix}"
filtered_orog_fp="${filtered_orog_fp_prefix}.${fn_suffix_without_halo}"
-cp_vrfy "${raw_orog_fp}" "${filtered_orog_fp}"
+cp "${raw_orog_fp}" "${filtered_orog_fp}"
#
# The orography filtering executable looks for the grid file specified
# in the grid mosaic file (more specifically, specified by the gridfiles
@@ -424,7 +424,7 @@ EOF
# in which it is located). Thus, since above we created the input.nml
# file in filter_dir, we must also run the executable out of this directory.
#
-cd_vrfy "${filter_dir}"
+cd "${filter_dir}"
#
# Run the orography filtering executable.
#
@@ -444,11 +444,11 @@ POST_STEP
filtered_orog_fn_orig=$( basename "${filtered_orog_fp}" )
filtered_orog_fn="${filtered_orog_fn_prefix}.${fn_suffix_with_halo}"
filtered_orog_fp=$( dirname "${filtered_orog_fp}" )"/${filtered_orog_fn}"
-mv_vrfy "${filtered_orog_fn_orig}" "${filtered_orog_fn}"
+mv "${filtered_orog_fn_orig}" "${filtered_orog_fn}"
#
# Change location to the original directory.
#
-cd_vrfy -
+cd -
print_info_msg "$VERBOSE" "
Filtering of orography complete."
@@ -485,7 +485,7 @@ unshaved_fp="${filtered_orog_fp}"
# We perform the work in shave_dir, so change location to that directory.
# Once it is complete, we move the resultant file from shave_dir to OROG_DIR.
#
-cd_vrfy "${shave_dir}"
+cd "${shave_dir}"
#
# Create an input namelist file for the shave executable to generate an
# orography file without a halo from the one with a wide halo. Then call
@@ -513,7 +513,7 @@ The namelist file (nml_fn) used in this call is in directory shave_dir:
nml_fn = \"${nml_fn}\"
shave_dir = \"${shave_dir}\""
POST_STEP
-mv_vrfy ${shaved_fp} ${OROG_DIR}
+mv ${shaved_fp} ${OROG_DIR}
#
# Create an input namelist file for the shave executable to generate an
# orography file with a 4-cell-wide halo from the one with a wide halo.
@@ -541,11 +541,11 @@ The namelist file (nml_fn) used in this call is in directory shave_dir:
nml_fn = \"${nml_fn}\"
shave_dir = \"${shave_dir}\""
POST_STEP
-mv_vrfy "${shaved_fp}" "${OROG_DIR}"
+mv "${shaved_fp}" "${OROG_DIR}"
#
# Change location to the original directory.
#
-cd_vrfy -
+cd -
#
#-----------------------------------------------------------------------
#
diff --git a/scripts/exregional_make_sfc_climo.sh b/scripts/exregional_make_sfc_climo.sh
index 868029a488..c4ee8f25b1 100755
--- a/scripts/exregional_make_sfc_climo.sh
+++ b/scripts/exregional_make_sfc_climo.sh
@@ -70,7 +70,7 @@ ulimit -s unlimited
#
#-----------------------------------------------------------------------
#
-cd_vrfy $DATA
+cd $DATA
#
#-----------------------------------------------------------------------
#
@@ -162,7 +162,7 @@ case "$GTYPE" in
#
for fn in *.nc; do
if [[ -f $fn ]]; then
- mv_vrfy $fn ${SFC_CLIMO_DIR}/${CRES}_${fn}
+ mv $fn ${SFC_CLIMO_DIR}/${CRES}_${fn}
fi
done
;;
@@ -181,7 +181,7 @@ case "$GTYPE" in
for fn in *.halo.nc; do
if [ -f $fn ]; then
bn="${fn%.halo.nc}"
- mv_vrfy $fn ${SFC_CLIMO_DIR}/${CRES}.${bn}.halo${NH4}.nc
+ mv $fn ${SFC_CLIMO_DIR}/${CRES}.${bn}.halo${NH4}.nc
fi
done
#
@@ -194,7 +194,7 @@ case "$GTYPE" in
for fn in *.nc; do
if [ -f $fn ]; then
bn="${fn%.nc}"
- mv_vrfy $fn ${SFC_CLIMO_DIR}/${CRES}.${bn}.halo${NH0}.nc
+ mv $fn ${SFC_CLIMO_DIR}/${CRES}.${bn}.halo${NH0}.nc
fi
done
;;
diff --git a/scripts/exregional_run_fcst.sh b/scripts/exregional_run_fcst.sh
index b9f6e3ac32..f769d4e225 100755
--- a/scripts/exregional_run_fcst.sh
+++ b/scripts/exregional_run_fcst.sh
@@ -98,7 +98,7 @@ Creating links in the INPUT subdirectory of the current run directory to
the grid and (filtered) orography files ..."
# Create links to fix files in the FIXlam directory.
-cd_vrfy ${DATA}/INPUT
+cd ${DATA}/INPUT
#
# For experiments in which the TN_MAKE_GRID task is run, we make the
@@ -219,7 +219,7 @@ of the current run directory (DATA), where
DATA = \"${DATA}\"
..."
-cd_vrfy ${DATA}/INPUT
+cd ${DATA}/INPUT
#
# The symlinks to be created point to files in the same directory (INPUT),
@@ -288,7 +288,7 @@ fi
#
#-----------------------------------------------------------------------
#
-cd_vrfy ${DATA}
+cd ${DATA}
print_info_msg "$VERBOSE" "
Creating links in the current run directory (DATA) to fixed (i.e.
@@ -360,8 +360,8 @@ fi
#
#-----------------------------------------------------------------------
#
-cd_vrfy ${DATA}
-rm_vrfy -f time_stamp.out
+cd ${DATA}
+rm -f time_stamp.out
#
#-----------------------------------------------------------------------
#
@@ -398,7 +398,7 @@ create_symlink_to_file ${FIELD_TABLE_FP} ${DATA}/${FIELD_TABLE_FN} ${relative_li
create_symlink_to_file ${FIELD_DICT_FP} ${DATA}/${FIELD_DICT_FN} ${relative_link_flag}
if [ ${WRITE_DOPOST} = "TRUE" ]; then
- cp_vrfy ${PARMdir}/upp/nam_micro_lookup.dat ./eta_micro_lookup.dat
+ cp ${PARMdir}/upp/nam_micro_lookup.dat ./eta_micro_lookup.dat
if [ ${USE_CUSTOM_POST_CONFIG_FILE} = "TRUE" ]; then
post_config_fp="${CUSTOM_POST_CONFIG_FP}"
print_info_msg "
@@ -416,9 +416,9 @@ if [ ${WRITE_DOPOST} = "TRUE" ]; then
post_config_fp = \"${post_config_fp}\"
===================================================================="
fi
- cp_vrfy ${post_config_fp} ./postxconfig-NT_FH00.txt
- cp_vrfy ${post_config_fp} ./postxconfig-NT.txt
- cp_vrfy ${PARMdir}/upp/params_grib2_tbl_new .
+ cp ${post_config_fp} ./postxconfig-NT_FH00.txt
+ cp ${post_config_fp} ./postxconfig-NT.txt
+ cp ${PARMdir}/upp/params_grib2_tbl_new .
# Set itag for inline-post:
if [ "${CPL_AQM}" = "TRUE" ]; then
post_itag_add="aqf_on=.true.,"
@@ -445,7 +445,7 @@ fi
#----------------------------------------------------------------------
#
-cp_vrfy ${CCPP_PHYS_DIR}/noahmptable.tbl .
+cp ${CCPP_PHYS_DIR}/noahmptable.tbl .
#
#-----------------------------------------------------------------------
@@ -460,9 +460,9 @@ if ([ "${DO_SPP}" = "TRUE" ] || [ "${DO_SPPT}" = "TRUE" ] || [ "${DO_SHUM}" = "T
STOCH="TRUE"
fi
if [ "${STOCH}" == "TRUE" ]; then
- cp_vrfy ${FV3_NML_STOCH_FP} ${DATA}/${FV3_NML_FN}
+ cp ${FV3_NML_STOCH_FP} ${DATA}/${FV3_NML_FN}
else
- ln_vrfy -sf ${FV3_NML_FP} ${DATA}/${FV3_NML_FN}
+ ln -sf ${FV3_NML_FP} ${DATA}/${FV3_NML_FN}
fi
#
@@ -508,10 +508,10 @@ fi
#
flag_fcst_restart="FALSE"
if [ "${DO_FCST_RESTART}" = "TRUE" ] && [ "$(ls -A ${DATA}/RESTART )" ]; then
- cp_vrfy input.nml input.nml_orig
- cp_vrfy model_configure model_configure_orig
+ cp input.nml input.nml_orig
+ cp model_configure model_configure_orig
if [ "${CPL_AQM}" = "TRUE" ]; then
- cp_vrfy aqm.rc aqm.rc_orig
+ cp aqm.rc aqm.rc_orig
fi
relative_link_flag="FALSE"
flag_fcst_restart="TRUE"
@@ -558,14 +558,14 @@ for the current cycle's (cdate) run directory (DATA) failed:
done
# Create soft-link of restart files in INPUT directory
- cd_vrfy ${DATA}/INPUT
+ cd ${DATA}/INPUT
for file_id in "${file_ids[@]}"; do
- rm_vrfy "${file_id}"
+ rm "${file_id}"
target="${DATA}/RESTART/${rst_yyyymmdd}.${rst_hh}0000.${file_id}"
symlink="${file_id}"
create_symlink_to_file $target $symlink ${relative_link_flag}
done
- cd_vrfy ${DATA}
+ cd ${DATA}
fi
#
#-----------------------------------------------------------------------
@@ -668,7 +668,7 @@ fi
#
if [ "${RUN_ENVIR}" = "nco" ] && [ "${CPL_AQM}" = "TRUE" ]; then
# create an intermediate symlink to RESTART
- ln_vrfy -sf "${DATA}/RESTART" "${COMIN}/RESTART"
+ ln -sf "${DATA}/RESTART" "${COMIN}/RESTART"
fi
#
#-----------------------------------------------------------------------
@@ -728,14 +728,14 @@ POST_STEP
if [ "${CPL_AQM}" = "TRUE" ]; then
if [ "${RUN_ENVIR}" = "nco" ]; then
if [ -d "${COMIN}/RESTART" ] && [ "$(ls -A ${DATA}/RESTART)" ]; then
- rm_vrfy -rf "${COMIN}/RESTART"
+ rm -rf "${COMIN}/RESTART"
fi
if [ "$(ls -A ${DATA}/RESTART)" ]; then
- cp_vrfy -Rp ${DATA}/RESTART ${COMIN}
+ cp -Rp ${DATA}/RESTART ${COMIN}
fi
fi
- cp_vrfy -p ${DATA}/${AQM_RC_PRODUCT_FN} ${COMOUT}/${NET}.${cycle}${dot_ensmem}.${AQM_RC_PRODUCT_FN}
+ cp -p ${DATA}/${AQM_RC_PRODUCT_FN} ${COMOUT}/${NET}.${cycle}${dot_ensmem}.${AQM_RC_PRODUCT_FN}
fhr_ct=0
fhr=0
@@ -745,8 +745,8 @@ if [ "${CPL_AQM}" = "TRUE" ]; then
source_phy="${DATA}/phyf${fhr_ct}.nc"
target_dyn="${COMIN}/${NET}.${cycle}${dot_ensmem}.dyn.f${fhr_ct}.nc"
target_phy="${COMIN}/${NET}.${cycle}${dot_ensmem}.phy.f${fhr_ct}.nc"
- [ -f ${source_dyn} ] && cp_vrfy -p ${source_dyn} ${target_dyn}
- [ -f ${source_phy} ] && cp_vrfy -p ${source_phy} ${target_phy}
+ [ -f ${source_dyn} ] && cp -p ${source_dyn} ${target_dyn}
+ [ -f ${source_phy} ] && cp -p ${source_phy} ${target_phy}
(( fhr=fhr+1 ))
done
fi
@@ -767,9 +767,9 @@ if [ ${WRITE_DOPOST} = "TRUE" ]; then
if [ "${RUN_ENVIR}" != "nco" ]; then
export COMOUT="${DATA}/postprd"
fi
- mkdir_vrfy -p "${COMOUT}"
+ mkdir -p "${COMOUT}"
- cd_vrfy ${COMOUT}
+ cd ${COMOUT}
for fhr in $(seq -f "%03g" 0 ${FCST_LEN_HRS}); do
@@ -796,7 +796,7 @@ if [ ${WRITE_DOPOST} = "TRUE" ]; then
post_orig_fn="${FID}.${post_fn_suffix}"
post_renamed_fn="${NET}.${cycle}${dot_ensmem}.${fid}.${post_renamed_fn_suffix}"
- mv_vrfy ${DATA}/${post_orig_fn} ${post_renamed_fn}
+ mv ${DATA}/${post_orig_fn} ${post_renamed_fn}
if [ $RUN_ENVIR != "nco" ]; then
basetime=$( $DATE_UTIL --date "$yyyymmdd $hh" +%y%j%H%M )
symlink_suffix="_${basetime}f${fhr}${post_mn}"
@@ -809,8 +809,8 @@ if [ ${WRITE_DOPOST} = "TRUE" ]; then
done
if [ "${CPL_AQM}" = "TRUE" ]; then
- mv_vrfy ${DATA}/dynf${fhr}.nc ${COMIN}/${NET}.${cycle}${dot_ensmem}.dyn.f${fhr}.nc
- mv_vrfy ${DATA}/phyf${fhr}.nc ${COMIN}/${NET}.${cycle}${dot_ensmem}.phy.f${fhr}.nc
+ mv ${DATA}/dynf${fhr}.nc ${COMIN}/${NET}.${cycle}${dot_ensmem}.dyn.f${fhr}.nc
+ mv ${DATA}/phyf${fhr}.nc ${COMIN}/${NET}.${cycle}${dot_ensmem}.phy.f${fhr}.nc
fi
done
diff --git a/scripts/exregional_run_met_genensprod_or_ensemblestat.sh b/scripts/exregional_run_met_genensprod_or_ensemblestat.sh
index fe0e119b19..aa24abbb10 100755
--- a/scripts/exregional_run_met_genensprod_or_ensemblestat.sh
+++ b/scripts/exregional_run_met_genensprod_or_ensemblestat.sh
@@ -271,7 +271,7 @@ set_vx_fhr_list \
#
#-----------------------------------------------------------------------
#
-mkdir_vrfy -p "${OUTPUT_DIR}"
+mkdir -p "${OUTPUT_DIR}"
#
#-----------------------------------------------------------------------
#
diff --git a/scripts/exregional_run_met_gridstat_or_pointstat_vx.sh b/scripts/exregional_run_met_gridstat_or_pointstat_vx.sh
index 7eb1ce4605..93444069cb 100755
--- a/scripts/exregional_run_met_gridstat_or_pointstat_vx.sh
+++ b/scripts/exregional_run_met_gridstat_or_pointstat_vx.sh
@@ -272,7 +272,7 @@ set_vx_fhr_list \
#
#-----------------------------------------------------------------------
#
-mkdir_vrfy -p "${OUTPUT_DIR}"
+mkdir -p "${OUTPUT_DIR}"
#
#-----------------------------------------------------------------------
#
diff --git a/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensmean.sh b/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensmean.sh
index 458dcec33f..4b9716493e 100755
--- a/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensmean.sh
+++ b/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensmean.sh
@@ -221,7 +221,7 @@ set_vx_fhr_list \
#
#-----------------------------------------------------------------------
#
-mkdir_vrfy -p "${OUTPUT_DIR}"
+mkdir -p "${OUTPUT_DIR}"
#
#-----------------------------------------------------------------------
#
diff --git a/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensprob.sh b/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensprob.sh
index fc735845c9..918fb900d3 100755
--- a/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensprob.sh
+++ b/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensprob.sh
@@ -173,7 +173,7 @@ set_vx_fhr_list \
#
#-----------------------------------------------------------------------
#
-mkdir_vrfy -p "${OUTPUT_DIR}"
+mkdir -p "${OUTPUT_DIR}"
#
#-----------------------------------------------------------------------
#
diff --git a/scripts/exregional_run_met_pb2nc_obs.sh b/scripts/exregional_run_met_pb2nc_obs.sh
index 92d39102fc..2528c32ced 100755
--- a/scripts/exregional_run_met_pb2nc_obs.sh
+++ b/scripts/exregional_run_met_pb2nc_obs.sh
@@ -140,7 +140,7 @@ set_vx_fhr_list \
#
#-----------------------------------------------------------------------
#
-mkdir_vrfy -p "${OUTPUT_DIR}"
+mkdir -p "${OUTPUT_DIR}"
#
#-----------------------------------------------------------------------
#
diff --git a/scripts/exregional_run_met_pcpcombine.sh b/scripts/exregional_run_met_pcpcombine.sh
index 7eabe02901..fb495a6145 100755
--- a/scripts/exregional_run_met_pcpcombine.sh
+++ b/scripts/exregional_run_met_pcpcombine.sh
@@ -226,7 +226,7 @@ set_vx_fhr_list \
#
#-----------------------------------------------------------------------
#
-mkdir_vrfy -p "${OUTPUT_DIR}"
+mkdir -p "${OUTPUT_DIR}"
#
#-----------------------------------------------------------------------
#
diff --git a/scripts/exregional_run_post.sh b/scripts/exregional_run_post.sh
index ab3377e6b4..1bf45bd965 100755
--- a/scripts/exregional_run_post.sh
+++ b/scripts/exregional_run_post.sh
@@ -80,8 +80,8 @@ fi
#
#-----------------------------------------------------------------------
#
-rm_vrfy -f fort.*
-cp_vrfy ${PARMdir}/upp/nam_micro_lookup.dat ./eta_micro_lookup.dat
+rm -f fort.*
+cp ${PARMdir}/upp/nam_micro_lookup.dat ./eta_micro_lookup.dat
if [ ${USE_CUSTOM_POST_CONFIG_FILE} = "TRUE" ]; then
post_config_fp="${CUSTOM_POST_CONFIG_FP}"
print_info_msg "
@@ -105,18 +105,18 @@ temporary work directory (DATA_FHR):
DATA_FHR = \"${DATA_FHR}\"
===================================================================="
fi
-cp_vrfy ${post_config_fp} ./postxconfig-NT.txt
-cp_vrfy ${PARMdir}/upp/params_grib2_tbl_new .
+cp ${post_config_fp} ./postxconfig-NT.txt
+cp ${PARMdir}/upp/params_grib2_tbl_new .
if [ ${USE_CRTM} = "TRUE" ]; then
- cp_vrfy ${CRTM_DIR}/Nalli.IRwater.EmisCoeff.bin ./
- cp_vrfy ${CRTM_DIR}/FAST*.bin ./
- cp_vrfy ${CRTM_DIR}/NPOESS.IRland.EmisCoeff.bin ./
- cp_vrfy ${CRTM_DIR}/NPOESS.IRsnow.EmisCoeff.bin ./
- cp_vrfy ${CRTM_DIR}/NPOESS.IRice.EmisCoeff.bin ./
- cp_vrfy ${CRTM_DIR}/AerosolCoeff.bin ./
- cp_vrfy ${CRTM_DIR}/CloudCoeff.bin ./
- cp_vrfy ${CRTM_DIR}/*.SpcCoeff.bin ./
- cp_vrfy ${CRTM_DIR}/*.TauCoeff.bin ./
+ cp ${CRTM_DIR}/Nalli.IRwater.EmisCoeff.bin ./
+ cp ${CRTM_DIR}/FAST*.bin ./
+ cp ${CRTM_DIR}/NPOESS.IRland.EmisCoeff.bin ./
+ cp ${CRTM_DIR}/NPOESS.IRsnow.EmisCoeff.bin ./
+ cp ${CRTM_DIR}/NPOESS.IRice.EmisCoeff.bin ./
+ cp ${CRTM_DIR}/AerosolCoeff.bin ./
+ cp ${CRTM_DIR}/CloudCoeff.bin ./
+ cp ${CRTM_DIR}/*.SpcCoeff.bin ./
+ cp ${CRTM_DIR}/*.TauCoeff.bin ./
print_info_msg "
====================================================================
Copying the external CRTM fix files from CRTM_DIR to the temporary
@@ -270,7 +270,7 @@ post_renamed_fn_suffix="f${fhr}${post_mn_or_null}.${POST_OUTPUT_DOMAIN_NAME}.gri
# generates (i.e. "...prslev..." and "...natlev..." files) and move,
# rename, and create symlinks to them.
#
-cd_vrfy "${COMOUT}"
+cd "${COMOUT}"
basetime=$( $DATE_UTIL --date "$yyyymmdd $hh" +%y%j%H%M )
symlink_suffix="${dot_ensmem}.${basetime}f${fhr}${post_mn}"
if [ "${CPL_AQM}" = "TRUE" ]; then
@@ -282,7 +282,7 @@ for fid in "${fids[@]}"; do
FID=$(echo_uppercase $fid)
post_orig_fn="${FID}.${post_fn_suffix}"
post_renamed_fn="${NET}.${cycle}${dot_ensmem}.${fid}.${post_renamed_fn_suffix}"
- mv_vrfy ${DATA_FHR}/${post_orig_fn} ${post_renamed_fn}
+ mv ${DATA_FHR}/${post_orig_fn} ${post_renamed_fn}
if [ $RUN_ENVIR != "nco" ]; then
create_symlink_to_file ${post_renamed_fn} ${FID}${symlink_suffix} TRUE
fi
@@ -292,7 +292,7 @@ for fid in "${fids[@]}"; do
fi
done
-rm_vrfy -rf ${DATA_FHR}
+rm -rf ${DATA_FHR}
#
#-----------------------------------------------------------------------
diff --git a/scripts/exregional_run_prdgen.sh b/scripts/exregional_run_prdgen.sh
index 8fc72dff1c..5d1bfbf447 100755
--- a/scripts/exregional_run_prdgen.sh
+++ b/scripts/exregional_run_prdgen.sh
@@ -166,7 +166,7 @@ net4=$(echo ${NET:0:4} | tr '[:upper:]' '[:lower:]')
for leveltype in prslev natlev ififip testbed
do
if [ -f ${COMOUT}/${NET}.${cycle}${dot_ensmem}.${leveltype}.f${fhr}.${POST_OUTPUT_DOMAIN_NAME}.grib2 ]; then
- ln_vrfy -sf --relative ${COMOUT}/${NET}.${cycle}${dot_ensmem}.${leveltype}.f${fhr}.${POST_OUTPUT_DOMAIN_NAME}.grib2 ${COMOUT}/${net4}.${cycle}.${leveltype}.f${fhr}.${gridname}grib2
+ ln -sf --relative ${COMOUT}/${NET}.${cycle}${dot_ensmem}.${leveltype}.f${fhr}.${POST_OUTPUT_DOMAIN_NAME}.grib2 ${COMOUT}/${net4}.${cycle}.${leveltype}.f${fhr}.${gridname}grib2
wgrib2 ${COMOUT}/${net4}.${cycle}.${leveltype}.f${fhr}.${gridname}grib2 -s > ${COMOUT}/${net4}.${cycle}.${leveltype}.f${fhr}.${gridname}grib2.idx
fi
done
@@ -184,7 +184,7 @@ if [ ${PREDEF_GRID_NAME} = "RRFS_NA_3km" ]; then
DATA=$COMOUT
DATAprdgen=$DATA/prdgen_${fhr}
-mkdir_vrfy $DATAprdgen
+mkdir $DATAprdgen
wgrib2 ${COMOUT}/${NET}.${cycle}.prslev.f${fhr}.grib2 >& $DATAprdgen/prslevf${fhr}.txt
@@ -223,7 +223,7 @@ for domain in ${domains[@]}
do
for task in $(seq ${tasks[count]})
do
- mkdir_vrfy -p $DATAprdgen/prdgen_${domain}_${task}
+ mkdir -p $DATAprdgen/prdgen_${domain}_${task}
echo "$SCRIPTSdir/exregional_run_prdgen_subpiece.sh $fhr $cyc $task $domain ${DATAprdgen} ${COMOUT} &" >> $DATAprdgen/poescript_${fhr}
done
count=$count+1
@@ -269,7 +269,7 @@ else
#
if [ ${#ADDNL_OUTPUT_GRIDS[@]} -gt 0 ]; then
- cd_vrfy ${COMOUT}
+ cd ${COMOUT}
grid_specs_130="lambert:265:25.000000 233.862000:451:13545.000000 16.281000:337:13545.000000"
grid_specs_200="lambert:253:50.000000 285.720000:108:16232.000000 16.201000:94:16232.000000"
@@ -289,7 +289,7 @@ if [ ${#ADDNL_OUTPUT_GRIDS[@]} -gt 0 ]; then
eval grid_specs=\$grid_specs_${grid}
subdir=${COMOUT}/${grid}_grid
- mkdir_vrfy -p ${subdir}/${fhr}
+ mkdir -p ${subdir}/${fhr}
bg_remap=${subdir}/${NET}.${cycle}${dot_ensmem}.${leveltype}.f${fhr}.${POST_OUTPUT_DOMAIN_NAME}.grib2
# Interpolate fields to new grid
@@ -317,11 +317,11 @@ if [ ${#ADDNL_OUTPUT_GRIDS[@]} -gt 0 ]; then
rm -f ${subdir}/${fhr}/tmp_${grid}.grib2
# Save to com directory
- mkdir_vrfy -p ${COMOUT}/${grid}_grid
- cp_vrfy ${bg_remap} ${COMOUT}/${grid}_grid/${NET}.${cycle}${dot_ensmem}.${leveltype}.f${fhr}.${POST_OUTPUT_DOMAIN_NAME}.grib2
+ mkdir -p ${COMOUT}/${grid}_grid
+ cp ${bg_remap} ${COMOUT}/${grid}_grid/${NET}.${cycle}${dot_ensmem}.${leveltype}.f${fhr}.${POST_OUTPUT_DOMAIN_NAME}.grib2
if [[ -f ${COMOUT}/${grid}_grid/${NET}.${cycle}${dot_ensmem}.${leveltype}.f${fhr}.${POST_OUTPUT_DOMAIN_NAME}.grib2 ]]; then
- ln_vrfy -fs --relative ${COMOUT}/${grid}_grid/${NET}.${cycle}${dot_ensmem}.${leveltype}.f${fhr}.${POST_OUTPUT_DOMAIN_NAME}.grib2 ${COMOUT}/${net4}.${cycle}.${leveltype}.f${fhr}.${gridname}grib2
+ ln -fs --relative ${COMOUT}/${grid}_grid/${NET}.${cycle}${dot_ensmem}.${leveltype}.f${fhr}.${POST_OUTPUT_DOMAIN_NAME}.grib2 ${COMOUT}/${net4}.${cycle}.${leveltype}.f${fhr}.${gridname}grib2
wgrib2 ${COMOUT}/${net4}.${cycle}.${leveltype}.f${fhr}.${gridname}grib2 -s > ${COMOUT}/${net4}.${cycle}.${leveltype}.f${fhr}.${gridname}grib2.idx
fi
@@ -331,7 +331,7 @@ fi
fi # block for parallel or series wgrib2 runs.
-rm_vrfy -rf ${DATA_FHR}
+rm -rf ${DATA_FHR}
#
#-----------------------------------------------------------------------
#
diff --git a/ush/bash_utils/check_for_preexist_dir_file.sh b/ush/bash_utils/check_for_preexist_dir_file.sh
index 4ca55766d2..2843222230 100644
--- a/ush/bash_utils/check_for_preexist_dir_file.sh
+++ b/ush/bash_utils/check_for_preexist_dir_file.sh
@@ -107,7 +107,7 @@ where the arguments are defined as follows:
#
"delete")
- rm_vrfy -rf "${dir_or_file}"
+ rm -rf "${dir_or_file}"
;;
#
#-----------------------------------------------------------------------
@@ -134,7 +134,7 @@ Specified directory or file (dir_or_file) already exists:
Moving (renaming) preexisting directory or file to:
old_dir_or_file = \"${old_dir_or_file}\""
- mv_vrfy "${dir_or_file}" "${old_dir_or_file}"
+ mv "${dir_or_file}" "${old_dir_or_file}"
;;
#
#-----------------------------------------------------------------------
diff --git a/ush/bash_utils/create_symlink_to_file.sh b/ush/bash_utils/create_symlink_to_file.sh
index dd25cfa2fd..c6a5213326 100644
--- a/ush/bash_utils/create_symlink_to_file.sh
+++ b/ush/bash_utils/create_symlink_to_file.sh
@@ -80,12 +80,6 @@ not exist or is not a file:
#
# Create the symlink.
#
-# Important note:
-# In the ln_vrfy command below, do not quote ${relative_flag} because if
-# is quoted (either single or double quotes) but happens to be a null
-# string, it will be treated as the (empty) name of (or path to) the
-# target and will cause an error.
-#
#-----------------------------------------------------------------------
#
ln -sf ${relative_flag} "$target" "$symlink"
diff --git a/ush/bash_utils/filesys_cmds_vrfy.sh b/ush/bash_utils/filesys_cmds_vrfy.sh
deleted file mode 100644
index b355d293ad..0000000000
--- a/ush/bash_utils/filesys_cmds_vrfy.sh
+++ /dev/null
@@ -1,280 +0,0 @@
-#
-#-----------------------------------------------------------------------
-#
-# This is a generic function that executes the specified command (e.g.
-# "cp", "mv", etc) with the specified options/arguments and then verifies
-# that the command executed without errors. The first argument to this
-# function is the command to execute while the remaining ones are the
-# options/arguments to be passed to that command.
-#
-#-----------------------------------------------------------------------
-#
-function filesys_cmd_vrfy() {
-#
-#-----------------------------------------------------------------------
-#
-# Save current shell options (in a global array). Then set new options
-# for this script/function.
-#
-#-----------------------------------------------------------------------
-#
- { save_shell_opts; . ${USHdir}/preamble.sh; } > /dev/null 2>&1
-#
-#-----------------------------------------------------------------------
-#
-# Get the full path to the file in which this script/function is located
-# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in
-# which the file is located (scrfunc_dir).
-#
-#-----------------------------------------------------------------------
-#
- local scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" )
- local scrfunc_fn=$( basename "${scrfunc_fp}" )
- local scrfunc_dir=$( dirname "${scrfunc_fp}" )
-#
-#-----------------------------------------------------------------------
-#
-# Get the name of this function.
-#
-#-----------------------------------------------------------------------
-#
- local func_name="${FUNCNAME[0]}"
-#
-#-----------------------------------------------------------------------
-#
-# Get information about the script or function that calls this function.
-# Note that caller_name will be set as follows:
-#
-# 1) If the caller is a function, caller_name will be set to the name of
-# that function.
-# 2) If the caller is a sourced script, caller_name will be set to
-# "script". Note that a sourced script cannot be the top level
-# script since by defintion, it is sourced by another script or
-# function.
-# 3) If the caller is the top-level script, caller_name will be set to
-# "main".
-#
-# Thus, if caller_name is set to "script" or "main", the caller is a
-# script, and if it is set to anything else, the caller is a function.
-#
-# Below, the index into FUNCNAME and BASH_SOURCE is 2 (not 1 as is usually
-# the case) because this function is called by functions such as cp_vrfy,
-# mv_vrfy, rm_vrfy, ln_vrfy, mkdir_vrfy, and cd_vrfy, but these are just
-# wrappers, and in the error and informational messages, we are really
-# interested in the scripts/functions that in turn call these wrappers.
-#
-#-----------------------------------------------------------------------
-#
- local caller_name="main"
- local caller_fp=""
- if [ -z "${BASH_SOURCE[2]-x}" ]; then
- caller_fp=$( $READLINK -f "${BASH_SOURCE[2]}" )
- local caller_fn=$( basename "${caller_fp}" )
- local caller_dir=$( dirname "${caller_fp}" )
- caller_name="${FUNCNAME[2]}"
- fi
-#
-#-----------------------------------------------------------------------
-#
-# Declare local variables that are used later below.
-#
-#-----------------------------------------------------------------------
-#
- local cmd \
- output \
- exit_code \
- double_space \
- script_or_function
-#
-#-----------------------------------------------------------------------
-#
-# Check that at least one argument is supplied.
-#
-#-----------------------------------------------------------------------
-#
- if [ "$#" -lt 1 ]; then
-
- print_err_msg_exit "
-Incorrect number of arguments specified:
-
- Function name: \"${func_name}\"
- Number of arguments specified: $#
-
-Usage:
-
- ${func_name} cmd [args_to_cmd]
-
-where \"cmd\" is the name of the command to execute and \"args_to_cmd\"
-are zero or more options and arguments to pass to that command.
-"
-
- fi
-#
-#-----------------------------------------------------------------------
-#
-# The first argument to this function is the command to execute while
-# the remaining ones are the arguments to that command. Extract the
-# command and save it in the variable "cmd". Then shift the argument
-# list so that $@ contains the arguments to the command but not the
-# name of the command itself.
-#
-#-----------------------------------------------------------------------
-#
- cmd="$1"
- shift
-#
-#-----------------------------------------------------------------------
-#
-# Pass the arguments to the command and execute it, saving the outputs
-# to stdout and stderr in the variable "output". Also, save the exit
-# code from the execution.
-#
-#-----------------------------------------------------------------------
-#
- local output=$( "$cmd" "$@" 2>&1 )
- local exit_code=$?
-#
-#-----------------------------------------------------------------------
-#
-# If output is not empty, it will be printed to stdout below either as
-# an error message or an informational message. In either case, format
-# it by adding a double space to the beginning of each line.
-#
-#-----------------------------------------------------------------------
-#
- if [ -n "$output" ]; then
- local double_space=" "
- output="${double_space}${output}"
- output=${output/$'\n'/$'\n'${double_space}}
- fi
-#
-#-----------------------------------------------------------------------
-#
-# If the exit code from the execution of cmd above is nonzero, print out
-# an error message and exit.
-#
-#-----------------------------------------------------------------------
-#
- if [ "${caller_name}" = "main" ] || \
- [ "${caller_name}" = "script" ]; then
- local script_or_function="the script"
- else
- local script_or_function="function \"${caller_name}\""
- fi
-
- if [ ${exit_code} -ne 0 ]; then
-
- print_err_msg_exit "\
-Call to function \"${cmd}_vrfy\" failed. This function was called from
-${script_or_function} in file:
-
- \"${caller_fp}\"
-
-Error message from \"${cmd}_vrfy\" function's \"$cmd\" operation:
-$output"
-
- fi
-#
-#-----------------------------------------------------------------------
-#
-# If the exit code from the execution of cmd above is zero, continue.
-#
-# First, check if cmd is set to "cd". If so, the execution of cmd above
-# in a separate subshell [which is what happens when using the $("$cmd")
-# construct above] will change directory in that subshell but not in the
-# current shell. Thus, rerun the "cd" command in the current shell.
-#
-#-----------------------------------------------------------------------
-#
- if [ "$cmd" = "cd" ]; then
- "$cmd" "$@" 2>&1 > /dev/null
- fi
-#
-#-----------------------------------------------------------------------
-#
-# If output is not empty, print out whatever message it contains (e.g.
-# it might contain a warning or other informational message).
-#
-#-----------------------------------------------------------------------
-#
- if [ -n "$output" ]; then
-
- print_info_msg "
-\"${cmd}_vrfy\" operation returned with a message. This command was
-issued from ${script_or_function} in file:
-
- \"${caller_fp}\"
-
-Message from \"${cmd}_vrfy\" function's \"$cmd\" operation:
-$output"
-
- fi
-#
-#-----------------------------------------------------------------------
-#
-# Restore the shell options saved at the beginning of this script/func-
-# tion.
-#
-#-----------------------------------------------------------------------
-#
- { restore_shell_opts; } > /dev/null 2>&1
-
-}
-
-
-#
-#-----------------------------------------------------------------------
-#
-# The following are functions are counterparts of common filesystem
-# commands "with verification", i.e. they execute a filesystem command
-# (such as "cp" and "mv") and then verify that the execution was successful.
-#
-# These functions are called using the "filesys_cmd_vrfy" function defined
-# above. In each of these functions, we:
-#
-# 1) Save current shell options (in a global array) and then set new
-# options for this script/function.
-# 2) Call the generic function "filesys_cmd_vrfy" with the command of
-# interest (e.g. "cp") as the first argument and the arguments passed
-# in as the rest.
-# 3) Restore the shell options saved at the beginning of the function.
-#
-#-----------------------------------------------------------------------
-#
-
-function cp_vrfy() {
- { save_shell_opts; . ${USHdir}/preamble.sh; } > /dev/null 2>&1
- filesys_cmd_vrfy "cp" "$@"
- { restore_shell_opts; } > /dev/null 2>&1
-}
-
-function mv_vrfy() {
- { save_shell_opts; . ${USHdir}/preamble.sh; } > /dev/null 2>&1
- filesys_cmd_vrfy "mv" "$@"
- { restore_shell_opts; } > /dev/null 2>&1
-}
-
-function rm_vrfy() {
- { save_shell_opts; . ${USHdir}/preamble.sh; } > /dev/null 2>&1
- filesys_cmd_vrfy "rm" "$@"
- { restore_shell_opts; } > /dev/null 2>&1
-}
-
-function ln_vrfy() {
- { save_shell_opts; . ${USHdir}/preamble.sh; } > /dev/null 2>&1
- filesys_cmd_vrfy "$LN_UTIL" "$@"
- { restore_shell_opts; } > /dev/null 2>&1
-}
-
-function mkdir_vrfy() {
- { save_shell_opts; . ${USHdir}/preamble.sh; } > /dev/null 2>&1
- filesys_cmd_vrfy "mkdir" "$@"
- { restore_shell_opts; } > /dev/null 2>&1
-}
-
-function cd_vrfy() {
- { save_shell_opts; . ${USHdir}/preamble.sh; } > /dev/null 2>&1
- filesys_cmd_vrfy "cd" "$@"
- { restore_shell_opts; } > /dev/null 2>&1
-}
-
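The special-casing of "cd" in the removed wrapper above rests on a general rule: a directory change made in a subshell (or any child process) never propagates to the parent, so running "cd" inside the $("$cmd") capture had no lasting effect. A minimal Python sketch of the same isolation:

    import os
    import subprocess

    # Run "cd" in a child shell, just as $("$cmd" ...) ran it in a subshell.
    # The child's working directory changes; the parent's does not, which is
    # why the removed filesys_cmd_vrfy function had to re-run "cd" in the
    # current shell after capturing the subshell's output.
    before = os.getcwd()
    subprocess.run("cd /tmp && pwd", shell=True, check=True)  # prints /tmp
    assert os.getcwd() == before  # the parent process never moved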
diff --git a/ush/get_mrms_files.sh b/ush/get_mrms_files.sh
index b669094488..65a99cc1bd 100644
--- a/ush/get_mrms_files.sh
+++ b/ush/get_mrms_files.sh
@@ -54,7 +54,7 @@ function get_mrms_files () {
# 10 represents a significant number of vertical levels of data
if [ ${numgrib2} -ge 10 ] && [ ! -e filelist_mrms ]; then
- cp_vrfy ${nsslfile1} ${output_path}
+ cp ${nsslfile1} ${output_path}
ls ${output_path}/${file_matches} > ${output_path}/filelist_mrms
echo "Copying mrms files for ${YYYY}${MM}${DD}-${cyc}${min}"
fi
diff --git a/ush/job_preamble.sh b/ush/job_preamble.sh
index e9c3683c40..16b99393a2 100644
--- a/ush/job_preamble.sh
+++ b/ush/job_preamble.sh
@@ -88,7 +88,7 @@ fi
export DATA=
if [ "${RUN_ENVIR}" = "nco" ]; then
export DATA=${DATAROOT}/${jobid}
- mkdir_vrfy -p $DATA
+ mkdir -p $DATA
cd $DATA
fi
#
@@ -174,10 +174,10 @@ export -f POST_STEP
#
if [ "${RUN_ENVIR}" = "nco" ] && [ "${WORKFLOW_MANAGER}" != "ecflow" ]; then
__EXPTLOG=${EXPTDIR}/log
- mkdir_vrfy -p ${__EXPTLOG}
+ mkdir -p ${__EXPTLOG}
for i in ${LOGDIR}/*.${WORKFLOW_ID}.log; do
__LOGB=$(basename $i .${WORKFLOW_ID}.log)
- ln_vrfy -sf $i ${__EXPTLOG}/${__LOGB}.log
+ ln -sf $i ${__EXPTLOG}/${__LOGB}.log
done
fi
#
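For reference, the log-linking loop above takes each ${WORKFLOW_ID}-tagged log in LOGDIR and exposes it under a stable name in the experiment log directory. A rough Python sketch of the same renaming, with placeholder paths and a hypothetical workflow ID standing in for the shell variables:

    from pathlib import Path

    # Placeholders for the shell variables LOGDIR, WORKFLOW_ID, and __EXPTLOG.
    logdir, workflow_id = Path("/path/to/LOGDIR"), "20240501T0000"
    exptlog = Path("/path/to/EXPTDIR/log")

    exptlog.mkdir(parents=True, exist_ok=True)                # mkdir -p ${__EXPTLOG}
    for log in logdir.glob(f"*.{workflow_id}.log"):
        base = log.name.removesuffix(f".{workflow_id}.log")   # basename $i .<suffix>
        target = exptlog / f"{base}.log"
        if target.is_symlink() or target.exists():            # ln -sf: replace existing
            target.unlink()
        target.symlink_to(log)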
diff --git a/ush/launch_FV3LAM_wflow.sh b/ush/launch_FV3LAM_wflow.sh
index cfbedac9cf..92dd24aee6 100755
--- a/ush/launch_FV3LAM_wflow.sh
+++ b/ush/launch_FV3LAM_wflow.sh
@@ -166,7 +166,7 @@ wflow_status="IN PROGRESS"
#
#-----------------------------------------------------------------------
#
-cd_vrfy "$exptdir"
+cd "$exptdir"
#
#-----------------------------------------------------------------------
#
diff --git a/ush/source_util_funcs.sh b/ush/source_util_funcs.sh
index fa097de34d..7fe3025d6a 100644
--- a/ush/source_util_funcs.sh
+++ b/ush/source_util_funcs.sh
@@ -115,16 +115,6 @@ function source_util_funcs() {
#
#-----------------------------------------------------------------------
#
-# Source the file containing functions that execute filesystem commands
-# (e.g. "cp", "mv") with verification (i.e. verifying that the commands
-# completed successfully).
-#
-#-----------------------------------------------------------------------
-#
- . ${bashutils_dir}/filesys_cmds_vrfy.sh
-#
-#-----------------------------------------------------------------------
-#
# Source the file containing the function that searches an array for a
# specified string.
#
From 2d94ed42357d720765fd4d45b6bc88682957968e Mon Sep 17 00:00:00 2001
From: gsketefian <31046882+gsketefian@users.noreply.github.com>
Date: Wed, 1 May 2024 14:07:11 -0600
Subject: [PATCH 09/39] [develop] Streamline SRW App's interface to MET/METplus
(#1005)
This PR streamlines the SRW App's interface to the MET/METplus verification tool and implements some bug fixes. Details:
* Replace the field-specific METplus configuration jinja2 templates associated with each METplus tool (these templates are hard-coded for each field) with a single template per tool that contains jinja2 code to handle any valid field to be verified.
* Add yaml configuration files for verification that specify the fields to verify (including field levels and thresholds). This consolidates the field/level/threshold information in one place instead of spreading it out and repeating it across several hard-coded configuration files.
* Add a python script (decouple_fcst_obs_vx_config.py) to parse these two vx configuration files and create a dictionary of the field/level/threshold information that can then be passed to the unified workflow templating tool.
* Modify the ex-scripts for the verification tasks (exregional_run_met_....sh) to allow the use of the new jinja2 METplus config templates. This includes adding code to call the new script decouple_fcst_obs_vx_config.py and then pass its output to the unified workflow templating tool, which generates METplus configuration files from the (new) jinja2 templates.
* Add new environment variables to the rocoto workflow configuration files (verify_[pre|det|ens].yaml) that are needed for using the new jinja2 METplus config templates.
* Bug fixes
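The "coupled" configuration described above can be pictured with a short Python sketch. The "//" delimiter and the toy dictionary below are illustrative only; the real delimiter and the full parsing logic live in ush/metplus/decouple_fcst_obs_vx_config.py:

    # A coupled entry may hold "fcst_value//obs_value" in keys and values;
    # entries without the delimiter are shared by forecasts and observations.
    coupled = {"REFC//MergedReflectivityQCComposite": {"L0//Z500": ["ge20//ge20"]}}

    def decouple(node, idx):
        """Return the forecast half (idx=0) or the observation half (idx=1)."""
        split = lambda s: s.split("//")[idx] if "//" in s else s
        if isinstance(node, dict):
            return {split(k): decouple(v, idx) for k, v in node.items()}
        if isinstance(node, list):
            return [decouple(v, idx) for v in node]
        return split(node)

    fcst_cfg = decouple(coupled, 0)  # {'REFC': {'L0': ['ge20']}}
    obs_cfg = decouple(coupled, 1)   # {'MergedReflectivityQCComposite': {'Z500': ['ge20']}}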
---
jobs/JREGIONAL_CHECK_POST_OUTPUT | 15 +-
jobs/JREGIONAL_PARSE_VX_CONFIG | 97 ++
jobs/JREGIONAL_RUN_MET_PB2NC_OBS | 4 +-
parm/metplus/EnsembleStat.conf | 759 ++++++++++++++
parm/metplus/EnsembleStat_ADPSFC.conf | 307 ------
parm/metplus/EnsembleStat_ADPUPA.conf | 351 -------
parm/metplus/EnsembleStat_APCP.conf | 258 -----
parm/metplus/EnsembleStat_ASNOW.conf | 259 -----
parm/metplus/EnsembleStat_REFC.conf | 265 -----
parm/metplus/EnsembleStat_RETOP.conf | 267 -----
parm/metplus/GenEnsProd.conf | 390 ++++++++
parm/metplus/GenEnsProd_ADPSFC.conf | 219 ----
parm/metplus/GenEnsProd_ADPUPA.conf | 236 -----
parm/metplus/GenEnsProd_APCP.conf | 191 ----
parm/metplus/GenEnsProd_ASNOW.conf | 192 ----
parm/metplus/GenEnsProd_REFC.conf | 191 ----
parm/metplus/GenEnsProd_RETOP.conf | 192 ----
parm/metplus/GridStat_APCP.conf | 309 ------
parm/metplus/GridStat_ASNOW.conf | 283 ------
parm/metplus/GridStat_REFC.conf | 315 ------
parm/metplus/GridStat_RETOP.conf | 317 ------
parm/metplus/GridStat_ensmean.conf | 662 ++++++++++++
parm/metplus/GridStat_ensmean_APCP.conf | 282 ------
parm/metplus/GridStat_ensmean_ASNOW.conf | 287 ------
parm/metplus/GridStat_ensmean_REFC.conf | 313 ------
parm/metplus/GridStat_ensmean_RETOP.conf | 315 ------
parm/metplus/GridStat_ensprob.conf | 675 +++++++++++++
parm/metplus/GridStat_ensprob_APCP.conf | 362 -------
parm/metplus/GridStat_ensprob_ASNOW.conf | 384 -------
parm/metplus/GridStat_ensprob_REFC.conf | 382 -------
parm/metplus/GridStat_ensprob_RETOP.conf | 390 --------
parm/metplus/GridStat_or_PointStat.conf | 940 ++++++++++++++++++
parm/metplus/PcpCombine.conf | 216 ++++
parm/metplus/PcpCombine_fcst_APCP.conf | 130 ---
parm/metplus/PcpCombine_fcst_ASNOW.conf | 141 ---
parm/metplus/PcpCombine_obs_APCP.conf | 139 ---
parm/metplus/PointStat_ADPSFC.conf | 378 -------
parm/metplus/PointStat_ADPUPA.conf | 343 -------
parm/metplus/PointStat_ensmean.conf | 566 +++++++++++
parm/metplus/PointStat_ensmean_ADPSFC.conf | 252 -----
parm/metplus/PointStat_ensmean_ADPUPA.conf | 319 ------
parm/metplus/PointStat_ensprob.conf | 524 ++++++++++
parm/metplus/PointStat_ensprob_ADPSFC.conf | 415 --------
parm/metplus/PointStat_ensprob_ADPUPA.conf | 523 ----------
parm/metplus/metplus_macros.jinja | 150 +++
parm/metplus/vx_config_det.yaml | 204 ++++
parm/metplus/vx_config_ens.yaml | 54 +
parm/wflow/verify_det.yaml | 38 +-
parm/wflow/verify_ens.yaml | 64 +-
parm/wflow/verify_pre.yaml | 8 +-
scripts/exregional_check_post_output.sh | 9 +-
scripts/exregional_parse_vx_config.sh | 94 ++
...onal_run_met_genensprod_or_ensemblestat.sh | 101 +-
...gional_run_met_gridstat_or_pointstat_vx.sh | 90 +-
...un_met_gridstat_or_pointstat_vx_ensmean.sh | 90 +-
...un_met_gridstat_or_pointstat_vx_ensprob.sh | 44 +-
scripts/exregional_run_met_pb2nc_obs.sh | 10 +-
scripts/exregional_run_met_pcpcombine.sh | 70 +-
ush/metplus/decouple_fcst_obs_vx_config.py | 436 ++++++++
ush/set_vx_fhr_list.sh | 2 +-
60 files changed, 6040 insertions(+), 9779 deletions(-)
create mode 100755 jobs/JREGIONAL_PARSE_VX_CONFIG
create mode 100644 parm/metplus/EnsembleStat.conf
delete mode 100644 parm/metplus/EnsembleStat_ADPSFC.conf
delete mode 100644 parm/metplus/EnsembleStat_ADPUPA.conf
delete mode 100644 parm/metplus/EnsembleStat_APCP.conf
delete mode 100644 parm/metplus/EnsembleStat_ASNOW.conf
delete mode 100644 parm/metplus/EnsembleStat_REFC.conf
delete mode 100644 parm/metplus/EnsembleStat_RETOP.conf
create mode 100644 parm/metplus/GenEnsProd.conf
delete mode 100644 parm/metplus/GenEnsProd_ADPSFC.conf
delete mode 100644 parm/metplus/GenEnsProd_ADPUPA.conf
delete mode 100644 parm/metplus/GenEnsProd_APCP.conf
delete mode 100644 parm/metplus/GenEnsProd_ASNOW.conf
delete mode 100644 parm/metplus/GenEnsProd_REFC.conf
delete mode 100644 parm/metplus/GenEnsProd_RETOP.conf
delete mode 100644 parm/metplus/GridStat_APCP.conf
delete mode 100644 parm/metplus/GridStat_ASNOW.conf
delete mode 100644 parm/metplus/GridStat_REFC.conf
delete mode 100644 parm/metplus/GridStat_RETOP.conf
create mode 100644 parm/metplus/GridStat_ensmean.conf
delete mode 100644 parm/metplus/GridStat_ensmean_APCP.conf
delete mode 100644 parm/metplus/GridStat_ensmean_ASNOW.conf
delete mode 100644 parm/metplus/GridStat_ensmean_REFC.conf
delete mode 100644 parm/metplus/GridStat_ensmean_RETOP.conf
create mode 100644 parm/metplus/GridStat_ensprob.conf
delete mode 100644 parm/metplus/GridStat_ensprob_APCP.conf
delete mode 100644 parm/metplus/GridStat_ensprob_ASNOW.conf
delete mode 100644 parm/metplus/GridStat_ensprob_REFC.conf
delete mode 100644 parm/metplus/GridStat_ensprob_RETOP.conf
create mode 100644 parm/metplus/GridStat_or_PointStat.conf
create mode 100644 parm/metplus/PcpCombine.conf
delete mode 100644 parm/metplus/PcpCombine_fcst_APCP.conf
delete mode 100644 parm/metplus/PcpCombine_fcst_ASNOW.conf
delete mode 100644 parm/metplus/PcpCombine_obs_APCP.conf
delete mode 100644 parm/metplus/PointStat_ADPSFC.conf
delete mode 100644 parm/metplus/PointStat_ADPUPA.conf
create mode 100644 parm/metplus/PointStat_ensmean.conf
delete mode 100644 parm/metplus/PointStat_ensmean_ADPSFC.conf
delete mode 100644 parm/metplus/PointStat_ensmean_ADPUPA.conf
create mode 100644 parm/metplus/PointStat_ensprob.conf
delete mode 100644 parm/metplus/PointStat_ensprob_ADPSFC.conf
delete mode 100644 parm/metplus/PointStat_ensprob_ADPUPA.conf
create mode 100644 parm/metplus/metplus_macros.jinja
create mode 100644 parm/metplus/vx_config_det.yaml
create mode 100644 parm/metplus/vx_config_ens.yaml
create mode 100755 scripts/exregional_parse_vx_config.sh
create mode 100755 ush/metplus/decouple_fcst_obs_vx_config.py
diff --git a/jobs/JREGIONAL_CHECK_POST_OUTPUT b/jobs/JREGIONAL_CHECK_POST_OUTPUT
index a6403ebe1f..f55f730cf4 100755
--- a/jobs/JREGIONAL_CHECK_POST_OUTPUT
+++ b/jobs/JREGIONAL_CHECK_POST_OUTPUT
@@ -51,10 +51,11 @@ print_info_msg "
Entering script: \"${scrfunc_fn}\"
In directory: \"${scrfunc_dir}\"
-This is the J-job script for the task that checks that all the post-
-processed output files in fact exist and are at least a certain age.
-These files may have been generated by UPP as part of the current SRW
-App workflow, or they may be user-staged.
+This is the J-job for the task that checks that no more than
+NUM_MISSING_FCST_FILES_MAX of each forecast's (ensemble member's) post-
+processed output files are missing. Note that such files may have been
+generated by UPP as part of the current SRW App workflow, or they may be
+user-staged.
========================================================================"
#
#-----------------------------------------------------------------------
@@ -70,9 +71,9 @@ Call to ex-script corresponding to J-job \"${scrfunc_fn}\" failed."
#
#-----------------------------------------------------------------------
#
-# Create a flag file to make rocoto aware that the make_grid task has
-# successfully completed (so that other tasks that depend on it can be
-# launched).
+# Create a flag file to make rocoto aware that the check_post_output task
+# has successfully completed (so that other tasks that depend on it can
+# be launched).
#
#-----------------------------------------------------------------------
#
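In code terms, the renamed check amounts to: count the expected post-processed files that are absent for a given forecast (ensemble member) and fail only if that count exceeds NUM_MISSING_FCST_FILES_MAX. A minimal sketch with illustrative paths (the actual logic lives in scripts/exregional_check_post_output.sh):

    from pathlib import Path

    def post_output_ok(expected_files, num_missing_max):
        """True if no more than num_missing_max of the expected files are missing."""
        missing = [f for f in expected_files if not Path(f).is_file()]
        return len(missing) <= num_missing_max

    # e.g. tolerate at most 2 missing files over a 12-hour forecast (paths hypothetical)
    ok = post_output_ok([f"postprd/mem001/post.f{h:03d}.grib2" for h in range(13)], 2)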
diff --git a/jobs/JREGIONAL_PARSE_VX_CONFIG b/jobs/JREGIONAL_PARSE_VX_CONFIG
new file mode 100755
index 0000000000..c1cbba8e34
--- /dev/null
+++ b/jobs/JREGIONAL_PARSE_VX_CONFIG
@@ -0,0 +1,97 @@
+#!/usr/bin/env bash
+
+#
+#-----------------------------------------------------------------------
+#
+# This is the J-job script for the task that parses the coupled vx
+# configuration file into separate forecast and observation configurations.
+#-----------------------------------------------------------------------
+#
+
+#
+#-----------------------------------------------------------------------
+#
+# Source the variable definitions file and the bash utility functions.
+#
+#-----------------------------------------------------------------------
+#
+. $USHdir/source_util_funcs.sh
+source_config_for_task "task_parse_vx_config" ${GLOBAL_VAR_DEFNS_FP}
+. $USHdir/job_preamble.sh
+#
+#-----------------------------------------------------------------------
+#
+# Save current shell options (in a global array). Then set new options
+# for this script/function.
+#
+#-----------------------------------------------------------------------
+#
+{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1
+#
+#-----------------------------------------------------------------------
+#
+# Get the full path to the file in which this script/function is located
+# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in
+# which the file is located (scrfunc_dir).
+#
+#-----------------------------------------------------------------------
+#
+scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" )
+scrfunc_fn=$( basename "${scrfunc_fp}" )
+scrfunc_dir=$( dirname "${scrfunc_fp}" )
+#
+#-----------------------------------------------------------------------
+#
+# Print message indicating entry into script.
+#
+#-----------------------------------------------------------------------
+#
+print_info_msg "
+========================================================================
+Entering script: \"${scrfunc_fn}\"
+In directory: \"${scrfunc_dir}\"
+
+This is the J-job script for the task that reads in the \"coupled\" yaml
+verification (vx) configuration file (python dictionary) and generates
+from it two \"decoupled\" vx configuration dictionaries, one for forecasts
+and another for observations. The task then writes these two decoupled
+dictionaries to a new configuration file in the experiment directory
+that can be read by downstream vx tasks.
+
+Note:
+The \"coupled\" vx configuration file contains items (dictionary keys and
+values representing field names, levels, and thresholds) that consist of
+both the forecast and the observation value for that item separated by a
+delimiter string. Thus, they first need to be separated (decoupled) into
+a value for forecasts and one for the observations before they can be
+further processed.
+========================================================================"
+#
+#-----------------------------------------------------------------------
+#
+# Call the ex-script for this J-job and pass to it the necessary varia-
+# bles.
+#
+#-----------------------------------------------------------------------
+#
+$SCRIPTSdir/exregional_parse_vx_config.sh || \
+print_err_msg_exit "\
+Call to ex-script corresponding to J-job \"${scrfunc_fn}\" failed."
+#
+#-----------------------------------------------------------------------
+#
+# Run job postamble.
+#
+#-----------------------------------------------------------------------
+#
+job_postamble
+#
+#-----------------------------------------------------------------------
+#
+# Restore the shell options saved at the beginning of this script/func-
+# tion.
+#
+#-----------------------------------------------------------------------
+#
+{ restore_shell_opts; } > /dev/null 2>&1
+
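Continuing the decoupling sketch from the commit message: the two resulting dictionaries are written to a single file keyed by "fcst" and "obs", the structure the new METplus templates read back as vx_config_dict. A hedged illustration with PyYAML; the output file name below is illustrative, not the one the ex-script actually writes:

    import yaml

    # Decoupled dictionaries as produced by decouple_fcst_obs_vx_config.py.
    fcst_cfg = {"REFC": {"L0": ["ge20"]}}
    obs_cfg = {"MergedReflectivityQCComposite": {"Z500": ["ge20"]}}

    # Write both under one roof so downstream vx tasks read a single file.
    with open("vx_config_decoupled.yaml", "w") as f:
        yaml.safe_dump({"fcst": fcst_cfg, "obs": obs_cfg}, f, sort_keys=False)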
diff --git a/jobs/JREGIONAL_RUN_MET_PB2NC_OBS b/jobs/JREGIONAL_RUN_MET_PB2NC_OBS
index e36e72418f..2767ae1146 100755
--- a/jobs/JREGIONAL_RUN_MET_PB2NC_OBS
+++ b/jobs/JREGIONAL_RUN_MET_PB2NC_OBS
@@ -51,8 +51,8 @@ print_info_msg "
Entering script: \"${scrfunc_fn}\"
In directory: \"${scrfunc_dir}\"
-This is the J-job script for the task that runs METplus for point-stat
-by initialization time for all forecast hours.
+This is the J-job script for the task that runs METplus for pb2nc on
+NDAS observations.
========================================================================"
#
#-----------------------------------------------------------------------
diff --git a/parm/metplus/EnsembleStat.conf b/parm/metplus/EnsembleStat.conf
new file mode 100644
index 0000000000..1ca46b961e
--- /dev/null
+++ b/parm/metplus/EnsembleStat.conf
@@ -0,0 +1,759 @@
+# {{MetplusToolName}} METplus Configuration
+
+[config]
+
+# List of applications (tools) to run.
+PROCESS_LIST = {{MetplusToolName}}
+
+# time looping - options are INIT, VALID, RETRO, and REALTIME
+# If set to INIT or RETRO:
+# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set
+# If set to VALID or REALTIME:
+# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set
+LOOP_BY = INIT
+
+# Format of INIT_BEG and INIT_END using % items
+# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc.
+# see www.strftime.org for more information
+# %Y%m%d%H expands to YYYYMMDDHH
+INIT_TIME_FMT = %Y%m%d%H
+
+# Start time for METplus run - must match INIT_TIME_FMT
+INIT_BEG = {{cdate}}
+
+# End time for METplus run - must match INIT_TIME_FMT
+INIT_END = {{cdate}}
+
+# Increment between METplus runs (in seconds if no units are specified).
+# Must be >= 60 seconds.
+INIT_INCREMENT = 3600
+
+# List of forecast leads to process for each run time (init or valid)
+# In hours if units are not specified
+# If unset, defaults to 0 (don't loop through forecast leads)
+LEAD_SEQ = {{fhr_list}}
+#
+# Order of loops to process data - Options are times, processes
+# Not relevant if only one item is in the PROCESS_LIST
+# times = run all wrappers in the PROCESS_LIST for a single run time, then
+# increment the run time and run all wrappers again until all times have
+# been evaluated.
+# processes = run the first wrapper in the PROCESS_LIST for all times
+# specified, then repeat for the next item in the PROCESS_LIST until all
+# wrappers have been run
+#
+LOOP_ORDER = times
+#
+# Specify the name of the METplus log file.
+#
+LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}}
+#
+# Specify the location and name of the final METplus conf file.
+#
+METPLUS_CONF = {{ '{' ~ METPLUS_TOOL_NAME ~ '_OUTPUT_DIR}' }}/metplus_final.{{metplus_config_fn}}
+#
+# Location of MET configuration file to pass to {{MetplusToolName}}.
+#
+# References PARM_BASE, which is the location of the parm directory
+# corresponding to the ush directory of the run_metplus.py script that
+# is called or the value of the environment variable METPLUS_PARM_BASE
+# if set.
+#
+{{METPLUS_TOOL_NAME}}_CONFIG_FILE = {PARM_BASE}/met_config/{{MetplusToolName}}Config_wrapped
+#
+# Name to identify model (forecast) data in output.
+#
+MODEL = {{vx_fcst_model_name}}
+
+{{METPLUS_TOOL_NAME}}_DESC = NA
+#
+# Name to identify observation data in output.
+#
+OBTYPE = {{obtype}}
+#
+# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud.
+#
+LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}}
+#
+# Observation data time window(s).
+#
+{%- if input_field_group in ['APCP', 'ASNOW'] %}
+OBS_FILE_WINDOW_BEGIN = 0
+OBS_FILE_WINDOW_END = 0
+OBS_{{METPLUS_TOOL_NAME}}_WINDOW_BEGIN = 0
+OBS_{{METPLUS_TOOL_NAME}}_WINDOW_END = 0
+{%- elif input_field_group in ['REFC', 'RETOP'] %}
+OBS_FILE_WINDOW_BEGIN = -300
+OBS_FILE_WINDOW_END = 300
+OBS_{{METPLUS_TOOL_NAME}}_WINDOW_BEGIN = 0
+OBS_{{METPLUS_TOOL_NAME}}_WINDOW_END = 0
+{%- elif input_field_group in ['ADPSFC', 'ADPUPA'] %}
+OBS_WINDOW_BEGIN = -1799
+OBS_WINDOW_END = 1800
+OBS_{{METPLUS_TOOL_NAME}}_WINDOW_BEGIN = {OBS_WINDOW_BEGIN}
+OBS_{{METPLUS_TOOL_NAME}}_WINDOW_END = {OBS_WINDOW_END}
+{%- endif %}
+
+# number of expected members for ensemble. Should correspond with the
+# number of items in the list for FCST_{{METPLUS_TOOL_NAME}}_INPUT_TEMPLATE
+{{METPLUS_TOOL_NAME}}_N_MEMBERS = {{num_ens_members}}
+
+# ens.ens_thresh value in the MET config file
+# threshold for ratio of valid files to expected files to allow app to run
+{{METPLUS_TOOL_NAME}}_ENS_THRESH = 0.05
+
+# ens.vld_thresh value in the MET config file
+{{METPLUS_TOOL_NAME}}_ENS_VLD_THRESH = 1.0
+
+{%- if input_field_group in ['ADPSFC', 'ADPUPA'] %}
+
+{{METPLUS_TOOL_NAME}}_OBS_QUALITY_INC = 0, 1, 2, 3, 9, NA
+#{{METPLUS_TOOL_NAME}}_OBS_QUALITY_EXC =
+{%- endif %}
+
+# {{METPLUS_TOOL_NAME}}_MET_OBS_ERR_TABLE is not required.
+# If the variable is not defined, or the value is not set, then the MET
+# default is used.
+{%- if input_field_group in ['APCP', 'ASNOW'] %}
+{{METPLUS_TOOL_NAME}}_MET_OBS_ERR_TABLE = {MET_BASE}/table_files/obs_error_table.txt
+{%- elif input_field_group in ['REFC', 'RETOP'] %}
+{{METPLUS_TOOL_NAME}}_MET_OBS_ERR_TABLE =
+{%- elif input_field_group in ['ADPSFC', 'ADPUPA'] %}
+{{METPLUS_TOOL_NAME}}_MET_OBS_ERR_TABLE = {MET_BASE}/table_files/obs_error_table.txt
+{%- endif %}
+
+
+# Used in the MET config file for: regrid to_grid field
+{%- set comment_or_null = '' %}
+{%- set regrid_to_grid = '' %}
+{%- set regrid_method = '' %}
+{%- if input_field_group in ['APCP', 'ASNOW'] %}
+ {%- set comment_or_null = '' %}
+ {%- set regrid_to_grid = 'FCST' %}
+ {%- set regrid_method = 'BUDGET' %}
+{%- elif input_field_group in ['REFC', 'RETOP'] %}
+ {%- set comment_or_null = '' %}
+ {%- set regrid_to_grid = 'FCST' %}
+ {%- set regrid_method = 'BUDGET' %}
+{%- elif input_field_group in ['ADPSFC', 'ADPUPA'] %}
+ {%- set comment_or_null = '#' %}
+ {%- set regrid_to_grid = 'NONE' %}
+ {%- set regrid_method = 'BILIN' %}
+{%- endif %}
+{{METPLUS_TOOL_NAME}}_REGRID_TO_GRID = {{regrid_to_grid}}
+{{comment_or_null}}{{METPLUS_TOOL_NAME}}_REGRID_METHOD = {{regrid_method}}
+{{comment_or_null}}{{METPLUS_TOOL_NAME}}_REGRID_WIDTH = 2
+{{comment_or_null}}{{METPLUS_TOOL_NAME}}_REGRID_VLD_THRESH = 0.5
+{{comment_or_null}}{{METPLUS_TOOL_NAME}}_REGRID_SHAPE = SQUARE
+
+{{METPLUS_TOOL_NAME}}_CENSOR_THRESH =
+{{METPLUS_TOOL_NAME}}_CENSOR_VAL =
+{% if input_field_group in ['APCP', 'ASNOW'] %}
+{{METPLUS_TOOL_NAME}}_MESSAGE_TYPE =
+{{METPLUS_TOOL_NAME}}_DUPLICATE_FLAG = UNIQUE
+{{METPLUS_TOOL_NAME}}_SKIP_CONST = TRUE
+{{METPLUS_TOOL_NAME}}_OBS_ERROR_FLAG = TRUE
+{%- elif input_field_group in ['REFC', 'RETOP'] %}
+# Should this parameter be set to something other than ADPSFC (maybe
+# just leave empty) since we are not verifying surface fields?
+{{METPLUS_TOOL_NAME}}_MESSAGE_TYPE = ADPSFC
+{{METPLUS_TOOL_NAME}}_DUPLICATE_FLAG = NONE
+{{METPLUS_TOOL_NAME}}_SKIP_CONST = TRUE
+{{METPLUS_TOOL_NAME}}_OBS_ERROR_FLAG = FALSE
+{%- elif input_field_group in ['ADPSFC', 'ADPUPA'] %}
+{{METPLUS_TOOL_NAME}}_MESSAGE_TYPE = {{input_field_group}}
+{{METPLUS_TOOL_NAME}}_DUPLICATE_FLAG = NONE
+{{METPLUS_TOOL_NAME}}_SKIP_CONST = FALSE
+{{METPLUS_TOOL_NAME}}_OBS_ERROR_FLAG = FALSE
+{%- endif %}
+
+{{METPLUS_TOOL_NAME}}_ENS_SSVAR_BIN_SIZE = 1.0
+{{METPLUS_TOOL_NAME}}_ENS_PHIST_BIN_SIZE = 0.05
+
+#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_FILE_NAME =
+#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_FIELD =
+#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_REGRID_METHOD =
+#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_REGRID_WIDTH =
+#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_REGRID_VLD_THRESH =
+#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_REGRID_SHAPE =
+#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_TIME_INTERP_METHOD =
+#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_MATCH_MONTH =
+#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_DAY_INTERVAL = 31
+#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_HOUR_INTERVAL = 6
+
+#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_FILE_NAME =
+#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_FIELD =
+#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_REGRID_METHOD =
+#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_REGRID_WIDTH =
+#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_REGRID_VLD_THRESH =
+#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_REGRID_SHAPE =
+#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_TIME_INTERP_METHOD =
+#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_MATCH_MONTH =
+#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_DAY_INTERVAL = 31
+#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_HOUR_INTERVAL = 6
+
+{% set comment_or_null = '' %}
+{%- if input_field_group in ['APCP', 'ASNOW'] %}
+ {%- set comment_or_null = '' %}
+{%- elif input_field_group in ['REFC', 'RETOP'] %}
+ {%- set comment_or_null = '' %}
+{%- elif input_field_group in ['ADPSFC', 'ADPUPA'] %}
+ {%- set comment_or_null = '#' %}
+{%- endif %}
+{{comment_or_null}}{{METPLUS_TOOL_NAME}}_CLIMO_CDF_BINS = 1
+{{comment_or_null}}{{METPLUS_TOOL_NAME}}_CLIMO_CDF_CENTER_BINS = False
+{{METPLUS_TOOL_NAME}}_CLIMO_CDF_WRITE_BINS = False
+{% if input_field_group in ['APCP', 'ASNOW'] %}
+{{METPLUS_TOOL_NAME}}_MASK_GRID =
+{%- elif input_field_group in ['REFC', 'RETOP'] %}
+{{METPLUS_TOOL_NAME}}_MASK_GRID = FULL
+{%- elif input_field_group in ['ADPSFC', 'ADPUPA'] %}
+{{METPLUS_TOOL_NAME}}_MASK_GRID =
+{%- endif %}
+
+{{METPLUS_TOOL_NAME}}_CI_ALPHA = 0.05
+
+{{METPLUS_TOOL_NAME}}_INTERP_FIELD = BOTH
+{{METPLUS_TOOL_NAME}}_INTERP_VLD_THRESH = 1.0
+{{METPLUS_TOOL_NAME}}_INTERP_SHAPE = SQUARE
+{{METPLUS_TOOL_NAME}}_INTERP_METHOD = NEAREST
+{{METPLUS_TOOL_NAME}}_INTERP_WIDTH = 1
+
+{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_ECNT = STAT
+{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_RPS = NONE
+{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_RHIST = STAT
+{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_PHIST = STAT
+{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_ORANK = STAT
+{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_SSVAR = STAT
+{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_RELP = STAT
+
+{{METPLUS_TOOL_NAME}}_ENSEMBLE_FLAG_RANK = FALSE
+{{METPLUS_TOOL_NAME}}_ENSEMBLE_FLAG_WEIGHT = FALSE
+#
+# Forecast and observation variables and levels as specified in the fcst
+# field dictionary of the MET configuration file. Specify as FCST_VARn_NAME,
+# FCST_VARn_LEVELS, (optional) FCST_VARn_OPTION and OBS_VARn_NAME,
+# OBS_VARn_LEVELS, (optional) OBS_VARn_OPTION.
+#
+{#-
+Import the file containing jinja macros.
+#}
+{%- import metplus_templates_dir ~ '/metplus_macros.jinja' as metplus_macros %}
+
+{#-
+Jinja requires certain variables to be defined globally within the template
+before they can be used in if-statements and other scopes (see Jinja
+scoping rules). Define such variables.
+#}
+{%- set level_fcst = '' %}
+{%- set level_obs = '' %}
+{%- set indx_level_fcst = '' %}
+
+{%- set valid_threshes_fcst = [] %}
+{%- set valid_threshes_obs = [] %}
+{%- set threshes_fcst = [] %}
+{%- set threshes_obs = [] %}
+{%- set indx_input_thresh_fcst = '' %}
+
+{%- set opts_indent = '' %}
+{%- set opts_indent_len = '' %}
+{%- set tmp = '' %}
+{%- set error_msg = '' %}
+{#-
+Make sure that the set of field groups for forecasts and observations
+are identical.
+#}
+{%- set fgs_fcst = vx_config_dict['fcst'].keys()|list %}
+{%- set fgs_obs = vx_config_dict['obs'].keys()|list %}
+{%- if (fgs_fcst != fgs_obs) %}
+ {%- set error_msg = '\n' ~
+'The list of valid field groups for forecasts (fgs_fcst) must be identical\n' ~
+'to that for observations (fgs_obs) but isn\'t:\n' ~
+' fgs_fcst = ' ~ fgs_fcst ~ '\n' ~
+' fgs_obs = ' ~ fgs_obs %}
+ {{metplus_macros.print_err_and_quit(error_msg)}}
+{%- endif %}
+
+{#-
+Extract the lists of forecast and observation dictionaries containing
+the valid fields, levels, and thresholds corresponding to the specified
+field group (input_field_group). Note that it would be simpler to have
+these be just dictionaries in which the keys are the field names (instead
+of them being LISTS of dictionaries in which each dictionary contains a
+single key that is the field name), but that approach cannot be used here
+because it is possible for field names to be repeated (for both forecasts
+and observations). For example, in the observations, the field name
+'PRWE' appears more than once, each time with a different threshold, and
+the combination of name and threshold is what constitutes a unique field,
+not just the name by itself.
+#}
+{%- set fields_levels_threshes_fcst = vx_config_dict['fcst'][input_field_group] %}
+{%- set fields_levels_threshes_obs = vx_config_dict['obs'][input_field_group] %}
+
+{#-
+Reset the specified forecast level so that if it happens to be an
+accumulation (e.g. 'A03'), the leading zeros in front of the hour are
+stripped out (e.g. reset to 'A3').
+#}
+{%- set input_level_fcst = metplus_macros.get_accumulation_no_zero_pad(input_level_fcst) %}
+
+{#-
+Ensure that the specified input forecast level(s) (input_level_fcst) and
+threshold(s) (input_thresh_fcst) are valid, i.e. that they are in the
+set(s) of valid forecast levels and thresholds, respectively, specified
+in fields_levels_threshes_fcst.
+#}
+{{- metplus_macros.check_level(fields_levels_threshes_fcst, input_level_fcst) }}
+{{- metplus_macros.check_thresh(fields_levels_threshes_fcst, input_level_fcst, input_thresh_fcst) }}
+
+{#-
+For convenience, create lists of valid forecast and observation field
+names.
+#}
+{%- set num_valid_fields_fcst = fields_levels_threshes_fcst|length %}
+{%- set valid_fields_fcst = [] %}
+{%- for i in range(0,num_valid_fields_fcst) %}
+ {%- set field = fields_levels_threshes_fcst[i].keys()|list|join('') %}
+ {%- set tmp = valid_fields_fcst.append(field) %}
+{%- endfor %}
+
+{%- set valid_fields_obs = [] %}
+{%- set num_valid_fields_obs = fields_levels_threshes_obs|length %}
+{%- for i in range(0,num_valid_fields_obs) %}
+ {%- set field = fields_levels_threshes_obs[i].keys()|list|join('') %}
+ {%- set tmp = valid_fields_obs.append(field) %}
+{%- endfor %}
+
+{#-
+Ensure that the number of valid fields for forecasts is equal to that
+for the observations.
+#}
+{%- set num_valid_fields = 0 %}
+{%- if (num_valid_fields_fcst != num_valid_fields_obs) %}
+ {%- set error_msg = '\n' ~
+'The number of valid forecast fields (num_valid_fields_fcst) must be\n' ~
+'equal to the number of valid observation fields (num_valid_fields_obs)\n' ~
+'but isn\'t:\n' ~
+' num_valid_fields_fcst = ' ~ num_valid_fields_fcst ~ '\n' ~
+' num_valid_fields_obs = ' ~ num_valid_fields_obs ~ '\n' ~
+'The lists of valid forecast and observation fields are:\n' ~
+' valid_fields_fcst = ' ~ valid_fields_fcst ~ '\n' ~
+' valid_fields_obs = ' ~ valid_fields_obs ~ '\n' %}
+ {{metplus_macros.print_err_and_quit(error_msg)}}
+{%- else %}
+ {%- set num_valid_fields = num_valid_fields_fcst %}
+{%- endif %}
+
+{#-
+Loop over the valid fields and set field names, levels, thresholds, and/
+or options for each field, both for forecasts and for observations, in
+the METplus configuration file.
+#}
+{%- set ns = namespace(var_count = 0) %}
+{%- for i in range(0,num_valid_fields) %}
+
+ {%- set field_fcst = valid_fields_fcst[i] %}
+ {%- set field_obs = valid_fields_obs[i] %}
+
+{#-
+For convenience, create lists of valid forecast and observation levels
+for the current field. Then check that the number of valid levels for
+forecasts is the same as that for observations.
+#}
+ {%- set valid_levels_fcst = fields_levels_threshes_fcst[i][field_fcst].keys()|list %}
+ {%- set valid_levels_obs = fields_levels_threshes_obs[i][field_obs].keys()|list %}
+
+{#-
+Extract dictionary of valid forecast levels (the dictionary keys) and
+corresponding lists of valid thresholds (the values) for each level.
+Then loop over these levels and corresponding lists of thresholds to set
+both the forecast and observation field names, levels, thresholds, and/or
+options.
+#}
+ {%- set valid_levels_threshes_fcst = fields_levels_threshes_fcst[i][field_fcst] %}
+ {%- for level_fcst, valid_threshes_fcst in valid_levels_threshes_fcst.items() %}
+
+ {%- if (input_level_fcst == 'all') or (input_level_fcst == level_fcst) %}
+{#-
+Increment the METplus variable counter.
+#}
+ {%- set ns.var_count = ns.var_count+1 %}
+
+{#-
+Set forecast field name. Note that this has to exactly match the name
+of the field in the input forecast file.
+
+For accumulated fields, the input forecast file is generated by MET's
+PcpCombine tool. In that file, the field name consists of the forecast
+field name here (field_fcst) with the accumulation period appended to
+it (separated by an underscore), so we must do the same here to get an
+exact match.
+#}
+ {%- if (input_field_group in ['APCP', 'ASNOW']) %}
+FCST_VAR{{ns.var_count}}_NAME = {{field_fcst}}_{{accum_hh}}
+ {%- else %}
+FCST_VAR{{ns.var_count}}_NAME = {{field_fcst}}
+ {%- endif %}
+
+{#-
+Set forecast field level.
+#}
+FCST_VAR{{ns.var_count}}_LEVELS = {{level_fcst}}
+
+{#-
+Set forecast field threshold(s). Note that no forecast thresholds are
+included in the METplus configuration file if input_thresh_fcst is set
+to 'none'.
+#}
+ {%- if (input_thresh_fcst != 'none') %}
+{#-
+If input_thresh_fcst is set to 'all', set the list of forecast thresholds
+to the full set of valid values.
+#}
+ {%- if (input_thresh_fcst == 'all') %}
+
+ {%- set threshes_fcst = valid_threshes_fcst %}
+{#-
+If input_thresh_fcst is set to a specific value:
+ 1) Ensure that input_thresh_fcst exists in the list of valid forecast
+ thresholds.
+ 2) Get the index of input_thresh_fcst in the list of valid forecast
+ thresholds. This will be needed later below when setting the
+ observation threshold(s).
+ 3) Use this index to set the forecast threshold to a one-element list
+ containing the specified forecast threshold.
+#}
+ {%- else %}
+
+ {%- if input_thresh_fcst not in valid_threshes_fcst %}
+ {%- set error_msg = '\n' ~
+'For the current forecast field (field_fcst) and forecast level (level_fcst),\n' ~
+'the input forecast threshold (input_thresh_fcst) does not exist in the list\n' ~
+'of valid forecast thresholds (valid_threshes_fcst):\n' ~
+' field_fcst = ' ~ field_fcst ~ '\n' ~
+' level_fcst = ' ~ level_fcst ~ '\n' ~
+' valid_threshes_fcst = ' ~ valid_threshes_fcst ~ '\n' ~
+' input_thresh_fcst = ' ~ input_thresh_fcst ~ '\n' %}
+ {{metplus_macros.print_err_and_quit(error_msg)}}
+ {%- endif %}
+ {%- set indx_input_thresh_fcst = valid_threshes_fcst.index(input_thresh_fcst) %}
+ {%- set threshes_fcst = [valid_threshes_fcst[indx_input_thresh_fcst]] %}
+
+ {%- endif %}
+{#-
+If threshes_fcst has been reset to something other than its default
+value of an empty list, then set the forecast thresholds in the METplus
+configuration file because that implies threshes_fcst was set above to
+a non-empty value. Then reset threshes_fcst to its default value for
+proper processing of thresholds for the next field.
+#}
+ {%- if (threshes_fcst != []) %}
+FCST_VAR{{ns.var_count}}_THRESH = {{threshes_fcst|join(', ')}}
+ {%- endif %}
+ {%- set threshes_fcst = [] %}
+
+ {%- endif %}
+
+{#-
+Set forecast field options.
+#}
+ {%- set opts_indent_len = 20 %}
+ {%- if (ns.var_count > 9) and (ns.var_count <= 99) %}
+ {%- set opts_indent_len = opts_indent_len + 1 %}
+ {%- elif (ns.var_count > 99) and (ns.var_count <= 999) %}
+ {%- set opts_indent_len = opts_indent_len + 2 %}
+ {%- elif (ns.var_count > 999) %}
+ {%- set opts_indent_len = opts_indent_len + 3 %}
+ {%- endif %}
+ {%- set opts_indent = ' '*opts_indent_len %}
+
+ {%- if input_field_group == 'APCP' %}
+
+ {%- if field_fcst == 'APCP' %}
+FCST_VAR{{ns.var_count}}_OPTIONS = ens_ssvar_bin_size = 50.0;
+{{opts_indent}}ens_phist_bin_size = 0.05;
+ {%- endif %}
+
+ {%- elif input_field_group == 'ASNOW' %}
+
+ {%- if field_fcst == 'ASNOW' %}
+FCST_VAR{{ns.var_count}}_OPTIONS = ens_ssvar_bin_size = 50.0;
+{{opts_indent}}ens_phist_bin_size = 0.05;
+ {%- endif %}
+
+ {%- elif input_field_group == 'REFC' %}
+
+ {%- if field_fcst == 'REFC' %}
+FCST_VAR{{ns.var_count}}_OPTIONS = ens_ssvar_bin_size = 50.0;
+{{opts_indent}}ens_phist_bin_size = 0.05;
+ {%- endif %}
+
+ {%- elif input_field_group == 'RETOP' %}
+
+ {%- if field_fcst == 'RETOP' %}
+FCST_VAR{{ns.var_count}}_OPTIONS = convert(x) = x * 3.28084 * 0.001; ;; Convert from meters to kilofeet.
+{{opts_indent}}ens_ssvar_bin_size = 50.0;
+{{opts_indent}}ens_phist_bin_size = 0.05;
+ {%- endif %}
+
+ {%- elif input_field_group == 'ADPSFC' %}
+
+ {%- if field_fcst == 'HGT' %}
+FCST_VAR{{ns.var_count}}_OPTIONS = GRIB_lvl_typ = 215;
+{{opts_indent}}desc = "CEILING";
+ {%- elif field_fcst == 'TCDC' %}
+FCST_VAR{{ns.var_count}}_OPTIONS = GRIB_lvl_typ = 200;
+{{opts_indent}}GRIB2_ipdtmpl_index=[27];
+{{opts_indent}}GRIB2_ipdtmpl_val=[255];
+{{opts_indent}}interp = { type = [ { method = NEAREST; width = 1; } ]; }
+ {%- elif field_fcst == 'VIS' %}
+FCST_VAR{{ns.var_count}}_OPTIONS = interp = { type = [ { method = NEAREST; width = 1; } ]; }
+ {%- elif field_fcst == 'WIND' %}
+FCST_VAR{{ns.var_count}}_OPTIONS = GRIB2_pdt = 0; ;; Derive instantaneous 10-m wind from U/V components, overriding max 10-m wind.
+ {%- endif %}
+
+ {%- elif input_field_group == 'ADPUPA' %}
+
+ {%- if field_fcst == 'CAPE' %}
+FCST_VAR{{ns.var_count}}_OPTIONS = cnt_thresh = [ >0 ];
+ {%- endif %}
+
+ {%- endif %}
+
+{#-
+Set observation field name. Note that this has to exactly match the name
+of the field in the input observation file.
+
+For accumulated fields, the input observation file is generated by MET's
+PcpCombine tool. In that file, the field name consists of the observation
+field name here (field_obs) with the accumulation period appended to it
+(separated by an underscore), so we must do the same here to get an exact
+match.
+
+Note:
+Turns out for ASNOW, PcpCombine is not run for obs, so we exclude that
+from the "if" clause here (so it goes into the "else"). For workflow
+behavior uniformity between APCP and ASNOW, consider running PcpCombine
+for ASNOW observations as well (just as it's run for APCP observations).
+ {%- if (input_field_group in ['APCP', 'ASNOW']) %}
+#}
+ {%- if (input_field_group in ['APCP']) %}
+OBS_VAR{{ns.var_count}}_NAME = {{field_obs}}_{{accum_hh}}
+ {%- else %}
+OBS_VAR{{ns.var_count}}_NAME = {{field_obs}}
+ {%- endif %}
+
+{#-
+Set observation field level.
+#}
+ {%- set indx_level_fcst = valid_levels_fcst.index(level_fcst) %}
+ {%- set level_obs = valid_levels_obs[indx_level_fcst] %}
+OBS_VAR{{ns.var_count}}_LEVELS = {{level_obs}}
+
+{#-
+Set observation field threshold(s). Note that no observation thresholds
+are included in the METplus configuration file if input_thresh_fcst is
+set to 'none'.
+#}
+ {%- if (input_thresh_fcst != 'none') %}
+{#-
+Set the list of valid observation thresholds to the one corresponding to
+the current observation level (level_obs).
+#}
+ {%- set valid_threshes_obs = fields_levels_threshes_obs[i][field_obs][level_obs] %}
+{#-
+If input_thresh_fcst is set to 'all', set the list of observation thresholds
+to the full set of valid values.
+#}
+ {%- if (input_thresh_fcst == 'all') %}
+
+ {%- set threshes_obs = valid_threshes_obs %}
+{#-
+If input_thresh_fcst is set to a specific forecast threshold, then the
+observation threshold is given by the element in the list of valid
+observation thresholds that has the same index as that of input_thresh_fcst
+in the list of valid forecast thresholds.
+#}
+ {%- else %}
+ {%- set threshes_obs = [valid_threshes_obs[indx_input_thresh_fcst]] %}
+ {%- endif %}
+{#-
+If threshes_obs has been reset to something other than its default value
+of an empty list, then set the observation thresholds in the METplus
+configuration file because that implies threshes_obs was set above to
+a non-empty value. Then reset threshes_obs to its default value for
+proper processing of thresholds for the next field.
+#}
+ {%- if (threshes_obs != []) %}
+OBS_VAR{{ns.var_count}}_THRESH = {{threshes_obs|join(', ')}}
+ {%- endif %}
+ {%- set threshes_obs = [] %}
+
+ {%- endif %}
+
+{#-
+Set observation field options.
+#}
+ {%- set opts_indent_len = opts_indent_len - 1 %}
+ {%- set opts_indent = ' '*opts_indent_len %}
+
+ {%- if input_field_group == 'APCP' %}
+
+ {%- if field_obs == 'APCP' %}
+OBS_VAR{{ns.var_count}}_OPTIONS = {FCST_VAR{{ns.var_count}}_OPTIONS}
+ {%- endif %}
+
+ {%- elif input_field_group == 'ASNOW' %}
+
+ {%- if field_obs == 'ASNOW' %}
+OBS_VAR{{ns.var_count}}_OPTIONS = {FCST_VAR{{ns.var_count}}_OPTIONS};
+{{opts_indent}}convert(x) = 100.0*x;
+ {%- endif %}
+
+ {%- elif input_field_group == 'REFC' %}
+
+ {%- if field_obs == 'MergedReflectivityQCComposite' %}
+OBS_VAR{{ns.var_count}}_OPTIONS = censor_thresh = lt-20;
+{{opts_indent}}censor_val = -20.0;
+{{opts_indent}}cnt_thresh = [ >15 ];
+{{opts_indent}}cnt_logic = UNION;
+{{opts_indent}}ens_ssvar_bin_size = 50.0;
+{{opts_indent}}ens_phist_bin_size = 0.05;
+ {%- endif %}
+
+ {%- elif input_field_group == 'RETOP' %}
+
+ {%- if field_obs == 'EchoTop18' %}
+OBS_VAR{{ns.var_count}}_OPTIONS = censor_thresh = lt-20;
+{{opts_indent}}censor_val = -20.0;
+{{opts_indent}}cnt_thresh = [ >15 ];
+{{opts_indent}}cnt_logic = UNION;
+{{opts_indent}}convert(x) = x * 3280.84 * 0.001; ;; Convert from kilometers to kilofeet.
+{{opts_indent}}ens_ssvar_bin_size = 50.0;
+{{opts_indent}}ens_phist_bin_size = 0.05;
+ {%- endif %}
+
+ {%- elif input_field_group == 'ADPSFC' %}
+
+ {%- if field_obs in ['DPT', 'TMP', 'WIND'] %}
+OBS_VAR{{ns.var_count}}_OPTIONS = obs_error = { flag = TRUE; dist_type = NONE; dist_parm = []; inst_bias_scale = 1.0; inst_bias_offset = 0.0; min = NA; max = NA; }
+ {%- elif field_obs == 'CEILING' %}
+OBS_VAR{{ns.var_count}}_OPTIONS = GRIB_lvl_typ = 215
+ {%- endif %}
+
+ {%- elif input_field_group == 'ADPUPA' %}
+
+ {%- if field_obs in ['DPT', 'HGT', 'TMP', 'WIND'] %}
+OBS_VAR{{ns.var_count}}_OPTIONS = obs_error = { flag = TRUE; dist_type = NONE; dist_parm = []; inst_bias_scale = 1.0; inst_bias_offset = 0.0; min = NA; max = NA; }
+ {%- elif field_obs == 'CAPE' %}
+OBS_VAR{{ns.var_count}}_OPTIONS = cnt_thresh = [ >0 ];
+{{opts_indent}}cnt_logic = UNION;
+ {%- elif field_obs == 'PBL' %}
+OBS_VAR{{ns.var_count}}_OPTIONS = desc = "TKE";
+ {%- endif %}
+
+ {%- endif %}
+{#-
+Print out a newline to separate the settings for the current field (both
+forecast and observation settings) from those for the next field.
+#}
+ {{- '\n' }}
+
+ {%- endif %}
+
+ {%- endfor %}
+{%- endfor %}
+[dir]
+#
+# OUTPUT_BASE apparently has to be set to something; it cannot be left
+# to its default value. But it is not explicitly used elsewhere in this
+# configuration file.
+#
+OUTPUT_BASE = {{output_base}}
+#
+# Point observation input directory for {{MetplusToolName}}.
+#
+{%- if input_field_group in ['ADPSFC', 'ADPUPA'] %}
+OBS_{{METPLUS_TOOL_NAME}}_POINT_INPUT_DIR = {{obs_input_dir}}
+{%- else %}
+OBS_{{METPLUS_TOOL_NAME}}_POINT_INPUT_DIR =
+{%- endif %}
+#
+# Grid observation input directory for {{MetplusToolName}}.
+#
+{%- if input_field_group in ['ADPSFC', 'ADPUPA'] %}
+OBS_{{METPLUS_TOOL_NAME}}_GRID_INPUT_DIR =
+{%- else %}
+OBS_{{METPLUS_TOOL_NAME}}_GRID_INPUT_DIR = {{obs_input_dir}}
+{%- endif %}
+#
+# Forecast model input directory for {{MetplusToolName}}.
+#
+FCST_{{METPLUS_TOOL_NAME}}_INPUT_DIR = {{fcst_input_dir}}
+#
+# Directory containing climatology mean input to {{MetplusToolName}}. Not used
+# in this example.
+#
+{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_INPUT_DIR =
+#
+# Directory containing climatology standard deviation input to
+# {{MetplusToolName}}. Not used in this example.
+#
+{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_INPUT_DIR =
+#
+# Output directory for {{MetplusToolName}}.
+#
+{{METPLUS_TOOL_NAME}}_OUTPUT_DIR = {{output_dir}}
+#
+# Directory for staging data.
+#
+STAGING_DIR = {{staging_dir}}
+
+# End of [dir] section and start of [filename_templates] section.
+[filename_templates]
+#
+# Template for point observation input to {{MetplusToolName}} relative to
+# OBS_{{METPLUS_TOOL_NAME}}_POINT_INPUT_DIR.
+#
+{%- if input_field_group in ['ADPSFC', 'ADPUPA'] %}
+OBS_{{METPLUS_TOOL_NAME}}_POINT_INPUT_TEMPLATE = {{obs_input_fn_template}}
+{%- else %}
+OBS_{{METPLUS_TOOL_NAME}}_POINT_INPUT_TEMPLATE =
+{%- endif %}
+#
+# Template for gridded observation input to {{MetplusToolName}} relative to
+# OBS_{{METPLUS_TOOL_NAME}}_GRID_INPUT_DIR.
+#
+{%- if input_field_group in ['ADPSFC', 'ADPUPA'] %}
+OBS_{{METPLUS_TOOL_NAME}}_GRID_INPUT_TEMPLATE =
+{%- else %}
+OBS_{{METPLUS_TOOL_NAME}}_GRID_INPUT_TEMPLATE = {{obs_input_fn_template}}
+{%- endif %}
+#
+# Template for forecast input to {{MetplusToolName}} relative to
+# FCST_{{METPLUS_TOOL_NAME}}_INPUT_DIR.
+#
+# Note that this can be a comma separated list of ensemble members
+# or a single line; filename wildcard characters (? or *) may be used.
+#
+FCST_{{METPLUS_TOOL_NAME}}_INPUT_TEMPLATE = {{fcst_input_fn_template}}
+#
+# Template for output from {{MetplusToolName}} relative to {{METPLUS_TOOL_NAME}}_OUTPUT_DIR.
+#
+{{METPLUS_TOOL_NAME}}_OUTPUT_TEMPLATE =
+{{METPLUS_TOOL_NAME}}_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE}_{lead?fmt=%H%M%S}L
+#
+# Template for climatology input to {{MetplusToolName}} relative to
+# {{METPLUS_TOOL_NAME}}_CLIMO_MEAN_INPUT_DIR. Not used in this example.
+#
+{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_INPUT_TEMPLATE =
+#
+# Template for climatology input to {{MetplusToolName}} relative to
+# {{METPLUS_TOOL_NAME}}_CLIMO_STDEV_INPUT_DIR. Not used in this example.
+#
+{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_INPUT_TEMPLATE =
+#
+# Variable used to specify one or more verification mask files for
+# {{MetplusToolName}}. Not used for this example.
+#
+{{METPLUS_TOOL_NAME}}_VERIFICATION_MASK_TEMPLATE = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly
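The Jinja scoping caveat noted in the template above (a plain {% set %} inside a for-loop cannot update a variable defined outside it) is why the METplus variable counter is threaded through a namespace() object. A small self-contained rendering with the jinja2 Python package shows the pattern:

    from jinja2 import Template

    # ns.var_count survives across loop iterations; a bare var_count would not.
    tmpl = Template(
        "{%- set ns = namespace(var_count=0) -%}"
        "{%- for field in fields -%}"
        "{%- set ns.var_count = ns.var_count + 1 %}"
        "FCST_VAR{{ ns.var_count }}_NAME = {{ field }}\n"
        "{% endfor %}"
    )
    print(tmpl.render(fields=["TMP", "DPT"]), end="")
    # FCST_VAR1_NAME = TMP
    # FCST_VAR2_NAME = DPT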
diff --git a/parm/metplus/EnsembleStat_ADPSFC.conf b/parm/metplus/EnsembleStat_ADPSFC.conf
deleted file mode 100644
index 07238030c1..0000000000
--- a/parm/metplus/EnsembleStat_ADPSFC.conf
+++ /dev/null
@@ -1,307 +0,0 @@
-# EnsembleStat METplus Configuration
-
-[config]
-
-# List of applications (tools) to run.
-PROCESS_LIST = EnsembleStat
-
-# time looping - options are INIT, VALID, RETRO, and REALTIME
-# If set to INIT or RETRO:
-# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set
-# If set to VALID or REALTIME:
-# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set
-LOOP_BY = INIT
-
-# Format of INIT_BEG and INIT_END using % items
-# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc.
-# see www.strftime.org for more information
-# %Y%m%d%H expands to YYYYMMDDHH
-INIT_TIME_FMT = %Y%m%d%H
-
-# Start time for METplus run - must match INIT_TIME_FMT
-INIT_BEG = {{cdate}}
-
-# End time for METplus run - must match INIT_TIME_FMT
-INIT_END = {{cdate}}
-
-# Increment between METplus runs (in seconds if no units are specified).
-# Must be >= 60 seconds.
-INIT_INCREMENT = 3600
-
-# List of forecast leads to process for each run time (init or valid)
-# In hours if units are not specified
-# If unset, defaults to 0 (don't loop through forecast leads)
-LEAD_SEQ = {{fhr_list}}
-#
-# Order of loops to process data - Options are times, processes
-# Not relevant if only one item is in the PROCESS_LIST
-# times = run all wrappers in the PROCESS_LIST for a single run time, then
-# increment the run time and run all wrappers again until all times have
-# been evaluated.
-# processes = run the first wrapper in the PROCESS_LIST for all times
-# specified, then repeat for the next item in the PROCESS_LIST until all
-# wrappers have been run
-#
-LOOP_ORDER = times
-#
-# Specify the name of the METplus log file.
-#
-LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}}
-#
-# Specify the location and name of the final METplus conf file.
-#
-METPLUS_CONF = {ENSEMBLE_STAT_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}}
-#
-# Location of MET configuration file to pass to EnsembleStat.
-#
-# References PARM_BASE, which is the location of the parm directory
-# corresponding to the ush directory of the run_metplus.py script that
-# is called or the value of the environment variable METPLUS_PARM_BASE
-# if set.
-#
-ENSEMBLE_STAT_CONFIG_FILE = {PARM_BASE}/met_config/EnsembleStatConfig_wrapped
-#
-# Name to identify model (forecast) data in output.
-#
-MODEL = {{vx_fcst_model_name}}
-
-ENSEMBLE_STAT_DESC = NA
-#
-# Name to identify observation data in output.
-#
-OBTYPE = {{obtype}}
-#
-# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud.
-#
-LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}}
-#
-# Observation data time window(s).
-#
-OBS_WINDOW_BEGIN = -1799
-OBS_WINDOW_END = 1800
-OBS_ENSEMBLE_STAT_WINDOW_BEGIN = {OBS_WINDOW_BEGIN}
-OBS_ENSEMBLE_STAT_WINDOW_END = {OBS_WINDOW_END}
-
-# number of expected members for ensemble. Should correspond with the
-# number of items in the list for FCST_ENSEMBLE_STAT_INPUT_TEMPLATE
-ENSEMBLE_STAT_N_MEMBERS = {{num_ens_members}}
-
-# ens.ens_thresh value in the MET config file
-# threshold for ratio of valid files to expected files to allow app to run
-ENSEMBLE_STAT_ENS_THRESH = 0.05
-
-# ens.vld_thresh value in the MET config file
-ENSEMBLE_STAT_ENS_VLD_THRESH = 1.0
-
-ENSEMBLE_STAT_OBS_QUALITY_INC = 0, 1, 2, 3, 9, NA
-#ENSEMBLE_STAT_OBS_QUALITY_EXC =
-
-# ENSEMBLE_STAT_MET_OBS_ERR_TABLE is not required.
-# If the variable is not defined, or the value is not set, then the MET
-# default is used.
-ENSEMBLE_STAT_MET_OBS_ERR_TABLE = {MET_BASE}/table_files/obs_error_table.txt
-
-
-# Used in the MET config file for: regrid to_grid field
-ENSEMBLE_STAT_REGRID_TO_GRID = NONE
-#ENSEMBLE_STAT_REGRID_METHOD = BILIN
-#ENSEMBLE_STAT_REGRID_WIDTH = 2
-#ENSEMBLE_STAT_REGRID_VLD_THRESH = 0.5
-#ENSEMBLE_STAT_REGRID_SHAPE = SQUARE
-
-ENSEMBLE_STAT_CENSOR_THRESH =
-ENSEMBLE_STAT_CENSOR_VAL =
-
-ENSEMBLE_STAT_MESSAGE_TYPE = {{fieldname_in_met_filedir_names}}
-ENSEMBLE_STAT_DUPLICATE_FLAG = NONE
-ENSEMBLE_STAT_SKIP_CONST = FALSE
-ENSEMBLE_STAT_OBS_ERROR_FLAG = FALSE
-
-ENSEMBLE_STAT_ENS_SSVAR_BIN_SIZE = 1.0
-ENSEMBLE_STAT_ENS_PHIST_BIN_SIZE = 0.05
-
-#ENSEMBLE_STAT_CLIMO_MEAN_FILE_NAME =
-#ENSEMBLE_STAT_CLIMO_MEAN_FIELD =
-#ENSEMBLE_STAT_CLIMO_MEAN_REGRID_METHOD =
-#ENSEMBLE_STAT_CLIMO_MEAN_REGRID_WIDTH =
-#ENSEMBLE_STAT_CLIMO_MEAN_REGRID_VLD_THRESH =
-#ENSEMBLE_STAT_CLIMO_MEAN_REGRID_SHAPE =
-#ENSEMBLE_STAT_CLIMO_MEAN_TIME_INTERP_METHOD =
-#ENSEMBLE_STAT_CLIMO_MEAN_MATCH_MONTH =
-#ENSEMBLE_STAT_CLIMO_MEAN_DAY_INTERVAL = 31
-#ENSEMBLE_STAT_CLIMO_MEAN_HOUR_INTERVAL = 6
-
-#ENSEMBLE_STAT_CLIMO_STDEV_FILE_NAME =
-#ENSEMBLE_STAT_CLIMO_STDEV_FIELD =
-#ENSEMBLE_STAT_CLIMO_STDEV_REGRID_METHOD =
-#ENSEMBLE_STAT_CLIMO_STDEV_REGRID_WIDTH =
-#ENSEMBLE_STAT_CLIMO_STDEV_REGRID_VLD_THRESH =
-#ENSEMBLE_STAT_CLIMO_STDEV_REGRID_SHAPE =
-#ENSEMBLE_STAT_CLIMO_STDEV_TIME_INTERP_METHOD =
-#ENSEMBLE_STAT_CLIMO_STDEV_MATCH_MONTH =
-#ENSEMBLE_STAT_CLIMO_STDEV_DAY_INTERVAL = 31
-#ENSEMBLE_STAT_CLIMO_STDEV_HOUR_INTERVAL = 6
-
-
-#ENSEMBLE_STAT_CLIMO_CDF_BINS = 1
-#ENSEMBLE_STAT_CLIMO_CDF_CENTER_BINS = False
-ENSEMBLE_STAT_CLIMO_CDF_WRITE_BINS = False
-
-ENSEMBLE_STAT_MASK_GRID =
-
-ENSEMBLE_STAT_CI_ALPHA = 0.05
-
-ENSEMBLE_STAT_INTERP_FIELD = BOTH
-ENSEMBLE_STAT_INTERP_VLD_THRESH = 1.0
-ENSEMBLE_STAT_INTERP_SHAPE = SQUARE
-ENSEMBLE_STAT_INTERP_METHOD = NEAREST
-ENSEMBLE_STAT_INTERP_WIDTH = 1
-
-ENSEMBLE_STAT_OUTPUT_FLAG_ECNT = STAT
-ENSEMBLE_STAT_OUTPUT_FLAG_RPS = NONE
-ENSEMBLE_STAT_OUTPUT_FLAG_RHIST = STAT
-ENSEMBLE_STAT_OUTPUT_FLAG_PHIST = STAT
-ENSEMBLE_STAT_OUTPUT_FLAG_ORANK = STAT
-ENSEMBLE_STAT_OUTPUT_FLAG_SSVAR = STAT
-ENSEMBLE_STAT_OUTPUT_FLAG_RELP = STAT
-
-ENSEMBLE_STAT_ENSEMBLE_FLAG_RANK = FALSE
-ENSEMBLE_STAT_ENSEMBLE_FLAG_WEIGHT = FALSE
-#
-# Forecast and observation variables and levels as specified in the fcst
-# field dictionary of the MET configuration file. Specify as FCST_VARn_NAME,
-# FCST_VARn_LEVELS, (optional) FCST_VARn_OPTION and OBS_VARn_NAME,
-# OBS_VARn_LEVELS, (optional) OBS_VARn_OPTION.
-#
-FCST_VAR1_NAME = TMP
-FCST_VAR1_LEVELS = Z2
-FCST_VAR1_THRESH = ge268, ge273, ge278, ge293, ge298, ge303
-OBS_VAR1_NAME = TMP
-OBS_VAR1_LEVELS = Z2
-OBS_VAR1_THRESH = ge268, ge273, ge278, ge293, ge298, ge303
-OBS_VAR1_OPTIONS = obs_error = { flag = TRUE; dist_type = NONE; dist_parm = []; inst_bias_scale = 1.0; inst_bias_offset = 0.0; min = NA; max = NA; }
-
-FCST_VAR2_NAME = DPT
-FCST_VAR2_LEVELS = Z2
-FCST_VAR2_THRESH = ge263, ge268, ge273, ge288, ge293, ge298
-OBS_VAR2_NAME = DPT
-OBS_VAR2_LEVELS = Z2
-OBS_VAR2_THRESH = ge263, ge268, ge273, ge288, ge293, ge298
-OBS_VAR2_OPTIONS = obs_error = { flag = TRUE; dist_type = NONE; dist_parm = []; inst_bias_scale = 1.0; inst_bias_offset = 0.0; min = NA; max = NA; }
-
-FCST_VAR3_NAME = WIND
-FCST_VAR3_LEVELS = Z10
-FCST_VAR3_THRESH = ge5, ge10, ge15
-FCST_VAR3_OPTIONS = GRIB2_pdt = 0; ;; Derive instantaneous 10-m wind from U/V components, overriding max 10-m wind.
-OBS_VAR3_NAME = WIND
-OBS_VAR3_LEVELS = Z10
-OBS_VAR3_THRESH = ge5, ge10, ge15
-OBS_VAR3_OPTIONS = obs_error = { flag = TRUE; dist_type = NONE; dist_parm = []; inst_bias_scale = 1.0; inst_bias_offset = 0.0; min = NA; max = NA; }
-
-FCST_VAR4_NAME = TCDC
-FCST_VAR4_LEVELS = L0
-FCST_VAR4_THRESH = lt25, gt75
-FCST_VAR4_OPTIONS = GRIB_lvl_typ = 200;
- GRIB2_ipdtmpl_index=[27];
- GRIB2_ipdtmpl_val=[255];
- interp = { type = [ { method = NEAREST; width = 1; } ]; }
-OBS_VAR4_NAME = TCDC
-OBS_VAR4_LEVELS = L0
-OBS_VAR4_THRESH = lt25, gt75
-
-FCST_VAR5_NAME = VIS
-FCST_VAR5_LEVELS = L0
-FCST_VAR5_THRESH = lt1609, lt8045, ge8045
-FCST_VAR5_OPTIONS = interp = { type = [ { method = NEAREST; width = 1; } ]; }
-OBS_VAR5_NAME = VIS
-OBS_VAR5_LEVELS = L0
-OBS_VAR5_THRESH = lt1609, lt8045, ge8045
-
-FCST_VAR6_NAME = HGT
-FCST_VAR6_LEVELS = L0
-FCST_VAR6_THRESH = lt152, lt1520, ge914
-FCST_VAR6_OPTIONS = GRIB_lvl_typ = 215;
- desc = "CEILING";
-OBS_VAR6_NAME = CEILING
-OBS_VAR6_LEVELS = L0
-OBS_VAR6_THRESH = lt152, lt305, ge914
-OBS_VAR6_OPTIONS = GRIB_lvl_typ = 215
-
-[dir]
-#
-# OUTPUT_BASE apparently has to be set to something; it cannot be left
-# to its default value. But it is not explicitly used elsewhere in this
-# configuration file.
-#
-OUTPUT_BASE = {{output_base}}
-#
-# Point observation input directory for EnsembleStat.
-#
-OBS_ENSEMBLE_STAT_POINT_INPUT_DIR = {{obs_input_dir}}
-#
-# Grid observation input directory for EnsembleStat.
-#
-OBS_ENSEMBLE_STAT_GRID_INPUT_DIR =
-#
-# Forecast model input directory for EnsembleStat.
-#
-FCST_ENSEMBLE_STAT_INPUT_DIR = {{fcst_input_dir}}
-#
-# Directory containing climatology mean input to EnsembleStat. Not used
-# in this example
-#
-ENSEMBLE_STAT_CLIMO_MEAN_INPUT_DIR =
-#
-# Directory containing climatology mean input to EnsembleStat. Not used
-# in this example.
-#
-ENSEMBLE_STAT_CLIMO_STDEV_INPUT_DIR =
-#
-# Output directory for EnsembleStat.
-#
-ENSEMBLE_STAT_OUTPUT_DIR = {{output_dir}}
-#
-# Directory for staging data.
-#
-STAGING_DIR = {{staging_dir}}
-
-# End of [dir] section and start of [filename_templates] section.
-[filename_templates]
-#
-# Template for point observation input to EnsembleStat relative to
-# OBS_ENSEMBLE_STAT_POINT_INPUT_DIR.
-#
-OBS_ENSEMBLE_STAT_POINT_INPUT_TEMPLATE = {{obs_input_fn_template}}
-#
-# Template for gridded observation input to EnsembleStat relative to
-# OBS_ENSEMBLE_STAT_GRID_INPUT_DIR.
-#
-OBS_ENSEMBLE_STAT_GRID_INPUT_TEMPLATE =
-#
-# Template for forecast input to EnsembleStat relative to
-# FCST_ENSEMBLE_STAT_INPUT_DIR.
-#
-# Note that this can be a comma separated list of ensemble members
-# or a single line, - filename wildcard characters may be used, ? or *.
-#
-FCST_ENSEMBLE_STAT_INPUT_TEMPLATE = {{fcst_input_fn_template}}
-#
-# Template for output from EnsembleStat relative to ENSEMBLE_STAT_OUTPUT_DIR.
-#
-ENSEMBLE_STAT_OUTPUT_TEMPLATE =
-ENSEMBLE_STAT_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE}_{lead?fmt=%H%M%S}L
-#
-# Template for climatology input to EnsembleStat relative to
-# ENSEMBLE_STAT_CLIMO_MEAN_INPUT_DIR. Not used in this example.
-#
-ENSEMBLE_STAT_CLIMO_MEAN_INPUT_TEMPLATE =
-#
-# Template for climatology input to EnsembleStat relative to
-# ENSEMBLE_STAT_CLIMO_STDEV_INPUT_DIR. Not used in this example.
-#
-ENSEMBLE_STAT_CLIMO_STDEV_INPUT_TEMPLATE =
-#
-# Variable used to specify one or more verification mask files for
-# EnsembleStat. Not used for this example.
-#
-ENSEMBLE_STAT_VERIFICATION_MASK_TEMPLATE = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly
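In the deleted configuration above, forecast and observation thresholds were paired by position (e.g. FCST_VAR6 HGT thresholds lt152, lt1520, ge914 against OBS_VAR6 CEILING thresholds lt152, lt305, ge914). The new unified template keeps that convention: a specific input forecast threshold selects the observation threshold at the same index in the list of valid observation thresholds. A minimal Python sketch of the rule:

    # Index-based pairing as implemented in the new template's jinja code:
    # the position of input_thresh_fcst among the valid forecast thresholds
    # selects the corresponding observation threshold.
    valid_threshes_fcst = ["lt152", "lt1520", "ge914"]  # fcst ceiling (HGT)
    valid_threshes_obs = ["lt152", "lt305", "ge914"]    # obs ceiling (CEILING)

    def paired_obs_thresh(input_thresh_fcst):
        if input_thresh_fcst not in valid_threshes_fcst:
            raise ValueError(f"invalid forecast threshold: {input_thresh_fcst}")
        return valid_threshes_obs[valid_threshes_fcst.index(input_thresh_fcst)]

    assert paired_obs_thresh("lt1520") == "lt305"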
diff --git a/parm/metplus/EnsembleStat_ADPUPA.conf b/parm/metplus/EnsembleStat_ADPUPA.conf
deleted file mode 100644
index edfda41b89..0000000000
--- a/parm/metplus/EnsembleStat_ADPUPA.conf
+++ /dev/null
@@ -1,351 +0,0 @@
-# EnsembleStat METplus Configuration
-
-[config]
-
-# List of applications (tools) to run.
-PROCESS_LIST = EnsembleStat
-
-# time looping - options are INIT, VALID, RETRO, and REALTIME
-# If set to INIT or RETRO:
-# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set
-# If set to VALID or REALTIME:
-# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set
-LOOP_BY = INIT
-
-# Format of INIT_BEG and INIT_END using % items
-# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc.
-# see www.strftime.org for more information
-# %Y%m%d%H expands to YYYYMMDDHH
-INIT_TIME_FMT = %Y%m%d%H
-
-# Start time for METplus run - must match INIT_TIME_FMT
-INIT_BEG = {{cdate}}
-
-# End time for METplus run - must match INIT_TIME_FMT
-INIT_END = {{cdate}}
-
-# Increment between METplus runs (in seconds if no units are specified).
-# Must be >= 60 seconds.
-INIT_INCREMENT = 3600
-
-# List of forecast leads to process for each run time (init or valid)
-# In hours if units are not specified
-# If unset, defaults to 0 (don't loop through forecast leads)
-LEAD_SEQ = {{fhr_list}}
-#
-# Order of loops to process data - Options are times, processes
-# Not relevant if only one item is in the PROCESS_LIST
-# times = run all wrappers in the PROCESS_LIST for a single run time, then
-# increment the run time and run all wrappers again until all times have
-# been evaluated.
-# processes = run the first wrapper in the PROCESS_LIST for all times
-# specified, then repeat for the next item in the PROCESS_LIST until all
-# wrappers have been run
-#
-LOOP_ORDER = times
-#
-# Specify the name of the METplus log file.
-#
-LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}}
-#
-# Specify the location and name of the final METplus conf file.
-#
-METPLUS_CONF = {ENSEMBLE_STAT_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}}
-#
-# Location of MET configuration file to pass to EnsembleStat.
-#
-# References PARM_BASE, which is the location of the parm directory
-# corresponding to the ush directory of the run_metplus.py script that
-# is called or the value of the environment variable METPLUS_PARM_BASE
-# if set.
-#
-ENSEMBLE_STAT_CONFIG_FILE = {PARM_BASE}/met_config/EnsembleStatConfig_wrapped
-#
-# Name to identify model (forecast) data in output.
-#
-MODEL = {{vx_fcst_model_name}}
-
-ENSEMBLE_STAT_DESC = NA
-#
-# Name to identify observation data in output.
-#
-OBTYPE = {{obtype}}
-#
-# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud.
-#
-LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}}
-#
-# Observation data time window(s).
-#
-OBS_WINDOW_BEGIN = -1799
-OBS_WINDOW_END = 1800
-OBS_ENSEMBLE_STAT_WINDOW_BEGIN = {OBS_WINDOW_BEGIN}
-OBS_ENSEMBLE_STAT_WINDOW_END = {OBS_WINDOW_END}
-
-# number of expected members for ensemble. Should correspond with the
-# number of items in the list for FCST_ENSEMBLE_STAT_INPUT_TEMPLATE
-ENSEMBLE_STAT_N_MEMBERS = {{num_ens_members}}
-
-# ens.ens_thresh value in the MET config file
-# threshold for ratio of valid files to expected files to allow app to run
-ENSEMBLE_STAT_ENS_THRESH = 0.05
-
-# ens.vld_thresh value in the MET config file
-ENSEMBLE_STAT_ENS_VLD_THRESH = 1.0
-
-ENSEMBLE_STAT_OBS_QUALITY_INC = 0, 1, 2, 3, 9, NA
-#ENSEMBLE_STAT_OBS_QUALITY_EXC =
-
-# ENSEMBLE_STAT_MET_OBS_ERR_TABLE is not required.
-# If the variable is not defined, or the value is not set, then the MET
-# default is used.
-ENSEMBLE_STAT_MET_OBS_ERR_TABLE = {MET_BASE}/table_files/obs_error_table.txt
-
-
-# Used in the MET config file for: regrid to_grid field
-ENSEMBLE_STAT_REGRID_TO_GRID = NONE
-#ENSEMBLE_STAT_REGRID_METHOD = BILIN
-#ENSEMBLE_STAT_REGRID_WIDTH = 2
-#ENSEMBLE_STAT_REGRID_VLD_THRESH = 0.5
-#ENSEMBLE_STAT_REGRID_SHAPE = SQUARE
-
-ENSEMBLE_STAT_CENSOR_THRESH =
-ENSEMBLE_STAT_CENSOR_VAL =
-
-ENSEMBLE_STAT_MESSAGE_TYPE = {{fieldname_in_met_filedir_names}}
-ENSEMBLE_STAT_DUPLICATE_FLAG = NONE
-ENSEMBLE_STAT_SKIP_CONST = FALSE
-ENSEMBLE_STAT_OBS_ERROR_FLAG = FALSE
-
-ENSEMBLE_STAT_ENS_SSVAR_BIN_SIZE = 1.0
-ENSEMBLE_STAT_ENS_PHIST_BIN_SIZE = 0.05
-
-#ENSEMBLE_STAT_CLIMO_MEAN_FILE_NAME =
-#ENSEMBLE_STAT_CLIMO_MEAN_FIELD =
-#ENSEMBLE_STAT_CLIMO_MEAN_REGRID_METHOD =
-#ENSEMBLE_STAT_CLIMO_MEAN_REGRID_WIDTH =
-#ENSEMBLE_STAT_CLIMO_MEAN_REGRID_VLD_THRESH =
-#ENSEMBLE_STAT_CLIMO_MEAN_REGRID_SHAPE =
-#ENSEMBLE_STAT_CLIMO_MEAN_TIME_INTERP_METHOD =
-#ENSEMBLE_STAT_CLIMO_MEAN_MATCH_MONTH =
-#ENSEMBLE_STAT_CLIMO_MEAN_DAY_INTERVAL = 31
-#ENSEMBLE_STAT_CLIMO_MEAN_HOUR_INTERVAL = 6
-
-#ENSEMBLE_STAT_CLIMO_STDEV_FILE_NAME =
-#ENSEMBLE_STAT_CLIMO_STDEV_FIELD =
-#ENSEMBLE_STAT_CLIMO_STDEV_REGRID_METHOD =
-#ENSEMBLE_STAT_CLIMO_STDEV_REGRID_WIDTH =
-#ENSEMBLE_STAT_CLIMO_STDEV_REGRID_VLD_THRESH =
-#ENSEMBLE_STAT_CLIMO_STDEV_REGRID_SHAPE =
-#ENSEMBLE_STAT_CLIMO_STDEV_TIME_INTERP_METHOD =
-#ENSEMBLE_STAT_CLIMO_STDEV_MATCH_MONTH =
-#ENSEMBLE_STAT_CLIMO_STDEV_DAY_INTERVAL = 31
-#ENSEMBLE_STAT_CLIMO_STDEV_HOUR_INTERVAL = 6
-
-
-#ENSEMBLE_STAT_CLIMO_CDF_BINS = 1
-#ENSEMBLE_STAT_CLIMO_CDF_CENTER_BINS = False
-ENSEMBLE_STAT_CLIMO_CDF_WRITE_BINS = False
-
-ENSEMBLE_STAT_MASK_GRID =
-
-ENSEMBLE_STAT_CI_ALPHA = 0.05
-
-ENSEMBLE_STAT_INTERP_FIELD = BOTH
-ENSEMBLE_STAT_INTERP_VLD_THRESH = 1.0
-ENSEMBLE_STAT_INTERP_SHAPE = SQUARE
-ENSEMBLE_STAT_INTERP_METHOD = NEAREST
-ENSEMBLE_STAT_INTERP_WIDTH = 1
-
-ENSEMBLE_STAT_OUTPUT_FLAG_ECNT = STAT
-ENSEMBLE_STAT_OUTPUT_FLAG_RPS = NONE
-ENSEMBLE_STAT_OUTPUT_FLAG_RHIST = STAT
-ENSEMBLE_STAT_OUTPUT_FLAG_PHIST = STAT
-ENSEMBLE_STAT_OUTPUT_FLAG_ORANK = STAT
-ENSEMBLE_STAT_OUTPUT_FLAG_SSVAR = STAT
-ENSEMBLE_STAT_OUTPUT_FLAG_RELP = STAT
-
-ENSEMBLE_STAT_ENSEMBLE_FLAG_RANK = FALSE
-ENSEMBLE_STAT_ENSEMBLE_FLAG_WEIGHT = FALSE
-#
-# Forecast and observation variables and levels as specified in the fcst
-# field dictionary of the MET configuration file. Specify as FCST_VARn_NAME,
-# FCST_VARn_LEVELS, (optional) FCST_VARn_OPTION and OBS_VARn_NAME,
-# OBS_VARn_LEVELS, (optional) OBS_VARn_OPTION.
-#
-FCST_VAR1_NAME = TMP
-FCST_VAR1_LEVELS = P850
-FCST_VAR1_THRESH = ge288, ge293, ge298
-OBS_VAR1_NAME = TMP
-OBS_VAR1_LEVELS = P850
-OBS_VAR1_THRESH = ge288, ge293, ge298
-OBS_VAR1_OPTIONS = obs_error = { flag = TRUE; dist_type = NONE; dist_parm = []; inst_bias_scale = 1.0; inst_bias_offset = 0.0; min = NA; max = NA; }
-
-FCST_VAR2_NAME = TMP
-FCST_VAR2_LEVELS = P700
-FCST_VAR2_THRESH = ge273, ge278, ge283
-OBS_VAR2_NAME = TMP
-OBS_VAR2_LEVELS = P700
-OBS_VAR2_THRESH = ge273, ge278, ge283
-OBS_VAR2_OPTIONS = obs_error = { flag = TRUE; dist_type = NONE; dist_parm = []; inst_bias_scale = 1.0; inst_bias_offset = 0.0; min = NA; max = NA; }
-
-FCST_VAR3_NAME = TMP
-FCST_VAR3_LEVELS = P500
-FCST_VAR3_THRESH = ge258, ge263, ge268
-OBS_VAR3_NAME = TMP
-OBS_VAR3_LEVELS = P500
-OBS_VAR3_THRESH = ge258, ge263, ge268
-OBS_VAR3_OPTIONS = obs_error = { flag = TRUE; dist_type = NONE; dist_parm = []; inst_bias_scale = 1.0; inst_bias_offset = 0.0; min = NA; max = NA; }
-
-FCST_VAR4_NAME = DPT
-FCST_VAR4_LEVELS = P850
-FCST_VAR4_THRESH = ge273, ge278, ge283
-OBS_VAR4_NAME = DPT
-OBS_VAR4_LEVELS = P850
-OBS_VAR4_THRESH = ge273, ge278, ge283
-OBS_VAR4_OPTIONS = obs_error = { flag = TRUE; dist_type = NONE; dist_parm = []; inst_bias_scale = 1.0; inst_bias_offset = 0.0; min = NA; max = NA; }
-
-FCST_VAR5_NAME = DPT
-FCST_VAR5_LEVELS = P700
-FCST_VAR5_THRESH = ge263, ge268, ge273
-OBS_VAR5_NAME = DPT
-OBS_VAR5_LEVELS = P700
-OBS_VAR5_THRESH = ge263, ge268, ge273
-OBS_VAR5_OPTIONS = obs_error = { flag = TRUE; dist_type = NONE; dist_parm = []; inst_bias_scale = 1.0; inst_bias_offset = 0.0; min = NA; max = NA; }
-
-FCST_VAR6_NAME = WIND
-FCST_VAR6_LEVELS = P850
-FCST_VAR6_THRESH = ge5, ge10, ge15
-OBS_VAR6_NAME = WIND
-OBS_VAR6_LEVELS = P850
-OBS_VAR6_THRESH = ge5, ge10, ge15
-OBS_VAR6_OPTIONS = obs_error = { flag = TRUE; dist_type = NONE; dist_parm = []; inst_bias_scale = 1.0; inst_bias_offset = 0.0; min = NA; max = NA; }
-
-FCST_VAR7_NAME = WIND
-FCST_VAR7_LEVELS = P700
-FCST_VAR7_THRESH = ge10, ge15, ge20
-OBS_VAR7_NAME = WIND
-OBS_VAR7_LEVELS = P700
-OBS_VAR7_THRESH = ge10, ge15, ge20
-OBS_VAR7_OPTIONS = obs_error = { flag = TRUE; dist_type = NONE; dist_parm = []; inst_bias_scale = 1.0; inst_bias_offset = 0.0; min = NA; max = NA; }
-
-FCST_VAR8_NAME = WIND
-FCST_VAR8_LEVELS = P500
-FCST_VAR8_THRESH = ge15, ge21, ge26
-OBS_VAR8_NAME = WIND
-OBS_VAR8_LEVELS = P500
-OBS_VAR8_THRESH = ge15, ge21, ge26
-OBS_VAR8_OPTIONS = obs_error = { flag = TRUE; dist_type = NONE; dist_parm = []; inst_bias_scale = 1.0; inst_bias_offset = 0.0; min = NA; max = NA; }
-
-FCST_VAR9_NAME = WIND
-FCST_VAR9_LEVELS = P250
-FCST_VAR9_THRESH = ge26, ge31, ge36, ge46, ge62
-OBS_VAR9_NAME = WIND
-OBS_VAR9_LEVELS = P250
-OBS_VAR9_THRESH = ge26, ge31, ge36, ge46, ge62
-OBS_VAR9_OPTIONS = obs_error = { flag = TRUE; dist_type = NONE; dist_parm = []; inst_bias_scale = 1.0; inst_bias_offset = 0.0; min = NA; max = NA; }
-
-FCST_VAR10_NAME = HGT
-FCST_VAR10_LEVELS = P500
-FCST_VAR10_THRESH = ge5400, ge5600, ge5880
-OBS_VAR10_NAME = HGT
-OBS_VAR10_LEVELS = P500
-OBS_VAR10_THRESH = ge5400, ge5600, ge5880
-OBS_VAR10_OPTIONS = obs_error = { flag = TRUE; dist_type = NONE; dist_parm = []; inst_bias_scale = 1.0; inst_bias_offset = 0.0; min = NA; max = NA; }
-
-FCST_VAR11_NAME = CAPE
-FCST_VAR11_LEVELS = L0
-FCST_VAR11_THRESH = le1000, gt1000&<2500, ge2500&<4000, ge2500
-FCST_VAR11_OPTIONS = cnt_thresh = [ >0 ];
-OBS_VAR11_NAME = CAPE
-OBS_VAR11_LEVELS = L0-100000
-OBS_VAR11_THRESH = le1000, gt1000&<2500, ge2500&<4000, ge2500
-OBS_VAR11_OPTIONS = cnt_thresh = [ >0 ];
- cnt_logic = UNION;
-
-FCST_VAR12_NAME = HPBL
-FCST_VAR12_LEVELS = Z0
-FCST_VAR12_THRESH = lt500, lt1500, gt1500
-OBS_VAR12_NAME = PBL
-OBS_VAR12_LEVELS = L0
-OBS_VAR12_THRESH = lt500, lt1500, gt1500
-OBS_VAR12_OPTIONS = desc = "TKE";
-
-[dir]
-#
-# OUTPUT_BASE apparently has to be set to something; it cannot be left
-# to its default value. But it is not explicitly used elsewhere in this
-# configuration file.
-#
-OUTPUT_BASE = {{output_base}}
-#
-# Point observation input directory for EnsembleStat.
-#
-OBS_ENSEMBLE_STAT_POINT_INPUT_DIR = {{obs_input_dir}}
-#
-# Grid observation input directory for EnsembleStat.
-#
-OBS_ENSEMBLE_STAT_GRID_INPUT_DIR =
-#
-# Forecast model input directory for EnsembleStat.
-#
-FCST_ENSEMBLE_STAT_INPUT_DIR = {{fcst_input_dir}}
-#
-# Directory containing climatology mean input to EnsembleStat. Not used
-# in this example.
-#
-ENSEMBLE_STAT_CLIMO_MEAN_INPUT_DIR =
-#
-# Directory containing climatology standard deviation input to EnsembleStat.
-# Not used in this example.
-#
-ENSEMBLE_STAT_CLIMO_STDEV_INPUT_DIR =
-#
-# Output directory for EnsembleStat.
-#
-ENSEMBLE_STAT_OUTPUT_DIR = {{output_dir}}
-#
-# Directory for staging data.
-#
-STAGING_DIR = {{staging_dir}}
-
-# End of [dir] section and start of [filename_templates] section.
-[filename_templates]
-#
-# Template for point observation input to EnsembleStat relative to
-# OBS_ENSEMBLE_STAT_POINT_INPUT_DIR.
-#
-OBS_ENSEMBLE_STAT_POINT_INPUT_TEMPLATE = {{obs_input_fn_template}}
-#
-# Template for gridded observation input to EnsembleStat relative to
-# OBS_ENSEMBLE_STAT_GRID_INPUT_DIR.
-#
-OBS_ENSEMBLE_STAT_GRID_INPUT_TEMPLATE =
-#
-# Template for forecast input to EnsembleStat relative to
-# FCST_ENSEMBLE_STAT_INPUT_DIR.
-#
-# Note that this can be a comma-separated list of ensemble members
-# or a single template; filename wildcard characters (? or *) may be used.
-#
-FCST_ENSEMBLE_STAT_INPUT_TEMPLATE = {{fcst_input_fn_template}}
-#
-# Template for output from EnsembleStat relative to ENSEMBLE_STAT_OUTPUT_DIR.
-#
-ENSEMBLE_STAT_OUTPUT_TEMPLATE =
-ENSEMBLE_STAT_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE}_{lead?fmt=%H%M%S}L
-#
-# Template for climatology input to EnsembleStat relative to
-# ENSEMBLE_STAT_CLIMO_MEAN_INPUT_DIR. Not used in this example.
-#
-ENSEMBLE_STAT_CLIMO_MEAN_INPUT_TEMPLATE =
-#
-# Template for climatology input to EnsembleStat relative to
-# ENSEMBLE_STAT_CLIMO_STDEV_INPUT_DIR. Not used in this example.
-#
-ENSEMBLE_STAT_CLIMO_STDEV_INPUT_TEMPLATE =
-#
-# Variable used to specify one or more verification mask files for
-# EnsembleStat. Not used for this example.
-#
-ENSEMBLE_STAT_VERIFICATION_MASK_TEMPLATE = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly
diff --git a/parm/metplus/EnsembleStat_APCP.conf b/parm/metplus/EnsembleStat_APCP.conf
deleted file mode 100644
index 7604a90bd7..0000000000
--- a/parm/metplus/EnsembleStat_APCP.conf
+++ /dev/null
@@ -1,258 +0,0 @@
-# EnsembleStat METplus Configuration
-
-[config]
-
-# List of applications (tools) to run.
-PROCESS_LIST = EnsembleStat
-
-# time looping - options are INIT, VALID, RETRO, and REALTIME
-# If set to INIT or RETRO:
-# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set
-# If set to VALID or REALTIME:
-# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set
-LOOP_BY = INIT
-
-# Format of INIT_BEG and INIT_END using % items
-# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc.
-# see www.strftime.org for more information
-# %Y%m%d%H expands to YYYYMMDDHH
-INIT_TIME_FMT = %Y%m%d%H
-
-# Start time for METplus run - must match INIT_TIME_FMT
-INIT_BEG = {{cdate}}
-
-# End time for METplus run - must match INIT_TIME_FMT
-INIT_END = {{cdate}}
-
-# Increment between METplus runs (in seconds if no units are specified).
-# Must be >= 60 seconds.
-INIT_INCREMENT = 3600
-
-# List of forecast leads to process for each run time (init or valid)
-# In hours if units are not specified
-# If unset, defaults to 0 (don't loop through forecast leads)
-LEAD_SEQ = {{fhr_list}}
-#
-# Order of loops to process data - Options are times, processes
-# Not relevant if only one item is in the PROCESS_LIST
-# times = run all wrappers in the PROCESS_LIST for a single run time, then
-# increment the run time and run all wrappers again until all times have
-# been evaluated.
-# processes = run the first wrapper in the PROCESS_LIST for all times
-# specified, then repeat for the next item in the PROCESS_LIST until all
-# wrappers have been run
-#
-LOOP_ORDER = times
-#
-# Specify the name of the METplus log file.
-#
-LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}}
-#
-# Specify the location and name of the final METplus conf file.
-#
-METPLUS_CONF = {ENSEMBLE_STAT_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}}
-#
-# Location of MET configuration file to pass to EnsembleStat.
-#
-# References PARM_BASE, which is the location of the parm directory
-# corresponding to the ush directory of the run_metplus.py script that
-# is called or the value of the environment variable METPLUS_PARM_BASE
-# if set.
-#
-ENSEMBLE_STAT_CONFIG_FILE = {PARM_BASE}/met_config/EnsembleStatConfig_wrapped
-#
-# Name to identify model (forecast) data in output.
-#
-MODEL = {{vx_fcst_model_name}}
-
-ENSEMBLE_STAT_DESC = NA
-#
-# Name to identify observation data in output.
-#
-OBTYPE = {{obtype}}
-#
-# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud.
-#
-LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}}
-#
-# Observation data time window(s).
-#
-OBS_FILE_WINDOW_BEGIN = 0
-OBS_FILE_WINDOW_END = 0
-OBS_ENSEMBLE_STAT_WINDOW_BEGIN = 0
-OBS_ENSEMBLE_STAT_WINDOW_END = 0
-
-# number of expected members for ensemble. Should correspond with the
-# number of items in the list for FCST_ENSEMBLE_STAT_INPUT_TEMPLATE
-ENSEMBLE_STAT_N_MEMBERS = {{num_ens_members}}
-
-# ens.ens_thresh value in the MET config file
-# threshold for ratio of valid files to expected files to allow app to run
-ENSEMBLE_STAT_ENS_THRESH = 0.05
-
-# ens.vld_thresh value in the MET config file
-ENSEMBLE_STAT_ENS_VLD_THRESH = 1.0
-
-# ENSEMBLE_STAT_MET_OBS_ERR_TABLE is not required.
-# If the variable is not defined, or the value is not set, then the MET
-# default is used.
-ENSEMBLE_STAT_MET_OBS_ERR_TABLE = {MET_BASE}/table_files/obs_error_table.txt
-
-
-# Used in the MET config file for: regrid to_grid field
-ENSEMBLE_STAT_REGRID_TO_GRID = FCST
-ENSEMBLE_STAT_REGRID_METHOD = BUDGET
-ENSEMBLE_STAT_REGRID_WIDTH = 2
-ENSEMBLE_STAT_REGRID_VLD_THRESH = 0.5
-ENSEMBLE_STAT_REGRID_SHAPE = SQUARE
-
-ENSEMBLE_STAT_CENSOR_THRESH =
-ENSEMBLE_STAT_CENSOR_VAL =
-
-ENSEMBLE_STAT_MESSAGE_TYPE =
-ENSEMBLE_STAT_DUPLICATE_FLAG = UNIQUE
-ENSEMBLE_STAT_SKIP_CONST = TRUE
-ENSEMBLE_STAT_OBS_ERROR_FLAG = TRUE
-
-ENSEMBLE_STAT_ENS_SSVAR_BIN_SIZE = 1.0
-ENSEMBLE_STAT_ENS_PHIST_BIN_SIZE = 0.05
-
-#ENSEMBLE_STAT_CLIMO_MEAN_FILE_NAME =
-#ENSEMBLE_STAT_CLIMO_MEAN_FIELD =
-#ENSEMBLE_STAT_CLIMO_MEAN_REGRID_METHOD =
-#ENSEMBLE_STAT_CLIMO_MEAN_REGRID_WIDTH =
-#ENSEMBLE_STAT_CLIMO_MEAN_REGRID_VLD_THRESH =
-#ENSEMBLE_STAT_CLIMO_MEAN_REGRID_SHAPE =
-#ENSEMBLE_STAT_CLIMO_MEAN_TIME_INTERP_METHOD =
-#ENSEMBLE_STAT_CLIMO_MEAN_MATCH_MONTH =
-#ENSEMBLE_STAT_CLIMO_MEAN_DAY_INTERVAL = 31
-#ENSEMBLE_STAT_CLIMO_MEAN_HOUR_INTERVAL = 6
-
-#ENSEMBLE_STAT_CLIMO_STDEV_FILE_NAME =
-#ENSEMBLE_STAT_CLIMO_STDEV_FIELD =
-#ENSEMBLE_STAT_CLIMO_STDEV_REGRID_METHOD =
-#ENSEMBLE_STAT_CLIMO_STDEV_REGRID_WIDTH =
-#ENSEMBLE_STAT_CLIMO_STDEV_REGRID_VLD_THRESH =
-#ENSEMBLE_STAT_CLIMO_STDEV_REGRID_SHAPE =
-#ENSEMBLE_STAT_CLIMO_STDEV_TIME_INTERP_METHOD =
-#ENSEMBLE_STAT_CLIMO_STDEV_MATCH_MONTH =
-#ENSEMBLE_STAT_CLIMO_STDEV_DAY_INTERVAL = 31
-#ENSEMBLE_STAT_CLIMO_STDEV_HOUR_INTERVAL = 6
-
-
-ENSEMBLE_STAT_CLIMO_CDF_BINS = 1
-ENSEMBLE_STAT_CLIMO_CDF_CENTER_BINS = False
-ENSEMBLE_STAT_CLIMO_CDF_WRITE_BINS = False
-
-ENSEMBLE_STAT_MASK_GRID =
-
-ENSEMBLE_STAT_CI_ALPHA = 0.05
-
-ENSEMBLE_STAT_INTERP_FIELD = BOTH
-ENSEMBLE_STAT_INTERP_VLD_THRESH = 1.0
-ENSEMBLE_STAT_INTERP_SHAPE = SQUARE
-ENSEMBLE_STAT_INTERP_METHOD = NEAREST
-ENSEMBLE_STAT_INTERP_WIDTH = 1
-
-ENSEMBLE_STAT_OUTPUT_FLAG_ECNT = STAT
-ENSEMBLE_STAT_OUTPUT_FLAG_RPS = NONE
-ENSEMBLE_STAT_OUTPUT_FLAG_RHIST = STAT
-ENSEMBLE_STAT_OUTPUT_FLAG_PHIST = STAT
-ENSEMBLE_STAT_OUTPUT_FLAG_ORANK = STAT
-ENSEMBLE_STAT_OUTPUT_FLAG_SSVAR = STAT
-ENSEMBLE_STAT_OUTPUT_FLAG_RELP = STAT
-
-ENSEMBLE_STAT_ENSEMBLE_FLAG_RANK = FALSE
-ENSEMBLE_STAT_ENSEMBLE_FLAG_WEIGHT = FALSE
-#
-# Forecast and observation variables and levels as specified in the fcst
-# field dictionary of the MET configuration file. Specify as FCST_VARn_NAME,
-# FCST_VARn_LEVELS, (optional) FCST_VARn_OPTION and OBS_VARn_NAME,
-# OBS_VARn_LEVELS, (optional) OBS_VARn_OPTION.
-#
-FCST_VAR1_NAME = {{fieldname_in_met_output}}_{{accum_hh}}
-FCST_VAR1_LEVELS = A{{accum_hh}}
-FCST_VAR1_OPTIONS = ens_ssvar_bin_size = 50.0;
- ens_phist_bin_size = 0.05;
-OBS_VAR1_NAME = {{fieldname_in_met_output}}_{{accum_hh}}
-OBS_VAR1_LEVELS = A{{accum_hh}}
-OBS_VAR1_OPTIONS = {FCST_VAR1_OPTIONS}
-
-[dir]
-#
-# OUTPUT_BASE apparently has to be set to something; it cannot be left
-# to its default value. But it is not explicitly used elsewhere in this
-# configuration file.
-#
-OUTPUT_BASE = {{output_base}}
-#
-# Point observation input directory for EnsembleStat.
-#
-OBS_ENSEMBLE_STAT_POINT_INPUT_DIR =
-#
-# Grid observation input directory for EnsembleStat.
-#
-OBS_ENSEMBLE_STAT_GRID_INPUT_DIR = {{obs_input_dir}}
-#
-# Forecast model input directory for EnsembleStat.
-#
-FCST_ENSEMBLE_STAT_INPUT_DIR = {{fcst_input_dir}}
-#
-# Directory containing climatology mean input to EnsembleStat. Not used
-# in this example.
-#
-ENSEMBLE_STAT_CLIMO_MEAN_INPUT_DIR =
-#
-# Directory containing climatology standard deviation input to EnsembleStat.
-# Not used in this example.
-#
-ENSEMBLE_STAT_CLIMO_STDEV_INPUT_DIR =
-#
-# Output directory for EnsembleStat.
-#
-ENSEMBLE_STAT_OUTPUT_DIR = {{output_dir}}
-#
-# Directory for staging data.
-#
-STAGING_DIR = {{staging_dir}}
-
-# End of [dir] section and start of [filename_templates] section.
-[filename_templates]
-#
-# Template for point observation input to EnsembleStat relative to
-# OBS_ENSEMBLE_STAT_POINT_INPUT_DIR.
-#
-OBS_ENSEMBLE_STAT_POINT_INPUT_TEMPLATE =
-#
-# Template for gridded observation input to EnsembleStat relative to
-# OBS_ENSEMBLE_STAT_GRID_INPUT_DIR.
-#
-OBS_ENSEMBLE_STAT_GRID_INPUT_TEMPLATE = {{obs_input_fn_template}}
-#
-# Template for forecast input to EnsembleStat relative to
-# FCST_ENSEMBLE_STAT_INPUT_DIR.
-#
-# Note that this can be a comma-separated list of ensemble members
-# or a single template; filename wildcard characters (? or *) may be used.
-#
-FCST_ENSEMBLE_STAT_INPUT_TEMPLATE = {{fcst_input_fn_template}}
-#
-# Template for output from EnsembleStat relative to ENSEMBLE_STAT_OUTPUT_DIR.
-#
-ENSEMBLE_STAT_OUTPUT_TEMPLATE =
-ENSEMBLE_STAT_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE}_{lead?fmt=%H%M%S}L
-#
-# Template for climatology input to EnsembleStat relative to
-# ENSEMBLE_STAT_CLIMO_MEAN_INPUT_DIR. Not used in this example.
-#
-ENSEMBLE_STAT_CLIMO_MEAN_INPUT_TEMPLATE =
-#
-# Template for climatology input to EnsembleStat relative to
-# ENSEMBLE_STAT_CLIMO_STDEV_INPUT_DIR. Not used in this example.
-#
-ENSEMBLE_STAT_CLIMO_STDEV_INPUT_TEMPLATE =
-#
-# Variable used to specify one or more verification mask files for
-# EnsembleStat. Not used for this example.
-#
-ENSEMBLE_STAT_VERIFICATION_MASK_TEMPLATE = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly
diff --git a/parm/metplus/EnsembleStat_ASNOW.conf b/parm/metplus/EnsembleStat_ASNOW.conf
deleted file mode 100644
index 8897b03295..0000000000
--- a/parm/metplus/EnsembleStat_ASNOW.conf
+++ /dev/null
@@ -1,259 +0,0 @@
-# EnsembleStat METplus Configuration
-
-[config]
-
-# List of applications (tools) to run.
-PROCESS_LIST = EnsembleStat
-
-# time looping - options are INIT, VALID, RETRO, and REALTIME
-# If set to INIT or RETRO:
-# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set
-# If set to VALID or REALTIME:
-# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set
-LOOP_BY = INIT
-
-# Format of INIT_BEG and INIT_END using % items
-# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc.
-# see www.strftime.org for more information
-# %Y%m%d%H expands to YYYYMMDDHH
-INIT_TIME_FMT = %Y%m%d%H
-
-# Start time for METplus run - must match INIT_TIME_FMT
-INIT_BEG = {{cdate}}
-
-# End time for METplus run - must match INIT_TIME_FMT
-INIT_END = {{cdate}}
-
-# Increment between METplus runs (in seconds if no units are specified).
-# Must be >= 60 seconds.
-INIT_INCREMENT = 3600
-
-# List of forecast leads to process for each run time (init or valid)
-# In hours if units are not specified
-# If unset, defaults to 0 (don't loop through forecast leads)
-LEAD_SEQ = {{fhr_list}}
-#
-# Order of loops to process data - Options are times, processes
-# Not relevant if only one item is in the PROCESS_LIST
-# times = run all wrappers in the PROCESS_LIST for a single run time, then
-# increment the run time and run all wrappers again until all times have
-# been evaluated.
-# processes = run the first wrapper in the PROCESS_LIST for all times
-# specified, then repeat for the next item in the PROCESS_LIST until all
-# wrappers have been run
-#
-LOOP_ORDER = times
-#
-# Specify the name of the METplus log file.
-#
-LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}}
-#
-# Specify the location and name of the final METplus conf file.
-#
-METPLUS_CONF = {ENSEMBLE_STAT_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}}
-#
-# Location of MET configuration file to pass to EnsembleStat.
-#
-# References PARM_BASE, which is the location of the parm directory
-# corresponding to the ush directory of the run_metplus.py script that
-# is called or the value of the environment variable METPLUS_PARM_BASE
-# if set.
-#
-ENSEMBLE_STAT_CONFIG_FILE = {PARM_BASE}/met_config/EnsembleStatConfig_wrapped
-#
-# Name to identify model (forecast) data in output.
-#
-MODEL = {{vx_fcst_model_name}}
-
-ENSEMBLE_STAT_DESC = NA
-#
-# Name to identify observation data in output.
-#
-OBTYPE = {{obtype}}
-#
-# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud.
-#
-LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}}
-#
-# Observation data time window(s).
-#
-OBS_FILE_WINDOW_BEGIN = 0
-OBS_FILE_WINDOW_END = 0
-OBS_ENSEMBLE_STAT_WINDOW_BEGIN = 0
-OBS_ENSEMBLE_STAT_WINDOW_END = 0
-
-# number of expected members for ensemble. Should correspond with the
-# number of items in the list for FCST_ENSEMBLE_STAT_INPUT_TEMPLATE
-ENSEMBLE_STAT_N_MEMBERS = {{num_ens_members}}
-
-# ens.ens_thresh value in the MET config file
-# threshold for ratio of valid files to expected files to allow app to run
-ENSEMBLE_STAT_ENS_THRESH = 0.05
-
-# ens.vld_thresh value in the MET config file
-ENSEMBLE_STAT_ENS_VLD_THRESH = 1.0
-
-# ENSEMBLE_STAT_MET_OBS_ERR_TABLE is not required.
-# If the variable is not defined, or the value is not set, then the MET
-# default is used.
-ENSEMBLE_STAT_MET_OBS_ERR_TABLE = {MET_BASE}/table_files/obs_error_table.txt
-
-
-# Used in the MET config file for: regrid to_grid field
-ENSEMBLE_STAT_REGRID_TO_GRID = FCST
-ENSEMBLE_STAT_REGRID_METHOD = BUDGET
-ENSEMBLE_STAT_REGRID_WIDTH = 2
-ENSEMBLE_STAT_REGRID_VLD_THRESH = 0.5
-ENSEMBLE_STAT_REGRID_SHAPE = SQUARE
-
-ENSEMBLE_STAT_CENSOR_THRESH =
-ENSEMBLE_STAT_CENSOR_VAL =
-
-ENSEMBLE_STAT_MESSAGE_TYPE =
-ENSEMBLE_STAT_DUPLICATE_FLAG = UNIQUE
-ENSEMBLE_STAT_SKIP_CONST = TRUE
-ENSEMBLE_STAT_OBS_ERROR_FLAG = TRUE
-
-ENSEMBLE_STAT_ENS_SSVAR_BIN_SIZE = 1.0
-ENSEMBLE_STAT_ENS_PHIST_BIN_SIZE = 0.05
-
-#ENSEMBLE_STAT_CLIMO_MEAN_FILE_NAME =
-#ENSEMBLE_STAT_CLIMO_MEAN_FIELD =
-#ENSEMBLE_STAT_CLIMO_MEAN_REGRID_METHOD =
-#ENSEMBLE_STAT_CLIMO_MEAN_REGRID_WIDTH =
-#ENSEMBLE_STAT_CLIMO_MEAN_REGRID_VLD_THRESH =
-#ENSEMBLE_STAT_CLIMO_MEAN_REGRID_SHAPE =
-#ENSEMBLE_STAT_CLIMO_MEAN_TIME_INTERP_METHOD =
-#ENSEMBLE_STAT_CLIMO_MEAN_MATCH_MONTH =
-#ENSEMBLE_STAT_CLIMO_MEAN_DAY_INTERVAL = 31
-#ENSEMBLE_STAT_CLIMO_MEAN_HOUR_INTERVAL = 6
-
-#ENSEMBLE_STAT_CLIMO_STDEV_FILE_NAME =
-#ENSEMBLE_STAT_CLIMO_STDEV_FIELD =
-#ENSEMBLE_STAT_CLIMO_STDEV_REGRID_METHOD =
-#ENSEMBLE_STAT_CLIMO_STDEV_REGRID_WIDTH =
-#ENSEMBLE_STAT_CLIMO_STDEV_REGRID_VLD_THRESH =
-#ENSEMBLE_STAT_CLIMO_STDEV_REGRID_SHAPE =
-#ENSEMBLE_STAT_CLIMO_STDEV_TIME_INTERP_METHOD =
-#ENSEMBLE_STAT_CLIMO_STDEV_MATCH_MONTH =
-#ENSEMBLE_STAT_CLIMO_STDEV_DAY_INTERVAL = 31
-#ENSEMBLE_STAT_CLIMO_STDEV_HOUR_INTERVAL = 6
-
-
-ENSEMBLE_STAT_CLIMO_CDF_BINS = 1
-ENSEMBLE_STAT_CLIMO_CDF_CENTER_BINS = False
-ENSEMBLE_STAT_CLIMO_CDF_WRITE_BINS = False
-
-ENSEMBLE_STAT_MASK_GRID =
-
-ENSEMBLE_STAT_CI_ALPHA = 0.05
-
-ENSEMBLE_STAT_INTERP_FIELD = BOTH
-ENSEMBLE_STAT_INTERP_VLD_THRESH = 1.0
-ENSEMBLE_STAT_INTERP_SHAPE = SQUARE
-ENSEMBLE_STAT_INTERP_METHOD = NEAREST
-ENSEMBLE_STAT_INTERP_WIDTH = 1
-
-ENSEMBLE_STAT_OUTPUT_FLAG_ECNT = STAT
-ENSEMBLE_STAT_OUTPUT_FLAG_RPS = NONE
-ENSEMBLE_STAT_OUTPUT_FLAG_RHIST = STAT
-ENSEMBLE_STAT_OUTPUT_FLAG_PHIST = STAT
-ENSEMBLE_STAT_OUTPUT_FLAG_ORANK = STAT
-ENSEMBLE_STAT_OUTPUT_FLAG_SSVAR = STAT
-ENSEMBLE_STAT_OUTPUT_FLAG_RELP = STAT
-
-ENSEMBLE_STAT_ENSEMBLE_FLAG_RANK = FALSE
-ENSEMBLE_STAT_ENSEMBLE_FLAG_WEIGHT = FALSE
-#
-# Forecast and observation variables and levels as specified in the fcst
-# field dictionary of the MET configuration file. Specify as FCST_VARn_NAME,
-# FCST_VARn_LEVELS, (optional) FCST_VARn_OPTION and OBS_VARn_NAME,
-# OBS_VARn_LEVELS, (optional) OBS_VARn_OPTION.
-#
-FCST_VAR1_NAME = {{fieldname_in_met_output}}_{{accum_hh}}
-FCST_VAR1_LEVELS = A{{accum_hh}}
-FCST_VAR1_OPTIONS = ens_ssvar_bin_size = 50.0;
- ens_phist_bin_size = 0.05;
-OBS_VAR1_NAME = {{fieldname_in_obs_input}}
-OBS_VAR1_LEVELS = A{{accum_hh}}
-OBS_VAR1_OPTIONS = {FCST_VAR1_OPTIONS};
- convert(x) = 100.0*x;
-
-[dir]
-#
-# OUTPUT_BASE apparently has to be set to something; it cannot be left
-# to its default value. But it is not explicitly used elsewhere in this
-# configuration file.
-#
-OUTPUT_BASE = {{output_base}}
-#
-# Point observation input directory for EnsembleStat.
-#
-OBS_ENSEMBLE_STAT_POINT_INPUT_DIR =
-#
-# Grid observation input directory for EnsembleStat.
-#
-OBS_ENSEMBLE_STAT_GRID_INPUT_DIR = {{obs_input_dir}}
-#
-# Forecast model input directory for EnsembleStat.
-#
-FCST_ENSEMBLE_STAT_INPUT_DIR = {{fcst_input_dir}}
-#
-# Directory containing climatology mean input to EnsembleStat. Not used
-# in this example.
-#
-ENSEMBLE_STAT_CLIMO_MEAN_INPUT_DIR =
-#
-# Directory containing climatology standard deviation input to EnsembleStat.
-# Not used in this example.
-#
-ENSEMBLE_STAT_CLIMO_STDEV_INPUT_DIR =
-#
-# Output directory for EnsembleStat.
-#
-ENSEMBLE_STAT_OUTPUT_DIR = {{output_dir}}
-#
-# Directory for staging data.
-#
-STAGING_DIR = {{staging_dir}}
-
-# End of [dir] section and start of [filename_templates] section.
-[filename_templates]
-#
-# Template for point observation input to EnsembleStat relative to
-# OBS_ENSEMBLE_STAT_POINT_INPUT_DIR.
-#
-OBS_ENSEMBLE_STAT_POINT_INPUT_TEMPLATE =
-#
-# Template for gridded observation input to EnsembleStat relative to
-# OBS_ENSEMBLE_STAT_GRID_INPUT_DIR.
-#
-OBS_ENSEMBLE_STAT_GRID_INPUT_TEMPLATE = {{obs_input_fn_template}}
-#
-# Template for forecast input to EnsembleStat relative to
-# FCST_ENSEMBLE_STAT_INPUT_DIR.
-#
-# Note that this can be a comma-separated list of ensemble members
-# or a single template; filename wildcard characters (? or *) may be used.
-#
-FCST_ENSEMBLE_STAT_INPUT_TEMPLATE = {{fcst_input_fn_template}}
-#
-# Template for output from EnsembleStat relative to ENSEMBLE_STAT_OUTPUT_DIR.
-#
-ENSEMBLE_STAT_OUTPUT_TEMPLATE =
-ENSEMBLE_STAT_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE}_{lead?fmt=%H%M%S}L
-#
-# Template for climatology input to EnsembleStat relative to
-# ENSEMBLE_STAT_CLIMO_MEAN_INPUT_DIR. Not used in this example.
-#
-ENSEMBLE_STAT_CLIMO_MEAN_INPUT_TEMPLATE =
-#
-# Template for climatology input to EnsembleStat relative to
-# ENSEMBLE_STAT_CLIMO_STDEV_INPUT_DIR. Not used in this example.
-#
-ENSEMBLE_STAT_CLIMO_STDEV_INPUT_TEMPLATE =
-#
-# Variable used to specify one or more verification mask files for
-# EnsembleStat. Not used for this example.
-#
-ENSEMBLE_STAT_VERIFICATION_MASK_TEMPLATE = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly
diff --git a/parm/metplus/EnsembleStat_REFC.conf b/parm/metplus/EnsembleStat_REFC.conf
deleted file mode 100644
index 6de6eddeb8..0000000000
--- a/parm/metplus/EnsembleStat_REFC.conf
+++ /dev/null
@@ -1,265 +0,0 @@
-# EnsembleStat METplus Configuration
-
-[config]
-
-# List of applications (tools) to run.
-PROCESS_LIST = EnsembleStat
-
-# time looping - options are INIT, VALID, RETRO, and REALTIME
-# If set to INIT or RETRO:
-# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set
-# If set to VALID or REALTIME:
-# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set
-LOOP_BY = INIT
-
-# Format of INIT_BEG and INIT_END using % items
-# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc.
-# see www.strftime.org for more information
-# %Y%m%d%H expands to YYYYMMDDHH
-INIT_TIME_FMT = %Y%m%d%H
-
-# Start time for METplus run - must match INIT_TIME_FMT
-INIT_BEG = {{cdate}}
-
-# End time for METplus run - must match INIT_TIME_FMT
-INIT_END = {{cdate}}
-
-# Increment between METplus runs (in seconds if no units are specified).
-# Must be >= 60 seconds.
-INIT_INCREMENT = 3600
-
-# List of forecast leads to process for each run time (init or valid)
-# In hours if units are not specified
-# If unset, defaults to 0 (don't loop through forecast leads)
-LEAD_SEQ = {{fhr_list}}
-#
-# Order of loops to process data - Options are times, processes
-# Not relevant if only one item is in the PROCESS_LIST
-# times = run all wrappers in the PROCESS_LIST for a single run time, then
-# increment the run time and run all wrappers again until all times have
-# been evaluated.
-# processes = run the first wrapper in the PROCESS_LIST for all times
-# specified, then repeat for the next item in the PROCESS_LIST until all
-# wrappers have been run
-#
-LOOP_ORDER = times
-#
-# Specify the name of the METplus log file.
-#
-LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}}
-#
-# Specify the location and name of the final METplus conf file.
-#
-METPLUS_CONF = {ENSEMBLE_STAT_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}}
-#
-# Location of MET configuration file to pass to EnsembleStat.
-#
-# References PARM_BASE, which is the location of the parm directory
-# corresponding to the ush directory of the run_metplus.py script that
-# is called or the value of the environment variable METPLUS_PARM_BASE
-# if set.
-#
-ENSEMBLE_STAT_CONFIG_FILE = {PARM_BASE}/met_config/EnsembleStatConfig_wrapped
-#
-# Name to identify model (forecast) data in output.
-#
-MODEL = {{vx_fcst_model_name}}
-
-ENSEMBLE_STAT_DESC = NA
-#
-# Name to identify observation data in output.
-#
-OBTYPE = {{obtype}}
-#
-# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud.
-#
-LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}}
-#
-# Observation data time window(s).
-#
-OBS_FILE_WINDOW_BEGIN = -300
-OBS_FILE_WINDOW_END = 300
-OBS_ENSEMBLE_STAT_WINDOW_BEGIN = 0
-OBS_ENSEMBLE_STAT_WINDOW_END = 0
-
-# number of expected members for ensemble. Should correspond with the
-# number of items in the list for FCST_ENSEMBLE_STAT_INPUT_TEMPLATE
-ENSEMBLE_STAT_N_MEMBERS = {{num_ens_members}}
-
-# ens.ens_thresh value in the MET config file
-# threshold for ratio of valid files to expected files to allow app to run
-ENSEMBLE_STAT_ENS_THRESH = 0.05
-
-# ens.vld_thresh value in the MET config file
-ENSEMBLE_STAT_ENS_VLD_THRESH = 1.0
-
-# ENSEMBLE_STAT_MET_OBS_ERR_TABLE is not required.
-# If the variable is not defined, or the value is not set, then the MET
-# default is used.
-ENSEMBLE_STAT_MET_OBS_ERR_TABLE =
-
-
-# Used in the MET config file for: regrid to_grid field
-ENSEMBLE_STAT_REGRID_TO_GRID = FCST
-ENSEMBLE_STAT_REGRID_METHOD = BUDGET
-ENSEMBLE_STAT_REGRID_WIDTH = 2
-ENSEMBLE_STAT_REGRID_VLD_THRESH = 0.5
-ENSEMBLE_STAT_REGRID_SHAPE = SQUARE
-
-ENSEMBLE_STAT_CENSOR_THRESH =
-ENSEMBLE_STAT_CENSOR_VAL =
-
-# Should this parameter be set to something other than ADPSFC (maybe
-# just leave empty) since we are not verifying surface fields?
-ENSEMBLE_STAT_MESSAGE_TYPE = ADPSFC
-ENSEMBLE_STAT_DUPLICATE_FLAG = NONE
-ENSEMBLE_STAT_SKIP_CONST = TRUE
-ENSEMBLE_STAT_OBS_ERROR_FLAG = FALSE
-
-ENSEMBLE_STAT_ENS_SSVAR_BIN_SIZE = 1.0
-ENSEMBLE_STAT_ENS_PHIST_BIN_SIZE = 0.05
-
-#ENSEMBLE_STAT_CLIMO_MEAN_FILE_NAME =
-#ENSEMBLE_STAT_CLIMO_MEAN_FIELD =
-#ENSEMBLE_STAT_CLIMO_MEAN_REGRID_METHOD =
-#ENSEMBLE_STAT_CLIMO_MEAN_REGRID_WIDTH =
-#ENSEMBLE_STAT_CLIMO_MEAN_REGRID_VLD_THRESH =
-#ENSEMBLE_STAT_CLIMO_MEAN_REGRID_SHAPE =
-#ENSEMBLE_STAT_CLIMO_MEAN_TIME_INTERP_METHOD =
-#ENSEMBLE_STAT_CLIMO_MEAN_MATCH_MONTH =
-#ENSEMBLE_STAT_CLIMO_MEAN_DAY_INTERVAL = 31
-#ENSEMBLE_STAT_CLIMO_MEAN_HOUR_INTERVAL = 6
-
-#ENSEMBLE_STAT_CLIMO_STDEV_FILE_NAME =
-#ENSEMBLE_STAT_CLIMO_STDEV_FIELD =
-#ENSEMBLE_STAT_CLIMO_STDEV_REGRID_METHOD =
-#ENSEMBLE_STAT_CLIMO_STDEV_REGRID_WIDTH =
-#ENSEMBLE_STAT_CLIMO_STDEV_REGRID_VLD_THRESH =
-#ENSEMBLE_STAT_CLIMO_STDEV_REGRID_SHAPE =
-#ENSEMBLE_STAT_CLIMO_STDEV_TIME_INTERP_METHOD =
-#ENSEMBLE_STAT_CLIMO_STDEV_MATCH_MONTH =
-#ENSEMBLE_STAT_CLIMO_STDEV_DAY_INTERVAL = 31
-#ENSEMBLE_STAT_CLIMO_STDEV_HOUR_INTERVAL = 6
-
-
-ENSEMBLE_STAT_CLIMO_CDF_BINS = 1
-ENSEMBLE_STAT_CLIMO_CDF_CENTER_BINS = False
-ENSEMBLE_STAT_CLIMO_CDF_WRITE_BINS = False
-
-ENSEMBLE_STAT_MASK_GRID = FULL
-
-ENSEMBLE_STAT_CI_ALPHA = 0.05
-
-ENSEMBLE_STAT_INTERP_FIELD = BOTH
-ENSEMBLE_STAT_INTERP_VLD_THRESH = 1.0
-ENSEMBLE_STAT_INTERP_SHAPE = SQUARE
-ENSEMBLE_STAT_INTERP_METHOD = NEAREST
-ENSEMBLE_STAT_INTERP_WIDTH = 1
-
-ENSEMBLE_STAT_OUTPUT_FLAG_ECNT = STAT
-ENSEMBLE_STAT_OUTPUT_FLAG_RPS = NONE
-ENSEMBLE_STAT_OUTPUT_FLAG_RHIST = STAT
-ENSEMBLE_STAT_OUTPUT_FLAG_PHIST = STAT
-ENSEMBLE_STAT_OUTPUT_FLAG_ORANK = STAT
-ENSEMBLE_STAT_OUTPUT_FLAG_SSVAR = STAT
-ENSEMBLE_STAT_OUTPUT_FLAG_RELP = STAT
-
-ENSEMBLE_STAT_ENSEMBLE_FLAG_RANK = FALSE
-ENSEMBLE_STAT_ENSEMBLE_FLAG_WEIGHT = FALSE
-#
-# Forecast and observation variables and levels as specified in the fcst
-# field dictionary of the MET configuration file. Specify as FCST_VARn_NAME,
-# FCST_VARn_LEVELS, (optional) FCST_VARn_OPTION and OBS_VARn_NAME,
-# OBS_VARn_LEVELS, (optional) OBS_VARn_OPTION.
-#
-FCST_VAR1_NAME = {{fieldname_in_fcst_input}}
-FCST_VAR1_LEVELS = L0
-FCST_VAR1_OPTIONS = ens_ssvar_bin_size = 50.0;
- ens_phist_bin_size = 0.05;
-OBS_VAR1_NAME = {{fieldname_in_obs_input}}
-OBS_VAR1_LEVELS = Z500
-OBS_VAR1_OPTIONS = censor_thresh = lt-20;
- censor_val = -20.0;
- cnt_thresh = [ >15 ];
- cnt_logic = UNION;
- ens_ssvar_bin_size = 50.0;
- ens_phist_bin_size = 0.05;
-
-[dir]
-#
-# OUTPUT_BASE apparently has to be set to something; it cannot be left
-# to its default value. But it is not explicitly used elsewhere in this
-# configuration file.
-#
-OUTPUT_BASE = {{output_base}}
-#
-# Point observation input directory for EnsembleStat.
-#
-OBS_ENSEMBLE_STAT_POINT_INPUT_DIR =
-#
-# Grid observation input directory for EnsembleStat.
-#
-OBS_ENSEMBLE_STAT_GRID_INPUT_DIR = {{obs_input_dir}}
-#
-# Forecast model input directory for EnsembleStat.
-#
-FCST_ENSEMBLE_STAT_INPUT_DIR = {{fcst_input_dir}}
-#
-# Directory containing climatology mean input to EnsembleStat. Not used
-# in this example.
-#
-ENSEMBLE_STAT_CLIMO_MEAN_INPUT_DIR =
-#
-# Directory containing climatology standard deviation input to EnsembleStat.
-# Not used in this example.
-#
-ENSEMBLE_STAT_CLIMO_STDEV_INPUT_DIR =
-#
-# Output directory for EnsembleStat.
-#
-ENSEMBLE_STAT_OUTPUT_DIR = {{output_dir}}
-#
-# Directory for staging data.
-#
-STAGING_DIR = {{staging_dir}}
-
-# End of [dir] section and start of [filename_templates] section.
-[filename_templates]
-#
-# Template for point observation input to EnsembleStat relative to
-# OBS_ENSEMBLE_STAT_POINT_INPUT_DIR.
-#
-OBS_ENSEMBLE_STAT_POINT_INPUT_TEMPLATE =
-#
-# Template for gridded observation input to EnsembleStat relative to
-# OBS_ENSEMBLE_STAT_GRID_INPUT_DIR.
-#
-OBS_ENSEMBLE_STAT_GRID_INPUT_TEMPLATE = {{obs_input_fn_template}}
-#
-# Template for forecast input to EnsembleStat relative to
-# FCST_ENSEMBLE_STAT_INPUT_DIR.
-#
-# Note that this can be a comma-separated list of ensemble members
-# or a single template; filename wildcard characters (? or *) may be used.
-#
-FCST_ENSEMBLE_STAT_INPUT_TEMPLATE = {{fcst_input_fn_template}}
-#
-# Template for output from EnsembleStat relative to ENSEMBLE_STAT_OUTPUT_DIR.
-#
-ENSEMBLE_STAT_OUTPUT_TEMPLATE =
-ENSEMBLE_STAT_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE}_{lead?fmt=%H%M%S}L
-#
-# Template for climatology input to EnsembleStat relative to
-# ENSEMBLE_STAT_CLIMO_MEAN_INPUT_DIR. Not used in this example.
-#
-ENSEMBLE_STAT_CLIMO_MEAN_INPUT_TEMPLATE =
-#
-# Template for climatology input to EnsembleStat relative to
-# ENSEMBLE_STAT_CLIMO_STDEV_INPUT_DIR. Not used in this example.
-#
-ENSEMBLE_STAT_CLIMO_STDEV_INPUT_TEMPLATE =
-#
-# Variable used to specify one or more verification mask files for
-# EnsembleStat. Not used for this example.
-#
-ENSEMBLE_STAT_VERIFICATION_MASK_TEMPLATE = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly
diff --git a/parm/metplus/EnsembleStat_RETOP.conf b/parm/metplus/EnsembleStat_RETOP.conf
deleted file mode 100644
index abd2dd2a45..0000000000
--- a/parm/metplus/EnsembleStat_RETOP.conf
+++ /dev/null
@@ -1,267 +0,0 @@
-# EnsembleStat METplus Configuration
-
-[config]
-
-# List of applications (tools) to run.
-PROCESS_LIST = EnsembleStat
-
-# time looping - options are INIT, VALID, RETRO, and REALTIME
-# If set to INIT or RETRO:
-# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set
-# If set to VALID or REALTIME:
-# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set
-LOOP_BY = INIT
-
-# Format of INIT_BEG and INIT_END using % items
-# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc.
-# see www.strftime.org for more information
-# %Y%m%d%H expands to YYYYMMDDHH
-INIT_TIME_FMT = %Y%m%d%H
-
-# Start time for METplus run - must match INIT_TIME_FMT
-INIT_BEG = {{cdate}}
-
-# End time for METplus run - must match INIT_TIME_FMT
-INIT_END = {{cdate}}
-
-# Increment between METplus runs (in seconds if no units are specified).
-# Must be >= 60 seconds.
-INIT_INCREMENT = 3600
-
-# List of forecast leads to process for each run time (init or valid)
-# In hours if units are not specified
-# If unset, defaults to 0 (don't loop through forecast leads)
-LEAD_SEQ = {{fhr_list}}
-#
-# Order of loops to process data - Options are times, processes
-# Not relevant if only one item is in the PROCESS_LIST
-# times = run all wrappers in the PROCESS_LIST for a single run time, then
-# increment the run time and run all wrappers again until all times have
-# been evaluated.
-# processes = run the first wrapper in the PROCESS_LIST for all times
-# specified, then repeat for the next item in the PROCESS_LIST until all
-# wrappers have been run
-#
-LOOP_ORDER = times
-#
-# Specify the name of the METplus log file.
-#
-LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}}
-#
-# Specify the location and name of the final METplus conf file.
-#
-METPLUS_CONF = {ENSEMBLE_STAT_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}}
-#
-# Location of MET configuration file to pass to EnsembleStat.
-#
-# References PARM_BASE, which is the location of the parm directory
-# corresponding to the ush directory of the run_metplus.py script that
-# is called or the value of the environment variable METPLUS_PARM_BASE
-# if set.
-#
-ENSEMBLE_STAT_CONFIG_FILE = {PARM_BASE}/met_config/EnsembleStatConfig_wrapped
-#
-# Name to identify model (forecast) data in output.
-#
-MODEL = {{vx_fcst_model_name}}
-
-ENSEMBLE_STAT_DESC = NA
-#
-# Name to identify observation data in output.
-#
-OBTYPE = {{obtype}}
-#
-# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud.
-#
-LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}}
-#
-# Observation data time window(s).
-#
-OBS_FILE_WINDOW_BEGIN = -300
-OBS_FILE_WINDOW_END = 300
-OBS_ENSEMBLE_STAT_WINDOW_BEGIN = 0
-OBS_ENSEMBLE_STAT_WINDOW_END = 0
-
-# number of expected members for ensemble. Should correspond with the
-# number of items in the list for FCST_ENSEMBLE_STAT_INPUT_TEMPLATE
-ENSEMBLE_STAT_N_MEMBERS = {{num_ens_members}}
-
-# ens.ens_thresh value in the MET config file
-# threshold for ratio of valid files to expected files to allow app to run
-ENSEMBLE_STAT_ENS_THRESH = 0.05
-
-# ens.vld_thresh value in the MET config file
-ENSEMBLE_STAT_ENS_VLD_THRESH = 1.0
-
-# ENSEMBLE_STAT_MET_OBS_ERR_TABLE is not required.
-# If the variable is not defined, or the value is not set, then the MET
-# default is used.
-ENSEMBLE_STAT_MET_OBS_ERR_TABLE =
-
-
-# Used in the MET config file for: regrid to_grid field
-ENSEMBLE_STAT_REGRID_TO_GRID = FCST
-ENSEMBLE_STAT_REGRID_METHOD = BUDGET
-ENSEMBLE_STAT_REGRID_WIDTH = 2
-ENSEMBLE_STAT_REGRID_VLD_THRESH = 0.5
-ENSEMBLE_STAT_REGRID_SHAPE = SQUARE
-
-ENSEMBLE_STAT_CENSOR_THRESH =
-ENSEMBLE_STAT_CENSOR_VAL =
-
-# Should this parameter be set to something other than ADPSFC (maybe
-# just leave empty) since we are not verifying surface fields?
-ENSEMBLE_STAT_MESSAGE_TYPE = ADPSFC
-ENSEMBLE_STAT_DUPLICATE_FLAG = NONE
-ENSEMBLE_STAT_SKIP_CONST = TRUE
-ENSEMBLE_STAT_OBS_ERROR_FLAG = FALSE
-
-ENSEMBLE_STAT_ENS_SSVAR_BIN_SIZE = 1.0
-ENSEMBLE_STAT_ENS_PHIST_BIN_SIZE = 0.05
-
-#ENSEMBLE_STAT_CLIMO_MEAN_FILE_NAME =
-#ENSEMBLE_STAT_CLIMO_MEAN_FIELD =
-#ENSEMBLE_STAT_CLIMO_MEAN_REGRID_METHOD =
-#ENSEMBLE_STAT_CLIMO_MEAN_REGRID_WIDTH =
-#ENSEMBLE_STAT_CLIMO_MEAN_REGRID_VLD_THRESH =
-#ENSEMBLE_STAT_CLIMO_MEAN_REGRID_SHAPE =
-#ENSEMBLE_STAT_CLIMO_MEAN_TIME_INTERP_METHOD =
-#ENSEMBLE_STAT_CLIMO_MEAN_MATCH_MONTH =
-#ENSEMBLE_STAT_CLIMO_MEAN_DAY_INTERVAL = 31
-#ENSEMBLE_STAT_CLIMO_MEAN_HOUR_INTERVAL = 6
-
-#ENSEMBLE_STAT_CLIMO_STDEV_FILE_NAME =
-#ENSEMBLE_STAT_CLIMO_STDEV_FIELD =
-#ENSEMBLE_STAT_CLIMO_STDEV_REGRID_METHOD =
-#ENSEMBLE_STAT_CLIMO_STDEV_REGRID_WIDTH =
-#ENSEMBLE_STAT_CLIMO_STDEV_REGRID_VLD_THRESH =
-#ENSEMBLE_STAT_CLIMO_STDEV_REGRID_SHAPE =
-#ENSEMBLE_STAT_CLIMO_STDEV_TIME_INTERP_METHOD =
-#ENSEMBLE_STAT_CLIMO_STDEV_MATCH_MONTH =
-#ENSEMBLE_STAT_CLIMO_STDEV_DAY_INTERVAL = 31
-#ENSEMBLE_STAT_CLIMO_STDEV_HOUR_INTERVAL = 6
-
-
-ENSEMBLE_STAT_CLIMO_CDF_BINS = 1
-ENSEMBLE_STAT_CLIMO_CDF_CENTER_BINS = False
-ENSEMBLE_STAT_CLIMO_CDF_WRITE_BINS = False
-
-ENSEMBLE_STAT_MASK_GRID = FULL
-
-ENSEMBLE_STAT_CI_ALPHA = 0.05
-
-ENSEMBLE_STAT_INTERP_FIELD = BOTH
-ENSEMBLE_STAT_INTERP_VLD_THRESH = 1.0
-ENSEMBLE_STAT_INTERP_SHAPE = SQUARE
-ENSEMBLE_STAT_INTERP_METHOD = NEAREST
-ENSEMBLE_STAT_INTERP_WIDTH = 1
-
-ENSEMBLE_STAT_OUTPUT_FLAG_ECNT = STAT
-ENSEMBLE_STAT_OUTPUT_FLAG_RPS = NONE
-ENSEMBLE_STAT_OUTPUT_FLAG_RHIST = STAT
-ENSEMBLE_STAT_OUTPUT_FLAG_PHIST = STAT
-ENSEMBLE_STAT_OUTPUT_FLAG_ORANK = STAT
-ENSEMBLE_STAT_OUTPUT_FLAG_SSVAR = STAT
-ENSEMBLE_STAT_OUTPUT_FLAG_RELP = STAT
-
-ENSEMBLE_STAT_ENSEMBLE_FLAG_RANK = FALSE
-ENSEMBLE_STAT_ENSEMBLE_FLAG_WEIGHT = FALSE
-#
-# Forecast and observation variables and levels as specified in the fcst
-# field dictionary of the MET configuration file. Specify as FCST_VARn_NAME,
-# FCST_VARn_LEVELS, (optional) FCST_VARn_OPTION and OBS_VARn_NAME,
-# OBS_VARn_LEVELS, (optional) OBS_VARn_OPTION.
-#
-FCST_VAR1_NAME = {{fieldname_in_fcst_input}}
-FCST_VAR1_LEVELS = L0
-FCST_VAR1_OPTIONS = convert(x) = x * 3.28084 * 0.001; ;; Convert from meters to kilofeet.
- ens_ssvar_bin_size = 50.0;
- ens_phist_bin_size = 0.05;
-OBS_VAR1_NAME = {{fieldname_in_obs_input}}
-OBS_VAR1_LEVELS = Z500
-OBS_VAR1_OPTIONS = censor_thresh = lt-20;
- censor_val = -20.0;
- cnt_thresh = [ >15 ];
- cnt_logic = UNION;
- convert(x) = x * 3280.84 * 0.001; ;; Convert from kilometers to kilofeet.
- ens_ssvar_bin_size = 50.0;
- ens_phist_bin_size = 0.05;
-
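[Editor's note, not part of the patch: the two convert(x) factors above both land on kilofeet, so the forecast (meters) and observed (kilometers) echo tops become directly comparable. A quick Python sanity check of the arithmetic, with illustrative heights:]

```python
# Sanity check of the convert(x) factors in the RETOP config above.
M_TO_KFT = 3.28084 * 0.001    # meters -> feet -> kilofeet
KM_TO_KFT = 3280.84 * 0.001   # kilometers -> feet -> kilofeet

echo_top_m = 5000.0   # illustrative forecast echo top [m]
echo_top_km = 5.0     # the same height as observed [km]
print(round(echo_top_m * M_TO_KFT, 4))    # 16.4042
print(round(echo_top_km * KM_TO_KFT, 4))  # 16.4042 -- same value on both sides
```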
-[dir]
-#
-# OUTPUT_BASE apparently has to be set to something; it cannot be left
-# to its default value. But it is not explicitly used elsewhere in this
-# configuration file.
-#
-OUTPUT_BASE = {{output_base}}
-#
-# Point observation input directory for EnsembleStat.
-#
-OBS_ENSEMBLE_STAT_POINT_INPUT_DIR =
-#
-# Grid observation input directory for EnsembleStat.
-#
-OBS_ENSEMBLE_STAT_GRID_INPUT_DIR = {{obs_input_dir}}
-#
-# Forecast model input directory for EnsembleStat.
-#
-FCST_ENSEMBLE_STAT_INPUT_DIR = {{fcst_input_dir}}
-#
-# Directory containing climatology mean input to EnsembleStat. Not used
-# in this example.
-#
-ENSEMBLE_STAT_CLIMO_MEAN_INPUT_DIR =
-#
-# Directory containing climatology standard deviation input to EnsembleStat.
-# Not used in this example.
-#
-ENSEMBLE_STAT_CLIMO_STDEV_INPUT_DIR =
-#
-# Output directory for EnsembleStat.
-#
-ENSEMBLE_STAT_OUTPUT_DIR = {{output_dir}}
-#
-# Directory for staging data.
-#
-STAGING_DIR = {{staging_dir}}
-
-# End of [dir] section and start of [filename_templates] section.
-[filename_templates]
-#
-# Template for point observation input to EnsembleStat relative to
-# OBS_ENSEMBLE_STAT_POINT_INPUT_DIR.
-#
-OBS_ENSEMBLE_STAT_POINT_INPUT_TEMPLATE =
-#
-# Template for gridded observation input to EnsembleStat relative to
-# OBS_ENSEMBLE_STAT_GRID_INPUT_DIR.
-#
-OBS_ENSEMBLE_STAT_GRID_INPUT_TEMPLATE = {{obs_input_fn_template}}
-#
-# Template for forecast input to EnsembleStat relative to
-# FCST_ENSEMBLE_STAT_INPUT_DIR.
-#
-# Note that this can be a comma-separated list of ensemble members
-# or a single template; filename wildcard characters (? or *) may be used.
-#
-FCST_ENSEMBLE_STAT_INPUT_TEMPLATE = {{fcst_input_fn_template}}
-#
-# Template for output from EnsembleStat relative to ENSEMBLE_STAT_OUTPUT_DIR.
-#
-ENSEMBLE_STAT_OUTPUT_TEMPLATE =
-ENSEMBLE_STAT_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE}_{lead?fmt=%H%M%S}L
-#
-# Template for climatology input to EnsembleStat relative to
-# ENSEMBLE_STAT_CLIMO_MEAN_INPUT_DIR. Not used in this example.
-#
-ENSEMBLE_STAT_CLIMO_MEAN_INPUT_TEMPLATE =
-#
-# Template for climatology input to EnsembleStat relative to
-# ENSEMBLE_STAT_CLIMO_STDEV_INPUT_DIR. Not used in this example.
-#
-ENSEMBLE_STAT_CLIMO_STDEV_INPUT_TEMPLATE =
-#
-# Variable used to specify one or more verification mask files for
-# EnsembleStat. Not used for this example.
-#
-ENSEMBLE_STAT_VERIFICATION_MASK_TEMPLATE = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly
diff --git a/parm/metplus/GenEnsProd.conf b/parm/metplus/GenEnsProd.conf
new file mode 100644
index 0000000000..7291ce02fa
--- /dev/null
+++ b/parm/metplus/GenEnsProd.conf
@@ -0,0 +1,390 @@
+# {{MetplusToolName}} METplus Configuration
+
+[config]
+
+# List of applications (tools) to run.
+PROCESS_LIST = {{MetplusToolName}}
+
+# time looping - options are INIT, VALID, RETRO, and REALTIME
+# If set to INIT or RETRO:
+# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set
+# If set to VALID or REALTIME:
+# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set
+LOOP_BY = INIT
+
+# Format of INIT_BEG and INIT_END using % items
+# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc.
+# see www.strftime.org for more information
+# %Y%m%d%H expands to YYYYMMDDHH
+INIT_TIME_FMT = %Y%m%d%H
+
+# Start time for METplus run - must match INIT_TIME_FMT
+INIT_BEG = {{cdate}}
+
+# End time for METplus run - must match INIT_TIME_FMT
+INIT_END = {{cdate}}
+
+# Increment between METplus runs (in seconds if no units are specified).
+# Must be >= 60 seconds.
+INIT_INCREMENT = 3600
+
+# List of forecast leads to process for each run time (init or valid)
+# In hours if units are not specified
+# If unset, defaults to 0 (don't loop through forecast leads)
+LEAD_SEQ = {{fhr_list}}
+#
+# Order of loops to process data - Options are times, processes
+# Not relevant if only one item is in the PROCESS_LIST
+# times = run all wrappers in the PROCESS_LIST for a single run time, then
+# increment the run time and run all wrappers again until all times have
+# been evaluated.
+# processes = run the first wrapper in the PROCESS_LIST for all times
+# specified, then repeat for the next item in the PROCESS_LIST until all
+# wrappers have been run
+#
+LOOP_ORDER = times
+#
+# Specify the name of the METplus log file.
+#
+LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}}
+#
+# Specify the location and name of the final METplus conf file.
+#
+METPLUS_CONF = {{ '{' ~ METPLUS_TOOL_NAME ~ '_OUTPUT_DIR}' }}/metplus_final.{{metplus_config_fn}}
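[Editor's note, not part of the patch: the Jinja string concatenation above ('{' ~ METPLUS_TOOL_NAME ~ '_OUTPUT_DIR}') exists to emit a literal single-brace METplus variable reference, which plain {{...}} syntax cannot produce. A minimal sketch of the rendering; the jinja2 package and the variable values are illustrative assumptions:]

```python
# Render the METPLUS_CONF line to see the literal {VAR} reference it produces.
from jinja2 import Template

line = ("METPLUS_CONF = {{ '{' ~ METPLUS_TOOL_NAME ~ '_OUTPUT_DIR}' }}"
        "/metplus_final.{{metplus_config_fn}}")
print(Template(line).render(METPLUS_TOOL_NAME="GEN_ENS_PROD",
                            metplus_config_fn="GenEnsProd_APCP.conf"))
# METPLUS_CONF = {GEN_ENS_PROD_OUTPUT_DIR}/metplus_final.GenEnsProd_APCP.conf
```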
+#
+# Name to identify model (forecast) data in output.
+#
+MODEL = {{vx_fcst_model_name}}
+
+{{METPLUS_TOOL_NAME}}_DESC = NA
+#
+# Name to identify observation data in output.
+#
+OBTYPE = {{obtype}}
+#
+# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud.
+#
+LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}}
+
+###
+# File I/O
+###
+
+#
+# Forecast model input directory for {{MetplusToolName}}.
+#
+{{METPLUS_TOOL_NAME}}_INPUT_DIR = {{fcst_input_dir}}
+#
+# Template for forecast input to {{MetplusToolName}} relative to
+# {{METPLUS_TOOL_NAME}}_INPUT_DIR.
+#
+# Note that this can be a comma-separated list of ensemble members
+# or a single template; filename wildcard characters (? or *) may be used.
+#
+{{METPLUS_TOOL_NAME}}_INPUT_TEMPLATE = {{fcst_input_fn_template}}
+
+# {{METPLUS_TOOL_NAME}}_CTRL_INPUT_DIR = {INPUT_BASE}
+# {{METPLUS_TOOL_NAME}}_CTRL_INPUT_TEMPLATE =
+# {init?fmt=%Y%m%d%H}/mem1/postprd/{ENV[NET]}.t{init?fmt=%H}z.bgdawpf{lead?fmt=%HHH}.tm{init?fmt=%H}.grib2
+
+#
+# Output directory for {{MetplusToolName}}.
+#
+# OUTPUT_BASE apparently has to be set to something; it cannot be left
+# to its default value. But it is not explicitly used elsewhere in this
+# configuration file.
+#
+OUTPUT_BASE = {{output_base}}
+{{METPLUS_TOOL_NAME}}_OUTPUT_DIR = {{output_dir}}
+#
+# Template for output from {{MetplusToolName}} relative to {{METPLUS_TOOL_NAME}}_OUTPUT_DIR.
+#
+{{METPLUS_TOOL_NAME}}_OUTPUT_TEMPLATE = {{metplus_tool_name}}_{MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE}_{lead?fmt=%H%M%S}L_{valid?fmt=%Y%m%d_%H%M%S}V.nc
+#
+# Directory for staging data.
+#
+STAGING_DIR = {{staging_dir}}
+#
+# There are n ensemble members, but one is used as the control, so specify n-1 members.
+#
+{{METPLUS_TOOL_NAME}}_N_MEMBERS = {{num_ens_members}}
+
+###
+# Field Info
+###
+#
+# Ensemble variables and levels as specified in the ens field dictionary
+# of the MET configuration file. Specify as ENS_VARn_NAME, ENS_VARn_LEVELS,
+# (optional) ENS_VARn_OPTION
+#
+{#-
+Import the file containing jinja macros.
+#}
+{%- import metplus_templates_dir ~ '/metplus_macros.jinja' as metplus_macros %}
+
+{#-
+Jinja requires certain variables to be defined globally within the template
+before they can be used in if-statements and other scopes (see Jinja
+scoping rules). Define such variables.
+#}
+{%- set threshes_fcst = [] %}
+{%- set indx_input_thresh_fcst = '' %}
+
+{%- set opts_indent = '' %}
+{%- set opts_indent_len = '' %}
+{%- set tmp = '' %}
+{%- set error_msg = '' %}
+
+{#-
+Extract the list of forecast dictionaries containing the valid fields,
+levels, and thresholds corresponding to the specified field group
+(input_field_group).
+#}
+{%- set fields_levels_threshes_fcst = vx_config_dict['fcst'][input_field_group] %}
+
+{#-
+Reset the specified forecast level so that if it happens to be an
+accumulation (e.g. 'A03'), the leading zeros in front of the hour are
+stripped out (e.g. reset to 'A3').
+#}
+{%- set input_level_fcst = metplus_macros.get_accumulation_no_zero_pad(input_level_fcst) %}
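[Editor's note, not part of the patch: the actual macro lives in metplus_macros.jinja; as a rough illustration of the behavior the comment describes, a Python re-implementation might look like the sketch below. The function name and regex are assumptions, not the macro's real code:]

```python
# Approximate the zero-padding strip for accumulation levels.
import re

def strip_accum_zero_pad(level: str) -> str:
    """Turn an accumulation level like 'A03' into 'A3'; leave others alone."""
    m = re.fullmatch(r"A0*(\d+)", level)
    return f"A{m.group(1)}" if m else level

assert strip_accum_zero_pad("A03") == "A3"
assert strip_accum_zero_pad("A24") == "A24"
assert strip_accum_zero_pad("L0") == "L0"   # non-accumulation levels pass through
```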
+
+{#-
+Ensure that the specified input forecast level(s) (input_level_fcst) and
+threshold(s) (input_thresh_fcst) are valid, i.e. that they are in the
+set(s) of valid forecast levels and thresholds, respectively, specified
+in fields_levels_threshes_fcst.
+#}
+{{- metplus_macros.check_level(fields_levels_threshes_fcst, input_level_fcst) }}
+{{- metplus_macros.check_thresh(fields_levels_threshes_fcst, input_level_fcst, input_thresh_fcst) }}
+
+{#-
+For convenience, create lists of valid forecast field names.
+#}
+{%- set num_valid_fields_fcst = fields_levels_threshes_fcst|length %}
+{%- set valid_fields_fcst = [] %}
+{%- for i in range(0,num_valid_fields_fcst) %}
+ {%- set field = fields_levels_threshes_fcst[i].keys()|list|join('') %}
+ {%- set tmp = valid_fields_fcst.append(field) %}
+{%- endfor %}
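[Editor's note, not part of the patch: each entry of fields_levels_threshes_fcst is a one-key dictionary whose key is a field name, so the loop above just collects those keys. A rough Python equivalent, using made-up stand-in data:]

```python
# Collect the field name (the single key) from each one-key dict entry.
fields_levels_threshes_fcst = [
    {"TMP": {"P850": ["ge288", "ge293", "ge298"]}},
    {"DPT": {"P850": ["ge273", "ge278", "ge283"]}},
]
valid_fields_fcst = [list(entry)[0] for entry in fields_levels_threshes_fcst]
print(valid_fields_fcst)  # ['TMP', 'DPT']
```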
+
+{#-
+Loop over the valid fields and set field names, levels, thresholds, and/
+or options for each forecast field. Note that GenEnsProd only deals with
+forecasts; it does not need observations.
+#}
+{%- set ns = namespace(var_count = 0) %}
+{%- for i in range(0,num_valid_fields_fcst) %}
+
+ {%- set field_fcst = valid_fields_fcst[i] %}
+
+{#-
+Extract dictionary of valid forecast levels (the dictionary keys) and
+corresponding lists of valid thresholds (the values) for each level.
+Then loop over these levels and corresponding lists of thresholds to set
+the forecast field names, levels, thresholds, and/or options.
+#}
+ {%- set valid_levels_threshes_fcst = fields_levels_threshes_fcst[i][field_fcst] %}
+ {%- for level_fcst, valid_threshes_fcst in valid_levels_threshes_fcst.items() %}
+
+ {%- if (input_level_fcst == 'all') or (input_level_fcst == level_fcst) %}
+{#-
+Increment the METplus variable counter.
+#}
+ {%- set ns.var_count = ns.var_count+1 %}
+
+{#-
+Set forecast field name. Note that this has to exactly match the name
+of the field in the input forecast file.
+
+For accumulated fields, the input forecast file is generated by MET's
+PcpCombine tool. In that file, the field name consists of the forecast
+field name here (field_fcst) with the accumulation period appended to
+it (separated by an underscore), so we must do the same here to get an
+exact match.
+#}
+ {%- if (input_field_group in ['APCP', 'ASNOW']) %}
+ENS_VAR{{ns.var_count}}_NAME = {{field_fcst}}_{{accum_hh}}
+ {%- else %}
+ENS_VAR{{ns.var_count}}_NAME = {{field_fcst}}
+ {%- endif %}
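[Editor's note, not part of the patch: a tiny illustration of how the accumulation branch above renders; the values are assumptions:]

```python
# Render the ENS_VARn_NAME line for an accumulated field.
field_fcst, accum_hh, var_count = "APCP", "03", 1
print(f"ENS_VAR{var_count}_NAME = {field_fcst}_{accum_hh}")
# ENS_VAR1_NAME = APCP_03  -- matches the PcpCombine output field name
```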
+
+{#-
+Set forecast field level.
+#}
+ENS_VAR{{ns.var_count}}_LEVELS = {{level_fcst}}
+
+{#-
+Set forecast field threshold(s). Note that no forecast thresholds are
+included in the METplus configuration file if input_thresh_fcst is set
+to 'none'.
+#}
+ {%- if (input_thresh_fcst != 'none') %}
+{#-
+If input_thresh_fcst is set to 'all', set the list of forecast thresholds
+to the full set of valid values.
+#}
+ {%- if (input_thresh_fcst == 'all') %}
+
+ {%- set threshes_fcst = valid_threshes_fcst %}
+{#-
+If input_thresh_fcst is set to a specific value:
+ 1) Ensure that input_thresh_fcst exists in the list of valid forecast
+ thresholds.
+ 2) Get the index of input_thresh_fcst in the list of valid forecast
+ thresholds. This will be needed later below when setting the
+ observation threshold(s).
+ 3) Use this index to set the forecast threshold to a one-element list
+ containing the specified forecast threshold.
+#}
+ {%- else %}
+
+ {%- if input_thresh_fcst not in valid_threshes_fcst %}
+ {%- set error_msg = '\n' ~
+'For the current forecast field (field_fcst) and forecast level (level_fcst),\n' ~
+'the input forecast threshold (input_thresh_fcst) does not exist in the list\n' ~
+'of valid forecast thresholds (valid_threshes_fcst):\n' ~
+' field_fcst = ' ~ field_fcst ~ '\n' ~
+' level_fcst = ' ~ level_fcst ~ '\n' ~
+' valid_threshes_fcst = ' ~ valid_threshes_fcst ~ '\n' ~
+' input_thresh_fcst = ' ~ input_thresh_fcst ~ '\n' %}
+ {{metplus_macros.print_err_and_quit(error_msg)}}
+ {%- endif %}
+ {%- set indx_input_thresh_fcst = valid_threshes_fcst.index(input_thresh_fcst) %}
+ {%- set threshes_fcst = [valid_threshes_fcst[indx_input_thresh_fcst]] %}
+
+ {%- endif %}
+{#-
+If threshes_fcst is no longer set to its default value of an empty list,
+then it was set above to a non-empty list of thresholds. In that case,
+write the forecast thresholds into the METplus configuration file, then
+reset threshes_fcst to its default value so that thresholds for the next
+field are processed correctly.
+#}
+ {%- if (threshes_fcst != []) %}
+ENS_VAR{{ns.var_count}}_THRESH = {{threshes_fcst|join(', ')}}
+ {%- endif %}
+ {%- set threshes_fcst = [] %}
+
+ {%- endif %}
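[Editor's note, not part of the patch: the threshold-selection logic above reduces to three cases. A Python sketch with made-up data, mirroring the macro's index-based lookup for a specific threshold:]

```python
# 'none' -> no thresholds; 'all' -> every valid threshold;
# a specific value -> must exist in the valid list, reduced to a 1-element list.
def select_threshes(input_thresh_fcst, valid_threshes_fcst):
    if input_thresh_fcst == "none":
        return []
    if input_thresh_fcst == "all":
        return list(valid_threshes_fcst)
    if input_thresh_fcst not in valid_threshes_fcst:
        raise ValueError(f"{input_thresh_fcst!r} not in {valid_threshes_fcst}")
    indx = valid_threshes_fcst.index(input_thresh_fcst)  # mirrors the Jinja index step
    return [valid_threshes_fcst[indx]]

valid = ["ge288", "ge293", "ge298"]
print(select_threshes("all", valid))    # ['ge288', 'ge293', 'ge298']
print(select_threshes("ge293", valid))  # ['ge293']
```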
+
+{#-
+Set forecast field options.
+#}
+ {%- set opts_indent_len = 19 %}
+ {%- if (ns.var_count > 9) and (ns.var_count <= 99) %}
+ {%- set opts_indent_len = opts_indent_len + 1 %}
+ {%- elif (ns.var_count > 99) and (ns.var_count <= 999) %}
+ {%- set opts_indent_len = opts_indent_len + 2 %}
+ {%- elif (ns.var_count > 999) %}
+ {%- set opts_indent_len = opts_indent_len + 3 %}
+ {%- endif %}
+ {%- set opts_indent = ' '*opts_indent_len %}
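[Editor's note, not part of the patch: the digit-counting above keeps continuation lines aligned under "ENS_VARn_OPTIONS = ", whose prefix is 19 characters for a one-digit counter. A compact Python sketch of the same rule:]

```python
# One extra space per extra digit in the variable counter.
def opts_indent(var_count: int, base: int = 19) -> str:
    return " " * (base + len(str(var_count)) - 1)

assert len("ENS_VAR1_OPTIONS = ") == 19
assert len(opts_indent(7)) == 19     # 1-digit counter
assert len(opts_indent(42)) == 20    # 2-digit counter
assert len(opts_indent(123)) == 21   # 3-digit counter
```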
+
+ {%- if input_field_group == 'RETOP' %}
+
+ {%- if field_fcst == 'RETOP' %}
+ENS_VAR{{ns.var_count}}_OPTIONS = convert(x) = x * 3.28084 * 0.001; ;; Convert from meters to kilofeet.
+ {%- endif %}
+
+ {%- elif input_field_group == 'ADPSFC' %}
+
+ {%- if field_fcst == 'HGT' %}
+ENS_VAR{{ns.var_count}}_OPTIONS = GRIB_lvl_typ = 215;
+{{opts_indent}}desc = "CEILING";
+ {%- elif field_fcst == 'TCDC' %}
+ENS_VAR{{ns.var_count}}_OPTIONS = GRIB_lvl_typ = 200;
+{{opts_indent}}GRIB2_ipdtmpl_index=[27];
+{{opts_indent}}GRIB2_ipdtmpl_val=[255];
+{{opts_indent}}interp = { type = [ { method = NEAREST; width = 1; } ]; }
+ {%- elif field_fcst == 'VIS' %}
+ENS_VAR{{ns.var_count}}_OPTIONS = interp = { type = [ { method = NEAREST; width = 1; } ]; }
+ {%- elif field_fcst == 'WIND' %}
+ENS_VAR{{ns.var_count}}_OPTIONS = GRIB2_pdt = 0; ;; Derive instantaneous 10-m wind from U/V components, overriding max 10-m wind.
+ {%- endif %}
+
+ {%- elif input_field_group == 'ADPUPA' %}
+
+ {%- if field_fcst == 'CAPE' %}
+ENS_VAR{{ns.var_count}}_OPTIONS = cnt_thresh = [ >0 ];
+ {%- endif %}
+
+ {%- endif %}
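+{#-
+For example, with ns.var_count = 4 the ADPSFC TCDC branch above renders
+as follows (cf. the original GenEnsProd_ADPSFC.conf):
+
+ENS_VAR4_OPTIONS = GRIB_lvl_typ = 200;
+                   GRIB2_ipdtmpl_index=[27];
+                   GRIB2_ipdtmpl_val=[255];
+                   interp = { type = [ { method = NEAREST; width = 1; } ]; }
+#}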
+{#-
+Print out a newline to separate the settings for the current field from
+those for the next field.
+#}
+ {{- '\n' }}
+
+ {%- endif %}
+
+ {%- endfor %}
+{%- endfor %}
+###
+# {{MetplusToolName}}
+###
+
+# {{METPLUS_TOOL_NAME}}_REGRID_TO_GRID = NONE
+# {{METPLUS_TOOL_NAME}}_REGRID_METHOD = NEAREST
+# {{METPLUS_TOOL_NAME}}_REGRID_WIDTH = 1
+# {{METPLUS_TOOL_NAME}}_REGRID_VLD_THRESH = 0.5
+# {{METPLUS_TOOL_NAME}}_REGRID_SHAPE = SQUARE
+
+# {{METPLUS_TOOL_NAME}}_CENSOR_THRESH =
+# {{METPLUS_TOOL_NAME}}_CENSOR_VAL =
+# {{METPLUS_TOOL_NAME}}_CAT_THRESH =
+# {{METPLUS_TOOL_NAME}}_NC_VAR_STR =
+
+# Threshold for ratio of valid files to expected files to allow app to run
+{{METPLUS_TOOL_NAME}}_ENS_THRESH = 0.05
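+# Note: with 0.05, the tool still runs as long as at least 5% of the
+# expected ensemble member files are present and valid.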
+
+{{METPLUS_TOOL_NAME}}_NBRHD_PROB_WIDTH = 27
+{{METPLUS_TOOL_NAME}}_NBRHD_PROB_SHAPE = CIRCLE
+{{METPLUS_TOOL_NAME}}_NBRHD_PROB_VLD_THRESH = 0.0
+
+# {{METPLUS_TOOL_NAME}}_NMEP_SMOOTH_VLD_THRESH = 0.0
+# {{METPLUS_TOOL_NAME}}_NMEP_SMOOTH_SHAPE = CIRCLE
+# {{METPLUS_TOOL_NAME}}_NMEP_SMOOTH_GAUSSIAN_DX = 81.27
+# {{METPLUS_TOOL_NAME}}_NMEP_SMOOTH_GAUSSIAN_RADIUS = 120
+# {{METPLUS_TOOL_NAME}}_NMEP_SMOOTH_METHOD = GAUSSIAN
+# {{METPLUS_TOOL_NAME}}_NMEP_SMOOTH_WIDTH = 1
+
+# {{METPLUS_TOOL_NAME}}_CLIMO_MEAN_FILE_NAME =
+# {{METPLUS_TOOL_NAME}}_CLIMO_MEAN_FIELD =
+# {{METPLUS_TOOL_NAME}}_CLIMO_MEAN_REGRID_METHOD =
+# {{METPLUS_TOOL_NAME}}_CLIMO_MEAN_REGRID_WIDTH =
+# {{METPLUS_TOOL_NAME}}_CLIMO_MEAN_REGRID_VLD_THRESH =
+# {{METPLUS_TOOL_NAME}}_CLIMO_MEAN_REGRID_SHAPE =
+# {{METPLUS_TOOL_NAME}}_CLIMO_MEAN_TIME_INTERP_METHOD =
+# {{METPLUS_TOOL_NAME}}_CLIMO_MEAN_MATCH_MONTH =
+# {{METPLUS_TOOL_NAME}}_CLIMO_MEAN_DAY_INTERVAL = 31
+# {{METPLUS_TOOL_NAME}}_CLIMO_MEAN_HOUR_INTERVAL = 6
+
+# {{METPLUS_TOOL_NAME}}_CLIMO_STDEV_FILE_NAME =
+# {{METPLUS_TOOL_NAME}}_CLIMO_STDEV_FIELD =
+# {{METPLUS_TOOL_NAME}}_CLIMO_STDEV_REGRID_METHOD =
+# {{METPLUS_TOOL_NAME}}_CLIMO_STDEV_REGRID_WIDTH =
+# {{METPLUS_TOOL_NAME}}_CLIMO_STDEV_REGRID_VLD_THRESH =
+# {{METPLUS_TOOL_NAME}}_CLIMO_STDEV_REGRID_SHAPE =
+# {{METPLUS_TOOL_NAME}}_CLIMO_STDEV_TIME_INTERP_METHOD =
+# {{METPLUS_TOOL_NAME}}_CLIMO_STDEV_MATCH_MONTH =
+# {{METPLUS_TOOL_NAME}}_CLIMO_STDEV_DAY_INTERVAL = 31
+# {{METPLUS_TOOL_NAME}}_CLIMO_STDEV_HOUR_INTERVAL = 6
+
+{{METPLUS_TOOL_NAME}}_ENSEMBLE_FLAG_LATLON = TRUE
+{{METPLUS_TOOL_NAME}}_ENSEMBLE_FLAG_MEAN = TRUE
+{{METPLUS_TOOL_NAME}}_ENSEMBLE_FLAG_STDEV = TRUE
+{{METPLUS_TOOL_NAME}}_ENSEMBLE_FLAG_MINUS = FALSE
+{{METPLUS_TOOL_NAME}}_ENSEMBLE_FLAG_PLUS = FALSE
+{{METPLUS_TOOL_NAME}}_ENSEMBLE_FLAG_MIN = FALSE
+{{METPLUS_TOOL_NAME}}_ENSEMBLE_FLAG_MAX = FALSE
+{{METPLUS_TOOL_NAME}}_ENSEMBLE_FLAG_RANGE = TRUE
+{{METPLUS_TOOL_NAME}}_ENSEMBLE_FLAG_VLD_COUNT = TRUE
+{{METPLUS_TOOL_NAME}}_ENSEMBLE_FLAG_FREQUENCY = TRUE
+{{METPLUS_TOOL_NAME}}_ENSEMBLE_FLAG_NEP = TRUE
+{{METPLUS_TOOL_NAME}}_ENSEMBLE_FLAG_NMEP = TRUE
+{{METPLUS_TOOL_NAME}}_ENSEMBLE_FLAG_CLIMO = FALSE
+{{METPLUS_TOOL_NAME}}_ENSEMBLE_FLAG_CLIMO_CDF = FALSE
+
+# {{METPLUS_TOOL_NAME}}_ENS_MEMBER_IDS =
+# {{METPLUS_TOOL_NAME}}_CONTROL_ID =
diff --git a/parm/metplus/GenEnsProd_ADPSFC.conf b/parm/metplus/GenEnsProd_ADPSFC.conf
deleted file mode 100644
index cb253f575b..0000000000
--- a/parm/metplus/GenEnsProd_ADPSFC.conf
+++ /dev/null
@@ -1,219 +0,0 @@
-# GenEnsProd METplus Configuration
-
-[config]
-
-# List of applications (tools) to run.
-PROCESS_LIST = GenEnsProd
-
-# time looping - options are INIT, VALID, RETRO, and REALTIME
-# If set to INIT or RETRO:
-# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set
-# If set to VALID or REALTIME:
-# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set
-LOOP_BY = INIT
-
-# Format of INIT_BEG and INIT_END using % items
-# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc.
-# see www.strftime.org for more information
-# %Y%m%d%H expands to YYYYMMDDHH
-INIT_TIME_FMT = %Y%m%d%H
-
-# Start time for METplus run - must match INIT_TIME_FMT
-INIT_BEG = {{cdate}}
-
-# End time for METplus run - must match INIT_TIME_FMT
-INIT_END = {{cdate}}
-
-# Increment between METplus runs (in seconds if no units are specified).
-# Must be >= 60 seconds.
-INIT_INCREMENT = 3600
-
-# List of forecast leads to process for each run time (init or valid)
-# In hours if units are not specified
-# If unset, defaults to 0 (don't loop through forecast leads)
-LEAD_SEQ = {{fhr_list}}
-#
-# Order of loops to process data - Options are times, processes
-# Not relevant if only one item is in the PROCESS_LIST
-# times = run all wrappers in the PROCESS_LIST for a single run time, then
-# increment the run time and run all wrappers again until all times have
-# been evaluated.
-# processes = run the first wrapper in the PROCESS_LIST for all times
-# specified, then repeat for the next item in the PROCESS_LIST until all
-# wrappers have been run
-#
-LOOP_ORDER = times
-#
-# Specify the name of the METplus log file.
-#
-LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}}
-#
-# Specify the location and name of the final METplus conf file.
-#
-METPLUS_CONF = {GEN_ENS_PROD_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}}
-#
-# Name to identify model (forecast) data in output.
-#
-MODEL = {{vx_fcst_model_name}}
-
-GEN_ENS_PROD_DESC = NA
-#
-# Name to identify observation data in output.
-#
-OBTYPE = {{obtype}}
-#
-# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud.
-#
-LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}}
-
-###
-# File I/O
-###
-
-#
-# Forecast model input directory for GenEnsProd.
-#
-GEN_ENS_PROD_INPUT_DIR = {{fcst_input_dir}}
-#
-# Template for forecast input to GenEnsProd relative to
-# GEN_ENS_PROD_INPUT_DIR.
-#
-# Note that this can be a comma-separated list of ensemble members or a
-# single entry; filename wildcard characters (? or *) may be used.
-#
-GEN_ENS_PROD_INPUT_TEMPLATE = {{fcst_input_fn_template}}
-
-# GEN_ENS_PROD_CTRL_INPUT_DIR = {INPUT_BASE}
-# GEN_ENS_PROD_CTRL_INPUT_TEMPLATE =
-# {init?fmt=%Y%m%d%H}/mem1/postprd/{ENV[NET]}.t{init?fmt=%H}z.bgdawpf{lead?fmt=%HHH}.tm{init?fmt=%H}.grib2
-
-#
-# Output directory for GenEnsProd.
-#
-# OUTPUT_BASE apparently has to be set to something; it cannot be left
-# to its default value. But it is not explicitly used elsewhere in this
-# configuration file.
-#
-OUTPUT_BASE = {{output_base}}
-GEN_ENS_PROD_OUTPUT_DIR = {{output_dir}}
-#
-# Template for output from GenEnsProd relative to GEN_ENS_PROD_OUTPUT_DIR.
-#
-GEN_ENS_PROD_OUTPUT_TEMPLATE = gen_ens_prod_{MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE}_{lead?fmt=%H%M%S}L_{valid?fmt=%Y%m%d_%H%M%S}V.nc
-#
-# Directory for staging data.
-#
-STAGING_DIR = {{staging_dir}}
-#
-# There are n ensemble members, but one is used as the control, so specify n-1 members.
-#
-GEN_ENS_PROD_N_MEMBERS = {{num_ens_members}}
-
-###
-# Field Info
-###
-#
-# Ensemble variables and levels as specified in the ens field dictionary
-# of the MET configuration file. Specify as ENS_VARn_NAME, ENS_VARn_LEVELS,
-# (optional) ENS_VARn_OPTION
-#
-ENS_VAR1_NAME = TMP
-ENS_VAR1_LEVELS = Z02
-ENS_VAR1_THRESH = ge268, ge273, ge278, ge293, ge298, ge303
-
-ENS_VAR2_NAME = DPT
-ENS_VAR2_LEVELS = Z2
-ENS_VAR2_THRESH = ge263, ge268, ge273, ge288, ge293, ge298
-
-ENS_VAR3_NAME = WIND
-ENS_VAR3_LEVELS = Z10
-ENS_VAR3_THRESH = ge5, ge10, ge15
-ENS_VAR3_OPTIONS = GRIB2_pdt = 0; ;; Derive instantaneous 10-m wind from U/V components, overriding max 10-m wind.
-
-ENS_VAR4_NAME = TCDC
-ENS_VAR4_LEVELS = L0
-ENS_VAR4_THRESH = lt25, gt75
-ENS_VAR4_OPTIONS = GRIB_lvl_typ = 200;
- GRIB2_ipdtmpl_index=[27];
- GRIB2_ipdtmpl_val=[255];
- interp = { type = [ { method = NEAREST; width = 1; } ]; }
-
-ENS_VAR5_NAME = VIS
-ENS_VAR5_LEVELS = L0
-ENS_VAR5_THRESH = lt1609, lt8045, ge8045
-ENS_VAR5_OPTIONS = interp = { type = [ { method = NEAREST; width = 1; } ]; }
-
-ENS_VAR6_NAME = HGT
-ENS_VAR6_LEVELS = L0
-ENS_VAR6_THRESH = lt152, lt1520, ge914
-ENS_VAR6_OPTIONS = GRIB_lvl_typ = 215;
- desc = "CEILING";
-
-###
-# GenEnsProd
-###
-
-# GEN_ENS_PROD_REGRID_TO_GRID = NONE
-# GEN_ENS_PROD_REGRID_METHOD = NEAREST
-# GEN_ENS_PROD_REGRID_WIDTH = 1
-# GEN_ENS_PROD_REGRID_VLD_THRESH = 0.5
-# GEN_ENS_PROD_REGRID_SHAPE = SQUARE
-
-# GEN_ENS_PROD_CENSOR_THRESH =
-# GEN_ENS_PROD_CENSOR_VAL =
-# GEN_ENS_PROD_CAT_THRESH =
-# GEN_ENS_PROD_NC_VAR_STR =
-
-# Threshold for ratio of valid files to expected files to allow app to run
-GEN_ENS_PROD_ENS_THRESH = 0.05
-
-GEN_ENS_PROD_NBRHD_PROB_WIDTH = 27
-GEN_ENS_PROD_NBRHD_PROB_SHAPE = CIRCLE
-GEN_ENS_PROD_NBRHD_PROB_VLD_THRESH = 0.0
-
-# GEN_ENS_PROD_NMEP_SMOOTH_VLD_THRESH = 0.0
-# GEN_ENS_PROD_NMEP_SMOOTH_SHAPE = CIRCLE
-# GEN_ENS_PROD_NMEP_SMOOTH_GAUSSIAN_DX = 81.27
-# GEN_ENS_PROD_NMEP_SMOOTH_GAUSSIAN_RADIUS = 120
-# GEN_ENS_PROD_NMEP_SMOOTH_METHOD = GAUSSIAN
-# GEN_ENS_PROD_NMEP_SMOOTH_WIDTH = 1
-
-# GEN_ENS_PROD_CLIMO_MEAN_FILE_NAME =
-# GEN_ENS_PROD_CLIMO_MEAN_FIELD =
-# GEN_ENS_PROD_CLIMO_MEAN_REGRID_METHOD =
-# GEN_ENS_PROD_CLIMO_MEAN_REGRID_WIDTH =
-# GEN_ENS_PROD_CLIMO_MEAN_REGRID_VLD_THRESH =
-# GEN_ENS_PROD_CLIMO_MEAN_REGRID_SHAPE =
-# GEN_ENS_PROD_CLIMO_MEAN_TIME_INTERP_METHOD =
-# GEN_ENS_PROD_CLIMO_MEAN_MATCH_MONTH =
-# GEN_ENS_PROD_CLIMO_MEAN_DAY_INTERVAL = 31
-# GEN_ENS_PROD_CLIMO_MEAN_HOUR_INTERVAL = 6
-
-# GEN_ENS_PROD_CLIMO_STDEV_FILE_NAME =
-# GEN_ENS_PROD_CLIMO_STDEV_FIELD =
-# GEN_ENS_PROD_CLIMO_STDEV_REGRID_METHOD =
-# GEN_ENS_PROD_CLIMO_STDEV_REGRID_WIDTH =
-# GEN_ENS_PROD_CLIMO_STDEV_REGRID_VLD_THRESH =
-# GEN_ENS_PROD_CLIMO_STDEV_REGRID_SHAPE =
-# GEN_ENS_PROD_CLIMO_STDEV_TIME_INTERP_METHOD =
-# GEN_ENS_PROD_CLIMO_STDEV_MATCH_MONTH =
-# GEN_ENS_PROD_CLIMO_STDEV_DAY_INTERVAL = 31
-# GEN_ENS_PROD_CLIMO_STDEV_HOUR_INTERVAL = 6
-
-GEN_ENS_PROD_ENSEMBLE_FLAG_LATLON = TRUE
-GEN_ENS_PROD_ENSEMBLE_FLAG_MEAN = TRUE
-GEN_ENS_PROD_ENSEMBLE_FLAG_STDEV = TRUE
-GEN_ENS_PROD_ENSEMBLE_FLAG_MINUS = FALSE
-GEN_ENS_PROD_ENSEMBLE_FLAG_PLUS = FALSE
-GEN_ENS_PROD_ENSEMBLE_FLAG_MIN = FALSE
-GEN_ENS_PROD_ENSEMBLE_FLAG_MAX = FALSE
-GEN_ENS_PROD_ENSEMBLE_FLAG_RANGE = TRUE
-GEN_ENS_PROD_ENSEMBLE_FLAG_VLD_COUNT = TRUE
-GEN_ENS_PROD_ENSEMBLE_FLAG_FREQUENCY = TRUE
-GEN_ENS_PROD_ENSEMBLE_FLAG_NEP = TRUE
-GEN_ENS_PROD_ENSEMBLE_FLAG_NMEP = TRUE
-GEN_ENS_PROD_ENSEMBLE_FLAG_CLIMO = FALSE
-GEN_ENS_PROD_ENSEMBLE_FLAG_CLIMO_CDF = FALSE
-
-# GEN_ENS_PROD_ENS_MEMBER_IDS =
-# GEN_ENS_PROD_CONTROL_ID =
diff --git a/parm/metplus/GenEnsProd_ADPUPA.conf b/parm/metplus/GenEnsProd_ADPUPA.conf
deleted file mode 100644
index 863427752f..0000000000
--- a/parm/metplus/GenEnsProd_ADPUPA.conf
+++ /dev/null
@@ -1,236 +0,0 @@
-# GenEnsProd METplus Configuration
-
-[config]
-
-# List of applications (tools) to run.
-PROCESS_LIST = GenEnsProd
-
-# time looping - options are INIT, VALID, RETRO, and REALTIME
-# If set to INIT or RETRO:
-# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set
-# If set to VALID or REALTIME:
-# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set
-LOOP_BY = INIT
-
-# Format of INIT_BEG and INIT_END using % items
-# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc.
-# see www.strftime.org for more information
-# %Y%m%d%H expands to YYYYMMDDHH
-INIT_TIME_FMT = %Y%m%d%H
-
-# Start time for METplus run - must match INIT_TIME_FMT
-INIT_BEG = {{cdate}}
-
-# End time for METplus run - must match INIT_TIME_FMT
-INIT_END = {{cdate}}
-
-# Increment between METplus runs (in seconds if no units are specified).
-# Must be >= 60 seconds.
-INIT_INCREMENT = 3600
-
-# List of forecast leads to process for each run time (init or valid)
-# In hours if units are not specified
-# If unset, defaults to 0 (don't loop through forecast leads)
-LEAD_SEQ = {{fhr_list}}
-#
-# Order of loops to process data - Options are times, processes
-# Not relevant if only one item is in the PROCESS_LIST
-# times = run all wrappers in the PROCESS_LIST for a single run time, then
-# increment the run time and run all wrappers again until all times have
-# been evaluated.
-# processes = run the first wrapper in the PROCESS_LIST for all times
-# specified, then repeat for the next item in the PROCESS_LIST until all
-# wrappers have been run
-#
-LOOP_ORDER = times
-#
-# Specify the name of the METplus log file.
-#
-LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}}
-#
-# Specify the location and name of the final METplus conf file.
-#
-METPLUS_CONF = {GEN_ENS_PROD_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}}
-#
-# Name to identify model (forecast) data in output.
-#
-MODEL = {{vx_fcst_model_name}}
-
-GEN_ENS_PROD_DESC = NA
-#
-# Name to identify observation data in output.
-#
-OBTYPE = {{obtype}}
-#
-# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud.
-#
-LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}}
-
-###
-# File I/O
-###
-
-#
-# Forecast model input directory for GenEnsProd.
-#
-GEN_ENS_PROD_INPUT_DIR = {{fcst_input_dir}}
-#
-# Template for forecast input to GenEnsProd relative to
-# GEN_ENS_PROD_INPUT_DIR.
-#
-# Note that this can be a comma-separated list of ensemble members or a
-# single entry; filename wildcard characters (? or *) may be used.
-#
-GEN_ENS_PROD_INPUT_TEMPLATE = {{fcst_input_fn_template}}
-
-# GEN_ENS_PROD_CTRL_INPUT_DIR = {INPUT_BASE}
-# GEN_ENS_PROD_CTRL_INPUT_TEMPLATE =
-# {init?fmt=%Y%m%d%H}/mem1/postprd/{ENV[NET]}.t{init?fmt=%H}z.bgdawpf{lead?fmt=%HHH}.tm{init?fmt=%H}.grib2
-
-#
-# Output directory for GenEnsProd.
-#
-# OUTPUT_BASE apparently has to be set to something; it cannot be left
-# to its default value. But it is not explicitly used elsewhere in this
-# configuration file.
-#
-OUTPUT_BASE = {{output_base}}
-GEN_ENS_PROD_OUTPUT_DIR = {{output_dir}}
-#
-# Template for output from GenEnsProd relative to GEN_ENS_PROD_OUTPUT_DIR.
-#
-GEN_ENS_PROD_OUTPUT_TEMPLATE = gen_ens_prod_{MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE}_{lead?fmt=%H%M%S}L_{valid?fmt=%Y%m%d_%H%M%S}V.nc
-#
-# Directory for staging data.
-#
-STAGING_DIR = {{staging_dir}}
-#
-# There are n ensemble members, but one is used as the control, so specify n-1 members.
-#
-GEN_ENS_PROD_N_MEMBERS = {{num_ens_members}}
-
-###
-# Field Info
-###
-#
-# Ensemble variables and levels as specified in the ens field dictionary
-# of the MET configuration file. Specify as ENS_VARn_NAME, ENS_VARn_LEVELS,
-# (optional) ENS_VARn_OPTION
-#
-ENS_VAR1_NAME = TMP
-ENS_VAR1_LEVELS = P850
-ENS_VAR1_THRESH = ge288, ge293, ge298
-
-ENS_VAR2_NAME = TMP
-ENS_VAR2_LEVELS = P700
-ENS_VAR2_THRESH = ge273, ge278, ge283
-
-ENS_VAR3_NAME = TMP
-ENS_VAR3_LEVELS = P500
-ENS_VAR3_THRESH = ge258, ge263, ge268
-
-ENS_VAR4_NAME = DPT
-ENS_VAR4_LEVELS = P850
-ENS_VAR4_THRESH = ge273, ge278, ge283
-
-ENS_VAR5_NAME = DPT
-ENS_VAR5_LEVELS = P700
-ENS_VAR5_THRESH = ge263, ge268, ge273
-
-ENS_VAR6_NAME = WIND
-ENS_VAR6_LEVELS = P850
-ENS_VAR6_THRESH = ge5, ge10, ge15
-
-ENS_VAR7_NAME = WIND
-ENS_VAR7_LEVELS = P700
-ENS_VAR7_THRESH = ge10, ge15, ge20
-
-ENS_VAR8_NAME = WIND
-ENS_VAR8_LEVELS = P500
-ENS_VAR8_THRESH = ge15, ge21, ge26
-
-ENS_VAR9_NAME = WIND
-ENS_VAR9_LEVELS = P250
-ENS_VAR9_THRESH = ge26, ge31, ge36, ge46, ge62
-
-ENS_VAR10_NAME = HGT
-ENS_VAR10_LEVELS = P500
-ENS_VAR10_THRESH = ge5400, ge5600, ge5880
-
-ENS_VAR11_NAME = CAPE
-ENS_VAR11_LEVELS = L0
-ENS_VAR11_THRESH = le1000, gt1000&<2500, gt2500&<4000, gt2500
-ENS_VAR11_OPTIONS = cnt_thresh = [ >0 ];
-
-ENS_VAR12_NAME = HPBL
-ENS_VAR12_LEVELS = Z0
-ENS_VAR12_THRESH = lt500, lt1500, gt1500
-
-###
-# GenEnsProd
-###
-
-# GEN_ENS_PROD_REGRID_TO_GRID = NONE
-# GEN_ENS_PROD_REGRID_METHOD = NEAREST
-# GEN_ENS_PROD_REGRID_WIDTH = 1
-# GEN_ENS_PROD_REGRID_VLD_THRESH = 0.5
-# GEN_ENS_PROD_REGRID_SHAPE = SQUARE
-
-# GEN_ENS_PROD_CENSOR_THRESH =
-# GEN_ENS_PROD_CENSOR_VAL =
-# GEN_ENS_PROD_CAT_THRESH =
-# GEN_ENS_PROD_NC_VAR_STR =
-
-# Threshold for ratio of valid files to expected files to allow app to run
-GEN_ENS_PROD_ENS_THRESH = 0.05
-
-GEN_ENS_PROD_NBRHD_PROB_WIDTH = 27
-GEN_ENS_PROD_NBRHD_PROB_SHAPE = CIRCLE
-GEN_ENS_PROD_NBRHD_PROB_VLD_THRESH = 0.0
-
-# GEN_ENS_PROD_NMEP_SMOOTH_VLD_THRESH = 0.0
-# GEN_ENS_PROD_NMEP_SMOOTH_SHAPE = CIRCLE
-# GEN_ENS_PROD_NMEP_SMOOTH_GAUSSIAN_DX = 81.27
-# GEN_ENS_PROD_NMEP_SMOOTH_GAUSSIAN_RADIUS = 120
-# GEN_ENS_PROD_NMEP_SMOOTH_METHOD = GAUSSIAN
-# GEN_ENS_PROD_NMEP_SMOOTH_WIDTH = 1
-
-# GEN_ENS_PROD_CLIMO_MEAN_FILE_NAME =
-# GEN_ENS_PROD_CLIMO_MEAN_FIELD =
-# GEN_ENS_PROD_CLIMO_MEAN_REGRID_METHOD =
-# GEN_ENS_PROD_CLIMO_MEAN_REGRID_WIDTH =
-# GEN_ENS_PROD_CLIMO_MEAN_REGRID_VLD_THRESH =
-# GEN_ENS_PROD_CLIMO_MEAN_REGRID_SHAPE =
-# GEN_ENS_PROD_CLIMO_MEAN_TIME_INTERP_METHOD =
-# GEN_ENS_PROD_CLIMO_MEAN_MATCH_MONTH =
-# GEN_ENS_PROD_CLIMO_MEAN_DAY_INTERVAL = 31
-# GEN_ENS_PROD_CLIMO_MEAN_HOUR_INTERVAL = 6
-
-# GEN_ENS_PROD_CLIMO_STDEV_FILE_NAME =
-# GEN_ENS_PROD_CLIMO_STDEV_FIELD =
-# GEN_ENS_PROD_CLIMO_STDEV_REGRID_METHOD =
-# GEN_ENS_PROD_CLIMO_STDEV_REGRID_WIDTH =
-# GEN_ENS_PROD_CLIMO_STDEV_REGRID_VLD_THRESH =
-# GEN_ENS_PROD_CLIMO_STDEV_REGRID_SHAPE =
-# GEN_ENS_PROD_CLIMO_STDEV_TIME_INTERP_METHOD =
-# GEN_ENS_PROD_CLIMO_STDEV_MATCH_MONTH =
-# GEN_ENS_PROD_CLIMO_STDEV_DAY_INTERVAL = 31
-# GEN_ENS_PROD_CLIMO_STDEV_HOUR_INTERVAL = 6
-
-GEN_ENS_PROD_ENSEMBLE_FLAG_LATLON = TRUE
-GEN_ENS_PROD_ENSEMBLE_FLAG_MEAN = TRUE
-GEN_ENS_PROD_ENSEMBLE_FLAG_STDEV = TRUE
-GEN_ENS_PROD_ENSEMBLE_FLAG_MINUS = FALSE
-GEN_ENS_PROD_ENSEMBLE_FLAG_PLUS = FALSE
-GEN_ENS_PROD_ENSEMBLE_FLAG_MIN = FALSE
-GEN_ENS_PROD_ENSEMBLE_FLAG_MAX = FALSE
-GEN_ENS_PROD_ENSEMBLE_FLAG_RANGE = TRUE
-GEN_ENS_PROD_ENSEMBLE_FLAG_VLD_COUNT = TRUE
-GEN_ENS_PROD_ENSEMBLE_FLAG_FREQUENCY = TRUE
-GEN_ENS_PROD_ENSEMBLE_FLAG_NEP = TRUE
-GEN_ENS_PROD_ENSEMBLE_FLAG_NMEP = TRUE
-GEN_ENS_PROD_ENSEMBLE_FLAG_CLIMO = FALSE
-GEN_ENS_PROD_ENSEMBLE_FLAG_CLIMO_CDF = FALSE
-
-# GEN_ENS_PROD_ENS_MEMBER_IDS =
-# GEN_ENS_PROD_CONTROL_ID =
diff --git a/parm/metplus/GenEnsProd_APCP.conf b/parm/metplus/GenEnsProd_APCP.conf
deleted file mode 100644
index 0d05843a87..0000000000
--- a/parm/metplus/GenEnsProd_APCP.conf
+++ /dev/null
@@ -1,191 +0,0 @@
-# GenEnsProd METplus Configuration
-
-[config]
-
-# List of applications (tools) to run.
-PROCESS_LIST = GenEnsProd
-
-# time looping - options are INIT, VALID, RETRO, and REALTIME
-# If set to INIT or RETRO:
-# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set
-# If set to VALID or REALTIME:
-# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set
-LOOP_BY = INIT
-
-# Format of INIT_BEG and INIT_END using % items
-# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc.
-# see www.strftime.org for more information
-# %Y%m%d%H expands to YYYYMMDDHH
-INIT_TIME_FMT = %Y%m%d%H
-
-# Start time for METplus run - must match INIT_TIME_FMT
-INIT_BEG = {{cdate}}
-
-# End time for METplus run - must match INIT_TIME_FMT
-INIT_END = {{cdate}}
-
-# Increment between METplus runs (in seconds if no units are specified).
-# Must be >= 60 seconds.
-INIT_INCREMENT = 3600
-
-# List of forecast leads to process for each run time (init or valid)
-# In hours if units are not specified
-# If unset, defaults to 0 (don't loop through forecast leads)
-LEAD_SEQ = {{fhr_list}}
-#
-# Order of loops to process data - Options are times, processes
-# Not relevant if only one item is in the PROCESS_LIST
-# times = run all wrappers in the PROCESS_LIST for a single run time, then
-# increment the run time and run all wrappers again until all times have
-# been evaluated.
-# processes = run the first wrapper in the PROCESS_LIST for all times
-# specified, then repeat for the next item in the PROCESS_LIST until all
-# wrappers have been run
-#
-LOOP_ORDER = times
-#
-# Specify the name of the METplus log file.
-#
-LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}}
-#
-# Specify the location and name of the final METplus conf file.
-#
-METPLUS_CONF = {GEN_ENS_PROD_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}}
-#
-# Name to identify model (forecast) data in output.
-#
-MODEL = {{vx_fcst_model_name}}
-
-GEN_ENS_PROD_DESC = NA
-#
-# Name to identify observation data in output.
-#
-OBTYPE = {{obtype}}
-#
-# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud.
-#
-LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}}
-
-###
-# File I/O
-###
-
-#
-# Forecast model input directory for GenEnsProd.
-#
-GEN_ENS_PROD_INPUT_DIR = {{fcst_input_dir}}
-#
-# Template for forecast input to GenEnsProd relative to
-# GEN_ENS_PROD_INPUT_DIR.
-#
-# Note that this can be a comma-separated list of ensemble members or a
-# single entry; filename wildcard characters (? or *) may be used.
-#
-GEN_ENS_PROD_INPUT_TEMPLATE = {{fcst_input_fn_template}}
-
-# GEN_ENS_PROD_CTRL_INPUT_DIR = {INPUT_BASE}
-# GEN_ENS_PROD_CTRL_INPUT_TEMPLATE =
-# {init?fmt=%Y%m%d%H}/mem1/postprd/{ENV[NET]}.t{init?fmt=%H}z.bgdawpf{lead?fmt=%HHH}.tm{init?fmt=%H}.grib2
-
-#
-# Output directory for GenEnsProd.
-#
-# OUTPUT_BASE apparently has to be set to something; it cannot be left
-# to its default value. But it is not explicitly used elsewhere in this
-# configuration file.
-#
-OUTPUT_BASE = {{output_base}}
-GEN_ENS_PROD_OUTPUT_DIR = {{output_dir}}
-#
-# Template for output from GenEnsProd relative to GEN_ENS_PROD_OUTPUT_DIR.
-#
-GEN_ENS_PROD_OUTPUT_TEMPLATE = gen_ens_prod_{MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE}_{lead?fmt=%H%M%S}L_{valid?fmt=%Y%m%d_%H%M%S}V.nc
-#
-# Directory for staging data.
-#
-STAGING_DIR = {{staging_dir}}
-#
-# There are n ensemble members, but one is used as the control, so specify n-1 members.
-#
-GEN_ENS_PROD_N_MEMBERS = {{num_ens_members}}
-
-###
-# Field Info
-###
-#
-# Ensemble variables and levels as specified in the ens field dictionary
-# of the MET configuration file. Specify as ENS_VARn_NAME, ENS_VARn_LEVELS,
-# (optional) ENS_VARn_OPTION
-#
-ENS_VAR1_NAME = {{fieldname_in_met_output}}_{{accum_hh}}
-ENS_VAR1_LEVELS = A{{accum_hh}}
-ENS_VAR1_THRESH = {{field_thresholds}}
-
-###
-# GenEnsProd
-###
-
-# GEN_ENS_PROD_REGRID_TO_GRID = NONE
-# GEN_ENS_PROD_REGRID_METHOD = NEAREST
-# GEN_ENS_PROD_REGRID_WIDTH = 1
-# GEN_ENS_PROD_REGRID_VLD_THRESH = 0.5
-# GEN_ENS_PROD_REGRID_SHAPE = SQUARE
-
-# GEN_ENS_PROD_CENSOR_THRESH =
-# GEN_ENS_PROD_CENSOR_VAL =
-# GEN_ENS_PROD_CAT_THRESH =
-# GEN_ENS_PROD_NC_VAR_STR =
-
-# Threshold for ratio of valid files to expected files to allow app to run
-GEN_ENS_PROD_ENS_THRESH = 0.05
-
-GEN_ENS_PROD_NBRHD_PROB_WIDTH = 27
-GEN_ENS_PROD_NBRHD_PROB_SHAPE = CIRCLE
-GEN_ENS_PROD_NBRHD_PROB_VLD_THRESH = 0.0
-
-# GEN_ENS_PROD_NMEP_SMOOTH_VLD_THRESH = 0.0
-# GEN_ENS_PROD_NMEP_SMOOTH_SHAPE = CIRCLE
-# GEN_ENS_PROD_NMEP_SMOOTH_GAUSSIAN_DX = 81.27
-# GEN_ENS_PROD_NMEP_SMOOTH_GAUSSIAN_RADIUS = 120
-# GEN_ENS_PROD_NMEP_SMOOTH_METHOD = GAUSSIAN
-# GEN_ENS_PROD_NMEP_SMOOTH_WIDTH = 1
-
-# GEN_ENS_PROD_CLIMO_MEAN_FILE_NAME =
-# GEN_ENS_PROD_CLIMO_MEAN_FIELD =
-# GEN_ENS_PROD_CLIMO_MEAN_REGRID_METHOD =
-# GEN_ENS_PROD_CLIMO_MEAN_REGRID_WIDTH =
-# GEN_ENS_PROD_CLIMO_MEAN_REGRID_VLD_THRESH =
-# GEN_ENS_PROD_CLIMO_MEAN_REGRID_SHAPE =
-# GEN_ENS_PROD_CLIMO_MEAN_TIME_INTERP_METHOD =
-# GEN_ENS_PROD_CLIMO_MEAN_MATCH_MONTH =
-# GEN_ENS_PROD_CLIMO_MEAN_DAY_INTERVAL = 31
-# GEN_ENS_PROD_CLIMO_MEAN_HOUR_INTERVAL = 6
-
-# GEN_ENS_PROD_CLIMO_STDEV_FILE_NAME =
-# GEN_ENS_PROD_CLIMO_STDEV_FIELD =
-# GEN_ENS_PROD_CLIMO_STDEV_REGRID_METHOD =
-# GEN_ENS_PROD_CLIMO_STDEV_REGRID_WIDTH =
-# GEN_ENS_PROD_CLIMO_STDEV_REGRID_VLD_THRESH =
-# GEN_ENS_PROD_CLIMO_STDEV_REGRID_SHAPE =
-# GEN_ENS_PROD_CLIMO_STDEV_TIME_INTERP_METHOD =
-# GEN_ENS_PROD_CLIMO_STDEV_MATCH_MONTH =
-# GEN_ENS_PROD_CLIMO_STDEV_DAY_INTERVAL = 31
-# GEN_ENS_PROD_CLIMO_STDEV_HOUR_INTERVAL = 6
-
-GEN_ENS_PROD_ENSEMBLE_FLAG_LATLON = TRUE
-GEN_ENS_PROD_ENSEMBLE_FLAG_MEAN = TRUE
-GEN_ENS_PROD_ENSEMBLE_FLAG_STDEV = TRUE
-GEN_ENS_PROD_ENSEMBLE_FLAG_MINUS = FALSE
-GEN_ENS_PROD_ENSEMBLE_FLAG_PLUS = FALSE
-GEN_ENS_PROD_ENSEMBLE_FLAG_MIN = FALSE
-GEN_ENS_PROD_ENSEMBLE_FLAG_MAX = FALSE
-GEN_ENS_PROD_ENSEMBLE_FLAG_RANGE = TRUE
-GEN_ENS_PROD_ENSEMBLE_FLAG_VLD_COUNT = TRUE
-GEN_ENS_PROD_ENSEMBLE_FLAG_FREQUENCY = TRUE
-GEN_ENS_PROD_ENSEMBLE_FLAG_NEP = TRUE
-GEN_ENS_PROD_ENSEMBLE_FLAG_NMEP = TRUE
-GEN_ENS_PROD_ENSEMBLE_FLAG_CLIMO = FALSE
-GEN_ENS_PROD_ENSEMBLE_FLAG_CLIMO_CDF = FALSE
-
-# GEN_ENS_PROD_ENS_MEMBER_IDS =
-# GEN_ENS_PROD_CONTROL_ID =
diff --git a/parm/metplus/GenEnsProd_ASNOW.conf b/parm/metplus/GenEnsProd_ASNOW.conf
deleted file mode 100644
index ea9dac02d9..0000000000
--- a/parm/metplus/GenEnsProd_ASNOW.conf
+++ /dev/null
@@ -1,192 +0,0 @@
-# GenEnsProd METplus Configuration
-
-[config]
-
-# List of applications (tools) to run.
-PROCESS_LIST = GenEnsProd
-
-# time looping - options are INIT, VALID, RETRO, and REALTIME
-# If set to INIT or RETRO:
-# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set
-# If set to VALID or REALTIME:
-# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set
-LOOP_BY = INIT
-
-# Format of INIT_BEG and INIT_END using % items
-# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc.
-# see www.strftime.org for more information
-# %Y%m%d%H expands to YYYYMMDDHH
-INIT_TIME_FMT = %Y%m%d%H
-
-# Start time for METplus run - must match INIT_TIME_FMT
-INIT_BEG = {{cdate}}
-
-# End time for METplus run - must match INIT_TIME_FMT
-INIT_END = {{cdate}}
-
-# Increment between METplus runs (in seconds if no units are specified).
-# Must be >= 60 seconds.
-INIT_INCREMENT = 3600
-
-# List of forecast leads to process for each run time (init or valid)
-# In hours if units are not specified
-# If unset, defaults to 0 (don't loop through forecast leads)
-LEAD_SEQ = {{fhr_list}}
-#
-# Order of loops to process data - Options are times, processes
-# Not relevant if only one item is in the PROCESS_LIST
-# times = run all wrappers in the PROCESS_LIST for a single run time, then
-# increment the run time and run all wrappers again until all times have
-# been evaluated.
-# processes = run the first wrapper in the PROCESS_LIST for all times
-# specified, then repeat for the next item in the PROCESS_LIST until all
-# wrappers have been run
-#
-LOOP_ORDER = times
-#
-# Specify the name of the METplus log file.
-#
-LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}}
-#
-# Specify the location and name of the final METplus conf file.
-#
-METPLUS_CONF = {GEN_ENS_PROD_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}}
-#
-# Name to identify model (forecast) data in output.
-#
-MODEL = {{vx_fcst_model_name}}
-
-GEN_ENS_PROD_DESC = NA
-#
-# Name to identify observation data in output.
-#
-OBTYPE = {{obtype}}
-#
-# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud.
-#
-LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}}
-
-###
-# File I/O
-###
-
-#
-# Forecast model input directory for GenEnsProd.
-#
-GEN_ENS_PROD_INPUT_DIR = {{fcst_input_dir}}
-#
-# Template for forecast input to GenEnsProd relative to
-# GEN_ENS_PROD_INPUT_DIR.
-#
-# Note that this can be a comma-separated list of ensemble members or a
-# single entry; filename wildcard characters (? or *) may be used.
-#
-GEN_ENS_PROD_INPUT_TEMPLATE = {{fcst_input_fn_template}}
-
-# GEN_ENS_PROD_CTRL_INPUT_DIR = {INPUT_BASE}
-# GEN_ENS_PROD_CTRL_INPUT_TEMPLATE =
-# {init?fmt=%Y%m%d%H}/mem1/postprd/{ENV[NET]}.t{init?fmt=%H}z.bgdawpf{lead?fmt=%HHH}.tm{init?fmt=%H}.grib2
-
-#
-# Output directory for GenEnsProd.
-#
-# OUTPUT_BASE apparently has to be set to something; it cannot be left
-# to its default value. But it is not explicitly used elsewhere in this
-# configuration file.
-#
-OUTPUT_BASE = {{output_base}}
-GEN_ENS_PROD_OUTPUT_DIR = {{output_dir}}
-#
-# Template for output from GenEnsProd relative to GEN_ENS_PROD_OUTPUT_DIR.
-#
-GEN_ENS_PROD_OUTPUT_TEMPLATE = gen_ens_prod_{MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE}_{lead?fmt=%H%M%S}L_{valid?fmt=%Y%m%d_%H%M%S}V.nc
-#
-# Directory for staging data.
-#
-STAGING_DIR = {{staging_dir}}
-#
-# There are n ensemble members, but one is used as the control, so specify n-1 members.
-#
-GEN_ENS_PROD_N_MEMBERS = {{num_ens_members}}
-
-###
-# Field Info
-###
-
-#
-# Ensemble variables and levels as specified in the ens field dictionary
-# of the MET configuration file. Specify as ENS_VARn_NAME, ENS_VARn_LEVELS,
-# (optional) ENS_VARn_OPTION
-#
-ENS_VAR1_NAME = {{fieldname_in_met_output}}_{{accum_hh}}
-ENS_VAR1_LEVELS = A{{accum_hh}}
-ENS_VAR1_THRESH = {{field_thresholds}}
-
-###
-# GenEnsProd
-###
-
-# GEN_ENS_PROD_REGRID_TO_GRID = NONE
-# GEN_ENS_PROD_REGRID_METHOD = NEAREST
-# GEN_ENS_PROD_REGRID_WIDTH = 1
-# GEN_ENS_PROD_REGRID_VLD_THRESH = 0.5
-# GEN_ENS_PROD_REGRID_SHAPE = SQUARE
-
-# GEN_ENS_PROD_CENSOR_THRESH =
-# GEN_ENS_PROD_CENSOR_VAL =
-# GEN_ENS_PROD_CAT_THRESH =
-# GEN_ENS_PROD_NC_VAR_STR =
-
-# Threshold for ratio of valid files to expected files to allow app to run
-GEN_ENS_PROD_ENS_THRESH = 0.05
-
-GEN_ENS_PROD_NBRHD_PROB_WIDTH = 27
-GEN_ENS_PROD_NBRHD_PROB_SHAPE = CIRCLE
-GEN_ENS_PROD_NBRHD_PROB_VLD_THRESH = 0.0
-
-# GEN_ENS_PROD_NMEP_SMOOTH_VLD_THRESH = 0.0
-# GEN_ENS_PROD_NMEP_SMOOTH_SHAPE = CIRCLE
-# GEN_ENS_PROD_NMEP_SMOOTH_GAUSSIAN_DX = 81.27
-# GEN_ENS_PROD_NMEP_SMOOTH_GAUSSIAN_RADIUS = 120
-# GEN_ENS_PROD_NMEP_SMOOTH_METHOD = GAUSSIAN
-# GEN_ENS_PROD_NMEP_SMOOTH_WIDTH = 1
-
-# GEN_ENS_PROD_CLIMO_MEAN_FILE_NAME =
-# GEN_ENS_PROD_CLIMO_MEAN_FIELD =
-# GEN_ENS_PROD_CLIMO_MEAN_REGRID_METHOD =
-# GEN_ENS_PROD_CLIMO_MEAN_REGRID_WIDTH =
-# GEN_ENS_PROD_CLIMO_MEAN_REGRID_VLD_THRESH =
-# GEN_ENS_PROD_CLIMO_MEAN_REGRID_SHAPE =
-# GEN_ENS_PROD_CLIMO_MEAN_TIME_INTERP_METHOD =
-# GEN_ENS_PROD_CLIMO_MEAN_MATCH_MONTH =
-# GEN_ENS_PROD_CLIMO_MEAN_DAY_INTERVAL = 31
-# GEN_ENS_PROD_CLIMO_MEAN_HOUR_INTERVAL = 6
-
-# GEN_ENS_PROD_CLIMO_STDEV_FILE_NAME =
-# GEN_ENS_PROD_CLIMO_STDEV_FIELD =
-# GEN_ENS_PROD_CLIMO_STDEV_REGRID_METHOD =
-# GEN_ENS_PROD_CLIMO_STDEV_REGRID_WIDTH =
-# GEN_ENS_PROD_CLIMO_STDEV_REGRID_VLD_THRESH =
-# GEN_ENS_PROD_CLIMO_STDEV_REGRID_SHAPE =
-# GEN_ENS_PROD_CLIMO_STDEV_TIME_INTERP_METHOD =
-# GEN_ENS_PROD_CLIMO_STDEV_MATCH_MONTH =
-# GEN_ENS_PROD_CLIMO_STDEV_DAY_INTERVAL = 31
-# GEN_ENS_PROD_CLIMO_STDEV_HOUR_INTERVAL = 6
-
-GEN_ENS_PROD_ENSEMBLE_FLAG_LATLON = TRUE
-GEN_ENS_PROD_ENSEMBLE_FLAG_MEAN = TRUE
-GEN_ENS_PROD_ENSEMBLE_FLAG_STDEV = TRUE
-GEN_ENS_PROD_ENSEMBLE_FLAG_MINUS = FALSE
-GEN_ENS_PROD_ENSEMBLE_FLAG_PLUS = FALSE
-GEN_ENS_PROD_ENSEMBLE_FLAG_MIN = FALSE
-GEN_ENS_PROD_ENSEMBLE_FLAG_MAX = FALSE
-GEN_ENS_PROD_ENSEMBLE_FLAG_RANGE = TRUE
-GEN_ENS_PROD_ENSEMBLE_FLAG_VLD_COUNT = TRUE
-GEN_ENS_PROD_ENSEMBLE_FLAG_FREQUENCY = TRUE
-GEN_ENS_PROD_ENSEMBLE_FLAG_NEP = TRUE
-GEN_ENS_PROD_ENSEMBLE_FLAG_NMEP = TRUE
-GEN_ENS_PROD_ENSEMBLE_FLAG_CLIMO = FALSE
-GEN_ENS_PROD_ENSEMBLE_FLAG_CLIMO_CDF = FALSE
-
-# GEN_ENS_PROD_ENS_MEMBER_IDS =
-# GEN_ENS_PROD_CONTROL_ID =
diff --git a/parm/metplus/GenEnsProd_REFC.conf b/parm/metplus/GenEnsProd_REFC.conf
deleted file mode 100644
index 553c23f69e..0000000000
--- a/parm/metplus/GenEnsProd_REFC.conf
+++ /dev/null
@@ -1,191 +0,0 @@
-# GenEnsProd METplus Configuration
-
-[config]
-
-# List of applications (tools) to run.
-PROCESS_LIST = GenEnsProd
-
-# time looping - options are INIT, VALID, RETRO, and REALTIME
-# If set to INIT or RETRO:
-# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set
-# If set to VALID or REALTIME:
-# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set
-LOOP_BY = INIT
-
-# Format of INIT_BEG and INIT_END using % items
-# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc.
-# see www.strftime.org for more information
-# %Y%m%d%H expands to YYYYMMDDHH
-INIT_TIME_FMT = %Y%m%d%H
-
-# Start time for METplus run - must match INIT_TIME_FMT
-INIT_BEG = {{cdate}}
-
-# End time for METplus run - must match INIT_TIME_FMT
-INIT_END = {{cdate}}
-
-# Increment between METplus runs (in seconds if no units are specified).
-# Must be >= 60 seconds.
-INIT_INCREMENT = 3600
-
-# List of forecast leads to process for each run time (init or valid)
-# In hours if units are not specified
-# If unset, defaults to 0 (don't loop through forecast leads)
-LEAD_SEQ = {{fhr_list}}
-#
-# Order of loops to process data - Options are times, processes
-# Not relevant if only one item is in the PROCESS_LIST
-# times = run all wrappers in the PROCESS_LIST for a single run time, then
-# increment the run time and run all wrappers again until all times have
-# been evaluated.
-# processes = run the first wrapper in the PROCESS_LIST for all times
-# specified, then repeat for the next item in the PROCESS_LIST until all
-# wrappers have been run
-#
-LOOP_ORDER = times
-#
-# Specify the name of the METplus log file.
-#
-LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}}
-#
-# Specify the location and name of the final METplus conf file.
-#
-METPLUS_CONF = {GEN_ENS_PROD_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}}
-#
-# Name to identify model (forecast) data in output.
-#
-MODEL = {{vx_fcst_model_name}}
-
-GEN_ENS_PROD_DESC = NA
-#
-# Name to identify observation data in output.
-#
-OBTYPE = {{obtype}}
-#
-# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud.
-#
-LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}}
-
-###
-# File I/O
-###
-
-#
-# Forecast model input directory for GenEnsProd.
-#
-GEN_ENS_PROD_INPUT_DIR = {{fcst_input_dir}}
-#
-# Template for forecast input to GenEnsProd relative to
-# GEN_ENS_PROD_INPUT_DIR.
-#
-# Note that this can be a comma-separated list of ensemble members or a
-# single entry; filename wildcard characters (? or *) may be used.
-#
-GEN_ENS_PROD_INPUT_TEMPLATE = {{fcst_input_fn_template}}
-
-# GEN_ENS_PROD_CTRL_INPUT_DIR = {INPUT_BASE}
-# GEN_ENS_PROD_CTRL_INPUT_TEMPLATE =
-# {init?fmt=%Y%m%d%H}/mem1/postprd/{ENV[NET]}.t{init?fmt=%H}z.bgdawpf{lead?fmt=%HHH}.tm{init?fmt=%H}.grib2
-
-#
-# Output directory for GenEnsProd.
-#
-# OUTPUT_BASE apparently has to be set to something; it cannot be left
-# to its default value. But it is not explicitly used elsewhere in this
-# configuration file.
-#
-OUTPUT_BASE = {{output_base}}
-GEN_ENS_PROD_OUTPUT_DIR = {{output_dir}}
-#
-# Template for output from GenEnsProd relative to GEN_ENS_PROD_OUTPUT_DIR.
-#
-GEN_ENS_PROD_OUTPUT_TEMPLATE = gen_ens_prod_{MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE}_{lead?fmt=%H%M%S}L_{valid?fmt=%Y%m%d_%H%M%S}V.nc
-#
-# Directory for staging data.
-#
-STAGING_DIR = {{staging_dir}}
-#
-# There are n ensemble members, but one is used as the control, so specify n-1 members.
-#
-GEN_ENS_PROD_N_MEMBERS = {{num_ens_members}}
-
-###
-# Field Info
-###
-#
-# Ensemble variables and levels as specified in the ens field dictionary
-# of the MET configuration file. Specify as ENS_VARn_NAME, ENS_VARn_LEVELS,
-# (optional) ENS_VARn_OPTION
-#
-ENS_VAR1_NAME = {{fieldname_in_fcst_input}}
-ENS_VAR1_LEVELS = L0
-ENS_VAR1_THRESH = {{field_thresholds}}
-
-###
-# GenEnsProd
-###
-
-# GEN_ENS_PROD_REGRID_TO_GRID = NONE
-# GEN_ENS_PROD_REGRID_METHOD = NEAREST
-# GEN_ENS_PROD_REGRID_WIDTH = 1
-# GEN_ENS_PROD_REGRID_VLD_THRESH = 0.5
-# GEN_ENS_PROD_REGRID_SHAPE = SQUARE
-
-# GEN_ENS_PROD_CENSOR_THRESH =
-# GEN_ENS_PROD_CENSOR_VAL =
-# GEN_ENS_PROD_CAT_THRESH =
-# GEN_ENS_PROD_NC_VAR_STR =
-
-# Threshold for ratio of valid files to expected files to allow app to run
-GEN_ENS_PROD_ENS_THRESH = 0.05
-
-GEN_ENS_PROD_NBRHD_PROB_WIDTH = 27
-GEN_ENS_PROD_NBRHD_PROB_SHAPE = CIRCLE
-GEN_ENS_PROD_NBRHD_PROB_VLD_THRESH = 0.0
-
-# GEN_ENS_PROD_NMEP_SMOOTH_VLD_THRESH = 0.0
-# GEN_ENS_PROD_NMEP_SMOOTH_SHAPE = CIRCLE
-# GEN_ENS_PROD_NMEP_SMOOTH_GAUSSIAN_DX = 81.27
-# GEN_ENS_PROD_NMEP_SMOOTH_GAUSSIAN_RADIUS = 120
-# GEN_ENS_PROD_NMEP_SMOOTH_METHOD = GAUSSIAN
-# GEN_ENS_PROD_NMEP_SMOOTH_WIDTH = 1
-
-# GEN_ENS_PROD_CLIMO_MEAN_FILE_NAME =
-# GEN_ENS_PROD_CLIMO_MEAN_FIELD =
-# GEN_ENS_PROD_CLIMO_MEAN_REGRID_METHOD =
-# GEN_ENS_PROD_CLIMO_MEAN_REGRID_WIDTH =
-# GEN_ENS_PROD_CLIMO_MEAN_REGRID_VLD_THRESH =
-# GEN_ENS_PROD_CLIMO_MEAN_REGRID_SHAPE =
-# GEN_ENS_PROD_CLIMO_MEAN_TIME_INTERP_METHOD =
-# GEN_ENS_PROD_CLIMO_MEAN_MATCH_MONTH =
-# GEN_ENS_PROD_CLIMO_MEAN_DAY_INTERVAL = 31
-# GEN_ENS_PROD_CLIMO_MEAN_HOUR_INTERVAL = 6
-
-# GEN_ENS_PROD_CLIMO_STDEV_FILE_NAME =
-# GEN_ENS_PROD_CLIMO_STDEV_FIELD =
-# GEN_ENS_PROD_CLIMO_STDEV_REGRID_METHOD =
-# GEN_ENS_PROD_CLIMO_STDEV_REGRID_WIDTH =
-# GEN_ENS_PROD_CLIMO_STDEV_REGRID_VLD_THRESH =
-# GEN_ENS_PROD_CLIMO_STDEV_REGRID_SHAPE =
-# GEN_ENS_PROD_CLIMO_STDEV_TIME_INTERP_METHOD =
-# GEN_ENS_PROD_CLIMO_STDEV_MATCH_MONTH =
-# GEN_ENS_PROD_CLIMO_STDEV_DAY_INTERVAL = 31
-# GEN_ENS_PROD_CLIMO_STDEV_HOUR_INTERVAL = 6
-
-GEN_ENS_PROD_ENSEMBLE_FLAG_LATLON = TRUE
-GEN_ENS_PROD_ENSEMBLE_FLAG_MEAN = TRUE
-GEN_ENS_PROD_ENSEMBLE_FLAG_STDEV = TRUE
-GEN_ENS_PROD_ENSEMBLE_FLAG_MINUS = FALSE
-GEN_ENS_PROD_ENSEMBLE_FLAG_PLUS = FALSE
-GEN_ENS_PROD_ENSEMBLE_FLAG_MIN = FALSE
-GEN_ENS_PROD_ENSEMBLE_FLAG_MAX = FALSE
-GEN_ENS_PROD_ENSEMBLE_FLAG_RANGE = TRUE
-GEN_ENS_PROD_ENSEMBLE_FLAG_VLD_COUNT = TRUE
-GEN_ENS_PROD_ENSEMBLE_FLAG_FREQUENCY = TRUE
-GEN_ENS_PROD_ENSEMBLE_FLAG_NEP = TRUE
-GEN_ENS_PROD_ENSEMBLE_FLAG_NMEP = TRUE
-GEN_ENS_PROD_ENSEMBLE_FLAG_CLIMO = FALSE
-GEN_ENS_PROD_ENSEMBLE_FLAG_CLIMO_CDF = FALSE
-
-# GEN_ENS_PROD_ENS_MEMBER_IDS =
-# GEN_ENS_PROD_CONTROL_ID =
diff --git a/parm/metplus/GenEnsProd_RETOP.conf b/parm/metplus/GenEnsProd_RETOP.conf
deleted file mode 100644
index 49e5e5c3b6..0000000000
--- a/parm/metplus/GenEnsProd_RETOP.conf
+++ /dev/null
@@ -1,192 +0,0 @@
-# GenEnsProd METplus Configuration
-
-[config]
-
-# List of applications (tools) to run.
-PROCESS_LIST = GenEnsProd
-
-# time looping - options are INIT, VALID, RETRO, and REALTIME
-# If set to INIT or RETRO:
-# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set
-# If set to VALID or REALTIME:
-# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set
-LOOP_BY = INIT
-
-# Format of INIT_BEG and INIT_END using % items
-# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc.
-# see www.strftime.org for more information
-# %Y%m%d%H expands to YYYYMMDDHH
-INIT_TIME_FMT = %Y%m%d%H
-
-# Start time for METplus run - must match INIT_TIME_FMT
-INIT_BEG = {{cdate}}
-
-# End time for METplus run - must match INIT_TIME_FMT
-INIT_END = {{cdate}}
-
-# Increment between METplus runs (in seconds if no units are specified).
-# Must be >= 60 seconds.
-INIT_INCREMENT = 3600
-
-# List of forecast leads to process for each run time (init or valid)
-# In hours if units are not specified
-# If unset, defaults to 0 (don't loop through forecast leads)
-LEAD_SEQ = {{fhr_list}}
-#
-# Order of loops to process data - Options are times, processes
-# Not relevant if only one item is in the PROCESS_LIST
-# times = run all wrappers in the PROCESS_LIST for a single run time, then
-# increment the run time and run all wrappers again until all times have
-# been evaluated.
-# processes = run the first wrapper in the PROCESS_LIST for all times
-# specified, then repeat for the next item in the PROCESS_LIST until all
-# wrappers have been run
-#
-LOOP_ORDER = times
-#
-# Specify the name of the METplus log file.
-#
-LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}}
-#
-# Specify the location and name of the final METplus conf file.
-#
-METPLUS_CONF = {GEN_ENS_PROD_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}}
-#
-# Name to identify model (forecast) data in output.
-#
-MODEL = {{vx_fcst_model_name}}
-
-GEN_ENS_PROD_DESC = NA
-#
-# Name to identify observation data in output.
-#
-OBTYPE = {{obtype}}
-#
-# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud.
-#
-LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}}
-
-###
-# File I/O
-###
-
-#
-# Forecast model input directory for GenEnsProd.
-#
-GEN_ENS_PROD_INPUT_DIR = {{fcst_input_dir}}
-#
-# Template for forecast input to GenEnsProd relative to
-# GEN_ENS_PROD_INPUT_DIR.
-#
-# Note that this can be a comma-separated list of ensemble members or a
-# single entry; filename wildcard characters (? or *) may be used.
-#
-GEN_ENS_PROD_INPUT_TEMPLATE = {{fcst_input_fn_template}}
-
-# GEN_ENS_PROD_CTRL_INPUT_DIR = {INPUT_BASE}
-# GEN_ENS_PROD_CTRL_INPUT_TEMPLATE =
-# {init?fmt=%Y%m%d%H}/mem1/postprd/{ENV[NET]}.t{init?fmt=%H}z.bgdawpf{lead?fmt=%HHH}.tm{init?fmt=%H}.grib2
-
-#
-# Output directory for GenEnsProd.
-#
-# OUTPUT_BASE apparently has to be set to something; it cannot be left
-# to its default value. But it is not explicitly used elsewhere in this
-# configuration file.
-#
-OUTPUT_BASE = {{output_base}}
-GEN_ENS_PROD_OUTPUT_DIR = {{output_dir}}
-#
-# Template for output from GenEnsProd relative to GEN_ENS_PROD_OUTPUT_DIR.
-#
-GEN_ENS_PROD_OUTPUT_TEMPLATE = gen_ens_prod_{MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE}_{lead?fmt=%H%M%S}L_{valid?fmt=%Y%m%d_%H%M%S}V.nc
-#
-# Directory for staging data.
-#
-STAGING_DIR = {{staging_dir}}
-#
-# There are n ensemble members, but one is used as the control, so specify n-1 members.
-#
-GEN_ENS_PROD_N_MEMBERS = {{num_ens_members}}
-
-###
-# Field Info
-###
-#
-# Ensemble variables and levels as specified in the ens field dictionary
-# of the MET configuration file. Specify as ENS_VARn_NAME, ENS_VARn_LEVELS,
-# (optional) ENS_VARn_OPTION
-#
-ENS_VAR1_NAME = {{fieldname_in_fcst_input}}
-ENS_VAR1_LEVELS = L0
-ENS_VAR1_THRESH = {{field_thresholds}}
-ENS_VAR1_OPTIONS = convert(x) = x * 3.28084 * 0.001; ;; Convert from meters to kilofeet.
-
-###
-# GenEnsProd
-###
-
-# GEN_ENS_PROD_REGRID_TO_GRID = NONE
-# GEN_ENS_PROD_REGRID_METHOD = NEAREST
-# GEN_ENS_PROD_REGRID_WIDTH = 1
-# GEN_ENS_PROD_REGRID_VLD_THRESH = 0.5
-# GEN_ENS_PROD_REGRID_SHAPE = SQUARE
-
-# GEN_ENS_PROD_CENSOR_THRESH =
-# GEN_ENS_PROD_CENSOR_VAL =
-# GEN_ENS_PROD_CAT_THRESH =
-# GEN_ENS_PROD_NC_VAR_STR =
-
-# Threshold for ratio of valid files to expected files to allow app to run
-GEN_ENS_PROD_ENS_THRESH = 0.05
-
-GEN_ENS_PROD_NBRHD_PROB_WIDTH = 27
-GEN_ENS_PROD_NBRHD_PROB_SHAPE = CIRCLE
-GEN_ENS_PROD_NBRHD_PROB_VLD_THRESH = 0.0
-
-# GEN_ENS_PROD_NMEP_SMOOTH_VLD_THRESH = 0.0
-# GEN_ENS_PROD_NMEP_SMOOTH_SHAPE = CIRCLE
-# GEN_ENS_PROD_NMEP_SMOOTH_GAUSSIAN_DX = 81.27
-# GEN_ENS_PROD_NMEP_SMOOTH_GAUSSIAN_RADIUS = 120
-# GEN_ENS_PROD_NMEP_SMOOTH_METHOD = GAUSSIAN
-# GEN_ENS_PROD_NMEP_SMOOTH_WIDTH = 1
-
-# GEN_ENS_PROD_CLIMO_MEAN_FILE_NAME =
-# GEN_ENS_PROD_CLIMO_MEAN_FIELD =
-# GEN_ENS_PROD_CLIMO_MEAN_REGRID_METHOD =
-# GEN_ENS_PROD_CLIMO_MEAN_REGRID_WIDTH =
-# GEN_ENS_PROD_CLIMO_MEAN_REGRID_VLD_THRESH =
-# GEN_ENS_PROD_CLIMO_MEAN_REGRID_SHAPE =
-# GEN_ENS_PROD_CLIMO_MEAN_TIME_INTERP_METHOD =
-# GEN_ENS_PROD_CLIMO_MEAN_MATCH_MONTH =
-# GEN_ENS_PROD_CLIMO_MEAN_DAY_INTERVAL = 31
-# GEN_ENS_PROD_CLIMO_MEAN_HOUR_INTERVAL = 6
-
-# GEN_ENS_PROD_CLIMO_STDEV_FILE_NAME =
-# GEN_ENS_PROD_CLIMO_STDEV_FIELD =
-# GEN_ENS_PROD_CLIMO_STDEV_REGRID_METHOD =
-# GEN_ENS_PROD_CLIMO_STDEV_REGRID_WIDTH =
-# GEN_ENS_PROD_CLIMO_STDEV_REGRID_VLD_THRESH =
-# GEN_ENS_PROD_CLIMO_STDEV_REGRID_SHAPE =
-# GEN_ENS_PROD_CLIMO_STDEV_TIME_INTERP_METHOD =
-# GEN_ENS_PROD_CLIMO_STDEV_MATCH_MONTH =
-# GEN_ENS_PROD_CLIMO_STDEV_DAY_INTERVAL = 31
-# GEN_ENS_PROD_CLIMO_STDEV_HOUR_INTERVAL = 6
-
-GEN_ENS_PROD_ENSEMBLE_FLAG_LATLON = TRUE
-GEN_ENS_PROD_ENSEMBLE_FLAG_MEAN = TRUE
-GEN_ENS_PROD_ENSEMBLE_FLAG_STDEV = TRUE
-GEN_ENS_PROD_ENSEMBLE_FLAG_MINUS = FALSE
-GEN_ENS_PROD_ENSEMBLE_FLAG_PLUS = FALSE
-GEN_ENS_PROD_ENSEMBLE_FLAG_MIN = FALSE
-GEN_ENS_PROD_ENSEMBLE_FLAG_MAX = FALSE
-GEN_ENS_PROD_ENSEMBLE_FLAG_RANGE = TRUE
-GEN_ENS_PROD_ENSEMBLE_FLAG_VLD_COUNT = TRUE
-GEN_ENS_PROD_ENSEMBLE_FLAG_FREQUENCY = TRUE
-GEN_ENS_PROD_ENSEMBLE_FLAG_NEP = TRUE
-GEN_ENS_PROD_ENSEMBLE_FLAG_NMEP = TRUE
-GEN_ENS_PROD_ENSEMBLE_FLAG_CLIMO = FALSE
-GEN_ENS_PROD_ENSEMBLE_FLAG_CLIMO_CDF = FALSE
-
-# GEN_ENS_PROD_ENS_MEMBER_IDS =
-# GEN_ENS_PROD_CONTROL_ID =
diff --git a/parm/metplus/GridStat_APCP.conf b/parm/metplus/GridStat_APCP.conf
deleted file mode 100644
index 51e5125951..0000000000
--- a/parm/metplus/GridStat_APCP.conf
+++ /dev/null
@@ -1,309 +0,0 @@
-# GridStat METplus Configuration
-
-[config]
-
-# List of applications (tools) to run.
-PROCESS_LIST = GridStat
-
-# time looping - options are INIT, VALID, RETRO, and REALTIME
-# If set to INIT or RETRO:
-# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set
-# If set to VALID or REALTIME:
-# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set
-LOOP_BY = INIT
-
-# Format of INIT_BEG and INIT_END using % items
-# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc.
-# see www.strftime.org for more information
-# %Y%m%d%H expands to YYYYMMDDHH
-INIT_TIME_FMT = %Y%m%d%H
-
-# Start time for METplus run - must match INIT_TIME_FMT
-INIT_BEG = {{cdate}}
-
-# End time for METplus run - must match INIT_TIME_FMT
-INIT_END = {{cdate}}
-
-# Increment between METplus runs (in seconds if no units are specified).
-# Must be >= 60 seconds.
-INIT_INCREMENT = 3600
-
-# List of forecast leads to process for each run time (init or valid)
-# In hours if units are not specified
-# If unset, defaults to 0 (don't loop through forecast leads)
-LEAD_SEQ = {{fhr_list}}
-#
-# Order of loops to process data - Options are times, processes
-# Not relevant if only one item is in the PROCESS_LIST
-# times = run all wrappers in the PROCESS_LIST for a single run time, then
-# increment the run time and run all wrappers again until all times have
-# been evaluated.
-# processes = run the first wrapper in the PROCESS_LIST for all times
-# specified, then repeat for the next item in the PROCESS_LIST until all
-# wrappers have been run
-#
-LOOP_ORDER = times
-#
-# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud.
-#
-LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}}
-#
-# Specify the name of the METplus log file.
-#
-LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}}
-#
-# Specify the location and name of the final METplus conf file.
-#
-METPLUS_CONF = {GRID_STAT_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}}
-#
-# Location of MET configuration file to pass to GridStat.
-#
-# References PARM_BASE, which is the location of the parm directory
-# corresponding to the ush directory of the run_metplus.py script that
-# is called or the value of the environment variable METPLUS_PARM_BASE
-# if set.
-#
-GRID_STAT_CONFIG_FILE = {PARM_BASE}/met_config/GridStatConfig_wrapped
-
-# grid to remap data. Value is set as the 'to_grid' variable in the 'regrid' dictionary
-# See MET User's Guide for more information
-GRID_STAT_REGRID_TO_GRID = FCST
-GRID_STAT_REGRID_VLD_THRESH = 0.5
-GRID_STAT_REGRID_METHOD = BUDGET
-GRID_STAT_REGRID_WIDTH = 2
-GRID_STAT_REGRID_SHAPE = SQUARE
-
-#GRID_STAT_INTERP_FIELD = BOTH
-#GRID_STAT_INTERP_VLD_THRESH = 1.0
-#GRID_STAT_INTERP_SHAPE = SQUARE
-#GRID_STAT_INTERP_TYPE_METHOD = NEAREST
-#GRID_STAT_INTERP_TYPE_WIDTH = 1
-
-#GRID_STAT_GRID_WEIGHT_FLAG =
-#
-# Name to identify model (forecast) data in output.
-#
-# The variable MODEL is recorded in the stat files, and the data in
-# these files is then plotted (e.g. using METViewer). Here, we add a
-# suffix to MODEL that identifies the forecast ensemble member. This
-# makes it easier to identify each curve.
-#
-MODEL = {{vx_fcst_model_name}}_{{ensmem_name}}
-#
-# Name to identify observation data in output.
-#
-OBTYPE = {{obtype}}
-#
-# Value to enter under the DESC column in the output stat file. Here,
-# we store the value of the original lead in this column, i.e. the lead
-# with zero corresponding to the actual start time of the forecast (which
-# is (cdate - time_lag)), not to cdate. This is just the lead in
-# LEAD_SEQ with the time lag (time_lag) of the current forecast member
-# added on.
-#
-# Uncomment this line only after upgrading to METplus 5.x.
-#GRID_STAT_DESC = {lead?fmt=%H%M%S?shift={{time_lag}}}
-GRID_STAT_DESC = NA
-
-# List of variables to compare in GridStat - FCST_VAR1 variables correspond
-# to OBS_VAR1 variables
-# Note [FCST/OBS/BOTH]_GRID_STAT_VAR_NAME can be used instead if different evaluations
-# are needed for different tools
-
-GRID_STAT_MET_CONFIG_OVERRIDES = cat_thresh = [NA]; cnt_thresh = [NA]; cnt_logic = UNION; wind_thresh = [NA]; wind_logic = UNION; ci_alpha = [0.05]; rank_corr_flag = FALSE;
-#
-# List of forecast and corresponding observation fields to process.
-#
-# Note on use of set_attr_lead and ensemble member time-lagging:
-# -------------------------------------------------------------
-# The set_attr_lead parameter appearing below in [FCST|OBS]_VAR_OPTIONS
-# specifies the lead to use both in naming of the output .stat and .nc
-# files and for setting the lead values contained in those files. This
-# option causes MET/METplus to use the lead values in the variable LEAD_SEQ
-# set above, which are the same for all ensemble forecast members (i.e.
-# regardless of whether members are time lagged with respect to the
-# nominal cycle date specified by cdate). If set_attr_lead were not
-# specified as below, then MET/METplus would get the lead from the input
-# forecast file, and that would in general differ from one ensemble member
-# to the next depending on whether the member is time-lagged. That would
-# cause confusion, so here, we always use lead values with zero lead
-# corresponding to the nominal cdate.
-#
-FCST_VAR1_NAME = {{fieldname_in_met_output}}_{{accum_hh}}
-FCST_VAR1_LEVELS = A{{accum_hh}}
-FCST_VAR1_THRESH = {{field_thresholds}}
-FCST_VAR1_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}";
-OBS_VAR1_NAME = {{fieldname_in_met_output}}_{{accum_hh}}
-OBS_VAR1_LEVELS = A{{accum_hh}}
-OBS_VAR1_THRESH = {{field_thresholds}}
-
-#
-# Forecast data time window(s).
-#
-FCST_GRID_STAT_FILE_WINDOW_BEGIN = 0
-FCST_GRID_STAT_FILE_WINDOW_END = 0
-#
-# Observation data time window(s).
-#
-OBS_GRID_STAT_FILE_WINDOW_BEGIN = 0
-OBS_GRID_STAT_FILE_WINDOW_END = 0
-
-# MET GridStat neighborhood values
-# See the MET User's Guide GridStat section for more information
-GRID_STAT_NEIGHBORHOOD_FIELD = BOTH
-
-# width value passed to nbrhd dictionary in the MET config file
-GRID_STAT_NEIGHBORHOOD_WIDTH = 3,5,7
-
-# shape value passed to nbrhd dictionary in the MET config file
-GRID_STAT_NEIGHBORHOOD_SHAPE = SQUARE
-
-# cov thresh list passed to nbrhd dictionary in the MET config file
-GRID_STAT_NEIGHBORHOOD_COV_THRESH = >=0.5
-
-# Set to true to run GridStat separately for each field specified
-# Set to false to create one run of GridStat per run time that
-# includes all fields specified.
-GRID_STAT_ONCE_PER_FIELD = False
-#
-# Set to true if forecast data is probabilistic.
-#
-FCST_IS_PROB = False
-#
-# Only used if FCST_IS_PROB is true - sets probabilistic threshold
-#
-FCST_GRID_STAT_PROB_THRESH = ==0.1
-
-GRID_STAT_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE}
-
-# Climatology data
-#GRID_STAT_CLIMO_MEAN_FILE_NAME =
-#GRID_STAT_CLIMO_MEAN_FIELD =
-#GRID_STAT_CLIMO_MEAN_REGRID_METHOD =
-#GRID_STAT_CLIMO_MEAN_REGRID_WIDTH =
-#GRID_STAT_CLIMO_MEAN_REGRID_VLD_THRESH =
-#GRID_STAT_CLIMO_MEAN_REGRID_SHAPE =
-#GRID_STAT_CLIMO_MEAN_TIME_INTERP_METHOD =
-#GRID_STAT_CLIMO_MEAN_MATCH_MONTH =
-#GRID_STAT_CLIMO_MEAN_DAY_INTERVAL =
-#GRID_STAT_CLIMO_MEAN_HOUR_INTERVAL =
-
-#GRID_STAT_CLIMO_STDEV_FILE_NAME =
-#GRID_STAT_CLIMO_STDEV_FIELD =
-#GRID_STAT_CLIMO_STDEV_REGRID_METHOD =
-#GRID_STAT_CLIMO_STDEV_REGRID_WIDTH =
-#GRID_STAT_CLIMO_STDEV_REGRID_VLD_THRESH =
-#GRID_STAT_CLIMO_STDEV_REGRID_SHAPE =
-#GRID_STAT_CLIMO_STDEV_TIME_INTERP_METHOD =
-#GRID_STAT_CLIMO_STDEV_MATCH_MONTH =
-#GRID_STAT_CLIMO_STDEV_DAY_INTERVAL =
-#GRID_STAT_CLIMO_STDEV_HOUR_INTERVAL =
-
-#GRID_STAT_CLIMO_CDF_BINS = 1
-#GRID_STAT_CLIMO_CDF_CENTER_BINS = False
-#GRID_STAT_CLIMO_CDF_WRITE_BINS = True
-
-GRID_STAT_MASK_GRID =
-
-# Statistical output types
-GRID_STAT_OUTPUT_FLAG_FHO = STAT
-GRID_STAT_OUTPUT_FLAG_CTC = STAT
-GRID_STAT_OUTPUT_FLAG_CTS = STAT
-#GRID_STAT_OUTPUT_FLAG_MCTC = NONE
-#GRID_STAT_OUTPUT_FLAG_MCTS = NONE
-GRID_STAT_OUTPUT_FLAG_CNT = STAT
-#GRID_STAT_OUTPUT_FLAG_SL1L2 = NONE
-#GRID_STAT_OUTPUT_FLAG_SAL1L2 = NONE
-#GRID_STAT_OUTPUT_FLAG_VL1L2 = NONE
-#GRID_STAT_OUTPUT_FLAG_VAL1L2 = NONE
-#GRID_STAT_OUTPUT_FLAG_VCNT = NONE
-#GRID_STAT_OUTPUT_FLAG_PCT = NONE
-#GRID_STAT_OUTPUT_FLAG_PSTD = NONE
-#GRID_STAT_OUTPUT_FLAG_PJC = NONE
-#GRID_STAT_OUTPUT_FLAG_PRC = NONE
-#GRID_STAT_OUTPUT_FLAG_ECLV = BOTH
-GRID_STAT_OUTPUT_FLAG_NBRCTC = STAT
-GRID_STAT_OUTPUT_FLAG_NBRCTS = STAT
-GRID_STAT_OUTPUT_FLAG_NBRCNT = STAT
-#GRID_STAT_OUTPUT_FLAG_GRAD = BOTH
-#GRID_STAT_OUTPUT_FLAG_DMAP = NONE
-
-# NetCDF matched pairs output file
-#GRID_STAT_NC_PAIRS_VAR_NAME =
-GRID_STAT_NC_PAIRS_FLAG_LATLON = FALSE
-GRID_STAT_NC_PAIRS_FLAG_RAW = FALSE
-GRID_STAT_NC_PAIRS_FLAG_DIFF = FALSE
-GRID_STAT_NC_PAIRS_FLAG_CLIMO = FALSE
-#GRID_STAT_NC_PAIRS_FLAG_CLIMO_CDP = FALSE
-GRID_STAT_NC_PAIRS_FLAG_WEIGHT = FALSE
-GRID_STAT_NC_PAIRS_FLAG_NBRHD = FALSE
-#GRID_STAT_NC_PAIRS_FLAG_FOURIER = FALSE
-#GRID_STAT_NC_PAIRS_FLAG_GRADIENT = FALSE
-#GRID_STAT_NC_PAIRS_FLAG_DISTANCE_MAP = FALSE
-GRID_STAT_NC_PAIRS_FLAG_APPLY_MASK = FALSE
-
-# End of [config] section and start of [dir] section.
-[dir]
-#
-# Directory containing observation input to GridStat.
-#
-OBS_GRID_STAT_INPUT_DIR = {{obs_input_dir}}
-#
-# Directory containing forecast input to GridStat.
-#
-FCST_GRID_STAT_INPUT_DIR = {{fcst_input_dir}}
-#
-# Directory containing climatology mean input to GridStat. Not used in
-# this example.
-#
-GRID_STAT_CLIMO_MEAN_INPUT_DIR =
-#
-# Directory containing climatology standard deviation input to GridStat.
-# Not used in this example.
-#
-GRID_STAT_CLIMO_STDEV_INPUT_DIR =
-#
-# Directory in which to write output from GridStat.
-#
-# OUTPUT_BASE apparently has to be set to something; it cannot be left
-# to its default value. But it is not explicitly used elsewhere in this
-# configuration file.
-#
-OUTPUT_BASE = {{output_base}}
-GRID_STAT_OUTPUT_DIR = {{output_dir}}
-#
-# Directory for staging data.
-#
-STAGING_DIR = {{staging_dir}}
-
-# End of [dir] section and start of [filename_templates] section.
-[filename_templates]
-#
-# Template for observation input to GridStat relative to
-# OBS_GRID_STAT_INPUT_DIR.
-#
-OBS_GRID_STAT_INPUT_TEMPLATE = {{obs_input_fn_template}}
-#
-# Template for forecast input to GridStat relative to
-# FCST_GRID_STAT_INPUT_DIR.
-#
-FCST_GRID_STAT_INPUT_TEMPLATE = {{fcst_input_fn_template}}
-#
-# Template for output from GridStat relative to GRID_STAT_OUTPUT_DIR.
-#
-GRID_STAT_OUTPUT_TEMPLATE =
-#
-# Template for climatology input to GridStat relative to
-# GRID_STAT_CLIMO_MEAN_INPUT_DIR. Not used in this example.
-#
-GRID_STAT_CLIMO_MEAN_INPUT_TEMPLATE =
-#
-# Template for climatology input to GridStat relative to
-# GRID_STAT_CLIMO_STDEV_INPUT_DIR. Not used in this example.
-#
-GRID_STAT_CLIMO_STDEV_INPUT_TEMPLATE =
-#
-# Variable used to specify one or more verification mask files for
-# GridStat. Here it is set to the standard CONUS polygon file.
-#
-GRID_STAT_VERIFICATION_MASK_TEMPLATE = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly
diff --git a/parm/metplus/GridStat_ASNOW.conf b/parm/metplus/GridStat_ASNOW.conf
deleted file mode 100644
index 3960a10c30..0000000000
--- a/parm/metplus/GridStat_ASNOW.conf
+++ /dev/null
@@ -1,283 +0,0 @@
-# GridStat METplus Configuration
-
-[config]
-
-# List of applications (tools) to run.
-PROCESS_LIST = GridStat
-
-# time looping - options are INIT, VALID, RETRO, and REALTIME
-# If set to INIT or RETRO:
-# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set
-# If set to VALID or REALTIME:
-# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set
-LOOP_BY = INIT
-
-# Format of INIT_BEG and INIT_END using % items
-# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc.
-# see www.strftime.org for more information
-# %Y%m%d%H expands to YYYYMMDDHH
-INIT_TIME_FMT = %Y%m%d%H
-
-# Start time for METplus run - must match INIT_TIME_FMT
-INIT_BEG = {{cdate}}
-
-# End time for METplus run - must match INIT_TIME_FMT
-INIT_END = {{cdate}}
-
-# Increment between METplus runs (in seconds if no units are specified).
-# Must be >= 60 seconds.
-INIT_INCREMENT = 3600
-
-# List of forecast leads to process for each run time (init or valid)
-# In hours if units are not specified
-# If unset, defaults to 0 (don't loop through forecast leads)
-LEAD_SEQ = {{fhr_list}}
-#
-# Order of loops to process data - Options are times, processes
-# Not relevant if only one item is in the PROCESS_LIST
-# times = run all wrappers in the PROCESS_LIST for a single run time, then
-# increment the run time and run all wrappers again until all times have
-# been evaluated.
-# processes = run the first wrapper in the PROCESS_LIST for all times
-# specified, then repeat for the next item in the PROCESS_LIST until all
-# wrappers have been run
-#
-LOOP_ORDER = times
-#
-# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud.
-#
-LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}}
-#
-# Specify the name of the METplus log file.
-#
-LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}}
-#
-# Specify the location and name of the final METplus conf file.
-#
-METPLUS_CONF = {GRID_STAT_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}}
-#
-# Location of MET config file to pass to GridStat.
-#
-GRID_STAT_CONFIG_FILE = {PARM_BASE}/met_config/GridStatConfig_wrapped
-
-# grid to remap data. Value is set as the 'to_grid' variable in the 'regrid' dictionary
-# See MET User's Guide for more information
-GRID_STAT_REGRID_TO_GRID = FCST
-GRID_STAT_REGRID_VLD_THRESH = 0.5
-GRID_STAT_REGRID_METHOD = BUDGET
-GRID_STAT_REGRID_WIDTH = 2
-GRID_STAT_REGRID_SHAPE = SQUARE
-
-#GRID_STAT_INTERP_FIELD = BOTH
-#GRID_STAT_INTERP_VLD_THRESH = 1.0
-#GRID_STAT_INTERP_SHAPE = SQUARE
-#GRID_STAT_INTERP_TYPE_METHOD = NEAREST
-#GRID_STAT_INTERP_TYPE_WIDTH = 1
-
-#GRID_STAT_GRID_WEIGHT_FLAG =
-
-#
-# Name to identify model (forecast) data in output.
-#
-# The variable MODEL is recorded in the stat files, and the data in
-# these files is then plotted (e.g. using METViewer). Here, we add a
-# suffix to MODEL that identifies the forecast ensemble member. This
-# makes it easier to identify each curve.
-#
-MODEL = {{vx_fcst_model_name}}_{{ensmem_name}}
-#
-# Name to identify observation data in output.
-#
-OBTYPE = {{obtype}}
-#
-# Value to enter under the DESC column in the output stat file.
-#
-GRID_STAT_DESC = NA
-
-# List of variables to compare in GridStat - FCST_VAR1 variables correspond
-# to OBS_VAR1 variables
-# Note [FCST/OBS/BOTH]_GRID_STAT_VAR_NAME can be used instead if different evaluations
-# are needed for different tools
-
-GRID_STAT_MET_CONFIG_OVERRIDES = cat_thresh = [NA]; cnt_thresh = [NA]; cnt_logic = UNION; wind_thresh = [NA]; wind_logic = UNION; ci_alpha = [0.05]; rank_corr_flag = FALSE;
-#
-# List of forecast and corresponding observation fields to process.
-#
-FCST_VAR1_NAME = {{fieldname_in_met_output}}_{{accum_hh}}
-FCST_VAR1_LEVELS = A{{accum_hh}}
-FCST_VAR1_THRESH = {{field_thresholds}}
-FCST_VAR1_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}";
-OBS_VAR1_NAME = {{fieldname_in_obs_input}}
-OBS_VAR1_LEVELS = A{{accum_hh}}
-OBS_VAR1_THRESH = {{field_thresholds}}
-OBS_VAR1_OPTIONS = convert(x) = 100.0*x;
-#
-# Forecast data time window(s).
-#
-FCST_GRID_STAT_FILE_WINDOW_BEGIN = 0
-FCST_GRID_STAT_FILE_WINDOW_END = 0
-#
-# Observation data time window(s).
-#
-OBS_GRID_STAT_FILE_WINDOW_BEGIN = 0
-OBS_GRID_STAT_FILE_WINDOW_END = 0
-
-# MET GridStat neighborhood values
-# See the MET User's Guide GridStat section for more information
-GRID_STAT_NEIGHBORHOOD_FIELD = BOTH
-
-# width value passed to nbrhd dictionary in the MET config file
-GRID_STAT_NEIGHBORHOOD_WIDTH = 5
-
-# shape value passed to nbrhd dictionary in the MET config file
-GRID_STAT_NEIGHBORHOOD_SHAPE = SQUARE
-
-# cov thresh list passed to nbrhd dictionary in the MET config file
-GRID_STAT_NEIGHBORHOOD_COV_THRESH = >=0.5
-
-# Set to true to run GridStat separately for each field specified
-# Set to false to create one run of GridStat per run time that
-# includes all fields specified.
-GRID_STAT_ONCE_PER_FIELD = False
-#
-# Set to true if forecast data is probabilistic.
-#
-FCST_IS_PROB = False
-#
-# Only used if FCST_IS_PROB is true - sets probabilistic threshold
-#
-FCST_GRID_STAT_PROB_THRESH = ==0.1
-
-GRID_STAT_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE}
-
-# Climatology data
-#GRID_STAT_CLIMO_MEAN_FILE_NAME =
-#GRID_STAT_CLIMO_MEAN_FIELD =
-#GRID_STAT_CLIMO_MEAN_REGRID_METHOD =
-#GRID_STAT_CLIMO_MEAN_REGRID_WIDTH =
-#GRID_STAT_CLIMO_MEAN_REGRID_VLD_THRESH =
-#GRID_STAT_CLIMO_MEAN_REGRID_SHAPE =
-#GRID_STAT_CLIMO_MEAN_TIME_INTERP_METHOD =
-#GRID_STAT_CLIMO_MEAN_MATCH_MONTH =
-#GRID_STAT_CLIMO_MEAN_DAY_INTERVAL =
-#GRID_STAT_CLIMO_MEAN_HOUR_INTERVAL =
-
-#GRID_STAT_CLIMO_STDEV_FILE_NAME =
-#GRID_STAT_CLIMO_STDEV_FIELD =
-#GRID_STAT_CLIMO_STDEV_REGRID_METHOD =
-#GRID_STAT_CLIMO_STDEV_REGRID_WIDTH =
-#GRID_STAT_CLIMO_STDEV_REGRID_VLD_THRESH =
-#GRID_STAT_CLIMO_STDEV_REGRID_SHAPE =
-#GRID_STAT_CLIMO_STDEV_TIME_INTERP_METHOD =
-#GRID_STAT_CLIMO_STDEV_MATCH_MONTH =
-#GRID_STAT_CLIMO_STDEV_DAY_INTERVAL =
-#GRID_STAT_CLIMO_STDEV_HOUR_INTERVAL =
-
-#GRID_STAT_CLIMO_CDF_BINS = 1
-#GRID_STAT_CLIMO_CDF_CENTER_BINS = False
-#GRID_STAT_CLIMO_CDF_WRITE_BINS = True
-
-GRID_STAT_MASK_GRID =
-
-# Statistical output types
-GRID_STAT_OUTPUT_FLAG_FHO = STAT
-GRID_STAT_OUTPUT_FLAG_CTC = STAT
-GRID_STAT_OUTPUT_FLAG_CTS = STAT
-#GRID_STAT_OUTPUT_FLAG_MCTC = NONE
-#GRID_STAT_OUTPUT_FLAG_MCTS = NONE
-GRID_STAT_OUTPUT_FLAG_CNT = STAT
-#GRID_STAT_OUTPUT_FLAG_SL1L2 = NONE
-#GRID_STAT_OUTPUT_FLAG_SAL1L2 = NONE
-#GRID_STAT_OUTPUT_FLAG_VL1L2 = NONE
-#GRID_STAT_OUTPUT_FLAG_VAL1L2 = NONE
-#GRID_STAT_OUTPUT_FLAG_VCNT = NONE
-#GRID_STAT_OUTPUT_FLAG_PCT = NONE
-#GRID_STAT_OUTPUT_FLAG_PSTD = NONE
-#GRID_STAT_OUTPUT_FLAG_PJC = NONE
-#GRID_STAT_OUTPUT_FLAG_PRC = NONE
-#GRID_STAT_OUTPUT_FLAG_ECLV = BOTH
-GRID_STAT_OUTPUT_FLAG_NBRCTC = STAT
-GRID_STAT_OUTPUT_FLAG_NBRCTS = STAT
-GRID_STAT_OUTPUT_FLAG_NBRCNT = STAT
-#GRID_STAT_OUTPUT_FLAG_GRAD = BOTH
-#GRID_STAT_OUTPUT_FLAG_DMAP = NONE
-
-# NetCDF matched pairs output file
-#GRID_STAT_NC_PAIRS_VAR_NAME =
-GRID_STAT_NC_PAIRS_FLAG_LATLON = FALSE
-GRID_STAT_NC_PAIRS_FLAG_RAW = FALSE
-GRID_STAT_NC_PAIRS_FLAG_DIFF = FALSE
-GRID_STAT_NC_PAIRS_FLAG_CLIMO = FALSE
-#GRID_STAT_NC_PAIRS_FLAG_CLIMO_CDP = FALSE
-GRID_STAT_NC_PAIRS_FLAG_WEIGHT = FALSE
-GRID_STAT_NC_PAIRS_FLAG_NBRHD = FALSE
-#GRID_STAT_NC_PAIRS_FLAG_FOURIER = FALSE
-#GRID_STAT_NC_PAIRS_FLAG_GRADIENT = FALSE
-#GRID_STAT_NC_PAIRS_FLAG_DISTANCE_MAP = FALSE
-GRID_STAT_NC_PAIRS_FLAG_APPLY_MASK = FALSE
-
-# End of [config] section and start of [dir] section.
-[dir]
-#
-# Directory containing observation input to GridStat.
-#
-OBS_GRID_STAT_INPUT_DIR = {{obs_input_dir}}
-#
-# Directory containing forecast input to GridStat.
-#
-FCST_GRID_STAT_INPUT_DIR = {{fcst_input_dir}}
-#
-# Directory containing climatology mean input to GridStat. Not used in
-# this example.
-#
-GRID_STAT_CLIMO_MEAN_INPUT_DIR =
-#
-# Directory containing climatology mean input to GridStat. Not used in
-# this example.
-#
-GRID_STAT_CLIMO_STDEV_INPUT_DIR =
-#
-# Directory in which to write output from GridStat.
-#
-# OUTPUT_BASE apparently has to be set to something; it cannot be left
-# to its default value. But it is not explicitly used elsewhere in this
-# configuration file.
-#
-OUTPUT_BASE = {{output_base}}
-GRID_STAT_OUTPUT_DIR = {{output_dir}}
-#
-# Directory for staging data.
-#
-STAGING_DIR = {{staging_dir}}
-
-# End of [dir] section and start of [filename_templates] section.
-[filename_templates]
-#
-# Template for observation input to GridStat relative to
-# OBS_GRID_STAT_INPUT_DIR.
-#
-OBS_GRID_STAT_INPUT_TEMPLATE = {{obs_input_fn_template}}
-#
-# Template for forecast input to GridStat relative to
-# FCST_GRID_STAT_INPUT_DIR.
-#
-FCST_GRID_STAT_INPUT_TEMPLATE = {{fcst_input_fn_template}}
-#
-# Template for output from GridStat relative to GRID_STAT_OUTPUT_DIR.
-#
-GRID_STAT_OUTPUT_TEMPLATE =
-#
-# Template for climatology input to GridStat relative to
-# GRID_STAT_CLIMO_MEAN_INPUT_DIR. Not used in this example.
-#
-GRID_STAT_CLIMO_MEAN_INPUT_TEMPLATE =
-#
-# Template for climatology input to GridStat relative to
-# GRID_STAT_CLIMO_STDEV_INPUT_DIR. Not used in this example.
-#
-GRID_STAT_CLIMO_STDEV_INPUT_TEMPLATE =
-#
-# Variable used to specify one or more verification mask files for
-# GridStat. Not used for this example.
-#
-GRID_STAT_VERIFICATION_MASK_TEMPLATE = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly
diff --git a/parm/metplus/GridStat_REFC.conf b/parm/metplus/GridStat_REFC.conf
deleted file mode 100644
index c7f34d27f9..0000000000
--- a/parm/metplus/GridStat_REFC.conf
+++ /dev/null
@@ -1,315 +0,0 @@
-# GridStat METplus Configuration
-
-[config]
-
-# List of applications (tools) to run.
-PROCESS_LIST = GridStat
-
-# time looping - options are INIT, VALID, RETRO, and REALTIME
-# If set to INIT or RETRO:
-# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set
-# If set to VALID or REALTIME:
-# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set
-LOOP_BY = INIT
-
-# Format of INIT_BEG and INIT_END using % items
-# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc.
-# see www.strftime.org for more information
-# %Y%m%d%H expands to YYYYMMDDHH
-INIT_TIME_FMT = %Y%m%d%H
-
-# Start time for METplus run - must match INIT_TIME_FMT
-INIT_BEG = {{cdate}}
-
-# End time for METplus run - must match INIT_TIME_FMT
-INIT_END = {{cdate}}
-
-# Increment between METplus runs (in seconds if no units are specified).
-# Must be >= 60 seconds.
-INIT_INCREMENT = 3600
-
-# List of forecast leads to process for each run time (init or valid)
-# In hours if units are not specified
-# If unset, defaults to 0 (don't loop through forecast leads)
-LEAD_SEQ = {{fhr_list}}
-#
-# Order of loops to process data - Options are times, processes
-# Not relevant if only one item is in the PROCESS_LIST
-# times = run all wrappers in the PROCESS_LIST for a single run time, then
-# increment the run time and run all wrappers again until all times have
-# been evaluated.
-# processes = run the first wrapper in the PROCESS_LIST for all times
-# specified, then repeat for the next item in the PROCESS_LIST until all
-# wrappers have been run
-#
-LOOP_ORDER = times
-#
-# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud.
-#
-LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}}
-#
-# Specify the name of the METplus log file.
-#
-LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}}
-#
-# Specify the location and name of the final METplus conf file.
-#
-METPLUS_CONF = {GRID_STAT_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}}
-#
-# Location of MET configuration file to pass to GridStat.
-#
-# References PARM_BASE, which is the location of the parm directory
-# corresponding to the ush directory of the run_metplus.py script that
-# is called or the value of the environment variable METPLUS_PARM_BASE
-# if set.
-#
-GRID_STAT_CONFIG_FILE = {PARM_BASE}/met_config/GridStatConfig_wrapped
-
-# grid to remap data. Value is set as the 'to_grid' variable in the 'regrid' dictionary
-# See MET User's Guide for more information
-GRID_STAT_REGRID_TO_GRID = FCST
-GRID_STAT_REGRID_VLD_THRESH = 0.5
-GRID_STAT_REGRID_METHOD = BUDGET
-GRID_STAT_REGRID_WIDTH = 2
-GRID_STAT_REGRID_SHAPE = SQUARE
-
-GRID_STAT_INTERP_FIELD = NONE
-GRID_STAT_INTERP_VLD_THRESH = 1.0
-GRID_STAT_INTERP_SHAPE = SQUARE
-GRID_STAT_INTERP_TYPE_METHOD = NEAREST
-GRID_STAT_INTERP_TYPE_WIDTH = 1
-
-GRID_STAT_GRID_WEIGHT_FLAG = NONE
-#
-# Name to identify model (forecast) data in output.
-#
-# The variable MODEL is recorded in the stat files, and the data in
-# these files is then plotted (e.g. using METViewer). Here, we add a
-# suffix to MODEL that identifies the forecast ensemble member. This
-# makes it easier to identify each curve.
-#
-MODEL = {{vx_fcst_model_name}}_{{ensmem_name}}
-#
-# Name to identify observation data in output.
-#
-OBTYPE = {{obtype}}
-#
-# Value to enter under the DESC column in the output stat file. Here,
-# we store the value of the original lead in this column, i.e. the lead
-# with zero corresponding to the actual start time of the forecast (which
-# is (cdate - time_lag)), not to cdate. This is just the lead in
-# LEAD_SEQ with the time lag (time_lag) of the current forecast member
-# added on.
-#
-# Uncomment this line only after upgrading to METplus 5.x.
-#GRID_STAT_DESC = {lead?fmt=%H%M%S?shift={{time_lag}}}
-GRID_STAT_DESC = NA
-
-# List of variables to compare in GridStat - FCST_VAR1 variables correspond
-# to OBS_VAR1 variables
-# Note [FCST/OBS/BOTH]_GRID_STAT_VAR_NAME can be used instead if different evaluations
-# are needed for different tools
-
-GRID_STAT_MET_CONFIG_OVERRIDES = cat_thresh = [NA]; cnt_thresh = [NA]; cnt_logic = UNION; wind_thresh = [NA]; wind_logic = UNION; ci_alpha = [0.05]; rank_corr_flag = FALSE;
-#
-# List of forecast and corresponding observation fields to process.
-#
-# Note on use of set_attr_lead and ensemble member time-lagging:
-# -------------------------------------------------------------
-# The set_attr_lead parameter appearing below in [FCST|OBS]_VAR_OPTIONS
-# specifies the lead to use both in naming of the output .stat and .nc
-# files and for setting the lead values contained in those files. This
-# option causes MET/METplus to use the lead values in the variable LEAD_SEQ
-# set above, which are the same for all ensemble forecast members (i.e.
-# regardless of whether members are time lagged with respect to the
-# nominal cycle date specified by cdate). If set_attr_lead were not
-# specified as below, then MET/METplus would get the lead from the input
-# forecast file, and that would in general differ from one ensemble member
-# to the next depending on whether the member is time-lagged. That would
-# cause confusion, so here, we always use lead values with zero lead
-# corresponding to the nominal cdate.
-#
-FCST_VAR1_NAME = {{fieldname_in_fcst_input}}
-FCST_VAR1_LEVELS = L0
-FCST_VAR1_THRESH = {{field_thresholds}}
-FCST_VAR1_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}";
- cnt_thresh = [ >15 ];
- cnt_logic = UNION;
-OBS_VAR1_NAME = {{fieldname_in_obs_input}}
-OBS_VAR1_LEVELS = Z500
-OBS_VAR1_THRESH = {{field_thresholds}}
-OBS_VAR1_OPTIONS = censor_thresh = [eq-999, <-20];
- censor_val = [-9999, -20];
- cnt_thresh = [ >15 ];
- cnt_logic = UNION;
-
-#
-# Forecast data time window(s).
-#
-#FCST_GRID_STAT_FILE_WINDOW_BEGIN = 0
-#FCST_GRID_STAT_FILE_WINDOW_END = 0
-#
-# Observation data time window(s).
-#
-OBS_GRID_STAT_FILE_WINDOW_BEGIN = -300
-OBS_GRID_STAT_FILE_WINDOW_END = 300
-
-# MET GridStat neighborhood values
-# See the MET User's Guide GridStat section for more information
-GRID_STAT_NEIGHBORHOOD_FIELD = BOTH
-
-# width value passed to nbrhd dictionary in the MET config file
-GRID_STAT_NEIGHBORHOOD_WIDTH = 1,3,5,7
-
-# shape value passed to nbrhd dictionary in the MET config file
-GRID_STAT_NEIGHBORHOOD_SHAPE = SQUARE
-
-# cov thresh list passed to nbrhd dictionary in the MET config file
-GRID_STAT_NEIGHBORHOOD_COV_THRESH = >=0.5
-
-# Set to true to run GridStat separately for each field specified
-# Set to false to create one run of GridStat per run time that
-# includes all fields specified.
-GRID_STAT_ONCE_PER_FIELD = False
-#
-# Set to true if forecast data is probabilistic.
-#
-FCST_IS_PROB = False
-#
-# Only used if FCST_IS_PROB is true - sets probabilistic threshold
-#
-FCST_GRID_STAT_PROB_THRESH = ==0.1
-
-GRID_STAT_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE}
-
-# Climatology data
-#GRID_STAT_CLIMO_MEAN_FILE_NAME =
-#GRID_STAT_CLIMO_MEAN_FIELD =
-#GRID_STAT_CLIMO_MEAN_REGRID_METHOD =
-#GRID_STAT_CLIMO_MEAN_REGRID_WIDTH =
-#GRID_STAT_CLIMO_MEAN_REGRID_VLD_THRESH =
-#GRID_STAT_CLIMO_MEAN_REGRID_SHAPE =
-#GRID_STAT_CLIMO_MEAN_TIME_INTERP_METHOD =
-#GRID_STAT_CLIMO_MEAN_MATCH_MONTH =
-#GRID_STAT_CLIMO_MEAN_DAY_INTERVAL =
-#GRID_STAT_CLIMO_MEAN_HOUR_INTERVAL =
-
-#GRID_STAT_CLIMO_STDEV_FILE_NAME =
-#GRID_STAT_CLIMO_STDEV_FIELD =
-#GRID_STAT_CLIMO_STDEV_REGRID_METHOD =
-#GRID_STAT_CLIMO_STDEV_REGRID_WIDTH =
-#GRID_STAT_CLIMO_STDEV_REGRID_VLD_THRESH =
-#GRID_STAT_CLIMO_STDEV_REGRID_SHAPE =
-#GRID_STAT_CLIMO_STDEV_TIME_INTERP_METHOD =
-#GRID_STAT_CLIMO_STDEV_MATCH_MONTH =
-#GRID_STAT_CLIMO_STDEV_DAY_INTERVAL =
-#GRID_STAT_CLIMO_STDEV_HOUR_INTERVAL =
-
-GRID_STAT_CLIMO_CDF_BINS = 1
-#GRID_STAT_CLIMO_CDF_CENTER_BINS = False
-#GRID_STAT_CLIMO_CDF_WRITE_BINS = True
-
-GRID_STAT_MASK_GRID =
-
-# Statistical output types
-GRID_STAT_OUTPUT_FLAG_FHO = STAT
-GRID_STAT_OUTPUT_FLAG_CTC = STAT
-GRID_STAT_OUTPUT_FLAG_CTS = STAT
-#GRID_STAT_OUTPUT_FLAG_MCTC = NONE
-#GRID_STAT_OUTPUT_FLAG_MCTS = NONE
-GRID_STAT_OUTPUT_FLAG_CNT = STAT
-#GRID_STAT_OUTPUT_FLAG_SL1L2 = NONE
-#GRID_STAT_OUTPUT_FLAG_SAL1L2 = NONE
-#GRID_STAT_OUTPUT_FLAG_VL1L2 = NONE
-#GRID_STAT_OUTPUT_FLAG_VAL1L2 = NONE
-#GRID_STAT_OUTPUT_FLAG_VCNT = NONE
-#GRID_STAT_OUTPUT_FLAG_PCT = NONE
-#GRID_STAT_OUTPUT_FLAG_PSTD = NONE
-#GRID_STAT_OUTPUT_FLAG_PJC = NONE
-#GRID_STAT_OUTPUT_FLAG_PRC = NONE
-#GRID_STAT_OUTPUT_FLAG_ECLV = BOTH
-GRID_STAT_OUTPUT_FLAG_NBRCTC = STAT
-GRID_STAT_OUTPUT_FLAG_NBRCTS = STAT
-GRID_STAT_OUTPUT_FLAG_NBRCNT = STAT
-#GRID_STAT_OUTPUT_FLAG_GRAD = BOTH
-#GRID_STAT_OUTPUT_FLAG_DMAP = NONE
-
-# NetCDF matched pairs output file
-#GRID_STAT_NC_PAIRS_VAR_NAME =
-GRID_STAT_NC_PAIRS_FLAG_LATLON = FALSE
-GRID_STAT_NC_PAIRS_FLAG_RAW = FALSE
-GRID_STAT_NC_PAIRS_FLAG_DIFF = FALSE
-GRID_STAT_NC_PAIRS_FLAG_CLIMO = FALSE
-#GRID_STAT_NC_PAIRS_FLAG_CLIMO_CDP = FALSE
-GRID_STAT_NC_PAIRS_FLAG_WEIGHT = FALSE
-GRID_STAT_NC_PAIRS_FLAG_NBRHD = FALSE
-#GRID_STAT_NC_PAIRS_FLAG_FOURIER = FALSE
-#GRID_STAT_NC_PAIRS_FLAG_GRADIENT = FALSE
-#GRID_STAT_NC_PAIRS_FLAG_DISTANCE_MAP = FALSE
-GRID_STAT_NC_PAIRS_FLAG_APPLY_MASK = FALSE
-
-# End of [config] section and start of [dir] section.
-[dir]
-#
-# Directory containing observation input to GridStat.
-#
-OBS_GRID_STAT_INPUT_DIR = {{obs_input_dir}}
-#
-# Directory containing forecast input to GridStat.
-#
-FCST_GRID_STAT_INPUT_DIR = {{fcst_input_dir}}
-#
-# Directory containing climatology mean input to GridStat. Not used in
-# this example.
-#
-GRID_STAT_CLIMO_MEAN_INPUT_DIR =
-#
-# Directory containing climatology mean input to GridStat. Not used in
-# this example.
-#
-GRID_STAT_CLIMO_STDEV_INPUT_DIR =
-#
-# Directory in which to write output from GridStat.
-#
-# OUTPUT_BASE apparently has to be set to something; it cannot be left
-# to its default value. But it is not explicitly used elsewhere in this
-# configuration file.
-#
-OUTPUT_BASE = {{output_base}}
-GRID_STAT_OUTPUT_DIR = {{output_dir}}
-#
-# Directory for staging data.
-#
-STAGING_DIR = {{staging_dir}}
-
-# End of [dir] section and start of [filename_templates] section.
-[filename_templates]
-#
-# Template for observation input to GridStat relative to
-# OBS_GRID_STAT_INPUT_DIR.
-#
-OBS_GRID_STAT_INPUT_TEMPLATE = {{obs_input_fn_template}}
-#
-# Template for forecast input to GridStat relative to
-# FCST_GRID_STAT_INPUT_DIR.
-#
-FCST_GRID_STAT_INPUT_TEMPLATE = {{fcst_input_fn_template}}
-#
-# Template for output from GridStat relative to GRID_STAT_OUTPUT_DIR.
-#
-GRID_STAT_OUTPUT_TEMPLATE =
-#
-# Template for climatology input to GridStat relative to
-# GRID_STAT_CLIMO_MEAN_INPUT_DIR. Not used in this example.
-#
-GRID_STAT_CLIMO_MEAN_INPUT_TEMPLATE =
-#
-# Template for climatology input to GridStat relative to
-# GRID_STAT_CLIMO_STDEV_INPUT_DIR. Not used in this example.
-#
-GRID_STAT_CLIMO_STDEV_INPUT_TEMPLATE =
-#
-# Variable used to specify one or more verification mask files for
-# GridStat. Not used for this example.
-#
-GRID_STAT_VERIFICATION_MASK_TEMPLATE = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly
diff --git a/parm/metplus/GridStat_RETOP.conf b/parm/metplus/GridStat_RETOP.conf
deleted file mode 100644
index be91a0ba03..0000000000
--- a/parm/metplus/GridStat_RETOP.conf
+++ /dev/null
@@ -1,317 +0,0 @@
-# GridStat METplus Configuration
-
-[config]
-
-# List of applications (tools) to run.
-PROCESS_LIST = GridStat
-
-# time looping - options are INIT, VALID, RETRO, and REALTIME
-# If set to INIT or RETRO:
-# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set
-# If set to VALID or REALTIME:
-# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set
-LOOP_BY = INIT
-
-# Format of INIT_BEG and INIT_END using % items
-# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc.
-# see www.strftime.org for more information
-# %Y%m%d%H expands to YYYYMMDDHH
-INIT_TIME_FMT = %Y%m%d%H
-
-# Start time for METplus run - must match INIT_TIME_FMT
-INIT_BEG = {{cdate}}
-
-# End time for METplus run - must match INIT_TIME_FMT
-INIT_END = {{cdate}}
-
-# Increment between METplus runs (in seconds if no units are specified).
-# Must be >= 60 seconds.
-INIT_INCREMENT = 3600
-
-# List of forecast leads to process for each run time (init or valid)
-# In hours if units are not specified
-# If unset, defaults to 0 (don't loop through forecast leads)
-LEAD_SEQ = {{fhr_list}}
-#
-# Order of loops to process data - Options are times, processes
-# Not relevant if only one item is in the PROCESS_LIST
-# times = run all wrappers in the PROCESS_LIST for a single run time, then
-# increment the run time and run all wrappers again until all times have
-# been evaluated.
-# processes = run the first wrapper in the PROCESS_LIST for all times
-# specified, then repeat for the next item in the PROCESS_LIST until all
-# wrappers have been run
-#
-LOOP_ORDER = times
-#
-# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud.
-#
-LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}}
-#
-# Specify the name of the METplus log file.
-#
-LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}}
-#
-# Specify the location and name of the final METplus conf file.
-#
-METPLUS_CONF = {GRID_STAT_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}}
-#
-# Location of MET configuration file to pass to GridStat.
-#
-# References PARM_BASE, which is the location of the parm directory
-# corresponding to the ush directory of the run_metplus.py script that
-# is called or the value of the environment variable METPLUS_PARM_BASE
-# if set.
-#
-GRID_STAT_CONFIG_FILE = {PARM_BASE}/met_config/GridStatConfig_wrapped
-
-# grid to remap data. Value is set as the 'to_grid' variable in the 'regrid' dictionary
-# See MET User's Guide for more information
-GRID_STAT_REGRID_TO_GRID = FCST
-GRID_STAT_REGRID_VLD_THRESH = 0.5
-GRID_STAT_REGRID_METHOD = BUDGET
-GRID_STAT_REGRID_WIDTH = 2
-GRID_STAT_REGRID_SHAPE = SQUARE
-
-GRID_STAT_INTERP_FIELD = NONE
-GRID_STAT_INTERP_VLD_THRESH = 1.0
-GRID_STAT_INTERP_SHAPE = SQUARE
-GRID_STAT_INTERP_TYPE_METHOD = NEAREST
-GRID_STAT_INTERP_TYPE_WIDTH = 1
-
-GRID_STAT_GRID_WEIGHT_FLAG = NONE
-#
-# Name to identify model (forecast) data in output.
-#
-# The variable MODEL is recorded in the stat files, and the data in
-# these files is then plotted (e.g. using METViewer). Here, we add a
-# suffix to MODEL that identifies the forecast ensemble member. This
-# makes it easier to identify each curve.
-#
-MODEL = {{vx_fcst_model_name}}_{{ensmem_name}}
-#
-# Name to identify observation data in output.
-#
-OBTYPE = {{obtype}}
-#
-# Value to enter under the DESC column in the output stat file. Here,
-# we store the value of the original lead in this column, i.e. the lead
-# with zero corresponding to the actual start time of the forecast (which
-# is (cdate - time_lag)), not to cdate. This is just the lead in
-# LEAD_SEQ with the time lag (time_lag) of the current forecast member
-# added on.
-#
-# Uncomment this line only after upgrading to METplus 5.x.
-#GRID_STAT_DESC = {lead?fmt=%H%M%S?shift={{time_lag}}}
-GRID_STAT_DESC = NA
-
-# List of variables to compare in GridStat - FCST_VAR1 variables correspond
-# to OBS_VAR1 variables
-# Note [FCST/OBS/BOTH]_GRID_STAT_VAR_NAME can be used instead if different evaluations
-# are needed for different tools
-
-GRID_STAT_MET_CONFIG_OVERRIDES = cat_thresh = [NA]; cnt_thresh = [NA]; cnt_logic = UNION; wind_thresh = [NA]; wind_logic = UNION; ci_alpha = [0.05]; rank_corr_flag = FALSE;
-#
-# List of forecast and corresponding observation fields to process.
-#
-# Note on use of set_attr_lead and ensemble member time-lagging:
-# -------------------------------------------------------------
-# The set_attr_lead parameter appearing below in [FCST|OBS]_VAR_OPTIONS
-# specifies the lead to use both in naming of the output .stat and .nc
-# files and for setting the lead values contained in those files. This
-# option causes MET/METplus to use the lead values in the variable LEAD_SEQ
-# set above, which are the same for all ensemble forecast members (i.e.
-# regardless of whether members are time lagged with respect to the
-# nominal cycle date specified by cdate). If set_attr_lead were not
-# specified as below, then MET/METplus would get the lead from the input
-# forecast file, and that would in general differ from one ensemble member
-# to the next depending on whether the member is time-lagged. That would
-# cause confusion, so here, we always use lead values with zero lead
-# corresponding to the nominal cdate.
-#
-FCST_VAR1_NAME = {{fieldname_in_fcst_input}}
-FCST_VAR1_LEVELS = L0
-FCST_VAR1_THRESH = {{field_thresholds}}
-FCST_VAR1_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}";
- convert(x) = x * 3.28084 * 0.001;
- cnt_thresh = [ >0 ];
- cnt_logic = UNION;
-OBS_VAR1_NAME = {{fieldname_in_obs_input}}
-OBS_VAR1_LEVELS = Z500
-OBS_VAR1_THRESH = {{field_thresholds}}
-OBS_VAR1_OPTIONS = convert(x) = x * 3280.84 * 0.001;
- censor_thresh = [<=-9.84252,eq-3.28084];
- censor_val = [-9999,-16.4042];
- cnt_thresh = [ >0 ];
- cnt_logic = UNION;
-
-#
-# Forecast data time window(s).
-#
-#FCST_GRID_STAT_FILE_WINDOW_BEGIN = 0
-#FCST_GRID_STAT_FILE_WINDOW_END = 0
-#
-# Observation data time window(s).
-#
-OBS_GRID_STAT_FILE_WINDOW_BEGIN = -300
-OBS_GRID_STAT_FILE_WINDOW_END = 300
-
-# MET GridStat neighborhood values
-# See the MET User's Guide GridStat section for more information
-GRID_STAT_NEIGHBORHOOD_FIELD = BOTH
-
-# width value passed to nbrhd dictionary in the MET config file
-GRID_STAT_NEIGHBORHOOD_WIDTH = 1,3,5,7
-
-# shape value passed to nbrhd dictionary in the MET config file
-GRID_STAT_NEIGHBORHOOD_SHAPE = SQUARE
-
-# cov thresh list passed to nbrhd dictionary in the MET config file
-GRID_STAT_NEIGHBORHOOD_COV_THRESH = >=0.5
-
-# Set to true to run GridStat separately for each field specified
-# Set to false to create one run of GridStat per run time that
-# includes all fields specified.
-GRID_STAT_ONCE_PER_FIELD = False
-#
-# Set to true if forecast data is probabilistic.
-#
-FCST_IS_PROB = False
-#
-# Only used if FCST_IS_PROB is true - sets probabilistic threshold
-#
-FCST_GRID_STAT_PROB_THRESH = ==0.1
-
-GRID_STAT_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE}
-
-# Climatology data
-#GRID_STAT_CLIMO_MEAN_FILE_NAME =
-#GRID_STAT_CLIMO_MEAN_FIELD =
-#GRID_STAT_CLIMO_MEAN_REGRID_METHOD =
-#GRID_STAT_CLIMO_MEAN_REGRID_WIDTH =
-#GRID_STAT_CLIMO_MEAN_REGRID_VLD_THRESH =
-#GRID_STAT_CLIMO_MEAN_REGRID_SHAPE =
-#GRID_STAT_CLIMO_MEAN_TIME_INTERP_METHOD =
-#GRID_STAT_CLIMO_MEAN_MATCH_MONTH =
-#GRID_STAT_CLIMO_MEAN_DAY_INTERVAL =
-#GRID_STAT_CLIMO_MEAN_HOUR_INTERVAL =
-
-#GRID_STAT_CLIMO_STDEV_FILE_NAME =
-#GRID_STAT_CLIMO_STDEV_FIELD =
-#GRID_STAT_CLIMO_STDEV_REGRID_METHOD =
-#GRID_STAT_CLIMO_STDEV_REGRID_WIDTH =
-#GRID_STAT_CLIMO_STDEV_REGRID_VLD_THRESH =
-#GRID_STAT_CLIMO_STDEV_REGRID_SHAPE =
-#GRID_STAT_CLIMO_STDEV_TIME_INTERP_METHOD =
-#GRID_STAT_CLIMO_STDEV_MATCH_MONTH =
-#GRID_STAT_CLIMO_STDEV_DAY_INTERVAL =
-#GRID_STAT_CLIMO_STDEV_HOUR_INTERVAL =
-
-GRID_STAT_CLIMO_CDF_BINS = 1
-#GRID_STAT_CLIMO_CDF_CENTER_BINS = False
-#GRID_STAT_CLIMO_CDF_WRITE_BINS = True
-
-GRID_STAT_MASK_GRID =
-
-# Statistical output types
-GRID_STAT_OUTPUT_FLAG_FHO = STAT
-GRID_STAT_OUTPUT_FLAG_CTC = STAT
-GRID_STAT_OUTPUT_FLAG_CTS = STAT
-#GRID_STAT_OUTPUT_FLAG_MCTC = NONE
-#GRID_STAT_OUTPUT_FLAG_MCTS = NONE
-GRID_STAT_OUTPUT_FLAG_CNT = STAT
-#GRID_STAT_OUTPUT_FLAG_SL1L2 = NONE
-#GRID_STAT_OUTPUT_FLAG_SAL1L2 = NONE
-#GRID_STAT_OUTPUT_FLAG_VL1L2 = NONE
-#GRID_STAT_OUTPUT_FLAG_VAL1L2 = NONE
-#GRID_STAT_OUTPUT_FLAG_VCNT = NONE
-#GRID_STAT_OUTPUT_FLAG_PCT = NONE
-#GRID_STAT_OUTPUT_FLAG_PSTD = NONE
-#GRID_STAT_OUTPUT_FLAG_PJC = NONE
-#GRID_STAT_OUTPUT_FLAG_PRC = NONE
-#GRID_STAT_OUTPUT_FLAG_ECLV = BOTH
-GRID_STAT_OUTPUT_FLAG_NBRCTC = STAT
-GRID_STAT_OUTPUT_FLAG_NBRCTS = STAT
-GRID_STAT_OUTPUT_FLAG_NBRCNT = STAT
-#GRID_STAT_OUTPUT_FLAG_GRAD = BOTH
-#GRID_STAT_OUTPUT_FLAG_DMAP = NONE
-
-# NetCDF matched pairs output file
-#GRID_STAT_NC_PAIRS_VAR_NAME =
-GRID_STAT_NC_PAIRS_FLAG_LATLON = FALSE
-GRID_STAT_NC_PAIRS_FLAG_RAW = FALSE
-GRID_STAT_NC_PAIRS_FLAG_DIFF = FALSE
-GRID_STAT_NC_PAIRS_FLAG_CLIMO = FALSE
-#GRID_STAT_NC_PAIRS_FLAG_CLIMO_CDP = FALSE
-GRID_STAT_NC_PAIRS_FLAG_WEIGHT = FALSE
-GRID_STAT_NC_PAIRS_FLAG_NBRHD = FALSE
-#GRID_STAT_NC_PAIRS_FLAG_FOURIER = FALSE
-#GRID_STAT_NC_PAIRS_FLAG_GRADIENT = FALSE
-#GRID_STAT_NC_PAIRS_FLAG_DISTANCE_MAP = FALSE
-GRID_STAT_NC_PAIRS_FLAG_APPLY_MASK = FALSE
-
-# End of [config] section and start of [dir] section.
-[dir]
-#
-# Directory containing observation input to GridStat.
-#
-OBS_GRID_STAT_INPUT_DIR = {{obs_input_dir}}
-#
-# Directory containing forecast input to GridStat.
-#
-FCST_GRID_STAT_INPUT_DIR = {{fcst_input_dir}}
-#
-# Directory containing climatology mean input to GridStat. Not used in
-# this example.
-#
-GRID_STAT_CLIMO_MEAN_INPUT_DIR =
-#
-# Directory containing climatology mean input to GridStat. Not used in
-# this example.
-#
-GRID_STAT_CLIMO_STDEV_INPUT_DIR =
-#
-# Directory in which to write output from GridStat.
-#
-# OUTPUT_BASE apparently has to be set to something; it cannot be left
-# to its default value. But it is not explicitly used elsewhere in this
-# configuration file.
-#
-OUTPUT_BASE = {{output_base}}
-GRID_STAT_OUTPUT_DIR = {{output_dir}}
-#
-# Directory for staging data.
-#
-STAGING_DIR = {{staging_dir}}
-
-# End of [dir] section and start of [filename_templates] section.
-[filename_templates]
-#
-# Template for observation input to GridStat relative to
-# OBS_GRID_STAT_INPUT_DIR.
-#
-OBS_GRID_STAT_INPUT_TEMPLATE = {{obs_input_fn_template}}
-#
-# Template for forecast input to GridStat relative to
-# FCST_GRID_STAT_INPUT_DIR.
-#
-FCST_GRID_STAT_INPUT_TEMPLATE = {{fcst_input_fn_template}}
-#
-# Template for output from GridStat relative to GRID_STAT_OUTPUT_DIR.
-#
-GRID_STAT_OUTPUT_TEMPLATE =
-#
-# Template for climatology input to GridStat relative to
-# GRID_STAT_CLIMO_MEAN_INPUT_DIR. Not used in this example.
-#
-GRID_STAT_CLIMO_MEAN_INPUT_TEMPLATE =
-#
-# Template for climatology input to GridStat relative to
-# GRID_STAT_CLIMO_STDEV_INPUT_DIR. Not used in this example.
-#
-GRID_STAT_CLIMO_STDEV_INPUT_TEMPLATE =
-#
-# Variable used to specify one or more verification mask files for
-# GridStat. Not used for this example.
-#
-GRID_STAT_VERIFICATION_MASK_TEMPLATE = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly
diff --git a/parm/metplus/GridStat_ensmean.conf b/parm/metplus/GridStat_ensmean.conf
new file mode 100644
index 0000000000..4b8c71ddab
--- /dev/null
+++ b/parm/metplus/GridStat_ensmean.conf
@@ -0,0 +1,662 @@
+# Ensemble mean {{MetplusToolName}} METplus Configuration
+
+[config]
+
+# List of applications (tools) to run.
+PROCESS_LIST = {{MetplusToolName}}
+
+# time looping - options are INIT, VALID, RETRO, and REALTIME
+# If set to INIT or RETRO:
+# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set
+# If set to VALID or REALTIME:
+# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set
+LOOP_BY = INIT
+
+# Format of INIT_BEG and INIT_END using % items
+# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc.
+# see www.strftime.org for more information
+# %Y%m%d%H expands to YYYYMMDDHH
+INIT_TIME_FMT = %Y%m%d%H
+
+# Start time for METplus run - must match INIT_TIME_FMT
+INIT_BEG = {{cdate}}
+
+# End time for METplus run - must match INIT_TIME_FMT
+INIT_END = {{cdate}}
+
+# Increment between METplus runs (in seconds if no units are specified).
+# Must be >= 60 seconds.
+INIT_INCREMENT = 3600
+
+# List of forecast leads to process for each run time (init or valid)
+# In hours if units are not specified
+# If unset, defaults to 0 (don't loop through forecast leads)
+LEAD_SEQ = {{fhr_list}}
+#
+# Order of loops to process data - Options are times, processes
+# Not relevant if only one item is in the PROCESS_LIST
+# times = run all wrappers in the PROCESS_LIST for a single run time, then
+# increment the run time and run all wrappers again until all times have
+# been evaluated.
+# processes = run the first wrapper in the PROCESS_LIST for all times
+# specified, then repeat for the next item in the PROCESS_LIST until all
+# wrappers have been run
+#
+LOOP_ORDER = times
+#
+# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud.
+#
+LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}}
+#
+# Specify the name of the METplus log file.
+#
+LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}}
+#
+# Specify the location and name of the final METplus conf file.
+#
+METPLUS_CONF = {{ '{' ~ METPLUS_TOOL_NAME ~ '_OUTPUT_DIR}' }}/metplus_final.{{metplus_config_fn}}
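+{#-
+Illustrative note (a sketch, not rendered output): the Jinja expression
+above concatenates '{', the tool name, and '_OUTPUT_DIR}' to produce a
+literal METplus variable reference. For GridStat, where METPLUS_TOOL_NAME
+renders to GRID_STAT, the line above becomes
+  METPLUS_CONF = {GRID_STAT_OUTPUT_DIR}/metplus_final.<config file name>
+with <config file name> filled in from metplus_config_fn.
+#}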
+#
+# Location of MET configuration file to pass to {{MetplusToolName}}.
+#
+# References PARM_BASE, which is the location of the parm directory
+# corresponding to the ush directory of the run_metplus.py script that
+# is called or the value of the environment variable METPLUS_PARM_BASE
+# if set.
+#
+{{METPLUS_TOOL_NAME}}_CONFIG_FILE = {PARM_BASE}/met_config/{{MetplusToolName}}Config_wrapped
+
+# grid to remap data. Value is set as the 'to_grid' variable in the 'regrid' dictionary
+# See MET User's Guide for more information
+{{METPLUS_TOOL_NAME}}_REGRID_TO_GRID = FCST
+{{METPLUS_TOOL_NAME}}_REGRID_VLD_THRESH = 0.5
+{{METPLUS_TOOL_NAME}}_REGRID_METHOD = BUDGET
+{{METPLUS_TOOL_NAME}}_REGRID_WIDTH = 2
+{{METPLUS_TOOL_NAME}}_REGRID_SHAPE = SQUARE
+
+#{{METPLUS_TOOL_NAME}}_INTERP_FIELD = BOTH
+#{{METPLUS_TOOL_NAME}}_INTERP_VLD_THRESH = 1.0
+#{{METPLUS_TOOL_NAME}}_INTERP_SHAPE = SQUARE
+#{{METPLUS_TOOL_NAME}}_INTERP_TYPE_METHOD = NEAREST
+#{{METPLUS_TOOL_NAME}}_INTERP_TYPE_WIDTH = 1
+
+#{{METPLUS_TOOL_NAME}}_GRID_WEIGHT_FLAG =
+
+#
+# Name to identify model (forecast) data in output.
+#
+# The variable MODEL is recorded in the stat files, and the data in
+# these files is then plotted (e.g. using METViewer). Here, we add a
+# suffix to MODEL that identifies the data as that for the ensemble
+# mean. This makes it easier to identify each curve.
+#
+MODEL = {{vx_fcst_model_name}}_ensmean
+#
+# Name to identify observation data in output.
+#
+OBTYPE = {{obtype}}
+#
+# Value to enter under the DESC column in the output stat file.
+#
+{{METPLUS_TOOL_NAME}}_DESC = NA
+#
+# Overrides of MET configuration defaults.
+#
+{{METPLUS_TOOL_NAME}}_MET_CONFIG_OVERRIDES = cat_thresh = [NA]; cnt_thresh = [NA]; cnt_logic = UNION; wind_thresh = [NA]; wind_logic = UNION; ci_alpha = [0.05]; rank_corr_flag = FALSE;
+#
+# List of forecast and corresponding observation fields to process.
+#
+{#-
+Import the file containing jinja macros.
+#}
+{%- import metplus_templates_dir ~ '/metplus_macros.jinja' as metplus_macros %}
+
+{#-
+Set the probabilistic threshold to be used for the forecast field. If
+necessary, this can be changed to be an input parameter in the calling
+script instead of a hard-coded value as below.
+#}
+{%- set thresh_fcst_prob = '==0.1' %}
+
+{#-
+Jinja requires certain variables to be defined globally within the template
+before they can be used in if-statements and other scopes (see Jinja
+scoping rules). Define such variables.
+#}
+{%- set level_fcst = '' %}
+{%- set level_obs = '' %}
+{%- set indx_level_fcst = '' %}
+
+{%- set valid_threshes_fcst = [] %}
+{%- set valid_threshes_obs = [] %}
+{%- set threshes_fcst = '' %}
+{%- set threshes_obs = '' %}
+{%- set indx_input_thresh_fcst = '' %}
+
+{%- set opts_indent = '' %}
+{%- set opts_indent_len = '' %}
+{%- set tmp = '' %}
+{%- set error_msg = '' %}
+
+{#-
+Make sure that the set of field groups for forecasts and observations
+are identical.
+#}
+{%- set fgs_fcst = vx_config_dict['fcst'].keys()|list %}
+{%- set fgs_obs = vx_config_dict['obs'].keys()|list %}
+{%- if (fgs_fcst != fgs_obs) %}
+ {%- set error_msg = '\n' ~
+'The list of valid field groups for forecasts (fgs_fcst) must be identical\n' ~
+'to that for observations (fgs_obs) but isn\'t:\n' ~
+' fgs_fcst = ' ~ fgs_fcst ~ '\n' ~
+' fgs_obs = ' ~ fgs_obs %}
+ {{metplus_macros.print_err_and_quit(error_msg)}}
+{%- endif %}
+
+{#-
+Extract the lists of forecast and observation dictionaries containing
+the valid fields, levels, and thresholds corresponding to the specified
+field group (input_field_group). Note that it would be simpler to have
+these be just dictionaries in which the keys are the field names (instead
+of them being LISTS of dictionaries in which each dictionary contains a
+single key that is the field name), but that approach cannot be used here
+because it is possible for field names to be repeated (for both forecasts
+and observations). For example, in the observations, the field name
+'PRWE' appears more than once, each time with a different threshold, and
+the combination of name and threshold is what constitutes a unique field,
+not just the name by itself.
+#}
+{%- set fields_levels_threshes_fcst = vx_config_dict['fcst'][input_field_group] %}
+{%- set fields_levels_threshes_obs = vx_config_dict['obs'][input_field_group] %}
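+
+{#-
+For illustration only (all field names, levels, and thresholds below are
+invented), an entry of vx_config_dict['obs'][input_field_group] with this
+structure might look like:
+
+  [ {'TMP':  {'P850': ['>=288', '>=293']}},
+    {'PRWE': {'Z0': ['>=161&&<=163']}},
+    {'PRWE': {'Z0': ['>=164&&<=166']}} ]
+
+i.e. a list of single-key dictionaries keyed on the field name, each key
+mapping to a dictionary of levels and their threshold lists. This is what
+allows a field name (here PRWE) to appear more than once.
+#}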
+
+{#-
+Reset the specified forecast level so that if it happens to be an
+accumulation (e.g. 'A03'), the leading zeros in front of the hour are
+stripped out (e.g. reset to 'A3').
+#}
+{%- set input_level_fcst = metplus_macros.get_accumulation_no_zero_pad(input_level_fcst) %}
+
+{#-
+Ensure that the specified input forecast level(s) (input_level_fcst) and
+threshold(s) (input_thresh_fcst) are valid, i.e. that they are in the
+set(s) of valid forecast levels and thresholds, respectively, specified
+in fields_levels_threshes_fcst.
+#}
+{{- metplus_macros.check_level(fields_levels_threshes_fcst, input_level_fcst) }}
+{{- metplus_macros.check_thresh(fields_levels_threshes_fcst, input_level_fcst, input_thresh_fcst) }}
+
+{#-
+Some fields in the specified field group (input_field_group) may need to
+be excluded from the METplus config file because calculating means for
+them doesn't make sense. List these (for each input_field_group) in the
+following dictionary.
+#}
+{%- set fields_fcst_to_exclude_by_field_group =
+ {'APCP': [],
+ 'ASNOW': [],
+ 'REFC': [],
+ 'RETOP': [],
+ 'ADPSFC': ['TCDC', 'VIS', 'HGT'],
+ 'ADPUPA': []} %}
+{%- set fields_fcst_to_exclude = fields_fcst_to_exclude_by_field_group[input_field_group] %}
+
+{#-
+For convenience, create lists of valid forecast and observation field
+names.
+#}
+{%- set num_valid_fields_fcst = fields_levels_threshes_fcst|length %}
+{%- set valid_fields_fcst = [] %}
+{%- for i in range(0,num_valid_fields_fcst) %}
+ {%- set field = fields_levels_threshes_fcst[i].keys()|list|join('') %}
+ {%- set tmp = valid_fields_fcst.append(field) %}
+{%- endfor %}
+
+{%- set valid_fields_obs = [] %}
+{%- set num_valid_fields_obs = fields_levels_threshes_obs|length %}
+{%- for i in range(0,num_valid_fields_obs) %}
+ {%- set field = fields_levels_threshes_obs[i].keys()|list|join('') %}
+ {%- set tmp = valid_fields_obs.append(field) %}
+{%- endfor %}
+
+{#-
+Ensure that the number of valid fields for forecasts is equal to that
+for the observations.
+#}
+{%- set num_valid_fields = 0 %}
+{%- if (num_valid_fields_fcst != num_valid_fields_obs) %}
+ {%- set error_msg = '\n' ~
+'The number of valid forecast fields (num_valid_fields_fcst) must be\n' ~
+'equal to the number of valid observation fields (num_valid_fields_obs)\n' ~
+'but isn\'t:\n' ~
+' num_valid_fields_fcst = ' ~ num_valid_fields_fcst ~ '\n' ~
+' num_valid_fields_obs = ' ~ num_valid_fields_obs ~ '\n' ~
+'The lists of valid forecast and observation fields are:\n' ~
+' valid_fields_fcst = ' ~ valid_fields_fcst ~ '\n' ~
+' valid_fields_obs = ' ~ valid_fields_obs ~ '\n' %}
+ {{metplus_macros.print_err_and_quit(error_msg)}}
+{%- else %}
+ {%- set num_valid_fields = num_valid_fields_fcst %}
+{%- endif %}
+
+{#-
+Loop over the valid fields and set field names, levels, thresholds,
+and/or options for each field, both for forecasts and for observations,
+in the METplus configuration file.
+#}
+{%- set ns = namespace(var_count = 0) %}
+
+{%- for i in range(0,num_valid_fields) if valid_fields_fcst[i] not in fields_fcst_to_exclude %}
+
+ {%- set field_fcst = valid_fields_fcst[i] %}
+ {%- set field_obs = valid_fields_obs[i] %}
+
+{#-
+For convenience, create lists of valid forecast and observation levels
+for the current field. Then check that the number of valid levels for
+forecasts is the same as that for observations.
+#}
+ {%- set valid_levels_fcst = fields_levels_threshes_fcst[i][field_fcst].keys()|list %}
+ {%- set valid_levels_obs = fields_levels_threshes_obs[i][field_obs].keys()|list %}
+
+{#-
+Extract dictionary of valid forecast levels (the dictionary keys) and
+corresponding lists of valid thresholds (the values) for each level.
+Then loop over these levels and corresponding lists of thresholds to set
+both the forecast and observation field names, levels, thresholds, and/or
+options.
+#}
+ {%- set valid_levels_threshes_fcst = fields_levels_threshes_fcst[i][field_fcst] %}
+ {%- for level_fcst, valid_threshes_fcst in valid_levels_threshes_fcst.items() %}
+
+ {%- if (input_level_fcst == 'all') or (input_level_fcst == level_fcst) %}
+{#-
+Increment the METplus variable counter.
+#}
+ {%- set ns.var_count = ns.var_count+1 %}
+
+{#-
+Set forecast field name. Note that this has to exactly match the name
+of the field in the input forecast file(s).
+
+The input forecast files are generated by the MET/METplus GenEnsProd
+tool. That tool adds the field's level to the variable names in its
+output file to ensure that all variables in the file have distinct names.
+For example, if the same field, say APCP, is output at two different
+levels, say at A3 and A6 (for APCP, "levels" are really accumulation
+periods), there need to be two variables in the output file, and they
+obviously can't both be named "APCP", so GenEnsProd names one "APCP_A3"
+and the other "APCP_A6". Here, the level is stored in the variable
+level_fcst and, below, is included in the name of the forecast field.
+
+For accumulated fields, the field name in the input forecast file contains
+TWO references to the accumulation period. The first is the level of the
+forecast field added by GenEnsProd as described above. The second is
+another reference to this same level (accumulation period), added by
+MET/METplus's PcpCombine tool (whose output file is the input into
+GenEnsProd). PcpCombine adds this reference to the level (really the
+accumulation period) to the field's name for the same reason that
+GenEnsProd does, i.e. to ensure that the names of variables in the output
+file are distinct. Here, this accumulation period is stored in the
+variable accum_hh. Thus, for accumulated fields, below we add both
+accum_hh and level_fcst to the field name to get an exact field name
+match.
+#}
+ {%- if (input_field_group in ['APCP', 'ASNOW']) %}
+FCST_VAR{{ns.var_count}}_NAME = {{field_fcst}}_{{accum_hh}}_{{level_fcst}}_ENS_MEAN
+ {%- else %}
+FCST_VAR{{ns.var_count}}_NAME = {{field_fcst}}_{{level_fcst}}_ENS_MEAN
+ {%- endif %}
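+{#-
+Hypothetical example of the naming convention above: for APCP with
+accum_hh = '03' and level_fcst = 'A3', the rendered forecast field name
+would be APCP_03_A3_ENS_MEAN (PcpCombine's accumulation tag followed by
+GenEnsProd's level tag), whereas a non-accumulated field such as REFC at
+level L0 would render as REFC_L0_ENS_MEAN.
+#}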
+
+{#-
+Set forecast field level.
+#}
+FCST_VAR{{ns.var_count}}_LEVELS = {{level_fcst}}
+
+{#-
+Set forecast field threshold(s). Note that no forecast thresholds are
+included in the METplus configuration file if input_thresh_fcst is set
+to 'none'.
+#}
+ {%- if (input_thresh_fcst != 'none') %}
+{#-
+If input_thresh_fcst is set to 'all', set the list of forecast thresholds
+to the full set of valid values.
+#}
+ {%- if (input_thresh_fcst == 'all') %}
+
+ {%- set threshes_fcst = valid_threshes_fcst %}
+{#-
+If input_thresh_fcst is set to a specific value:
+ 1) Ensure that input_thresh_fcst exists in the list of valid forecast
+ thresholds.
+ 2) Get the index of input_thresh_fcst in the list of valid forecast
+ thresholds. This will be needed later below when setting the
+ observation threshold(s).
+ 3) Use this index to set the forecast threshold to a one-element list
+ containing the specified forecast threshold.
+#}
+ {%- else %}
+
+ {%- if input_thresh_fcst not in valid_threshes_fcst %}
+ {%- set error_msg = '\n' ~
+'For the current forecast field (field_fcst) and forecast level (level_fcst),\n' ~
+'the input forecast threshold (input_thresh_fcst) does not exist in the list\n' ~
+'of valid forecast thresholds (valid_threshes_fcst):\n' ~
+' field_fcst = ' ~ field_fcst ~ '\n' ~
+' level_fcst = ' ~ level_fcst ~ '\n' ~
+' valid_threshes_fcst = ' ~ valid_threshes_fcst ~ '\n' ~
+' input_thresh_fcst = ' ~ input_thresh_fcst ~ '\n' %}
+ {{metplus_macros.print_err_and_quit(error_msg)}}
+ {%- endif %}
+ {%- set indx_input_thresh_fcst = valid_threshes_fcst.index(input_thresh_fcst) %}
+ {%- set threshes_fcst = [valid_threshes_fcst[indx_input_thresh_fcst]] %}
+
+ {%- endif %}
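+{#-
+Hypothetical illustration of the index lookup above: if
+valid_threshes_fcst = ['>0.0', '>=2.54', '>=6.35'] and
+input_thresh_fcst = '>=2.54', then indx_input_thresh_fcst = 1 and
+threshes_fcst = ['>=2.54']. The saved index is reused further below to
+select the observation threshold at the same position.
+#}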
+{#-
+If threshes_fcst is no longer its default value of an empty list, then
+it was set above to a non-empty value, so write the forecast thresholds
+to the METplus configuration file. Then reset threshes_fcst to its
+default value so that thresholds are processed properly for the next
+field.
+#}
+ {%- if (threshes_fcst != []) %}
+FCST_VAR{{ns.var_count}}_THRESH = {{threshes_fcst|join(', ')}}
+ {%- endif %}
+ {%- set threshes_fcst = [] %}
+
+ {%- endif %}
+
+{#-
+Set forecast field options.
+#}
+ {%- set opts_indent_len = 20 %}
+ {%- if (ns.var_count > 9) and (ns.var_count <= 99) %}
+ {%- set opts_indent_len = opts_indent_len + 1 %}
+ {%- elif (ns.var_count > 99) and (ns.var_count <= 999) %}
+ {%- set opts_indent_len = opts_indent_len + 2 %}
+ {%- elif (ns.var_count > 999) %}
+ {%- set opts_indent_len = opts_indent_len + 3 %}
+ {%- endif %}
+ {%- set opts_indent = ' '*opts_indent_len %}
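+{#-
+The indent length computed here aligns continuation lines under the value
+part of the FCST_VAR<n>_OPTIONS entry: 'FCST_VAR1_OPTIONS = ' is 20
+characters, and one extra space is added per extra digit in the variable
+counter (e.g. 21 characters for FCST_VAR12_OPTIONS).
+#}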
+
+ {%- if input_field_group == 'ADPUPA' %}
+
+ {%- if field_fcst == 'CAPE' %}
+FCST_VAR{{ns.var_count}}_OPTIONS = cnt_thresh = [ >0 ];
+ {%- endif %}
+
+ {%- endif %}
+
+{#-
+Set observation field name. Note that this has to exactly match the name
+of the field in the input observation file.
+
+For accumulated fields, the input observation file is generated by MET's
+PcpCombine tool. In that file, the field name consists of the observation
+field name here (field_obs) with the accumulation period appended to it
+(separated by an underscore), so we must do the same here to get an exact
+match.
+
+Note:
+It turns out that for ASNOW, PcpCombine is not run for the observations,
+so we exclude ASNOW from the "if" clause here (it falls into the "else").
+For uniform workflow behavior between APCP and ASNOW, consider running
+PcpCombine for ASNOW observations as well (just as it is run for APCP
+observations).
+ {%- if (input_field_group in ['APCP', 'ASNOW']) %}
+#}
+ {%- if (input_field_group in ['APCP']) %}
+OBS_VAR{{ns.var_count}}_NAME = {{field_obs}}_{{accum_hh}}
+ {%- else %}
+OBS_VAR{{ns.var_count}}_NAME = {{field_obs}}
+ {%- endif %}
+
+{#-
+Set observation field level.
+#}
+ {%- set indx_level_fcst = valid_levels_fcst.index(level_fcst) %}
+ {%- set level_obs = valid_levels_obs[indx_level_fcst] %}
+OBS_VAR{{ns.var_count}}_LEVELS = {{level_obs}}
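+{#-
+Hypothetical illustration of the level pairing above: if
+valid_levels_fcst = ['A3', 'A6'] and valid_levels_obs = ['A03', 'A06'],
+then level_fcst = 'A6' gives indx_level_fcst = 1 and level_obs = 'A06',
+i.e. forecast and observation levels are matched by position in their
+respective lists, not by name.
+#}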
+
+{#-
+Set observation field threshold(s). Note that no observation thresholds
+are included in the METplus configuration file if input_thresh_fcst is
+set to 'none'.
+#}
+ {%- if (input_thresh_fcst != 'none') %}
+{#-
+Set the list of valid observation thresholds to the one corresponding to
+the current observation level (level_obs).
+#}
+ {%- set valid_threshes_obs = fields_levels_threshes_obs[i][field_obs][level_obs] %}
+{#-
+If input_thresh_fcst is set to 'all', set the list of observation thresholds
+to the full set of valid values.
+#}
+ {%- if (input_thresh_fcst == 'all') %}
+
+ {%- set threshes_obs = valid_threshes_obs %}
+{#-
+If input_thresh_fcst is set to a specific forecast threshold, then the
+observation threshold is given by the element in the list of valid
+observation thresholds that has the same index as that of input_thresh_fcst
+in the list of valid forecast thresholds.
+#}
+ {%- else %}
+ {%- set threshes_obs = [valid_threshes_obs[indx_input_thresh_fcst]] %}
+ {%- endif %}
+{#-
+If threshes_obs is no longer its default value of an empty list, then
+it was set above to a non-empty value, so write the observation
+thresholds to the METplus configuration file. Then reset threshes_obs to
+its default value so that thresholds are processed properly for the next
+field.
+#}
+ {%- if (threshes_obs != []) %}
+OBS_VAR{{ns.var_count}}_THRESH = {{threshes_obs|join(', ')}}
+ {%- endif %}
+ {%- set threshes_obs = [] %}
+
+ {%- endif %}
+
+{#-
+Set observation field options.
+#}
+ {%- set opts_indent_len = opts_indent_len - 1 %}
+ {%- set opts_indent = ' '*opts_indent_len %}
+
+ {%- if input_field_group == 'ASNOW' %}
+
+ {%- if field_obs == 'ASNOW' %}
+OBS_VAR{{ns.var_count}}_OPTIONS = convert(x) = 100.0*x;
+ {%- endif %}
+
+ {%- elif input_field_group == 'ADPUPA' %}
+
+ {%- if field_obs == 'CAPE' %}
+OBS_VAR{{ns.var_count}}_OPTIONS = cnt_thresh = [ >0 ];
+{{opts_indent}}cnt_logic = UNION;
+ {%- elif field_obs == 'PBL' %}
+OBS_VAR{{ns.var_count}}_OPTIONS = desc = "TKE";
+ {%- endif %}
+
+ {%- endif %}
+
+{#-
+Print out a newline to separate the settings for the current field (both
+forecast and observation settings) from those for the next field.
+#}
+ {{- '\n' }}
+
+ {%- endif %}
+
+ {%- endfor %}
+{%- endfor %}
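+{#-
+Hedged sketch of one rendered forecast/observation pair (all values are
+invented; actual names, levels, and thresholds come from vx_config_dict):
+
+  FCST_VAR1_NAME = APCP_03_A3_ENS_MEAN
+  FCST_VAR1_LEVELS = A3
+  FCST_VAR1_THRESH = >0.0, >=2.54
+  OBS_VAR1_NAME = APCP_03
+  OBS_VAR1_LEVELS = A03
+  OBS_VAR1_THRESH = >0.0, >=2.54
+#}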
+#
+# Forecast data time window(s).
+#
+FCST_{{METPLUS_TOOL_NAME}}_FILE_WINDOW_BEGIN = 0
+FCST_{{METPLUS_TOOL_NAME}}_FILE_WINDOW_END = 0
+#
+# Observation data time window(s).
+#
+OBS_{{METPLUS_TOOL_NAME}}_FILE_WINDOW_BEGIN = 0
+OBS_{{METPLUS_TOOL_NAME}}_FILE_WINDOW_END = 0
+
+# MET {{MetplusToolName}} neighborhood values
+# See the MET User's Guide {{MetplusToolName}} section for more information
+{{METPLUS_TOOL_NAME}}_NEIGHBORHOOD_FIELD = BOTH
+
+# width value passed to nbrhd dictionary in the MET config file
+{{METPLUS_TOOL_NAME}}_NEIGHBORHOOD_WIDTH = 3,5,7
+
+# shape value passed to nbrhd dictionary in the MET config file
+{{METPLUS_TOOL_NAME}}_NEIGHBORHOOD_SHAPE = SQUARE
+
+# cov thresh list passed to nbrhd dictionary in the MET config file
+{{METPLUS_TOOL_NAME}}_NEIGHBORHOOD_COV_THRESH = >=0.5
+
+# Set to true to run {{MetplusToolName}} separately for each field specified
+# Set to false to create one run of {{MetplusToolName}} per run time that
+# includes all fields specified.
+{{METPLUS_TOOL_NAME}}_ONCE_PER_FIELD = False
+#
+# Set to true if forecast data is probabilistic.
+#
+FCST_IS_PROB = False
+#
+# Only used if FCST_IS_PROB is true - sets probabilistic threshold
+#
+FCST_{{METPLUS_TOOL_NAME}}_PROB_THRESH = ==0.1
+
+{{METPLUS_TOOL_NAME}}_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE}
+
+# Climatology data
+#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_FILE_NAME =
+#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_FIELD =
+#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_REGRID_METHOD =
+#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_REGRID_WIDTH =
+#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_REGRID_VLD_THRESH =
+#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_REGRID_SHAPE =
+#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_TIME_INTERP_METHOD =
+#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_MATCH_MONTH =
+#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_DAY_INTERVAL =
+#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_HOUR_INTERVAL =
+
+#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_FILE_NAME =
+#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_FIELD =
+#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_REGRID_METHOD =
+#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_REGRID_WIDTH =
+#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_REGRID_VLD_THRESH =
+#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_REGRID_SHAPE =
+#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_TIME_INTERP_METHOD =
+#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_MATCH_MONTH =
+#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_DAY_INTERVAL =
+#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_HOUR_INTERVAL =
+
+#{{METPLUS_TOOL_NAME}}_CLIMO_CDF_BINS = 1
+#{{METPLUS_TOOL_NAME}}_CLIMO_CDF_CENTER_BINS = False
+#{{METPLUS_TOOL_NAME}}_CLIMO_CDF_WRITE_BINS = True
+
+{{METPLUS_TOOL_NAME}}_MASK_GRID =
+
+# Statistical output types
+{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_FHO = STAT
+{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_CTC = STAT
+{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_CTS = STAT
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_MCTC = NONE
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_MCTS = NONE
+{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_CNT = STAT
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_SL1L2 = NONE
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_SAL1L2 = NONE
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_VL1L2 = NONE
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_VAL1L2 = NONE
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_VCNT = NONE
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_PCT = NONE
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_PSTD = NONE
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_PJC = NONE
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_PRC = NONE
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_ECLV = BOTH
+{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_NBRCTC = STAT
+{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_NBRCTS = STAT
+{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_NBRCNT = STAT
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_GRAD = BOTH
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_DMAP = NONE
+
+# NetCDF matched pairs output file
+#{{METPLUS_TOOL_NAME}}_NC_PAIRS_VAR_NAME =
+{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_LATLON = FALSE
+{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_RAW = FALSE
+{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_DIFF = FALSE
+{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_CLIMO = FALSE
+#{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_CLIMO_CDP = FALSE
+{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_WEIGHT = FALSE
+{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_NBRHD = FALSE
+#{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_FOURIER = FALSE
+#{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_GRADIENT = FALSE
+#{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_DISTANCE_MAP = FALSE
+{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_APPLY_MASK = FALSE
+
+# End of [config] section and start of [dir] section.
+[dir]
+#
+# Directory containing observation input to {{MetplusToolName}}.
+#
+OBS_{{METPLUS_TOOL_NAME}}_INPUT_DIR = {{obs_input_dir}}
+#
+# Directory containing forecast input to {{MetplusToolName}}.
+#
+FCST_{{METPLUS_TOOL_NAME}}_INPUT_DIR = {{fcst_input_dir}}
+#
+# Directory containing climatology mean input to {{MetplusToolName}}. Not used in
+# this example.
+#
+{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_INPUT_DIR =
+#
+# Directory containing climatology standard deviation input to
+# {{MetplusToolName}}. Not used in this example.
+#
+{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_INPUT_DIR =
+#
+# Directory in which to write output from {{MetplusToolName}}.
+#
+# OUTPUT_BASE must be set to a valid value; it cannot be left at its
+# default. METplus requires it to be defined even though it is not
+# explicitly used elsewhere in this configuration file.
+#
+OUTPUT_BASE = {{output_base}}
+{{METPLUS_TOOL_NAME}}_OUTPUT_DIR = {{output_dir}}
+#
+# Directory for staging data.
+#
+STAGING_DIR = {{staging_dir}}
+
+# End of [dir] section and start of [filename_templates] section.
+[filename_templates]
+#
+# Template for observation input to {{MetplusToolName}} relative to
+# OBS_{{METPLUS_TOOL_NAME}}_INPUT_DIR.
+#
+OBS_{{METPLUS_TOOL_NAME}}_INPUT_TEMPLATE = {{obs_input_fn_template}}
+#
+# Template for forecast input to {{MetplusToolName}} relative to
+# FCST_{{METPLUS_TOOL_NAME}}_INPUT_DIR.
+#
+FCST_{{METPLUS_TOOL_NAME}}_INPUT_TEMPLATE = {{fcst_input_fn_template}}
+#
+# Template for output from {{MetplusToolName}} relative to {{METPLUS_TOOL_NAME}}_OUTPUT_DIR.
+#
+{{METPLUS_TOOL_NAME}}_OUTPUT_TEMPLATE =
+#
+# Template for climatology input to {{MetplusToolName}} relative to
+# {{METPLUS_TOOL_NAME}}_CLIMO_MEAN_INPUT_DIR. Not used in this example.
+#
+{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_INPUT_TEMPLATE =
+#
+# Template for climatology input to {{MetplusToolName}} relative to
+# {{METPLUS_TOOL_NAME}}_CLIMO_STDEV_INPUT_DIR. Not used in this example.
+#
+{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_INPUT_TEMPLATE =
+#
+# Variable used to specify one or more verification mask files for
+# {{MetplusToolName}}. Not used for this example.
+#
+{{METPLUS_TOOL_NAME}}_VERIFICATION_MASK_TEMPLATE = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly
diff --git a/parm/metplus/GridStat_ensmean_APCP.conf b/parm/metplus/GridStat_ensmean_APCP.conf
deleted file mode 100644
index 6d3956c8e6..0000000000
--- a/parm/metplus/GridStat_ensmean_APCP.conf
+++ /dev/null
@@ -1,282 +0,0 @@
-# Ensemble mean GridStat METplus Configuration
-
-[config]
-
-# List of applications (tools) to run.
-PROCESS_LIST = GridStat
-
-# time looping - options are INIT, VALID, RETRO, and REALTIME
-# If set to INIT or RETRO:
-# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set
-# If set to VALID or REALTIME:
-# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set
-LOOP_BY = INIT
-
-# Format of INIT_BEG and INIT_END using % items
-# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc.
-# see www.strftime.org for more information
-# %Y%m%d%H expands to YYYYMMDDHH
-INIT_TIME_FMT = %Y%m%d%H
-
-# Start time for METplus run - must match INIT_TIME_FMT
-INIT_BEG = {{cdate}}
-
-# End time for METplus run - must match INIT_TIME_FMT
-INIT_END = {{cdate}}
-
-# Increment between METplus runs (in seconds if no units are specified).
-# Must be >= 60 seconds.
-INIT_INCREMENT = 3600
-
-# List of forecast leads to process for each run time (init or valid)
-# In hours if units are not specified
-# If unset, defaults to 0 (don't loop through forecast leads)
-LEAD_SEQ = {{fhr_list}}
-#
-# Order of loops to process data - Options are times, processes
-# Not relevant if only one item is in the PROCESS_LIST
-# times = run all wrappers in the PROCESS_LIST for a single run time, then
-# increment the run time and run all wrappers again until all times have
-# been evaluated.
-# processes = run the first wrapper in the PROCESS_LIST for all times
-# specified, then repeat for the next item in the PROCESS_LIST until all
-# wrappers have been run
-#
-LOOP_ORDER = times
-#
-# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud.
-#
-LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}}
-#
-# Specify the name of the METplus log file.
-#
-LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}}
-#
-# Specify the location and name of the final METplus conf file.
-#
-METPLUS_CONF = {GRID_STAT_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}}
-#
-# Location of MET configuration file to pass to GridStat.
-#
-# References PARM_BASE, which is the location of the parm directory
-# corresponding to the ush directory of the run_metplus.py script that
-# is called or the value of the environment variable METPLUS_PARM_BASE
-# if set.
-#
-GRID_STAT_CONFIG_FILE = {PARM_BASE}/met_config/GridStatConfig_wrapped
-
-# grid to remap data. Value is set as the 'to_grid' variable in the 'regrid' dictionary
-# See MET User's Guide for more information
-GRID_STAT_REGRID_TO_GRID = FCST
-GRID_STAT_REGRID_VLD_THRESH = 0.5
-GRID_STAT_REGRID_METHOD = BUDGET
-GRID_STAT_REGRID_WIDTH = 2
-GRID_STAT_REGRID_SHAPE = SQUARE
-
-#GRID_STAT_INTERP_FIELD = BOTH
-#GRID_STAT_INTERP_VLD_THRESH = 1.0
-#GRID_STAT_INTERP_SHAPE = SQUARE
-#GRID_STAT_INTERP_TYPE_METHOD = NEAREST
-#GRID_STAT_INTERP_TYPE_WIDTH = 1
-
-#GRID_STAT_GRID_WEIGHT_FLAG =
-
-#
-# Name to identify model (forecast) data in output.
-#
-# The variable MODEL is recorded in the stat files, and the data in
-# these files is then plotted (e.g. using METViewer). Here, we add a
-# suffix to MODEL that identifies the data as that for the ensemble
-# mean. This makes it easier to identify each curve.
-#
-MODEL = {{vx_fcst_model_name}}_ensmean
-#
-# Name to identify observation data in output.
-#
-OBTYPE = {{obtype}}
-#
-# Value to enter under the DESC column in the output stat file.
-#
-GRID_STAT_DESC = NA
-
-GRID_STAT_MET_CONFIG_OVERRIDES = cat_thresh = [NA]; cnt_thresh = [NA]; cnt_logic = UNION; wind_thresh = [NA]; wind_logic = UNION; ci_alpha = [0.05]; rank_corr_flag = FALSE;
-#
-# List of forecast and corresponding observation fields to process.
-#
-FCST_VAR1_NAME = {{fieldname_in_met_output}}_{{accum_hh}}_A{{accum_no_pad}}_ENS_MEAN
-FCST_VAR1_LEVELS = A{{accum_hh}}
-FCST_VAR1_THRESH = {{field_thresholds}}
-OBS_VAR1_NAME = {{fieldname_in_met_output}}_{{accum_hh}}
-OBS_VAR1_LEVELS = A{{accum_hh}}
-OBS_VAR1_THRESH = {{field_thresholds}}
-
-#
-# Forecast data time window(s).
-#
-FCST_GRID_STAT_FILE_WINDOW_BEGIN = 0
-FCST_GRID_STAT_FILE_WINDOW_END = 0
-#
-# Observation data time window(s).
-#
-OBS_GRID_STAT_FILE_WINDOW_BEGIN = 0
-OBS_GRID_STAT_FILE_WINDOW_END = 0
-
-# MET GridStat neighborhood values
-# See the MET User's Guide GridStat section for more information
-GRID_STAT_NEIGHBORHOOD_FIELD = BOTH
-
-# width value passed to nbrhd dictionary in the MET config file
-GRID_STAT_NEIGHBORHOOD_WIDTH = 3,5,7
-
-# shape value passed to nbrhd dictionary in the MET config file
-GRID_STAT_NEIGHBORHOOD_SHAPE = SQUARE
-
-# cov thresh list passed to nbrhd dictionary in the MET config file
-GRID_STAT_NEIGHBORHOOD_COV_THRESH = >=0.5
-
-# Set to true to run GridStat separately for each field specified
-# Set to false to create one run of GridStat per run time that
-# includes all fields specified.
-GRID_STAT_ONCE_PER_FIELD = False
-#
-# Set to true if forecast data is probabilistic.
-#
-FCST_IS_PROB = False
-#
-# Only used if FCST_IS_PROB is true - sets probabilistic threshold
-#
-FCST_GRID_STAT_PROB_THRESH = ==0.1
-
-GRID_STAT_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE}
-
-# Climatology data
-#GRID_STAT_CLIMO_MEAN_FILE_NAME =
-#GRID_STAT_CLIMO_MEAN_FIELD =
-#GRID_STAT_CLIMO_MEAN_REGRID_METHOD =
-#GRID_STAT_CLIMO_MEAN_REGRID_WIDTH =
-#GRID_STAT_CLIMO_MEAN_REGRID_VLD_THRESH =
-#GRID_STAT_CLIMO_MEAN_REGRID_SHAPE =
-#GRID_STAT_CLIMO_MEAN_TIME_INTERP_METHOD =
-#GRID_STAT_CLIMO_MEAN_MATCH_MONTH =
-#GRID_STAT_CLIMO_MEAN_DAY_INTERVAL =
-#GRID_STAT_CLIMO_MEAN_HOUR_INTERVAL =
-
-#GRID_STAT_CLIMO_STDEV_FILE_NAME =
-#GRID_STAT_CLIMO_STDEV_FIELD =
-#GRID_STAT_CLIMO_STDEV_REGRID_METHOD =
-#GRID_STAT_CLIMO_STDEV_REGRID_WIDTH =
-#GRID_STAT_CLIMO_STDEV_REGRID_VLD_THRESH =
-#GRID_STAT_CLIMO_STDEV_REGRID_SHAPE =
-#GRID_STAT_CLIMO_STDEV_TIME_INTERP_METHOD =
-#GRID_STAT_CLIMO_STDEV_MATCH_MONTH =
-#GRID_STAT_CLIMO_STDEV_DAY_INTERVAL =
-#GRID_STAT_CLIMO_STDEV_HOUR_INTERVAL =
-
-#GRID_STAT_CLIMO_CDF_BINS = 1
-#GRID_STAT_CLIMO_CDF_CENTER_BINS = False
-#GRID_STAT_CLIMO_CDF_WRITE_BINS = True
-
-GRID_STAT_MASK_GRID =
-
-# Statistical output types
-GRID_STAT_OUTPUT_FLAG_FHO = STAT
-GRID_STAT_OUTPUT_FLAG_CTC = STAT
-GRID_STAT_OUTPUT_FLAG_CTS = STAT
-#GRID_STAT_OUTPUT_FLAG_MCTC = NONE
-#GRID_STAT_OUTPUT_FLAG_MCTS = NONE
-GRID_STAT_OUTPUT_FLAG_CNT = STAT
-#GRID_STAT_OUTPUT_FLAG_SL1L2 = NONE
-#GRID_STAT_OUTPUT_FLAG_SAL1L2 = NONE
-#GRID_STAT_OUTPUT_FLAG_VL1L2 = NONE
-#GRID_STAT_OUTPUT_FLAG_VAL1L2 = NONE
-#GRID_STAT_OUTPUT_FLAG_VCNT = NONE
-#GRID_STAT_OUTPUT_FLAG_PCT = NONE
-#GRID_STAT_OUTPUT_FLAG_PSTD = NONE
-#GRID_STAT_OUTPUT_FLAG_PJC = NONE
-#GRID_STAT_OUTPUT_FLAG_PRC = NONE
-#GRID_STAT_OUTPUT_FLAG_ECLV = BOTH
-GRID_STAT_OUTPUT_FLAG_NBRCTC = STAT
-GRID_STAT_OUTPUT_FLAG_NBRCTS = STAT
-GRID_STAT_OUTPUT_FLAG_NBRCNT = STAT
-#GRID_STAT_OUTPUT_FLAG_GRAD = BOTH
-#GRID_STAT_OUTPUT_FLAG_DMAP = NONE
-
-# NetCDF matched pairs output file
-#GRID_STAT_NC_PAIRS_VAR_NAME =
-GRID_STAT_NC_PAIRS_FLAG_LATLON = FALSE
-GRID_STAT_NC_PAIRS_FLAG_RAW = FALSE
-GRID_STAT_NC_PAIRS_FLAG_DIFF = FALSE
-GRID_STAT_NC_PAIRS_FLAG_CLIMO = FALSE
-#GRID_STAT_NC_PAIRS_FLAG_CLIMO_CDP = FALSE
-GRID_STAT_NC_PAIRS_FLAG_WEIGHT = FALSE
-GRID_STAT_NC_PAIRS_FLAG_NBRHD = FALSE
-#GRID_STAT_NC_PAIRS_FLAG_FOURIER = FALSE
-#GRID_STAT_NC_PAIRS_FLAG_GRADIENT = FALSE
-#GRID_STAT_NC_PAIRS_FLAG_DISTANCE_MAP = FALSE
-GRID_STAT_NC_PAIRS_FLAG_APPLY_MASK = FALSE
-
-# End of [config] section and start of [dir] section.
-[dir]
-#
-# Directory containing observation input to GridStat.
-#
-OBS_GRID_STAT_INPUT_DIR = {{obs_input_dir}}
-#
-# Directory containing forecast input to GridStat.
-#
-FCST_GRID_STAT_INPUT_DIR = {{fcst_input_dir}}
-#
-# Directory containing climatology mean input to GridStat. Not used in
-# this example.
-#
-GRID_STAT_CLIMO_MEAN_INPUT_DIR =
-#
-# Directory containing climatology standard deviation input to GridStat. Not used in
-# this example.
-#
-GRID_STAT_CLIMO_STDEV_INPUT_DIR =
-#
-# Directory in which to write output from GridStat.
-#
-# OUTPUT_BASE apparently has to be set to something; it cannot be left
-# to its default value. But it is not explicitly used elsewhere in this
-# configuration file.
-#
-OUTPUT_BASE = {{output_base}}
-GRID_STAT_OUTPUT_DIR = {{output_dir}}
-#
-# Directory for staging data.
-#
-STAGING_DIR = {{staging_dir}}
-
-# End of [dir] section and start of [filename_templates] section.
-[filename_templates]
-#
-# Template for observation input to GridStat relative to
-# OBS_GRID_STAT_INPUT_DIR.
-#
-OBS_GRID_STAT_INPUT_TEMPLATE = {{obs_input_fn_template}}
-#
-# Template for forecast input to GridStat relative to
-# FCST_GRID_STAT_INPUT_DIR.
-#
-FCST_GRID_STAT_INPUT_TEMPLATE = {{fcst_input_fn_template}}
-#
-# Template for output from GridStat relative to GRID_STAT_OUTPUT_DIR.
-#
-GRID_STAT_OUTPUT_TEMPLATE =
-#
-# Template for climatology input to GridStat relative to
-# GRID_STAT_CLIMO_MEAN_INPUT_DIR. Not used in this example.
-#
-GRID_STAT_CLIMO_MEAN_INPUT_TEMPLATE =
-#
-# Template for climatology input to GridStat relative to
-# GRID_STAT_CLIMO_STDEV_INPUT_DIR. Not used in this example.
-#
-GRID_STAT_CLIMO_STDEV_INPUT_TEMPLATE =
-#
-# Variable used to specify one or more verification mask files for
-# GridStat. Not used for this example.
-#
-GRID_STAT_VERIFICATION_MASK_TEMPLATE = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly
diff --git a/parm/metplus/GridStat_ensmean_ASNOW.conf b/parm/metplus/GridStat_ensmean_ASNOW.conf
deleted file mode 100644
index 6fb8951a3f..0000000000
--- a/parm/metplus/GridStat_ensmean_ASNOW.conf
+++ /dev/null
@@ -1,287 +0,0 @@
-# Ensemble mean GridStat METplus Configuration
-
-[config]
-
-# List of applications (tools) to run.
-PROCESS_LIST = GridStat
-
-# time looping - options are INIT, VALID, RETRO, and REALTIME
-# If set to INIT or RETRO:
-# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set
-# If set to VALID or REALTIME:
-# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set
-LOOP_BY = INIT
-
-# Format of INIT_BEG and INIT_END using % items
-# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc.
-# see www.strftime.org for more information
-# %Y%m%d%H expands to YYYYMMDDHH
-INIT_TIME_FMT = %Y%m%d%H
-
-# Start time for METplus run - must match INIT_TIME_FMT
-INIT_BEG = {{cdate}}
-
-# End time for METplus run - must match INIT_TIME_FMT
-INIT_END = {{cdate}}
-
-# Increment between METplus runs (in seconds if no units are specified).
-# Must be >= 60 seconds.
-INIT_INCREMENT = 3600
-
-# List of forecast leads to process for each run time (init or valid)
-# In hours if units are not specified
-# If unset, defaults to 0 (don't loop through forecast leads)
-LEAD_SEQ = {{fhr_list}}
-#
-# Order of loops to process data - Options are times, processes
-# Not relevant if only one item is in the PROCESS_LIST
-# times = run all wrappers in the PROCESS_LIST for a single run time, then
-# increment the run time and run all wrappers again until all times have
-# been evaluated.
-# processes = run the first wrapper in the PROCESS_LIST for all times
-# specified, then repeat for the next item in the PROCESS_LIST until all
-# wrappers have been run
-#
-LOOP_ORDER = times
-#
-# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud.
-#
-LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}}
-#
-# Specify the name of the METplus log file.
-#
-LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}}
-#
-# Specify the location and name of the final METplus conf file.
-#
-METPLUS_CONF = {GRID_STAT_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}}
-#
-# Location of MET configuration file to pass to GridStat.
-#
-# References PARM_BASE, which is the location of the parm directory
-# corresponding to the ush directory of the run_metplus.py script that
-# is called or the value of the environment variable METPLUS_PARM_BASE
-# if set.
-#
-GRID_STAT_CONFIG_FILE = {PARM_BASE}/met_config/GridStatConfig_wrapped
-
-# grid to remap data. Value is set as the 'to_grid' variable in the 'regrid' dictionary
-# See MET User's Guide for more information
-GRID_STAT_REGRID_TO_GRID = FCST
-GRID_STAT_REGRID_VLD_THRESH = 0.5
-GRID_STAT_REGRID_METHOD = BUDGET
-GRID_STAT_REGRID_WIDTH = 2
-GRID_STAT_REGRID_SHAPE = SQUARE
-
-#GRID_STAT_INTERP_FIELD = BOTH
-#GRID_STAT_INTERP_VLD_THRESH = 1.0
-#GRID_STAT_INTERP_SHAPE = SQUARE
-#GRID_STAT_INTERP_TYPE_METHOD = NEAREST
-#GRID_STAT_INTERP_TYPE_WIDTH = 1
-
-#GRID_STAT_GRID_WEIGHT_FLAG =
-
-#
-# Name to identify model (forecast) data in output.
-#
-# The variable MODEL is recorded in the stat files, and the data in
-# these files is then plotted (e.g. using METViewer). Here, we add a
-# suffix to MODEL that identifies the data as that for the ensemble
-# mean. This makes it easier to identify each curve.
-#
-MODEL = {{vx_fcst_model_name}}_ensmean
-#
-# Name to identify observation data in output.
-#
-OBTYPE = {{obtype}}
-#
-# Value to enter under the DESC column in the output stat file.
-#
-GRID_STAT_DESC = NA
-
-# List of variables to compare in GridStat - FCST_VAR1 variables correspond
-# to OBS_VAR1 variables
-# Note [FCST/OBS/BOTH]_GRID_STAT_VAR_NAME can be used instead if different evaluations
-# are needed for different tools
-
-GRID_STAT_MET_CONFIG_OVERRIDES = cat_thresh = [NA]; cnt_thresh = [NA]; cnt_logic = UNION; wind_thresh = [NA]; wind_logic = UNION; ci_alpha = [0.05]; rank_corr_flag = FALSE;
-#
-# List of forecast and corresponding observation fields to process.
-#
-FCST_VAR1_NAME = {{fieldname_in_met_output}}_{{accum_hh}}_A{{accum_no_pad}}_ENS_MEAN
-FCST_VAR1_LEVELS = A{{accum_hh}}
-FCST_VAR1_THRESH = {{field_thresholds}}
-OBS_VAR1_NAME = {{fieldname_in_obs_input}}
-OBS_VAR1_LEVELS = A{{accum_hh}}
-OBS_VAR1_THRESH = {{field_thresholds}}
-OBS_VAR1_OPTIONS = convert(x) = 100.0*x;
-#
-# Forecast data time window(s).
-#
-FCST_GRID_STAT_FILE_WINDOW_BEGIN = 0
-FCST_GRID_STAT_FILE_WINDOW_END = 0
-#
-# Observation data time window(s).
-#
-OBS_GRID_STAT_FILE_WINDOW_BEGIN = 0
-OBS_GRID_STAT_FILE_WINDOW_END = 0
-
-# MET GridStat neighborhood values
-# See the MET User's Guide GridStat section for more information
-GRID_STAT_NEIGHBORHOOD_FIELD = BOTH
-
-# width value passed to nbrhd dictionary in the MET config file
-GRID_STAT_NEIGHBORHOOD_WIDTH = 3,5,7
-
-# shape value passed to nbrhd dictionary in the MET config file
-GRID_STAT_NEIGHBORHOOD_SHAPE = SQUARE
-
-# cov thresh list passed to nbrhd dictionary in the MET config file
-GRID_STAT_NEIGHBORHOOD_COV_THRESH = >=0.5
-
-# Set to true to run GridStat separately for each field specified
-# Set to false to create one run of GridStat per run time that
-# includes all fields specified.
-GRID_STAT_ONCE_PER_FIELD = False
-#
-# Set to true if forecast data is probabilistic.
-#
-FCST_IS_PROB = False
-#
-# Only used if FCST_IS_PROB is true - sets probabilistic threshold
-#
-FCST_GRID_STAT_PROB_THRESH = ==0.1
-
-GRID_STAT_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE}
-
-# Climatology data
-#GRID_STAT_CLIMO_MEAN_FILE_NAME =
-#GRID_STAT_CLIMO_MEAN_FIELD =
-#GRID_STAT_CLIMO_MEAN_REGRID_METHOD =
-#GRID_STAT_CLIMO_MEAN_REGRID_WIDTH =
-#GRID_STAT_CLIMO_MEAN_REGRID_VLD_THRESH =
-#GRID_STAT_CLIMO_MEAN_REGRID_SHAPE =
-#GRID_STAT_CLIMO_MEAN_TIME_INTERP_METHOD =
-#GRID_STAT_CLIMO_MEAN_MATCH_MONTH =
-#GRID_STAT_CLIMO_MEAN_DAY_INTERVAL =
-#GRID_STAT_CLIMO_MEAN_HOUR_INTERVAL =
-
-#GRID_STAT_CLIMO_STDEV_FILE_NAME =
-#GRID_STAT_CLIMO_STDEV_FIELD =
-#GRID_STAT_CLIMO_STDEV_REGRID_METHOD =
-#GRID_STAT_CLIMO_STDEV_REGRID_WIDTH =
-#GRID_STAT_CLIMO_STDEV_REGRID_VLD_THRESH =
-#GRID_STAT_CLIMO_STDEV_REGRID_SHAPE =
-#GRID_STAT_CLIMO_STDEV_TIME_INTERP_METHOD =
-#GRID_STAT_CLIMO_STDEV_MATCH_MONTH =
-#GRID_STAT_CLIMO_STDEV_DAY_INTERVAL =
-#GRID_STAT_CLIMO_STDEV_HOUR_INTERVAL =
-
-#GRID_STAT_CLIMO_CDF_BINS = 1
-#GRID_STAT_CLIMO_CDF_CENTER_BINS = False
-#GRID_STAT_CLIMO_CDF_WRITE_BINS = True
-
-GRID_STAT_MASK_GRID =
-
-# Statistical output types
-GRID_STAT_OUTPUT_FLAG_FHO = STAT
-GRID_STAT_OUTPUT_FLAG_CTC = STAT
-GRID_STAT_OUTPUT_FLAG_CTS = STAT
-#GRID_STAT_OUTPUT_FLAG_MCTC = NONE
-#GRID_STAT_OUTPUT_FLAG_MCTS = NONE
-GRID_STAT_OUTPUT_FLAG_CNT = STAT
-#GRID_STAT_OUTPUT_FLAG_SL1L2 = NONE
-#GRID_STAT_OUTPUT_FLAG_SAL1L2 = NONE
-#GRID_STAT_OUTPUT_FLAG_VL1L2 = NONE
-#GRID_STAT_OUTPUT_FLAG_VAL1L2 = NONE
-#GRID_STAT_OUTPUT_FLAG_VCNT = NONE
-#GRID_STAT_OUTPUT_FLAG_PCT = NONE
-#GRID_STAT_OUTPUT_FLAG_PSTD = NONE
-#GRID_STAT_OUTPUT_FLAG_PJC = NONE
-#GRID_STAT_OUTPUT_FLAG_PRC = NONE
-#GRID_STAT_OUTPUT_FLAG_ECLV = BOTH
-GRID_STAT_OUTPUT_FLAG_NBRCTC = STAT
-GRID_STAT_OUTPUT_FLAG_NBRCTS = STAT
-GRID_STAT_OUTPUT_FLAG_NBRCNT = STAT
-#GRID_STAT_OUTPUT_FLAG_GRAD = BOTH
-#GRID_STAT_OUTPUT_FLAG_DMAP = NONE
-
-# NetCDF matched pairs output file
-#GRID_STAT_NC_PAIRS_VAR_NAME =
-GRID_STAT_NC_PAIRS_FLAG_LATLON = FALSE
-GRID_STAT_NC_PAIRS_FLAG_RAW = FALSE
-GRID_STAT_NC_PAIRS_FLAG_DIFF = FALSE
-GRID_STAT_NC_PAIRS_FLAG_CLIMO = FALSE
-#GRID_STAT_NC_PAIRS_FLAG_CLIMO_CDP = FALSE
-GRID_STAT_NC_PAIRS_FLAG_WEIGHT = FALSE
-GRID_STAT_NC_PAIRS_FLAG_NBRHD = FALSE
-#GRID_STAT_NC_PAIRS_FLAG_FOURIER = FALSE
-#GRID_STAT_NC_PAIRS_FLAG_GRADIENT = FALSE
-#GRID_STAT_NC_PAIRS_FLAG_DISTANCE_MAP = FALSE
-GRID_STAT_NC_PAIRS_FLAG_APPLY_MASK = FALSE
-
-# End of [config] section and start of [dir] section.
-[dir]
-#
-# Directory containing observation input to GridStat.
-#
-OBS_GRID_STAT_INPUT_DIR = {{obs_input_dir}}
-#
-# Directory containing forecast input to GridStat.
-#
-FCST_GRID_STAT_INPUT_DIR = {{fcst_input_dir}}
-#
-# Directory containing climatology mean input to GridStat. Not used in
-# this example.
-#
-GRID_STAT_CLIMO_MEAN_INPUT_DIR =
-#
-# Directory containing climatology standard deviation input to GridStat. Not used in
-# this example.
-#
-GRID_STAT_CLIMO_STDEV_INPUT_DIR =
-#
-# Directory in which to write output from GridStat.
-#
-# OUTPUT_BASE apparently has to be set to something; it cannot be left
-# to its default value. But it is not explicitly used elsewhere in this
-# configuration file.
-#
-OUTPUT_BASE = {{output_base}}
-GRID_STAT_OUTPUT_DIR = {{output_dir}}
-#
-# Directory for staging data.
-#
-STAGING_DIR = {{staging_dir}}
-
-# End of [dir] section and start of [filename_templates] section.
-[filename_templates]
-#
-# Template for observation input to GridStat relative to
-# OBS_GRID_STAT_INPUT_DIR.
-#
-OBS_GRID_STAT_INPUT_TEMPLATE = {{obs_input_fn_template}}
-#
-# Template for forecast input to GridStat relative to
-# FCST_GRID_STAT_INPUT_DIR.
-#
-FCST_GRID_STAT_INPUT_TEMPLATE = {{fcst_input_fn_template}}
-#
-# Template for output from GridStat relative to GRID_STAT_OUTPUT_DIR.
-#
-GRID_STAT_OUTPUT_TEMPLATE =
-#
-# Template for climatology input to GridStat relative to
-# GRID_STAT_CLIMO_MEAN_INPUT_DIR. Not used in this example.
-#
-GRID_STAT_CLIMO_MEAN_INPUT_TEMPLATE =
-#
-# Template for climatology input to GridStat relative to
-# GRID_STAT_CLIMO_STDEV_INPUT_DIR. Not used in this example.
-#
-GRID_STAT_CLIMO_STDEV_INPUT_TEMPLATE =
-#
-# Variable used to specify one or more verification mask files for
-# GridStat. Not used for this example.
-#
-GRID_STAT_VERIFICATION_MASK_TEMPLATE = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly
diff --git a/parm/metplus/GridStat_ensmean_REFC.conf b/parm/metplus/GridStat_ensmean_REFC.conf
deleted file mode 100644
index 451c82dfd5..0000000000
--- a/parm/metplus/GridStat_ensmean_REFC.conf
+++ /dev/null
@@ -1,313 +0,0 @@
-# Ensemble mean GridStat METplus Configuration
-
-[config]
-
-# List of applications (tools) to run.
-PROCESS_LIST = GridStat
-
-# time looping - options are INIT, VALID, RETRO, and REALTIME
-# If set to INIT or RETRO:
-# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set
-# If set to VALID or REALTIME:
-# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set
-LOOP_BY = INIT
-
-# Format of INIT_BEG and INIT_END using % items
-# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc.
-# see www.strftime.org for more information
-# %Y%m%d%H expands to YYYYMMDDHH
-INIT_TIME_FMT = %Y%m%d%H
-
-# Start time for METplus run - must match INIT_TIME_FMT
-INIT_BEG = {{cdate}}
-
-# End time for METplus run - must match INIT_TIME_FMT
-INIT_END = {{cdate}}
-
-# Increment between METplus runs (in seconds if no units are specified).
-# Must be >= 60 seconds.
-INIT_INCREMENT = 3600
-
-# List of forecast leads to process for each run time (init or valid)
-# In hours if units are not specified
-# If unset, defaults to 0 (don't loop through forecast leads)
-LEAD_SEQ = {{fhr_list}}
-#
-# Order of loops to process data - Options are times, processes
-# Not relevant if only one item is in the PROCESS_LIST
-# times = run all wrappers in the PROCESS_LIST for a single run time, then
-# increment the run time and run all wrappers again until all times have
-# been evaluated.
-# processes = run the first wrapper in the PROCESS_LIST for all times
-# specified, then repeat for the next item in the PROCESS_LIST until all
-# wrappers have been run
-#
-LOOP_ORDER = times
-#
-# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud.
-#
-LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}}
-#
-# Specify the name of the METplus log file.
-#
-LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}}
-#
-# Specify the location and name of the final METplus conf file.
-#
-METPLUS_CONF = {GRID_STAT_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}}
-#
-# Location of MET configuration file to pass to GridStat.
-#
-# References PARM_BASE, which is the location of the parm directory
-# corresponding to the ush directory of the run_metplus.py script that
-# is called or the value of the environment variable METPLUS_PARM_BASE
-# if set.
-#
-GRID_STAT_CONFIG_FILE = {PARM_BASE}/met_config/GridStatConfig_wrapped
-
-# grid to remap data. Value is set as the 'to_grid' variable in the 'regrid' dictionary
-# See MET User's Guide for more information
-GRID_STAT_REGRID_TO_GRID = FCST
-GRID_STAT_REGRID_VLD_THRESH = 0.5
-GRID_STAT_REGRID_METHOD = BUDGET
-GRID_STAT_REGRID_WIDTH = 2
-GRID_STAT_REGRID_SHAPE = SQUARE
-
-GRID_STAT_INTERP_FIELD = NONE
-GRID_STAT_INTERP_VLD_THRESH = 1.0
-GRID_STAT_INTERP_SHAPE = SQUARE
-GRID_STAT_INTERP_TYPE_METHOD = NEAREST
-GRID_STAT_INTERP_TYPE_WIDTH = 1
-
-GRID_STAT_GRID_WEIGHT_FLAG = NONE
-#
-# Name to identify model (forecast) data in output.
-#
-# The variable MODEL is recorded in the stat files, and the data in
-# these files is then plotted (e.g. using METViewer). Here, we add a
-# suffix to MODEL that identifies the data as that for the ensemble
-# mean. This makes it easier to identify each curve.
-#
-MODEL = {{vx_fcst_model_name}}_ensmean
-#
-# Name to identify observation data in output.
-#
-OBTYPE = {{obtype}}
-#
-# Value to enter under the DESC column in the output stat file.
-#
-GRID_STAT_DESC = NA
-
-# List of variables to compare in GridStat - FCST_VAR1 variables correspond
-# to OBS_VAR1 variables
-# Note [FCST/OBS/BOTH]_GRID_STAT_VAR_NAME can be used instead if different evaluations
-# are needed for different tools
-
-GRID_STAT_MET_CONFIG_OVERRIDES = cat_thresh = [NA]; cnt_thresh = [NA]; cnt_logic = UNION; wind_thresh = [NA]; wind_logic = UNION; ci_alpha = [0.05]; rank_corr_flag = FALSE;
-#
-# String to search for in the forecast input files for forecast
-# variable 1.
-#
-# Note:
-# This is the name of the field in the NetCDF file(s) created by MET's
-# gen_ens_prod tool. This tool reads in the grib2 file(s) (in this case
-# of forecasts) and outputs NetCDF file(s) in which the array names
-# consist of the value of fieldname_in_met_output plus a suffix that
-# specifies additional properties of the data in the array such as the
-# level, the type of statistic, etc. In this case, this suffix is
-# "_L0_ENS_MEAN". Thus, below, FCST_VAR1_NAME must be set to the value
-# of fieldname_in_met_output with "_L0_ENS_MEAN" appended to it.
-#
-FCST_VAR1_NAME = {{fieldname_in_met_output}}_L0_ENS_MEAN
-FCST_VAR1_LEVELS = L0
-FCST_VAR1_THRESH = {{field_thresholds}}
-#
-# String to search for in the observation input files for observation
-# variable 1.
-#
-# Note:
-# This is the name of the field in the grib2 observation file. Thus,
-# it should not be set to {{fieldname_in_met_output}} because the
-# value of fieldname_in_met_output is in general not the same as the
-# name of the field in the grib2 observation file (although it can be
-# for certain fields). If you do and it doesn't match, you may get an
-# error like this from METplus:
-# ERROR : VarInfoGrib2::set_dict() -> unrecognized GRIB2 field abbreviation ...
-#
-OBS_VAR1_NAME = {{fieldname_in_obs_input}}
-OBS_VAR1_LEVELS = Z500
-OBS_VAR1_THRESH = {{field_thresholds}}
-OBS_VAR1_OPTIONS = censor_thresh = lt-20;
- censor_val = -20.0;
- cnt_thresh = [ >15 ];
- cnt_logic = UNION;
-#
-# Forecast data time window(s).
-#
-#FCST_GRID_STAT_FILE_WINDOW_BEGIN = 0
-#FCST_GRID_STAT_FILE_WINDOW_END = 0
-#
-# Observation data time window(s).
-#
-OBS_GRID_STAT_FILE_WINDOW_BEGIN = -300
-OBS_GRID_STAT_FILE_WINDOW_END = 300
-
-# MET GridStat neighborhood values
-# See the MET User's Guide GridStat section for more information
-GRID_STAT_NEIGHBORHOOD_FIELD = BOTH
-
-# width value passed to nbrhd dictionary in the MET config file
-GRID_STAT_NEIGHBORHOOD_WIDTH = 3,5,7
-
-# shape value passed to nbrhd dictionary in the MET config file
-GRID_STAT_NEIGHBORHOOD_SHAPE = SQUARE
-
-# cov thresh list passed to nbrhd dictionary in the MET config file
-GRID_STAT_NEIGHBORHOOD_COV_THRESH = >=0.5
-
-# Set to true to run GridStat separately for each field specified
-# Set to false to create one run of GridStat per run time that
-# includes all fields specified.
-GRID_STAT_ONCE_PER_FIELD = False
-#
-# Set to true if forecast data is probabilistic.
-#
-FCST_IS_PROB = False
-#
-# Only used if FCST_IS_PROB is true - sets probabilistic threshold
-#
-FCST_GRID_STAT_PROB_THRESH = ==0.1
-
-GRID_STAT_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE}
-
-# Climatology data
-#GRID_STAT_CLIMO_MEAN_FILE_NAME =
-#GRID_STAT_CLIMO_MEAN_FIELD =
-#GRID_STAT_CLIMO_MEAN_REGRID_METHOD =
-#GRID_STAT_CLIMO_MEAN_REGRID_WIDTH =
-#GRID_STAT_CLIMO_MEAN_REGRID_VLD_THRESH =
-#GRID_STAT_CLIMO_MEAN_REGRID_SHAPE =
-#GRID_STAT_CLIMO_MEAN_TIME_INTERP_METHOD =
-#GRID_STAT_CLIMO_MEAN_MATCH_MONTH =
-#GRID_STAT_CLIMO_MEAN_DAY_INTERVAL =
-#GRID_STAT_CLIMO_MEAN_HOUR_INTERVAL =
-
-#GRID_STAT_CLIMO_STDEV_FILE_NAME =
-#GRID_STAT_CLIMO_STDEV_FIELD =
-#GRID_STAT_CLIMO_STDEV_REGRID_METHOD =
-#GRID_STAT_CLIMO_STDEV_REGRID_WIDTH =
-#GRID_STAT_CLIMO_STDEV_REGRID_VLD_THRESH =
-#GRID_STAT_CLIMO_STDEV_REGRID_SHAPE =
-#GRID_STAT_CLIMO_STDEV_TIME_INTERP_METHOD =
-#GRID_STAT_CLIMO_STDEV_MATCH_MONTH =
-#GRID_STAT_CLIMO_STDEV_DAY_INTERVAL =
-#GRID_STAT_CLIMO_STDEV_HOUR_INTERVAL =
-
-GRID_STAT_CLIMO_CDF_BINS = 1
-#GRID_STAT_CLIMO_CDF_CENTER_BINS = False
-#GRID_STAT_CLIMO_CDF_WRITE_BINS = True
-
-GRID_STAT_MASK_GRID =
-
-# Statistical output types
-GRID_STAT_OUTPUT_FLAG_FHO = STAT
-GRID_STAT_OUTPUT_FLAG_CTC = STAT
-GRID_STAT_OUTPUT_FLAG_CTS = STAT
-#GRID_STAT_OUTPUT_FLAG_MCTC = NONE
-#GRID_STAT_OUTPUT_FLAG_MCTS = NONE
-GRID_STAT_OUTPUT_FLAG_CNT = STAT
-#GRID_STAT_OUTPUT_FLAG_SL1L2 = NONE
-#GRID_STAT_OUTPUT_FLAG_SAL1L2 = NONE
-#GRID_STAT_OUTPUT_FLAG_VL1L2 = NONE
-#GRID_STAT_OUTPUT_FLAG_VAL1L2 = NONE
-#GRID_STAT_OUTPUT_FLAG_VCNT = NONE
-#GRID_STAT_OUTPUT_FLAG_PCT = NONE
-#GRID_STAT_OUTPUT_FLAG_PSTD = NONE
-#GRID_STAT_OUTPUT_FLAG_PJC = NONE
-#GRID_STAT_OUTPUT_FLAG_PRC = NONE
-#GRID_STAT_OUTPUT_FLAG_ECLV = BOTH
-GRID_STAT_OUTPUT_FLAG_NBRCTC = STAT
-GRID_STAT_OUTPUT_FLAG_NBRCTS = STAT
-GRID_STAT_OUTPUT_FLAG_NBRCNT = STAT
-#GRID_STAT_OUTPUT_FLAG_GRAD = BOTH
-#GRID_STAT_OUTPUT_FLAG_DMAP = NONE
-
-# NetCDF matched pairs output file
-#GRID_STAT_NC_PAIRS_VAR_NAME =
-GRID_STAT_NC_PAIRS_FLAG_LATLON = FALSE
-GRID_STAT_NC_PAIRS_FLAG_RAW = FALSE
-GRID_STAT_NC_PAIRS_FLAG_DIFF = FALSE
-GRID_STAT_NC_PAIRS_FLAG_CLIMO = FALSE
-#GRID_STAT_NC_PAIRS_FLAG_CLIMO_CDP = FALSE
-GRID_STAT_NC_PAIRS_FLAG_WEIGHT = FALSE
-GRID_STAT_NC_PAIRS_FLAG_NBRHD = FALSE
-#GRID_STAT_NC_PAIRS_FLAG_FOURIER = FALSE
-#GRID_STAT_NC_PAIRS_FLAG_GRADIENT = FALSE
-#GRID_STAT_NC_PAIRS_FLAG_DISTANCE_MAP = FALSE
-GRID_STAT_NC_PAIRS_FLAG_APPLY_MASK = FALSE
-
-# End of [config] section and start of [dir] section.
-[dir]
-#
-# Directory containing observation input to GridStat.
-#
-OBS_GRID_STAT_INPUT_DIR = {{obs_input_dir}}
-#
-# Directory containing forecast input to GridStat.
-#
-FCST_GRID_STAT_INPUT_DIR = {{fcst_input_dir}}
-#
-# Directory containing climatology mean input to GridStat. Not used in
-# this example.
-#
-GRID_STAT_CLIMO_MEAN_INPUT_DIR =
-#
-# Directory containing climatology standard deviation input to GridStat. Not used in
-# this example.
-#
-GRID_STAT_CLIMO_STDEV_INPUT_DIR =
-#
-# Directory in which to write output from GridStat.
-#
-# OUTPUT_BASE apparently has to be set to something; it cannot be left
-# to its default value. But it is not explicitly used elsewhere in this
-# configuration file.
-#
-OUTPUT_BASE = {{output_base}}
-GRID_STAT_OUTPUT_DIR = {{output_dir}}
-#
-# Directory for staging data.
-#
-STAGING_DIR = {{staging_dir}}
-
-# End of [dir] section and start of [filename_templates] section.
-[filename_templates]
-#
-# Template for observation input to GridStat relative to
-# OBS_GRID_STAT_INPUT_DIR.
-#
-OBS_GRID_STAT_INPUT_TEMPLATE = {{obs_input_fn_template}}
-#
-# Template for forecast input to GridStat relative to
-# FCST_GRID_STAT_INPUT_DIR.
-#
-FCST_GRID_STAT_INPUT_TEMPLATE = {{fcst_input_fn_template}}
-#
-# Template for output from GridStat relative to GRID_STAT_OUTPUT_DIR.
-#
-GRID_STAT_OUTPUT_TEMPLATE =
-#
-# Template for climatology input to GridStat relative to
-# GRID_STAT_CLIMO_MEAN_INPUT_DIR. Not used in this example.
-#
-GRID_STAT_CLIMO_MEAN_INPUT_TEMPLATE =
-#
-# Template for climatology input to GridStat relative to
-# GRID_STAT_CLIMO_STDEV_INPUT_DIR. Not used in this example.
-#
-GRID_STAT_CLIMO_STDEV_INPUT_TEMPLATE =
-#
-# Variable used to specify one or more verification mask files for
-# GridStat. Not used for this example.
-#
-GRID_STAT_VERIFICATION_MASK_TEMPLATE = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly
diff --git a/parm/metplus/GridStat_ensmean_RETOP.conf b/parm/metplus/GridStat_ensmean_RETOP.conf
deleted file mode 100644
index a881ed3ab5..0000000000
--- a/parm/metplus/GridStat_ensmean_RETOP.conf
+++ /dev/null
@@ -1,315 +0,0 @@
-# Ensemble mean GridStat METplus Configuration
-
-[config]
-
-# List of applications (tools) to run.
-PROCESS_LIST = GridStat
-
-# time looping - options are INIT, VALID, RETRO, and REALTIME
-# If set to INIT or RETRO:
-# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set
-# If set to VALID or REALTIME:
-# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set
-LOOP_BY = INIT
-
-# Format of INIT_BEG and INIT_END using % items
-# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc.
-# see www.strftime.org for more information
-# %Y%m%d%H expands to YYYYMMDDHH
-INIT_TIME_FMT = %Y%m%d%H
-
-# Start time for METplus run - must match INIT_TIME_FMT
-INIT_BEG = {{cdate}}
-
-# End time for METplus run - must match INIT_TIME_FMT
-INIT_END = {{cdate}}
-
-# Increment between METplus runs (in seconds if no units are specified).
-# Must be >= 60 seconds.
-INIT_INCREMENT = 3600
-
-# List of forecast leads to process for each run time (init or valid)
-# In hours if units are not specified
-# If unset, defaults to 0 (don't loop through forecast leads)
-LEAD_SEQ = {{fhr_list}}
-#
-# Order of loops to process data - Options are times, processes
-# Not relevant if only one item is in the PROCESS_LIST
-# times = run all wrappers in the PROCESS_LIST for a single run time, then
-# increment the run time and run all wrappers again until all times have
-# been evaluated.
-# processes = run the first wrapper in the PROCESS_LIST for all times
-# specified, then repeat for the next item in the PROCESS_LIST until all
-# wrappers have been run
-#
-LOOP_ORDER = times
-#
-# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud.
-#
-LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}}
-#
-# Specify the name of the METplus log file.
-#
-LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}}
-#
-# Specify the location and name of the final METplus conf file.
-#
-METPLUS_CONF = {GRID_STAT_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}}
-#
-# Location of MET configuration file to pass to GridStat.
-#
-# References PARM_BASE, which is the location of the parm directory
-# corresponding to the ush directory of the run_metplus.py script that
-# is called or the value of the environment variable METPLUS_PARM_BASE
-# if set.
-#
-GRID_STAT_CONFIG_FILE = {PARM_BASE}/met_config/GridStatConfig_wrapped
-
-# grid to remap data. Value is set as the 'to_grid' variable in the 'regrid' dictionary
-# See MET User's Guide for more information
-GRID_STAT_REGRID_TO_GRID = FCST
-GRID_STAT_REGRID_VLD_THRESH = 0.5
-GRID_STAT_REGRID_METHOD = BUDGET
-GRID_STAT_REGRID_WIDTH = 2
-GRID_STAT_REGRID_SHAPE = SQUARE
-
-GRID_STAT_INTERP_FIELD = NONE
-GRID_STAT_INTERP_VLD_THRESH = 1.0
-GRID_STAT_INTERP_SHAPE = SQUARE
-GRID_STAT_INTERP_TYPE_METHOD = NEAREST
-GRID_STAT_INTERP_TYPE_WIDTH = 1
-
-GRID_STAT_GRID_WEIGHT_FLAG = NONE
-#
-# Name to identify model (forecast) data in output.
-#
-# The variable MODEL is recorded in the stat files, and the data in
-# these files is then plotted (e.g. using METViewer). Here, we add a
-# suffix to MODEL that identifies the data as that for the ensemble
-# mean. This makes it easier to identify each curve.
-#
-MODEL = {{vx_fcst_model_name}}_ensmean
-#
-# Name to identify observation data in output.
-#
-OBTYPE = {{obtype}}
-#
-# Value to enter under the DESC column in the output stat file.
-#
-GRID_STAT_DESC = NA
-
-# List of variables to compare in GridStat - FCST_VAR1 variables correspond
-# to OBS_VAR1 variables
-# Note [FCST/OBS/BOTH]_GRID_STAT_VAR_NAME can be used instead if different evaluations
-# are needed for different tools
-
-GRID_STAT_MET_CONFIG_OVERRIDES = cat_thresh = [NA]; cnt_thresh = [NA]; cnt_logic = UNION; wind_thresh = [NA]; wind_logic = UNION; ci_alpha = [0.05]; rank_corr_flag = FALSE;
-#
-# String to search for in the forecast input files for forecast
-# variable 1.
-#
-# Note:
-# This is the name of the field in the NetCDF file(s) created by MET's
-# gen_ens_prod tool. This tool reads in the grib2 file(s) (in this case
-# of forecasts) and outputs NetCDF file(s) in which the array names
-# consist of the value of fieldname_in_met_output plus a suffix that
-# specifies additional properties of the data in the array such as the
-# level, the type of statistic, etc. In this case, this suffix is
-# "_L0_ENS_MEAN". Thus, below, FCST_VAR1_NAME must be set to the value
-# of fieldname_in_met_output with "_L0_ENS_MEAN" appended to it.
-#
-FCST_VAR1_NAME = {{fieldname_in_met_output}}_L0_ENS_MEAN
-FCST_VAR1_LEVELS = L0
-FCST_VAR1_THRESH = {{field_thresholds}}
-FCST_VAR1_OPTIONS = convert(x) = x * 3.28084 * 0.001;
-#
-# String to search for in the observation input files for observation
-# variable 1.
-#
-# Note:
-# This is the name of the field in the grib2 observation file. Thus,
-# it should not be set to {{fieldname_in_met_output}} because the
-# value of fieldname_in_met_output is in general not the same as the
-# name of the field in the grib2 observation file (although it can be
-# for certain fields). If you do and it doesn't match, you may get an
-# error like this from METplus:
-# ERROR : VarInfoGrib2::set_dict() -> unrecognized GRIB2 field abbreviation ...
-#
-OBS_VAR1_NAME = {{fieldname_in_obs_input}}
-OBS_VAR1_LEVELS = Z500
-OBS_VAR1_THRESH = {{field_thresholds}}
-OBS_VAR1_OPTIONS = censor_thresh = lt-20.0;
- censor_val = -20.0;
- cnt_thresh = [ >15 ];
- cnt_logic = UNION;
- convert(x) = x * 3280.84 * 0.001;
-#
-# Forecast data time window(s).
-#
-#FCST_GRID_STAT_FILE_WINDOW_BEGIN = 0
-#FCST_GRID_STAT_FILE_WINDOW_END = 0
-#
-# Observation data time window(s).
-#
-OBS_GRID_STAT_FILE_WINDOW_BEGIN = -300
-OBS_GRID_STAT_FILE_WINDOW_END = 300
-
-# MET GridStat neighborhood values
-# See the MET User's Guide GridStat section for more information
-GRID_STAT_NEIGHBORHOOD_FIELD = BOTH
-
-# width value passed to nbrhd dictionary in the MET config file
-GRID_STAT_NEIGHBORHOOD_WIDTH = 3,5,7
-
-# shape value passed to nbrhd dictionary in the MET config file
-GRID_STAT_NEIGHBORHOOD_SHAPE = SQUARE
-
-# cov thresh list passed to nbrhd dictionary in the MET config file
-GRID_STAT_NEIGHBORHOOD_COV_THRESH = >=0.5
-
-# Set to true to run GridStat separately for each field specified
-# Set to false to create one run of GridStat per run time that
-# includes all fields specified.
-GRID_STAT_ONCE_PER_FIELD = False
-#
-# Set to true if forecast data is probabilistic.
-#
-FCST_IS_PROB = False
-#
-# Only used if FCST_IS_PROB is true - sets probabilistic threshold
-#
-FCST_GRID_STAT_PROB_THRESH = ==0.1
-
-GRID_STAT_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE}
-
-# Climatology data
-#GRID_STAT_CLIMO_MEAN_FILE_NAME =
-#GRID_STAT_CLIMO_MEAN_FIELD =
-#GRID_STAT_CLIMO_MEAN_REGRID_METHOD =
-#GRID_STAT_CLIMO_MEAN_REGRID_WIDTH =
-#GRID_STAT_CLIMO_MEAN_REGRID_VLD_THRESH =
-#GRID_STAT_CLIMO_MEAN_REGRID_SHAPE =
-#GRID_STAT_CLIMO_MEAN_TIME_INTERP_METHOD =
-#GRID_STAT_CLIMO_MEAN_MATCH_MONTH =
-#GRID_STAT_CLIMO_MEAN_DAY_INTERVAL =
-#GRID_STAT_CLIMO_MEAN_HOUR_INTERVAL =
-
-#GRID_STAT_CLIMO_STDEV_FILE_NAME =
-#GRID_STAT_CLIMO_STDEV_FIELD =
-#GRID_STAT_CLIMO_STDEV_REGRID_METHOD =
-#GRID_STAT_CLIMO_STDEV_REGRID_WIDTH =
-#GRID_STAT_CLIMO_STDEV_REGRID_VLD_THRESH =
-#GRID_STAT_CLIMO_STDEV_REGRID_SHAPE =
-#GRID_STAT_CLIMO_STDEV_TIME_INTERP_METHOD =
-#GRID_STAT_CLIMO_STDEV_MATCH_MONTH =
-#GRID_STAT_CLIMO_STDEV_DAY_INTERVAL =
-#GRID_STAT_CLIMO_STDEV_HOUR_INTERVAL =
-
-GRID_STAT_CLIMO_CDF_BINS = 1
-#GRID_STAT_CLIMO_CDF_CENTER_BINS = False
-#GRID_STAT_CLIMO_CDF_WRITE_BINS = True
-
-GRID_STAT_MASK_GRID =
-
-# Statistical output types
-GRID_STAT_OUTPUT_FLAG_FHO = STAT
-GRID_STAT_OUTPUT_FLAG_CTC = STAT
-GRID_STAT_OUTPUT_FLAG_CTS = STAT
-#GRID_STAT_OUTPUT_FLAG_MCTC = NONE
-#GRID_STAT_OUTPUT_FLAG_MCTS = NONE
-GRID_STAT_OUTPUT_FLAG_CNT = STAT
-#GRID_STAT_OUTPUT_FLAG_SL1L2 = NONE
-#GRID_STAT_OUTPUT_FLAG_SAL1L2 = NONE
-#GRID_STAT_OUTPUT_FLAG_VL1L2 = NONE
-#GRID_STAT_OUTPUT_FLAG_VAL1L2 = NONE
-#GRID_STAT_OUTPUT_FLAG_VCNT = NONE
-#GRID_STAT_OUTPUT_FLAG_PCT = NONE
-#GRID_STAT_OUTPUT_FLAG_PSTD = NONE
-#GRID_STAT_OUTPUT_FLAG_PJC = NONE
-#GRID_STAT_OUTPUT_FLAG_PRC = NONE
-#GRID_STAT_OUTPUT_FLAG_ECLV = BOTH
-GRID_STAT_OUTPUT_FLAG_NBRCTC = STAT
-GRID_STAT_OUTPUT_FLAG_NBRCTS = STAT
-GRID_STAT_OUTPUT_FLAG_NBRCNT = STAT
-#GRID_STAT_OUTPUT_FLAG_GRAD = BOTH
-#GRID_STAT_OUTPUT_FLAG_DMAP = NONE
-
-# NetCDF matched pairs output file
-#GRID_STAT_NC_PAIRS_VAR_NAME =
-GRID_STAT_NC_PAIRS_FLAG_LATLON = FALSE
-GRID_STAT_NC_PAIRS_FLAG_RAW = FALSE
-GRID_STAT_NC_PAIRS_FLAG_DIFF = FALSE
-GRID_STAT_NC_PAIRS_FLAG_CLIMO = FALSE
-#GRID_STAT_NC_PAIRS_FLAG_CLIMO_CDP = FALSE
-GRID_STAT_NC_PAIRS_FLAG_WEIGHT = FALSE
-GRID_STAT_NC_PAIRS_FLAG_NBRHD = FALSE
-#GRID_STAT_NC_PAIRS_FLAG_FOURIER = FALSE
-#GRID_STAT_NC_PAIRS_FLAG_GRADIENT = FALSE
-#GRID_STAT_NC_PAIRS_FLAG_DISTANCE_MAP = FALSE
-GRID_STAT_NC_PAIRS_FLAG_APPLY_MASK = FALSE
-
-# End of [config] section and start of [dir] section.
-[dir]
-#
-# Directory containing observation input to GridStat.
-#
-OBS_GRID_STAT_INPUT_DIR = {{obs_input_dir}}
-#
-# Directory containing forecast input to GridStat.
-#
-FCST_GRID_STAT_INPUT_DIR = {{fcst_input_dir}}
-#
-# Directory containing climatology mean input to GridStat. Not used in
-# this example.
-#
-GRID_STAT_CLIMO_MEAN_INPUT_DIR =
-#
-# Directory containing climatology standard deviation input to GridStat. Not used in
-# this example.
-#
-GRID_STAT_CLIMO_STDEV_INPUT_DIR =
-#
-# Directory in which to write output from GridStat.
-#
-# OUTPUT_BASE apparently has to be set to something; it cannot be left
-# to its default value. But it is not explicitly used elsewhere in this
-# configuration file.
-#
-OUTPUT_BASE = {{output_base}}
-GRID_STAT_OUTPUT_DIR = {{output_dir}}
-#
-# Directory for staging data.
-#
-STAGING_DIR = {{staging_dir}}
-
-# End of [dir] section and start of [filename_templates] section.
-[filename_templates]
-#
-# Template for observation input to GridStat relative to
-# OBS_GRID_STAT_INPUT_DIR.
-#
-OBS_GRID_STAT_INPUT_TEMPLATE = {{obs_input_fn_template}}
-#
-# Template for forecast input to GridStat relative to
-# FCST_GRID_STAT_INPUT_DIR.
-#
-FCST_GRID_STAT_INPUT_TEMPLATE = {{fcst_input_fn_template}}
-#
-# Template for output from GridStat relative to GRID_STAT_OUTPUT_DIR.
-#
-GRID_STAT_OUTPUT_TEMPLATE =
-#
-# Template for climatology input to GridStat relative to
-# GRID_STAT_CLIMO_MEAN_INPUT_DIR. Not used in this example.
-#
-GRID_STAT_CLIMO_MEAN_INPUT_TEMPLATE =
-#
-# Template for climatology input to GridStat relative to
-# GRID_STAT_CLIMO_STDEV_INPUT_DIR. Not used in this example.
-#
-GRID_STAT_CLIMO_STDEV_INPUT_TEMPLATE =
-#
-# Variable used to specify one or more verification mask files for
-# GridStat. Not used for this example.
-#
-GRID_STAT_VERIFICATION_MASK_TEMPLATE = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly
diff --git a/parm/metplus/GridStat_ensprob.conf b/parm/metplus/GridStat_ensprob.conf
new file mode 100644
index 0000000000..6a4873e446
--- /dev/null
+++ b/parm/metplus/GridStat_ensprob.conf
@@ -0,0 +1,675 @@
+# Ensemble probabilistic {{MetplusToolName}} METplus Configuration
+
+[config]
+
+# List of applications (tools) to run.
+PROCESS_LIST = {{MetplusToolName}}
+
+# time looping - options are INIT, VALID, RETRO, and REALTIME
+# If set to INIT or RETRO:
+# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set
+# If set to VALID or REALTIME:
+# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set
+LOOP_BY = INIT
+
+# Format of INIT_BEG and INIT_END using % items
+# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc.
+# see www.strftime.org for more information
+# %Y%m%d%H expands to YYYYMMDDHH
+INIT_TIME_FMT = %Y%m%d%H
+
+# Start time for METplus run - must match INIT_TIME_FMT
+INIT_BEG = {{cdate}}
+
+# End time for METplus run - must match INIT_TIME_FMT
+INIT_END = {{cdate}}
+
+# Increment between METplus runs (in seconds if no units are specified).
+# Must be >= 60 seconds.
+INIT_INCREMENT = 3600
+
+# List of forecast leads to process for each run time (init or valid)
+# In hours if units are not specified
+# If unset, defaults to 0 (don't loop through forecast leads)
+LEAD_SEQ = {{fhr_list}}
+#
+# Order of loops to process data - Options are times, processes
+# Not relevant if only one item is in the PROCESS_LIST
+# times = run all wrappers in the PROCESS_LIST for a single run time, then
+# increment the run time and run all wrappers again until all times have
+# been evaluated.
+# processes = run the first wrapper in the PROCESS_LIST for all times
+# specified, then repeat for the next item in the PROCESS_LIST until all
+# wrappers have been run
+#
+LOOP_ORDER = times
+#
+# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud.
+#
+LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}}
+#
+# Specify the name of the METplus log file.
+#
+LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}}
+#
+# Specify the location and name of the final METplus conf file.
+#
+METPLUS_CONF = {{ '{' ~ METPLUS_TOOL_NAME ~ '_OUTPUT_DIR}' }}/metplus_final.{{metplus_config_fn}}
+#
+# Location of MET configuration file to pass to {{MetplusToolName}}.
+#
+# References PARM_BASE, which is the location of the parm directory
+# corresponding to the ush directory of the run_metplus.py script that
+# is called or the value of the environment variable METPLUS_PARM_BASE
+# if set.
+#
+{{METPLUS_TOOL_NAME}}_CONFIG_FILE = {PARM_BASE}/met_config/{{MetplusToolName}}Config_wrapped
+
+# grid to remap data. Value is set as the 'to_grid' variable in the 'regrid' dictionary
+# See MET User's Guide for more information
+{{METPLUS_TOOL_NAME}}_REGRID_TO_GRID = FCST
+{{METPLUS_TOOL_NAME}}_REGRID_VLD_THRESH = 0.5
+{{METPLUS_TOOL_NAME}}_REGRID_METHOD = BUDGET
+{{METPLUS_TOOL_NAME}}_REGRID_WIDTH = 2
+{{METPLUS_TOOL_NAME}}_REGRID_SHAPE = SQUARE
+
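+{#-
+For reference, a sketch (not an authoritative rendering) of the 'regrid'
+dictionary in the wrapped MET config that the five settings above
+populate:
+
+  regrid = {
+     to_grid    = FCST;
+     method     = BUDGET;
+     width      = 2;
+     vld_thresh = 0.5;
+     shape      = SQUARE;
+  }
+#}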
+{%- if input_field_group in ['APCP', 'ASNOW'] %}
+
+#{{METPLUS_TOOL_NAME}}_INTERP_FIELD = BOTH
+#{{METPLUS_TOOL_NAME}}_INTERP_VLD_THRESH = 1.0
+#{{METPLUS_TOOL_NAME}}_INTERP_SHAPE = SQUARE
+#{{METPLUS_TOOL_NAME}}_INTERP_TYPE_METHOD = NEAREST
+#{{METPLUS_TOOL_NAME}}_INTERP_TYPE_WIDTH = 1
+
+#{{METPLUS_TOOL_NAME}}_GRID_WEIGHT_FLAG =
+{%- elif input_field_group in ['REFC', 'RETOP'] %}
+
+{{METPLUS_TOOL_NAME}}_INTERP_FIELD = NONE
+{{METPLUS_TOOL_NAME}}_INTERP_VLD_THRESH = 1.0
+{{METPLUS_TOOL_NAME}}_INTERP_SHAPE = SQUARE
+{{METPLUS_TOOL_NAME}}_INTERP_TYPE_METHOD = NEAREST
+{{METPLUS_TOOL_NAME}}_INTERP_TYPE_WIDTH = 1
+
+{{METPLUS_TOOL_NAME}}_GRID_WEIGHT_FLAG = NONE
+{%- endif %}
+#
+# Name to identify model (forecast) data in output.
+#
+# The variable MODEL is recorded in the stat files, and the data in
+# these files is then plotted (e.g. using METViewer). Here, we add a
+# suffix to MODEL that identifies the data as ensemble-probabilistic.
+# This makes it easier to identify each curve.
+#
+MODEL = {{vx_fcst_model_name}}_ensprob
+#
+# Name to identify observation data in output.
+#
+OBTYPE = {{obtype}}
+#
+# Value to enter under the DESC column in the output stat file.
+#
+{{METPLUS_TOOL_NAME}}_DESC = NA
+#
+# Overrides of MET configuration defaults.
+#
+{{METPLUS_TOOL_NAME}}_MET_CONFIG_OVERRIDES = cat_thresh = [NA]; cnt_thresh = [NA]; cnt_logic = UNION; wind_thresh = [NA]; wind_logic = UNION; ci_alpha = [0.05]; rank_corr_flag = FALSE;
+#
+# List of forecast and corresponding observation fields to process.
+#
+{#-
+Import the file containing the Jinja macros.
+#}
+{%- import metplus_templates_dir ~ '/metplus_macros.jinja' as metplus_macros %}
+
+{#-
+Set the probabilistic threshold to be used for the forecast field. If
+necessary, this can be made an input parameter of the calling script
+instead of the hard-coded value below.
+#}
+{%- set thresh_fcst_prob = '==0.1' %}
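+{#-
+A minimal sketch of how thresh_fcst_prob could instead be supplied by
+the calling script, falling back to the current hard-coded value. The
+variable name input_thresh_fcst_prob is hypothetical, not an existing
+workflow parameter:
+
+  {%- set thresh_fcst_prob = input_thresh_fcst_prob | default('==0.1') %}
+#}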
+
+{#-
+Jinja requires certain variables to be defined globally within the template
+before they can be used in if-statements and other scopes (see Jinja
+scoping rules). Define such variables.
+#}
+{%- set level_fcst = '' %}
+{%- set level_obs = '' %}
+{%- set indx_level_fcst = '' %}
+
+{%- set valid_threshes_fcst = [] %}
+{%- set valid_threshes_obs = [] %}
+{%- set thresh_fcst = '' %}
+{%- set thresh_obs = '' %}
+{%- set indx_thresh_fcst = '' %}
+{%- set thresh_fcst_and_or = '' %}
+
+{%- set opts_indent = '' %}
+{%- set opts_indent_len = '' %}
+{%- set tmp = '' %}
+{%- set error_msg = '' %}
+
+{#-
+Make sure that the sets of field groups for forecasts and observations
+are identical.
+#}
+{%- set fgs_fcst = vx_config_dict['fcst'].keys()|list %}
+{%- set fgs_obs = vx_config_dict['obs'].keys()|list %}
+{%- if (fgs_fcst != fgs_obs) %}
+ {%- set error_msg = '\n' ~
+'The list of valid field groups for forecasts (fgs_fcst) must be identical\n' ~
+'to that for observations (fgs_obs) but isn\'t:\n' ~
+' fgs_fcst = ' ~ fgs_fcst ~ '\n' ~
+' fgs_obs = ' ~ fgs_obs %}
+ {{metplus_macros.print_err_and_quit(error_msg)}}
+{%- endif %}
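+{#-
+Illustration with hypothetical keys: if vx_config_dict['fcst'] contains
+the field groups ['APCP', 'REFC'] while vx_config_dict['obs'] contains
+only ['APCP'], the two lists differ and the template aborts via
+metplus_macros.print_err_and_quit().
+#}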
+
+{#-
+Extract the lists of forecast and observation dictionaries containing
+the valid fields, levels, and thresholds corresponding to the specified
+field group (input_field_group). Note that it would be simpler to have
+these be just dictionaries in which the keys are the field names (instead
+of them being LISTS of dictionaries in which each dictionary contains a
+single key that is the field name), but that approach cannot be used here
+because it is possible for field names to be repeated (for both forecasts
+and observations). For example, in the observations, the field name
+'PRWE' appears more than once, each time with a different threshold, and
+the combination of name and threshold is what constitutes a unique field,
+not just the name by itself.
+#}
+{%- set fields_levels_threshes_fcst = vx_config_dict['fcst'][input_field_group] %}
+{%- set fields_levels_threshes_obs = vx_config_dict['obs'][input_field_group] %}
+
+{#-
+Reset the specified forecast level so that if it happens to be an
+accumulation (e.g. 'A03'), the leading zeros in front of the hour are
+stripped out (e.g. reset to 'A3').
+#}
+{%- set input_level_fcst = metplus_macros.get_accumulation_no_zero_pad(input_level_fcst) %}
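+{#-
+For example (illustrative values only): an input_level_fcst of 'A03' is
+reset to 'A3', while non-accumulation levels such as 'L0' are expected
+to pass through unchanged (assuming the macro only rewrites
+accumulation levels).
+#}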
+
+{#-
+Ensure that the specified input forecast level(s) (input_level_fcst) and
+threshold(s) (input_thresh_fcst) are valid, i.e. that they are in the
+set(s) of valid forecast levels and thresholds, respectively, specified
+in fields_levels_threshes_fcst.
+#}
+{{- metplus_macros.check_level(fields_levels_threshes_fcst, input_level_fcst) }}
+{{- metplus_macros.check_thresh(fields_levels_threshes_fcst, input_level_fcst, input_thresh_fcst) }}
+
+{#-
+For convenience, create lists of valid forecast and observation field
+names.
+#}
+{%- set num_valid_fields_fcst = fields_levels_threshes_fcst|length %}
+{%- set valid_fields_fcst = [] %}
+{%- for i in range(0,num_valid_fields_fcst) %}
+ {%- set field = fields_levels_threshes_fcst[i].keys()|list|join('') %}
+ {%- set tmp = valid_fields_fcst.append(field) %}
+{%- endfor %}
+
+{%- set valid_fields_obs = [] %}
+{%- set num_valid_fields_obs = fields_levels_threshes_obs|length %}
+{%- for i in range(0,num_valid_fields_obs) %}
+ {%- set field = fields_levels_threshes_obs[i].keys()|list|join('') %}
+ {%- set tmp = valid_fields_obs.append(field) %}
+{%- endfor %}
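+{#-
+Illustration with hypothetical entries: given
+fields_levels_threshes_fcst = [{'APCP': {...}}, {'REFC': {...}}], the
+keys()|list|join('') idiom extracts the single key of each one-key
+dictionary, yielding valid_fields_fcst = ['APCP', 'REFC'].
+#}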
+
+{#-
+Ensure that the number of valid fields for forecasts is equal to that
+for the observations.
+#}
+{%- set num_valid_fields = 0 %}
+{%- if (num_valid_fields_fcst != num_valid_fields_obs) %}
+ {%- set error_msg = '\n' ~
+'The number of valid forecast fields (num_valid_fields_fcst) must be\n' ~
+'equal to the number of valid observation fields (num_valid_fields_obs)\n' ~
+'but isn\'t:\n' ~
+' num_valid_fields_fcst = ' ~ num_valid_fields_fcst ~ '\n' ~
+' num_valid_fields_obs = ' ~ num_valid_fields_obs ~ '\n' ~
+'The lists of valid forecast and observation fields are:\n' ~
+' valid_fields_fcst = ' ~ valid_fields_fcst ~ '\n' ~
+' valid_fields_obs = ' ~ valid_fields_obs ~ '\n' %}
+ {{metplus_macros.print_err_and_quit(error_msg)}}
+{%- else %}
+ {%- set num_valid_fields = num_valid_fields_fcst %}
+{%- endif %}
+
+{#-
+Loop over the valid fields and set field names, levels, thresholds, and/
+or options for each field, both for forecasts and for observations, in
+the METplus configuration file.
+#}
+{%- set ns = namespace(var_count = 0) %}
+
+{#-
+Loop over each field twice: the first time treating the forecast field
+as probabilistic and the second time as a scalar.
+#}
+{%- for treat_fcst_as_prob in [True, False] %}
+
+ {%- for i in range(0,num_valid_fields) %}
+
+{#-
+Add comment depending on whether or not the field is being treated
+probabilistically.
+#}
+ {%- if treat_fcst_as_prob %}
+# FREQ
+# Process as probability
+#
+ {%- else %}
+#
+# Process as scalars for neighborhood methods.
+# Note that the number of forecast and obs thresholds must match but
+# won't actually be applied to NBRCNT outputs with "nbrhd.field = OBS;"
+#
+ {%- endif %}
+
+ {%- set field_fcst = valid_fields_fcst[i] %}
+ {%- set field_obs = valid_fields_obs[i] %}
+
+{#-
+For convenience, create lists of valid forecast and observation levels
+for the current field. Then check that the number of valid levels for
+forecasts is the same as that for observations.
+#}
+ {%- set valid_levels_fcst = fields_levels_threshes_fcst[i][field_fcst].keys()|list %}
+ {%- set valid_levels_obs = fields_levels_threshes_obs[i][field_obs].keys()|list %}
+
+{#-
+Extract dictionary of valid forecast levels (the dictionary keys) and
+corresponding lists of valid thresholds (the values) for each level.
+Then loop over these levels and corresponding lists of thresholds to set
+both the forecast and observation field names, levels, thresholds, and/or
+options.
+#}
+ {%- set valid_levels_threshes_fcst = fields_levels_threshes_fcst[i][field_fcst] %}
+ {%- for level_fcst, valid_threshes_fcst in valid_levels_threshes_fcst.items() %}
+
+ {%- if (input_level_fcst == 'all') or (input_level_fcst == level_fcst) %}
+
+ {%- for thresh_fcst in valid_threshes_fcst %}
+
+ {%- if (input_thresh_fcst == 'all') or (input_thresh_fcst == thresh_fcst) %}
+{#-
+Increment the METplus variable counter.
+#}
+ {%- set ns.var_count = ns.var_count+1 %}
+
+{#-
+Set forecast field name. Note that this has to exactly match the name
+of the field in the input forecast file(s).
+
+The input forecast files are generated by the MET/METplus GenEnsProd
+tool. That tool adds the field's level to the variable names in its
+output file to ensure that all variables in the file have distinct names.
+For example, if the same field, say APCP, is output at two different
+levels, say at A3 and A6 (for APCP, "levels" are really accumulation
+periods), there need to be two variables in the output file, and they
+obviously can't both be named "APCP", so GenEnsProd names one "APCP_A3"
+and the other "APCP_A6". Here, the level is stored in the variable
+level_fcst and, below, is included in the name of the forecast field.
+
+For accumulated fields, the field name in the input forecast file contains
+TWO references to the accumulation period. The first is the level of the
+forecast field added by GenEnsProd as described above. The second is
+another reference to this same level (accumulation period) but added by
+the MET/METplus PcpCombine tool (whose output file is the input to
+GenEnsProd). PcpCombine adds this reference to the level (really the
+accumulation period) to the field's name for the same reason that
+GenEnsProd does, i.e. to ensure that the names of variables in the output
+file are distinct. Here, this accumulation period is stored in the
+variable accum_hh. Thus, for accumulated fields, below we add both
+accum_hh and level_fcst to the field name to get an exact field name
+match.
+#}
+ {%- set thresh_fcst_and_or = thresh_fcst|replace("&&", ".and.") %}
+ {%- set thresh_fcst_and_or = thresh_fcst_and_or|replace("||", ".or.") %}
+ {%- if (input_field_group in ['APCP', 'ASNOW']) %}
+FCST_VAR{{ns.var_count}}_NAME = {{field_fcst}}_{{accum_hh}}_{{level_fcst}}_ENS_FREQ_{{thresh_fcst_and_or}}
+ {%- else %}
+FCST_VAR{{ns.var_count}}_NAME = {{field_fcst}}_{{level_fcst}}_ENS_FREQ_{{thresh_fcst_and_or}}
+ {%- endif %}
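+{#-
+As an illustration with hypothetical values (not taken from the actual
+vx configuration): for field_fcst = 'APCP', accum_hh = '03', level_fcst
+= 'A3', and thresh_fcst = 'ge2.54', the APCP/ASNOW branch above renders
+
+  FCST_VAR1_NAME = APCP_03_A3_ENS_FREQ_ge2.54
+#}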
+
+{#-
+Set forecast field level.
+#}
+FCST_VAR{{ns.var_count}}_LEVELS = {{level_fcst}}
+
+{#-
+Set forecast field threshold.
+Note that since the forecast field being read in is actually a field of
+probabilities, we set the forecast threshold to a probabilistic one
+(thresh_fcst_prob) and not to the physical threshold (thresh_fcst) in
+the dictionary of forecast field names, levels, and thresholds that we
+are looping over.
+#}
+FCST_VAR{{ns.var_count}}_THRESH = {{thresh_fcst_prob}}
+
+{#-
+Set forecast field options.
+#}
+ {%- set opts_indent_len = 20 %}
+ {%- if (ns.var_count > 9) and (ns.var_count <= 99) %}
+ {%- set opts_indent_len = opts_indent_len + 1 %}
+ {%- elif (ns.var_count > 99) and (ns.var_count <= 999) %}
+ {%- set opts_indent_len = opts_indent_len + 2 %}
+ {%- elif (ns.var_count > 999) %}
+ {%- set opts_indent_len = opts_indent_len + 3 %}
+ {%- endif %}
+ {%- set opts_indent = ' '*opts_indent_len %}
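+{#-
+The base indent of 20 columns matches the length of the string
+'FCST_VAR1_OPTIONS = ', so continuation lines of multi-line option
+values line up under the start of the value; each extra digit in
+var_count widens the indent by one column. The 'opts_indent_len - 1'
+adjustment further below accounts for the one-character-shorter
+'OBS_VAR1_OPTIONS = ' prefix.
+#}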
+
+ {%- if not treat_fcst_as_prob %}
+FCST_VAR{{ns.var_count}}_OPTIONS = prob = FALSE;
+ {%- endif %}
+
+{#-
+Set observation field name. Note that this has to exactly match the name
+of the field in the input observation file.
+
+For accumulated fields, the input observation file is generated by MET's
+PcpCombine tool. In that file, the field name consists of the observation
+field name here (field_obs) with the accumulation period appended to it
+(separated by an underscore), so we must do the same here to get an exact
+match.
+
+Note:
+For ASNOW, PcpCombine is not run for the observations, so ASNOW is
+excluded from the "if" clause here (it falls through to the "else").
+For uniform workflow behavior between APCP and ASNOW, consider running
+PcpCombine for ASNOW observations as well (just as it is run for APCP
+observations).
+ {%- if (input_field_group in ['APCP', 'ASNOW']) %}
+#}
+ {%- if (input_field_group in ['APCP']) %}
+OBS_VAR{{ns.var_count}}_NAME = {{field_obs}}_{{accum_hh}}
+ {%- else %}
+OBS_VAR{{ns.var_count}}_NAME = {{field_obs}}
+ {%- endif %}
+
+{#-
+Set observation field level.
+#}
+ {%- set indx_level_fcst = valid_levels_fcst.index(level_fcst) %}
+ {%- set level_obs = valid_levels_obs[indx_level_fcst] %}
+OBS_VAR{{ns.var_count}}_LEVELS = {{level_obs}}
+
+{#-
+Set observation field threshold. Note that no observation thresholds
+are included in the METplus configuration file if input_thresh_fcst is
+set to 'none'.
+#}
+ {%- if (input_thresh_fcst != 'none') %}
+{#-
+Set the list of valid observation thresholds to the one corresponding to
+the current observation level (level_obs).
+#}
+ {%- set valid_threshes_obs = fields_levels_threshes_obs[i][field_obs][level_obs] %}
+{#-
+Set the observation threshold. This is given by the element in the list
+of valid observation thresholds that has the same index as that of the
+current forecast threshold (thresh_fcst) in the list of valid forecast
+thresholds.
+#}
+ {%- set indx_thresh_fcst = valid_threshes_fcst.index(thresh_fcst) %}
+ {%- set thresh_obs = valid_threshes_obs[indx_thresh_fcst] %}
+OBS_VAR{{ns.var_count}}_THRESH = {{thresh_obs}}
+ {%- endif %}
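+{#-
+Illustration of the index-based pairing above (hypothetical threshold
+lists): if valid_threshes_fcst = ['ge2.54', 'ge6.35'] and
+valid_threshes_obs = ['ge2.54', 'ge6.35'], a current thresh_fcst of
+'ge6.35' has index 1, so thresh_obs is set to valid_threshes_obs[1].
+The two lists must therefore have the same length and ordering.
+#}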
+
+{#-
+Set observation field options.
+#}
+ {%- set opts_indent_len = opts_indent_len - 1 %}
+ {%- set opts_indent = ' '*opts_indent_len %}
+
+ {%- if input_field_group == 'APCP' %}
+
+ {%- if field_obs == 'APCP' %}
+ {%- if not treat_fcst_as_prob %}
+OBS_VAR{{ns.var_count}}_OPTIONS = nbrhd = { field = OBS; shape = SQUARE; width = [ 1 ]; vld_thresh = 1.0; }
+ {%- endif %}
+ {%- endif %}
+
+ {%- elif input_field_group == 'ASNOW' %}
+
+ {%- if field_obs == 'ASNOW' %}
+ {%- if treat_fcst_as_prob %}
+OBS_VAR{{ns.var_count}}_OPTIONS = convert(x) = 100.0*x;
+ {%- else %}
+OBS_VAR{{ns.var_count}}_OPTIONS = nbrhd = { field = OBS; shape = SQUARE; width = [ 1 ]; vld_thresh = 1.0; };
+{{opts_indent}}convert(x) = 100.0*x;
+ {%- endif %}
+ {%- endif %}
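+{#-
+The "convert(x) = 100.0*x" above rescales the ASNOW observations
+(assumed to be provided in meters) to centimeters so that thresholds
+such as ge2.54 and ge20.32 (1 in and 8 in) apply to both forecast and
+observations; e.g. an observed value of 0.0254 m becomes 2.54 cm.
+#}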
+
+ {%- elif input_field_group == 'REFC' %}
+
+ {%- if field_obs == 'MergedReflectivityQCComposite' %}
+OBS_VAR{{ns.var_count}}_OPTIONS = censor_thresh = lt-20;
+{{opts_indent}}censor_val = -20.0;
+{{opts_indent}}cnt_thresh = [ >15 ];
+{{opts_indent}}cnt_logic = UNION;
+ {%- if not treat_fcst_as_prob %}
+{{opts_indent}}nbrhd = { field = OBS; shape = SQUARE; width = [ 1 ]; vld_thresh = 1.0; }
+ {%- endif %}
+ {%- endif %}
+
+ {%- elif input_field_group == 'RETOP' %}
+
+ {%- if field_obs == 'EchoTop18' %}
+OBS_VAR{{ns.var_count}}_OPTIONS = censor_thresh = lt-20.0;
+{{opts_indent}}censor_val = -20.0;
+{{opts_indent}}cnt_thresh = [ >15 ];
+{{opts_indent}}cnt_logic = UNION;
+{{opts_indent}}convert(x) = x * 3280.84 * 0.001;
+ {%- if not treat_fcst_as_prob %}
+{{opts_indent}}nbrhd = { field = OBS; shape = SQUARE; width = [ 1 ]; vld_thresh = 1.0; }
+ {%- endif %}
+ {%- endif %}
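+{#-
+The "convert(x) = x * 3280.84 * 0.001" above converts echo-top heights
+from kilometers to kilofeet (1 km = 3280.84 ft, scaled by 0.001),
+assuming the EchoTop18 observations are provided in kilometers; e.g. an
+observed echo top of 12 km becomes about 39.4 kft.
+#}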
+
+ {%- endif %}
+{#-
+Print out a newline to separate the settings for the current field (both
+forecast and observation settings) from those for the next field.
+#}
+ {{- '\n' }}
+
+ {%- endif %}
+ {%- endfor %}
+
+ {%- endif %}
+
+ {%- endfor %}
+ {%- endfor %}
+{%- endfor %}
+#
+# Forecast data time window(s).
+#
+{%- set comment_or_null = '' %}
+{%- set obs_window_abs_val = '0' %}
+{%- if input_field_group in ['REFC', 'RETOP'] %}
+ {%- set comment_or_null = '#' %}
+ {%- set obs_window_abs_val = '300' %}
+{%- endif %}
+{{comment_or_null}}FCST_{{METPLUS_TOOL_NAME}}_FILE_WINDOW_BEGIN = 0
+{{comment_or_null}}FCST_{{METPLUS_TOOL_NAME}}_FILE_WINDOW_END = 0
+#
+# Observation data time window(s).
+#
+{#-
+Use integers for seconds; the "int" filter below can be changed to
+"float" if sub-second precision is ever needed.
+#}
+OBS_{{METPLUS_TOOL_NAME}}_FILE_WINDOW_BEGIN = {{ 0 - obs_window_abs_val|int }}
+OBS_{{METPLUS_TOOL_NAME}}_FILE_WINDOW_END = {{ obs_window_abs_val|int }}
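+{#-
+Example rendering: for input_field_group 'REFC' or 'RETOP', the two
+FCST file-window lines above are commented out and the observation
+window widens to +/-5 minutes, e.g. for GridStat:
+
+OBS_GRID_STAT_FILE_WINDOW_BEGIN = -300
+OBS_GRID_STAT_FILE_WINDOW_END = 300
+
+For 'APCP' and 'ASNOW', both windows render as 0.
+#}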
+
+# MET {{MetplusToolName}} neighborhood values
+# See the MET User's Guide {{MetplusToolName}} section for more information
+{{METPLUS_TOOL_NAME}}_NEIGHBORHOOD_FIELD =
+
+# width value passed to nbrhd dictionary in the MET config file
+{{METPLUS_TOOL_NAME}}_NEIGHBORHOOD_WIDTH =
+
+# shape value passed to nbrhd dictionary in the MET config file
+{{METPLUS_TOOL_NAME}}_NEIGHBORHOOD_SHAPE =
+
+# cov thresh list passed to nbrhd dictionary in the MET config file
+{{METPLUS_TOOL_NAME}}_NEIGHBORHOOD_COV_THRESH = >=0.5
+
+# Set to true to run {{MetplusToolName}} separately for each field specified
+# Set to false to create one run of {{MetplusToolName}} per run time that
+# includes all fields specified.
+{{METPLUS_TOOL_NAME}}_ONCE_PER_FIELD = False
+#
+# Set to true if forecast data is probabilistic.
+#
+FCST_IS_PROB = True
+FCST_PROB_IN_GRIB_PDS = False
+#
+# Only used if FCST_IS_PROB is true - sets probabilistic threshold
+#
+FCST_{{METPLUS_TOOL_NAME}}_PROB_THRESH = {{thresh_fcst_prob}}
+
+{{METPLUS_TOOL_NAME}}_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE}
+
+# Climatology data
+#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_FILE_NAME =
+#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_FIELD =
+#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_REGRID_METHOD =
+#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_REGRID_WIDTH =
+#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_REGRID_VLD_THRESH =
+#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_REGRID_SHAPE =
+#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_TIME_INTERP_METHOD =
+#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_MATCH_MONTH =
+#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_DAY_INTERVAL =
+#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_HOUR_INTERVAL =
+
+#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_FILE_NAME =
+#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_FIELD =
+#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_REGRID_METHOD =
+#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_REGRID_WIDTH =
+#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_REGRID_VLD_THRESH =
+#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_REGRID_SHAPE =
+#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_TIME_INTERP_METHOD =
+#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_MATCH_MONTH =
+#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_DAY_INTERVAL =
+#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_HOUR_INTERVAL =
+
+{%- set comment_or_null = '' %}
+{%- if input_field_group in ['APCP', 'ASNOW'] %}
+ {%- set comment_or_null = '#' %}
+{%- endif %}
+
+{{comment_or_null}}{{METPLUS_TOOL_NAME}}_CLIMO_CDF_BINS = 1
+#{{METPLUS_TOOL_NAME}}_CLIMO_CDF_CENTER_BINS = False
+#{{METPLUS_TOOL_NAME}}_CLIMO_CDF_WRITE_BINS = True
+
+{{METPLUS_TOOL_NAME}}_MASK_GRID =
+
+# Statistical output types
+{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_FHO = NONE
+{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_CTC = NONE
+{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_CTS = NONE
+{{comment_or_null}}{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_MCTC = NONE
+{{comment_or_null}}{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_MCTS = NONE
+{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_CNT = NONE
+{{comment_or_null}}{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_SL1L2 = NONE
+{{comment_or_null}}{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_SAL1L2 = NONE
+{{comment_or_null}}{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_VL1L2 = NONE
+{{comment_or_null}}{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_VAL1L2 = NONE
+{{comment_or_null}}{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_VCNT = NONE
+{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_PCT = STAT
+{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_PSTD = STAT
+{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_PJC = STAT
+{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_PRC = STAT
+{%- if input_field_group in ['APCP', 'ASNOW'] %}
+{{comment_or_null}}{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_ECLV = BOTH
+{%- elif input_field_group in ['REFC', 'RETOP'] %}
+{{comment_or_null}}{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_ECLV = NONE
+{%- endif %}
+{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_NBRCTC = NONE
+{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_NBRCTS = NONE
+{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_NBRCNT = STAT
+{%- if input_field_group in ['APCP', 'ASNOW'] %}
+{{comment_or_null}}{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_GRAD = BOTH
+{{comment_or_null}}{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_DMAP = NONE
+{%- elif input_field_group in ['REFC', 'RETOP'] %}
+{{comment_or_null}}{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_GRAD = NONE
+{%- endif %}
+
+# NetCDF matched pairs output file
+#{{METPLUS_TOOL_NAME}}_NC_PAIRS_VAR_NAME =
+{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_LATLON = FALSE
+{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_RAW = FALSE
+{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_DIFF = FALSE
+{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_CLIMO = FALSE
+{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_WEIGHT = FALSE
+{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_NBRHD = FALSE
+{{comment_or_null}}{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_FOURIER = FALSE
+{{comment_or_null}}{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_GRADIENT = FALSE
+{{comment_or_null}}{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_DISTANCE_MAP = FALSE
+{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_APPLY_MASK = FALSE
+
+# End of [config] section and start of [dir] section.
+[dir]
+#
+# Directory containing observation input to {{MetplusToolName}}.
+#
+OBS_{{METPLUS_TOOL_NAME}}_INPUT_DIR = {{obs_input_dir}}
+#
+# Directory containing forecast input to {{MetplusToolName}}.
+#
+FCST_{{METPLUS_TOOL_NAME}}_INPUT_DIR = {{fcst_input_dir}}
+#
+# Directory containing climatology mean input to {{MetplusToolName}}. Not used in
+# this example.
+#
+{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_INPUT_DIR =
+#
+# Directory containing climatology standard deviation input to
+# {{MetplusToolName}}. Not used in this example.
+#
+{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_INPUT_DIR =
+#
+# Directory in which to write output from {{MetplusToolName}}.
+#
+# METplus requires OUTPUT_BASE to be set to a valid value; it cannot be
+# left at its default. It is not explicitly used elsewhere in this
+# configuration file.
+#
+OUTPUT_BASE = {{output_base}}
+{{METPLUS_TOOL_NAME}}_OUTPUT_DIR = {{output_dir}}
+#
+# Directory for staging data.
+#
+STAGING_DIR = {{staging_dir}}
+
+# End of [dir] section and start of [filename_templates] section.
+[filename_templates]
+#
+# Template for observation input to {{MetplusToolName}} relative to
+# OBS_{{METPLUS_TOOL_NAME}}_INPUT_DIR.
+#
+OBS_{{METPLUS_TOOL_NAME}}_INPUT_TEMPLATE = {{obs_input_fn_template}}
+#
+# Template for forecast input to {{MetplusToolName}} relative to
+# FCST_{{METPLUS_TOOL_NAME}}_INPUT_DIR.
+#
+FCST_{{METPLUS_TOOL_NAME}}_INPUT_TEMPLATE = {{fcst_input_fn_template}}
+#
+# Template for output from {{MetplusToolName}} relative to {{METPLUS_TOOL_NAME}}_OUTPUT_DIR.
+#
+{{METPLUS_TOOL_NAME}}_OUTPUT_TEMPLATE =
+#
+# Template for climatology mean input to {{MetplusToolName}} relative to
+# {{METPLUS_TOOL_NAME}}_CLIMO_MEAN_INPUT_DIR. Not used in this example.
+#
+{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_INPUT_TEMPLATE =
+#
+# Template for climatology standard deviation input to {{MetplusToolName}}
+# relative to {{METPLUS_TOOL_NAME}}_CLIMO_STDEV_INPUT_DIR. Not used in this
+# example.
+#
+{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_INPUT_TEMPLATE =
+#
+# Variable used to specify one or more verification mask files for
+# {{MetplusToolName}}. Not used in this example.
+#
+{{METPLUS_TOOL_NAME}}_VERIFICATION_MASK_TEMPLATE = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly
diff --git a/parm/metplus/GridStat_ensprob_APCP.conf b/parm/metplus/GridStat_ensprob_APCP.conf
deleted file mode 100644
index 3e16de248d..0000000000
--- a/parm/metplus/GridStat_ensprob_APCP.conf
+++ /dev/null
@@ -1,362 +0,0 @@
-# Ensemble probabilistic GridStat METplus Configuration
-
-[config]
-
-# List of applications (tools) to run.
-PROCESS_LIST = GridStat
-
-# time looping - options are INIT, VALID, RETRO, and REALTIME
-# If set to INIT or RETRO:
-# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set
-# If set to VALID or REALTIME:
-# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set
-LOOP_BY = INIT
-
-# Format of INIT_BEG and INIT_END using % items
-# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc.
-# see www.strftime.org for more information
-# %Y%m%d%H expands to YYYYMMDDHH
-INIT_TIME_FMT = %Y%m%d%H
-
-# Start time for METplus run - must match INIT_TIME_FMT
-INIT_BEG = {{cdate}}
-
-# End time for METplus run - must match INIT_TIME_FMT
-INIT_END = {{cdate}}
-
-# Increment between METplus runs (in seconds if no units are specified).
-# Must be >= 60 seconds.
-INIT_INCREMENT = 3600
-
-# List of forecast leads to process for each run time (init or valid)
-# In hours if units are not specified
-# If unset, defaults to 0 (don't loop through forecast leads)
-LEAD_SEQ = {{fhr_list}}
-#
-# Order of loops to process data - Options are times, processes
-# Not relevant if only one item is in the PROCESS_LIST
-# times = run all wrappers in the PROCESS_LIST for a single run time, then
-# increment the run time and run all wrappers again until all times have
-# been evaluated.
-# processes = run the first wrapper in the PROCESS_LIST for all times
-# specified, then repeat for the next item in the PROCESS_LIST until all
-# wrappers have been run
-#
-LOOP_ORDER = times
-#
-# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud.
-#
-LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}}
-#
-# Specify the name of the METplus log file.
-#
-LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}}
-#
-# Specify the location and name of the final METplus conf file.
-#
-METPLUS_CONF = {GRID_STAT_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}}
-#
-# Location of MET configuration file to pass to GridStat.
-#
-# References PARM_BASE, which is the location of the parm directory
-# corresponding to the ush directory of the run_metplus.py script that
-# is called or the value of the environment variable METPLUS_PARM_BASE
-# if set.
-#
-GRID_STAT_CONFIG_FILE = {PARM_BASE}/met_config/GridStatConfig_wrapped
-
-# grid to remap data. Value is set as the 'to_grid' variable in the 'regrid' dictionary
-# See MET User's Guide for more information
-GRID_STAT_REGRID_TO_GRID = FCST
-GRID_STAT_REGRID_VLD_THRESH = 0.5
-GRID_STAT_REGRID_METHOD = BUDGET
-GRID_STAT_REGRID_WIDTH = 2
-GRID_STAT_REGRID_SHAPE = SQUARE
-
-#GRID_STAT_INTERP_FIELD = BOTH
-#GRID_STAT_INTERP_VLD_THRESH = 1.0
-#GRID_STAT_INTERP_SHAPE = SQUARE
-#GRID_STAT_INTERP_TYPE_METHOD = NEAREST
-#GRID_STAT_INTERP_TYPE_WIDTH = 1
-
-#GRID_STAT_GRID_WEIGHT_FLAG =
-#
-# Name to identify model (forecast) data in output.
-#
-# The variable MODEL is recorded in the stat files, and the data in
-# these files is then plotted (e.g. using METViewer). Here, we add a
-# suffix to MODEL that identifies the data as ensemble-probabilistic.
-# This makes it easier to identify each curve.
-#
-MODEL = {{vx_fcst_model_name}}_ensprob
-#
-# Name to identify observation data in output.
-#
-OBTYPE = {{obtype}}
-#
-# Value to enter under the DESC column in the output stat file.
-#
-GRID_STAT_DESC = NA
-
-# List of variables to compare in GridStat - FCST_VAR1 variables correspond
-# to OBS_VAR1 variables
-# Note [FCST/OBS/BOTH]_GRID_STAT_VAR_NAME can be used instead if different evaluations
-# are needed for different tools
-
-GRID_STAT_MET_CONFIG_OVERRIDES = cat_thresh = [NA]; cnt_thresh = [NA]; cnt_logic = UNION; wind_thresh = [NA]; wind_logic = UNION; ci_alpha = [0.05]; rank_corr_flag = FALSE;
-
-{%- set field_thresholds = [] %}
-{%- if accum_hh == '01' %}
- {%- set field_thresholds = ['gt0.0', 'ge0.254', 'ge0.508', 'ge2.54'] %}
-{%- elif accum_hh == '03' %}
- {%- set field_thresholds = ['gt0.0', 'ge0.508', 'ge2.54', 'ge6.350'] %}
-{%- elif accum_hh == '06' %}
- {%- set field_thresholds = ['gt0.0', 'ge2.54', 'ge6.350', 'ge12.700'] %}
-{%- elif accum_hh == '24' %}
- {%- set field_thresholds = ['gt0.0', 'ge6.350', 'ge12.700', 'ge25.400'] %}
-{%- endif %}
-#
-# List of forecast and corresponding observation fields to process.
-#
-# FREQ
-# Process as probability
-#
-FCST_VAR1_NAME = {{fieldname_in_met_output}}_{{accum_hh}}_A{{accum_no_pad}}_ENS_FREQ_{{field_thresholds[0]}}
-FCST_VAR1_LEVELS = A{{accum_hh}}
-FCST_VAR1_THRESH = ==0.1
-OBS_VAR1_NAME = {{fieldname_in_met_output}}_{{accum_hh}}
-OBS_VAR1_LEVELS = A{{accum_hh}}
-OBS_VAR1_THRESH = {{field_thresholds[0]}}
-
-FCST_VAR2_NAME = {{fieldname_in_met_output}}_{{accum_hh}}_A{{accum_no_pad}}_ENS_FREQ_{{field_thresholds[1]}}
-FCST_VAR2_LEVELS = A{{accum_hh}}
-FCST_VAR2_THRESH = ==0.1
-OBS_VAR2_NAME = {{fieldname_in_met_output}}_{{accum_hh}}
-OBS_VAR2_LEVELS = A{{accum_hh}}
-OBS_VAR2_THRESH = {{field_thresholds[1]}}
-
-FCST_VAR3_NAME = {{fieldname_in_met_output}}_{{accum_hh}}_A{{accum_no_pad}}_ENS_FREQ_{{field_thresholds[2]}}
-FCST_VAR3_LEVELS = A{{accum_hh}}
-FCST_VAR3_THRESH = ==0.1
-OBS_VAR3_NAME = {{fieldname_in_met_output}}_{{accum_hh}}
-OBS_VAR3_LEVELS = A{{accum_hh}}
-OBS_VAR3_THRESH = {{field_thresholds[2]}}
-
-FCST_VAR4_NAME = {{fieldname_in_met_output}}_{{accum_hh}}_A{{accum_no_pad}}_ENS_FREQ_{{field_thresholds[3]}}
-FCST_VAR4_LEVELS = A{{accum_hh}}
-FCST_VAR4_THRESH = ==0.1
-OBS_VAR4_NAME = {{fieldname_in_met_output}}_{{accum_hh}}
-OBS_VAR4_LEVELS = A{{accum_hh}}
-OBS_VAR4_THRESH = {{field_thresholds[3]}}
-
-#
-#Process as scalars for neighborhood methods
-## Note that the number of forecast and obs thresholds must match
-## but won't actually be applied to NBRCNT outputs with "nbrhd.field = OBS;"
-#
-FCST_VAR5_NAME = {{fieldname_in_met_output}}_{{accum_hh}}_A{{accum_no_pad}}_ENS_FREQ_{{field_thresholds[0]}}
-FCST_VAR5_LEVELS = A{{accum_hh}}
-FCST_VAR5_THRESH = ==0.1
-FCST_VAR5_OPTIONS = prob = FALSE;
-OBS_VAR5_NAME = {{fieldname_in_met_output}}_{{accum_hh}}
-OBS_VAR5_LEVELS = A{{accum_hh}}
-OBS_VAR5_THRESH = {{field_thresholds[0]}}
-OBS_VAR5_OPTIONS = nbrhd = { field = OBS; shape = SQUARE; width = [ 1 ]; vld_thresh = 1.0; }
-
-FCST_VAR6_NAME = {{fieldname_in_met_output}}_{{accum_hh}}_A{{accum_no_pad}}_ENS_FREQ_{{field_thresholds[1]}}
-FCST_VAR6_LEVELS = A{{accum_hh}}
-FCST_VAR6_THRESH = ==0.1
-FCST_VAR6_OPTIONS = prob = FALSE;
-OBS_VAR6_NAME = {{fieldname_in_met_output}}_{{accum_hh}}
-OBS_VAR6_LEVELS = A{{accum_hh}}
-OBS_VAR6_THRESH = {{field_thresholds[1]}}
-OBS_VAR6_OPTIONS = nbrhd = { field = OBS; shape = SQUARE; width = [ 1 ]; vld_thresh = 1.0; }
-
-FCST_VAR7_NAME = {{fieldname_in_met_output}}_{{accum_hh}}_A{{accum_no_pad}}_ENS_FREQ_{{field_thresholds[2]}}
-FCST_VAR7_LEVELS = A{{accum_hh}}
-FCST_VAR7_THRESH = ==0.1
-FCST_VAR7_OPTIONS = prob = FALSE;
-OBS_VAR7_NAME = {{fieldname_in_met_output}}_{{accum_hh}}
-OBS_VAR7_LEVELS = A{{accum_hh}}
-OBS_VAR7_THRESH = {{field_thresholds[2]}}
-OBS_VAR7_OPTIONS = nbrhd = { field = OBS; shape = SQUARE; width = [ 1 ]; vld_thresh = 1.0; }
-
-FCST_VAR8_NAME = {{fieldname_in_met_output}}_{{accum_hh}}_A{{accum_no_pad}}_ENS_FREQ_{{field_thresholds[3]}}
-FCST_VAR8_LEVELS = A{{accum_hh}}
-FCST_VAR8_THRESH = ==0.1
-FCST_VAR8_OPTIONS = prob = FALSE;
-OBS_VAR8_NAME = {{fieldname_in_met_output}}_{{accum_hh}}
-OBS_VAR8_LEVELS = A{{accum_hh}}
-OBS_VAR8_THRESH = {{field_thresholds[3]}}
-OBS_VAR8_OPTIONS = nbrhd = { field = OBS; shape = SQUARE; width = [ 1 ]; vld_thresh = 1.0; }
-
-#
-# Forecast data time window(s).
-#
-FCST_GRID_STAT_FILE_WINDOW_BEGIN = 0
-FCST_GRID_STAT_FILE_WINDOW_END = 0
-#
-# Observation data time window(s).
-#
-OBS_GRID_STAT_FILE_WINDOW_BEGIN = 0
-OBS_GRID_STAT_FILE_WINDOW_END = 0
-
-# MET GridStat neighborhood values
-# See the MET User's Guide GridStat section for more information
-GRID_STAT_NEIGHBORHOOD_FIELD =
-
-# width value passed to nbrhd dictionary in the MET config file
-GRID_STAT_NEIGHBORHOOD_WIDTH =
-
-# shape value passed to nbrhd dictionary in the MET config file
-GRID_STAT_NEIGHBORHOOD_SHAPE =
-
-# cov thresh list passed to nbrhd dictionary in the MET config file
-GRID_STAT_NEIGHBORHOOD_COV_THRESH = >=0.5
-
-# Set to true to run GridStat separately for each field specified
-# Set to false to create one run of GridStat per run time that
-# includes all fields specified.
-GRID_STAT_ONCE_PER_FIELD = False
-#
-# Set to true if forecast data is probabilistic.
-#
-FCST_IS_PROB = True
-FCST_PROB_IN_GRIB_PDS = False
-#
-# Only used if FCST_IS_PROB is true - sets probabilistic threshold
-#
-FCST_GRID_STAT_PROB_THRESH = ==0.1
-
-GRID_STAT_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE}
-
-# Climatology data
-#GRID_STAT_CLIMO_MEAN_FILE_NAME =
-#GRID_STAT_CLIMO_MEAN_FIELD =
-#GRID_STAT_CLIMO_MEAN_REGRID_METHOD =
-#GRID_STAT_CLIMO_MEAN_REGRID_WIDTH =
-#GRID_STAT_CLIMO_MEAN_REGRID_VLD_THRESH =
-#GRID_STAT_CLIMO_MEAN_REGRID_SHAPE =
-#GRID_STAT_CLIMO_MEAN_TIME_INTERP_METHOD =
-#GRID_STAT_CLIMO_MEAN_MATCH_MONTH =
-#GRID_STAT_CLIMO_MEAN_DAY_INTERVAL =
-#GRID_STAT_CLIMO_MEAN_HOUR_INTERVAL =
-
-#GRID_STAT_CLIMO_STDEV_FILE_NAME =
-#GRID_STAT_CLIMO_STDEV_FIELD =
-#GRID_STAT_CLIMO_STDEV_REGRID_METHOD =
-#GRID_STAT_CLIMO_STDEV_REGRID_WIDTH =
-#GRID_STAT_CLIMO_STDEV_REGRID_VLD_THRESH =
-#GRID_STAT_CLIMO_STDEV_REGRID_SHAPE =
-#GRID_STAT_CLIMO_STDEV_TIME_INTERP_METHOD =
-#GRID_STAT_CLIMO_STDEV_MATCH_MONTH =
-#GRID_STAT_CLIMO_STDEV_DAY_INTERVAL =
-#GRID_STAT_CLIMO_STDEV_HOUR_INTERVAL =
-
-#GRID_STAT_CLIMO_CDF_BINS = 1
-#GRID_STAT_CLIMO_CDF_CENTER_BINS = False
-#GRID_STAT_CLIMO_CDF_WRITE_BINS = True
-
-GRID_STAT_MASK_GRID =
-
-# Statistical output types
-GRID_STAT_OUTPUT_FLAG_FHO = NONE
-GRID_STAT_OUTPUT_FLAG_CTC = NONE
-GRID_STAT_OUTPUT_FLAG_CTS = NONE
-#GRID_STAT_OUTPUT_FLAG_MCTC = NONE
-#GRID_STAT_OUTPUT_FLAG_MCTS = NONE
-GRID_STAT_OUTPUT_FLAG_CNT = NONE
-#GRID_STAT_OUTPUT_FLAG_SL1L2 = NONE
-#GRID_STAT_OUTPUT_FLAG_SAL1L2 = NONE
-#GRID_STAT_OUTPUT_FLAG_VL1L2 = NONE
-#GRID_STAT_OUTPUT_FLAG_VAL1L2 = NONE
-#GRID_STAT_OUTPUT_FLAG_VCNT = NONE
-GRID_STAT_OUTPUT_FLAG_PCT = STAT
-GRID_STAT_OUTPUT_FLAG_PSTD = STAT
-GRID_STAT_OUTPUT_FLAG_PJC = STAT
-GRID_STAT_OUTPUT_FLAG_PRC = STAT
-#GRID_STAT_OUTPUT_FLAG_ECLV = BOTH
-GRID_STAT_OUTPUT_FLAG_NBRCTC = NONE
-GRID_STAT_OUTPUT_FLAG_NBRCTS = NONE
-GRID_STAT_OUTPUT_FLAG_NBRCNT = STAT
-#GRID_STAT_OUTPUT_FLAG_GRAD = BOTH
-#GRID_STAT_OUTPUT_FLAG_DMAP = NONE
-
-# NetCDF matched pairs output file
-#GRID_STAT_NC_PAIRS_VAR_NAME =
-GRID_STAT_NC_PAIRS_FLAG_LATLON = FALSE
-GRID_STAT_NC_PAIRS_FLAG_RAW = FALSE
-GRID_STAT_NC_PAIRS_FLAG_DIFF = FALSE
-GRID_STAT_NC_PAIRS_FLAG_CLIMO = FALSE
-GRID_STAT_NC_PAIRS_FLAG_WEIGHT = FALSE
-GRID_STAT_NC_PAIRS_FLAG_NBRHD = FALSE
-#GRID_STAT_NC_PAIRS_FLAG_FOURIER = FALSE
-#GRID_STAT_NC_PAIRS_FLAG_GRADIENT = FALSE
-#GRID_STAT_NC_PAIRS_FLAG_DISTANCE_MAP = FALSE
-GRID_STAT_NC_PAIRS_FLAG_APPLY_MASK = FALSE
-
-# End of [config] section and start of [dir] section.
-[dir]
-#
-# Directory containing observation input to GridStat.
-#
-OBS_GRID_STAT_INPUT_DIR = {{obs_input_dir}}
-#
-# Directory containing forecast input to GridStat.
-#
-FCST_GRID_STAT_INPUT_DIR = {{fcst_input_dir}}
-#
-# Directory containing climatology mean input to GridStat. Not used in
-# this example.
-#
-GRID_STAT_CLIMO_MEAN_INPUT_DIR =
-#
-# Directory containing climatology mean input to GridStat. Not used in
-# this example.
-#
-GRID_STAT_CLIMO_STDEV_INPUT_DIR =
-#
-# Directory in which to write output from GridStat.
-#
-# OUTPUT_BASE apparently has to be set to something; it cannot be left
-# to its default value. But it is not explicitly used elsewhere in this
-# configuration file.
-#
-OUTPUT_BASE = {{output_base}}
-GRID_STAT_OUTPUT_DIR = {{output_dir}}
-#
-# Directory for staging data.
-#
-STAGING_DIR = {{staging_dir}}
-
-# End of [dir] section and start of [filename_templates] section.
-[filename_templates]
-#
-# Template for observation input to GridStat relative to
-# OBS_GRID_STAT_INPUT_DIR.
-#
-OBS_GRID_STAT_INPUT_TEMPLATE = {{obs_input_fn_template}}
-#
-# Template for forecast input to GridStat relative to
-# FCST_GRID_STAT_INPUT_DIR.
-#
-FCST_GRID_STAT_INPUT_TEMPLATE = {{fcst_input_fn_template}}
-#
-# Template for output from GridStat relative to GRID_STAT_OUTPUT_DIR.
-#
-GRID_STAT_OUTPUT_TEMPLATE =
-#
-# Template for climatology input to GridStat relative to
-# GRID_STAT_CLIMO_MEAN_INPUT_DIR. Not used in this example.
-#
-GRID_STAT_CLIMO_MEAN_INPUT_TEMPLATE =
-#
-# Template for climatology input to GridStat relative to
-# GRID_STAT_CLIMO_STDEV_INPUT_DIR. Not used in this example.
-#
-GRID_STAT_CLIMO_STDEV_INPUT_TEMPLATE =
-#
-# Variable used to specify one or more verification mask files for
-# GridStat. Not used for this example.
-#
-GRID_STAT_VERIFICATION_MASK_TEMPLATE = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly
diff --git a/parm/metplus/GridStat_ensprob_ASNOW.conf b/parm/metplus/GridStat_ensprob_ASNOW.conf
deleted file mode 100644
index ecd17f681b..0000000000
--- a/parm/metplus/GridStat_ensprob_ASNOW.conf
+++ /dev/null
@@ -1,384 +0,0 @@
-# Ensemble probabilistic GridStat METplus Configuration
-
-[config]
-
-# List of applications (tools) to run.
-PROCESS_LIST = GridStat
-
-# time looping - options are INIT, VALID, RETRO, and REALTIME
-# If set to INIT or RETRO:
-# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set
-# If set to VALID or REALTIME:
-# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set
-LOOP_BY = INIT
-
-# Format of INIT_BEG and INIT_END using % items
-# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc.
-# see www.strftime.org for more information
-# %Y%m%d%H expands to YYYYMMDDHH
-INIT_TIME_FMT = %Y%m%d%H
-
-# Start time for METplus run - must match INIT_TIME_FMT
-INIT_BEG = {{cdate}}
-
-# End time for METplus run - must match INIT_TIME_FMT
-INIT_END = {{cdate}}
-
-# Increment between METplus runs (in seconds if no units are specified).
-# Must be >= 60 seconds.
-INIT_INCREMENT = 3600
-
-# List of forecast leads to process for each run time (init or valid)
-# In hours if units are not specified
-# If unset, defaults to 0 (don't loop through forecast leads)
-LEAD_SEQ = {{fhr_list}}
-#
-# Order of loops to process data - Options are times, processes
-# Not relevant if only one item is in the PROCESS_LIST
-# times = run all wrappers in the PROCESS_LIST for a single run time, then
-# increment the run time and run all wrappers again until all times have
-# been evaluated.
-# processes = run the first wrapper in the PROCESS_LIST for all times
-# specified, then repeat for the next item in the PROCESS_LIST until all
-# wrappers have been run
-#
-LOOP_ORDER = times
-#
-# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud.
-#
-LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}}
-#
-# Specify the name of the METplus log file.
-#
-LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}}
-#
-# Specify the location and name of the final METplus conf file.
-#
-METPLUS_CONF = {GRID_STAT_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}}
-#
-# Location of MET configuration file to pass to GridStat.
-#
-# References PARM_BASE, which is the location of the parm directory
-# corresponding to the ush directory of the run_metplus.py script that
-# is called or the value of the environment variable METPLUS_PARM_BASE
-# if set.
-#
-GRID_STAT_CONFIG_FILE = {PARM_BASE}/met_config/GridStatConfig_wrapped
-
-# grid to remap data. Value is set as the 'to_grid' variable in the 'regrid' dictionary
-# See MET User's Guide for more information
-GRID_STAT_REGRID_TO_GRID = FCST
-GRID_STAT_REGRID_VLD_THRESH = 0.5
-GRID_STAT_REGRID_METHOD = BUDGET
-GRID_STAT_REGRID_WIDTH = 2
-GRID_STAT_REGRID_SHAPE = SQUARE
-
-#GRID_STAT_INTERP_FIELD = BOTH
-#GRID_STAT_INTERP_VLD_THRESH = 1.0
-#GRID_STAT_INTERP_SHAPE = SQUARE
-#GRID_STAT_INTERP_TYPE_METHOD = NEAREST
-#GRID_STAT_INTERP_TYPE_WIDTH = 1
-
-#GRID_STAT_GRID_WEIGHT_FLAG =
-#
-# Name to identify model (forecast) data in output.
-#
-# The variable MODEL is recorded in the stat files, and the data in
-# these files is then plotted (e.g. using METViewer). Here, we add a
-# suffix to MODEL that identifies the data as ensemble-probabilistic.
-# This makes it easier to identify each curve.
-#
-MODEL = {{vx_fcst_model_name}}_ensprob
-#
-# Name to identify observation data in output.
-#
-OBTYPE = {{obtype}}
-#
-# Value to enter under the DESC column in the output stat file.
-#
-GRID_STAT_DESC = NA
-
-# List of variables to compare in GridStat - FCST_VAR1 variables correspond
-# to OBS_VAR1 variables
-# Note [FCST/OBS/BOTH]_GRID_STAT_VAR_NAME can be used instead if different evaluations
-# are needed for different tools
-
-GRID_STAT_MET_CONFIG_OVERRIDES = cat_thresh = [NA]; cnt_thresh = [NA]; cnt_logic = UNION; wind_thresh = [NA]; wind_logic = UNION; ci_alpha = [0.05]; rank_corr_flag = FALSE;
-
-{%- set field_thresholds = [] %}
-{%- if accum_hh == '06' %}
- {%- set field_thresholds = ['gt0.0', 'ge2.54', 'ge5.08', 'ge10.16', 'ge20.32'] %}
-{%- elif accum_hh == '24' %}
- {%- set field_thresholds = ['gt0.0', 'ge2.54', 'ge5.08', 'ge10.16', 'ge20.32'] %}
-{%- endif %}
-#
-# List of forecast and corresponding observation fields to process.
-#
-# FREQ
-# Process as probability
-#
-FCST_VAR1_NAME = {{fieldname_in_met_output}}_{{accum_hh}}_A{{accum_no_pad}}_ENS_FREQ_{{field_thresholds[0]}}
-FCST_VAR1_LEVELS = A{{accum_hh}}
-FCST_VAR1_THRESH = ==0.1
-OBS_VAR1_NAME = {{fieldname_in_obs_input}}
-OBS_VAR1_LEVELS = A{{accum_hh}}
-OBS_VAR1_THRESH = {{field_thresholds[0]}}
-OBS_VAR1_OPTIONS = convert(x) = 100.0*x;
-
-FCST_VAR2_NAME = {{fieldname_in_met_output}}_{{accum_hh}}_A{{accum_no_pad}}_ENS_FREQ_{{field_thresholds[1]}}
-FCST_VAR2_LEVELS = A{{accum_hh}}
-FCST_VAR2_THRESH = ==0.1
-OBS_VAR2_NAME = {{fieldname_in_obs_input}}
-OBS_VAR2_LEVELS = A{{accum_hh}}
-OBS_VAR2_THRESH = {{field_thresholds[1]}}
-OBS_VAR2_OPTIONS = convert(x) = 100.0*x;
-
-FCST_VAR3_NAME = {{fieldname_in_met_output}}_{{accum_hh}}_A{{accum_no_pad}}_ENS_FREQ_{{field_thresholds[2]}}
-FCST_VAR3_LEVELS = A{{accum_hh}}
-FCST_VAR3_THRESH = ==0.1
-OBS_VAR3_NAME = {{fieldname_in_obs_input}}
-OBS_VAR3_LEVELS = A{{accum_hh}}
-OBS_VAR3_THRESH = {{field_thresholds[2]}}
-OBS_VAR3_OPTIONS = convert(x) = 100.0*x;
-
-FCST_VAR4_NAME = {{fieldname_in_met_output}}_{{accum_hh}}_A{{accum_no_pad}}_ENS_FREQ_{{field_thresholds[3]}}
-FCST_VAR4_LEVELS = A{{accum_hh}}
-FCST_VAR4_THRESH = ==0.1
-OBS_VAR4_NAME = {{fieldname_in_obs_input}}
-OBS_VAR4_LEVELS = A{{accum_hh}}
-OBS_VAR4_THRESH = {{field_thresholds[3]}}
-OBS_VAR4_OPTIONS = convert(x) = 100.0*x;
-
-FCST_VAR5_NAME = {{fieldname_in_met_output}}_{{accum_hh}}_A{{accum_no_pad}}_ENS_FREQ_{{field_thresholds[4]}}
-FCST_VAR5_LEVELS = A{{accum_hh}}
-FCST_VAR5_THRESH = ==0.1
-OBS_VAR5_NAME = {{fieldname_in_obs_input}}
-OBS_VAR5_LEVELS = A{{accum_hh}}
-OBS_VAR5_THRESH = {{field_thresholds[4]}}
-OBS_VAR5_OPTIONS = convert(x) = 100.0*x;
-
-#
-#Process as scalars for neighborhood methods
-## Note that the number of forecast and obs thresholds must match
-## but won't actually be applied to NBRCNT outputs with "nbrhd.field = OBS;"
-#
-FCST_VAR6_NAME = {{fieldname_in_met_output}}_{{accum_hh}}_A{{accum_no_pad}}_ENS_FREQ_{{field_thresholds[0]}}
-FCST_VAR6_LEVELS = A{{accum_hh}}
-FCST_VAR6_THRESH = ==0.1
-FCST_VAR6_OPTIONS = prob = FALSE;
-OBS_VAR6_NAME = {{fieldname_in_obs_input}}
-OBS_VAR6_LEVELS = A{{accum_hh}}
-OBS_VAR6_THRESH = {{field_thresholds[0]}}
-OBS_VAR6_OPTIONS = nbrhd = { field = OBS; shape = SQUARE; width = [ 1 ]; vld_thresh = 1.0; };
- convert(x) = 100.0*x;
-
-FCST_VAR7_NAME = {{fieldname_in_met_output}}_{{accum_hh}}_A{{accum_no_pad}}_ENS_FREQ_{{field_thresholds[1]}}
-FCST_VAR7_LEVELS = A{{accum_hh}}
-FCST_VAR7_THRESH = ==0.1
-FCST_VAR7_OPTIONS = prob = FALSE;
-OBS_VAR7_NAME = {{fieldname_in_obs_input}}
-OBS_VAR7_LEVELS = A{{accum_hh}}
-OBS_VAR7_THRESH = {{field_thresholds[1]}}
-OBS_VAR7_OPTIONS = nbrhd = { field = OBS; shape = SQUARE; width = [ 1 ]; vld_thresh = 1.0; };
- convert(x) = 100.0*x;
-
-FCST_VAR8_NAME = {{fieldname_in_met_output}}_{{accum_hh}}_A{{accum_no_pad}}_ENS_FREQ_{{field_thresholds[2]}}
-FCST_VAR8_LEVELS = A{{accum_hh}}
-FCST_VAR8_THRESH = ==0.1
-FCST_VAR8_OPTIONS = prob = FALSE;
-OBS_VAR8_NAME = {{fieldname_in_obs_input}}
-OBS_VAR8_LEVELS = A{{accum_hh}}
-OBS_VAR8_THRESH = {{field_thresholds[2]}}
-OBS_VAR8_OPTIONS = nbrhd = { field = OBS; shape = SQUARE; width = [ 1 ]; vld_thresh = 1.0; };
- convert(x) = 100.0*x;
-
-FCST_VAR9_NAME = {{fieldname_in_met_output}}_{{accum_hh}}_A{{accum_no_pad}}_ENS_FREQ_{{field_thresholds[3]}}
-FCST_VAR9_LEVELS = A{{accum_hh}}
-FCST_VAR9_THRESH = ==0.1
-FCST_VAR9_OPTIONS = prob = FALSE;
-OBS_VAR9_NAME = {{fieldname_in_obs_input}}
-OBS_VAR9_LEVELS = A{{accum_hh}}
-OBS_VAR9_THRESH = {{field_thresholds[3]}}
-OBS_VAR9_OPTIONS = nbrhd = { field = OBS; shape = SQUARE; width = [ 1 ]; vld_thresh = 1.0; };
- convert(x) = 100.0*x;
-
-FCST_VAR10_NAME = {{fieldname_in_met_output}}_{{accum_hh}}_A{{accum_no_pad}}_ENS_FREQ_{{field_thresholds[4]}}
-FCST_VAR10_LEVELS = A{{accum_hh}}
-FCST_VAR10_THRESH = ==0.1
-FCST_VAR10_OPTIONS = prob = FALSE;
-OBS_VAR10_NAME = {{fieldname_in_obs_input}}
-OBS_VAR10_LEVELS = A{{accum_hh}}
-OBS_VAR10_THRESH = {{field_thresholds[4]}}
-OBS_VAR10_OPTIONS = nbrhd = { field = OBS; shape = SQUARE; width = [ 1 ]; vld_thresh = 1.0; };
- convert(x) = 100.0*x;
-
-#
-# Forecast data time window(s).
-#
-FCST_GRID_STAT_FILE_WINDOW_BEGIN = 0
-FCST_GRID_STAT_FILE_WINDOW_END = 0
-#
-# Observation data time window(s).
-#
-OBS_GRID_STAT_FILE_WINDOW_BEGIN = 0
-OBS_GRID_STAT_FILE_WINDOW_END = 0
-
-# MET GridStat neighborhood values
-# See the MET User's Guide GridStat section for more information
-GRID_STAT_NEIGHBORHOOD_FIELD =
-
-# width value passed to nbrhd dictionary in the MET config file
-GRID_STAT_NEIGHBORHOOD_WIDTH =
-
-# shape value passed to nbrhd dictionary in the MET config file
-GRID_STAT_NEIGHBORHOOD_SHAPE =
-
-# cov thresh list passed to nbrhd dictionary in the MET config file
-GRID_STAT_NEIGHBORHOOD_COV_THRESH = >=0.5
-
-# Set to true to run GridStat separately for each field specified
-# Set to false to create one run of GridStat per run time that
-# includes all fields specified.
-GRID_STAT_ONCE_PER_FIELD = False
-#
-# Set to true if forecast data is probabilistic.
-#
-FCST_IS_PROB = True
-FCST_PROB_IN_GRIB_PDS = False
-#
-# Only used if FCST_IS_PROB is true - sets probabilistic threshold
-#
-FCST_GRID_STAT_PROB_THRESH = ==0.1
-
-GRID_STAT_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE}
-
-# Climatology data
-#GRID_STAT_CLIMO_MEAN_FILE_NAME =
-#GRID_STAT_CLIMO_MEAN_FIELD =
-#GRID_STAT_CLIMO_MEAN_REGRID_METHOD =
-#GRID_STAT_CLIMO_MEAN_REGRID_WIDTH =
-#GRID_STAT_CLIMO_MEAN_REGRID_VLD_THRESH =
-#GRID_STAT_CLIMO_MEAN_REGRID_SHAPE =
-#GRID_STAT_CLIMO_MEAN_TIME_INTERP_METHOD =
-#GRID_STAT_CLIMO_MEAN_MATCH_MONTH =
-#GRID_STAT_CLIMO_MEAN_DAY_INTERVAL =
-#GRID_STAT_CLIMO_MEAN_HOUR_INTERVAL =
-
-#GRID_STAT_CLIMO_STDEV_FILE_NAME =
-#GRID_STAT_CLIMO_STDEV_FIELD =
-#GRID_STAT_CLIMO_STDEV_REGRID_METHOD =
-#GRID_STAT_CLIMO_STDEV_REGRID_WIDTH =
-#GRID_STAT_CLIMO_STDEV_REGRID_VLD_THRESH =
-#GRID_STAT_CLIMO_STDEV_REGRID_SHAPE =
-#GRID_STAT_CLIMO_STDEV_TIME_INTERP_METHOD =
-#GRID_STAT_CLIMO_STDEV_MATCH_MONTH =
-#GRID_STAT_CLIMO_STDEV_DAY_INTERVAL =
-#GRID_STAT_CLIMO_STDEV_HOUR_INTERVAL =
-
-#GRID_STAT_CLIMO_CDF_BINS = 1
-#GRID_STAT_CLIMO_CDF_CENTER_BINS = False
-#GRID_STAT_CLIMO_CDF_WRITE_BINS = True
-
-GRID_STAT_MASK_GRID =
-
-# Statistical output types
-GRID_STAT_OUTPUT_FLAG_FHO = NONE
-GRID_STAT_OUTPUT_FLAG_CTC = NONE
-GRID_STAT_OUTPUT_FLAG_CTS = NONE
-#GRID_STAT_OUTPUT_FLAG_MCTC = NONE
-#GRID_STAT_OUTPUT_FLAG_MCTS = NONE
-GRID_STAT_OUTPUT_FLAG_CNT = NONE
-#GRID_STAT_OUTPUT_FLAG_SL1L2 = NONE
-#GRID_STAT_OUTPUT_FLAG_SAL1L2 = NONE
-#GRID_STAT_OUTPUT_FLAG_VL1L2 = NONE
-#GRID_STAT_OUTPUT_FLAG_VAL1L2 = NONE
-#GRID_STAT_OUTPUT_FLAG_VCNT = NONE
-GRID_STAT_OUTPUT_FLAG_PCT = STAT
-GRID_STAT_OUTPUT_FLAG_PSTD = STAT
-GRID_STAT_OUTPUT_FLAG_PJC = STAT
-GRID_STAT_OUTPUT_FLAG_PRC = STAT
-#GRID_STAT_OUTPUT_FLAG_ECLV = BOTH
-GRID_STAT_OUTPUT_FLAG_NBRCTC = NONE
-GRID_STAT_OUTPUT_FLAG_NBRCTS = NONE
-GRID_STAT_OUTPUT_FLAG_NBRCNT = STAT
-#GRID_STAT_OUTPUT_FLAG_GRAD = BOTH
-#GRID_STAT_OUTPUT_FLAG_DMAP = NONE
-
-# NetCDF matched pairs output file
-#GRID_STAT_NC_PAIRS_VAR_NAME =
-GRID_STAT_NC_PAIRS_FLAG_LATLON = FALSE
-GRID_STAT_NC_PAIRS_FLAG_RAW = FALSE
-GRID_STAT_NC_PAIRS_FLAG_DIFF = FALSE
-GRID_STAT_NC_PAIRS_FLAG_CLIMO = FALSE
-GRID_STAT_NC_PAIRS_FLAG_WEIGHT = FALSE
-GRID_STAT_NC_PAIRS_FLAG_NBRHD = FALSE
-#GRID_STAT_NC_PAIRS_FLAG_FOURIER = FALSE
-#GRID_STAT_NC_PAIRS_FLAG_GRADIENT = FALSE
-#GRID_STAT_NC_PAIRS_FLAG_DISTANCE_MAP = FALSE
-GRID_STAT_NC_PAIRS_FLAG_APPLY_MASK = FALSE
-
-# End of [config] section and start of [dir] section.
-[dir]
-#
-# Directory containing observation input to GridStat.
-#
-OBS_GRID_STAT_INPUT_DIR = {{obs_input_dir}}
-#
-# Directory containing forecast input to GridStat.
-#
-FCST_GRID_STAT_INPUT_DIR = {{fcst_input_dir}}
-#
-# Directory containing climatology mean input to GridStat. Not used in
-# this example.
-#
-GRID_STAT_CLIMO_MEAN_INPUT_DIR =
-#
-# Directory containing climatology mean input to GridStat. Not used in
-# this example.
-#
-GRID_STAT_CLIMO_STDEV_INPUT_DIR =
-#
-# Directory in which to write output from GridStat.
-#
-# OUTPUT_BASE apparently has to be set to something; it cannot be left
-# to its default value. But it is not explicitly used elsewhere in this
-# configuration file.
-#
-OUTPUT_BASE = {{output_base}}
-GRID_STAT_OUTPUT_DIR = {{output_dir}}
-#
-# Directory for staging data.
-#
-STAGING_DIR = {{staging_dir}}
-
-# End of [dir] section and start of [filename_templates] section.
-[filename_templates]
-#
-# Template for observation input to GridStat relative to
-# OBS_GRID_STAT_INPUT_DIR.
-#
-OBS_GRID_STAT_INPUT_TEMPLATE = {{obs_input_fn_template}}
-#
-# Template for forecast input to GridStat relative to
-# FCST_GRID_STAT_INPUT_DIR.
-#
-FCST_GRID_STAT_INPUT_TEMPLATE = {{fcst_input_fn_template}}
-#
-# Template for output from GridStat relative to GRID_STAT_OUTPUT_DIR.
-#
-GRID_STAT_OUTPUT_TEMPLATE =
-#
-# Template for climatology input to GridStat relative to
-# GRID_STAT_CLIMO_MEAN_INPUT_DIR. Not used in this example.
-#
-GRID_STAT_CLIMO_MEAN_INPUT_TEMPLATE =
-#
-# Template for climatology input to GridStat relative to
-# GRID_STAT_CLIMO_STDEV_INPUT_DIR. Not used in this example.
-#
-GRID_STAT_CLIMO_STDEV_INPUT_TEMPLATE =
-#
-# Variable used to specify one or more verification mask files for
-# GridStat. Not used for this example.
-#
-GRID_STAT_VERIFICATION_MASK_TEMPLATE = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly
diff --git a/parm/metplus/GridStat_ensprob_REFC.conf b/parm/metplus/GridStat_ensprob_REFC.conf
deleted file mode 100644
index 95e19af1ce..0000000000
--- a/parm/metplus/GridStat_ensprob_REFC.conf
+++ /dev/null
@@ -1,382 +0,0 @@
-# Ensemble probabilistic GridStat METplus Configuration
-
-[config]
-
-# List of applications (tools) to run.
-PROCESS_LIST = GridStat
-
-# time looping - options are INIT, VALID, RETRO, and REALTIME
-# If set to INIT or RETRO:
-# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set
-# If set to VALID or REALTIME:
-# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set
-LOOP_BY = INIT
-
-# Format of INIT_BEG and INIT_END using % items
-# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc.
-# see www.strftime.org for more information
-# %Y%m%d%H expands to YYYYMMDDHH
-INIT_TIME_FMT = %Y%m%d%H
-
-# Start time for METplus run - must match INIT_TIME_FMT
-INIT_BEG = {{cdate}}
-
-# End time for METplus run - must match INIT_TIME_FMT
-INIT_END = {{cdate}}
-
-# Increment between METplus runs (in seconds if no units are specified).
-# Must be >= 60 seconds.
-INIT_INCREMENT = 3600
-
-# List of forecast leads to process for each run time (init or valid)
-# In hours if units are not specified
-# If unset, defaults to 0 (don't loop through forecast leads)
-LEAD_SEQ = {{fhr_list}}
-#
-# Order of loops to process data - Options are times, processes
-# Not relevant if only one item is in the PROCESS_LIST
-# times = run all wrappers in the PROCESS_LIST for a single run time, then
-# increment the run time and run all wrappers again until all times have
-# been evaluated.
-# processes = run the first wrapper in the PROCESS_LIST for all times
-# specified, then repeat for the next item in the PROCESS_LIST until all
-# wrappers have been run
-#
-LOOP_ORDER = times
-#
-# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud.
-#
-LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}}
-#
-# Specify the name of the METplus log file.
-#
-LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}}
-#
-# Specify the location and name of the final METplus conf file.
-#
-METPLUS_CONF = {GRID_STAT_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}}
-#
-# Location of MET configuration file to pass to GridStat.
-#
-# References PARM_BASE, which is the location of the parm directory
-# corresponding to the ush directory of the run_metplus.py script that
-# is called or the value of the environment variable METPLUS_PARM_BASE
-# if set.
-#
-GRID_STAT_CONFIG_FILE = {PARM_BASE}/met_config/GridStatConfig_wrapped
-
-# grid to remap data. Value is set as the 'to_grid' variable in the 'regrid' dictionary
-# See MET User's Guide for more information
-GRID_STAT_REGRID_TO_GRID = FCST
-GRID_STAT_REGRID_VLD_THRESH = 0.5
-GRID_STAT_REGRID_METHOD = BUDGET
-GRID_STAT_REGRID_WIDTH = 2
-GRID_STAT_REGRID_SHAPE = SQUARE
-
-GRID_STAT_INTERP_FIELD = NONE
-GRID_STAT_INTERP_VLD_THRESH = 1.0
-GRID_STAT_INTERP_SHAPE = SQUARE
-GRID_STAT_INTERP_TYPE_METHOD = NEAREST
-GRID_STAT_INTERP_TYPE_WIDTH = 1
-
-GRID_STAT_GRID_WEIGHT_FLAG = NONE
-#
-# Name to identify model (forecast) data in output.
-#
-# The variable MODEL is recorded in the stat files, and the data in
-# these files is then plotted (e.g. using METViewer). Here, we add a
-# suffix to MODEL that identifies the data as ensemble-probabilistic.
-# This makes it easier to identify each curve.
-#
-MODEL = {{vx_fcst_model_name}}_ensprob
-#
-# Name to identify observation data in output.
-#
-OBTYPE = {{obtype}}
-#
-# Value to enter under the DESC column in the output stat file.
-#
-GRID_STAT_DESC = NA
-
-# List of variables to compare in GridStat - FCST_VAR1 variables correspond
-# to OBS_VAR1 variables
-# Note [FCST/OBS/BOTH]_GRID_STAT_VAR_NAME can be used instead if different evaluations
-# are needed for different tools
-
-GRID_STAT_MET_CONFIG_OVERRIDES = cat_thresh = [NA]; cnt_thresh = [NA]; cnt_logic = UNION; wind_thresh = [NA]; wind_logic = UNION; ci_alpha = [0.05]; rank_corr_flag = FALSE;
-#
-# List of forecast and corresponding observation fields to process.
-#
-# FREQ
-# Process as probability
-#
-FCST_VAR1_NAME = {{fieldname_in_met_output}}_L0_ENS_FREQ_ge20
-FCST_VAR1_LEVELS = L0
-FCST_VAR1_THRESH = ==0.1
-OBS_VAR1_NAME = MergedReflectivityQCComposite
-OBS_VAR1_LEVELS = Z500
-OBS_VAR1_THRESH = ge20
-OBS_VAR1_OPTIONS = censor_thresh = lt-20;
- censor_val = -20.0;
- cnt_thresh = [ >15 ];
- cnt_logic = UNION;
-
-FCST_VAR2_NAME = {{fieldname_in_met_output}}_L0_ENS_FREQ_ge30
-FCST_VAR2_LEVELS = L0
-FCST_VAR2_THRESH = ==0.1
-OBS_VAR2_NAME = MergedReflectivityQCComposite
-OBS_VAR2_LEVELS = Z500
-OBS_VAR2_THRESH = ge30
-OBS_VAR2_OPTIONS = censor_thresh = lt-20;
- censor_val = -20.0;
- cnt_thresh = [ >15 ];
- cnt_logic = UNION;
-
-FCST_VAR3_NAME = {{fieldname_in_met_output}}_L0_ENS_FREQ_ge40
-FCST_VAR3_LEVELS = L0
-FCST_VAR3_THRESH = ==0.1
-OBS_VAR3_NAME = MergedReflectivityQCComposite
-OBS_VAR3_LEVELS = Z500
-OBS_VAR3_THRESH = ge40
-OBS_VAR3_OPTIONS = censor_thresh = lt-20;
- censor_val = -20.0;
- cnt_thresh = [ >15 ];
- cnt_logic = UNION;
-
-FCST_VAR4_NAME = {{fieldname_in_met_output}}_L0_ENS_FREQ_ge50
-FCST_VAR4_LEVELS = L0
-FCST_VAR4_THRESH = ==0.1
-OBS_VAR4_NAME = MergedReflectivityQCComposite
-OBS_VAR4_LEVELS = Z500
-OBS_VAR4_THRESH = ge50
-OBS_VAR4_OPTIONS = censor_thresh = lt-20;
- censor_val = -20.0;
- cnt_thresh = [ >15 ];
- cnt_logic = UNION;
-
-#
-#Process as scalars for neighborhood methods
-## Note that the number of forecast and obs thresholds must match
-## but won't actually be applied to NBRCNT outputs with "nbrhd.field = OBS;"
-#
-FCST_VAR5_NAME = {{fieldname_in_met_output}}_L0_ENS_FREQ_ge20
-FCST_VAR5_LEVELS = L0
-FCST_VAR5_THRESH = ==0.1
-FCST_VAR5_OPTIONS = prob = FALSE;
-OBS_VAR5_NAME = MergedReflectivityQCComposite
-OBS_VAR5_LEVELS = Z500
-OBS_VAR5_THRESH = ge20
-OBS_VAR5_OPTIONS = censor_thresh = lt-20;
- censor_val = -20.0;
- cnt_thresh = [ >15 ];
- cnt_logic = UNION;
- nbrhd = { field = OBS; shape = SQUARE; width = [ 1 ]; vld_thresh = 1.0; }
-
-FCST_VAR6_NAME = {{fieldname_in_met_output}}_L0_ENS_FREQ_ge30
-FCST_VAR6_LEVELS = L0
-FCST_VAR6_THRESH = ==0.1
-FCST_VAR6_OPTIONS = prob = FALSE;
-OBS_VAR6_NAME = MergedReflectivityQCComposite
-OBS_VAR6_LEVELS = Z500
-OBS_VAR6_THRESH = ge30
-OBS_VAR6_OPTIONS = censor_thresh = lt-20;
- censor_val = -20.0;
- cnt_thresh = [ >15 ];
- cnt_logic = UNION;
- nbrhd = { field = OBS; shape = SQUARE; width = [ 1 ]; vld_thresh = 1.0; }
-
-FCST_VAR7_NAME = {{fieldname_in_met_output}}_L0_ENS_FREQ_ge40
-FCST_VAR7_LEVELS = L0
-FCST_VAR7_THRESH = ==0.1
-FCST_VAR7_OPTIONS = prob = FALSE;
-OBS_VAR7_NAME = MergedReflectivityQCComposite
-OBS_VAR7_LEVELS = Z500
-OBS_VAR7_THRESH = ge40
-OBS_VAR7_OPTIONS = censor_thresh = lt-20;
- censor_val = -20.0;
- cnt_thresh = [ >15 ];
- cnt_logic = UNION;
- nbrhd = { field = OBS; shape = SQUARE; width = [ 1 ]; vld_thresh = 1.0; }
-
-FCST_VAR8_NAME = {{fieldname_in_met_output}}_L0_ENS_FREQ_ge50
-FCST_VAR8_LEVELS = L0
-FCST_VAR8_THRESH = ==0.1
-FCST_VAR8_OPTIONS = prob = FALSE;
-OBS_VAR8_NAME = MergedReflectivityQCComposite
-OBS_VAR8_LEVELS = Z500
-OBS_VAR8_THRESH = ge50
-OBS_VAR8_OPTIONS = censor_thresh = lt-20;
- censor_val = -20.0;
- cnt_thresh = [ >15 ];
- cnt_logic = UNION;
- nbrhd = { field = OBS; shape = SQUARE; width = [ 1 ]; vld_thresh = 1.0; }
-
-#
-# Forecast data time window(s).
-#
-#FCST_GRID_STAT_FILE_WINDOW_BEGIN = 0
-#FCST_GRID_STAT_FILE_WINDOW_END = 0
-#
-# Observation data time window(s).
-#
-OBS_GRID_STAT_FILE_WINDOW_BEGIN = -300
-OBS_GRID_STAT_FILE_WINDOW_END = 300
-
-# MET GridStat neighborhood values
-# See the MET User's Guide GridStat section for more information
-GRID_STAT_NEIGHBORHOOD_FIELD =
-
-# width value passed to nbrhd dictionary in the MET config file
-GRID_STAT_NEIGHBORHOOD_WIDTH =
-
-# shape value passed to nbrhd dictionary in the MET config file
-GRID_STAT_NEIGHBORHOOD_SHAPE =
-
-# cov thresh list passed to nbrhd dictionary in the MET config file
-GRID_STAT_NEIGHBORHOOD_COV_THRESH = >=0.5
-
-# Set to true to run GridStat separately for each field specified
-# Set to false to create one run of GridStat per run time that
-# includes all fields specified.
-GRID_STAT_ONCE_PER_FIELD = False
-#
-# Set to true if forecast data is probabilistic.
-#
-FCST_IS_PROB = True
-FCST_PROB_IN_GRIB_PDS = False
-#
-# Only used if FCST_IS_PROB is true - sets probabilistic threshold
-#
-FCST_GRID_STAT_PROB_THRESH = ==0.1
-
-GRID_STAT_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE}
-
-# Climatology data
-#GRID_STAT_CLIMO_MEAN_FILE_NAME =
-#GRID_STAT_CLIMO_MEAN_FIELD =
-#GRID_STAT_CLIMO_MEAN_REGRID_METHOD =
-#GRID_STAT_CLIMO_MEAN_REGRID_WIDTH =
-#GRID_STAT_CLIMO_MEAN_REGRID_VLD_THRESH =
-#GRID_STAT_CLIMO_MEAN_REGRID_SHAPE =
-#GRID_STAT_CLIMO_MEAN_TIME_INTERP_METHOD =
-#GRID_STAT_CLIMO_MEAN_MATCH_MONTH =
-#GRID_STAT_CLIMO_MEAN_DAY_INTERVAL =
-#GRID_STAT_CLIMO_MEAN_HOUR_INTERVAL =
-
-#GRID_STAT_CLIMO_STDEV_FILE_NAME =
-#GRID_STAT_CLIMO_STDEV_FIELD =
-#GRID_STAT_CLIMO_STDEV_REGRID_METHOD =
-#GRID_STAT_CLIMO_STDEV_REGRID_WIDTH =
-#GRID_STAT_CLIMO_STDEV_REGRID_VLD_THRESH =
-#GRID_STAT_CLIMO_STDEV_REGRID_SHAPE =
-#GRID_STAT_CLIMO_STDEV_TIME_INTERP_METHOD =
-#GRID_STAT_CLIMO_STDEV_MATCH_MONTH =
-#GRID_STAT_CLIMO_STDEV_DAY_INTERVAL =
-#GRID_STAT_CLIMO_STDEV_HOUR_INTERVAL =
-
-GRID_STAT_CLIMO_CDF_BINS = 1
-#GRID_STAT_CLIMO_CDF_CENTER_BINS = False
-#GRID_STAT_CLIMO_CDF_WRITE_BINS = True
-
-GRID_STAT_MASK_GRID =
-
-# Statistical output types
-GRID_STAT_OUTPUT_FLAG_FHO = NONE
-GRID_STAT_OUTPUT_FLAG_CTC = NONE
-GRID_STAT_OUTPUT_FLAG_CTS = NONE
-GRID_STAT_OUTPUT_FLAG_MCTC = NONE
-GRID_STAT_OUTPUT_FLAG_MCTS = NONE
-GRID_STAT_OUTPUT_FLAG_CNT = NONE
-GRID_STAT_OUTPUT_FLAG_SL1L2 = NONE
-GRID_STAT_OUTPUT_FLAG_SAL1L2 = NONE
-GRID_STAT_OUTPUT_FLAG_VL1L2 = NONE
-GRID_STAT_OUTPUT_FLAG_VAL1L2 = NONE
-GRID_STAT_OUTPUT_FLAG_VCNT = NONE
-GRID_STAT_OUTPUT_FLAG_PCT = STAT
-GRID_STAT_OUTPUT_FLAG_PSTD = STAT
-GRID_STAT_OUTPUT_FLAG_PJC = STAT
-GRID_STAT_OUTPUT_FLAG_PRC = STAT
-GRID_STAT_OUTPUT_FLAG_ECLV = NONE
-GRID_STAT_OUTPUT_FLAG_NBRCTC = NONE
-GRID_STAT_OUTPUT_FLAG_NBRCTS = NONE
-GRID_STAT_OUTPUT_FLAG_NBRCNT = STAT
-GRID_STAT_OUTPUT_FLAG_GRAD = NONE
-
-# NetCDF matched pairs output file
-#GRID_STAT_NC_PAIRS_VAR_NAME =
-GRID_STAT_NC_PAIRS_FLAG_LATLON = FALSE
-GRID_STAT_NC_PAIRS_FLAG_RAW = FALSE
-GRID_STAT_NC_PAIRS_FLAG_DIFF = FALSE
-GRID_STAT_NC_PAIRS_FLAG_CLIMO = FALSE
-GRID_STAT_NC_PAIRS_FLAG_WEIGHT = FALSE
-GRID_STAT_NC_PAIRS_FLAG_NBRHD = FALSE
-GRID_STAT_NC_PAIRS_FLAG_FOURIER = FALSE
-GRID_STAT_NC_PAIRS_FLAG_GRADIENT = FALSE
-GRID_STAT_NC_PAIRS_FLAG_DISTANCE_MAP = FALSE
-GRID_STAT_NC_PAIRS_FLAG_APPLY_MASK = FALSE
-
-# End of [config] section and start of [dir] section.
-[dir]
-#
-# Directory containing observation input to GridStat.
-#
-OBS_GRID_STAT_INPUT_DIR = {{obs_input_dir}}
-#
-# Directory containing forecast input to GridStat.
-#
-FCST_GRID_STAT_INPUT_DIR = {{fcst_input_dir}}
-#
-# Directory containing climatology mean input to GridStat. Not used in
-# this example.
-#
-GRID_STAT_CLIMO_MEAN_INPUT_DIR =
-#
-# Directory containing climatology mean input to GridStat. Not used in
-# this example.
-#
-GRID_STAT_CLIMO_STDEV_INPUT_DIR =
-#
-# Directory in which to write output from GridStat.
-#
-# OUTPUT_BASE apparently has to be set to something; it cannot be left
-# to its default value. But it is not explicitly used elsewhere in this
-# configuration file.
-#
-OUTPUT_BASE = {{output_base}}
-GRID_STAT_OUTPUT_DIR = {{output_dir}}
-#
-# Directory for staging data.
-#
-STAGING_DIR = {{staging_dir}}
-
-# End of [dir] section and start of [filename_templates] section.
-[filename_templates]
-#
-# Template for observation input to GridStat relative to
-# OBS_GRID_STAT_INPUT_DIR.
-#
-OBS_GRID_STAT_INPUT_TEMPLATE = {{obs_input_fn_template}}
-#
-# Template for forecast input to GridStat relative to
-# FCST_GRID_STAT_INPUT_DIR.
-#
-FCST_GRID_STAT_INPUT_TEMPLATE = {{fcst_input_fn_template}}
-#
-# Template for output from GridStat relative to GRID_STAT_OUTPUT_DIR.
-#
-GRID_STAT_OUTPUT_TEMPLATE =
-#
-# Template for climatology input to GridStat relative to
-# GRID_STAT_CLIMO_MEAN_INPUT_DIR. Not used in this example.
-#
-GRID_STAT_CLIMO_MEAN_INPUT_TEMPLATE =
-#
-# Template for climatology input to GridStat relative to
-# GRID_STAT_CLIMO_STDEV_INPUT_DIR. Not used in this example.
-#
-GRID_STAT_CLIMO_STDEV_INPUT_TEMPLATE =
-#
-# Variable used to specify one or more verification mask files for
-# GridStat. Not used for this example.
-#
-GRID_STAT_VERIFICATION_MASK_TEMPLATE = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly
diff --git a/parm/metplus/GridStat_ensprob_RETOP.conf b/parm/metplus/GridStat_ensprob_RETOP.conf
deleted file mode 100644
index d1f218bea8..0000000000
--- a/parm/metplus/GridStat_ensprob_RETOP.conf
+++ /dev/null
@@ -1,390 +0,0 @@
-# Ensemble probabilistic GridStat METplus Configuration
-
-[config]
-
-# List of applications (tools) to run.
-PROCESS_LIST = GridStat
-
-# time looping - options are INIT, VALID, RETRO, and REALTIME
-# If set to INIT or RETRO:
-# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set
-# If set to VALID or REALTIME:
-# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set
-LOOP_BY = INIT
-
-# Format of INIT_BEG and INIT_END using % items
-# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc.
-# see www.strftime.org for more information
-# %Y%m%d%H expands to YYYYMMDDHH
-INIT_TIME_FMT = %Y%m%d%H
-
-# Start time for METplus run - must match INIT_TIME_FMT
-INIT_BEG = {{cdate}}
-
-# End time for METplus run - must match INIT_TIME_FMT
-INIT_END = {{cdate}}
-
-# Increment between METplus runs (in seconds if no units are specified).
-# Must be >= 60 seconds.
-INIT_INCREMENT = 3600
-
-# List of forecast leads to process for each run time (init or valid)
-# In hours if units are not specified
-# If unset, defaults to 0 (don't loop through forecast leads)
-LEAD_SEQ = {{fhr_list}}
-#
-# Order of loops to process data - Options are times, processes
-# Not relevant if only one item is in the PROCESS_LIST
-# times = run all wrappers in the PROCESS_LIST for a single run time, then
-# increment the run time and run all wrappers again until all times have
-# been evaluated.
-# processes = run the first wrapper in the PROCESS_LIST for all times
-# specified, then repeat for the next item in the PROCESS_LIST until all
-# wrappers have been run
-#
-LOOP_ORDER = times
-#
-# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud.
-#
-LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}}
-#
-# Specify the name of the METplus log file.
-#
-LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}}
-#
-# Specify the location and name of the final METplus conf file.
-#
-METPLUS_CONF = {GRID_STAT_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}}
-#
-# Location of MET configuration file to pass to GridStat.
-#
-# References PARM_BASE, which is the location of the parm directory
-# corresponding to the ush directory of the run_metplus.py script that
-# is called or the value of the environment variable METPLUS_PARM_BASE
-# if set.
-#
-GRID_STAT_CONFIG_FILE = {PARM_BASE}/met_config/GridStatConfig_wrapped
-
-# grid to remap data. Value is set as the 'to_grid' variable in the 'regrid' dictionary
-# See MET User's Guide for more information
-GRID_STAT_REGRID_TO_GRID = FCST
-GRID_STAT_REGRID_VLD_THRESH = 0.5
-GRID_STAT_REGRID_METHOD = BUDGET
-GRID_STAT_REGRID_WIDTH = 2
-GRID_STAT_REGRID_SHAPE = SQUARE
-
-GRID_STAT_INTERP_FIELD = NONE
-GRID_STAT_INTERP_VLD_THRESH = 1.0
-GRID_STAT_INTERP_SHAPE = SQUARE
-GRID_STAT_INTERP_TYPE_METHOD = NEAREST
-GRID_STAT_INTERP_TYPE_WIDTH = 1
-
-GRID_STAT_GRID_WEIGHT_FLAG = NONE
-#
-# Name to identify model (forecast) data in output.
-#
-# The variable MODEL is recorded in the stat files, and the data in
-# these files is then plotted (e.g. using METViewer). Here, we add a
-# suffix to MODEL that identifies the data as ensemble-probabilistic.
-# This makes it easier to identify each curve.
-#
-MODEL = {{vx_fcst_model_name}}_ensprob
-#
-# Name to identify observation data in output.
-#
-OBTYPE = {{obtype}}
-#
-# Value to enter under the DESC column in the output stat file.
-#
-GRID_STAT_DESC = NA
-
-# List of variables to compare in GridStat - FCST_VAR1 variables correspond
-# to OBS_VAR1 variables
-# Note [FCST/OBS/BOTH]_GRID_STAT_VAR_NAME can be used instead if different evaluations
-# are needed for different tools
-
-GRID_STAT_MET_CONFIG_OVERRIDES = cat_thresh = [NA]; cnt_thresh = [NA]; cnt_logic = UNION; wind_thresh = [NA]; wind_logic = UNION; ci_alpha = [0.05]; rank_corr_flag = FALSE;
-#
-# List of forecast and corresponding observation fields to process.
-#
-# FREQ
-# Process as probability
-#
-FCST_VAR1_NAME = {{fieldname_in_met_output}}_L0_ENS_FREQ_ge20
-FCST_VAR1_LEVELS = L0
-FCST_VAR1_THRESH = ==0.1
-OBS_VAR1_NAME = EchoTop18
-OBS_VAR1_LEVELS = Z500
-OBS_VAR1_THRESH = ge20
-OBS_VAR1_OPTIONS = censor_thresh = lt-20.0;
- censor_val = -20.0;
- cnt_thresh = [ >15 ];
- cnt_logic = UNION;
- convert(x) = x * 3280.84 * 0.001;
-
-FCST_VAR2_NAME = {{fieldname_in_met_output}}_L0_ENS_FREQ_ge30
-FCST_VAR2_LEVELS = L0
-FCST_VAR2_THRESH = ==0.1
-OBS_VAR2_NAME = EchoTop18
-OBS_VAR2_LEVELS = Z500
-OBS_VAR2_THRESH = ge30
-OBS_VAR2_OPTIONS = censor_thresh = lt-20.0;
- censor_val = -20.0;
- cnt_thresh = [ >15 ];
- cnt_logic = UNION;
- convert(x) = x * 3280.84 * 0.001;
-
-FCST_VAR3_NAME = {{fieldname_in_met_output}}_L0_ENS_FREQ_ge40
-FCST_VAR3_LEVELS = L0
-FCST_VAR3_THRESH = ==0.1
-OBS_VAR3_NAME = EchoTop18
-OBS_VAR3_LEVELS = Z500
-OBS_VAR3_THRESH = ge40
-OBS_VAR3_OPTIONS = censor_thresh = lt-20.0;
- censor_val = -20.0;
- cnt_thresh = [ >15 ];
- cnt_logic = UNION;
- convert(x) = x * 3280.84 * 0.001;
-
-FCST_VAR4_NAME = {{fieldname_in_met_output}}_L0_ENS_FREQ_ge50
-FCST_VAR4_LEVELS = L0
-FCST_VAR4_THRESH = ==0.1
-OBS_VAR4_NAME = EchoTop18
-OBS_VAR4_LEVELS = Z500
-OBS_VAR4_THRESH = ge50
-OBS_VAR4_OPTIONS = censor_thresh = lt-20.0;
- censor_val = -20.0;
- cnt_thresh = [ >15 ];
- cnt_logic = UNION;
- convert(x) = x * 3280.84 * 0.001;
-
-#
-#Process as scalars for neighborhood methods
-## Note that the number of forecast and obs thresholds must match
-## but won't actually be applied to NBRCNT outputs with "nbrhd.field = OBS;"
-#
-FCST_VAR5_NAME = {{fieldname_in_met_output}}_L0_ENS_FREQ_ge20
-FCST_VAR5_LEVELS = L0
-FCST_VAR5_THRESH = ==0.1
-FCST_VAR5_OPTIONS = prob = FALSE;
-OBS_VAR5_NAME = EchoTop18
-OBS_VAR5_LEVELS = Z500
-OBS_VAR5_THRESH = ge20
-OBS_VAR5_OPTIONS = censor_thresh = lt-20.0;
- censor_val = -20.0;
- cnt_thresh = [ >15 ];
- cnt_logic = UNION;
- convert(x) = x * 3280.84 * 0.001;
- nbrhd = { field = OBS; shape = SQUARE; width = [ 1 ]; vld_thresh = 1.0; }
-
-FCST_VAR6_NAME = {{fieldname_in_met_output}}_L0_ENS_FREQ_ge30
-FCST_VAR6_LEVELS = L0
-FCST_VAR6_THRESH = ==0.1
-FCST_VAR6_OPTIONS = prob = FALSE;
-OBS_VAR6_NAME = EchoTop18
-OBS_VAR6_LEVELS = Z500
-OBS_VAR6_THRESH = ge30
-OBS_VAR6_OPTIONS = censor_thresh = lt-20.0;
- censor_val = -20.0;
- cnt_thresh = [ >15 ];
- cnt_logic = UNION;
- convert(x) = x * 3280.84 * 0.001;
- nbrhd = { field = OBS; shape = SQUARE; width = [ 1 ]; vld_thresh = 1.0; }
-
-FCST_VAR7_NAME = {{fieldname_in_met_output}}_L0_ENS_FREQ_ge40
-FCST_VAR7_LEVELS = L0
-FCST_VAR7_THRESH = ==0.1
-FCST_VAR7_OPTIONS = prob = FALSE;
-OBS_VAR7_NAME = EchoTop18
-OBS_VAR7_LEVELS = Z500
-OBS_VAR7_THRESH = ge40
-OBS_VAR7_OPTIONS = censor_thresh = lt-20.0;
- censor_val = -20.0;
- cnt_thresh = [ >15 ];
- cnt_logic = UNION;
- convert(x) = x * 3280.84 * 0.001;
- nbrhd = { field = OBS; shape = SQUARE; width = [ 1 ]; vld_thresh = 1.0; }
-
-FCST_VAR8_NAME = {{fieldname_in_met_output}}_L0_ENS_FREQ_ge50
-FCST_VAR8_LEVELS = L0
-FCST_VAR8_THRESH = ==0.1
-FCST_VAR8_OPTIONS = prob = FALSE;
-OBS_VAR8_NAME = EchoTop18
-OBS_VAR8_LEVELS = Z500
-OBS_VAR8_THRESH = ge50
-OBS_VAR8_OPTIONS = censor_thresh = lt-20.0;
- censor_val = -20.0;
- cnt_thresh = [ >15 ];
- cnt_logic = UNION;
- convert(x) = x * 3280.84 * 0.001;
- nbrhd = { field = OBS; shape = SQUARE; width = [ 1 ]; vld_thresh = 1.0; }
-
-#
-# Forecast data time window(s).
-#
-#FCST_GRID_STAT_FILE_WINDOW_BEGIN = 0
-#FCST_GRID_STAT_FILE_WINDOW_END = 0
-#
-# Observation data time window(s).
-#
-OBS_GRID_STAT_FILE_WINDOW_BEGIN = -300
-OBS_GRID_STAT_FILE_WINDOW_END = 300
-
-# MET GridStat neighborhood values
-# See the MET User's Guide GridStat section for more information
-GRID_STAT_NEIGHBORHOOD_FIELD =
-
-# width value passed to nbrhd dictionary in the MET config file
-GRID_STAT_NEIGHBORHOOD_WIDTH =
-
-# shape value passed to nbrhd dictionary in the MET config file
-GRID_STAT_NEIGHBORHOOD_SHAPE =
-
-# cov thresh list passed to nbrhd dictionary in the MET config file
-GRID_STAT_NEIGHBORHOOD_COV_THRESH = >=0.5
-
-# Set to true to run GridStat separately for each field specified
-# Set to false to create one run of GridStat per run time that
-# includes all fields specified.
-GRID_STAT_ONCE_PER_FIELD = False
-#
-# Set to true if forecast data is probabilistic.
-#
-FCST_IS_PROB = True
-FCST_PROB_IN_GRIB_PDS = False
-#
-# Only used if FCST_IS_PROB is true - sets probabilistic threshold
-#
-FCST_GRID_STAT_PROB_THRESH = ==0.1
-
-GRID_STAT_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE}
-
-# Climatology data
-#GRID_STAT_CLIMO_MEAN_FILE_NAME =
-#GRID_STAT_CLIMO_MEAN_FIELD =
-#GRID_STAT_CLIMO_MEAN_REGRID_METHOD =
-#GRID_STAT_CLIMO_MEAN_REGRID_WIDTH =
-#GRID_STAT_CLIMO_MEAN_REGRID_VLD_THRESH =
-#GRID_STAT_CLIMO_MEAN_REGRID_SHAPE =
-#GRID_STAT_CLIMO_MEAN_TIME_INTERP_METHOD =
-#GRID_STAT_CLIMO_MEAN_MATCH_MONTH =
-#GRID_STAT_CLIMO_MEAN_DAY_INTERVAL =
-#GRID_STAT_CLIMO_MEAN_HOUR_INTERVAL =
-
-#GRID_STAT_CLIMO_STDEV_FILE_NAME =
-#GRID_STAT_CLIMO_STDEV_FIELD =
-#GRID_STAT_CLIMO_STDEV_REGRID_METHOD =
-#GRID_STAT_CLIMO_STDEV_REGRID_WIDTH =
-#GRID_STAT_CLIMO_STDEV_REGRID_VLD_THRESH =
-#GRID_STAT_CLIMO_STDEV_REGRID_SHAPE =
-#GRID_STAT_CLIMO_STDEV_TIME_INTERP_METHOD =
-#GRID_STAT_CLIMO_STDEV_MATCH_MONTH =
-#GRID_STAT_CLIMO_STDEV_DAY_INTERVAL =
-#GRID_STAT_CLIMO_STDEV_HOUR_INTERVAL =
-
-GRID_STAT_CLIMO_CDF_BINS = 1
-#GRID_STAT_CLIMO_CDF_CENTER_BINS = False
-#GRID_STAT_CLIMO_CDF_WRITE_BINS = True
-
-GRID_STAT_MASK_GRID =
-
-# Statistical output types
-GRID_STAT_OUTPUT_FLAG_FHO = NONE
-GRID_STAT_OUTPUT_FLAG_CTC = NONE
-GRID_STAT_OUTPUT_FLAG_CTS = NONE
-GRID_STAT_OUTPUT_FLAG_MCTC = NONE
-GRID_STAT_OUTPUT_FLAG_MCTS = NONE
-GRID_STAT_OUTPUT_FLAG_CNT = NONE
-GRID_STAT_OUTPUT_FLAG_SL1L2 = NONE
-GRID_STAT_OUTPUT_FLAG_SAL1L2 = NONE
-GRID_STAT_OUTPUT_FLAG_VL1L2 = NONE
-GRID_STAT_OUTPUT_FLAG_VAL1L2 = NONE
-GRID_STAT_OUTPUT_FLAG_VCNT = NONE
-GRID_STAT_OUTPUT_FLAG_PCT = STAT
-GRID_STAT_OUTPUT_FLAG_PSTD = STAT
-GRID_STAT_OUTPUT_FLAG_PJC = STAT
-GRID_STAT_OUTPUT_FLAG_PRC = STAT
-GRID_STAT_OUTPUT_FLAG_ECLV = NONE
-GRID_STAT_OUTPUT_FLAG_NBRCTC = NONE
-GRID_STAT_OUTPUT_FLAG_NBRCTS = NONE
-GRID_STAT_OUTPUT_FLAG_NBRCNT = STAT
-GRID_STAT_OUTPUT_FLAG_GRAD = NONE
-
-# NetCDF matched pairs output file
-#GRID_STAT_NC_PAIRS_VAR_NAME =
-GRID_STAT_NC_PAIRS_FLAG_LATLON = FALSE
-GRID_STAT_NC_PAIRS_FLAG_RAW = FALSE
-GRID_STAT_NC_PAIRS_FLAG_DIFF = FALSE
-GRID_STAT_NC_PAIRS_FLAG_CLIMO = FALSE
-GRID_STAT_NC_PAIRS_FLAG_WEIGHT = FALSE
-GRID_STAT_NC_PAIRS_FLAG_NBRHD = FALSE
-GRID_STAT_NC_PAIRS_FLAG_FOURIER = FALSE
-GRID_STAT_NC_PAIRS_FLAG_GRADIENT = FALSE
-GRID_STAT_NC_PAIRS_FLAG_DISTANCE_MAP = FALSE
-GRID_STAT_NC_PAIRS_FLAG_APPLY_MASK = FALSE
-
-# End of [config] section and start of [dir] section.
-[dir]
-#
-# Directory containing observation input to GridStat.
-#
-OBS_GRID_STAT_INPUT_DIR = {{obs_input_dir}}
-#
-# Directory containing forecast input to GridStat.
-#
-FCST_GRID_STAT_INPUT_DIR = {{fcst_input_dir}}
-#
-# Directory containing climatology mean input to GridStat. Not used in
-# this example.
-#
-GRID_STAT_CLIMO_MEAN_INPUT_DIR =
-#
-# Directory containing climatology mean input to GridStat. Not used in
-# this example.
-#
-GRID_STAT_CLIMO_STDEV_INPUT_DIR =
-#
-# Directory in which to write output from GridStat.
-#
-# OUTPUT_BASE apparently has to be set to something; it cannot be left
-# to its default value. But it is not explicitly used elsewhere in this
-# configuration file.
-#
-OUTPUT_BASE = {{output_base}}
-GRID_STAT_OUTPUT_DIR = {{output_dir}}
-#
-# Directory for staging data.
-#
-STAGING_DIR = {{staging_dir}}
-
-# End of [dir] section and start of [filename_templates] section.
-[filename_templates]
-#
-# Template for observation input to GridStat relative to
-# OBS_GRID_STAT_INPUT_DIR.
-#
-OBS_GRID_STAT_INPUT_TEMPLATE = {{obs_input_fn_template}}
-#
-# Template for forecast input to GridStat relative to
-# FCST_GRID_STAT_INPUT_DIR.
-#
-FCST_GRID_STAT_INPUT_TEMPLATE = {{fcst_input_fn_template}}
-#
-# Template for output from GridStat relative to GRID_STAT_OUTPUT_DIR.
-#
-GRID_STAT_OUTPUT_TEMPLATE =
-#
-# Template for climatology input to GridStat relative to
-# GRID_STAT_CLIMO_MEAN_INPUT_DIR. Not used in this example.
-#
-GRID_STAT_CLIMO_MEAN_INPUT_TEMPLATE =
-#
-# Template for climatology input to GridStat relative to
-# GRID_STAT_CLIMO_STDEV_INPUT_DIR. Not used in this example.
-#
-GRID_STAT_CLIMO_STDEV_INPUT_TEMPLATE =
-#
-# Variable used to specify one or more verification mask files for
-# GridStat. Not used for this example.
-#
-GRID_STAT_VERIFICATION_MASK_TEMPLATE = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly
diff --git a/parm/metplus/GridStat_or_PointStat.conf b/parm/metplus/GridStat_or_PointStat.conf
new file mode 100644
index 0000000000..c90783862b
--- /dev/null
+++ b/parm/metplus/GridStat_or_PointStat.conf
@@ -0,0 +1,940 @@
+# {{MetplusToolName}} METplus Configuration
+
+[config]
+
+# List of applications (tools) to run.
+PROCESS_LIST = {{MetplusToolName}}
+
+# time looping - options are INIT, VALID, RETRO, and REALTIME
+# If set to INIT or RETRO:
+# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set
+# If set to VALID or REALTIME:
+# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set
+LOOP_BY = INIT
+
+# Format of INIT_BEG and INIT_END using % items
+# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc.
+# see www.strftime.org for more information
+# %Y%m%d%H expands to YYYYMMDDHH
+INIT_TIME_FMT = %Y%m%d%H
+
+# Start time for METplus run - must match INIT_TIME_FMT
+INIT_BEG = {{cdate}}
+
+# End time for METplus run - must match INIT_TIME_FMT
+INIT_END = {{cdate}}
+
+# Increment between METplus runs (in seconds if no units are specified).
+# Must be >= 60 seconds.
+INIT_INCREMENT = 3600
+
+# List of forecast leads to process for each run time (init or valid)
+# In hours if units are not specified
+# If unset, defaults to 0 (don't loop through forecast leads)
+LEAD_SEQ = {{fhr_list}}
+#
+# Order of loops to process data - Options are times, processes
+# Not relevant if only one item is in the PROCESS_LIST
+# times = run all wrappers in the PROCESS_LIST for a single run time, then
+# increment the run time and run all wrappers again until all times have
+# been evaluated.
+# processes = run the first wrapper in the PROCESS_LIST for all times
+# specified, then repeat for the next item in the PROCESS_LIST until all
+# wrappers have been run
+#
+LOOP_ORDER = times
+#
+# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud.
+#
+LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}}
+#
+# Specify the name of the METplus log file.
+#
+LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}}
+#
+# Specify the location and name of the final METplus conf file.
+#
+METPLUS_CONF = {{ '{' ~ METPLUS_TOOL_NAME ~ '_OUTPUT_DIR}' }}/metplus_final.{{metplus_config_fn}}
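+{#-
+For illustration: the string concatenation above keeps the braces literal
+so that METplus (not Jinja) resolves the directory variable. For example,
+with METPLUS_TOOL_NAME set to 'GRID_STAT' and a hypothetical
+metplus_config_fn of 'GridStat_REFC.conf', the line renders as
+
+  METPLUS_CONF = {GRID_STAT_OUTPUT_DIR}/metplus_final.GridStat_REFC.conf
+#}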
+#
+# Location of MET configuration file to pass to {{MetplusToolName}}.
+#
+# References PARM_BASE, which is the location of the parm directory
+# corresponding to the ush directory of the run_metplus.py script that
+# is called or the value of the environment variable METPLUS_PARM_BASE
+# if set.
+#
+{{METPLUS_TOOL_NAME}}_CONFIG_FILE = {PARM_BASE}/met_config/{{MetplusToolName}}Config_wrapped
+
+{%- if (METPLUS_TOOL_NAME == 'GRID_STAT') %}
+#
+# Grid to remap data. Value is set as the 'to_grid' variable in the
+# 'regrid' dictionary. See MET User's Guide for more information.
+#
+{{METPLUS_TOOL_NAME}}_REGRID_TO_GRID = FCST
+{{METPLUS_TOOL_NAME}}_REGRID_VLD_THRESH = 0.5
+{{METPLUS_TOOL_NAME}}_REGRID_METHOD = BUDGET
+{{METPLUS_TOOL_NAME}}_REGRID_WIDTH = 2
+{{METPLUS_TOOL_NAME}}_REGRID_SHAPE = SQUARE
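+{#-
+Note (assumption for illustration): REGRID_TO_GRID = FCST directs MET to
+regrid the observations to the forecast grid, and BUDGET selects budget
+(accumulation-preserving) interpolation, which is generally appropriate
+for accumulated quantities.
+#}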
+{%- elif (METPLUS_TOOL_NAME == 'POINT_STAT') %}
+#
+# Regrid to the specified grid. Indicate NONE for no regridding, or give
+# the grid id (e.g. G212).
+#
+{{METPLUS_TOOL_NAME}}_REGRID_TO_GRID = NONE
+{{METPLUS_TOOL_NAME}}_REGRID_METHOD = BILIN
+{{METPLUS_TOOL_NAME}}_REGRID_WIDTH = 2
+{%- endif %}
+
+{%- if (METPLUS_TOOL_NAME == 'POINT_STAT') %}
+#
+{{METPLUS_TOOL_NAME}}_OBS_QUALITY_INC = 0, 1, 2, 3, 9, NA
+#{{METPLUS_TOOL_NAME}}_OBS_QUALITY_EXC =
+{%- endif %}
+
+{%- if (METPLUS_TOOL_NAME == 'GRID_STAT') %}
+
+ {%- if (input_field_group == 'APCP') %}
+
+#{{METPLUS_TOOL_NAME}}_INTERP_FIELD = BOTH
+#{{METPLUS_TOOL_NAME}}_INTERP_VLD_THRESH = 1.0
+#{{METPLUS_TOOL_NAME}}_INTERP_SHAPE = SQUARE
+#{{METPLUS_TOOL_NAME}}_INTERP_TYPE_METHOD = NEAREST
+#{{METPLUS_TOOL_NAME}}_INTERP_TYPE_WIDTH = 1
+
+#{{METPLUS_TOOL_NAME}}_GRID_WEIGHT_FLAG =
+ {%- elif input_field_group in ['REFC', 'RETOP'] %}
+
+{{METPLUS_TOOL_NAME}}_INTERP_FIELD = NONE
+{{METPLUS_TOOL_NAME}}_INTERP_VLD_THRESH = 1.0
+{{METPLUS_TOOL_NAME}}_INTERP_SHAPE = SQUARE
+{{METPLUS_TOOL_NAME}}_INTERP_TYPE_METHOD = NEAREST
+{{METPLUS_TOOL_NAME}}_INTERP_TYPE_WIDTH = 1
+
+{{METPLUS_TOOL_NAME}}_GRID_WEIGHT_FLAG = NONE
+ {%- endif %}
+
+{%- elif (METPLUS_TOOL_NAME == 'POINT_STAT') %}
+
+#{{METPLUS_TOOL_NAME}}_INTERP_VLD_THRESH =
+#{{METPLUS_TOOL_NAME}}_INTERP_SHAPE =
+{{METPLUS_TOOL_NAME}}_INTERP_TYPE_METHOD = BILIN
+{{METPLUS_TOOL_NAME}}_INTERP_TYPE_WIDTH = 2
+
+{%- endif %}
+#
+# Name to identify model (forecast) data in output.
+#
+# The variable MODEL is recorded in the stat files, and the data in
+# these files is then plotted (e.g. using METViewer). Here, we add a
+# suffix to MODEL that identifies the forecast ensemble member. This
+# makes it easier to identify each curve.
+#
+MODEL = {{vx_fcst_model_name}}_{{ensmem_name}}
+#
+# Name to identify observation data in output.
+#
+OBTYPE = {{obtype}}
+#
+# Value to enter under the DESC column in the output stat file. Here,
+# we store the value of the original lead in this column, i.e. the lead
+# with zero corresponding to the actual start time of the forecast (which
+# is (cdate - time_lag)), not to cdate. This is just the lead in
+# LEAD_SEQ with the time lag (time_lag) of the current forecast member
+# added on.
+#
+# Uncomment this line only after upgrading to METplus 5.x.
+#{{METPLUS_TOOL_NAME}}_DESC = {lead?fmt=%H%M%S?shift={{time_lag}}}
+{{METPLUS_TOOL_NAME}}_DESC = NA
+#
+# Verification Masking regions
+# Indicate which grid and polygon masking region, if applicable
+#
+{{METPLUS_TOOL_NAME}}_MASK_GRID =
+
+{%- if (METPLUS_TOOL_NAME == 'POINT_STAT') %}
+#
+# List of full paths to poly masking files. NOTE: Only short lists of poly
+# files work (those that fit on one line); a long list will result in an
+# environment variable that is too long, resulting in an error. For long
+# lists of poly masking files (i.e. all the mask files in the NCEP_mask
+# directory), define these in the METplus {{MetplusToolName}} configuration file.
+#
+{{METPLUS_TOOL_NAME}}_POLY = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly
+{{METPLUS_TOOL_NAME}}_STATION_ID =
+
+# Message types: to return all message types, leave this empty;
+# otherwise, indicate the message types of interest.
+{{METPLUS_TOOL_NAME}}_MESSAGE_TYPE = {{fieldname_in_met_filedir_names}}
+{%- endif %}
+{%- set overrides_indent_len = 0 %}
+{%- set overrides_indent = '' %}
+{%- if (METPLUS_TOOL_NAME == 'GRID_STAT') %}
+ {%- set overrides_indent_len = 33 %}
+ {%- set overrides_indent = ' '*overrides_indent_len %}
+#
+# Overrides of MET configuration defaults.
+#
+{{METPLUS_TOOL_NAME}}_MET_CONFIG_OVERRIDES = cat_thresh = [NA];
+{{overrides_indent}}cnt_thresh = [NA];
+{{overrides_indent}}cnt_logic = UNION;
+{{overrides_indent}}wind_thresh = [NA];
+{{overrides_indent}}wind_logic = UNION;
+{{overrides_indent}}ci_alpha = [0.05];
+{{overrides_indent}}rank_corr_flag = FALSE;
+{%- endif %}
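+{#-
+For illustration: overrides_indent_len = 33 is the length of the rendered
+string 'GRID_STAT_MET_CONFIG_OVERRIDES = ' (30 characters plus ' = '), so
+the continuation lines render flush with the start of the value, e.g.
+
+  GRID_STAT_MET_CONFIG_OVERRIDES = cat_thresh = [NA];
+                                   cnt_thresh = [NA];
+#}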
+#
+# List of forecast and corresponding observation fields to process.
+#
+{%- if (METPLUS_TOOL_NAME == 'GRID_STAT') %}
+ {%- if input_field_group in ['APCP', 'ASNOW'] %}
+# Note that for accumulated fields such as APCP and ASNOW, in the input
+# forecast and observation files (which are generated by MET's PcpCombine
+# tool) the accumulation period is appended to the field name, so the
+# same is done here.
+#
+ {%- endif %}
+{%- endif %}
+# Note on use of set_attr_lead and ensemble member time-lagging:
+# -------------------------------------------------------------
+# The set_attr_lead parameter appearing below in [FCST|OBS]_VAR_OPTIONS
+# specifies the lead to use both in naming of the output .stat and .nc
+# files and for setting the lead values contained in those files. This
+# option causes MET/METplus to use the lead values in the variable LEAD_SEQ
+# set above, which are the same for all ensemble forecast members (i.e.
+# regardless of whether members are time lagged with respect to the
+# nominal cycle date specified by cdate). If set_attr_lead were not
+# specified as below, then MET/METplus would get the lead from the input
+# forecast file, and that would in general differ from one ensemble member
+# to the next depending on whether the member is time-lagged. That would
+# cause confusion, so here, we always use lead values with zero lead
+# corresponding to the nominal cdate.
+#
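+{#-
+For illustration: with the set_attr_lead option set below, the rendered
+entry for a hypothetical 6-hour lead looks like
+
+  FCST_VAR1_OPTIONS = set_attr_lead = "060000";
+
+i.e. the lead is stamped as HHMMSS from LEAD_SEQ rather than read from
+the (possibly time-lagged) input forecast file.
+#}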
+{#-
+Import the file containing jinja macros.
+#}
+{%- import metplus_templates_dir ~ '/metplus_macros.jinja' as metplus_macros %}
+
+{#-
+Jinja requires certain variables to be defined globally within the template
+before they can be used in if-statements and other scopes (see Jinja
+scoping rules). Define such variables.
+#}
+{%- set levels_fcst = '' %}
+{%- set levels_obs = '' %}
+{%- set indx_input_level_fcst = '' %}
+
+{%- set valid_threshes_fcst = [] %}
+{%- set valid_threshes_obs = [] %}
+{%- set threshes_fcst = [] %}
+{%- set threshes_obs = [] %}
+{%- set indx_input_thresh_fcst = '' %}
+
+{%- set opts_indent = '' %}
+{%- set opts_indent_len = '' %}
+{%- set tmp = '' %}
+{%- set error_msg = '' %}
+{#-
+Make sure that the set of field groups for forecasts and observations
+are identical.
+#}
+{%- set fgs_fcst = vx_config_dict['fcst'].keys()|list %}
+{%- set fgs_obs = vx_config_dict['obs'].keys()|list %}
+{%- if (fgs_fcst != fgs_obs) %}
+ {%- set error_msg = '\n' ~
+'The list of valid field groups for forecasts (fgs_fcst) must be identical\n' ~
+'to that for observations (fgs_obs) but isn\'t:\n' ~
+' fgs_fcst = ' ~ fgs_fcst ~ '\n' ~
+' fgs_obs = ' ~ fgs_obs %}
+ {{metplus_macros.print_err_and_quit(error_msg)}}
+{%- endif %}
+
+{#-
+Extract the lists of forecast and observation dictionaries containing
+the valid fields, levels, and thresholds corresponding to the specified
+field group (input_field_group). Note that it would be simpler to have
+these be just dictionaries in which the keys are the field names (instead
+of them being LISTS of dictionaries in which each dictionary contains a
+single key that is the field name), but that approach cannot be used here
+because it is possible for field names to be repeated (for both forecasts
+and observations). For example, in the observations, the field name
+'PRWE' appears more than once, each time with a different threshold, and
+the combination of name and threshold is what constitutes a unique field,
+not just the name by itself.
+#}
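+{#-
+For illustration (hypothetical values), such a list of dictionaries might
+look like
+
+  [ {'PRWE': {'Z0': ['ge161&&le163']}},
+    {'PRWE': {'Z0': ['ge164&&le166']}} ]
+
+where the field name 'PRWE' is repeated, each time with a different
+threshold.
+#}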
+{%- set fields_levels_threshes_fcst = vx_config_dict['fcst'][input_field_group] %}
+{%- set fields_levels_threshes_obs = vx_config_dict['obs'][input_field_group] %}
+
+{#-
+Reset the specified forecast level so that if it happens to be an
+accumulation (e.g. 'A03'), the leading zeros in front of the hour are
+stripped out (e.g. reset to 'A3').
+#}
+{%- set input_level_fcst = metplus_macros.get_accumulation_no_zero_pad(input_level_fcst) %}
+
+{#-
+Ensure that the specified input forecast level(s) (input_level_fcst) and
+threshold(s) (input_thresh_fcst) are valid, i.e. that they are in the
+set(s) of valid forecast levels and thresholds, respectively, specified
+in fields_levels_threshes_fcst.
+#}
+{{- metplus_macros.check_level(fields_levels_threshes_fcst, input_level_fcst) }}
+{{- metplus_macros.check_thresh(fields_levels_threshes_fcst, input_level_fcst, input_thresh_fcst) }}
+
+{#-
+For convenience, create lists of valid forecast and observation field
+names.
+#}
+{%- set num_valid_fields_fcst = fields_levels_threshes_fcst|length %}
+{%- set valid_fields_fcst = [] %}
+{%- for i in range(0,num_valid_fields_fcst) %}
+ {%- set field = fields_levels_threshes_fcst[i].keys()|list|join('') %}
+ {%- set tmp = valid_fields_fcst.append(field) %}
+{%- endfor %}
+
+{%- set valid_fields_obs = [] %}
+{%- set num_valid_fields_obs = fields_levels_threshes_obs|length %}
+{%- for i in range(0,num_valid_fields_obs) %}
+ {%- set field = fields_levels_threshes_obs[i].keys()|list|join('') %}
+ {%- set tmp = valid_fields_obs.append(field) %}
+{%- endfor %}
+
+{#-
+Ensure that the number of valid fields for forecasts is equal to that
+for the observations.
+#}
+{%- set num_valid_fields = 0 %}
+{%- if (num_valid_fields_fcst != num_valid_fields_obs) %}
+ {%- set error_msg = '\n' ~
+'The number of valid forecast fields (num_valid_fields_fcst) must be\n' ~
+'equal to the number of valid observation fields (num_valid_fields_obs)\n' ~
+'but isn\'t:\n' ~
+' num_valid_fields_fcst = ' ~ num_valid_fields_fcst ~ '\n' ~
+' num_valid_fields_obs = ' ~ num_valid_fields_obs ~ '\n' ~
+'The lists of valid forecast and observation fields are:\n' ~
+' valid_fields_fcst = ' ~ valid_fields_fcst ~ '\n' ~
+' valid_fields_obs = ' ~ valid_fields_obs ~ '\n' %}
+ {{metplus_macros.print_err_and_quit(error_msg)}}
+{%- else %}
+ {%- set num_valid_fields = num_valid_fields_fcst %}
+{%- endif %}
+
+{#-
+Loop over the valid fields and set field names, levels, thresholds, and/
+or options for each field, both for forecasts and for observations, in
+the METplus configuration file.
+#}
+{%- set ns = namespace(var_count = 0) %}
+{%- for i in range(0,num_valid_fields) %}
+
+ {%- set field_fcst = valid_fields_fcst[i] %}
+ {%- set field_obs = valid_fields_obs[i] %}
+
+{#-
+For convenience, create lists of valid forecast and observation levels
+for the current field. Then check that the number of valid levels for
+forecasts is the same as that for observations.
+#}
+ {%- set valid_levels_fcst = fields_levels_threshes_fcst[i][field_fcst].keys()|list %}
+ {%- set valid_levels_obs = fields_levels_threshes_obs[i][field_obs].keys()|list %}
+
+ {%- set num_valid_levels = 0 %}
+ {%- set num_valid_levels_fcst = valid_levels_fcst|length %}
+ {%- set num_valid_levels_obs = valid_levels_obs|length %}
+ {%- if (num_valid_levels_fcst != num_valid_levels_obs) %}
+ {%- set error_msg = '\n' ~
+'The number of valid forecast levels (num_valid_levels_fcst) must be\n' ~
+'equal to the number of valid observation levels (num_valid_levels_obs)\n' ~
+'but isn\'t:\n' ~
+' num_valid_levels_fcst = ' ~ num_valid_levels_fcst ~ '\n' ~
+' num_valid_levels_obs = ' ~ num_valid_levels_obs ~ '\n' %}
+ {{metplus_macros.print_err_and_quit(error_msg)}}
+ {%- else %}
+ {%- set num_valid_levels = num_valid_levels_fcst %}
+ {%- endif %}
+
+{#-
+Make sure that input_level_fcst is set to a valid value.
+#}
+ {%- if (input_level_fcst != 'all') and (input_level_fcst not in valid_levels_fcst) %}
+ {%- set error_msg = '\n' ~
+'The input forecast level (input_level_fcst) must either be set to \'all\',\n' ~
+'or it must be set to one of the elements in the list of valid levels\n' ~
+'(valid_levels_fcst) for the current forecast field (field_fcst). This\n' ~
+'is not the case:\n' ~
+' field_fcst = ' ~ field_fcst ~ '\n' ~
+' input_level_fcst = ' ~ input_level_fcst ~ '\n' ~
+' valid_levels_fcst = ' ~ valid_levels_fcst ~ '\n' %}
+ {{metplus_macros.print_err_and_quit(error_msg)}}
+ {%- endif %}
+
+{#-
+Increment the METplus variable counter.
+#}
+ {%- set ns.var_count = ns.var_count+1 %}
+
+{#-
+Set forecast field name. Note that this has to exactly match the name
+of the field in the input forecast file.
+
+For accumulated fields, the input forecast file is generated by MET's
+PcpCombine tool. In that file, the field name consists of the forecast
+field name here (field_fcst) with the accumulation period appended to
+it (separated by an underscore), so we must do the same here to get an
+exact match.
+#}
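+{#-
+For illustration: with a hypothetical field_fcst of 'APCP' and accum_hh
+of '03', the line below renders as
+
+  FCST_VAR1_NAME = APCP_03
+
+matching the variable name that PcpCombine writes to its output file.
+#}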
+ {%- if (input_field_group in ['APCP', 'ASNOW']) %}
+FCST_VAR{{ns.var_count}}_NAME = {{field_fcst}}_{{accum_hh}}
+ {%- else %}
+FCST_VAR{{ns.var_count}}_NAME = {{field_fcst}}
+ {%- endif %}
+
+{#-
+Set forecast field level(s).
+#}
+ {%- if (input_level_fcst == 'all') %}
+ {%- set levels_fcst = valid_levels_fcst %}
+{#-
+If input_level_fcst is set to a specific value:
+ 1) Ensure that input_level_fcst exists in the list of valid forecast
+ levels.
+ 2) Get the index of input_level_fcst in the list of valid forecast
+ levels. This will be needed later below when setting the observation
+ level(s).
+ 3) Use this index to set the forecast level to a one-element list
+ containing the specified forecast level.
+#}
+ {%- else %}
+ {%- if input_level_fcst not in valid_levels_fcst %}
+ {%- set error_msg = '\n' ~
+'For the current forecast field (field_fcst), the input forecast level\n' ~
+'(input_level_fcst) does not exist in the list of valid forecast levels\n' ~
+'(valid_levels_fcst):\n' ~
+' field_fcst = ' ~ field_fcst ~ '\n' ~
+' input_level_fcst = ' ~ input_level_fcst ~ '\n' ~
+' valid_levels_fcst = ' ~ valid_levels_fcst ~ '\n' %}
+ {{metplus_macros.print_err_and_quit(error_msg)}}
+ {%- endif %}
+ {%- set indx_input_level_fcst = valid_levels_fcst.index(input_level_fcst) %}
+ {%- set levels_fcst = [valid_levels_fcst[indx_input_level_fcst]] %}
+ {%- endif %}
+FCST_VAR{{ns.var_count}}_LEVELS = {{levels_fcst|join(', ')}}
+
+{#-
+Set forecast field threshold(s). Note that no forecast thresholds are
+included in the METplus configuration file if input_thresh_fcst is set
+to 'none'.
+#}
+ {%- if (input_thresh_fcst != 'none') %}
+{#-
+If input_level_fcst is set to 'all' and there is more than one (forecast
+or observation) level to be verified for the current (forecast or
+observation) field, then the list of forecast thresholds must be identical
+for every forecast level. Check for this. Note that this
+restriction includes the order of the thresholds, i.e. the set of
+thresholds for each level must be in the same order as for all other
+levels.
+#}
+ {%- if (input_level_fcst == 'all') and (num_valid_levels > 1) %}
+ {{- metplus_macros.check_for_identical_threshes_by_level(
+ field_fcst, fields_levels_threshes_fcst[i]) }}
+ {%- endif %}
+{#-
+Now set the list of valid forecast thresholds to the one corresponding
+to the first (zeroth) forecast level in the list of forecast levels set
+above. We can do this because, for the case of a single forecast level,
+there is only one list of forecast thresholds to consider (the first
+one), and for the case of all levels, all levels have the same set of
+thresholds (as verified by the check above).
+#}
+ {%- set valid_threshes_fcst = fields_levels_threshes_fcst[i][field_fcst][levels_fcst[0]] %}
+{#-
+If input_thresh_fcst is set to 'all', set the list of forecast thresholds
+to the full set of valid values.
+#}
+ {%- if (input_thresh_fcst == 'all') %}
+
+ {%- set threshes_fcst = valid_threshes_fcst %}
+{#-
+If input_thresh_fcst is set to a specific value:
+ 1) Ensure that input_thresh_fcst exists in the list of valid forecast
+ thresholds.
+ 2) Get the index of input_thresh_fcst in the list of valid forecast
+ thresholds. This will be needed later below when setting the
+ observation threshold(s).
+ 3) Use this index to set the forecast threshold to a one-element list
+ containing the specified forecast threshold.
+#}
+ {%- else %}
+
+ {%- if input_thresh_fcst not in valid_threshes_fcst %}
+ {%- set error_msg = '\n' ~
+'For the current forecast field (field_fcst) and list of forecast level(s)\n' ~
+'(levels_fcst), the input forecast threshold (input_thresh_fcst) does not\n' ~
+'exist in the list of valid forecast thresholds (valid_threshes_fcst):\n' ~
+' field_fcst = ' ~ field_fcst ~ '\n' ~
+' levels_fcst = ' ~ levels_fcst ~ '\n' ~
+' valid_threshes_fcst = ' ~ valid_threshes_fcst ~ '\n' ~
+' input_thresh_fcst = ' ~ input_thresh_fcst ~ '\n' %}
+ {{metplus_macros.print_err_and_quit(error_msg)}}
+ {%- endif %}
+ {%- set indx_input_thresh_fcst = valid_threshes_fcst.index(input_thresh_fcst) %}
+ {%- set threshes_fcst = [valid_threshes_fcst[indx_input_thresh_fcst]] %}
+
+ {%- endif %}
+{#-
+If threshes_fcst has been set above to a non-empty value (i.e. to something
+other than its default value of an empty list), then set the forecast
+thresholds in the METplus configuration file. Then reset threshes_fcst to
+its default value so that thresholds are processed properly for the next
+field.
+#}
+ {%- if (threshes_fcst != []) %}
+FCST_VAR{{ns.var_count}}_THRESH = {{threshes_fcst|join(', ')}}
+ {%- endif %}
+ {%- set threshes_fcst = [] %}
+
+ {%- endif %}
+
+{#-
+Set forecast field options.
+#}
+FCST_VAR{{ns.var_count}}_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}";
+
+ {%- set opts_indent_len = 20 %}
+ {%- if (ns.var_count > 9) and (ns.var_count <= 99) %}
+ {%- set opts_indent_len = opts_indent_len + 1 %}
+ {%- elif (ns.var_count > 99) and (ns.var_count <= 999) %}
+ {%- set opts_indent_len = opts_indent_len + 2 %}
+ {%- elif (ns.var_count > 999) %}
+ {%- set opts_indent_len = opts_indent_len + 3 %}
+ {%- endif %}
+ {%- set opts_indent = ' '*opts_indent_len %}
+
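+{#-
+For illustration: the base indent of 20 is the length of the rendered
+string 'FCST_VAR1_OPTIONS = ' for a single-digit variable counter; one
+extra space is added per additional digit (e.g. 'FCST_VAR10_OPTIONS = '
+is 21 characters), so continuation lines stay aligned with the value.
+#}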
+ {%- if (input_field_group == 'REFC') %}
+
+ {%- if (field_fcst == 'REFC') %}
+{{opts_indent}}cnt_thresh = [ >15 ];
+{{opts_indent}}cnt_logic = UNION;
+ {%- endif %}
+
+ {%- elif (input_field_group == 'RETOP') %}
+
+ {%- if (field_fcst == 'RETOP') %}
+{{opts_indent}}convert(x) = x * 3.28084 * 0.001;
+{{opts_indent}}cnt_thresh = [ >0 ];
+{{opts_indent}}cnt_logic = UNION;
+ {%- endif %}
+
+ {%- elif (input_field_group == 'ADPSFC') %}
+
+ {%- if (field_fcst in ['WIND']) %}
+{{opts_indent}}GRIB2_pdt = 0; ;; Derive instantaneous 10-m wind from U/V components, overriding max 10-m wind.
+ {%- elif (field_fcst in ['TCDC']) %}
+{{opts_indent}}GRIB_lvl_typ = 200;
+{{opts_indent}}GRIB2_ipdtmpl_index=[27];
+{{opts_indent}}GRIB2_ipdtmpl_val=[255];
+{{opts_indent}}interp = { type = [ { method = NEAREST; width = 1; } ]; }
+ {%- elif (field_fcst in ['VIS']) %}
+{{opts_indent}}censor_thresh = [>16090];
+{{opts_indent}}censor_val = [16090];
+{{opts_indent}}interp = { type = [ { method = NEAREST; width = 1; } ]; }
+ {%- elif (field_fcst in ['HGT']) %}
+{{opts_indent}}GRIB_lvl_typ = 215;
+{{opts_indent}}desc = "CEILING";
+ {%- endif %}
+
+ {%- elif (input_field_group == 'ADPUPA') %}
+
+ {%- if (field_fcst in ['HGT']) %}
+ {%- if (levels_fcst[0] in ['L0']) %}
+{{opts_indent}}GRIB_lvl_typ = 220;
+ {%- endif %}
+ {%- elif (field_fcst in ['CAPE']) %}
+{{opts_indent}}cnt_thresh = [ >0 ];
+ {%- endif %}
+
+ {%- endif %}
+
+{#-
+Set observation field name. Note that this has to exactly match the name
+of the field in the input observation file.
+
+For accumulated fields, the input observation file is generated by MET's
+PcpCombine tool. In that file, the field name consists of the observation
+field name here (field_obs) with the accumulation period appended to it
+(separated by an underscore), so we must do the same here to get an exact
+match.
+
+Note:
+It turns out that for ASNOW, PcpCombine is not run on the observations,
+so we exclude ASNOW from the "if" clause here (so it falls through to
+the "else"). For uniform workflow behavior between APCP and ASNOW,
+consider running PcpCombine on ASNOW observations as well (just as it
+is run on APCP observations).
+ {%- if (input_field_group in ['APCP', 'ASNOW']) %}
+#}
+ {%- if (input_field_group in ['APCP']) %}
+OBS_VAR{{ns.var_count}}_NAME = {{field_obs}}_{{accum_hh}}
+ {%- else %}
+OBS_VAR{{ns.var_count}}_NAME = {{field_obs}}
+ {%- endif %}
+
+{#-
+Set observation field level(s).
+#}
+ {%- if (input_level_fcst == 'all') %}
+ {%- set levels_obs = valid_levels_obs %}
+{#-
+If input_level_fcst is set to a specific forecast level, then the
+observation level is given by the element in the list of valid observation
+levels that has the same index as that of input_level_fcst in the list
+of valid forecast levels.
+#}
+ {%- else %}
+ {%- set levels_obs = [valid_levels_obs[indx_input_level_fcst]] %}
+ {%- endif %}
+OBS_VAR{{ns.var_count}}_LEVELS = {{levels_obs|join(', ')}}
+
+{#-
+Set observation field threshold(s). Note that no observation thresholds
+are included in the METplus configuration file if input_thresh_fcst is
+set to 'none'.
+#}
+ {%- if (input_thresh_fcst != 'none') %}
+{#-
+If input_level_fcst is set to 'all' and there is more than one (forecast
+or observation) level to be verified for the current (forecast or
+observation) field, then the list of observation thresholds must be
+identical for every observation level. Check for this.
+Note that this restriction includes the order of the thresholds, i.e.
+the set of thresholds for each level must be in the same order as for
+all other levels.
+#}
+ {%- if (input_level_fcst == 'all') and (num_valid_levels > 1) %}
+ {{- metplus_macros.check_for_identical_threshes_by_level(
+ field_obs, fields_levels_threshes_obs[i]) }}
+ {%- endif %}
+{#-
+Now set the list of valid observation thresholds to the one corresponding
+to the first (zeroth) observation level in the list of observation levels
+set above. We can do this because, for the case of a single observation
+level, there is only one list of observation thresholds to consider (the
+first one), and for the case of all levels, all levels have the same set
+of thresholds (as verified by the check above).
+#}
+ {%- set valid_threshes_obs = fields_levels_threshes_obs[i][field_obs][levels_obs[0]] %}
+{#-
+If input_thresh_fcst is set to 'all', set the list of observation thresholds
+to the full set of valid values.
+#}
+ {%- if (input_thresh_fcst == 'all') %}
+
+ {%- set threshes_obs = valid_threshes_obs %}
+{#-
+If input_thresh_fcst is set to a specific forecast threshold, then the
+observation threshold is given by the element in the list of valid
+observation thresholds that has the same index as that of input_thresh_fcst
+in the list of valid forecast thresholds.
+#}
+ {%- else %}
+
+ {%- set threshes_obs = [valid_threshes_obs[indx_input_thresh_fcst]] %}
+
+ {%- endif %}
+{#-
+If threshes_obs has been set above to a non-empty value (i.e. to something
+other than its default value of an empty list), then set the observation
+thresholds in the METplus configuration file. Then reset threshes_obs to
+its default value so that thresholds are processed properly for the next
+field.
+#}
+ {%- if (threshes_obs != []) %}
+OBS_VAR{{ns.var_count}}_THRESH = {{threshes_obs|join(', ')}}
+ {%- endif %}
+ {%- set threshes_obs = [] %}
+
+ {%- endif %}
+
+{#-
+Set observation field options.
+#}
+ {%- set opts_indent_len = opts_indent_len - 1 %}
+ {%- set opts_indent = ' '*opts_indent_len %}
+
+ {%- if (input_field_group == 'ASNOW') %}
+
+ {%- if (field_obs == 'ASNOW') %}
+OBS_VAR{{ns.var_count}}_OPTIONS = convert(x) = 100.0*x;
+ {%- endif %}
+
+ {%- elif (input_field_group == 'REFC') %}
+
+ {%- if (field_obs == 'MergedReflectivityQCComposite') %}
+OBS_VAR{{ns.var_count}}_OPTIONS = censor_thresh = [eq-999, <-20];
+{{opts_indent}}censor_val = [-9999, -20];
+{{opts_indent}}cnt_thresh = [ >15 ];
+{{opts_indent}}cnt_logic = UNION;
+ {%- endif %}
+
+ {%- elif (input_field_group == 'RETOP') %}
+
+ {%- if (field_obs in ['EchoTop18']) %}
+OBS_VAR{{ns.var_count}}_OPTIONS = convert(x) = x * 3280.84 * 0.001;
+{{opts_indent}}censor_thresh = [<=-9.84252,eq-3.28084];
+{{opts_indent}}censor_val = [-9999,-16.4042];
+{{opts_indent}}cnt_thresh = [ >0 ];
+{{opts_indent}}cnt_logic = UNION;
+ {%- endif %}
+
+ {%- elif (input_field_group == 'ADPSFC') %}
+
+ {%- if (field_obs in ['WIND']) %}
+OBS_VAR{{ns.var_count}}_OPTIONS = GRIB2_pdt = 0; ;; Derive instantaneous 10-m wind from U/V components, overriding max 10-m wind.
+ {%- elif (field_obs in ['VIS']) %}
+OBS_VAR{{ns.var_count}}_OPTIONS = censor_thresh = [>16090];
+{{opts_indent}}censor_val = [16090];
+{{opts_indent}}interp = { type = [ { method = NEAREST; width = 1; } ]; }
+ {%- elif (field_obs in ['CEILING']) %}
+OBS_VAR{{ns.var_count}}_OPTIONS = GRIB_lvl_typ = 215;
+{{opts_indent}}interp = { type = [ { method = NEAREST; width = 1; } ]; }
+ {%- endif %}
+
+ {%- elif (input_field_group == 'ADPUPA') %}
+
+ {%- if (field_obs in ['CAPE', 'MLCAPE']) %}
+OBS_VAR{{ns.var_count}}_OPTIONS = cnt_thresh = [ >0 ];
+{{opts_indent}}cnt_logic = UNION;
+ {%- elif (field_obs in ['PBL']) %}
+ {%- if (field_fcst in ['HPBL']) %}
+OBS_VAR{{ns.var_count}}_OPTIONS = desc = "TKE";
+ {%- elif (field_fcst in ['HGT']) %}
+OBS_VAR{{ns.var_count}}_OPTIONS = desc = "RI";
+ {%- endif %}
+ {%- endif %}
+
+ {%- endif %}
+
+{#-
+Print out a newline to separate the settings for the current field (both
+forecast and observation settings) from those for the next field.
+#}
+ {{- '\n' }}
+
+{%- endfor %}
+
+{%- if (METPLUS_TOOL_NAME == 'GRID_STAT') %}
+ {%- if (input_field_group in ['APCP', 'ASNOW']) %}
+#
+# Forecast data time window(s).
+#
+FCST_{{METPLUS_TOOL_NAME}}_FILE_WINDOW_BEGIN = 0
+FCST_{{METPLUS_TOOL_NAME}}_FILE_WINDOW_END = 0
+ {%- endif %}
+{%- endif %}
+#
+# Observation data time window(s).
+#
+{%- set obs_window_begin = 0 %}
+{%- set obs_window_end = 0 %}
+{%- if (METPLUS_TOOL_NAME == 'GRID_STAT') %}
+ {%- if (input_field_group in ['REFC', 'RETOP']) %}
+ {%- set obs_window_begin = -300 %}
+ {%- set obs_window_end = 300 %}
+ {%- endif %}
+OBS_{{METPLUS_TOOL_NAME}}_FILE_WINDOW_BEGIN = {{obs_window_begin}}
+OBS_{{METPLUS_TOOL_NAME}}_FILE_WINDOW_END = {{obs_window_end}}
+{%- elif (METPLUS_TOOL_NAME == 'POINT_STAT') %}
+ {%- set obs_window_begin = -1799 %}
+ {%- set obs_window_end = 1800 %}
+OBS_WINDOW_BEGIN = {{obs_window_begin}}
+OBS_WINDOW_END = {{obs_window_end}}
+OBS_{{METPLUS_TOOL_NAME}}_WINDOW_BEGIN = {OBS_WINDOW_BEGIN}
+OBS_{{METPLUS_TOOL_NAME}}_WINDOW_END = {OBS_WINDOW_END}
+#
+# Optional list of offsets to look for point observation data
+#
+{{METPLUS_TOOL_NAME}}_OFFSETS = 0
+{%- endif %}
+
+{%- if (METPLUS_TOOL_NAME == 'GRID_STAT') %}
+
+# MET {{MetplusToolName}} neighborhood values
+# See the MET User's Guide {{MetplusToolName}} section for more information
+{{METPLUS_TOOL_NAME}}_NEIGHBORHOOD_FIELD = BOTH
+
+# width value passed to nbrhd dictionary in the MET config file
+{%- if (input_field_group in ['APCP']) %}
+{{METPLUS_TOOL_NAME}}_NEIGHBORHOOD_WIDTH = 3,5,7
+{%- elif (input_field_group in ['ASNOW']) %}
+{{METPLUS_TOOL_NAME}}_NEIGHBORHOOD_WIDTH = 5
+{%- elif (input_field_group in ['REFC', 'RETOP']) %}
+{{METPLUS_TOOL_NAME}}_NEIGHBORHOOD_WIDTH = 1,3,5,7
+{%- endif %}
+
+# shape value passed to nbrhd dictionary in the MET config file
+{{METPLUS_TOOL_NAME}}_NEIGHBORHOOD_SHAPE = SQUARE
+
+# cov thresh list passed to nbrhd dictionary in the MET config file
+{{METPLUS_TOOL_NAME}}_NEIGHBORHOOD_COV_THRESH = >=0.5
+{%- endif %}
+#
+# Set to True to run {{MetplusToolName}} separately for each field specified;
+# set to False to run {{MetplusToolName}} once per run time that includes all
+# fields specified.
+#
+{{METPLUS_TOOL_NAME}}_ONCE_PER_FIELD = False
+
+{{METPLUS_TOOL_NAME}}_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE}
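+{#-
+For illustration (hypothetical values): with MODEL = 'fcst_mem001',
+fieldname_in_met_filedir_names = 'REFC', and OBTYPE = 'MRMS', the output
+.stat files carry the prefix 'fcst_mem001_REFC_MRMS'.
+#}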
+
+# Climatology data
+{%- set comment_or_null = '' %}
+{%- if (METPLUS_TOOL_NAME == 'GRID_STAT') %}
+#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_TIME_INTERP_METHOD =
+ {%- if (input_field_group in ['APCP', 'ASNOW']) %}
+ {%- set comment_or_null = '#' %}
+ {%- endif %}
+
+{{comment_or_null}}{{METPLUS_TOOL_NAME}}_CLIMO_CDF_BINS = 1
+#{{METPLUS_TOOL_NAME}}_CLIMO_CDF_CENTER_BINS = False
+#{{METPLUS_TOOL_NAME}}_CLIMO_CDF_WRITE_BINS = True
+{%- elif (METPLUS_TOOL_NAME == 'POINT_STAT') %}
+{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_TIME_INTERP_METHOD = NEAREST
+
+{{METPLUS_TOOL_NAME}}_CLIMO_CDF_BINS = 1
+#{{METPLUS_TOOL_NAME}}_CLIMO_CDF_CENTER_BINS = False
+#{{METPLUS_TOOL_NAME}}_CLIMO_CDF_WRITE_BINS = True
+{%- endif %}
+
+# Statistical output types
+{%- if (METPLUS_TOOL_NAME == 'GRID_STAT') %}
+{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_FHO = STAT
+{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_CTC = STAT
+{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_CTS = STAT
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_MCTC = NONE
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_MCTS = NONE
+{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_CNT = STAT
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_SL1L2 = NONE
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_SAL1L2 = NONE
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_VL1L2 = NONE
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_VAL1L2 = NONE
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_VCNT = NONE
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_PCT = NONE
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_PSTD = NONE
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_PJC = NONE
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_PRC = NONE
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_ECLV = BOTH
+{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_NBRCTC = STAT
+{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_NBRCTS = STAT
+{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_NBRCNT = STAT
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_GRAD = BOTH
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_DMAP = NONE
+{%- elif (METPLUS_TOOL_NAME == 'POINT_STAT') %}
+{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_FHO = STAT
+{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_CTC = STAT
+{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_CTS = STAT
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_MCTC =
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_MCTS =
+{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_CNT = STAT
+{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_SL1L2 = STAT
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_SAL1L2 =
+{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_VL1L2 = STAT
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_VAL1L2 =
+{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_VCNT = STAT
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_PCT =
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_PSTD =
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_PJC =
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_PRC =
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_ECNT =
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_RPS =
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_ECLV =
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_MPR =
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_ORANK =
+{%- endif %}
+
+{%- if (METPLUS_TOOL_NAME == 'GRID_STAT') %}
+
+# NetCDF matched pairs output file
+#{{METPLUS_TOOL_NAME}}_NC_PAIRS_VAR_NAME =
+{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_LATLON = FALSE
+{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_RAW = FALSE
+{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_DIFF = FALSE
+{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_CLIMO = FALSE
+#{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_CLIMO_CDP = FALSE
+{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_WEIGHT = FALSE
+{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_NBRHD = FALSE
+#{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_FOURIER = FALSE
+#{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_GRADIENT = FALSE
+#{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_DISTANCE_MAP = FALSE
+{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_APPLY_MASK = FALSE
+{%- endif %}
+
+# End of [config] section and start of [dir] section.
+[dir]
+#
+# Directory containing observation input to {{MetplusToolName}}.
+#
+OBS_{{METPLUS_TOOL_NAME}}_INPUT_DIR = {{obs_input_dir}}
+#
+# Directory containing forecast input to {{MetplusToolName}}.
+#
+FCST_{{METPLUS_TOOL_NAME}}_INPUT_DIR = {{fcst_input_dir}}
+#
+# Directory containing climatology mean input to {{MetplusToolName}}. Not used in
+# this example.
+#
+{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_INPUT_DIR =
+#
+# Directory containing climatology mean input to {{MetplusToolName}}. Not used in
+# this example.
+#
+{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_INPUT_DIR =
+#
+# Directory in which to write output from {{MetplusToolName}}.
+#
+# OUTPUT_BASE apparently has to be set to something; it cannot be left
+# to its default value. But it is not explicitly used elsewhere in this
+# configuration file.
+#
+OUTPUT_BASE = {{output_base}}
+{{METPLUS_TOOL_NAME}}_OUTPUT_DIR = {{output_dir}}
+#
+# Directory for staging data.
+#
+STAGING_DIR = {{staging_dir}}
+
+# End of [dir] section and start of [filename_templates] section.
+[filename_templates]
+#
+# Template for observation input to {{MetplusToolName}} relative to
+# OBS_{{METPLUS_TOOL_NAME}}_INPUT_DIR.
+#
+OBS_{{METPLUS_TOOL_NAME}}_INPUT_TEMPLATE = {{obs_input_fn_template}}
+#
+# Template for forecast input to {{MetplusToolName}} relative to
+# FCST_{{METPLUS_TOOL_NAME}}_INPUT_DIR.
+#
+FCST_{{METPLUS_TOOL_NAME}}_INPUT_TEMPLATE = {{fcst_input_fn_template}}
+#
+# Template for output from {{MetplusToolName}} relative to {{METPLUS_TOOL_NAME}}_OUTPUT_DIR.
+#
+{{METPLUS_TOOL_NAME}}_OUTPUT_TEMPLATE =
+#
+# Template for climatology input to {{MetplusToolName}} relative to
+# {{METPLUS_TOOL_NAME}}_CLIMO_MEAN_INPUT_DIR. Not used in this example.
+#
+{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_INPUT_TEMPLATE =
+#
+# Template for climatology input to {{MetplusToolName}} relative to
+# {{METPLUS_TOOL_NAME}}_CLIMO_STDEV_INPUT_DIR. Not used in this example.
+#
+{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_INPUT_TEMPLATE =
+{#-
+Not sure if the following section for ..._VERIFICATION_MASK_TEMPLATE
+is also necessary for PointStat.
+#}
+{%- if (METPLUS_TOOL_NAME == 'GRID_STAT') %}
+#
+# Variable used to specify one or more verification mask files for
+# {{MetplusToolName}}. Not used for this example.
+#
+{{METPLUS_TOOL_NAME}}_VERIFICATION_MASK_TEMPLATE = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly
+{%- endif %}
diff --git a/parm/metplus/PcpCombine.conf b/parm/metplus/PcpCombine.conf
new file mode 100644
index 0000000000..3cee69df1d
--- /dev/null
+++ b/parm/metplus/PcpCombine.conf
@@ -0,0 +1,216 @@
+{%- if FCST_OR_OBS == 'FCST' -%}
+# PcpCombine METplus Configuration for Forecasts
+{%- elif FCST_OR_OBS == 'OBS' -%}
+# PcpCombine METplus Configuration for Observations
+{%- endif %}
+
+[config]
+
+# List of applications (tools) to run.
+PROCESS_LIST = PcpCombine
+
+# time looping - options are INIT, VALID, RETRO, and REALTIME
+# If set to INIT or RETRO:
+# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set
+# If set to VALID or REALTIME:
+# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set
+LOOP_BY = INIT
+
+# Format of INIT_BEG and INIT_END using % items
+# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc.
+# see www.strftime.org for more information
+# %Y%m%d%H expands to YYYYMMDDHH
+INIT_TIME_FMT = %Y%m%d%H
+
+# Start time for METplus run - must match INIT_TIME_FMT
+INIT_BEG = {{cdate}}
+
+# End time for METplus run - must match INIT_TIME_FMT
+INIT_END = {{cdate}}
+
+# Increment between METplus runs (in seconds if no units are specified).
+# Must be >= 60 seconds.
+INIT_INCREMENT = 3600
+
+# List of forecast leads to process for each run time (init or valid)
+# In hours if units are not specified
+# If unset, defaults to 0 (don't loop through forecast leads)
+LEAD_SEQ = {{fhr_list}}
+
+# Order of loops to process data - Options are times, processes
+# Not relevant if only one item is in the PROCESS_LIST
+# times = run all wrappers in the PROCESS_LIST for a single run time, then
+# increment the run time and run all wrappers again until all times have
+# been evaluated.
+# processes = run the first wrapper in the PROCESS_LIST for all times
+# specified, then repeat for the next item in the PROCESS_LIST until all
+# wrappers have been run
+LOOP_ORDER = times
+#
+# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud.
+#
+LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}}
+#
+# Specify the name of the METplus log file.
+#
+LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}}
+#
+# Specify the location and name of the final METplus conf file.
+#
+METPLUS_CONF = {% raw %}{{% endraw %}{{FCST_OR_OBS}}_PCP_COMBINE_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}}
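+{#-
+For illustration: the raw/endraw markup emits a literal '{' so that
+METplus resolves the directory variable. With FCST_OR_OBS set to 'FCST'
+and a hypothetical metplus_config_fn of 'PcpCombine_fcst_APCP.conf', the
+line renders as
+
+  METPLUS_CONF = {FCST_PCP_COMBINE_OUTPUT_DIR}/metplus_final.PcpCombine_fcst_APCP.conf
+#}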
+
+{%- if FCST_OR_OBS == 'FCST' %}
+#
+# Run PcpCombine on forecast data but not observations (observation input
+# files are not provided).
+#
+OBS_PCP_COMBINE_RUN = False
+FCST_PCP_COMBINE_RUN = True
+{%- elif FCST_OR_OBS == 'OBS' %}
+#
+# Run PcpCombine on observation data but not forecasts (forecast input
+# files are not provided).
+#
+OBS_PCP_COMBINE_RUN = True
+FCST_PCP_COMBINE_RUN = False
+{%- endif %}
+#
+# Mode of PcpCombine to use (SUM, ADD, SUBTRACT, DERIVE, or CUSTOM).
+#
+{{FCST_OR_OBS}}_PCP_COMBINE_METHOD = ADD
+
+{%- if (FCST_OR_OBS == 'FCST') and (input_field_group == 'ASNOW') %}
+#
+# Specify name of variable for Snowfall Accumulation.
+# NOTE: Currently TSNOWP is used, which is a constant-density estimate of snowfall accumulation.
+# In future RRFS development, a GSL product with variable-density snowfall accumulation
+# is planned for UPP. When that is included and turned on in post, this variable may be changed
+# to ASNOW.
+#
+FCST_PCP_COMBINE_INPUT_NAMES = TSNOWP
+
+FCST_PCP_COMBINE_INPUT_LEVELS = A01
+{%- endif %}
+#
+# Specify how to name the array in the NetCDF file that PcpCombine
+# generates.
+#
+# For accumulation variables (which is the only type of variable that we
+# run PcpCombine on), we add the accumulation period to the variable name
+# because this is how METplus normally sets names. This is because,
+# depending on the settings in the METplus configuration file, it is
+# possible for a single NetCDF output file to contain output for multiple
+# accumulations, so even though the "level" attribute of each accumulation
+# variable in the output file will contain the level (e.g. "A1" or "A3"),
+# the variable names for, say, the 1-hour and 3-hour accumulations would be
+# the same (e.g. both would be "APCP"), which is not allowed and/or would
+# cause overwriting of data. To avoid this, METplus includes the level
+# as part of the variable name, so we do the same here (even though in
+# our case, it is not required because there will only be one variable in
+# the output NetCDF file).
+#
+{%- if (input_field_group in ['APCP', 'ASNOW']) %}
+{{FCST_OR_OBS}}_PCP_COMBINE_OUTPUT_NAME = {{fieldname_in_met_output}}_{{accum_hh}}
+{%- else %}
+{{FCST_OR_OBS}}_PCP_COMBINE_OUTPUT_NAME = {{fieldname_in_met_output}}
+{%- endif %}
+#
+# Accumulation interval available in the input data.
+#
+{{FCST_OR_OBS}}_PCP_COMBINE_INPUT_ACCUMS = 01
+#
+# Accumulation interval to generate in the output file.
+#
+{{FCST_OR_OBS}}_PCP_COMBINE_OUTPUT_ACCUM = {{accum_hh}}
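+{#-
+For illustration: in ADD mode with INPUT_ACCUMS = 01, an output
+accumulation of accum_hh = 03 (hypothetical) is built by summing three
+consecutive 1-hour accumulation fields.
+#}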
+#
+# If the output NetCDF file already exists, specify whether or not to
+# skip the call to PcpCombine.
+#
+# In general, relaunching a task in the SRW App should recreate all the
+# output from that task regardless of whether or not that output already
+# exists. This is the case when running the PcpCombine task on forecasts.
+# Thus, for forecasts, we set the skip flag to False. However, it turns
+# out that when running PcpCombine on observations, it is necessary to
+# skip the call to PcpCombine (i.e. NOT recreate output files) because
+# in the SRW App's workflow, more than one cycle may want to create the
+# same output observation file. This can happen if the forecast periods
+# from two or more forecasts overlap, e.g. forecast 1 starts at 00Z of
+# day one and forecast 2 starts at 00Z of day 2, and the forecasts are
+# both 36 hours long, so the last 12 hours of forecast 1 overlap with the
+# first 12 hours of forecast 2. In this case, there will be two workflow
+# tasks that will try to create the observation APCP files for those 12
+# hours, and the files will be named exactly the same (because the output
+# naming convention in this conf file is based on valid times). Thus, in
+# order to avoid (1) duplicating work and (2) having two tasks accidentally
+# trying to write to the same file (which will cause at least one task to
+# fail), when running PcpCombine on observations we want to skip the call
+# if the output observation file(s) (for a given forecast hour) already
+# exist. For this reason, for observations we set the skip flag to True
+# but set it to False for forecasts.
+#
+{%- if FCST_OR_OBS == 'FCST' %}
+# Since this METplus configuration file takes forecast files as inputs,
+# we set this flag to False.
+#
+PCP_COMBINE_SKIP_IF_OUTPUT_EXISTS = False
+{%- elif FCST_OR_OBS == 'OBS' %}
+# Since this METplus configuration file takes observation files as inputs,
+# we set this flag to True.
+#
+PCP_COMBINE_SKIP_IF_OUTPUT_EXISTS = True
+{%- endif %}
+
+{%- if FCST_OR_OBS == 'FCST' %}
+#
+# Maximum forecast lead to allow when searching for model data to use in
+# PcpCombine. The default is a very large time (4000 years), so setting
+# this to a realistic maximum value can speed up execution across many runs.
+#
+FCST_PCP_COMBINE_MAX_FORECAST = 2d
+#
+# Keep initialization time constant.
+#
+FCST_PCP_COMBINE_CONSTANT_INIT = True
+{%- endif %}
+
+{%- if FCST_OR_OBS == 'OBS' %}
+#
+# Name to identify observation data in output.
+#
+OBTYPE = CCPA
+{%- endif %}
+#
+# Specify file type of input data.
+#
+{{FCST_OR_OBS}}_PCP_COMBINE_INPUT_DATATYPE = GRIB
+
+# End of [config] section and start of [dir] section.
+[dir]
+#
+# Directory containing input files.
+#
+{{FCST_OR_OBS}}_PCP_COMBINE_INPUT_DIR = {{input_dir}}
+#
+# Directory in which to write output from PcpCombine.
+#
+# OUTPUT_BASE apparently has to be set to something; it cannot be left
+# to its default value. But it is not explicitly used elsewhere in this
+# configuration file.
+#
+OUTPUT_BASE = {{output_base}}
+{{FCST_OR_OBS}}_PCP_COMBINE_OUTPUT_DIR = {{output_dir}}
+#
+# Directory for staging data.
+#
+STAGING_DIR = {{staging_dir}}
+
+# End of [dir] section and start of [filename_templates] section.
+[filename_templates]
+#
+# Input file name template relative to {{FCST_OR_OBS}}_PCP_COMBINE_INPUT_DIR.
+#
+{{FCST_OR_OBS}}_PCP_COMBINE_INPUT_TEMPLATE = {{input_fn_template}}
+#
+# Output file name template relative to {{FCST_OR_OBS}}_PCP_COMBINE_OUTPUT_DIR.
+#
+{{FCST_OR_OBS}}_PCP_COMBINE_OUTPUT_TEMPLATE = {{output_fn_template}}
diff --git a/parm/metplus/PcpCombine_fcst_APCP.conf b/parm/metplus/PcpCombine_fcst_APCP.conf
deleted file mode 100644
index 64fe0b4fcf..0000000000
--- a/parm/metplus/PcpCombine_fcst_APCP.conf
+++ /dev/null
@@ -1,130 +0,0 @@
-# PcpCombine METplus Configuration for Forecasts
-
-[config]
-
-# List of applications (tools) to run.
-PROCESS_LIST = PcpCombine
-
-# time looping - options are INIT, VALID, RETRO, and REALTIME
-# If set to INIT or RETRO:
-# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set
-# If set to VALID or REALTIME:
-# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set
-LOOP_BY = INIT
-
-# Format of INIT_BEG and INIT_END using % items
-# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc.
-# see www.strftime.org for more information
-# %Y%m%d%H expands to YYYYMMDDHH
-INIT_TIME_FMT = %Y%m%d%H
-
-# Start time for METplus run - must match INIT_TIME_FMT
-INIT_BEG = {{cdate}}
-
-# End time for METplus run - must match INIT_TIME_FMT
-INIT_END = {{cdate}}
-
-# Increment between METplus runs (in seconds if no units are specified).
-# Must be >= 60 seconds.
-INIT_INCREMENT = 3600
-
-# List of forecast leads to process for each run time (init or valid)
-# In hours if units are not specified
-# If unset, defaults to 0 (don't loop through forecast leads)
-LEAD_SEQ = {{fhr_list}}
-
-# Order of loops to process data - Options are times, processes
-# Not relevant if only one item is in the PROCESS_LIST
-# times = run all wrappers in the PROCESS_LIST for a single run time, then
-# increment the run time and run all wrappers again until all times have
-# been evaluated.
-# processes = run the first wrapper in the PROCESS_LIST for all times
-# specified, then repeat for the next item in the PROCESS_LIST until all
-# wrappers have been run
-LOOP_ORDER = times
-#
-# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud.
-#
-LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}}
-#
-# Specify the name of the METplus log file.
-#
-LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}}
-#
-# Specify the location and name of the final METplus conf file.
-#
-METPLUS_CONF = {FCST_PCP_COMBINE_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}}
-#
-# Run PcpCombine on forecast data but not observation (observation input
-# files are not provided).
-#
-OBS_PCP_COMBINE_RUN = False
-FCST_PCP_COMBINE_RUN = True
-#
-# Mode of PcpCombine to use (SUM, ADD, SUBTRACT, DERIVE, or CUSTOM).
-#
-FCST_PCP_COMBINE_METHOD = ADD
-#
-# Specify how to name the array in the NetCDF file that PcpCombine
-# generates.
-#
-FCST_PCP_COMBINE_OUTPUT_NAME = {{fieldname_in_met_output}}_{{accum_hh}}
-#
-# Accumulation interval available in the forecast input data.
-#
-FCST_PCP_COMBINE_INPUT_ACCUMS = 01
-#
-# Accumulation interval to generate in the output file.
-#
-FCST_PCP_COMBINE_OUTPUT_ACCUM = {{accum_hh}}
-#
-# If the "bucket" output NetCDF file already exists, DON'T skip the call
-# to PcpCombine.
-#
-# In general, we want to recreate the files when the SRW App workflow
-# task that uses this METplus configuration file is relaunched.
-#
-PCP_COMBINE_SKIP_IF_OUTPUT_EXISTS = False
-
-# Maximum forecast lead to allow when searching for model data to use in
-# PcpCombine. Default is a very large time (4000 years) so setting this
-# to a valid maximum value can speed up execution time of numerous runs.
-FCST_PCP_COMBINE_MAX_FORECAST = 2d
-
-# Keep initialization time constant.
-FCST_PCP_COMBINE_CONSTANT_INIT = True
-
-FCST_PCP_COMBINE_INPUT_DATATYPE = GRIB
-
-# End of [config] section and start of [dir] section.
-[dir]
-#
-# Directory containing forecast input to PcpCombine.
-#
-FCST_PCP_COMBINE_INPUT_DIR = {{fcst_input_dir}}
-#
-# Directory in which to write output from PcpCombine.
-#
-# OUTPUT_BASE apparently has to be set to something; it cannot be left
-# to its default value. But it is not explicitly used elsewhere in this
-# configuration file.
-#
-OUTPUT_BASE = {{output_base}}
-FCST_PCP_COMBINE_OUTPUT_DIR = {{output_dir}}
-#
-# Directory for staging data.
-#
-STAGING_DIR = {{staging_dir}}
-
-# End of [dir] section and start of [filename_templates] section.
-[filename_templates]
-#
-# Template for forecast input to PcpCombine relative to
-# FCST_PCP_COMBINE_INPUT_DIR.
-#
-FCST_PCP_COMBINE_INPUT_TEMPLATE = {{fcst_input_fn_template}}
-#
-# Template for output from PcpCombine relative to
-# FCST_PCP_COMBINE_OUTPUT_DIR.
-#
-FCST_PCP_COMBINE_OUTPUT_TEMPLATE = {{output_fn_template}}
diff --git a/parm/metplus/PcpCombine_fcst_ASNOW.conf b/parm/metplus/PcpCombine_fcst_ASNOW.conf
deleted file mode 100644
index 91a6a70abb..0000000000
--- a/parm/metplus/PcpCombine_fcst_ASNOW.conf
+++ /dev/null
@@ -1,141 +0,0 @@
-# PcpCombine METplus Configuration for Forecasts
-
-[config]
-
-# List of applications (tools) to run.
-PROCESS_LIST = PcpCombine
-
-# time looping - options are INIT, VALID, RETRO, and REALTIME
-# If set to INIT or RETRO:
-# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set
-# If set to VALID or REALTIME:
-# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set
-LOOP_BY = INIT
-
-# Format of INIT_BEG and INIT_END using % items
-# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc.
-# see www.strftime.org for more information
-# %Y%m%d%H expands to YYYYMMDDHH
-INIT_TIME_FMT = %Y%m%d%H
-
-# Start time for METplus run - must match INIT_TIME_FMT
-INIT_BEG = {{cdate}}
-
-# End time for METplus run - must match INIT_TIME_FMT
-INIT_END = {{cdate}}
-
-# Increment between METplus runs (in seconds if no units are specified).
-# Must be >= 60 seconds.
-INIT_INCREMENT = 3600
-
-# List of forecast leads to process for each run time (init or valid)
-# In hours if units are not specified
-# If unset, defaults to 0 (don't loop through forecast leads)
-LEAD_SEQ = {{fhr_list}}
-
-# Order of loops to process data - Options are times, processes
-# Not relevant if only one item is in the PROCESS_LIST
-# times = run all wrappers in the PROCESS_LIST for a single run time, then
-# increment the run time and run all wrappers again until all times have
-# been evaluated.
-# processes = run the first wrapper in the PROCESS_LIST for all times
-# specified, then repeat for the next item in the PROCESS_LIST until all
-# wrappers have been run
-LOOP_ORDER = times
-#
-# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud.
-#
-LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}}
-#
-# Specify the name of the METplus log file.
-#
-LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}}
-#
-# Specify the location and name of the final METplus conf file.
-#
-METPLUS_CONF = {FCST_PCP_COMBINE_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}}
-#
-# Run PcpCombine on forecast data but not observation (observation input
-# files are not provided).
-#
-OBS_PCP_COMBINE_RUN = False
-FCST_PCP_COMBINE_RUN = True
-#
-# Mode of PcpCombine to use (SUM, ADD, SUBTRACT, DERIVE, or CUSTOM).
-#
-FCST_PCP_COMBINE_METHOD = ADD
-#
-# Specify name of variable for Snowfall Accumulation.
-# NOTE: Currently TSNOWP is used which is a constant-density estimate of snowfall accumulation.
-# In future RRFS development, a GSL product with variable-density snowfall accumulation
-# is planned for UPP. When that is included and turned on in post, this variable may be changed
-# to ASNOW.
-#
-FCST_PCP_COMBINE_INPUT_NAMES=TSNOWP
-
-FCST_PCP_COMBINE_INPUT_LEVELS = A01
-#
-# Specify how to name the array in the NetCDF file that PcpCombine
-# generates.
-#
-FCST_PCP_COMBINE_OUTPUT_NAME = {{fieldname_in_met_output}}_{{accum_hh}}
-#
-# Accumulation interval available in the forecast input data.
-#
-FCST_PCP_COMBINE_INPUT_ACCUMS = 01
-#
-# Accumulation interval to generate in the output file.
-#
-FCST_PCP_COMBINE_OUTPUT_ACCUM = {{accum_hh}}
-
-# If the "bucket" output NetCDF file already exists, DON'T skip the call
-# to PcpCombine.
-#
-# In general, we want to recreate the files when the SRW App workflow
-# task that uses this METplus configuration file is relaunched.
-#
-PCP_COMBINE_SKIP_IF_OUTPUT_EXISTS = False
-
-# Maximum forecast lead to allow when searching for model data to use in
-# PcpCombine. The default is a very large time (4000 years), so setting
-# this to a realistic maximum can speed up execution of numerous runs.
-FCST_PCP_COMBINE_MAX_FORECAST = 2d
-
-# Keep initialization time constant.
-FCST_PCP_COMBINE_CONSTANT_INIT = True
-
-FCST_PCP_COMBINE_INPUT_DATATYPE = GRIB
-#FCST_NATIVE_DATA_TYPE = GRIB
-
-# End of [config] section and start of [dir] section.
-[dir]
-#
-# Directory containing forecast input to PcpCombine.
-#
-FCST_PCP_COMBINE_INPUT_DIR = {{fcst_input_dir}}
-#
-# Directory in which to write output from PcpCombine.
-#
-# OUTPUT_BASE apparently has to be set to something; it cannot be left
-# to its default value. But it is not explicitly used elsewhere in this
-# configuration file.
-#
-OUTPUT_BASE = {{output_base}}
-FCST_PCP_COMBINE_OUTPUT_DIR = {{output_dir}}
-#
-# Directory for staging data.
-#
-STAGING_DIR = {{staging_dir}}
-
-# End of [dir] section and start of [filename_templates] section.
-[filename_templates]
-#
-# Template for forecast input to PcpCombine relative to
-# FCST_PCP_COMBINE_INPUT_DIR.
-#
-FCST_PCP_COMBINE_INPUT_TEMPLATE = {{fcst_input_fn_template}}
-#
-# Template for output from PcpCombine relative to
-# FCST_PCP_COMBINE_OUTPUT_DIR.
-#
-FCST_PCP_COMBINE_OUTPUT_TEMPLATE = {{output_fn_template}}
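The file removed above drove PcpCombine in ADD mode: it assembled an {{accum_hh}}-hour snowfall total from the 1-hour TSNOWP buckets declared by FCST_PCP_COMBINE_INPUT_ACCUMS = 01. Conceptually the operation is a gridpoint-wise sum of the hourly accumulation grids covering the target window; a minimal sketch (the 6-h window and grid size are illustrative):

    import numpy as np

    def add_mode_accum(hourly_buckets):
        # Conceptual ADD mode: sum per-hour accumulation grids into a
        # single total over the window they jointly cover.
        return np.sum(hourly_buckets, axis=0)

    # Six 1-h TSNOWP grids -> one 6-h snowfall accumulation grid.
    six_hr = add_mode_accum([np.random.rand(3, 3) for _ in range(6)])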
diff --git a/parm/metplus/PcpCombine_obs_APCP.conf b/parm/metplus/PcpCombine_obs_APCP.conf
deleted file mode 100644
index cea6809597..0000000000
--- a/parm/metplus/PcpCombine_obs_APCP.conf
+++ /dev/null
@@ -1,139 +0,0 @@
-# PcpCombine METplus Configuration for Observations
-
-[config]
-
-# List of applications (tools) to run.
-PROCESS_LIST = PcpCombine
-
-# time looping - options are INIT, VALID, RETRO, and REALTIME
-# If set to INIT or RETRO:
-# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set
-# If set to VALID or REALTIME:
-# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set
-LOOP_BY = INIT
-
-# Format of INIT_BEG and INIT_END using % items
-# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc.
-# see www.strftime.org for more information
-# %Y%m%d%H expands to YYYYMMDDHH
-INIT_TIME_FMT = %Y%m%d%H
-
-# Start time for METplus run - must match INIT_TIME_FMT
-INIT_BEG = {{cdate}}
-
-# End time for METplus run - must match INIT_TIME_FMT
-INIT_END = {{cdate}}
-
-# Increment between METplus runs (in seconds if no units are specified).
-# Must be >= 60 seconds.
-INIT_INCREMENT = 3600
-
-# List of forecast leads to process for each run time (init or valid)
-# In hours if units are not specified
-# If unset, defaults to 0 (don't loop through forecast leads)
-LEAD_SEQ = {{fhr_list}}
-
-# Order of loops to process data - Options are times, processes
-# Not relevant if only one item is in the PROCESS_LIST
-# times = run all wrappers in the PROCESS_LIST for a single run time, then
-# increment the run time and run all wrappers again until all times have
-# been evaluated.
-# processes = run the first wrapper in the PROCESS_LIST for all times
-# specified, then repeat for the next item in the PROCESS_LIST until all
-# wrappers have been run
-LOOP_ORDER = times
-#
-# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud.
-#
-LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}}
-#
-# Specify the name of the METplus log file.
-#
-LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}}
-#
-# Specify the location and name of the final METplus conf file.
-#
-METPLUS_CONF = {OBS_PCP_COMBINE_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}}
-#
-# Run PcpCombine on observation data but not forecast (forecast input
-# files are not provided).
-#
-OBS_PCP_COMBINE_RUN = True
-FCST_PCP_COMBINE_RUN = False
-#
-# Mode of PcpCombine to use (SUM, ADD, SUBTRACT, DERIVE, or CUSTOM).
-#
-OBS_PCP_COMBINE_METHOD = ADD
-#
-# Specify how to name the array in the NetCDF file that PcpCombine
-# generates.
-#
-OBS_PCP_COMBINE_OUTPUT_NAME = {{fieldname_in_met_output}}_{{accum_hh}}
-#
-# Accumulation interval available in the observation input data.
-#
-OBS_PCP_COMBINE_INPUT_ACCUMS = 01
-#
-# Accumulation interval to generate in the output file.
-#
-OBS_PCP_COMBINE_OUTPUT_ACCUM = {{accum_hh}}
-#
-# If the "bucket" output NetCDF file already exists, skip the call to
-# PcpCombine.
-#
-# In general, we want to recreate the files when the SRW App workflow
-# task that uses this METplus configuration file is relaunched. In this
-# case, however, it is necessary to skip the call to PcpCombine because
-# in the SRW App's workflow, more than one cycle may want to create the
-# same file. This can happen if the forecast periods from two or more
-# forecasts overlap, e.g. forecast 1 starts at 00Z of day 1, forecast 2
-# starts at 00Z of day 2, and both forecasts are 36 hours long, so the
-# last 12 hours of forecast 1 overlap with the first 12 hours of
-# forecast 2. In that case, two workflow tasks will try to create the
-# observation APCP files for those 12 hours, and the files will be named
-# exactly the same (because the output naming convention in this conf
-# file uses valid times). In order to (1) avoid duplicating work and
-# (2) avoid having two tasks accidentally write to the same file (which
-# would cause at least one task to fail), we do not call PcpCombine if
-# the output file (for a given forecast hour) already exists.
-#
-PCP_COMBINE_SKIP_IF_OUTPUT_EXISTS = True
-#
-# Name to identify observation data in output.
-#
-OBTYPE = CCPA
-OBS_PCP_COMBINE_INPUT_DATA_TYPE = GRIB
-
-# End of [config] section and start of [dir] section.
-[dir]
-#
-# Directory containing observation input to PcpCombine.
-#
-OBS_PCP_COMBINE_INPUT_DIR = {{obs_input_dir}}
-#
-# Directory in which to write output from PcpCombine.
-#
-# OUTPUT_BASE apparently has to be set to something; it cannot be left
-# to its default value. But it is not explicitly used elsewhere in this
-# configuration file.
-#
-OUTPUT_BASE = {{output_base}}
-OBS_PCP_COMBINE_OUTPUT_DIR = {{output_dir}}
-#
-# Directory for staging data.
-#
-STAGING_DIR = {{staging_dir}}
-
-# End of [dir] section and start of [filename_templates] section.
-[filename_templates]
-#
-# Template for observation input to PcpCombine relative to
-# OBS_PCP_COMBINE_INPUT_DIR.
-#
-OBS_PCP_COMBINE_INPUT_TEMPLATE = {{obs_input_fn_template}}
-#
-# Template for output from PcpCombine relative to
-# OBS_PCP_COMBINE_OUTPUT_DIR.
-#
-OBS_PCP_COMBINE_OUTPUT_TEMPLATE = {{output_fn_template}}
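The PCP_COMBINE_SKIP_IF_OUTPUT_EXISTS = True rationale above is easy to verify numerically: because the obs output files are named by valid time only, any two cycles whose forecast periods overlap target identical file names. A sketch with two 36-h forecasts initialized 24 h apart (the dates are illustrative):

    from datetime import datetime, timedelta

    fcst_len, accum = 36, 1
    cycles = [datetime(2024, 4, 8, 0), datetime(2024, 4, 9, 0)]

    valid_times = {}
    for init in cycles:
        for lead in range(accum, fcst_len + 1, accum):
            # Files are keyed by valid time, not by (init, lead).
            valid = init + timedelta(hours=lead)
            valid_times.setdefault(valid, []).append(init)

    collisions = {v: i for v, i in valid_times.items() if len(i) > 1}
    print(len(collisions))  # -> 12: the overlapping hours named twice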
diff --git a/parm/metplus/PointStat_ADPSFC.conf b/parm/metplus/PointStat_ADPSFC.conf
deleted file mode 100644
index 6d94e0bed9..0000000000
--- a/parm/metplus/PointStat_ADPSFC.conf
+++ /dev/null
@@ -1,378 +0,0 @@
-# PointStat METplus Configuration
-
-[config]
-
-# List of applications (tools) to run.
-PROCESS_LIST = PointStat
-
-# time looping - options are INIT, VALID, RETRO, and REALTIME
-# If set to INIT or RETRO:
-# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set
-# If set to VALID or REALTIME:
-# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set
-LOOP_BY = INIT
-
-# Format of INIT_BEG and INIT_END using % items
-# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc.
-# see www.strftime.org for more information
-# %Y%m%d%H expands to YYYYMMDDHH
-INIT_TIME_FMT = %Y%m%d%H
-
-# Start time for METplus run - must match INIT_TIME_FMT
-INIT_BEG = {{cdate}}
-
-# End time for METplus run - must match INIT_TIME_FMT
-INIT_END = {{cdate}}
-
-# Increment between METplus runs (in seconds if no units are specified).
-# Must be >= 60 seconds.
-INIT_INCREMENT = 3600
-
-# List of forecast leads to process for each run time (init or valid)
-# In hours if units are not specified
-# If unset, defaults to 0 (don't loop through forecast leads)
-LEAD_SEQ = {{fhr_list}}
-#
-# Order of loops to process data - Options are times, processes
-# Not relevant if only one item is in the PROCESS_LIST
-# times = run all wrappers in the PROCESS_LIST for a single run time, then
-# increment the run time and run all wrappers again until all times have
-# been evaluated.
-# processes = run the first wrapper in the PROCESS_LIST for all times
-# specified, then repeat for the next item in the PROCESS_LIST until all
-# wrappers have been run
-#
-LOOP_ORDER = times
-#
-# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud.
-#
-LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}}
-#
-# Specify the name of the METplus log file.
-#
-LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}}
-#
-# Specify the location and name of the final METplus conf file.
-#
-METPLUS_CONF = {POINT_STAT_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}}
-#
-# Location of MET configuration file to pass to PointStat.
-#
-# References PARM_BASE, which is the location of the parm directory
-# corresponding to the ush directory of the run_metplus.py script that
-# is called or the value of the environment variable METPLUS_PARM_BASE
-# if set.
-#
-POINT_STAT_CONFIG_FILE = {PARM_BASE}/met_config/PointStatConfig_wrapped
-
-POINT_STAT_OBS_QUALITY_INC = 0, 1, 2, 3, 9, NA
-#POINT_STAT_OBS_QUALITY_EXC =
-
-POINT_STAT_CLIMO_MEAN_TIME_INTERP_METHOD = NEAREST
-#POINT_STAT_CLIMO_STDEV_TIME_INTERP_METHOD =
-
-#POINT_STAT_INTERP_VLD_THRESH =
-#POINT_STAT_INTERP_SHAPE =
-POINT_STAT_INTERP_TYPE_METHOD = BILIN
-POINT_STAT_INTERP_TYPE_WIDTH = 2
-
-POINT_STAT_OUTPUT_FLAG_FHO = STAT
-POINT_STAT_OUTPUT_FLAG_CTC = STAT
-POINT_STAT_OUTPUT_FLAG_CTS = STAT
-#POINT_STAT_OUTPUT_FLAG_MCTC =
-#POINT_STAT_OUTPUT_FLAG_MCTS =
-POINT_STAT_OUTPUT_FLAG_CNT = STAT
-POINT_STAT_OUTPUT_FLAG_SL1L2 = STAT
-#POINT_STAT_OUTPUT_FLAG_SAL1L2 =
-POINT_STAT_OUTPUT_FLAG_VL1L2 = STAT
-#POINT_STAT_OUTPUT_FLAG_VAL1L2 =
-POINT_STAT_OUTPUT_FLAG_VCNT = STAT
-#POINT_STAT_OUTPUT_FLAG_PCT =
-#POINT_STAT_OUTPUT_FLAG_PSTD =
-#POINT_STAT_OUTPUT_FLAG_PJC =
-#POINT_STAT_OUTPUT_FLAG_PRC =
-#POINT_STAT_OUTPUT_FLAG_ECNT =
-#POINT_STAT_OUTPUT_FLAG_RPS =
-#POINT_STAT_OUTPUT_FLAG_ECLV =
-#POINT_STAT_OUTPUT_FLAG_MPR =
-#POINT_STAT_OUTPUT_FLAG_ORANK =
-
-POINT_STAT_CLIMO_CDF_BINS = 1
-#POINT_STAT_CLIMO_CDF_CENTER_BINS = False
-#POINT_STAT_CLIMO_CDF_WRITE_BINS = True
-
-#POINT_STAT_HSS_EC_VALUE =
-
-#
-# Observation data time window(s).
-#
-OBS_WINDOW_BEGIN = -1799
-OBS_WINDOW_END = 1800
-OBS_POINT_STAT_WINDOW_BEGIN = {OBS_WINDOW_BEGIN}
-OBS_POINT_STAT_WINDOW_END = {OBS_WINDOW_END}
-
-# Optional list of offsets to look for point observation data
-POINT_STAT_OFFSETS = 0
-#
-# Name to identify model (forecast) data in output.
-#
-# The variable MODEL is recorded in the stat files, and the data in
-# these files is then plotted (e.g. using METViewer). Here, we add a
-# suffix to MODEL that identifies the forecast ensemble member. This
-# makes it easier to identify each curve.
-#
-MODEL = {{vx_fcst_model_name}}_{{ensmem_name}}
-#
-# Name to identify observation data in output.
-#
-OBTYPE = {{obtype}}
-#
-# Value to enter under the DESC column in the output stat file. Here,
-# we store the value of the original lead in this column, i.e. the lead
-# with zero corresponding to the actual start time of the forecast (which
-# is (cdate - time_lag)), not to cdate. This is just the lead in
-# LEAD_SEQ with the time lag (time_lag) of the current forecast member
-# added on.
-#
-# Uncomment this line only after upgrading to METplus 5.x.
-#POINT_STAT_DESC = {lead?fmt=%H%M%S?shift={{time_lag}}}
-POINT_STAT_DESC = NA
-
-# Regrid to specified grid. Indicate NONE if no regridding, or the grid id
-# (e.g. G212)
-POINT_STAT_REGRID_TO_GRID = NONE
-POINT_STAT_REGRID_METHOD = BILIN
-POINT_STAT_REGRID_WIDTH = 2
-
-POINT_STAT_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE}
-
-# sets the -obs_valid_beg command line argument (optional)
-# not used for this example
-#POINT_STAT_OBS_VALID_BEG = {valid?fmt=%Y%m%d_%H}
-
-# sets the -obs_valid_end command line argument (optional)
-# not used for this example
-#POINT_STAT_OBS_VALID_END = {valid?fmt=%Y%m%d_%H}
-
-# Verification Masking regions
-# Indicate which grid and polygon masking region, if applicable
-POINT_STAT_GRID =
-
-# List of full path to poly masking files. NOTE: Only short lists of poly
-# files work (those that fit on one line), a long list will result in an
-# environment variable that is too long, resulting in an error. For long
-# lists of poly masking files (i.e. all the mask files in the NCEP_mask
-# directory), define these in the METplus PointStat configuration file.
-POINT_STAT_POLY = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly
-POINT_STAT_STATION_ID =
-
-# Message types, if all message types are to be returned, leave this empty,
-# otherwise indicate the message types of interest.
-POINT_STAT_MESSAGE_TYPE = {{fieldname_in_met_filedir_names}}
-
-# set to True to run PointStat once for each name/level combination
-# set to False to run PointStat once per run time including all fields
-POINT_STAT_ONCE_PER_FIELD = False
-#
-# List of forecast and corresponding observation fields to process.
-#
-# Note on use of set_attr_lead and ensemble member time-lagging:
-# -------------------------------------------------------------
-# The set_attr_lead parameter appearing below in [FCST|OBS]_VAR_OPTIONS
-# specifies the lead to use both in naming of the output .stat and .nc
-# files and for setting the lead values contained in those files. This
-# option causes MET/METplus to use the lead values in the variable LEAD_SEQ
-# set above, which are the same for all ensemble forecast members (i.e.
-# regardless of whether members are time lagged with respect to the
-# nominal cycle date specified by cdate). If set_attr_lead were not
-# specified as below, then MET/METplus would get the lead from the input
-# forecast file, and that would in general differ from one ensemble member
-# to the next depending on whether the member is time-lagged. That would
-# cause confusion, so here, we always use lead values with zero lead
-# corresponding to the nominal cdate.
-#
-FCST_VAR1_NAME = TMP
-FCST_VAR1_LEVELS = Z2
-FCST_VAR1_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}";
-OBS_VAR1_NAME = TMP
-OBS_VAR1_LEVELS = Z2
-
-FCST_VAR2_NAME = DPT
-FCST_VAR2_LEVELS = Z2
-FCST_VAR2_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}";
-OBS_VAR2_NAME = DPT
-OBS_VAR2_LEVELS = Z2
-
-FCST_VAR3_NAME = RH
-FCST_VAR3_LEVELS = Z2
-FCST_VAR3_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}";
-OBS_VAR3_NAME = RH
-OBS_VAR3_LEVELS = Z2
-
-FCST_VAR4_NAME = UGRD
-FCST_VAR4_LEVELS = Z10
-FCST_VAR4_THRESH = ge2.572
-FCST_VAR4_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}";
-OBS_VAR4_NAME = UGRD
-OBS_VAR4_LEVELS = Z10
-OBS_VAR4_THRESH = ge2.572
-
-FCST_VAR5_NAME = VGRD
-FCST_VAR5_LEVELS = Z10
-FCST_VAR5_THRESH = ge2.572
-FCST_VAR5_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}";
-OBS_VAR5_NAME = VGRD
-OBS_VAR5_LEVELS = Z10
-OBS_VAR5_THRESH = ge2.572
-
-FCST_VAR6_NAME = WIND
-FCST_VAR6_LEVELS = Z10
-FCST_VAR6_THRESH = ge2.572, ge2.572&<5.144, ge5.144, ge10.288, ge15.433
-FCST_VAR6_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}";
- GRIB2_pdt = 0; ;; Derive instantaneous 10-m wind from U/V components, overriding max 10-m wind.
-OBS_VAR6_NAME = WIND
-OBS_VAR6_LEVELS = Z10
-OBS_VAR6_THRESH = ge2.572, ge2.572&<5.144, ge5.144, ge10.288, ge15.433
-OBS_VAR6_OPTIONS = GRIB2_pdt = 0; ;; Derive instantaneous 10-m wind from U/V components, overriding max 10-m wind.
-
-FCST_VAR7_NAME = PRMSL
-FCST_VAR7_LEVELS = Z0
-FCST_VAR7_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}";
-OBS_VAR7_NAME = PRMSL
-OBS_VAR7_LEVELS = Z0
-
-FCST_VAR8_NAME = TCDC
-FCST_VAR8_LEVELS = L0
-FCST_VAR8_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}";
- GRIB_lvl_typ = 200;
- GRIB2_ipdtmpl_index=[27];
- GRIB2_ipdtmpl_val=[255];
- interp = { type = [ { method = NEAREST; width = 1; } ]; }
-OBS_VAR8_NAME = TCDC
-OBS_VAR8_LEVELS = L0
-
-FCST_VAR9_NAME = VIS
-FCST_VAR9_LEVELS = L0
-FCST_VAR9_THRESH = lt805, lt1609, lt4828, lt8045, ge8045, lt16090
-FCST_VAR9_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}";
- censor_thresh = [>16090];
- censor_val = [16090];
- interp = { type = [ { method = NEAREST; width = 1; } ]; }
-OBS_VAR9_NAME = VIS
-OBS_VAR9_LEVELS = L0
-OBS_VAR9_THRESH = lt805, lt1609, lt4828, lt8045, ge8045, lt16090
-OBS_VAR9_OPTIONS = censor_thresh = [>16090];
- censor_val = [16090];
- interp = { type = [ { method = NEAREST; width = 1; } ]; }
-
-FCST_VAR10_NAME = GUST
-FCST_VAR10_LEVELS = Z0
-FCST_VAR10_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}";
-OBS_VAR10_NAME = GUST
-OBS_VAR10_LEVELS = Z0
-
-FCST_VAR11_NAME = HGT
-FCST_VAR11_LEVELS = L0
-FCST_VAR11_THRESH = lt152, lt305, lt914, lt1520, lt3040, ge914
-FCST_VAR11_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}";
- GRIB_lvl_typ = 215;
- desc = "CEILING";
-OBS_VAR11_NAME = CEILING
-OBS_VAR11_LEVELS = L0
-OBS_VAR11_THRESH = lt152, lt305, lt914, lt1520, lt3040, ge914
-OBS_VAR11_OPTIONS = GRIB_lvl_typ = 215;
- interp = { type = [ { method = NEAREST; width = 1; } ]; }
-
-FCST_VAR12_NAME = SPFH
-FCST_VAR12_LEVELS = Z2
-OBS_VAR12_NAME = SPFH
-OBS_VAR12_LEVELS = Z2
-
-FCST_VAR13_NAME = CRAIN
-FCST_VAR13_LEVELS = L0
-FCST_VAR13_THRESH = ge1.0
-OBS_VAR13_NAME = PRWE
-OBS_VAR13_LEVELS = Z0
-OBS_VAR13_THRESH = ge161&&le163
-
-FCST_VAR14_NAME = CSNOW
-FCST_VAR14_LEVELS = L0
-FCST_VAR14_THRESH = ge1.0
-OBS_VAR14_NAME = PRWE
-OBS_VAR14_LEVELS = Z0
-OBS_VAR14_THRESH = ge171&&le173
-
-FCST_VAR15_NAME = CFRZR
-FCST_VAR15_LEVELS = L0
-FCST_VAR15_THRESH = ge1.0
-OBS_VAR15_NAME = PRWE
-OBS_VAR15_LEVELS = Z0
-OBS_VAR15_THRESH = ge164&&le166
-
-FCST_VAR16_NAME = CICEP
-FCST_VAR16_LEVELS = L0
-FCST_VAR16_THRESH = ge1.0
-OBS_VAR16_NAME = PRWE
-OBS_VAR16_LEVELS = Z0
-OBS_VAR16_THRESH = ge174&&le176
-
-# End of [config] section and start of [dir] section.
-[dir]
-#
-# Directory containing observation input to PointStat.
-#
-OBS_POINT_STAT_INPUT_DIR = {{obs_input_dir}}
-#
-# Directory containing forecast input to PointStat.
-#
-FCST_POINT_STAT_INPUT_DIR = {{fcst_input_dir}}
-#
-# Directory containing climatology mean input to PointStat. Not used in
-# this example.
-#
-POINT_STAT_CLIMO_MEAN_INPUT_DIR =
-#
-# Directory containing climatology standard deviation input to
-# PointStat. Not used in this example.
-#
-POINT_STAT_CLIMO_STDEV_INPUT_DIR =
-#
-# Directory in which to write output from PointStat.
-#
-# OUTPUT_BASE apparently has to be set to something; it cannot be left
-# to its default value. But it is not explicitly used elsewhere in this
-# configuration file.
-#
-OUTPUT_BASE = {{output_base}}
-POINT_STAT_OUTPUT_DIR = {{output_dir}}
-#
-# Directory for staging data.
-#
-STAGING_DIR = {{staging_dir}}
-
-# End of [dir] section and start of [filename_templates] section.
-[filename_templates]
-#
-# Template for observation input to PointStat relative to
-# OBS_POINT_STAT_INPUT_DIR.
-#
-OBS_POINT_STAT_INPUT_TEMPLATE = {{obs_input_fn_template}}
-#
-# Template for forecast input to PointStat relative to
-# FCST_POINT_STAT_INPUT_DIR.
-#
-FCST_POINT_STAT_INPUT_TEMPLATE = {{fcst_input_fn_template}}
-#
-# Template for output from PointStat relative to POINT_STAT_OUTPUT_DIR.
-#
-POINT_STAT_OUTPUT_TEMPLATE =
-#
-# Template for climatology input to PointStat relative to
-# POINT_STAT_CLIMO_MEAN_INPUT_DIR. Not used in this example.
-#
-POINT_STAT_CLIMO_MEAN_INPUT_TEMPLATE =
-#
-# Template for climatology input to PointStat relative to
-# POINT_STAT_CLIMO_STDEV_INPUT_DIR. Not used in this example.
-#
-POINT_STAT_CLIMO_STDEV_INPUT_TEMPLATE =
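The set_attr_lead option used throughout the file above (and the one below) pins the lead recorded in the output to the nominal LEAD_SEQ value rather than the lead embedded in a time-lagged member's input file. A sketch of the distinction, rendering leads in the style of the {lead?fmt=%H%M%S} tag (the 3-h time lag is an assumed example):

    def fmt_lead_hms(hours: int) -> str:
        # Render a whole-hour lead as HHMMSS, as "{lead?fmt=%H%M%S}" does.
        return f"{hours:02d}0000"

    nominal_lead = 6              # lead relative to the nominal cdate
    time_lag = 3                  # member initialized 3 h before cdate
    native_lead = nominal_lead + time_lag  # lead stored in the member file

    print(fmt_lead_hms(nominal_lead))  # 060000 <- what set_attr_lead pins
    print(fmt_lead_hms(native_lead))   # 090000 <- what MET would read otherwise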
diff --git a/parm/metplus/PointStat_ADPUPA.conf b/parm/metplus/PointStat_ADPUPA.conf
deleted file mode 100644
index 519767a51e..0000000000
--- a/parm/metplus/PointStat_ADPUPA.conf
+++ /dev/null
@@ -1,343 +0,0 @@
-# PointStat METplus Configuration
-
-[config]
-
-# List of applications (tools) to run.
-PROCESS_LIST = PointStat
-
-# time looping - options are INIT, VALID, RETRO, and REALTIME
-# If set to INIT or RETRO:
-# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set
-# If set to VALID or REALTIME:
-# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set
-LOOP_BY = INIT
-
-# Format of INIT_BEG and INIT_END using % items
-# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc.
-# see www.strftime.org for more information
-# %Y%m%d%H expands to YYYYMMDDHH
-INIT_TIME_FMT = %Y%m%d%H
-
-# Start time for METplus run - must match INIT_TIME_FMT
-INIT_BEG = {{cdate}}
-
-# End time for METplus run - must match INIT_TIME_FMT
-INIT_END = {{cdate}}
-
-# Increment between METplus runs (in seconds if no units are specified).
-# Must be >= 60 seconds.
-INIT_INCREMENT = 3600
-
-# List of forecast leads to process for each run time (init or valid)
-# In hours if units are not specified
-# If unset, defaults to 0 (don't loop through forecast leads)
-LEAD_SEQ = {{fhr_list}}
-#
-# Order of loops to process data - Options are times, processes
-# Not relevant if only one item is in the PROCESS_LIST
-# times = run all wrappers in the PROCESS_LIST for a single run time, then
-# increment the run time and run all wrappers again until all times have
-# been evaluated.
-# processes = run the first wrapper in the PROCESS_LIST for all times
-# specified, then repeat for the next item in the PROCESS_LIST until all
-# wrappers have been run
-#
-LOOP_ORDER = times
-#
-# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud.
-#
-LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}}
-#
-# Specify the name of the METplus log file.
-#
-LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}}
-#
-# Specify the location and name of the final METplus conf file.
-#
-METPLUS_CONF = {POINT_STAT_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}}
-#
-# Location of MET configuration file to pass to PointStat.
-#
-# References PARM_BASE, which is the location of the parm directory
-# corresponding to the ush directory of the run_metplus.py script that
-# is called or the value of the environment variable METPLUS_PARM_BASE
-# if set.
-#
-POINT_STAT_CONFIG_FILE = {PARM_BASE}/met_config/PointStatConfig_wrapped
-
-POINT_STAT_OBS_QUALITY_INC = 0, 1, 2, 3, 9, NA
-#POINT_STAT_OBS_QUALITY_EXC =
-
-POINT_STAT_CLIMO_MEAN_TIME_INTERP_METHOD = NEAREST
-#POINT_STAT_CLIMO_STDEV_TIME_INTERP_METHOD =
-
-#POINT_STAT_INTERP_VLD_THRESH =
-#POINT_STAT_INTERP_SHAPE =
-POINT_STAT_INTERP_TYPE_METHOD = BILIN
-POINT_STAT_INTERP_TYPE_WIDTH = 2
-
-POINT_STAT_OUTPUT_FLAG_FHO = STAT
-POINT_STAT_OUTPUT_FLAG_CTC = STAT
-POINT_STAT_OUTPUT_FLAG_CTS = STAT
-#POINT_STAT_OUTPUT_FLAG_MCTC =
-#POINT_STAT_OUTPUT_FLAG_MCTS =
-POINT_STAT_OUTPUT_FLAG_CNT = STAT
-POINT_STAT_OUTPUT_FLAG_SL1L2 = STAT
-#POINT_STAT_OUTPUT_FLAG_SAL1L2 =
-POINT_STAT_OUTPUT_FLAG_VL1L2 = STAT
-#POINT_STAT_OUTPUT_FLAG_VAL1L2 =
-POINT_STAT_OUTPUT_FLAG_VCNT = STAT
-#POINT_STAT_OUTPUT_FLAG_PCT =
-#POINT_STAT_OUTPUT_FLAG_PSTD =
-#POINT_STAT_OUTPUT_FLAG_PJC =
-#POINT_STAT_OUTPUT_FLAG_PRC =
-#POINT_STAT_OUTPUT_FLAG_ECNT =
-#POINT_STAT_OUTPUT_FLAG_RPS =
-#POINT_STAT_OUTPUT_FLAG_ECLV =
-#POINT_STAT_OUTPUT_FLAG_MPR =
-#POINT_STAT_OUTPUT_FLAG_ORANK =
-
-POINT_STAT_CLIMO_CDF_BINS = 1
-#POINT_STAT_CLIMO_CDF_CENTER_BINS = False
-#POINT_STAT_CLIMO_CDF_WRITE_BINS = True
-
-#POINT_STAT_HSS_EC_VALUE =
-
-#
-# Observation data time window(s).
-#
-OBS_WINDOW_BEGIN = -1799
-OBS_WINDOW_END = 1800
-OBS_POINT_STAT_WINDOW_BEGIN = {OBS_WINDOW_BEGIN}
-OBS_POINT_STAT_WINDOW_END = {OBS_WINDOW_END}
-
-# Optional list of offsets to look for point observation data
-POINT_STAT_OFFSETS = 0
-#
-# Name to identify model (forecast) data in output.
-#
-# The variable MODEL is recorded in the stat files, and the data in
-# these files is then plotted (e.g. using METViewer). Here, we add a
-# suffix to MODEL that identifies the forecast ensemble member. This
-# makes it easier to identify each curve.
-#
-MODEL = {{vx_fcst_model_name}}_{{ensmem_name}}
-#
-# Name to identify observation data in output.
-#
-OBTYPE = {{obtype}}
-#
-# Value to enter under the DESC column in the output stat file. Here,
-# we store the value of the original lead in this column, i.e. the lead
-# with zero corresponding to the actual start time of the forecast (which
-# is (cdate - time_lag)), not to cdate. This is just the lead in
-# LEAD_SEQ with the time lag (time_lag) of the current forecast member
-# added on.
-#
-# Uncomment this line only after upgrading to METplus 5.x.
-#POINT_STAT_DESC = {lead?fmt=%H%M%S?shift={{time_lag}}}
-POINT_STAT_DESC = NA
-
-# Regrid to specified grid. Indicate NONE if no regridding, or the grid id
-# (e.g. G212)
-POINT_STAT_REGRID_TO_GRID = NONE
-POINT_STAT_REGRID_METHOD = BILIN
-POINT_STAT_REGRID_WIDTH = 2
-
-POINT_STAT_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE}
-
-# sets the -obs_valid_beg command line argument (optional)
-# not used for this example
-#POINT_STAT_OBS_VALID_BEG = {valid?fmt=%Y%m%d_%H}
-
-# sets the -obs_valid_end command line argument (optional)
-# not used for this example
-#POINT_STAT_OBS_VALID_END = {valid?fmt=%Y%m%d_%H}
-
-# Verification Masking regions
-# Indicate which grid and polygon masking region, if applicable
-POINT_STAT_GRID =
-
-# List of full path to poly masking files. NOTE: Only short lists of poly
-# files work (those that fit on one line), a long list will result in an
-# environment variable that is too long, resulting in an error. For long
-# lists of poly masking files (i.e. all the mask files in the NCEP_mask
-# directory), define these in the METplus PointStat configuration file.
-POINT_STAT_POLY = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly
-POINT_STAT_STATION_ID =
-
-# Message types, if all message types are to be returned, leave this empty,
-# otherwise indicate the message types of interest.
-POINT_STAT_MESSAGE_TYPE = {{fieldname_in_met_filedir_names}}
-
-# set to True to run PointStat once for each name/level combination
-# set to False to run PointStat once per run time including all fields
-POINT_STAT_ONCE_PER_FIELD = False
-#
-# List of forecast and corresponding observation fields to process.
-#
-# Note on use of set_attr_lead and ensemble member time-lagging:
-# -------------------------------------------------------------
-# The set_attr_lead parameter appearing below in [FCST|OBS]_VAR_OPTIONS
-# specifies the lead to use both in naming of the output .stat and .nc
-# files and for setting the lead values contained in those files. This
-# option causes MET/METplus to use the lead values in the variable LEAD_SEQ
-# set above, which are the same for all ensemble forecast members (i.e.
-# regardless of whether members are time lagged with respect to the
-# nominal cycle date specified by cdate). If set_attr_lead were not
-# specified as below, then MET/METplus would get the lead from the input
-# forecast file, and that would in general differ from one ensemble member
-# to the next depending on whether the member is time-lagged. That would
-# cause confusion, so here, we always use lead values with zero lead
-# corresponding to the nominal cdate.
-#
-FCST_VAR1_NAME = TMP
-FCST_VAR1_LEVELS = P1000, P925, P850, P700, P500, P400, P300, P250, P200, P150, P100, P50, P20, P10
-FCST_VAR1_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}";
-OBS_VAR1_NAME = TMP
-OBS_VAR1_LEVELS = P1000, P925, P850, P700, P500, P400, P300, P250, P200, P150, P100, P50, P20, P10
-
-FCST_VAR2_NAME = RH
-FCST_VAR2_LEVELS = P1000, P925, P850, P700, P500, P400, P300, P250
-FCST_VAR2_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}";
-OBS_VAR2_NAME = RH
-OBS_VAR2_LEVELS = P1000, P925, P850, P700, P500, P400, P300, P250
-
-FCST_VAR3_NAME = DPT
-FCST_VAR3_LEVELS = P1000, P925, P850, P700, P500, P400, P300
-FCST_VAR3_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}";
-OBS_VAR3_NAME = DPT
-OBS_VAR3_LEVELS = P1000, P925, P850, P700, P500, P400, P300
-
-FCST_VAR4_NAME = UGRD
-FCST_VAR4_LEVELS = P1000, P925, P850, P700, P500, P400, P300, P250, P200, P150, P100, P50, P20, P10
-FCST_VAR4_THRESH = ge2.572
-FCST_VAR4_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}";
-OBS_VAR4_NAME = UGRD
-OBS_VAR4_LEVELS = P1000, P925, P850, P700, P500, P400, P300, P250, P200, P150, P100, P50, P20, P10
-OBS_VAR4_THRESH = ge2.572
-
-FCST_VAR5_NAME = VGRD
-FCST_VAR5_LEVELS = P1000, P925, P850, P700, P500, P400, P300, P250, P200, P150, P100, P50, P20, P10
-FCST_VAR5_THRESH = ge2.572
-FCST_VAR5_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}";
-OBS_VAR5_NAME = VGRD
-OBS_VAR5_LEVELS = P1000, P925, P850, P700, P500, P400, P300, P250, P200, P150, P100, P50, P20, P10
-OBS_VAR5_THRESH = ge2.572
-
-FCST_VAR6_NAME = WIND
-FCST_VAR6_LEVELS = P1000, P925, P850, P700, P500, P400, P300, P250, P200, P150, P100, P50, P20, P10
-FCST_VAR6_THRESH = ge2.572, ge2.572&<5.144, ge5.144, ge10.288, ge15.433, ge20.577, ge25.722
-FCST_VAR6_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}";
-OBS_VAR6_NAME = WIND
-OBS_VAR6_LEVELS = P1000, P925, P850, P700, P500, P400, P300, P250, P200, P150, P100, P50, P20, P10
-OBS_VAR6_THRESH = ge2.572, ge2.572&<5.144, ge5.144, ge10.288, ge15.433, ge20.577, ge25.722
-
-FCST_VAR7_NAME = HGT
-FCST_VAR7_LEVELS = P1000, P950, P925, P850, P700, P500, P400, P300, P250, P200, P150, P100, P50, P20, P10
-FCST_VAR7_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}";
-OBS_VAR7_NAME = HGT
-OBS_VAR7_LEVELS = P1000, P950, P925, P850, P700, P500, P400, P300, P250, P200, P150, P100, P50, P20, P10
-
-FCST_VAR8_NAME = SPFH
-FCST_VAR8_LEVELS = P1000, P850, P700, P500, P400, P300
-FCST_VAR8_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}";
-OBS_VAR8_NAME = SPFH
-OBS_VAR8_LEVELS = P1000, P850, P700, P500, P400, P300
-
-FCST_VAR9_NAME = CAPE
-FCST_VAR9_LEVELS = L0
-FCST_VAR9_THRESH = gt500, gt1000, gt1500, gt2000, gt3000, gt4000
-FCST_VAR9_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}";
- cnt_thresh = [ >0 ];
-OBS_VAR9_NAME = CAPE
-OBS_VAR9_LEVELS = L0-100000
-OBS_VAR9_THRESH = gt500, gt1000, gt1500, gt2000, gt3000, gt4000
-OBS_VAR9_OPTIONS = cnt_thresh = [ >0 ];
- cnt_logic = UNION;
-
-FCST_VAR10_NAME = HPBL
-FCST_VAR10_LEVELS = Z0
-FCST_VAR10_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}";
-OBS_VAR10_NAME = PBL
-OBS_VAR10_LEVELS = L0
-OBS_VAR10_OPTIONS = desc = "TKE";
-
-FCST_VAR11_NAME = HGT
-FCST_VAR11_LEVELS = L0
-FCST_VAR11_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}";
- GRIB_lvl_typ = 220;
-OBS_VAR11_NAME = PBL
-OBS_VAR11_LEVELS = L0
-OBS_VAR11_OPTIONS = desc = "RI";
-
-FCST_VAR12_NAME = CAPE
-FCST_VAR12_LEVELS = L0-90
-FCST_VAR12_THRESH = gt500, gt1000, gt1500, gt2000, gt3000, gt4000
-FCST_VAR12_OPTIONS = cnt_thresh = [ >0 ];
-OBS_VAR12_NAME = MLCAPE
-OBS_VAR12_LEVELS = L0
-OBS_VAR12_THRESH = gt500, gt1000, gt1500, gt2000, gt3000, gt4000
-OBS_VAR12_OPTIONS = cnt_thresh = [ >0 ];
- cnt_logic = UNION;
-
-# End of [config] section and start of [dir] section.
-[dir]
-#
-# Directory containing observation input to PointStat.
-#
-OBS_POINT_STAT_INPUT_DIR = {{obs_input_dir}}
-#
-# Directory containing forecast input to PointStat.
-#
-FCST_POINT_STAT_INPUT_DIR = {{fcst_input_dir}}
-#
-# Directory containing climatology mean input to PointStat. Not used in
-# this example.
-#
-POINT_STAT_CLIMO_MEAN_INPUT_DIR =
-#
-# Directory containing climatology standard deviation input to
-# PointStat. Not used in this example.
-#
-POINT_STAT_CLIMO_STDEV_INPUT_DIR =
-#
-# Directory in which to write output from PointStat.
-#
-# OUTPUT_BASE apparently has to be set to something; it cannot be left
-# to its default value. But it is not explicitly used elsewhere in this
-# configuration file.
-#
-OUTPUT_BASE = {{output_base}}
-POINT_STAT_OUTPUT_DIR = {{output_dir}}
-#
-# Directory for staging data.
-#
-STAGING_DIR = {{staging_dir}}
-
-# End of [dir] section and start of [filename_templates] section.
-[filename_templates]
-#
-# Template for observation input to PointStat relative to
-# OBS_POINT_STAT_INPUT_DIR.
-#
-OBS_POINT_STAT_INPUT_TEMPLATE = {{obs_input_fn_template}}
-#
-# Template for forecast input to PointStat relative to
-# FCST_POINT_STAT_INPUT_DIR.
-#
-FCST_POINT_STAT_INPUT_TEMPLATE = {{fcst_input_fn_template}}
-#
-# Template for output from PointStat relative to POINT_STAT_OUTPUT_DIR.
-#
-POINT_STAT_OUTPUT_TEMPLATE =
-#
-# Template for climatology input to PointStat relative to
-# POINT_STAT_CLIMO_MEAN_INPUT_DIR. Not used in this example.
-#
-POINT_STAT_CLIMO_MEAN_INPUT_TEMPLATE =
-#
-# Template for climatology input to PointStat relative to
-# POINT_STAT_CLIMO_STDEV_INPUT_DIR. Not used in this example.
-#
-POINT_STAT_CLIMO_STDEV_INPUT_TEMPLATE =
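Both PointStat files above use the asymmetric window (-1799 s, +1800 s) around each valid time. The window still spans a full hour, but an ob falling exactly 30 minutes before a valid time is excluded there and instead caught at exactly +30 minutes by the preceding hourly valid time, evidently so that boundary obs are matched to exactly one window. A minimal membership check:

    def in_obs_window(obs_offset_s: int, beg: int = -1799, end: int = 1800) -> bool:
        # obs_offset_s: offset of the ob from the valid time, in seconds.
        return beg <= obs_offset_s <= end

    print(in_obs_window(-1800))  # False: belongs to the previous hour's window
    print(in_obs_window(1800))   # True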
diff --git a/parm/metplus/PointStat_ensmean.conf b/parm/metplus/PointStat_ensmean.conf
new file mode 100644
index 0000000000..67a20034df
--- /dev/null
+++ b/parm/metplus/PointStat_ensmean.conf
@@ -0,0 +1,566 @@
+# Ensemble mean {{MetplusToolName}} METplus Configuration
+
+[config]
+
+# List of applications (tools) to run.
+PROCESS_LIST = {{MetplusToolName}}
+
+# time looping - options are INIT, VALID, RETRO, and REALTIME
+# If set to INIT or RETRO:
+# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set
+# If set to VALID or REALTIME:
+# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set
+LOOP_BY = INIT
+
+# Format of INIT_BEG and INIT_END using % items
+# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc.
+# see www.strftime.org for more information
+# %Y%m%d%H expands to YYYYMMDDHH
+INIT_TIME_FMT = %Y%m%d%H
+
+# Start time for METplus run - must match INIT_TIME_FMT
+INIT_BEG = {{cdate}}
+
+# End time for METplus run - must match INIT_TIME_FMT
+INIT_END = {{cdate}}
+
+# Increment between METplus runs (in seconds if no units are specified).
+# Must be >= 60 seconds.
+INIT_INCREMENT = 3600
+
+# List of forecast leads to process for each run time (init or valid)
+# In hours if units are not specified
+# If unset, defaults to 0 (don't loop through forecast leads)
+LEAD_SEQ = {{fhr_list}}
+#
+# Order of loops to process data - Options are times, processes
+# Not relevant if only one item is in the PROCESS_LIST
+# times = run all wrappers in the PROCESS_LIST for a single run time, then
+# increment the run time and run all wrappers again until all times have
+# been evaluated.
+# processes = run the first wrapper in the PROCESS_LIST for all times
+# specified, then repeat for the next item in the PROCESS_LIST until all
+# wrappers have been run
+#
+LOOP_ORDER = times
+#
+# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud.
+#
+LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}}
+#
+# Specify the name of the METplus log file.
+#
+LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}}
+#
+# Specify the location and name of the final METplus conf file.
+#
+METPLUS_CONF = {{ '{' ~ METPLUS_TOOL_NAME ~ '_OUTPUT_DIR}' }}/metplus_final.{{metplus_config_fn}}
+#
+# Location of MET configuration file to pass to {{MetplusToolName}}.
+#
+# References PARM_BASE, which is the location of the parm directory
+# corresponding to the ush directory of the run_metplus.py script that
+# is called or the value of the environment variable METPLUS_PARM_BASE
+# if set.
+#
+{{METPLUS_TOOL_NAME}}_CONFIG_FILE = {PARM_BASE}/met_config/{{MetplusToolName}}Config_wrapped
+
+{{METPLUS_TOOL_NAME}}_OBS_QUALITY_INC = 0, 1, 2, 3, 9, NA
+#{{METPLUS_TOOL_NAME}}_OBS_QUALITY_EXC =
+
+{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_TIME_INTERP_METHOD = NEAREST
+#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_TIME_INTERP_METHOD =
+
+#{{METPLUS_TOOL_NAME}}_INTERP_VLD_THRESH =
+#{{METPLUS_TOOL_NAME}}_INTERP_SHAPE =
+{{METPLUS_TOOL_NAME}}_INTERP_TYPE_METHOD = BILIN
+{{METPLUS_TOOL_NAME}}_INTERP_TYPE_WIDTH = 2
+
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_FHO =
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_CTC =
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_CTS =
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_MCTC =
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_MCTS =
+{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_CNT = STAT
+{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_SL1L2 = STAT
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_SAL1L2 =
+{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_VL1L2 = STAT
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_VAL1L2 =
+{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_VCNT = STAT
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_PCT =
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_PSTD =
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_PJC =
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_PRC =
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_ECNT =
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_RPS =
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_ECLV =
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_MPR =
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_ORANK =
+
+{{METPLUS_TOOL_NAME}}_CLIMO_CDF_BINS = 1
+#{{METPLUS_TOOL_NAME}}_CLIMO_CDF_CENTER_BINS = False
+#{{METPLUS_TOOL_NAME}}_CLIMO_CDF_WRITE_BINS = True
+
+#{{METPLUS_TOOL_NAME}}_HSS_EC_VALUE =
+
+#
+# Observation data time window(s).
+#
+OBS_WINDOW_BEGIN = -1799
+OBS_WINDOW_END = 1800
+OBS_{{METPLUS_TOOL_NAME}}_WINDOW_BEGIN = {OBS_WINDOW_BEGIN}
+OBS_{{METPLUS_TOOL_NAME}}_WINDOW_END = {OBS_WINDOW_END}
+
+# Optional list of offsets to look for point observation data
+{{METPLUS_TOOL_NAME}}_OFFSETS = 0
+#
+# Name to identify model (forecast) data in output.
+#
+# The variable MODEL is recorded in the stat files, and the data in
+# these files is then plotted (e.g. using METViewer). Here, we add a
+# suffix to MODEL that identifies the data as that for the ensemble
+# mean. This makes it easier to identify each curve.
+#
+MODEL = {{vx_fcst_model_name}}_ensmean
+#
+# Name to identify observation data in output.
+#
+OBTYPE = {{obtype}}
+#
+# Value to enter under the DESC column in the output stat file.
+#
+{{METPLUS_TOOL_NAME}}_DESC = NA
+
+# Regrid to specified grid. Indicate NONE if no regridding, or the grid id
+# (e.g. G212)
+{{METPLUS_TOOL_NAME}}_REGRID_TO_GRID = NONE
+{{METPLUS_TOOL_NAME}}_REGRID_METHOD = BILIN
+{{METPLUS_TOOL_NAME}}_REGRID_WIDTH = 2
+
+{{METPLUS_TOOL_NAME}}_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE}
+
+# sets the -obs_valid_beg command line argument (optional)
+# not used for this example
+#{{METPLUS_TOOL_NAME}}_OBS_VALID_BEG = {valid?fmt=%Y%m%d_%H}
+
+# sets the -obs_valid_end command line argument (optional)
+# not used for this example
+#{{METPLUS_TOOL_NAME}}_OBS_VALID_END = {valid?fmt=%Y%m%d_%H}
+
+# Verification Masking regions
+# Indicate which grid and polygon masking region, if applicable
+{{METPLUS_TOOL_NAME}}_GRID =
+
+# List of full paths to poly masking files. NOTE: Only short lists of
+# poly files work (those that fit on one line); a long list results in
+# an environment variable that is too long, causing an error. For long
+# lists of poly masking files (e.g. all the mask files in the NCEP_mask
+# directory), define these in the METplus {{MetplusToolName}} configuration file.
+{{METPLUS_TOOL_NAME}}_POLY = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly
+{{METPLUS_TOOL_NAME}}_STATION_ID =
+
+# Message types: if all message types are to be returned, leave this
+# empty; otherwise, indicate the message types of interest.
+{{METPLUS_TOOL_NAME}}_MESSAGE_TYPE = {{fieldname_in_met_filedir_names}}
+
+# set to True to run {{MetplusToolName}} once for each name/level combination
+# set to False to run {{MetplusToolName}} once per run time including all fields
+{{METPLUS_TOOL_NAME}}_ONCE_PER_FIELD = False
+#
+# List of forecast and corresponding observation fields to process.
+#
+{#-
+Import the file containing jinja macros.
+#}
+{%- import metplus_templates_dir ~ '/metplus_macros.jinja' as metplus_macros %}
+
+{#-
+Set the probabilistic threshold to be used for the forecast field. If
+necessary, this can be changed to be an input parameter in the calling
+script instead of a hard-coded value as below.
+#}
+{%- set thresh_fcst_prob = '==0.1' %}
+
+{#-
+Jinja requires certain variables to be defined globally within the template
+before they can be used in if-statements and other scopes (see Jinja
+scoping rules). Define such variables.
+#}
+{%- set level_fcst = '' %}
+{%- set level_obs = '' %}
+{%- set indx_level_fcst = '' %}
+
+{%- set valid_threshes_fcst = [] %}
+{%- set valid_threshes_obs = [] %}
+{%- set threshes_fcst = '' %}
+{%- set threshes_obs = '' %}
+{%- set indx_input_thresh_fcst = '' %}
+
+{%- set opts_indent = '' %}
+{%- set opts_indent_len = '' %}
+{%- set tmp = '' %}
+{%- set error_msg = '' %}
+
+{#-
+Make sure that the sets of field groups for forecasts and observations
+are identical.
+#}
+{%- set fgs_fcst = vx_config_dict['fcst'].keys()|list %}
+{%- set fgs_obs = vx_config_dict['obs'].keys()|list %}
+{%- if (fgs_fcst != fgs_obs) %}
+ {%- set error_msg = '\n' ~
+'The list of valid field groups for forecasts (fgs_fcst) must be identical\n' ~
+'to that for observations (fgs_obs) but isn\'t:\n' ~
+' fgs_fcst = ' ~ fgs_fcst ~ '\n' ~
+' fgs_obs = ' ~ fgs_obs %}
+ {{metplus_macros.print_err_and_quit(error_msg)}}
+{%- endif %}
+
+{#-
+Extract the lists of forecast and observation dictionaries containing
+the valid fields, levels, and thresholds corresponding to the specified
+field group (input_field_group). Note that it would be simpler to have
+these be just dictionaries in which the keys are the field names (instead
+of them being LISTS of dictionaries in which each dictionary contains a
+single key that is the field name), but that approach cannot be used here
+because it is possible for field names to be repeated (for both forecasts
+and observations). For example, in the observations, the field name
+'PRWE' appears more than once, each time with a different threshold, and
+the combination of name and threshold is what constitutes a unique field,
+not just the name by itself.
+#}
+{%- set fields_levels_threshes_fcst = vx_config_dict['fcst'][input_field_group] %}
+{%- set fields_levels_threshes_obs = vx_config_dict['obs'][input_field_group] %}
+
+{#-
+Reset the specified forecast level so that if it happens to be an
+accumulation (e.g. 'A03'), the leading zeros in front of the hour are
+stripped out (e.g. reset to 'A3').
+#}
+{%- set input_level_fcst = metplus_macros.get_accumulation_no_zero_pad(input_level_fcst) %}
+
+{#-
+Ensure that the specified input forecast level(s) (input_level_fcst) and
+threshold(s) (input_thresh_fcst) are valid, i.e. that they are in the
+set(s) of valid forecast levels and thresholds, respectively, specified
+in fields_levels_threshes_fcst.
+#}
+{{- metplus_macros.check_level(fields_levels_threshes_fcst, input_level_fcst) }}
+{{- metplus_macros.check_thresh(fields_levels_threshes_fcst, input_level_fcst, input_thresh_fcst) }}
+
+{#-
+Some fields in the specified field group (input_field_group) may need to
+be excluded from the METplus config file because calculating means for
+them doesn't make sense. List these (for each input_field_group) in the
+following dictionary.
+#}
+{%- set fields_fcst_to_exclude_by_field_group =
+ {'APCP': [],
+ 'ASNOW': [],
+ 'REFC': [],
+ 'RETOP': [],
+ 'ADPSFC': ['TCDC', 'VIS', 'HGT'],
+ 'ADPUPA': []} %}
+{%- set fields_fcst_to_exclude = fields_fcst_to_exclude_by_field_group[input_field_group] %}
+
+{#-
+For convenience, create lists of valid forecast and observation field
+names.
+#}
+{%- set num_valid_fields_fcst = fields_levels_threshes_fcst|length %}
+{%- set valid_fields_fcst = [] %}
+{%- for i in range(0,num_valid_fields_fcst) %}
+ {%- set field = fields_levels_threshes_fcst[i].keys()|list|join('') %}
+ {%- set tmp = valid_fields_fcst.append(field) %}
+{%- endfor %}
+
+{%- set valid_fields_obs = [] %}
+{%- set num_valid_fields_obs = fields_levels_threshes_obs|length %}
+{%- for i in range(0,num_valid_fields_obs) %}
+ {%- set field = fields_levels_threshes_obs[i].keys()|list|join('') %}
+ {%- set tmp = valid_fields_obs.append(field) %}
+{%- endfor %}
+
+{#-
+Ensure that the number of valid fields for forecasts is equal to that
+for the observations.
+#}
+{%- set num_valid_fields = 0 %}
+{%- if (num_valid_fields_fcst != num_valid_fields_obs) %}
+ {%- set error_msg = '\n' ~
+'The number of valid forecast fields (num_valid_fields_fcst) must be\n' ~
+'equal to the number of valid observation fields (num_valid_fields_obs)\n' ~
+'but isn\'t:\n' ~
+' num_valid_fields_fcst = ' ~ num_valid_fields_fcst ~ '\n' ~
+' num_valid_fields_obs = ' ~ num_valid_fields_obs ~ '\n' ~
+'The lists of valid forecast and observation fields are:\n' ~
+' valid_fields_fcst = ' ~ valid_fields_fcst ~ '\n' ~
+' valid_fields_obs = ' ~ valid_fields_obs ~ '\n' %}
+ {{metplus_macros.print_err_and_quit(error_msg)}}
+{%- else %}
+ {%- set num_valid_fields = num_valid_fields_fcst %}
+{%- endif %}
+
+{#-
+Loop over the valid fields and set field names, levels, thresholds, and/
+or options for each field, both for forecasts and for observations, in
+the METplus configuration file.
+#}
+{%- set ns = namespace(var_count = 0) %}
+
+{%- for i in range(0,num_valid_fields) if valid_fields_fcst[i] not in fields_fcst_to_exclude %}
+
+ {%- set field_fcst = valid_fields_fcst[i] %}
+ {%- set field_obs = valid_fields_obs[i] %}
+
+{#-
+For convenience, create lists of valid forecast and observation levels
+for the current field.
+#}
+ {%- set valid_levels_fcst = fields_levels_threshes_fcst[i][field_fcst].keys()|list %}
+ {%- set valid_levels_obs = fields_levels_threshes_obs[i][field_obs].keys()|list %}
+
+{#-
+Extract dictionary of valid forecast levels (the dictionary keys) and
+corresponding lists of valid thresholds (the values) for each level.
+Then loop over these levels and corresponding lists of thresholds to set
+both the forecast and observation field names, levels, thresholds, and/or
+options.
+#}
+ {%- set valid_levels_threshes_fcst = fields_levels_threshes_fcst[i][field_fcst] %}
+ {%- for level_fcst, valid_threshes_fcst in valid_levels_threshes_fcst.items() %}
+
+ {%- if (input_level_fcst == 'all') or (input_level_fcst == level_fcst) %}
+{#-
+Increment the METplus variable counter.
+#}
+ {%- set ns.var_count = ns.var_count+1 %}
+
+{#-
+Set forecast field name.
+#}
+FCST_VAR{{ns.var_count}}_NAME = {{field_fcst}}_{{level_fcst}}_ENS_MEAN
+
+{#-
+Set forecast field level.
+#}
+FCST_VAR{{ns.var_count}}_LEVELS = {{level_fcst}}
+
+{#-
+Set forecast field threshold(s). Note that no forecast thresholds are
+included in the METplus configuration file if input_thresh_fcst is set
+to 'none'.
+#}
+ {%- if (input_thresh_fcst != 'none') %}
+{#-
+If input_thresh_fcst is set to 'all', set the list of forecast thresholds
+to the full set of valid values.
+#}
+ {%- if (input_thresh_fcst == 'all') %}
+
+ {%- set threshes_fcst = valid_threshes_fcst %}
+{#-
+If input_thresh_fcst is set to a specific value:
+ 1) Ensure that input_thresh_fcst exists in the list of valid forecast
+ thresholds.
+ 2) Get the index of input_thresh_fcst in the list of valid forecast
+ thresholds. This will be needed later below when setting the
+ observation threshold(s).
+ 3) Use this index to set the forecast threshold to a one-element list
+ containing the specified forecast threshold.
+#}
+ {%- else %}
+
+ {%- if input_thresh_fcst not in valid_threshes_fcst %}
+ {%- set error_msg = '\n' ~
+'For the current forecast field (field_fcst) and forecast level (level_fcst),\n' ~
+'the input forecast threshold (input_thresh_fcst) does not exist in the list\n' ~
+'of valid forecast thresholds (valid_threshes_fcst):\n' ~
+' field_fcst = ' ~ field_fcst ~ '\n' ~
+' level_fcst = ' ~ level_fcst ~ '\n' ~
+' valid_threshes_fcst = ' ~ valid_threshes_fcst ~ '\n' ~
+' input_thresh_fcst = ' ~ input_thresh_fcst ~ '\n' %}
+ {{metplus_macros.print_err_and_quit(error_msg)}}
+ {%- endif %}
+ {%- set indx_input_thresh_fcst = valid_threshes_fcst.index(input_thresh_fcst) %}
+ {%- set threshes_fcst = [valid_threshes_fcst[indx_input_thresh_fcst]] %}
+
+ {%- endif %}
+{#-
+If threshes_fcst was set above to a non-empty value (i.e. to something
+other than its default of an empty list), write the forecast thresholds
+into the METplus configuration file. Then reset threshes_fcst to its
+default value for proper processing of the next field's thresholds.
+#}
+ {%- if (threshes_fcst != []) %}
+FCST_VAR{{ns.var_count}}_THRESH = {{threshes_fcst|join(', ')}}
+ {%- endif %}
+ {%- set threshes_fcst = [] %}
+
+ {%- endif %}
+
+{#-
+Set forecast field options.
+#}
+ {%- set opts_indent_len = 20 %}
+ {%- if (ns.var_count > 9) and (ns.var_count <= 99) %}
+ {%- set opts_indent_len = opts_indent_len + 1 %}
+ {%- elif (ns.var_count > 99) and (ns.var_count <= 999) %}
+ {%- set opts_indent_len = opts_indent_len + 2 %}
+ {%- elif (ns.var_count > 999) %}
+ {%- set opts_indent_len = opts_indent_len + 3 %}
+ {%- endif %}
+ {%- set opts_indent = ' '*opts_indent_len %}
+
+ {%- if input_field_group == 'ADPUPA' %}
+
+ {%- if field_fcst == 'CAPE' %}
+FCST_VAR{{ns.var_count}}_OPTIONS = cnt_thresh = [ >0 ];
+ {%- endif %}
+
+ {%- endif %}
+
+{#-
+Set observation field name.
+#}
+OBS_VAR{{ns.var_count}}_NAME = {{field_obs}}
+
+{#-
+Set observation field level.
+#}
+ {%- set indx_level_fcst = valid_levels_fcst.index(level_fcst) %}
+ {%- set level_obs = valid_levels_obs[indx_level_fcst] %}
+OBS_VAR{{ns.var_count}}_LEVELS = {{level_obs}}
+
+{#-
+Set observation field threshold(s). Note that no observation thresholds
+are included in the METplus configuration file if input_thresh_fcst is
+set to 'none'.
+#}
+ {%- if (input_thresh_fcst != 'none') %}
+{#-
+Set the list of valid observation thresholds to the one corresponding to
+the current observation level (level_obs).
+#}
+ {%- set valid_threshes_obs = fields_levels_threshes_obs[i][field_obs][level_obs] %}
+{#-
+If input_thresh_fcst is set to 'all', set the list of observation thresholds
+to the full set of valid values.
+#}
+ {%- if (input_thresh_fcst == 'all') %}
+
+ {%- set threshes_obs = valid_threshes_obs %}
+{#-
+If input_thresh_fcst is set to a specific forecast threshold, then the
+observation threshold is given by the element in the list of valid
+observation thresholds that has the same index as that of input_thresh_fcst
+in the list of valid forecast thresholds.
+#}
+ {%- else %}
+
+ {%- set threshes_obs = [valid_threshes_obs[indx_input_thresh_fcst]] %}
+
+ {%- endif %}
+{#-
+If threshes_obs was set above to a non-empty value (i.e. to something
+other than its default of an empty list), write the observation
+thresholds into the METplus configuration file. Then reset threshes_obs
+to its default value for proper processing of the next field's thresholds.
+#}
+ {%- if (threshes_obs != []) %}
+OBS_VAR{{ns.var_count}}_THRESH = {{threshes_obs|join(', ')}}
+ {%- endif %}
+ {%- set threshes_obs = [] %}
+
+ {%- endif %}
+
+{#-
+Set observation field options.
+#}
+ {%- set opts_indent_len = opts_indent_len - 1 %}
+ {%- set opts_indent = ' '*opts_indent_len %}
+
+ {%- if input_field_group == 'ADPUPA' %}
+
+ {%- if field_obs == 'CAPE' %}
+OBS_VAR{{ns.var_count}}_OPTIONS = cnt_thresh = [ >0 ];
+{{opts_indent}}cnt_logic = UNION;
+ {%- elif field_obs == 'PBL' %}
+OBS_VAR{{ns.var_count}}_OPTIONS = desc = "TKE";
+ {%- endif %}
+
+ {%- endif %}
+
+{#-
+Print out a newline to separate the settings for the current field (both
+forecast and observation settings) from those for the next field.
+#}
+ {{- '\n' }}
+
+ {%- endif %}
+
+ {%- endfor %}
+{%- endfor %}
+# End of [config] section and start of [dir] section.
+[dir]
+#
+# Directory containing observation input to {{MetplusToolName}}.
+#
+OBS_{{METPLUS_TOOL_NAME}}_INPUT_DIR = {{obs_input_dir}}
+#
+# Directory containing forecast input to {{MetplusToolName}}.
+#
+FCST_{{METPLUS_TOOL_NAME}}_INPUT_DIR = {{fcst_input_dir}}
+#
+# Directory containing climatology mean input to {{MetplusToolName}}. Not used in
+# this example.
+#
+{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_INPUT_DIR =
+#
+# Directory containing climatology standard deviation input to
+# {{MetplusToolName}}. Not used in this example.
+#
+{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_INPUT_DIR =
+#
+# Directory in which to write output from {{MetplusToolName}}.
+#
+# OUTPUT_BASE apparently has to be set to something; it cannot be left
+# to its default value. But it is not explicitly used elsewhere in this
+# configuration file.
+#
+OUTPUT_BASE = {{output_base}}
+{{METPLUS_TOOL_NAME}}_OUTPUT_DIR = {{output_dir}}
+#
+# Directory for staging data.
+#
+STAGING_DIR = {{staging_dir}}
+
+# End of [dir] section and start of [filename_templates] section.
+[filename_templates]
+#
+# Template for observation input to {{MetplusToolName}} relative to
+# OBS_{{METPLUS_TOOL_NAME}}_INPUT_DIR.
+#
+OBS_{{METPLUS_TOOL_NAME}}_INPUT_TEMPLATE = {{obs_input_fn_template}}
+#
+# Template for forecast input to {{MetplusToolName}} relative to
+# FCST_{{METPLUS_TOOL_NAME}}_INPUT_DIR.
+#
+FCST_{{METPLUS_TOOL_NAME}}_INPUT_TEMPLATE = {{fcst_input_fn_template}}
+#
+# Template for output from {{MetplusToolName}} relative to {{METPLUS_TOOL_NAME}}_OUTPUT_DIR.
+#
+{{METPLUS_TOOL_NAME}}_OUTPUT_TEMPLATE =
+#
+# Template for climatology input to {{MetplusToolName}} relative to
+# {{METPLUS_TOOL_NAME}}_CLIMO_MEAN_INPUT_DIR. Not used in this example.
+#
+{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_INPUT_TEMPLATE =
+#
+# Template for climatology input to {{MetplusToolName}} relative to
+# {{METPLUS_TOOL_NAME}}_CLIMO_STDEV_INPUT_DIR. Not used in this example.
+#
+{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_INPUT_TEMPLATE =
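The template added above replaces the static per-field-group PointStat configs (deleted above and below) with logic driven by vx_config_dict. The shape of that structure, lists of single-key dicts (needed because a field name such as PRWE can repeat), and the VAR-counter loop can be sketched in plain Python; the fields, levels, and thresholds below are illustrative only:

    # Illustrative stand-in for vx_config_dict['fcst'|'obs'][field_group].
    fcst = [{"TMP": {"Z2": ["ge268", "ge273"]}},
            {"DPT": {"Z2": ["ge263", "ge268"]}}]
    obs = [{"TMP": {"Z2": ["ge268", "ge273"]}},
           {"DPT": {"Z2": ["ge263", "ge268"]}}]

    var_count = 0
    for f_entry, o_entry in zip(fcst, obs):
        (field_fcst, levels_fcst), = f_entry.items()
        (field_obs, _), = o_entry.items()
        for level, threshes in levels_fcst.items():
            var_count += 1
            # Mirrors FCST_VAR{{ns.var_count}}_NAME = ..._ENS_MEAN etc.
            print(f"FCST_VAR{var_count}_NAME = {field_fcst}_{level}_ENS_MEAN")
            print(f"FCST_VAR{var_count}_LEVELS = {level}")
            print(f"FCST_VAR{var_count}_THRESH = {', '.join(threshes)}")
            print(f"OBS_VAR{var_count}_NAME = {field_obs}")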
diff --git a/parm/metplus/PointStat_ensmean_ADPSFC.conf b/parm/metplus/PointStat_ensmean_ADPSFC.conf
deleted file mode 100644
index 6b7e7e9cff..0000000000
--- a/parm/metplus/PointStat_ensmean_ADPSFC.conf
+++ /dev/null
@@ -1,252 +0,0 @@
-# Ensemble mean PointStat METplus Configuration
-
-[config]
-
-# List of applications (tools) to run.
-PROCESS_LIST = PointStat
-
-# time looping - options are INIT, VALID, RETRO, and REALTIME
-# If set to INIT or RETRO:
-# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set
-# If set to VALID or REALTIME:
-# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set
-LOOP_BY = INIT
-
-# Format of INIT_BEG and INIT_END using % items
-# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc.
-# see www.strftime.org for more information
-# %Y%m%d%H expands to YYYYMMDDHH
-INIT_TIME_FMT = %Y%m%d%H
-
-# Start time for METplus run - must match INIT_TIME_FMT
-INIT_BEG = {{cdate}}
-
-# End time for METplus run - must match INIT_TIME_FMT
-INIT_END = {{cdate}}
-
-# Increment between METplus runs (in seconds if no units are specified).
-# Must be >= 60 seconds.
-INIT_INCREMENT = 3600
-
-# List of forecast leads to process for each run time (init or valid)
-# In hours if units are not specified
-# If unset, defaults to 0 (don't loop through forecast leads)
-LEAD_SEQ = {{fhr_list}}
-#
-# Order of loops to process data - Options are times, processes
-# Not relevant if only one item is in the PROCESS_LIST
-# times = run all wrappers in the PROCESS_LIST for a single run time, then
-# increment the run time and run all wrappers again until all times have
-# been evaluated.
-# processes = run the first wrapper in the PROCESS_LIST for all times
-# specified, then repeat for the next item in the PROCESS_LIST until all
-# wrappers have been run
-#
-LOOP_ORDER = times
-#
-# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud.
-#
-LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}}
-#
-# Specify the name of the METplus log file.
-#
-LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}}
-#
-# Specify the location and name of the final METplus conf file.
-#
-METPLUS_CONF = {POINT_STAT_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}}
-#
-# Location of MET configuration file to pass to PointStat.
-#
-# References PARM_BASE, which is the location of the parm directory
-# corresponding to the ush directory of the run_metplus.py script that
-# is called or the value of the environment variable METPLUS_PARM_BASE
-# if set.
-#
-POINT_STAT_CONFIG_FILE = {PARM_BASE}/met_config/PointStatConfig_wrapped
-
-POINT_STAT_OBS_QUALITY_INC = 0, 1, 2, 3, 9, NA
-#POINT_STAT_OBS_QUALITY_EXC =
-
-POINT_STAT_CLIMO_MEAN_TIME_INTERP_METHOD = NEAREST
-#POINT_STAT_CLIMO_STDEV_TIME_INTERP_METHOD =
-
-#POINT_STAT_INTERP_VLD_THRESH =
-#POINT_STAT_INTERP_SHAPE =
-POINT_STAT_INTERP_TYPE_METHOD = BILIN
-POINT_STAT_INTERP_TYPE_WIDTH = 2
-
-#POINT_STAT_OUTPUT_FLAG_FHO =
-#POINT_STAT_OUTPUT_FLAG_CTC =
-#POINT_STAT_OUTPUT_FLAG_CTS =
-#POINT_STAT_OUTPUT_FLAG_MCTC =
-#POINT_STAT_OUTPUT_FLAG_MCTS =
-POINT_STAT_OUTPUT_FLAG_CNT = STAT
-POINT_STAT_OUTPUT_FLAG_SL1L2 = STAT
-#POINT_STAT_OUTPUT_FLAG_SAL1L2 =
-POINT_STAT_OUTPUT_FLAG_VL1L2 = STAT
-#POINT_STAT_OUTPUT_FLAG_VAL1L2 =
-POINT_STAT_OUTPUT_FLAG_VCNT = STAT
-#POINT_STAT_OUTPUT_FLAG_PCT =
-#POINT_STAT_OUTPUT_FLAG_PSTD =
-#POINT_STAT_OUTPUT_FLAG_PJC =
-#POINT_STAT_OUTPUT_FLAG_PRC =
-#POINT_STAT_OUTPUT_FLAG_ECNT =
-#POINT_STAT_OUTPUT_FLAG_RPS =
-#POINT_STAT_OUTPUT_FLAG_ECLV =
-#POINT_STAT_OUTPUT_FLAG_MPR =
-#POINT_STAT_OUTPUT_FLAG_ORANK =
-
-POINT_STAT_CLIMO_CDF_BINS = 1
-#POINT_STAT_CLIMO_CDF_CENTER_BINS = False
-#POINT_STAT_CLIMO_CDF_WRITE_BINS = True
-
-#POINT_STAT_HSS_EC_VALUE =
-
-#
-# Observation data time window(s).
-#
-OBS_WINDOW_BEGIN = -1799
-OBS_WINDOW_END = 1800
-OBS_POINT_STAT_WINDOW_BEGIN = {OBS_WINDOW_BEGIN}
-OBS_POINT_STAT_WINDOW_END = {OBS_WINDOW_END}
-
-# Optional list of offsets to look for point observation data
-POINT_STAT_OFFSETS = 0
-#
-# Name to identify model (forecast) data in output.
-#
-# The variable MODEL is recorded in the stat files, and the data in
-# these files is then plotted (e.g. using METViewer). Here, we add a
-# suffix to MODEL that identifies the data as that for the ensemble
-# mean. This makes it easier to identify each curve.
-#
-MODEL = {{vx_fcst_model_name}}_ensmean
-#
-# Name to identify observation data in output.
-#
-OBTYPE = {{obtype}}
-#
-# Value to enter under the DESC column in the output stat file.
-#
-POINT_STAT_DESC = NA
-
-# Regrid to specified grid. Indicate NONE if no regridding, or the grid id
-# (e.g. G212)
-POINT_STAT_REGRID_TO_GRID = NONE
-POINT_STAT_REGRID_METHOD = BILIN
-POINT_STAT_REGRID_WIDTH = 2
-
-POINT_STAT_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE}
-
-# sets the -obs_valid_beg command line argument (optional)
-# not used for this example
-#POINT_STAT_OBS_VALID_BEG = {valid?fmt=%Y%m%d_%H}
-
-# sets the -obs_valid_end command line argument (optional)
-# not used for this example
-#POINT_STAT_OBS_VALID_END = {valid?fmt=%Y%m%d_%H}
-
-# Verification Masking regions
-# Indicate which grid and polygon masking region, if applicable
-POINT_STAT_GRID =
-
-# List of full path to poly masking files. NOTE: Only short lists of poly
-# files work (those that fit on one line), a long list will result in an
-# environment variable that is too long, resulting in an error. For long
-# lists of poly masking files (i.e. all the mask files in the NCEP_mask
-# directory), define these in the METplus PointStat configuration file.
-POINT_STAT_POLY = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly
-POINT_STAT_STATION_ID =
-
-# Message types, if all message types are to be returned, leave this empty,
-# otherwise indicate the message types of interest.
-POINT_STAT_MESSAGE_TYPE = {{fieldname_in_met_filedir_names}}
-
-# set to True to run PointStat once for each name/level combination
-# set to False to run PointStat once per run time including all fields
-POINT_STAT_ONCE_PER_FIELD = False
-#
-# List of forecast and corresponding observation fields to process.
-#
-FCST_VAR1_NAME = TMP_Z2_ENS_MEAN
-FCST_VAR1_LEVELS = Z2
-FCST_VAR1_THRESH = ge268, ge273, ge278, ge293, ge298, ge303
-OBS_VAR1_NAME = TMP
-OBS_VAR1_LEVELS = Z2
-OBS_VAR1_THRESH = ge268, ge273, ge278, ge293, ge298, ge303
-
-FCST_VAR2_NAME = DPT_Z2_ENS_MEAN
-FCST_VAR2_LEVELS = Z2
-FCST_VAR2_THRESH = ge263, ge268, ge273, ge288, ge293, ge298
-OBS_VAR2_NAME = DPT
-OBS_VAR2_LEVELS = Z2
-OBS_VAR2_THRESH = ge263, ge268, ge273, ge288, ge293, ge298
-
-FCST_VAR3_NAME = WIND_Z10_ENS_MEAN
-FCST_VAR3_LEVELS = Z10
-FCST_VAR3_THRESH = ge5, ge10, ge15
-OBS_VAR3_NAME = WIND
-OBS_VAR3_LEVELS = Z10
-OBS_VAR3_THRESH = ge5, ge10, ge15
-
-# End of [config] section and start of [dir] section.
-[dir]
-#
-# Directory containing observation input to PointStat.
-#
-OBS_POINT_STAT_INPUT_DIR = {{obs_input_dir}}
-#
-# Directory containing forecast input to PointStat.
-#
-FCST_POINT_STAT_INPUT_DIR = {{fcst_input_dir}}
-#
-# Directory containing climatology mean input to PointStat. Not used in
-# this example.
-#
-POINT_STAT_CLIMO_MEAN_INPUT_DIR =
-#
-# Directory containing climatology mean input to PointStat. Not used in
-# this example.
-#
-POINT_STAT_CLIMO_STDEV_INPUT_DIR =
-#
-# Directory in which to write output from PointStat.
-#
-# OUTPUT_BASE apparently has to be set to something; it cannot be left
-# to its default value. But it is not explicitly used elsewhere in this
-# configuration file.
-#
-OUTPUT_BASE = {{output_base}}
-POINT_STAT_OUTPUT_DIR = {{output_dir}}
-#
-# Directory for staging data.
-#
-STAGING_DIR = {{staging_dir}}
-
-# End of [dir] section and start of [filename_templates] section.
-[filename_templates]
-#
-# Template for observation input to PointStat relative to
-# OBS_POINT_STAT_INPUT_DIR.
-#
-OBS_POINT_STAT_INPUT_TEMPLATE = {{obs_input_fn_template}}
-#
-# Template for forecast input to PointStat relative to
-# FCST_POINT_STAT_INPUT_DIR.
-#
-FCST_POINT_STAT_INPUT_TEMPLATE = {{fcst_input_fn_template}}
-#
-# Template for output from PointStat relative to POINT_STAT_OUTPUT_DIR.
-#
-POINT_STAT_OUTPUT_TEMPLATE =
-#
-# Template for climatology input to PointStat relative to
-# POINT_STAT_CLIMO_MEAN_INPUT_DIR. Not used in this example.
-#
-POINT_STAT_CLIMO_MEAN_INPUT_TEMPLATE =
-#
-# Template for climatology input to PointStat relative to
-# POINT_STAT_CLIMO_STDEV_INPUT_DIR. Not used in this example.
-#
-POINT_STAT_CLIMO_STDEV_INPUT_TEMPLATE =
diff --git a/parm/metplus/PointStat_ensmean_ADPUPA.conf b/parm/metplus/PointStat_ensmean_ADPUPA.conf
deleted file mode 100644
index b54c775b46..0000000000
--- a/parm/metplus/PointStat_ensmean_ADPUPA.conf
+++ /dev/null
@@ -1,319 +0,0 @@
-# Ensemble mean PointStat METplus Configuration
-
-[config]
-
-# List of applications (tools) to run.
-PROCESS_LIST = PointStat
-
-# time looping - options are INIT, VALID, RETRO, and REALTIME
-# If set to INIT or RETRO:
-# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set
-# If set to VALID or REALTIME:
-# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set
-LOOP_BY = INIT
-
-# Format of INIT_BEG and INIT_END using % items
-# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc.
-# see www.strftime.org for more information
-# %Y%m%d%H expands to YYYYMMDDHH
-INIT_TIME_FMT = %Y%m%d%H
-
-# Start time for METplus run - must match INIT_TIME_FMT
-INIT_BEG = {{cdate}}
-
-# End time for METplus run - must match INIT_TIME_FMT
-INIT_END = {{cdate}}
-
-# Increment between METplus runs (in seconds if no units are specified).
-# Must be >= 60 seconds.
-INIT_INCREMENT = 3600
-
-# List of forecast leads to process for each run time (init or valid)
-# In hours if units are not specified
-# If unset, defaults to 0 (don't loop through forecast leads)
-LEAD_SEQ = {{fhr_list}}
-#
-# Order of loops to process data - Options are times, processes
-# Not relevant if only one item is in the PROCESS_LIST
-# times = run all wrappers in the PROCESS_LIST for a single run time, then
-# increment the run time and run all wrappers again until all times have
-# been evaluated.
-# processes = run the first wrapper in the PROCESS_LIST for all times
-# specified, then repeat for the next item in the PROCESS_LIST until all
-# wrappers have been run
-#
-LOOP_ORDER = times
-#
-# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud.
-#
-LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}}
-#
-# Specify the name of the METplus log file.
-#
-LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}}
-#
-# Specify the location and name of the final METplus conf file.
-#
-METPLUS_CONF = {POINT_STAT_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}}
-#
-# Location of MET configuration file to pass to PointStat.
-#
-# References PARM_BASE, which is the location of the parm directory
-# corresponding to the ush directory of the run_metplus.py script that
-# is called or the value of the environment variable METPLUS_PARM_BASE
-# if set.
-#
-POINT_STAT_CONFIG_FILE = {PARM_BASE}/met_config/PointStatConfig_wrapped
-
-POINT_STAT_OBS_QUALITY_INC = 0, 1, 2, 3, 9, NA
-#POINT_STAT_OBS_QUALITY_EXC =
-
-POINT_STAT_CLIMO_MEAN_TIME_INTERP_METHOD = NEAREST
-#POINT_STAT_CLIMO_STDEV_TIME_INTERP_METHOD =
-
-#POINT_STAT_INTERP_VLD_THRESH =
-#POINT_STAT_INTERP_SHAPE =
-POINT_STAT_INTERP_TYPE_METHOD = BILIN
-POINT_STAT_INTERP_TYPE_WIDTH = 2
-
-#POINT_STAT_OUTPUT_FLAG_FHO =
-#POINT_STAT_OUTPUT_FLAG_CTC =
-#POINT_STAT_OUTPUT_FLAG_CTS =
-#POINT_STAT_OUTPUT_FLAG_MCTC =
-#POINT_STAT_OUTPUT_FLAG_MCTS =
-POINT_STAT_OUTPUT_FLAG_CNT = STAT
-POINT_STAT_OUTPUT_FLAG_SL1L2 = STAT
-#POINT_STAT_OUTPUT_FLAG_SAL1L2 =
-POINT_STAT_OUTPUT_FLAG_VL1L2 = STAT
-#POINT_STAT_OUTPUT_FLAG_VAL1L2 =
-POINT_STAT_OUTPUT_FLAG_VCNT = STAT
-#POINT_STAT_OUTPUT_FLAG_PCT =
-#POINT_STAT_OUTPUT_FLAG_PSTD =
-#POINT_STAT_OUTPUT_FLAG_PJC =
-#POINT_STAT_OUTPUT_FLAG_PRC =
-#POINT_STAT_OUTPUT_FLAG_ECNT =
-#POINT_STAT_OUTPUT_FLAG_RPS =
-#POINT_STAT_OUTPUT_FLAG_ECLV =
-#POINT_STAT_OUTPUT_FLAG_MPR =
-#POINT_STAT_OUTPUT_FLAG_ORANK =
-
-POINT_STAT_CLIMO_CDF_BINS = 1
-#POINT_STAT_CLIMO_CDF_CENTER_BINS = False
-#POINT_STAT_CLIMO_CDF_WRITE_BINS = True
-
-#POINT_STAT_HSS_EC_VALUE =
-
-#
-# Observation data time window(s).
-#
-OBS_WINDOW_BEGIN = -1799
-OBS_WINDOW_END = 1800
-OBS_POINT_STAT_WINDOW_BEGIN = {OBS_WINDOW_BEGIN}
-OBS_POINT_STAT_WINDOW_END = {OBS_WINDOW_END}
-
-# Optional list of offsets to look for point observation data
-POINT_STAT_OFFSETS = 0
-#
-# Name to identify model (forecast) data in output.
-#
-# The variable MODEL is recorded in the stat files, and the data in
-# these files is then plotted (e.g. using METViewer). Here, we add a
-# suffix to MODEL that identifies the data as that for the ensemble
-# mean. This makes it easier to identify each curve.
-#
-MODEL = {{vx_fcst_model_name}}_ensmean
-#
-# Name to identify observation data in output.
-#
-OBTYPE = {{obtype}}
-#
-# Value to enter under the DESC column in the output stat file.
-#
-POINT_STAT_DESC = NA
-
-# Regrid to specified grid. Indicate NONE if no regridding, or the grid id
-# (e.g. G212)
-POINT_STAT_REGRID_TO_GRID = NONE
-POINT_STAT_REGRID_METHOD = BILIN
-POINT_STAT_REGRID_WIDTH = 2
-
-POINT_STAT_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE}
-
-# sets the -obs_valid_beg command line argument (optional)
-# not used for this example
-#POINT_STAT_OBS_VALID_BEG = {valid?fmt=%Y%m%d_%H}
-
-# sets the -obs_valid_end command line argument (optional)
-# not used for this example
-#POINT_STAT_OBS_VALID_END = {valid?fmt=%Y%m%d_%H}
-
-# Verification Masking regions
-# Indicate which grid and polygon masking region, if applicable
-POINT_STAT_GRID =
-
-# List of full path to poly masking files. NOTE: Only short lists of poly
-# files work (those that fit on one line), a long list will result in an
-# environment variable that is too long, resulting in an error. For long
-# lists of poly masking files (i.e. all the mask files in the NCEP_mask
-# directory), define these in the METplus PointStat configuration file.
-POINT_STAT_POLY = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly
-POINT_STAT_STATION_ID =
-
-# Message types, if all message types are to be returned, leave this empty,
-# otherwise indicate the message types of interest.
-POINT_STAT_MESSAGE_TYPE = {{fieldname_in_met_filedir_names}}
-
-# set to True to run PointStat once for each name/level combination
-# set to False to run PointStat once per run time including all fields
-POINT_STAT_ONCE_PER_FIELD = False
-#
-# List of forecast and corresponding observation fields to process.
-#
-FCST_VAR1_NAME = TMP_P850_ENS_MEAN
-FCST_VAR1_LEVELS = P850
-FCST_VAR1_THRESH = ge288, ge293, ge298
-OBS_VAR1_NAME = TMP
-OBS_VAR1_LEVELS = P850
-OBS_VAR1_THRESH = ge288, ge293, ge298
-
-FCST_VAR2_NAME = TMP_P700_ENS_MEAN
-FCST_VAR2_LEVELS = P700
-FCST_VAR2_THRESH = ge273, ge278, ge283
-OBS_VAR2_NAME = TMP
-OBS_VAR2_LEVELS = P700
-OBS_VAR2_THRESH = ge273, ge278, ge283
-
-FCST_VAR3_NAME = TMP_P500_ENS_MEAN
-FCST_VAR3_LEVELS = P500
-FCST_VAR3_THRESH = ge258, ge263, ge268
-OBS_VAR3_NAME = TMP
-OBS_VAR3_LEVELS = P500
-OBS_VAR3_THRESH = ge258, ge263, ge268
-
-FCST_VAR4_NAME = DPT_P850_ENS_MEAN
-FCST_VAR4_LEVELS = P850
-FCST_VAR4_THRESH = ge273, ge278, ge283
-OBS_VAR4_NAME = DPT
-OBS_VAR4_LEVELS = P850
-OBS_VAR4_THRESH = ge273, ge278, ge283
-
-FCST_VAR5_NAME = DPT_P700_ENS_MEAN
-FCST_VAR5_LEVELS = P700
-FCST_VAR5_THRESH = ge263, ge286, ge273
-OBS_VAR5_NAME = DPT
-OBS_VAR5_LEVELS = P700
-OBS_VAR5_THRESH = ge263, ge286, ge273
-
-FCST_VAR6_NAME = WIND_P850_ENS_MEAN
-FCST_VAR6_LEVELS = P850
-FCST_VAR6_THRESH = ge5, ge10, ge15
-OBS_VAR6_NAME = WIND
-OBS_VAR6_LEVELS = P850
-OBS_VAR6_THRESH = ge5, ge10, ge15
-
-FCST_VAR7_NAME = WIND_P700_ENS_MEAN
-FCST_VAR7_LEVELS = P700
-FCST_VAR7_THRESH = ge10, ge15, ge20
-OBS_VAR7_NAME = WIND
-OBS_VAR7_LEVELS = P700
-OBS_VAR7_THRESH = ge10, ge15, ge20
-
-FCST_VAR8_NAME = WIND_P500_ENS_MEAN
-FCST_VAR8_LEVELS = P500
-FCST_VAR8_THRESH = ge15, ge21, ge26
-OBS_VAR8_NAME = WIND
-OBS_VAR8_LEVELS = P500
-OBS_VAR8_THRESH = ge15, ge21, ge26
-
-FCST_VAR9_NAME = WIND_P250_ENS_MEAN
-FCST_VAR9_LEVELS = P250
-FCST_VAR9_THRESH = ge26, ge31, ge46, ge62
-OBS_VAR9_NAME = WIND
-OBS_VAR9_LEVELS = P250
-OBS_VAR9_THRESH = ge26, ge31, ge46, ge62
-
-FCST_VAR10_NAME = HGT_P500_ENS_MEAN
-FCST_VAR10_LEVELS = P500
-FCST_VAR10_THRESH = ge5400, ge5600, ge5880
-OBS_VAR10_NAME = HGT
-OBS_VAR10_LEVELS = P500
-OBS_VAR10_THRESH = ge5400, ge5600, ge5880
-
-FCST_VAR11_NAME = CAPE_L0_ENS_MEAN
-FCST_VAR11_LEVELS = L0
-FCST_VAR11_THRESH = le1000, gt1000&<2500, gt2500&<4000, gt2500
-FCST_VAR11_OPTIONS = cnt_thresh = [ >0 ];
-OBS_VAR11_NAME = CAPE
-OBS_VAR11_LEVELS = L0-100000
-OBS_VAR11_THRESH = le1000, gt1000&<2500, gt2500&<4000, gt2500
-OBS_VAR11_OPTIONS = cnt_thresh = [ >0 ];
- cnt_logic = UNION;
-
-FCST_VAR12_NAME = HPBL_Z0_ENS_MEAN
-FCST_VAR12_LEVELS = Z0
-FCST_VAR12_THRESH = lt500, lt1500, gt1500
-OBS_VAR12_NAME = PBL
-OBS_VAR12_LEVELS = L0
-OBS_VAR12_THRESH = lt500, lt1500, gt1500
-OBS_VAR12_OPTIONS = desc = "TKE";
-
-# End of [config] section and start of [dir] section.
-[dir]
-#
-# Directory containing observation input to PointStat.
-#
-OBS_POINT_STAT_INPUT_DIR = {{obs_input_dir}}
-#
-# Directory containing forecast input to PointStat.
-#
-FCST_POINT_STAT_INPUT_DIR = {{fcst_input_dir}}
-#
-# Directory containing climatology mean input to PointStat. Not used in
-# this example.
-#
-POINT_STAT_CLIMO_MEAN_INPUT_DIR =
-#
-# Directory containing climatology mean input to PointStat. Not used in
-# this example.
-#
-POINT_STAT_CLIMO_STDEV_INPUT_DIR =
-#
-# Directory in which to write output from PointStat.
-#
-# OUTPUT_BASE apparently has to be set to something; it cannot be left
-# to its default value. But it is not explicitly used elsewhere in this
-# configuration file.
-#
-OUTPUT_BASE = {{output_base}}
-POINT_STAT_OUTPUT_DIR = {{output_dir}}
-#
-# Directory for staging data.
-#
-STAGING_DIR = {{staging_dir}}
-
-# End of [dir] section and start of [filename_templates] section.
-[filename_templates]
-#
-# Template for observation input to PointStat relative to
-# OBS_POINT_STAT_INPUT_DIR.
-#
-OBS_POINT_STAT_INPUT_TEMPLATE = {{obs_input_fn_template}}
-#
-# Template for forecast input to PointStat relative to
-# FCST_POINT_STAT_INPUT_DIR.
-#
-FCST_POINT_STAT_INPUT_TEMPLATE = {{fcst_input_fn_template}}
-#
-# Template for output from PointStat relative to POINT_STAT_OUTPUT_DIR.
-#
-POINT_STAT_OUTPUT_TEMPLATE =
-#
-# Template for climatology input to PointStat relative to
-# POINT_STAT_CLIMO_MEAN_INPUT_DIR. Not used in this example.
-#
-POINT_STAT_CLIMO_MEAN_INPUT_TEMPLATE =
-#
-# Template for climatology input to PointStat relative to
-# POINT_STAT_CLIMO_STDEV_INPUT_DIR. Not used in this example.
-#
-POINT_STAT_CLIMO_STDEV_INPUT_TEMPLATE =
diff --git a/parm/metplus/PointStat_ensprob.conf b/parm/metplus/PointStat_ensprob.conf
new file mode 100644
index 0000000000..69ef9fd5db
--- /dev/null
+++ b/parm/metplus/PointStat_ensprob.conf
@@ -0,0 +1,524 @@
+# Ensemble probabilistic {{MetplusToolName}} METplus Configuration
+
+[config]
+
+# List of applications (tools) to run.
+PROCESS_LIST = {{MetplusToolName}}
+
+# time looping - options are INIT, VALID, RETRO, and REALTIME
+# If set to INIT or RETRO:
+# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set
+# If set to VALID or REALTIME:
+# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set
+LOOP_BY = INIT
+
+# Format of INIT_BEG and INIT_END using % items
+# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc.
+# see www.strftime.org for more information
+# %Y%m%d%H expands to YYYYMMDDHH
+INIT_TIME_FMT = %Y%m%d%H
+
+# Start time for METplus run - must match INIT_TIME_FMT
+INIT_BEG = {{cdate}}
+
+# End time for METplus run - must match INIT_TIME_FMT
+INIT_END = {{cdate}}
+
+# Increment between METplus runs (in seconds if no units are specified).
+# Must be >= 60 seconds.
+INIT_INCREMENT = 3600
+
+# List of forecast leads to process for each run time (init or valid)
+# In hours if units are not specified
+# If unset, defaults to 0 (don't loop through forecast leads)
+LEAD_SEQ = {{fhr_list}}
+#
+# Order of loops to process data - Options are times, processes
+# Not relevant if only one item is in the PROCESS_LIST
+# times = run all wrappers in the PROCESS_LIST for a single run time, then
+# increment the run time and run all wrappers again until all times have
+# been evaluated.
+# processes = run the first wrapper in the PROCESS_LIST for all times
+# specified, then repeat for the next item in the PROCESS_LIST until all
+# wrappers have been run
+#
+LOOP_ORDER = times
+#
+# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud.
+#
+LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}}
+#
+# Specify the name of the METplus log file.
+#
+LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}}
+#
+# Specify the location and name of the final METplus conf file.
+#
+METPLUS_CONF = {{ '{' ~ METPLUS_TOOL_NAME ~ '_OUTPUT_DIR}' }}/metplus_final.{{metplus_config_fn}}
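+{#-
+For reference (illustrative, inferred from the per-tool configuration
+files this template replaces): when METPLUS_TOOL_NAME is POINT_STAT, the
+setting above renders as
+METPLUS_CONF = {POINT_STAT_OUTPUT_DIR}/metplus_final.<metplus config file name>
+#}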
+#
+# Location of MET configuration file to pass to {{MetplusToolName}}.
+#
+# References PARM_BASE, which is the location of the parm directory
+# corresponding to the ush directory of the run_metplus.py script that
+# is called or the value of the environment variable METPLUS_PARM_BASE
+# if set.
+#
+{{METPLUS_TOOL_NAME}}_CONFIG_FILE = {PARM_BASE}/met_config/{{MetplusToolName}}Config_wrapped
+
+{{METPLUS_TOOL_NAME}}_OBS_QUALITY_INC = 0, 1, 2, 3, 9, NA
+#{{METPLUS_TOOL_NAME}}_OBS_QUALITY_EXC =
+
+{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_TIME_INTERP_METHOD = NEAREST
+#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_TIME_INTERP_METHOD =
+
+#{{METPLUS_TOOL_NAME}}_INTERP_VLD_THRESH =
+#{{METPLUS_TOOL_NAME}}_INTERP_SHAPE =
+{{METPLUS_TOOL_NAME}}_INTERP_TYPE_METHOD = BILIN
+{{METPLUS_TOOL_NAME}}_INTERP_TYPE_WIDTH = 2
+
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_FHO =
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_CTC =
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_CTS =
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_MCTC =
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_MCTS =
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_CNT =
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_SL1L2 =
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_SAL1L2 =
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_VL1L2 =
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_VAL1L2 =
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_VCNT =
+{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_PCT = STAT
+{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_PSTD = STAT
+{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_PJC = STAT
+{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_PRC = STAT
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_ECNT =
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_RPS =
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_ECLV =
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_MPR =
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_ORANK =
+
+{{METPLUS_TOOL_NAME}}_CLIMO_CDF_BINS = 1
+#{{METPLUS_TOOL_NAME}}_CLIMO_CDF_CENTER_BINS = False
+#{{METPLUS_TOOL_NAME}}_CLIMO_CDF_WRITE_BINS = True
+
+#{{METPLUS_TOOL_NAME}}_HSS_EC_VALUE =
+
+#
+# Observation data time window(s).
+#
+OBS_WINDOW_BEGIN = -1799
+OBS_WINDOW_END = 1800
+OBS_{{METPLUS_TOOL_NAME}}_WINDOW_BEGIN = {OBS_WINDOW_BEGIN}
+OBS_{{METPLUS_TOOL_NAME}}_WINDOW_END = {OBS_WINDOW_END}
+
+# Optional list of offsets to look for point observation data
+{{METPLUS_TOOL_NAME}}_OFFSETS = 0
+#
+# Name to identify model (forecast) data in output.
+#
+# The variable MODEL is recorded in the stat files, and the data in
+# these files is then plotted (e.g. using METViewer). Here, we add a
+# suffix to MODEL that identifies the data as ensemble-probabilistic.
+# This makes it easier to identify each curve.
+#
+MODEL = {{vx_fcst_model_name}}_ensprob
+#
+# Name to identify observation data in output.
+#
+OBTYPE = {{obtype}}
+#
+# Value to enter under the DESC column in the output stat file.
+#
+{{METPLUS_TOOL_NAME}}_DESC = NA
+
+# Regrid to a specified grid. Indicate NONE for no regridding, or specify
+# the grid id (e.g. G212).
+{{METPLUS_TOOL_NAME}}_REGRID_TO_GRID = NONE
+{{METPLUS_TOOL_NAME}}_REGRID_METHOD = BILIN
+{{METPLUS_TOOL_NAME}}_REGRID_WIDTH = 2
+
+{{METPLUS_TOOL_NAME}}_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE}
+
+# sets the -obs_valid_beg command line argument (optional)
+# not used for this example
+#{{METPLUS_TOOL_NAME}}_OBS_VALID_BEG = {valid?fmt=%Y%m%d_%H}
+
+# sets the -obs_valid_end command line argument (optional)
+# not used for this example
+#{{METPLUS_TOOL_NAME}}_OBS_VALID_END = {valid?fmt=%Y%m%d_%H}
+
+# Verification Masking regions
+# Indicate which grid and polygon masking region, if applicable
+{{METPLUS_TOOL_NAME}}_GRID =
+
+# List of full paths to poly masking files. NOTE: Only short lists of poly
+# files work (those that fit on one line); a long list will result in an
+# environment variable that is too long, causing an error. For long
+# lists of poly masking files (i.e. all the mask files in the NCEP_mask
+# directory), define these in the METplus {{MetplusToolName}} configuration file.
+{{METPLUS_TOOL_NAME}}_POLY = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly
+{{METPLUS_TOOL_NAME}}_STATION_ID =
+
+# Message types: if all message types are to be returned, leave this empty;
+# otherwise indicate the message types of interest.
+{{METPLUS_TOOL_NAME}}_MESSAGE_TYPE = {{fieldname_in_met_filedir_names}}
+
+# set to True to run {{MetplusToolName}} once for each name/level combination
+# set to False to run {{MetplusToolName}} once per run time including all fields
+{{METPLUS_TOOL_NAME}}_ONCE_PER_FIELD = False
+#
+# List of forecast and corresponding observation fields to process.
+# Note that the forecast variable name must exactly match the name of a
+# variable in the forecast input file(s).
+#
+{#-
+Import the file containing jinja macros.
+#}
+{%- import metplus_templates_dir ~ '/metplus_macros.jinja' as metplus_macros %}
+
+{#-
+Set the probabilistic threshold to be used for the forecast field. If
+necessary, this can be changed to be an input parameter in the calling
+script instead of a hard-coded value as below.
+#}
+{%- set thresh_fcst_prob = '==0.1' %}
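+{#-
+A minimal sketch of that alternative (hypothetical; the variable name
+input_thresh_fcst_prob is an assumed workflow input, not an existing
+one):
+
+  {%- set thresh_fcst_prob = input_thresh_fcst_prob | default('==0.1') %}
+#}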
+
+{#-
+Jinja requires certain variables to be defined globally within the template
+before they can be used in if-statements and other scopes (see Jinja
+scoping rules). Define such variables.
+#}
+{%- set level_fcst = '' %}
+{%- set level_obs = '' %}
+{%- set indx_level_fcst = '' %}
+
+{%- set valid_threshes_fcst = [] %}
+{%- set valid_threshes_obs = [] %}
+{%- set thresh_fcst = '' %}
+{%- set thresh_obs = '' %}
+{%- set indx_thresh_fcst = '' %}
+{%- set thresh_fcst_and_or = '' %}
+
+{%- set opts_indent = '' %}
+{%- set opts_indent_len = '' %}
+{%- set tmp = '' %}
+{%- set error_msg = '' %}
+
+{#-
+Make sure that the sets of field groups for forecasts and observations
+are identical.
+#}
+{%- set fgs_fcst = vx_config_dict['fcst'].keys()|list %}
+{%- set fgs_obs = vx_config_dict['obs'].keys()|list %}
+{%- if (fgs_fcst != fgs_obs) %}
+ {%- set error_msg = '\n' ~
+'The list of valid field groups for forecasts (fgs_fcst) must be identical\n' ~
+'to that for observations (fgs_obs) but isn\'t:\n' ~
+' fgs_fcst = ' ~ fgs_fcst ~ '\n' ~
+' fgs_obs = ' ~ fgs_obs %}
+ {{metplus_macros.print_err_and_quit(error_msg)}}
+{%- endif %}
+
+{#-
+Extract the lists of forecast and observation dictionaries containing
+the valid fields, levels, and thresholds corresponding to the specified
+field group (input_field_group). Note that it would be simpler to have
+these be just dictionaries in which the keys are the field names (instead
+of them being LISTS of dictionaries in which each dictionary contains a
+single key that is the field name), but that approach cannot be used here
+because it is possible for field names to be repeated (for both forecasts
+and observations). For example, in the observations, the field name
+'PRWE' appears more than once, each time with a different threshold, and
+the combination of name and threshold is what constitutes a unique field,
+not just the name by itself.
+#}
+{%- set fields_levels_threshes_fcst = vx_config_dict['fcst'][input_field_group] %}
+{%- set fields_levels_threshes_obs = vx_config_dict['obs'][input_field_group] %}
+
+{#-
+Reset the specified forecast level so that if it happens to be an
+accumulation (e.g. 'A03'), the leading zeros in front of the hour are
+stripped out (e.g. reset to 'A3').
+#}
+{%- set input_level_fcst = metplus_macros.get_accumulation_no_zero_pad(input_level_fcst) %}
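+{#-
+For example, 'A03' is reset to 'A3'; a non-accumulation level such as
+'Z2' or 'P850' would be expected to pass through unchanged.
+#}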
+
+{#-
+Ensure that the specified input forecast level(s) (input_level_fcst) and
+threshold(s) (input_thresh_fcst) are valid, i.e. that they are in the
+set(s) of valid forecast levels and thresholds, respectively, specified
+in fields_levels_threshes_fcst.
+#}
+{{- metplus_macros.check_level(fields_levels_threshes_fcst, input_level_fcst) }}
+{{- metplus_macros.check_thresh(fields_levels_threshes_fcst, input_level_fcst, input_thresh_fcst) }}
+
+{#-
+For convenience, create lists of valid forecast and observation field
+names.
+#}
+{%- set num_valid_fields_fcst = fields_levels_threshes_fcst|length %}
+{%- set valid_fields_fcst = [] %}
+{%- for i in range(0,num_valid_fields_fcst) %}
+ {%- set field = fields_levels_threshes_fcst[i].keys()|list|join('') %}
+ {%- set tmp = valid_fields_fcst.append(field) %}
+{%- endfor %}
+
+{%- set valid_fields_obs = [] %}
+{%- set num_valid_fields_obs = fields_levels_threshes_obs|length %}
+{%- for i in range(0,num_valid_fields_obs) %}
+ {%- set field = fields_levels_threshes_obs[i].keys()|list|join('') %}
+ {%- set tmp = valid_fields_obs.append(field) %}
+{%- endfor %}
+
+{#-
+Ensure that the number of valid fields for forecasts is equal to that
+for the observations.
+#}
+{%- set num_valid_fields = 0 %}
+{%- if (num_valid_fields_fcst != num_valid_fields_obs) %}
+ {%- set error_msg = '\n' ~
+'The number of valid forecast fields (num_valid_fields_fcst) must be\n' ~
+'equal to the number of valid observation fields (num_valid_fields_obs)\n' ~
+'but isn\'t:\n' ~
+' num_valid_fields_fcst = ' ~ num_valid_fields_fcst ~ '\n' ~
+' num_valid_fields_obs = ' ~ num_valid_fields_obs ~ '\n' ~
+'The lists of valid forecast and observation fields are:\n' ~
+' valid_fields_fcst = ' ~ valid_fields_fcst ~ '\n' ~
+' valid_fields_obs = ' ~ valid_fields_obs ~ '\n' %}
+ {{metplus_macros.print_err_and_quit(error_msg)}}
+{%- else %}
+ {%- set num_valid_fields = num_valid_fields_fcst %}
+{%- endif %}
+
+{#-
+Loop over the valid fields and set field names, levels, thresholds, and/
+or options for each field, both for forecasts and for observations, in
+the METplus configuration file.
+#}
+{%- set ns = namespace(var_count = 0) %}
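+{#-
+A namespace object is used because, under Jinja's scoping rules, a plain
+variable assigned inside a for-loop does not persist across iterations,
+whereas a namespace attribute (ns.var_count below) does.
+#}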
+
+{#-
+This outer for-loop is included to make this code as similar as possible
+to the one in GridStat_ensprob.conf. There, treat_fcst_as_prob takes on
+both True and False values, although here it only takes on the value
+True (which makes the loop redundant). It is not clear why it does not
+also need to take on the value False here. This is being investigated
+(12/13/2023).
+#}
+{%- for treat_fcst_as_prob in [True] %}
+
+ {%- for i in range(0,num_valid_fields) %}
+
+ {%- set field_fcst = valid_fields_fcst[i] %}
+ {%- set field_obs = valid_fields_obs[i] %}
+
+{#-
+For convenience, create lists of valid forecast and observation levels
+for the current field. Then check that the number of valid levels for
+forecasts is the same as that for observations.
+#}
+ {%- set valid_levels_fcst = fields_levels_threshes_fcst[i][field_fcst].keys()|list %}
+ {%- set valid_levels_obs = fields_levels_threshes_obs[i][field_obs].keys()|list %}
+
+{#-
+Extract dictionary of valid forecast levels (the dictionary keys) and
+corresponding lists of valid thresholds (the values) for each level.
+Then loop over these levels and corresponding lists of thresholds to set
+both the forecast and observation field names, levels, thresholds, and/or
+options.
+#}
+ {%- set valid_levels_threshes_fcst = fields_levels_threshes_fcst[i][field_fcst] %}
+ {%- for level_fcst, valid_threshes_fcst in valid_levels_threshes_fcst.items() %}
+
+ {%- if (input_level_fcst == 'all') or (input_level_fcst == level_fcst) %}
+
+ {%- for thresh_fcst in valid_threshes_fcst %}
+
+ {%- if (input_thresh_fcst == 'all') or (input_thresh_fcst == thresh_fcst) %}
+{#-
+Increment the METplus variable counter.
+#}
+ {%- set ns.var_count = ns.var_count+1 %}
+
+{#-
+Set forecast field name.
+#}
+ {%- set thresh_fcst_and_or = thresh_fcst|replace("&&", ".and.") %}
+ {%- set thresh_fcst_and_or = thresh_fcst_and_or|replace("||", ".or.") %}
+FCST_VAR{{ns.var_count}}_NAME = {{field_fcst}}_{{level_fcst}}_ENS_FREQ_{{thresh_fcst_and_or}}
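+{#-
+For example, a compound physical threshold of 'gt1000&&lt2500' is
+rendered in the name above as 'gt1000.and.lt2500'.
+#}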
+
+{#-
+Set forecast field level.
+#}
+FCST_VAR{{ns.var_count}}_LEVELS = {{level_fcst}}
+
+{#-
+Set forecast field threshold.
+Note that since the forecast field being read in is actually a field of
+probabilities, we set the forecast threshold to a probabilistic one
+(thresh_fcst_prob) and not to the physical threshold (thresh_fcst) in
+the dictionary of forecast field names, levels, and thresholds that we
+are looping over.
+#}
+FCST_VAR{{ns.var_count}}_THRESH = {{thresh_fcst_prob}}
+
+{#-
+Set forecast field options.
+#}
+ {%- set opts_indent_len = 20 %}
+ {%- if (ns.var_count > 9) and (ns.var_count <= 99) %}
+ {%- set opts_indent_len = opts_indent_len + 1 %}
+ {%- elif (ns.var_count > 99) and (ns.var_count <= 999) %}
+ {%- set opts_indent_len = opts_indent_len + 2 %}
+ {%- elif (ns.var_count > 999) %}
+ {%- set opts_indent_len = opts_indent_len + 3 %}
+ {%- endif %}
+ {%- set opts_indent = ' '*opts_indent_len %}
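+{#-
+For example, when ns.var_count = 12, opts_indent_len becomes 21, which
+aligns continuation lines under 'FCST_VAR12_OPTIONS = '.
+#}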
+
+ {%- if input_field_group == 'ADPSFC' %}
+
+ {%- if field_fcst == 'HGT' %}
+FCST_VAR{{ns.var_count}}_OPTIONS = desc = "CEILING";
+ {%- elif field_fcst == 'VIS' %}
+FCST_VAR{{ns.var_count}}_OPTIONS = interp = { type = [ { method = NEAREST; width = 1; } ]; }
+ {%- endif %}
+
+ {%- endif %}
+
+{#-
+Set observation field name.
+#}
+OBS_VAR{{ns.var_count}}_NAME = {{field_obs}}
+
+{#-
+Set observation field level.
+#}
+ {%- set indx_level_fcst = valid_levels_fcst.index(level_fcst) %}
+ {%- set level_obs = valid_levels_obs[indx_level_fcst] %}
+OBS_VAR{{ns.var_count}}_LEVELS = {{level_obs}}
+
+{#-
+Set observation field threshold. Note that no observation thresholds
+are included in the METplus configuration file if input_thresh_fcst is
+set to 'none'.
+#}
+ {%- if (input_thresh_fcst != 'none') %}
+{#-
+Set the list of valid observation thresholds to the one corresponding to
+the current observation level (level_obs).
+#}
+ {%- set valid_threshes_obs = fields_levels_threshes_obs[i][field_obs][level_obs] %}
+{#-
+Set the observation threshold. This is given by the element in the list
+of valid observation thresholds that has the same index as that of the
+current forecast threshold (thresh_fcst) in the list of valid forecast
+thresholds.
+#}
+ {%- set indx_thresh_fcst = valid_threshes_fcst.index(thresh_fcst) %}
+ {%- set thresh_obs = valid_threshes_obs[indx_thresh_fcst] %}
+OBS_VAR{{ns.var_count}}_THRESH = {{thresh_obs}}
+ {%- endif %}
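+{#-
+Example (illustrative): if valid_threshes_fcst is ['ge268', 'ge273'] and
+thresh_fcst is 'ge273', then indx_thresh_fcst is 1 and thresh_obs is the
+second element of valid_threshes_obs.
+#}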
+
+{#-
+Set observation field options.
+#}
+ {%- set opts_indent_len = opts_indent_len - 1 %}
+ {%- set opts_indent = ' '*opts_indent_len %}
+
+ {%- if input_field_group == 'ADPSFC' %}
+
+ {%- if field_obs == 'CEILING' %}
+OBS_VAR{{ns.var_count}}_OPTIONS = GRIB_lvl_typ = 215;
+{{opts_indent}}interp = { type = [ { method = NEAREST; width = 1; } ]; }
+ {%- elif field_obs == 'VIS' %}
+OBS_VAR{{ns.var_count}}_OPTIONS = interp = { type = [ { method = NEAREST; width = 1; } ]; }
+ {%- endif %}
+
+ {%- elif input_field_group == 'ADPUPA' %}
+
+ {%- if field_obs == 'CAPE' %}
+OBS_VAR{{ns.var_count}}_OPTIONS = cnt_thresh = [ >0 ];
+{{opts_indent}}cnt_logic = UNION;
+ {%- elif field_obs == 'PBL' %}
+OBS_VAR{{ns.var_count}}_OPTIONS = desc = "TKE";
+ {%- endif %}
+
+ {%- endif %}
+{#-
+Print out a newline to separate the settings for the current field (both
+forecast and observation settings) from those for the next field.
+#}
+ {{- '\n' }}
+
+ {%- endif %}
+ {%- endfor %}
+
+ {%- endif %}
+
+ {%- endfor %}
+ {%- endfor %}
+{%- endfor %}
+#
+# Forecast data description variables
+#
+FCST_IS_PROB = True
+FCST_PROB_IN_GRIB_PDS = False
+
+# End of [config] section and start of [dir] section.
+[dir]
+#
+# Directory containing observation input to {{MetplusToolName}}.
+#
+OBS_{{METPLUS_TOOL_NAME}}_INPUT_DIR = {{obs_input_dir}}
+#
+# Directory containing forecast input to {{MetplusToolName}}.
+#
+FCST_{{METPLUS_TOOL_NAME}}_INPUT_DIR = {{fcst_input_dir}}
+#
+# Directory containing climatology mean input to {{MetplusToolName}}. Not used in
+# this example.
+#
+{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_INPUT_DIR =
+#
+# Directory containing climatology standard deviation input to
+# {{MetplusToolName}}. Not used in this example.
+#
+{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_INPUT_DIR =
+#
+# Directory in which to write output from {{MetplusToolName}}.
+#
+# OUTPUT_BASE apparently has to be set to something; it cannot be left
+# to its default value. But it is not explicitly used elsewhere in this
+# configuration file.
+#
+OUTPUT_BASE = {{output_base}}
+{{METPLUS_TOOL_NAME}}_OUTPUT_DIR = {{output_dir}}
+#
+# Directory for staging data.
+#
+STAGING_DIR = {{staging_dir}}
+
+# End of [dir] section and start of [filename_templates] section.
+[filename_templates]
+#
+# Template for observation input to {{MetplusToolName}} relative to
+# OBS_{{METPLUS_TOOL_NAME}}_INPUT_DIR.
+#
+OBS_{{METPLUS_TOOL_NAME}}_INPUT_TEMPLATE = {{obs_input_fn_template}}
+#
+# Template for forecast input to {{MetplusToolName}} relative to
+# FCST_{{METPLUS_TOOL_NAME}}_INPUT_DIR.
+#
+FCST_{{METPLUS_TOOL_NAME}}_INPUT_TEMPLATE = {{fcst_input_fn_template}}
+#
+# Template for output from {{MetplusToolName}} relative to {{METPLUS_TOOL_NAME}}_OUTPUT_DIR.
+#
+{{METPLUS_TOOL_NAME}}_OUTPUT_TEMPLATE =
+#
+# Template for climatology input to {{MetplusToolName}} relative to
+# {{METPLUS_TOOL_NAME}}_CLIMO_MEAN_INPUT_DIR. Not used in this example.
+#
+{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_INPUT_TEMPLATE =
+#
+# Template for climatology input to {{MetplusToolName}} relative to
+# {{METPLUS_TOOL_NAME}}_CLIMO_STDEV_INPUT_DIR. Not used in this example.
+#
+{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_INPUT_TEMPLATE =
diff --git a/parm/metplus/PointStat_ensprob_ADPSFC.conf b/parm/metplus/PointStat_ensprob_ADPSFC.conf
deleted file mode 100644
index c9333b2c81..0000000000
--- a/parm/metplus/PointStat_ensprob_ADPSFC.conf
+++ /dev/null
@@ -1,415 +0,0 @@
-# Ensemble probabilistic PointStat METplus Configuration
-
-[config]
-
-# List of applications (tools) to run.
-PROCESS_LIST = PointStat
-
-# time looping - options are INIT, VALID, RETRO, and REALTIME
-# If set to INIT or RETRO:
-# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set
-# If set to VALID or REALTIME:
-# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set
-LOOP_BY = INIT
-
-# Format of INIT_BEG and INIT_END using % items
-# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc.
-# see www.strftime.org for more information
-# %Y%m%d%H expands to YYYYMMDDHH
-INIT_TIME_FMT = %Y%m%d%H
-
-# Start time for METplus run - must match INIT_TIME_FMT
-INIT_BEG = {{cdate}}
-
-# End time for METplus run - must match INIT_TIME_FMT
-INIT_END = {{cdate}}
-
-# Increment between METplus runs (in seconds if no units are specified).
-# Must be >= 60 seconds.
-INIT_INCREMENT = 3600
-
-# List of forecast leads to process for each run time (init or valid)
-# In hours if units are not specified
-# If unset, defaults to 0 (don't loop through forecast leads)
-LEAD_SEQ = {{fhr_list}}
-#
-# Order of loops to process data - Options are times, processes
-# Not relevant if only one item is in the PROCESS_LIST
-# times = run all wrappers in the PROCESS_LIST for a single run time, then
-# increment the run time and run all wrappers again until all times have
-# been evaluated.
-# processes = run the first wrapper in the PROCESS_LIST for all times
-# specified, then repeat for the next item in the PROCESS_LIST until all
-# wrappers have been run
-#
-LOOP_ORDER = times
-#
-# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud.
-#
-LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}}
-#
-# Specify the name of the METplus log file.
-#
-LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}}
-#
-# Specify the location and name of the final METplus conf file.
-#
-METPLUS_CONF = {POINT_STAT_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}}
-#
-# Location of MET configuration file to pass to PointStat.
-#
-# References PARM_BASE, which is the location of the parm directory
-# corresponding to the ush directory of the run_metplus.py script that
-# is called or the value of the environment variable METPLUS_PARM_BASE
-# if set.
-#
-POINT_STAT_CONFIG_FILE = {PARM_BASE}/met_config/PointStatConfig_wrapped
-
-POINT_STAT_OBS_QUALITY_INC = 0, 1, 2, 3, 9, NA
-#POINT_STAT_OBS_QUALITY_EXC =
-
-POINT_STAT_CLIMO_MEAN_TIME_INTERP_METHOD = NEAREST
-#POINT_STAT_CLIMO_STDEV_TIME_INTERP_METHOD =
-
-#POINT_STAT_INTERP_VLD_THRESH =
-#POINT_STAT_INTERP_SHAPE =
-POINT_STAT_INTERP_TYPE_METHOD = BILIN
-POINT_STAT_INTERP_TYPE_WIDTH = 2
-
-#POINT_STAT_OUTPUT_FLAG_FHO =
-#POINT_STAT_OUTPUT_FLAG_CTC =
-#POINT_STAT_OUTPUT_FLAG_CTS =
-#POINT_STAT_OUTPUT_FLAG_MCTC =
-#POINT_STAT_OUTPUT_FLAG_MCTS =
-#POINT_STAT_OUTPUT_FLAG_CNT =
-#POINT_STAT_OUTPUT_FLAG_SL1L2 =
-#POINT_STAT_OUTPUT_FLAG_SAL1L2 =
-#POINT_STAT_OUTPUT_FLAG_VL1L2 =
-#POINT_STAT_OUTPUT_FLAG_VAL1L2 =
-#POINT_STAT_OUTPUT_FLAG_VCNT =
-POINT_STAT_OUTPUT_FLAG_PCT = STAT
-POINT_STAT_OUTPUT_FLAG_PSTD = STAT
-POINT_STAT_OUTPUT_FLAG_PJC = STAT
-POINT_STAT_OUTPUT_FLAG_PRC = STAT
-#POINT_STAT_OUTPUT_FLAG_ECNT =
-#POINT_STAT_OUTPUT_FLAG_RPS =
-#POINT_STAT_OUTPUT_FLAG_ECLV =
-#POINT_STAT_OUTPUT_FLAG_MPR =
-#POINT_STAT_OUTPUT_FLAG_ORANK =
-
-POINT_STAT_CLIMO_CDF_BINS = 1
-#POINT_STAT_CLIMO_CDF_CENTER_BINS = False
-#POINT_STAT_CLIMO_CDF_WRITE_BINS = True
-
-#POINT_STAT_HSS_EC_VALUE =
-
-#
-# Observation data time window(s).
-#
-OBS_WINDOW_BEGIN = -1799
-OBS_WINDOW_END = 1800
-OBS_POINT_STAT_WINDOW_BEGIN = {OBS_WINDOW_BEGIN}
-OBS_POINT_STAT_WINDOW_END = {OBS_WINDOW_END}
-
-# Optional list of offsets to look for point observation data
-POINT_STAT_OFFSETS = 0
-#
-# Name to identify model (forecast) data in output.
-#
-# The variable MODEL is recorded in the stat files, and the data in
-# these files is then plotted (e.g. using METViewer). Here, we add a
-# suffix to MODEL that identifies the data as ensemble-probabilistic.
-# This makes it easier to identify each curve.
-#
-MODEL = {{vx_fcst_model_name}}_ensprob
-#
-# Name to identify observation data in output.
-#
-OBTYPE = {{obtype}}
-#
-# Value to enter under the DESC column in the output stat file.
-#
-POINT_STAT_DESC = NA
-
-# Regrid to specified grid. Indicate NONE if no regridding, or the grid id
-# (e.g. G212)
-POINT_STAT_REGRID_TO_GRID = NONE
-POINT_STAT_REGRID_METHOD = BILIN
-POINT_STAT_REGRID_WIDTH = 2
-
-POINT_STAT_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE}
-
-# sets the -obs_valid_beg command line argument (optional)
-# not used for this example
-#POINT_STAT_OBS_VALID_BEG = {valid?fmt=%Y%m%d_%H}
-
-# sets the -obs_valid_end command line argument (optional)
-# not used for this example
-#POINT_STAT_OBS_VALID_END = {valid?fmt=%Y%m%d_%H}
-
-# Verification Masking regions
-# Indicate which grid and polygon masking region, if applicable
-POINT_STAT_GRID =
-
-# List of full path to poly masking files. NOTE: Only short lists of poly
-# files work (those that fit on one line), a long list will result in an
-# environment variable that is too long, resulting in an error. For long
-# lists of poly masking files (i.e. all the mask files in the NCEP_mask
-# directory), define these in the METplus PointStat configuration file.
-POINT_STAT_POLY = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly
-POINT_STAT_STATION_ID =
-
-# Message types, if all message types are to be returned, leave this empty,
-# otherwise indicate the message types of interest.
-POINT_STAT_MESSAGE_TYPE = {{fieldname_in_met_filedir_names}}
-
-# set to True to run PointStat once for each name/level combination
-# set to False to run PointStat once per run time including all fields
-POINT_STAT_ONCE_PER_FIELD = False
-#
-# List of forecast and corresponding observation fields to process.
-# Note that the forecast variable name must exactly match the name of a
-# variable in the forecast input file(s).
-#
-FCST_VAR1_NAME = TMP_Z2_ENS_FREQ_ge268
-FCST_VAR1_LEVELS = Z2
-FCST_VAR1_THRESH = ==0.1
-OBS_VAR1_NAME = TMP
-OBS_VAR1_LEVELS = Z2
-OBS_VAR1_THRESH = ge268
-
-FCST_VAR2_NAME = TMP_Z2_ENS_FREQ_ge273
-FCST_VAR2_LEVELS = Z2
-FCST_VAR2_THRESH = ==0.1
-OBS_VAR2_NAME = TMP
-OBS_VAR2_LEVELS = Z2
-OBS_VAR2_THRESH = ge273
-
-FCST_VAR3_NAME = TMP_Z2_ENS_FREQ_ge278
-FCST_VAR3_LEVELS = Z2
-FCST_VAR3_THRESH = ==0.1
-OBS_VAR3_NAME = TMP
-OBS_VAR3_LEVELS = Z2
-OBS_VAR3_THRESH = ge278
-
-FCST_VAR4_NAME = TMP_Z2_ENS_FREQ_ge293
-FCST_VAR4_LEVELS = Z2
-FCST_VAR4_THRESH = ==0.1
-OBS_VAR4_NAME = TMP
-OBS_VAR4_LEVELS = Z2
-OBS_VAR4_THRESH = ge293
-
-FCST_VAR5_NAME = TMP_Z2_ENS_FREQ_ge298
-FCST_VAR5_LEVELS = Z2
-FCST_VAR5_THRESH = ==0.1
-OBS_VAR5_NAME = TMP
-OBS_VAR5_LEVELS = Z2
-OBS_VAR5_THRESH = ge298
-
-FCST_VAR6_NAME = TMP_Z2_ENS_FREQ_ge303
-FCST_VAR6_LEVELS = Z2
-FCST_VAR6_THRESH = ==0.1
-OBS_VAR6_NAME = TMP
-OBS_VAR6_LEVELS = Z2
-OBS_VAR6_THRESH = ge303
-
-FCST_VAR7_NAME = DPT_Z2_ENS_FREQ_ge263
-FCST_VAR7_LEVELS = Z2
-FCST_VAR7_THRESH = ==0.1
-OBS_VAR7_NAME = DPT
-OBS_VAR7_LEVELS = Z2
-OBS_VAR7_THRESH = ge263
-
-FCST_VAR8_NAME = DPT_Z2_ENS_FREQ_ge268
-FCST_VAR8_LEVELS = Z2
-FCST_VAR8_THRESH = ==0.1
-OBS_VAR8_NAME = DPT
-OBS_VAR8_LEVELS = Z2
-OBS_VAR8_THRESH = ge268
-
-FCST_VAR9_NAME = DPT_Z2_ENS_FREQ_ge273
-FCST_VAR9_LEVELS = Z2
-FCST_VAR9_THRESH = ==0.1
-OBS_VAR9_NAME = DPT
-OBS_VAR9_LEVELS = Z2
-OBS_VAR9_THRESH = ge273
-
-FCST_VAR10_NAME = DPT_Z2_ENS_FREQ_ge288
-FCST_VAR10_LEVELS = Z2
-FCST_VAR10_THRESH = ==0.1
-OBS_VAR10_NAME = DPT
-OBS_VAR10_LEVELS = Z2
-OBS_VAR10_THRESH = ge288
-
-FCST_VAR11_NAME = DPT_Z2_ENS_FREQ_ge293
-FCST_VAR11_LEVELS = Z2
-FCST_VAR11_THRESH = ==0.1
-OBS_VAR11_NAME = DPT
-OBS_VAR11_LEVELS = Z2
-OBS_VAR11_THRESH = ge293
-
-FCST_VAR12_NAME = DPT_Z2_ENS_FREQ_ge298
-FCST_VAR12_LEVELS = Z2
-FCST_VAR12_THRESH = ==0.1
-OBS_VAR12_NAME = DPT
-OBS_VAR12_LEVELS = Z2
-OBS_VAR12_THRESH = ge298
-
-FCST_VAR13_NAME = WIND_Z10_ENS_FREQ_ge5
-FCST_VAR13_LEVELS = Z10
-FCST_VAR13_THRESH = ==0.1
-OBS_VAR13_NAME = WIND
-OBS_VAR13_LEVELS = Z10
-OBS_VAR13_THRESH = ge5
-
-FCST_VAR14_NAME = WIND_Z10_ENS_FREQ_ge10
-FCST_VAR14_LEVELS = Z10
-FCST_VAR14_THRESH = ==0.1
-OBS_VAR14_NAME = WIND
-OBS_VAR14_LEVELS = Z10
-OBS_VAR14_THRESH = ge10
-
-FCST_VAR15_NAME = WIND_Z10_ENS_FREQ_ge15
-FCST_VAR15_LEVELS = Z10
-FCST_VAR15_THRESH = ==0.1
-OBS_VAR15_NAME = WIND
-OBS_VAR15_LEVELS = Z10
-OBS_VAR15_THRESH = ge15
-
-FCST_VAR16_NAME = TCDC_L0_ENS_FREQ_lt25
-FCST_VAR16_LEVELS = L0
-FCST_VAR16_THRESH = ==0.1
-OBS_VAR16_NAME = TCDC
-OBS_VAR16_LEVELS = L0
-OBS_VAR16_THRESH = lt25
-
-FCST_VAR17_NAME = TCDC_L0_ENS_FREQ_gt75
-FCST_VAR17_LEVELS = L0
-FCST_VAR17_THRESH = ==0.1
-OBS_VAR17_NAME = TCDC
-OBS_VAR17_LEVELS = L0
-OBS_VAR17_THRESH = gt75
-
-FCST_VAR18_NAME = VIS_L0_ENS_FREQ_lt1609
-FCST_VAR18_LEVELS = L0
-FCST_VAR18_THRESH = ==0.1
-FCST_VAR18_OPTIONS = interp = { type = [ { method = NEAREST; width = 1; } ]; }
-OBS_VAR18_NAME = VIS
-OBS_VAR18_LEVELS = L0
-OBS_VAR18_THRESH = lt1609
-OBS_VAR18_OPTIONS = interp = { type = [ { method = NEAREST; width = 1; } ]; }
-
-FCST_VAR19_NAME = VIS_L0_ENS_FREQ_lt8045
-FCST_VAR19_LEVELS = L0
-FCST_VAR19_THRESH = ==0.1
-FCST_VAR19_OPTIONS = interp = { type = [ { method = NEAREST; width = 1; } ]; }
-OBS_VAR19_NAME = VIS
-OBS_VAR19_LEVELS = L0
-OBS_VAR19_THRESH = lt8045
-OBS_VAR19_OPTIONS = interp = { type = [ { method = NEAREST; width = 1; } ]; }
-
-FCST_VAR20_NAME = VIS_L0_ENS_FREQ_ge8045
-FCST_VAR20_LEVELS = L0
-FCST_VAR20_THRESH = ==0.1
-FCST_VAR20_OPTIONS = interp = { type = [ { method = NEAREST; width = 1; } ]; }
-OBS_VAR20_NAME = VIS
-OBS_VAR20_LEVELS = L0
-OBS_VAR20_THRESH = ge8045
-OBS_VAR20_OPTIONS = interp = { type = [ { method = NEAREST; width = 1; } ]; }
-
-FCST_VAR21_NAME = HGT_L0_ENS_FREQ_lt152
-FCST_VAR21_LEVELS = L0
-FCST_VAR21_THRESH = ==0.1
-FCST_VAR21_OPTIONS = desc = "CEILING";
-OBS_VAR21_NAME = CEILING
-OBS_VAR21_LEVELS = L0
-OBS_VAR21_THRESH = lt152
-OBS_VAR21_OPTIONS = GRIB_lvl_typ = 215;
- interp = { type = [ { method = NEAREST; width = 1; } ]; }
-
-FCST_VAR22_NAME = HGT_L0_ENS_FREQ_lt1520
-FCST_VAR22_LEVELS = L0
-FCST_VAR22_THRESH = ==0.1
-FCST_VAR22_OPTIONS = desc = "CEILING";
-OBS_VAR22_NAME = CEILING
-OBS_VAR22_LEVELS = L0
-OBS_VAR22_THRESH = lt1520
-OBS_VAR22_OPTIONS = GRIB_lvl_typ = 215;
- interp = { type = [ { method = NEAREST; width = 1; } ]; }
-
-FCST_VAR23_NAME = HGT_L0_ENS_FREQ_ge914
-FCST_VAR23_LEVELS = L0
-FCST_VAR23_THRESH = ==0.1
-FCST_VAR23_OPTIONS = desc = "CEILING";
-OBS_VAR23_NAME = CEILING
-OBS_VAR23_LEVELS = L0
-OBS_VAR23_THRESH = ge914
-OBS_VAR23_OPTIONS = GRIB_lvl_typ = 215;
- interp = { type = [ { method = NEAREST; width = 1; } ]; }
-
-#
-# Forecast data description variables
-#
-FCST_IS_PROB = True
-FCST_PROB_IN_GRIB_PDS = False
-
-# End of [config] section and start of [dir] section.
-[dir]
-#
-# Directory containing observation input to PointStat.
-#
-OBS_POINT_STAT_INPUT_DIR = {{obs_input_dir}}
-#
-# Directory containing forecast input to PointStat.
-#
-FCST_POINT_STAT_INPUT_DIR = {{fcst_input_dir}}
-#
-# Directory containing climatology mean input to PointStat. Not used in
-# this example.
-#
-POINT_STAT_CLIMO_MEAN_INPUT_DIR =
-#
-# Directory containing climatology mean input to PointStat. Not used in
-# this example.
-#
-POINT_STAT_CLIMO_STDEV_INPUT_DIR =
-#
-# Directory in which to write output from PointStat.
-#
-# OUTPUT_BASE apparently has to be set to something; it cannot be left
-# to its default value. But it is not explicitly used elsewhere in this
-# configuration file.
-#
-OUTPUT_BASE = {{output_base}}
-POINT_STAT_OUTPUT_DIR = {{output_dir}}
-#
-# Directory for staging data.
-#
-STAGING_DIR = {{staging_dir}}
-
-# End of [dir] section and start of [filename_templates] section.
-[filename_templates]
-#
-# Template for observation input to PointStat relative to
-# OBS_POINT_STAT_INPUT_DIR.
-#
-OBS_POINT_STAT_INPUT_TEMPLATE = {{obs_input_fn_template}}
-#
-# Template for forecast input to PointStat relative to
-# FCST_POINT_STAT_INPUT_DIR.
-#
-FCST_POINT_STAT_INPUT_TEMPLATE = {{fcst_input_fn_template}}
-#
-# Template for output from PointStat relative to POINT_STAT_OUTPUT_DIR.
-#
-POINT_STAT_OUTPUT_TEMPLATE =
-#
-# Template for climatology input to PointStat relative to
-# POINT_STAT_CLIMO_MEAN_INPUT_DIR. Not used in this example.
-#
-POINT_STAT_CLIMO_MEAN_INPUT_TEMPLATE =
-#
-# Template for climatology input to PointStat relative to
-# POINT_STAT_CLIMO_STDEV_INPUT_DIR. Not used in this example.
-#
-POINT_STAT_CLIMO_STDEV_INPUT_TEMPLATE =
diff --git a/parm/metplus/PointStat_ensprob_ADPUPA.conf b/parm/metplus/PointStat_ensprob_ADPUPA.conf
deleted file mode 100644
index eab0270c69..0000000000
--- a/parm/metplus/PointStat_ensprob_ADPUPA.conf
+++ /dev/null
@@ -1,523 +0,0 @@
-# Ensemble probabilistic PointStat METplus Configuration
-
-[config]
-
-# List of applications (tools) to run.
-PROCESS_LIST = PointStat
-
-# time looping - options are INIT, VALID, RETRO, and REALTIME
-# If set to INIT or RETRO:
-# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set
-# If set to VALID or REALTIME:
-# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set
-LOOP_BY = INIT
-
-# Format of INIT_BEG and INIT_END using % items
-# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc.
-# see www.strftime.org for more information
-# %Y%m%d%H expands to YYYYMMDDHH
-INIT_TIME_FMT = %Y%m%d%H
-
-# Start time for METplus run - must match INIT_TIME_FMT
-INIT_BEG = {{cdate}}
-
-# End time for METplus run - must match INIT_TIME_FMT
-INIT_END = {{cdate}}
-
-# Increment between METplus runs (in seconds if no units are specified).
-# Must be >= 60 seconds.
-INIT_INCREMENT = 3600
-
-# List of forecast leads to process for each run time (init or valid)
-# In hours if units are not specified
-# If unset, defaults to 0 (don't loop through forecast leads)
-LEAD_SEQ = {{fhr_list}}
-#
-# Order of loops to process data - Options are times, processes
-# Not relevant if only one item is in the PROCESS_LIST
-# times = run all wrappers in the PROCESS_LIST for a single run time, then
-# increment the run time and run all wrappers again until all times have
-# been evaluated.
-# processes = run the first wrapper in the PROCESS_LIST for all times
-# specified, then repeat for the next item in the PROCESS_LIST until all
-# wrappers have been run
-#
-LOOP_ORDER = times
-#
-# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud.
-#
-LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}}
-#
-# Specify the name of the METplus log file.
-#
-LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}}
-#
-# Specify the location and name of the final METplus conf file.
-#
-METPLUS_CONF = {POINT_STAT_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}}
-#
-# Location of MET configuration file to pass to PointStat.
-#
-# References PARM_BASE, which is the location of the parm directory
-# corresponding to the ush directory of the run_metplus.py script that
-# is called or the value of the environment variable METPLUS_PARM_BASE
-# if set.
-#
-POINT_STAT_CONFIG_FILE = {PARM_BASE}/met_config/PointStatConfig_wrapped
-
-POINT_STAT_OBS_QUALITY_INC = 0, 1, 2, 3, 9, NA
-#POINT_STAT_OBS_QUALITY_EXC =
-
-POINT_STAT_CLIMO_MEAN_TIME_INTERP_METHOD = NEAREST
-#POINT_STAT_CLIMO_STDEV_TIME_INTERP_METHOD =
-
-#POINT_STAT_INTERP_VLD_THRESH =
-#POINT_STAT_INTERP_SHAPE =
-POINT_STAT_INTERP_TYPE_METHOD = BILIN
-POINT_STAT_INTERP_TYPE_WIDTH = 2
-
-#POINT_STAT_OUTPUT_FLAG_FHO =
-#POINT_STAT_OUTPUT_FLAG_CTC =
-#POINT_STAT_OUTPUT_FLAG_CTS =
-#POINT_STAT_OUTPUT_FLAG_MCTC =
-#POINT_STAT_OUTPUT_FLAG_MCTS =
-#POINT_STAT_OUTPUT_FLAG_CNT =
-#POINT_STAT_OUTPUT_FLAG_SL1L2 =
-#POINT_STAT_OUTPUT_FLAG_SAL1L2 =
-#POINT_STAT_OUTPUT_FLAG_VL1L2 =
-#POINT_STAT_OUTPUT_FLAG_VAL1L2 =
-#POINT_STAT_OUTPUT_FLAG_VCNT =
-POINT_STAT_OUTPUT_FLAG_PCT = STAT
-POINT_STAT_OUTPUT_FLAG_PSTD = STAT
-POINT_STAT_OUTPUT_FLAG_PJC = STAT
-POINT_STAT_OUTPUT_FLAG_PRC = STAT
-#POINT_STAT_OUTPUT_FLAG_ECNT =
-#POINT_STAT_OUTPUT_FLAG_RPS =
-#POINT_STAT_OUTPUT_FLAG_ECLV =
-#POINT_STAT_OUTPUT_FLAG_MPR =
-#POINT_STAT_OUTPUT_FLAG_ORANK =
-
-POINT_STAT_CLIMO_CDF_BINS = 1
-#POINT_STAT_CLIMO_CDF_CENTER_BINS = False
-#POINT_STAT_CLIMO_CDF_WRITE_BINS = True
-
-#POINT_STAT_HSS_EC_VALUE =
-
-#
-# Observation data time window(s).
-#
-OBS_WINDOW_BEGIN = -1799
-OBS_WINDOW_END = 1800
-OBS_POINT_STAT_WINDOW_BEGIN = {OBS_WINDOW_BEGIN}
-OBS_POINT_STAT_WINDOW_END = {OBS_WINDOW_END}
-
-# Optional list of offsets to look for point observation data
-POINT_STAT_OFFSETS = 0
-#
-# Name to identify model (forecast) data in output.
-#
-# The variable MODEL is recorded in the stat files, and the data in
-# these files is then plotted (e.g. using METViewer). Here, we add a
-# suffix to MODEL that identifies the data as ensemble-probabilistic.
-# This makes it easier to identify each curve.
-#
-MODEL = {{vx_fcst_model_name}}_ensprob
-#
-# Name to identify observation data in output.
-#
-OBTYPE = {{obtype}}
-#
-# Value to enter under the DESC column in the output stat file.
-#
-POINT_STAT_DESC = NA
-
-# Regrid to specified grid. Indicate NONE if no regridding, or the grid id
-# (e.g. G212)
-POINT_STAT_REGRID_TO_GRID = NONE
-POINT_STAT_REGRID_METHOD = BILIN
-POINT_STAT_REGRID_WIDTH = 2
-
-POINT_STAT_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE}
-
-# sets the -obs_valid_beg command line argument (optional)
-# not used for this example
-#POINT_STAT_OBS_VALID_BEG = {valid?fmt=%Y%m%d_%H}
-
-# sets the -obs_valid_end command line argument (optional)
-# not used for this example
-#POINT_STAT_OBS_VALID_END = {valid?fmt=%Y%m%d_%H}
-
-# Verification Masking regions
-# Indicate which grid and polygon masking region, if applicable
-POINT_STAT_GRID =
-
-# List of full path to poly masking files. NOTE: Only short lists of poly
-# files work (those that fit on one line), a long list will result in an
-# environment variable that is too long, resulting in an error. For long
-# lists of poly masking files (i.e. all the mask files in the NCEP_mask
-# directory), define these in the METplus PointStat configuration file.
-POINT_STAT_POLY = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly
-POINT_STAT_STATION_ID =
-
-# Message types, if all message types are to be returned, leave this empty,
-# otherwise indicate the message types of interest.
-POINT_STAT_MESSAGE_TYPE = {{fieldname_in_met_filedir_names}}
-
-# set to True to run PointStat once for each name/level combination
-# set to False to run PointStat once per run time including all fields
-POINT_STAT_ONCE_PER_FIELD = False
-#
-# List of forecast and corresponding observation fields to process.
-# Note that the forecast variable name must exactly match the name of a
-# variable in the forecast input file(s).
-#
-FCST_VAR1_NAME = TMP_P850_ENS_FREQ_ge288
-FCST_VAR1_LEVELS = P850
-FCST_VAR1_THRESH = ==0.1
-OBS_VAR1_NAME = TMP
-OBS_VAR1_LEVELS = P850
-OBS_VAR1_THRESH = ge288
-
-FCST_VAR2_NAME = TMP_P850_ENS_FREQ_ge293
-FCST_VAR2_LEVELS = P850
-FCST_VAR2_THRESH = ==0.1
-OBS_VAR2_NAME = TMP
-OBS_VAR2_LEVELS = P850
-OBS_VAR2_THRESH = ge293
-
-FCST_VAR3_NAME = TMP_P850_ENS_FREQ_ge298
-FCST_VAR3_LEVELS = P850
-FCST_VAR3_THRESH = ==0.1
-OBS_VAR3_NAME = TMP
-OBS_VAR3_LEVELS = P850
-OBS_VAR3_THRESH = ge298
-
-FCST_VAR4_NAME = TMP_P700_ENS_FREQ_ge273
-FCST_VAR4_LEVELS = P700
-FCST_VAR4_THRESH = ==0.1
-OBS_VAR4_NAME = TMP
-OBS_VAR4_LEVELS = P700
-OBS_VAR4_THRESH = ge273
-
-FCST_VAR5_NAME = TMP_P700_ENS_FREQ_ge278
-FCST_VAR5_LEVELS = P700
-FCST_VAR5_THRESH = ==0.1
-OBS_VAR5_NAME = TMP
-OBS_VAR5_LEVELS = P700
-OBS_VAR5_THRESH = ge278
-
-FCST_VAR6_NAME = TMP_P700_ENS_FREQ_ge283
-FCST_VAR6_LEVELS = P700
-FCST_VAR6_THRESH = ==0.1
-OBS_VAR6_NAME = TMP
-OBS_VAR6_LEVELS = P700
-OBS_VAR6_THRESH = ge283
-
-FCST_VAR7_NAME = TMP_P500_ENS_FREQ_ge258
-FCST_VAR7_LEVELS = P500
-FCST_VAR7_THRESH = ==0.1
-OBS_VAR7_NAME = TMP
-OBS_VAR7_LEVELS = P500
-OBS_VAR7_THRESH = ge258
-
-FCST_VAR8_NAME = TMP_P500_ENS_FREQ_ge263
-FCST_VAR8_LEVELS = P500
-FCST_VAR8_THRESH = ==0.1
-OBS_VAR8_NAME = TMP
-OBS_VAR8_LEVELS = P500
-OBS_VAR8_THRESH = ge263
-
-FCST_VAR9_NAME = TMP_P500_ENS_FREQ_ge268
-FCST_VAR9_LEVELS = P500
-FCST_VAR9_THRESH = ==0.1
-OBS_VAR9_NAME = TMP
-OBS_VAR9_LEVELS = P500
-OBS_VAR9_THRESH = ge268
-
-FCST_VAR10_NAME = DPT_P850_ENS_FREQ_ge273
-FCST_VAR10_LEVELS = P850
-FCST_VAR10_THRESH = ==0.1
-OBS_VAR10_NAME = DPT
-OBS_VAR10_LEVELS = P850
-OBS_VAR10_THRESH = ge273
-
-FCST_VAR11_NAME = DPT_P850_ENS_FREQ_ge278
-FCST_VAR11_LEVELS = P850
-FCST_VAR11_THRESH = ==0.1
-OBS_VAR11_NAME = DPT
-OBS_VAR11_LEVELS = P850
-OBS_VAR11_THRESH = ge278
-
-FCST_VAR12_NAME = DPT_P850_ENS_FREQ_ge283
-FCST_VAR12_LEVELS = P850
-FCST_VAR12_THRESH = ==0.1
-OBS_VAR12_NAME = DPT
-OBS_VAR12_LEVELS = P850
-OBS_VAR12_THRESH = ge283
-
-FCST_VAR13_NAME = DPT_P700_ENS_FREQ_ge263
-FCST_VAR13_LEVELS = P700
-FCST_VAR13_THRESH = ==0.1
-OBS_VAR13_NAME = DPT
-OBS_VAR13_LEVELS = P700
-OBS_VAR13_THRESH = ge263
-
-FCST_VAR14_NAME = DPT_P700_ENS_FREQ_ge268
-FCST_VAR14_LEVELS = P700
-FCST_VAR14_THRESH = ==0.1
-OBS_VAR14_NAME = DPT
-OBS_VAR14_LEVELS = P700
-OBS_VAR14_THRESH = ge268
-
-FCST_VAR15_NAME = DPT_P700_ENS_FREQ_ge273
-FCST_VAR15_LEVELS = P700
-FCST_VAR15_THRESH = ==0.1
-OBS_VAR15_NAME = DPT
-OBS_VAR15_LEVELS = P700
-OBS_VAR15_THRESH = ge273
-
-FCST_VAR16_NAME = WIND_P850_ENS_FREQ_ge5
-FCST_VAR16_LEVELS = P850
-FCST_VAR16_THRESH = ==0.1
-OBS_VAR16_NAME = WIND
-OBS_VAR16_LEVELS = P850
-OBS_VAR16_THRESH = ge5
-
-FCST_VAR17_NAME = WIND_P850_ENS_FREQ_ge10
-FCST_VAR17_LEVELS = P850
-FCST_VAR17_THRESH = ==0.1
-OBS_VAR17_NAME = WIND
-OBS_VAR17_LEVELS = P850
-OBS_VAR17_THRESH = ge10
-
-FCST_VAR18_NAME = WIND_P850_ENS_FREQ_ge15
-FCST_VAR18_LEVELS = P850
-FCST_VAR18_THRESH = ==0.1
-OBS_VAR18_NAME = WIND
-OBS_VAR18_LEVELS = P850
-OBS_VAR18_THRESH = ge15
-
-FCST_VAR19_NAME = WIND_P700_ENS_FREQ_ge10
-FCST_VAR19_LEVELS = P700
-FCST_VAR19_THRESH = ==0.1
-OBS_VAR19_NAME = WIND
-OBS_VAR19_LEVELS = P700
-OBS_VAR19_THRESH = ge10
-
-FCST_VAR20_NAME = WIND_P700_ENS_FREQ_ge15
-FCST_VAR20_LEVELS = P700
-FCST_VAR20_THRESH = ==0.1
-OBS_VAR20_NAME = WIND
-OBS_VAR20_LEVELS = P700
-OBS_VAR20_THRESH = ge15
-
-FCST_VAR21_NAME = WIND_P700_ENS_FREQ_ge20
-FCST_VAR21_LEVELS = P700
-FCST_VAR21_THRESH = ==0.1
-OBS_VAR21_NAME = WIND
-OBS_VAR21_LEVELS = P700
-OBS_VAR21_THRESH = ge20
-
-FCST_VAR22_NAME = WIND_P500_ENS_FREQ_ge15
-FCST_VAR22_LEVELS = P500
-FCST_VAR22_THRESH = ==0.1
-OBS_VAR22_NAME = WIND
-OBS_VAR22_LEVELS = P500
-OBS_VAR22_THRESH = ge15
-
-FCST_VAR23_NAME = WIND_P500_ENS_FREQ_ge21
-FCST_VAR23_LEVELS = P500
-FCST_VAR23_THRESH = ==0.1
-OBS_VAR23_NAME = WIND
-OBS_VAR23_LEVELS = P500
-OBS_VAR23_THRESH = ge21
-
-FCST_VAR24_NAME = WIND_P500_ENS_FREQ_ge26
-FCST_VAR24_LEVELS = P500
-FCST_VAR24_THRESH = ==0.1
-OBS_VAR24_NAME = WIND
-OBS_VAR24_LEVELS = P500
-OBS_VAR24_THRESH = ge26
-
-FCST_VAR25_NAME = WIND_P250_ENS_FREQ_ge26
-FCST_VAR25_LEVELS = P250
-FCST_VAR25_THRESH = ==0.1
-OBS_VAR25_NAME = WIND
-OBS_VAR25_LEVELS = P250
-OBS_VAR25_THRESH = ge26
-
-FCST_VAR26_NAME = WIND_P250_ENS_FREQ_ge31
-FCST_VAR26_LEVELS = P250
-FCST_VAR26_THRESH = ==0.1
-OBS_VAR26_NAME = WIND
-OBS_VAR26_LEVELS = P250
-OBS_VAR26_THRESH = ge31
-
-FCST_VAR27_NAME = WIND_P250_ENS_FREQ_ge36
-FCST_VAR27_LEVELS = P250
-FCST_VAR27_THRESH = ==0.1
-OBS_VAR27_NAME = WIND
-OBS_VAR27_LEVELS = P250
-OBS_VAR27_THRESH = ge36
-
-FCST_VAR28_NAME = WIND_P250_ENS_FREQ_ge46
-FCST_VAR28_LEVELS = P250
-FCST_VAR28_THRESH = ==0.1
-OBS_VAR28_NAME = WIND
-OBS_VAR28_LEVELS = P250
-OBS_VAR28_THRESH = ge46
-
-FCST_VAR29_NAME = WIND_P250_ENS_FREQ_ge62
-FCST_VAR29_LEVELS = P250
-FCST_VAR29_THRESH = ==0.1
-OBS_VAR29_NAME = WIND
-OBS_VAR29_LEVELS = P250
-OBS_VAR29_THRESH = ge62
-
-FCST_VAR30_NAME = HGT_P500_ENS_FREQ_ge5400
-FCST_VAR30_LEVELS = P500
-FCST_VAR30_THRESH = ==0.1
-OBS_VAR30_NAME = HGT
-OBS_VAR30_LEVELS = P500
-OBS_VAR30_THRESH = ge5400
-
-FCST_VAR31_NAME = HGT_P500_ENS_FREQ_ge5600
-FCST_VAR31_LEVELS = P500
-FCST_VAR31_THRESH = ==0.1
-OBS_VAR31_NAME = HGT
-OBS_VAR31_LEVELS = P500
-OBS_VAR31_THRESH = ge5600
-
-FCST_VAR32_NAME = HGT_P500_ENS_FREQ_ge5880
-FCST_VAR32_LEVELS = P500
-FCST_VAR32_THRESH = ==0.1
-OBS_VAR32_NAME = HGT
-OBS_VAR32_LEVELS = P500
-OBS_VAR32_THRESH = ge5880
-
-FCST_VAR33_NAME = CAPE_L0_ENS_FREQ_le1000
-FCST_VAR33_LEVELS = L0
-FCST_VAR33_THRESH = ==0.1
-OBS_VAR33_NAME = CAPE
-OBS_VAR33_LEVELS = L0-100000
-OBS_VAR33_THRESH = le1000
-OBS_VAR33_OPTIONS = cnt_thresh = [ >0 ];
- cnt_logic = UNION;
-
-FCST_VAR34_NAME = CAPE_L0_ENS_FREQ_gt1000.and.lt2500
-FCST_VAR34_LEVELS = L0
-FCST_VAR34_THRESH = ==0.1
-OBS_VAR34_NAME = CAPE
-OBS_VAR34_LEVELS = L0-100000
-OBS_VAR34_THRESH = gt1000&<2500
-OBS_VAR34_OPTIONS = cnt_thresh = [ >0 ];
- cnt_logic = UNION;
-
-FCST_VAR35_NAME = CAPE_L0_ENS_FREQ_gt2500.and.lt4000
-FCST_VAR35_LEVELS = L0
-FCST_VAR35_THRESH = ==0.1
-OBS_VAR35_NAME = CAPE
-OBS_VAR35_LEVELS = L0-100000
-OBS_VAR35_THRESH = gt2500&<4000
-OBS_VAR35_OPTIONS = cnt_thresh = [ >0 ];
- cnt_logic = UNION;
-
-FCST_VAR36_NAME = CAPE_L0_ENS_FREQ_gt2500
-FCST_VAR36_LEVELS = L0
-FCST_VAR36_THRESH = ==0.1
-OBS_VAR36_NAME = CAPE
-OBS_VAR36_LEVELS = L0-100000
-OBS_VAR36_THRESH = gt2500
-OBS_VAR36_OPTIONS = cnt_thresh = [ >0 ];
- cnt_logic = UNION;
-
-FCST_VAR37_NAME = HPBL_Z0_ENS_FREQ_lt500
-FCST_VAR37_LEVELS = Z0
-FCST_VAR37_THRESH = ==0.1
-OBS_VAR37_NAME = PBL
-OBS_VAR37_LEVELS = L0
-OBS_VAR37_THRESH = lt500
-OBS_VAR37_OPTIONS = desc = "TKE";
-
-FCST_VAR38_NAME = HPBL_Z0_ENS_FREQ_lt1500
-FCST_VAR38_LEVELS = Z0
-FCST_VAR38_THRESH = ==0.1
-OBS_VAR38_NAME = PBL
-OBS_VAR38_LEVELS = L0
-OBS_VAR38_THRESH = lt1500
-OBS_VAR38_OPTIONS = desc = "TKE";
-
-FCST_VAR39_NAME = HPBL_Z0_ENS_FREQ_gt1500
-FCST_VAR39_LEVELS = Z0
-FCST_VAR39_THRESH = ==0.1
-OBS_VAR39_NAME = PBL
-OBS_VAR39_LEVELS = L0
-OBS_VAR39_THRESH = gt1500
-OBS_VAR39_OPTIONS = desc = "TKE";
-
-#
-# Forecast data description variables
-#
-FCST_IS_PROB = True
-FCST_PROB_IN_GRIB_PDS = False
-
-# End of [config] section and start of [dir] section.
-[dir]
-#
-# Directory containing observation input to PointStat.
-#
-OBS_POINT_STAT_INPUT_DIR = {{obs_input_dir}}
-#
-# Directory containing forecast input to PointStat.
-#
-FCST_POINT_STAT_INPUT_DIR = {{fcst_input_dir}}
-#
-# Directory containing climatology mean input to PointStat. Not used in
-# this example.
-#
-POINT_STAT_CLIMO_MEAN_INPUT_DIR =
-#
-# Directory containing climatology mean input to PointStat. Not used in
-# this example.
-#
-POINT_STAT_CLIMO_STDEV_INPUT_DIR =
-#
-# Directory in which to write output from PointStat.
-#
-# OUTPUT_BASE apparently has to be set to something; it cannot be left
-# to its default value. But it is not explicitly used elsewhere in this
-# configuration file.
-#
-OUTPUT_BASE = {{output_base}}
-POINT_STAT_OUTPUT_DIR = {{output_dir}}
-#
-# Directory for staging data.
-#
-STAGING_DIR = {{staging_dir}}
-
-# End of [dir] section and start of [filename_templates] section.
-[filename_templates]
-#
-# Template for observation input to PointStat relative to
-# OBS_POINT_STAT_INPUT_DIR.
-#
-OBS_POINT_STAT_INPUT_TEMPLATE = {{obs_input_fn_template}}
-#
-# Template for forecast input to PointStat relative to
-# FCST_POINT_STAT_INPUT_DIR.
-#
-FCST_POINT_STAT_INPUT_TEMPLATE = {{fcst_input_fn_template}}
-#
-# Template for output from PointStat relative to POINT_STAT_OUTPUT_DIR.
-#
-POINT_STAT_OUTPUT_TEMPLATE =
-#
-# Template for climatology input to PointStat relative to
-# POINT_STAT_CLIMO_MEAN_INPUT_DIR. Not used in this example.
-#
-POINT_STAT_CLIMO_MEAN_INPUT_TEMPLATE =
-#
-# Template for climatology input to PointStat relative to
-# POINT_STAT_CLIMO_STDEV_INPUT_DIR. Not used in this example.
-#
-POINT_STAT_CLIMO_STDEV_INPUT_TEMPLATE =
diff --git a/parm/metplus/metplus_macros.jinja b/parm/metplus/metplus_macros.jinja
new file mode 100644
index 0000000000..94ac5d9485
--- /dev/null
+++ b/parm/metplus/metplus_macros.jinja
@@ -0,0 +1,150 @@
+{#-
+This macro prints out an error message and quits the jinja templater.  It
+works by attempting to include a (nonexistent) template whose name contains
+the error message, which forces jinja to abort and display that message.
+#}
+{%- macro print_err_and_quit(error_msg) %}
+ {%- include 'ERROR: ' ~ error_msg %}
+{%- endmacro %}
+{#-
+Given a specified field level that is really an accumulation period, this
+macro prints out an "A" followed by the accumulation period (an integer)
+with any leading zeros removed. For example, if the level is 'A03', it
+prints out 'A3'.
+#}
+{%- macro get_accumulation_no_zero_pad(level) %}
+ {%- set first_char = level[0] %}
+ {%- set the_rest = level[1:] %}
+ {%- if (first_char == 'A') %}
+ {{- first_char ~ '%d'%the_rest|int }}
+ {%- else %}
+ {{- level }}
+ {%- endif %}
+{%- endmacro %}
+{#-
+This macro checks whether the specified level (input_level) has a valid
+value. input_level may be set to 'all' or to a specific level. If set
+to 'all', input_level is not checked because in this case, whatever valid/
+available levels are found will be included in the METplus configuration
+file for all specified fields. input_level IS checked if it is set to
+any other value because in this case, all the specified fields will use
+only that specific level in the METplus configuration file, which implies
+that the level must be valid for all such fields.
+#}
+{%- macro check_level(fields_levels_threshes, input_level) %}
+
+ {%- if input_level != 'all' %}
+
+ {%- set num_valid_fields = fields_levels_threshes|length %}
+ {%- set valid_fields = [] %}
+ {%- for i in range(0,num_valid_fields) %}
+ {%- set field = fields_levels_threshes[i].keys()|list|join('') %}
+ {%- set tmp = valid_fields.append(field) %}
+ {%- endfor %}
+
+ {%- for i in range(0,num_valid_fields) %}
+ {%- set field = valid_fields[i] %}
+ {%- set valid_levels = fields_levels_threshes[i][field].keys()|list %}
+ {%- if input_level not in valid_levels %}
+ {%- set error_msg = '\n' ~
+ 'The specified level (input_level) is not in the list of valid levels\n' ~
+ '(valid_levels) for the current field (field):\n' ~
+ ' field = \'' ~ field ~ '\'\n' ~
+        '  valid_levels = ' ~ valid_levels ~ '\n' ~
+        '  input_level = \'' ~ input_level ~ '\'\n' ~
+ 'input_level must either be set to the string \'all\' (to include all valid\n' ~
+ 'values in the verification) or to one of the elements in valid_levels.' %}
+ {{print_err_and_quit(error_msg)}}
+ {%- endif %}
+ {%- endfor %}
+
+ {%- endif %}
+
+{%- endmacro %}
+{#-
+This macro checks whether the specified threshold (input_thresh) has a
+valid value. input_thresh may be set to 'none', 'all', or a specific
+threshold. If set to 'none', input_thresh is not checked for a valid
+value since threshold information will not be included in the METplus
+configuration file. input_thresh is also not checked for a valid value
+if it is set to 'all' because in this case, whatever valid/available thresholds
+are found will be included in the METplus configuration file for all
+specified field and level combinations. Finally, input_thresh IS checked
+for a valid value if it is set to something other than 'none' and 'all'
+because in this case, all specified field and level combinations (where
+the latter, depending on the value of input_level, may be either all
+valid/available levels or a single one) will use only that specific
+threshold in the METplus configuration file, which implies that the
+threshold must be valid for all such field and level combinations.
+#}
+{%- macro check_thresh(fields_levels_threshes, input_level, input_thresh) %}
+
+ {%- if (input_thresh != 'none') and (input_thresh != 'all') %}
+
+ {%- set num_valid_fields = fields_levels_threshes|length %}
+ {%- set valid_fields = [] %}
+ {%- for i in range(0,num_valid_fields) %}
+ {%- set field = fields_levels_threshes[i].keys()|list|join('') %}
+ {%- set tmp = valid_fields.append(field) %}
+ {%- endfor %}
+
+ {%- for i in range(0,num_valid_fields) %}
+ {%- set field = valid_fields[i] %}
+ {%- set valid_levels = fields_levels_threshes[i][field].keys()|list %}
+ {%- set valid_levels_threshes = fields_levels_threshes[i][field] %}
+
+ {%- for level, valid_threshes in valid_levels_threshes.items() %}
+ {%- if (input_level == 'all') or (input_level == level) %}
+ {%- if input_thresh not in valid_threshes %}
+ {%- set error_msg = '\n' ~
+'The specified threshold (input_thresh) is not in the list of valid\n' ~
+'thresholds (valid_threshes) for the current field (field) and level\n' ~
+'(level) combination:\n' ~
+' field = \'' ~ field ~ '\'\n' ~
+' level = \'' ~ level ~ '\'\n' ~
+'  valid_threshes = ' ~ valid_threshes ~ '\n' ~
+'  input_thresh = \'' ~ input_thresh ~ '\'\n' ~
+'input_thresh must be set to the string \'all\' (to include in the METplus\n' ~
+'configuration file all thresholds for each valid combination of field and\n' ~
+'level), to the string \'none\' (to include no threshold information in the\n' ~
+'METplus configuration file), or to one of the elements in valid_threshes\n' ~
+'(to include only that specific threshold in the METplus configuration file).' %}
+ {{print_err_and_quit(error_msg)}}
+ {%- endif %}
+ {%- endif %}
+
+ {%- endfor %}
+
+ {%- endfor %}
+
+ {%- endif %}
+
+{%- endmacro %}
+{#-
+This macro checks whether, for the given field, the lists of thresholds
+for all levels are identical. If not, it prints out an error message
+and quits.
+#}
+{%- macro check_for_identical_threshes_by_level(field, levels_threshes) %}
+ {%- set avail_levels = levels_threshes[field].keys()|list %}
+ {%- set num_avail_levels = avail_levels|length %}
+ {%- set threshes_by_avail_level = levels_threshes[field].values()|list %}
+ {%- for i in range(1,num_avail_levels) %}
+ {%- set level = avail_levels[i-1] %}
+ {%- set threshes = threshes_by_avail_level[i-1] %}
+ {%- set level_next = avail_levels[i] %}
+ {%- set threshes_next = threshes_by_avail_level[i] %}
+ {%- if (threshes_next != threshes) %}
+ {%- set error_msg = '\n\n' ~
+'For the given field (field), the set of thresholds for the next level\n' ~
+'(threshes_next, level_next) is not equal to that of the current level\n' ~
+'(threshes, level) (note that order of thresholds matters here):\n' ~
+' field = \'' ~ field ~ '\'\n' ~
+' num_avail_levels = ' ~ num_avail_levels ~ '\n' ~
+' level = \'' ~ level ~ '\'\n' ~
+' threshes = ' ~ threshes ~ '\n' ~
+' level_next = \'' ~ level_next ~ '\'\n' ~
+' threshes_next = ' ~ threshes_next ~ '\n'
+ %}
+ {{print_err_and_quit(error_msg)}}
+ {%- endif %}
+ {%- endfor %}
+{%- endmacro %}
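
As a quick illustration of the accumulation-period handling above, here is a
minimal Python sketch (an editorial aside, not part of the patch) that mirrors
what the get_accumulation_no_zero_pad macro does:

    # Python mirror of the get_accumulation_no_zero_pad jinja macro: strip
    # leading zeros from an accumulation level such as 'A03'; pass any
    # non-accumulation level through unchanged.
    def get_accumulation_no_zero_pad(level: str) -> str:
        if level.startswith('A'):
            return 'A%d' % int(level[1:])
        return level

    assert get_accumulation_no_zero_pad('A03') == 'A3'
    assert get_accumulation_no_zero_pad('A24') == 'A24'
    assert get_accumulation_no_zero_pad('P850') == 'P850'
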
diff --git a/parm/metplus/vx_config_det.yaml b/parm/metplus/vx_config_det.yaml
new file mode 100644
index 0000000000..4c721176c6
--- /dev/null
+++ b/parm/metplus/vx_config_det.yaml
@@ -0,0 +1,204 @@
+#
+# This configuration file specifies the field groups, fields, levels,
+# and thresholds to use for DETERMINISTIC verification. The format is
+# as follows:
+#
+# FIELD_GROUP1:
+# FIELD1:
+# LEVEL1: list_of_thresholds
+# LEVEL2: list_of_thresholds
+# ...
+# FIELD2:
+# LEVEL1: list_of_thresholds
+# LEVEL2: list_of_thresholds
+# ...
+# ...
+#
+# FIELD_GROUP2:
+# FIELD1:
+# LEVEL1: list_of_thresholds
+# LEVEL2: list_of_thresholds
+# ...
+# FIELD2:
+# LEVEL1: list_of_thresholds
+# LEVEL2: list_of_thresholds
+# ...
+# ...
+#
+# ...
+#
+# If the threshold list for a given combination of field group, field,
+# and level is set to the empty list ([]), then all values of that
+# field will be included in the verification.
+#
+# Both the keys that represent field groups, fields, and levels and the
+# strings in the list of thresholds may contain the separator string "%%"
+# that separates the value of the quantity for the forecast from that for
+# the observations. For example, if a field is set to
+#
+# RETOP%%EchoTop18
+#
+# it means the name of the field in the forecast data is RETOP while its
+# name in the observations is EchoTop18.
+#
+APCP:
+ APCP:
+ A1: ['gt0.0', 'ge0.254', 'ge0.508', 'ge1.27', 'ge2.54']
+ A3: ['gt0.0', 'ge0.254', 'ge0.508', 'ge1.27', 'ge2.54', 'ge3.810', 'ge6.350']
+ A6: ['gt0.0', 'ge0.254', 'ge0.508', 'ge1.27', 'ge2.54', 'ge3.810', 'ge6.350', 'ge8.890', 'ge12.700']
+ A24: ['gt0.0', 'ge0.254', 'ge0.508', 'ge1.27', 'ge2.54', 'ge3.810', 'ge6.350', 'ge8.890', 'ge12.700', 'ge25.400']
+ASNOW:
+ ASNOW:
+ A6: ['gt0.0', 'ge2.54', 'ge5.08', 'ge10.16', 'ge20.32']
+REFC:
+ REFC%%MergedReflectivityQCComposite:
+ L0%%Z500: ['ge20', 'ge30', 'ge40', 'ge50']
+RETOP:
+ RETOP%%EchoTop18:
+ L0%%Z500: ['ge20', 'ge30', 'ge40', 'ge50']
+ADPSFC:
+ TMP:
+ Z2: []
+ DPT:
+ Z2: []
+ RH:
+ Z2: []
+ UGRD:
+ Z10: ['ge2.572']
+ VGRD:
+ Z10: ['ge2.572']
+ WIND:
+ Z10: ['ge2.572', 'ge2.572&<5.144', 'ge5.144', 'ge10.288', 'ge15.433']
+ PRMSL:
+ Z0: []
+ TCDC:
+ L0: []
+ VIS:
+ L0: ['lt805', 'lt1609', 'lt4828', 'lt8045', 'ge8045', 'lt16090']
+ GUST:
+ Z0: []
+ HGT%%CEILING:
+ L0: ['lt152', 'lt305', 'lt914', 'lt1520', 'lt3040', 'ge914']
+ SPFH:
+ Z2: []
+ CRAIN%%PRWE:
+ L0%%Z0: ['ge1.0%%ge161&&le163']
+ CSNOW%%PRWE:
+ L0%%Z0: ['ge1.0%%ge171&&le173']
+ CFRZR%%PRWE:
+ L0%%Z0: ['ge1.0%%ge164&&le166']
+ CICEP%%PRWE:
+ L0%%Z0: ['ge1.0%%ge174&&le176']
+ADPUPA:
+ TMP:
+ P1000: []
+ P925: []
+ P850: []
+ P700: []
+ P500: []
+ P400: []
+ P300: []
+ P250: []
+ P200: []
+ P150: []
+ P100: []
+ P50: []
+ P20: []
+ P10: []
+ RH:
+ P1000: []
+ P925: []
+ P850: []
+ P700: []
+ P500: []
+ P400: []
+ P300: []
+ P250: []
+ DPT:
+ P1000: []
+ P925: []
+ P850: []
+ P700: []
+ P500: []
+ P400: []
+ P300: []
+ UGRD:
+ P1000: ['ge2.572']
+ P925: ['ge2.572']
+ P850: ['ge2.572']
+ P700: ['ge2.572']
+ P500: ['ge2.572']
+ P400: ['ge2.572']
+ P300: ['ge2.572']
+ P250: ['ge2.572']
+ P200: ['ge2.572']
+ P150: ['ge2.572']
+ P100: ['ge2.572']
+ P50: ['ge2.572']
+ P20: ['ge2.572']
+ P10: ['ge2.572']
+ VGRD:
+ P1000: ['ge2.572']
+ P925: ['ge2.572']
+ P850: ['ge2.572']
+ P700: ['ge2.572']
+ P500: ['ge2.572']
+ P400: ['ge2.572']
+ P300: ['ge2.572']
+ P250: ['ge2.572']
+ P200: ['ge2.572']
+ P150: ['ge2.572']
+ P100: ['ge2.572']
+ P50: ['ge2.572']
+ P20: ['ge2.572']
+ P10: ['ge2.572']
+ WIND:
+ P1000: ['ge2.572', 'ge2.572&<5.144', 'ge5.144', 'ge10.288', 'ge15.433', 'ge20.577', 'ge25.722']
+ P925: ['ge2.572', 'ge2.572&<5.144', 'ge5.144', 'ge10.288', 'ge15.433', 'ge20.577', 'ge25.722']
+ P850: ['ge2.572', 'ge2.572&<5.144', 'ge5.144', 'ge10.288', 'ge15.433', 'ge20.577', 'ge25.722']
+ P700: ['ge2.572', 'ge2.572&<5.144', 'ge5.144', 'ge10.288', 'ge15.433', 'ge20.577', 'ge25.722']
+ P500: ['ge2.572', 'ge2.572&<5.144', 'ge5.144', 'ge10.288', 'ge15.433', 'ge20.577', 'ge25.722']
+ P400: ['ge2.572', 'ge2.572&<5.144', 'ge5.144', 'ge10.288', 'ge15.433', 'ge20.577', 'ge25.722']
+ P300: ['ge2.572', 'ge2.572&<5.144', 'ge5.144', 'ge10.288', 'ge15.433', 'ge20.577', 'ge25.722']
+ P250: ['ge2.572', 'ge2.572&<5.144', 'ge5.144', 'ge10.288', 'ge15.433', 'ge20.577', 'ge25.722']
+ P200: ['ge2.572', 'ge2.572&<5.144', 'ge5.144', 'ge10.288', 'ge15.433', 'ge20.577', 'ge25.722']
+ P150: ['ge2.572', 'ge2.572&<5.144', 'ge5.144', 'ge10.288', 'ge15.433', 'ge20.577', 'ge25.722']
+ P100: ['ge2.572', 'ge2.572&<5.144', 'ge5.144', 'ge10.288', 'ge15.433', 'ge20.577', 'ge25.722']
+ P50: ['ge2.572', 'ge2.572&<5.144', 'ge5.144', 'ge10.288', 'ge15.433', 'ge20.577', 'ge25.722']
+ P20: ['ge2.572', 'ge2.572&<5.144', 'ge5.144', 'ge10.288', 'ge15.433', 'ge20.577', 'ge25.722']
+ P10: ['ge2.572', 'ge2.572&<5.144', 'ge5.144', 'ge10.288', 'ge15.433', 'ge20.577', 'ge25.722']
+ HGT:
+ P1000: []
+ P950: []
+ P925: []
+ P850: []
+ P700: []
+ P500: []
+ P400: []
+ P300: []
+ P250: []
+ P200: []
+ P150: []
+ P100: []
+ P50: []
+ P20: []
+ P10: []
+ SPFH:
+ P1000: []
+ P850: []
+ P700: []
+ P500: []
+ P400: []
+ P300: []
+ CAPE:
+ L0%%L0-100000:
+ ['gt500', 'gt1000', 'gt1500', 'gt2000', 'gt3000', 'gt4000']
+ HPBL%%PBL:
+ Z0%%L0:
+ []
+ HGT%%PBL:
+ L0:
+ []
+ CAPE%%MLCAPE:
+ L0-90%%L0:
+ ['gt500', 'gt1000', 'gt1500', 'gt2000', 'gt3000', 'gt4000']
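
To make the "%%" separator convention described in the header comment concrete,
here is a small Python sketch (illustrative only, not part of the patch) of how
a coupled key splits into its forecast and observation parts:

    # Sketch of the "%%" forecast/observation separator convention.
    def split_fcst_obs(item_cpld, delim='%%'):
        # A key without the delimiter applies to both forecasts and obs.
        if delim in item_cpld:
            fcst, obs = item_cpld.split(delim, 1)
            return fcst, obs
        return item_cpld, item_cpld

    assert split_fcst_obs('RETOP%%EchoTop18') == ('RETOP', 'EchoTop18')
    assert split_fcst_obs('TMP') == ('TMP', 'TMP')
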
diff --git a/parm/metplus/vx_config_ens.yaml b/parm/metplus/vx_config_ens.yaml
new file mode 100644
index 0000000000..5f55254a4c
--- /dev/null
+++ b/parm/metplus/vx_config_ens.yaml
@@ -0,0 +1,54 @@
+#
+# This configuration file specifies the field groups, fields, levels,
+# and thresholds to use for ENSEMBLE verification. The format is the
+# same as the one used in the configuration file for deterministic
+# verification (vx_config_det.yaml); please see the documentation in
+# that file for details.
+#
+APCP:
+ APCP:
+ A1: ['gt0.0', 'ge0.254', 'ge0.508', 'ge2.54']
+ A3: ['gt0.0', 'ge0.508', 'ge2.54', 'ge6.350']
+ A6: ['gt0.0', 'ge2.54', 'ge6.350', 'ge12.700']
+ A24: ['gt0.0', 'ge6.350', 'ge12.700', 'ge25.400']
+ASNOW:
+ ASNOW:
+ A6: ['gt0.0', 'ge2.54', 'ge5.08', 'ge10.16', 'ge20.32']
+REFC:
+ REFC%%MergedReflectivityQCComposite:
+ L0%%Z500: ['ge20', 'ge30', 'ge40', 'ge50']
+RETOP:
+ RETOP%%EchoTop18:
+ L0%%Z500: ['ge20', 'ge30', 'ge40', 'ge50']
+ADPSFC:
+ TMP:
+ Z2: ['ge268', 'ge273', 'ge278', 'ge293', 'ge298', 'ge303']
+ DPT:
+ Z2: ['ge263', 'ge268', 'ge273', 'ge288', 'ge293', 'ge298']
+ WIND:
+ Z10: ['ge5', 'ge10', 'ge15']
+ TCDC:
+ L0: ['lt25', 'gt75']
+ VIS:
+ L0: ['lt1609', 'lt8045', 'ge8045']
+ HGT%%CEILING:
+ L0: ['lt152', 'lt305', 'lt914']
+ADPUPA:
+ TMP:
+ P850: ['ge288', 'ge293', 'ge298']
+ P700: ['ge273', 'ge278', 'ge283']
+ P500: ['ge258', 'ge263', 'ge268']
+ DPT:
+ P850: ['ge273', 'ge278', 'ge283']
+ P700: ['ge263', 'ge268', 'ge273']
+ WIND:
+ P850: ['ge5', 'ge10', 'ge15']
+ P700: ['ge10', 'ge15', 'ge20']
+ P500: ['ge15', 'ge21', 'ge26']
+ P250: ['ge26', 'ge31', 'ge36', 'ge46', 'ge62']
+ HGT:
+ P500: ['ge5400', 'ge5600', 'ge5880']
+ CAPE:
+ L0%%L0-100000: ['le1000', 'gt1000&<2500', 'ge2500&<4000', 'ge2500']
+ HPBL%%PBL:
+ Z0%%L0: ['lt500', 'lt1500', 'gt1500']
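
Note that these entries correspond to the ensemble-frequency field names that
were hard-coded in the PointStat configuration deleted above; for example, TMP
at level P850 with threshold ge288 maps to the forecast field name
TMP_P850_ENS_FREQ_ge288. A hedged Python sketch of that assumed naming scheme:

    # Assumed naming scheme relating a (field, level, threshold) entry in
    # vx_config_ens.yaml to an ensemble-probability forecast field name.
    def ensprob_field_name(field, level, thresh):
        return '%s_%s_ENS_FREQ_%s' % (field, level, thresh)

    assert ensprob_field_name('TMP', 'P850', 'ge288') == 'TMP_P850_ENS_FREQ_ge288'
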
diff --git a/parm/wflow/verify_det.yaml b/parm/wflow/verify_det.yaml
index 4c6b43ca25..79f04eeaaa 100644
--- a/parm/wflow/verify_det.yaml
+++ b/parm/wflow/verify_det.yaml
@@ -21,6 +21,18 @@ default_task_verify_det: &default_task_verify_det
queue: '&QUEUE_DEFAULT;'
walltime: 00:30:00
+task_parse_vx_config_det:
+ <<: *default_task_verify_det
+ command: '&LOAD_MODULES_RUN_TASK_FP; "run_vx" "&JOBSdir;/JREGIONAL_PARSE_VX_CONFIG"'
+ envars:
+ <<: *default_vars
+ DET_OR_ENS: 'det'
+ join: !cycstr '&LOGDIR;/{{ jobname }}&LOGEXT;'
+ walltime: 00:05:00
+  # This task needs no dependencies: if any deterministic verification tasks
+  # are going to run (i.e. if this configuration file is included in the
+  # workflow), this task must be launched.
+
metatask_GridStat_CCPA_all_accums_all_mems:
var:
ACCUM_HH: '{% for ah in verification.VX_APCP_ACCUMS_HRS %}{% if workflow.FCST_LEN_HRS >= ah %}{{ "%02d " % ah }}{% endif %}{% endfor %}'
@@ -41,6 +53,8 @@ metatask_GridStat_CCPA_all_accums_all_mems:
OBTYPE: 'CCPA'
ENSMEM_INDX: "#mem#"
SLASH_ENSMEM_SUBDIR_OR_NULL: '{% if global.DO_ENSEMBLE %}{{ "/mem#mem#" }}{% endif %}'
+ FCST_LEVEL: 'A#ACCUM_HH#'
+ FCST_THRESH: 'all'
walltime: 02:00:00
dependency:
and:
@@ -50,6 +64,9 @@ metatask_GridStat_CCPA_all_accums_all_mems:
taskdep_pcpcombine_fcst:
attrs:
task: run_MET_PcpCombine_fcst_APCP#ACCUM_HH#h_mem#mem#
+ taskdep_parse_vx_config_det:
+ attrs:
+ task: parse_vx_config_det
metatask_GridStat_NOHRSC_all_accums_all_mems:
var:
@@ -71,15 +88,20 @@ metatask_GridStat_NOHRSC_all_accums_all_mems:
OBTYPE: 'NOHRSC'
ENSMEM_INDX: "#mem#"
SLASH_ENSMEM_SUBDIR_OR_NULL: '{% if global.DO_ENSEMBLE %}{{ "/mem#mem#" }}{% endif %}'
+ FCST_LEVEL: 'A#ACCUM_HH#'
+ FCST_THRESH: 'all'
walltime: 02:00:00
dependency:
and:
- taskdep:
+ taskdep_get_obs_nohrsc:
attrs:
- task: get_obs_mrms
+ task: get_obs_nohrsc
taskdep_pcpcombine_fcst:
attrs:
task: run_MET_PcpCombine_fcst_ASNOW#ACCUM_HH#h_mem#mem#
+ taskdep_parse_vx_config_det:
+ attrs:
+ task: parse_vx_config_det
metatask_GridStat_MRMS_all_mems:
var:
@@ -99,16 +121,21 @@ metatask_GridStat_MRMS_all_mems:
OBTYPE: 'MRMS'
ENSMEM_INDX: "#mem#"
SLASH_ENSMEM_SUBDIR_OR_NULL: '{% if global.DO_ENSEMBLE %}{{ "/mem#mem#" }}{% endif %}'
+ FCST_LEVEL: 'L0'
+ FCST_THRESH: 'all'
walltime: 02:00:00
dependency:
and:
- taskdep:
+ taskdep_get_obs_mrms:
attrs:
task: get_obs_mrms
datadep_post_files_exist:
attrs:
age: 00:00:00:30
text: !cycstr '{{ workflow.EXPTDIR }}/@Y@m@d@H/post_files_exist_mem#mem#.txt'
+ taskdep_parse_vx_config_det:
+ attrs:
+ task: parse_vx_config_det
metatask_PointStat_NDAS_all_mems:
var:
@@ -128,6 +155,8 @@ metatask_PointStat_NDAS_all_mems:
ACCUM_HH: '01'
ENSMEM_INDX: "#mem#"
SLASH_ENSMEM_SUBDIR_OR_NULL: '{% if global.DO_ENSEMBLE %}{{ "/mem#mem#" }}{% endif %}'
+ FCST_LEVEL: 'all'
+ FCST_THRESH: 'all'
walltime: 01:00:00
dependency:
and:
@@ -138,3 +167,6 @@ metatask_PointStat_NDAS_all_mems:
attrs:
age: 00:00:00:30
text: !cycstr '{{ workflow.EXPTDIR }}/@Y@m@d@H/post_files_exist_mem#mem#.txt'
+ taskdep_parse_vx_config_det:
+ attrs:
+ task: parse_vx_config_det
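
For readers unfamiliar with Rocoto metatasks: markers such as #ACCUM_HH# and
#mem# in the envar values above are replaced verbatim with each value of the
corresponding metatask variable. A rough Python sketch of the substitution,
using assumed values:

    # Rough sketch of Rocoto metatask variable substitution (assumed values).
    for accum_hh in ('01', '03', '06', '24'):
        fcst_level = 'A#ACCUM_HH#'.replace('#ACCUM_HH#', accum_hh)
        print(fcst_level)  # -> A01, A03, A06, A24
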
diff --git a/parm/wflow/verify_ens.yaml b/parm/wflow/verify_ens.yaml
index cf0a8d1dac..3f7638587d 100644
--- a/parm/wflow/verify_ens.yaml
+++ b/parm/wflow/verify_ens.yaml
@@ -21,6 +21,18 @@ default_task_verify_ens: &default_task_verify_ens
queue: '&QUEUE_DEFAULT;'
walltime: 01:00:00
+task_parse_vx_config_ens:
+ <<: *default_task_verify_ens
+ command: '&LOAD_MODULES_RUN_TASK_FP; "run_vx" "&JOBSdir;/JREGIONAL_PARSE_VX_CONFIG"'
+ envars:
+ <<: *default_vars
+ DET_OR_ENS: 'ens'
+ join: !cycstr '&LOGDIR;/{{ jobname }}&LOGEXT;'
+ walltime: 00:05:00
+  # This task needs no dependencies: if any ensemble verification tasks are
+  # going to run (i.e. if this configuration file is included in the
+  # workflow), this task must be launched.
+
metatask_GenEnsProd_EnsembleStat_CCPA:
var:
ACCUM_HH: '{% for ah in verification.VX_APCP_ACCUMS_HRS %}{% if workflow.FCST_LEN_HRS >= ah %}{{ "%02d " % ah }}{% endif %}{% endfor %}'
@@ -34,6 +46,8 @@ metatask_GenEnsProd_EnsembleStat_CCPA:
VAR: APCP
METPLUSTOOLNAME: 'GENENSPROD'
OBTYPE: 'CCPA'
+ FCST_LEVEL: 'A#ACCUM_HH#'
+ FCST_THRESH: 'all'
dependency:
and:
# The PcpCombine task for obs must be complete because this GenEnsProd
@@ -45,11 +59,15 @@ metatask_GenEnsProd_EnsembleStat_CCPA:
metataskdep_pcpcombine_fcst:
attrs:
metatask: PcpCombine_fcst_APCP#ACCUM_HH#h_all_mems
+ taskdep_parse_vx_config_ens:
+ attrs:
+ task: parse_vx_config_ens
task_run_MET_EnsembleStat_vx_APCP#ACCUM_HH#h:
<<: *task_GenEnsProd_CCPA
envars:
<<: *envars_GenEnsProd_CCPA
METPLUSTOOLNAME: 'ENSEMBLESTAT'
+ FCST_THRESH: 'none'
dependency:
taskdep_genensprod:
attrs:
@@ -68,6 +86,8 @@ metatask_GenEnsProd_EnsembleStat_NOHRSC:
VAR: ASNOW
METPLUSTOOLNAME: 'GENENSPROD'
OBTYPE: 'NOHRSC'
+ FCST_LEVEL: 'A#ACCUM_HH#'
+ FCST_THRESH: 'all'
dependency:
and:
# The PcpCombine task for obs must be complete because this GenEnsProd
@@ -76,11 +96,15 @@ metatask_GenEnsProd_EnsembleStat_NOHRSC:
metataskdep_pcpcombine_fcst:
attrs:
metatask: PcpCombine_fcst_ASNOW#ACCUM_HH#h_all_mems
+ taskdep_parse_vx_config_ens:
+ attrs:
+ task: parse_vx_config_ens
task_run_MET_EnsembleStat_vx_ASNOW#ACCUM_HH#h:
<<: *task_GenEnsProd_NOHRSC
envars:
<<: *envars_GenEnsProd_NOHRSC
METPLUSTOOLNAME: 'ENSEMBLESTAT'
+ FCST_THRESH: 'none'
dependency:
and:
taskdep:
@@ -103,24 +127,31 @@ metatask_GenEnsProd_EnsembleStat_MRMS:
VAR: '#VAR#'
METPLUSTOOLNAME: 'GENENSPROD'
OBTYPE: 'MRMS'
+ FCST_LEVEL: 'L0'
+ FCST_THRESH: 'all'
dependency:
and:
- taskdep:
- attrs:
- task: get_obs_mrms
- metataskdep_post_files_exist: &post_files_exist
+ metataskdep_check_post_output: &check_post_output
attrs:
metatask: check_post_output_all_mems
-
+ taskdep_parse_vx_config_ens:
+ attrs:
+ task: parse_vx_config_ens
task_run_MET_EnsembleStat_vx_#VAR#:
<<: *task_GenEnsProd_MRMS
envars:
<<: *envars_GenEnsProd_MRMS
METPLUSTOOLNAME: 'ENSEMBLESTAT'
+ FCST_LEVEL: 'L0'
+ FCST_THRESH: 'none'
dependency:
- taskdep:
- attrs:
- task: run_MET_GenEnsProd_vx_#VAR#
+ and:
+ taskdep_get_obs_mrms:
+ attrs:
+ task: get_obs_mrms
+ taskdep_genensprod:
+ attrs:
+ task: run_MET_GenEnsProd_vx_#VAR#
metatask_GenEnsProd_EnsembleStat_NDAS:
var:
@@ -135,6 +166,8 @@ metatask_GenEnsProd_EnsembleStat_NDAS:
METPLUSTOOLNAME: 'GENENSPROD'
OBTYPE: 'NDAS'
ACCUM_HH: '01'
+ FCST_LEVEL: 'all'
+ FCST_THRESH: 'all'
walltime: 02:30:00
dependency:
and:
@@ -144,8 +177,11 @@ metatask_GenEnsProd_EnsembleStat_NDAS:
taskdep_pb2nc:
attrs:
task: run_MET_Pb2nc_obs
- metataskdep_post_files_exist:
- <<: *post_files_exist
+ metataskdep_check_post_output:
+ <<: *check_post_output
+ taskdep_parse_vx_config_ens:
+ attrs:
+ task: parse_vx_config_ens
task_run_MET_EnsembleStat_vx_#VAR#:
<<: *task_GenEnsProd_NDAS
envars:
@@ -174,6 +210,8 @@ metatask_GridStat_CCPA_ensmeanprob_all_accums:
METPLUSTOOLNAME: 'GRIDSTAT'
OBTYPE: 'CCPA'
ACCUM_HH: '#ACCUM_HH#'
+ FCST_LEVEL: 'A#ACCUM_HH#'
+ FCST_THRESH: 'all'
dependency:
taskdep:
attrs:
@@ -196,6 +234,8 @@ metatask_GridStat_NOHRSC_ensmeanprob_all_accums:
METPLUSTOOLNAME: 'GRIDSTAT'
OBTYPE: 'NOHRSC'
ACCUM_HH: '#ACCUM_HH#'
+ FCST_LEVEL: 'A#ACCUM_HH#'
+ FCST_THRESH: 'all'
dependency:
taskdep:
attrs:
@@ -214,6 +254,8 @@ metatask_GridStat_MRMS_ensprob:
VAR: '#VAR#'
METPLUSTOOLNAME: 'GRIDSTAT'
OBTYPE: 'MRMS'
+ FCST_LEVEL: 'L0'
+ FCST_THRESH: 'all'
dependency:
taskdep:
attrs:
@@ -236,6 +278,8 @@ metatask_PointStat_NDAS_ensmeanprob:
METPLUSTOOLNAME: 'POINTSTAT'
OBTYPE: 'NDAS'
ACCUM_HH: '01'
+ FCST_LEVEL: 'all'
+ FCST_THRESH: 'all'
dependency:
taskdep:
attrs:
diff --git a/parm/wflow/verify_pre.yaml b/parm/wflow/verify_pre.yaml
index eb1a7eb796..b7511bf63f 100644
--- a/parm/wflow/verify_pre.yaml
+++ b/parm/wflow/verify_pre.yaml
@@ -85,7 +85,7 @@ task_run_MET_Pb2nc_obs:
<<: *default_vars
VAR: ADPSFC
ACCUM_HH: '01'
- obs_or_fcst: obs
+ FCST_OR_OBS: OBS
OBTYPE: NDAS
OBS_DIR: '&NDAS_OBS_DIR;'
METPLUSTOOLNAME: 'PB2NC'
@@ -115,7 +115,7 @@ metatask_PcpCombine_obs:
<<: *default_vars
VAR: APCP
ACCUM_HH: '#ACCUM_HH#'
- obs_or_fcst: obs
+ FCST_OR_OBS: OBS
OBTYPE: CCPA
OBS_DIR: '&CCPA_OBS_DIR;'
METPLUSTOOLNAME: 'PCPCOMBINE'
@@ -226,7 +226,7 @@ metatask_PcpCombine_fcst_APCP_all_accums_all_mems:
<<: *default_vars
VAR: APCP
ACCUM_HH: '#ACCUM_HH#'
- obs_or_fcst: fcst
+ FCST_OR_OBS: FCST
OBTYPE: CCPA
OBS_DIR: '&CCPA_OBS_DIR;'
METPLUSTOOLNAME: 'PCPCOMBINE'
@@ -254,7 +254,7 @@ metatask_PcpCombine_fcst_ASNOW_all_accums_all_mems:
<<: *default_vars
VAR: ASNOW
ACCUM_HH: '#ACCUM_HH#'
- obs_or_fcst: fcst
+ FCST_OR_OBS: FCST
OBTYPE: NOHRSC
OBS_DIR: '&NOHRSC_OBS_DIR;'
METPLUSTOOLNAME: 'PCPCOMBINE'
diff --git a/scripts/exregional_check_post_output.sh b/scripts/exregional_check_post_output.sh
index 1352d38789..ba0d141c5d 100755
--- a/scripts/exregional_check_post_output.sh
+++ b/scripts/exregional_check_post_output.sh
@@ -50,10 +50,11 @@ print_info_msg "
Entering script: \"${scrfunc_fn}\"
In directory: \"${scrfunc_dir}\"
-This is the ex-script for the task that checks that all the post-processed
-output files in fact exist and are at least a certain age. These files
-may have been generated by UPP as part of the current SRW App workflow,
-or they may be user-staged.
+This is the ex-script for the task that checks that no more than
+NUM_MISSING_FCST_FILES_MAX of each forecast's (ensemble member's) post-
+processed output files are missing. Note that such files may have been
+generated by UPP as part of the current SRW App workflow, or they may be
+user-staged.
========================================================================"
#
#-----------------------------------------------------------------------
diff --git a/scripts/exregional_parse_vx_config.sh b/scripts/exregional_parse_vx_config.sh
new file mode 100755
index 0000000000..13632c7e53
--- /dev/null
+++ b/scripts/exregional_parse_vx_config.sh
@@ -0,0 +1,94 @@
+#!/usr/bin/env bash
+
+#
+#-----------------------------------------------------------------------
+#
+# Source the variable definitions file and the bash utility functions.
+#
+#-----------------------------------------------------------------------
+#
+. $USHdir/source_util_funcs.sh
+source_config_for_task "task_run_met_pcpcombine|task_run_post" ${GLOBAL_VAR_DEFNS_FP}
+#
+#-----------------------------------------------------------------------
+#
+# Source files defining auxiliary functions for verification.
+#
+#-----------------------------------------------------------------------
+#
+. $USHdir/set_vx_fhr_list.sh
+#
+#-----------------------------------------------------------------------
+#
+# Save current shell options (in a global array). Then set new options
+# for this script/function.
+#
+#-----------------------------------------------------------------------
+#
+{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1
+#
+#-----------------------------------------------------------------------
+#
+# Get the full path to the file in which this script/function is located
+# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in
+# which the file is located (scrfunc_dir).
+#
+#-----------------------------------------------------------------------
+#
+scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" )
+scrfunc_fn=$( basename "${scrfunc_fp}" )
+scrfunc_dir=$( dirname "${scrfunc_fp}" )
+#
+#-----------------------------------------------------------------------
+#
+# Print message indicating entry into script.
+#
+#-----------------------------------------------------------------------
+#
+print_info_msg "
+========================================================================
+Entering script: \"${scrfunc_fn}\"
+In directory: \"${scrfunc_dir}\"
+
+This is the ex-script for the task that reads in the \"coupled\" yaml
+verification (vx) configuration file (which represents a python dictionary)
+and generates from it two \"decoupled\" vx configuration dictionaries, one
+for forecasts and another for observations. The task then writes these two
+decoupled dictionaries to a new configuration file in the experiment
+directory that can be read by downstream vx tasks.
+========================================================================"
+#
+#-----------------------------------------------------------------------
+#
+# Call python script to generate vx configuration file containing
+# separate vx configuration dictionaries for forecasts and observations.
+#
+#-----------------------------------------------------------------------
+#
+python3 ${USHdir}/metplus/decouple_fcst_obs_vx_config.py \
+ --vx_type "${DET_OR_ENS}" \
+ --outfile_type "txt" \
+ --outdir "${EXPTDIR}"
+#
+#-----------------------------------------------------------------------
+#
+# Print message indicating successful completion of script.
+#
+#-----------------------------------------------------------------------
+#
+print_info_msg "
+========================================================================
+Done extracting vx configuration.
+
+Exiting script: \"${scrfunc_fn}\"
+In directory: \"${scrfunc_dir}\"
+========================================================================"
+#
+#-----------------------------------------------------------------------
+#
+# Restore the shell options saved at the beginning of this script/function.
+#
+#-----------------------------------------------------------------------
+#
+{ restore_shell_opts; } > /dev/null 2>&1
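
For orientation, the decoupling this task performs can be pictured with the
following illustrative Python sketch (the real logic lives in
ush/metplus/decouple_fcst_obs_vx_config.py and differs in detail):

    # Illustrative sketch only: split every '%%'-coupled key or threshold in
    # the nested vx configuration into its forecast or observation half.
    def decouple(node, idx):
        # idx = 0 selects the forecast side, idx = 1 the observation side.
        if isinstance(node, dict):
            return {decouple(k, idx): decouple(v, idx) for k, v in node.items()}
        if isinstance(node, list):
            return [decouple(s, idx) for s in node]
        return node.split('%%')[idx] if '%%' in node else node

    coupled = {'RETOP%%EchoTop18': {'L0%%Z500': ['ge20', 'ge30']}}
    assert decouple(coupled, 0) == {'RETOP': {'L0': ['ge20', 'ge30']}}
    assert decouple(coupled, 1) == {'EchoTop18': {'Z500': ['ge20', 'ge30']}}
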
diff --git a/scripts/exregional_run_met_genensprod_or_ensemblestat.sh b/scripts/exregional_run_met_genensprod_or_ensemblestat.sh
index aa24abbb10..529d8d92cc 100755
--- a/scripts/exregional_run_met_genensprod_or_ensemblestat.sh
+++ b/scripts/exregional_run_met_genensprod_or_ensemblestat.sh
@@ -114,53 +114,6 @@ set_vx_params \
#
#-----------------------------------------------------------------------
#
-# Set additional field-dependent verification parameters.
-#
-#-----------------------------------------------------------------------
-#
-if [ "${grid_or_point}" = "grid" ]; then
-
- case "${FIELDNAME_IN_MET_FILEDIR_NAMES}" in
- "APCP01h")
- FIELD_THRESHOLDS="gt0.0, ge0.254, ge0.508, ge2.54"
- ;;
- "APCP03h")
- FIELD_THRESHOLDS="gt0.0, ge0.508, ge2.54, ge6.350"
- ;;
- "APCP06h")
- FIELD_THRESHOLDS="gt0.0, ge2.54, ge6.350, ge12.700"
- ;;
- "APCP24h")
- FIELD_THRESHOLDS="gt0.0, ge6.350, ge12.700, ge25.400"
- ;;
- "ASNOW06h")
- FIELD_THRESHOLDS="gt0.0, ge2.54, ge5.08, ge10.16, ge20.32"
- ;;
- "ASNOW24h")
- FIELD_THRESHOLDS="gt0.0, ge2.54, ge5.08, ge10.16, ge20.32"
- ;;
- "REFC")
- FIELD_THRESHOLDS="ge20, ge30, ge40, ge50"
- ;;
- "RETOP")
- FIELD_THRESHOLDS="ge20, ge30, ge40, ge50"
- ;;
- *)
- print_err_msg_exit "\
-Verification parameters have not been defined for this field
-(FIELDNAME_IN_MET_FILEDIR_NAMES):
- FIELDNAME_IN_MET_FILEDIR_NAMES = \"${FIELDNAME_IN_MET_FILEDIR_NAMES}\""
- ;;
- esac
-
-elif [ "${grid_or_point}" = "point" ]; then
-
- FIELD_THRESHOLDS=""
-
-fi
-#
-#-----------------------------------------------------------------------
-#
# Set paths and file templates for input to and output from the MET/
# METplus tool to be run as well as other file/directory parameters.
#
@@ -233,13 +186,10 @@ for (( i=0; i<${NUM_ENS_MEMBERS}; i++ )); do
template="${FCST_SUBDIR_TEMPLATE}/${FCST_FN_TEMPLATE}"
fi
- slash_ensmem_subdir_or_null="/${ensmem_name}"
if [ -z "${FCST_INPUT_FN_TEMPLATE}" ]; then
FCST_INPUT_FN_TEMPLATE="$(eval echo ${template})"
else
- FCST_INPUT_FN_TEMPLATE="\
-${FCST_INPUT_FN_TEMPLATE},
-$(eval echo ${template})"
+ FCST_INPUT_FN_TEMPLATE="${FCST_INPUT_FN_TEMPLATE}, $(eval echo ${template})"
fi
done
@@ -251,6 +201,16 @@ STAGING_DIR="${OUTPUT_BASE}/stage/${FIELDNAME_IN_MET_FILEDIR_NAMES}"
#-----------------------------------------------------------------------
#
# Set the array of forecast hours for which to run the MET/METplus tool.
+# This is done by starting with the full list of forecast hours for which
+# there is forecast output and then removing from that list any forecast
+# hours for which there is no corresponding observation data.
+#
+# Strictly speaking, this filtering is unnecessary when the MET/METplus
+# tool being called is GenEnsProd, since that tool operates only on
+# forecasts. We run the check anyway because it keeps the code here
+# simpler, and because any GenEnsProd output for forecast hours with
+# missing observations would go unused by downstream verification tasks.
#
#-----------------------------------------------------------------------
#
@@ -318,16 +278,27 @@ fi
#
# First, set the base file names.
#
-metplus_config_tmpl_fn="${VAR}"
-metplus_config_tmpl_fn="${MetplusToolName}_${metplus_config_tmpl_fn}"
-metplus_config_fn="${MetplusToolName}_${FIELDNAME_IN_MET_FILEDIR_NAMES}"
-metplus_log_fn="${metplus_config_fn}"
+metplus_config_tmpl_bn="${MetplusToolName}"
+metplus_config_bn="${MetplusToolName}_${FIELDNAME_IN_MET_FILEDIR_NAMES}"
+metplus_log_bn="${metplus_config_bn}"
#
# Add prefixes and suffixes (extensions) to the base file names.
#
-metplus_config_tmpl_fn="${metplus_config_tmpl_fn}.conf"
-metplus_config_fn="${metplus_config_fn}.conf"
-metplus_log_fn="metplus.log.${metplus_log_fn}"
+metplus_config_tmpl_fn="${metplus_config_tmpl_bn}.conf"
+metplus_config_fn="${metplus_config_bn}.conf"
+metplus_log_fn="metplus.log.${metplus_log_bn}"
+#
+#-----------------------------------------------------------------------
+#
+# Load the yaml-like file containing the configuration for ensemble
+# verification.
+#
+#-----------------------------------------------------------------------
+#
+det_or_ens="ens"
+vx_config_output_fn="vx_config_${det_or_ens}.txt"
+vx_config_output_fp="${EXPTDIR}/${vx_config_output_fn}"
+vx_config_dict=$(<"${vx_config_output_fp}")
#
#-----------------------------------------------------------------------
#
@@ -387,20 +358,22 @@ settings="\
'obtype': '${OBTYPE}'
'accum_hh': '${ACCUM_HH:-}'
'accum_no_pad': '${ACCUM_NO_PAD:-}'
- 'field_thresholds': '${FIELD_THRESHOLDS:-}'
+ 'metplus_templates_dir': '${METPLUS_CONF:-}'
+ 'input_field_group': '${VAR:-}'
+ 'input_level_fcst': '${FCST_LEVEL:-}'
+ 'input_thresh_fcst': '${FCST_THRESH:-}'
+ 'vx_config_dict': ${vx_config_dict:-}
"
# Render the template to create a METplus configuration file
tmpfile=$( $READLINK -f "$(mktemp ./met_plus_settings.XXXXXX.yaml)")
-cat > $tmpfile << EOF
-$settings
-EOF
-
+printf "%s" "$settings" > "$tmpfile"
uw template render \
-i ${metplus_config_tmpl_fp} \
-o ${metplus_config_fp} \
--verbose \
- --values-file "${tmpfile}"
+ --values-file "${tmpfile}" \
+ --search-path "/"
err=$?
rm $tmpfile
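
One subtlety in the settings block above: the contents of vx_config_ens.txt are
spliced verbatim into the values file as the value of 'vx_config_dict', so that
file must itself be valid YAML (a nested mapping). A hedged Python sketch, with
assumed values, of what the templater then sees:

    # Hedged sketch (assumed values) of the rendered values file.
    from textwrap import dedent
    import yaml

    settings_text = dedent("""
        input_field_group: 'APCP'
        input_level_fcst: 'A01'
        input_thresh_fcst: 'all'
        vx_config_dict: {'APCP': {'APCP': {'A1': ['gt0.0', 'ge0.254']}}}
    """)
    settings = yaml.safe_load(settings_text)
    assert settings['vx_config_dict']['APCP']['APCP']['A1'][0] == 'gt0.0'
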
diff --git a/scripts/exregional_run_met_gridstat_or_pointstat_vx.sh b/scripts/exregional_run_met_gridstat_or_pointstat_vx.sh
index 93444069cb..b8f0c49fec 100755
--- a/scripts/exregional_run_met_gridstat_or_pointstat_vx.sh
+++ b/scripts/exregional_run_met_gridstat_or_pointstat_vx.sh
@@ -125,53 +125,6 @@ time_lag=$( bc -l <<< "${ENS_TIME_LAG_HRS[$i]}*${SECS_PER_HOUR}" )
#
#-----------------------------------------------------------------------
#
-# Set additional field-dependent verification parameters.
-#
-#-----------------------------------------------------------------------
-#
-if [ "${grid_or_point}" = "grid" ]; then
-
- case "${FIELDNAME_IN_MET_FILEDIR_NAMES}" in
- "APCP01h")
- FIELD_THRESHOLDS="gt0.0, ge0.254, ge0.508, ge1.27, ge2.54"
- ;;
- "APCP03h")
- FIELD_THRESHOLDS="gt0.0, ge0.254, ge0.508, ge1.27, ge2.54, ge3.810, ge6.350"
- ;;
- "APCP06h")
- FIELD_THRESHOLDS="gt0.0, ge0.254, ge0.508, ge1.27, ge2.54, ge3.810, ge6.350, ge8.890, ge12.700"
- ;;
- "APCP24h")
- FIELD_THRESHOLDS="gt0.0, ge0.254, ge0.508, ge1.27, ge2.54, ge3.810, ge6.350, ge8.890, ge12.700, ge25.400"
- ;;
- "ASNOW06h")
- FIELD_THRESHOLDS="gt0.0, ge2.54, ge5.08, ge10.16, ge20.32"
- ;;
- "ASNOW24h")
- FIELD_THRESHOLDS="gt0.0, ge2.54, ge5.08, ge10.16, ge20.32"
- ;;
- "REFC")
- FIELD_THRESHOLDS="ge20, ge30, ge40, ge50"
- ;;
- "RETOP")
- FIELD_THRESHOLDS="ge20, ge30, ge40, ge50"
- ;;
- *)
- print_err_msg_exit "\
-Verification parameters have not been defined for this field
-(FIELDNAME_IN_MET_FILEDIR_NAMES):
- FIELDNAME_IN_MET_FILEDIR_NAMES = \"${FIELDNAME_IN_MET_FILEDIR_NAMES}\""
- ;;
- esac
-
-elif [ "${grid_or_point}" = "point" ]; then
-
- FIELD_THRESHOLDS=""
-
-fi
-#
-#-----------------------------------------------------------------------
-#
# Set paths and file templates for input to and output from the MET/
# METplus tool to be run as well as other file/directory parameters.
#
@@ -252,6 +205,9 @@ STAGING_DIR="${OUTPUT_BASE}/stage/${FIELDNAME_IN_MET_FILEDIR_NAMES}"
#-----------------------------------------------------------------------
#
# Set the array of forecast hours for which to run the MET/METplus tool.
+# This is done by starting with the full list of forecast hours for which
+# there is forecast output and then removing from that list any forecast
+# hours for which there is no corresponding observation data.
#
#-----------------------------------------------------------------------
#
@@ -319,16 +275,27 @@ fi
#
# First, set the base file names.
#
-metplus_config_tmpl_fn="${VAR}"
-metplus_config_tmpl_fn="${MetplusToolName}_${metplus_config_tmpl_fn}"
-metplus_config_fn="${MetplusToolName}_${FIELDNAME_IN_MET_FILEDIR_NAMES}_${ensmem_name}"
-metplus_log_fn="${metplus_config_fn}"
+metplus_config_tmpl_bn="GridStat_or_PointStat"
+metplus_config_bn="${MetplusToolName}_${FIELDNAME_IN_MET_FILEDIR_NAMES}_${ensmem_name}"
+metplus_log_bn="${metplus_config_bn}"
#
# Add prefixes and suffixes (extensions) to the base file names.
#
-metplus_config_tmpl_fn="${metplus_config_tmpl_fn}.conf"
-metplus_config_fn="${metplus_config_fn}.conf"
-metplus_log_fn="metplus.log.${metplus_log_fn}"
+metplus_config_tmpl_fn="${metplus_config_tmpl_bn}.conf"
+metplus_config_fn="${metplus_config_bn}.conf"
+metplus_log_fn="metplus.log.${metplus_log_bn}"
+#
+#-----------------------------------------------------------------------
+#
+# Load the yaml-like file containing the configuration for deterministic
+# verification.
+#
+#-----------------------------------------------------------------------
+#
+det_or_ens="det"
+vx_config_output_fn="vx_config_${det_or_ens}.txt"
+vx_config_output_fp="${EXPTDIR}/${vx_config_output_fn}"
+vx_config_dict=$(<"${vx_config_output_fp}")
#
#-----------------------------------------------------------------------
#
@@ -388,20 +355,22 @@ settings="\
'obtype': '${OBTYPE}'
'accum_hh': '${ACCUM_HH:-}'
'accum_no_pad': '${ACCUM_NO_PAD:-}'
- 'field_thresholds': '${FIELD_THRESHOLDS:-}'
+ 'metplus_templates_dir': '${METPLUS_CONF:-}'
+ 'input_field_group': '${VAR:-}'
+ 'input_level_fcst': '${FCST_LEVEL:-}'
+ 'input_thresh_fcst': '${FCST_THRESH:-}'
+ 'vx_config_dict': ${vx_config_dict:-}
"
# Render the template to create a METplus configuration file
tmpfile=$( $READLINK -f "$(mktemp ./met_plus_settings.XXXXXX.yaml)")
-cat > $tmpfile << EOF
-$settings
-EOF
-
+printf "%s" "$settings" > "$tmpfile"
uw template render \
-i ${metplus_config_tmpl_fp} \
-o ${metplus_config_fp} \
--verbose \
- --values-file "${tmpfile}"
+ --values-file "${tmpfile}" \
+ --search-path "/"
err=$?
rm $tmpfile
@@ -415,7 +384,6 @@ $settings"
print_err_msg_exit "${message_txt}"
fi
fi
-
#
#-----------------------------------------------------------------------
#
diff --git a/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensmean.sh b/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensmean.sh
index 4b9716493e..9939daaf76 100755
--- a/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensmean.sh
+++ b/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensmean.sh
@@ -103,53 +103,6 @@ set_vx_params \
#
#-----------------------------------------------------------------------
#
-# Set additional field-dependent verification parameters.
-#
-#-----------------------------------------------------------------------
-#
-if [ "${grid_or_point}" = "grid" ]; then
-
- case "${FIELDNAME_IN_MET_FILEDIR_NAMES}" in
- "APCP01h")
- FIELD_THRESHOLDS="gt0.0, ge0.254, ge0.508, ge2.54"
- ;;
- "APCP03h")
- FIELD_THRESHOLDS="gt0.0, ge0.508, ge2.54, ge6.350"
- ;;
- "APCP06h")
- FIELD_THRESHOLDS="gt0.0, ge2.54, ge6.350, ge12.700"
- ;;
- "APCP24h")
- FIELD_THRESHOLDS="gt0.0, ge6.350, ge12.700, ge25.400"
- ;;
- "ASNOW06h")
- FIELD_THRESHOLDS="gt0.0, ge2.54, ge5.08, ge10.16, ge20.32"
- ;;
- "ASNOW24h")
- FIELD_THRESHOLDS="gt0.0, ge2.54, ge5.08, ge10.16, ge20.32"
- ;;
- "REFC")
- FIELD_THRESHOLDS="ge20, ge30, ge40, ge50"
- ;;
- "RETOP")
- FIELD_THRESHOLDS="ge20, ge30, ge40, ge50"
- ;;
- *)
- print_err_msg_exit "\
-Verification parameters have not been defined for this field
-(FIELDNAME_IN_MET_FILEDIR_NAMES):
- FIELDNAME_IN_MET_FILEDIR_NAMES = \"${FIELDNAME_IN_MET_FILEDIR_NAMES}\""
- ;;
- esac
-
-elif [ "${grid_or_point}" = "point" ]; then
-
- FIELD_THRESHOLDS=""
-
-fi
-#
-#-----------------------------------------------------------------------
-#
# Set paths and file templates for input to and output from the MET/
# METplus tool to be run as well as other file/directory parameters.
#
@@ -201,6 +154,9 @@ STAGING_DIR="${OUTPUT_BASE}/stage/${FIELDNAME_IN_MET_FILEDIR_NAMES}_ensmean"
#-----------------------------------------------------------------------
#
# Set the array of forecast hours for which to run the MET/METplus tool.
+# This is done by starting with the full list of forecast hours for which
+# there is forecast output and then removing from that list any forecast
+# hours for which there is no corresponding observation data.
#
#-----------------------------------------------------------------------
#
@@ -277,16 +233,27 @@ fi
#
# First, set the base file names.
#
-metplus_config_tmpl_fn="${VAR}"
-metplus_config_tmpl_fn="${MetplusToolName}_ensmean_${metplus_config_tmpl_fn}"
-metplus_config_fn="${MetplusToolName}_ensmean_${FIELDNAME_IN_MET_FILEDIR_NAMES}"
-metplus_log_fn="${metplus_config_fn}"
+metplus_config_tmpl_bn="${MetplusToolName}_ensmean"
+metplus_config_bn="${MetplusToolName}_ensmean_${FIELDNAME_IN_MET_FILEDIR_NAMES}"
+metplus_log_bn="${metplus_config_bn}"
#
# Add prefixes and suffixes (extensions) to the base file names.
#
-metplus_config_tmpl_fn="${metplus_config_tmpl_fn}.conf"
-metplus_config_fn="${metplus_config_fn}.conf"
-metplus_log_fn="metplus.log.${metplus_log_fn}"
+metplus_config_tmpl_fn="${metplus_config_tmpl_bn}.conf"
+metplus_config_fn="${metplus_config_bn}.conf"
+metplus_log_fn="metplus.log.${metplus_log_bn}"
+#
+#-----------------------------------------------------------------------
+#
+# Load the yaml-like file containing the configuration for ensemble
+# verification.
+#
+#-----------------------------------------------------------------------
+#
+det_or_ens="ens"
+vx_config_output_fn="vx_config_${det_or_ens}.txt"
+vx_config_output_fp="${EXPTDIR}/${vx_config_output_fn}"
+vx_config_dict=$(<"${vx_config_output_fp}")
#
#-----------------------------------------------------------------------
#
@@ -346,20 +313,22 @@ settings="\
'obtype': '${OBTYPE}'
'accum_hh': '${ACCUM_HH:-}'
'accum_no_pad': '${ACCUM_NO_PAD:-}'
- 'field_thresholds': '${FIELD_THRESHOLDS:-}'
+ 'metplus_templates_dir': '${METPLUS_CONF:-}'
+ 'input_field_group': '${VAR:-}'
+ 'input_level_fcst': '${FCST_LEVEL:-}'
+ 'input_thresh_fcst': '${FCST_THRESH:-}'
+ 'vx_config_dict': ${vx_config_dict:-}
"
# Render the template to create a METplus configuration file
tmpfile=$( $READLINK -f "$(mktemp ./met_plus_settings.XXXXXX.yaml)")
-cat > $tmpfile << EOF
-$settings
-EOF
-
+printf "%s" "$settings" > "$tmpfile"
uw template render \
-i ${metplus_config_tmpl_fp} \
-o ${metplus_config_fp} \
--verbose \
- --values-file "${tmpfile}"
+ --values-file "${tmpfile}" \
+ --search-path "/"
err=$?
rm $tmpfile
@@ -373,7 +342,6 @@ $settings"
print_err_msg_exit "${message_txt}"
fi
fi
-
#
#-----------------------------------------------------------------------
#
diff --git a/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensprob.sh b/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensprob.sh
index 918fb900d3..33d00b1d37 100755
--- a/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensprob.sh
+++ b/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensprob.sh
@@ -153,6 +153,9 @@ STAGING_DIR="${OUTPUT_BASE}/stage/${FIELDNAME_IN_MET_FILEDIR_NAMES}_ensprob"
#-----------------------------------------------------------------------
#
# Set the array of forecast hours for which to run the MET/METplus tool.
+# This is done by starting with the full list of forecast hours for which
+# there is forecast output and then removing from that list any forecast
+# hours for which there is no corresponding observation data.
#
#-----------------------------------------------------------------------
#
@@ -229,16 +232,27 @@ fi
#
# First, set the base file names.
#
-metplus_config_tmpl_fn="${VAR}"
-metplus_config_tmpl_fn="${MetplusToolName}_ensprob_${metplus_config_tmpl_fn}"
-metplus_config_fn="${MetplusToolName}_ensprob_${FIELDNAME_IN_MET_FILEDIR_NAMES}"
-metplus_log_fn="${metplus_config_fn}"
+metplus_config_tmpl_bn="${MetplusToolName}_ensprob"
+metplus_config_bn="${MetplusToolName}_ensprob_${FIELDNAME_IN_MET_FILEDIR_NAMES}"
+metplus_log_bn="${metplus_config_bn}"
#
# Add prefixes and suffixes (extensions) to the base file names.
#
-metplus_config_tmpl_fn="${metplus_config_tmpl_fn}.conf"
-metplus_config_fn="${metplus_config_fn}.conf"
-metplus_log_fn="metplus.log.${metplus_log_fn}"
+metplus_config_tmpl_fn="${metplus_config_tmpl_bn}.conf"
+metplus_config_fn="${metplus_config_bn}.conf"
+metplus_log_fn="metplus.log.${metplus_log_bn}"
+#
+#-----------------------------------------------------------------------
+#
+# Load the yaml-like file containing the configuration for ensemble
+# verification.
+#
+#-----------------------------------------------------------------------
+#
+det_or_ens="ens"
+vx_config_output_fn="vx_config_${det_or_ens}.txt"
+vx_config_output_fp="${EXPTDIR}/${vx_config_output_fn}"
+vx_config_dict=$(<"${vx_config_output_fp}")
#
#-----------------------------------------------------------------------
#
@@ -298,20 +312,22 @@ settings="\
'obtype': '${OBTYPE}'
'accum_hh': '${ACCUM_HH:-}'
'accum_no_pad': '${ACCUM_NO_PAD:-}'
- 'field_thresholds': '${FIELD_THRESHOLDS:-}'
+ 'metplus_templates_dir': '${METPLUS_CONF:-}'
+ 'input_field_group': '${VAR:-}'
+ 'input_level_fcst': '${FCST_LEVEL:-}'
+ 'input_thresh_fcst': '${FCST_THRESH:-}'
+ 'vx_config_dict': ${vx_config_dict:-}
"
# Render the template to create a METplus configuration file
tmpfile=$( $READLINK -f "$(mktemp ./met_plus_settings.XXXXXX.yaml)")
-cat > $tmpfile << EOF
-$settings
-EOF
-
+printf "%s" "$settings" > "$tmpfile"
uw template render \
-i ${metplus_config_tmpl_fp} \
-o ${metplus_config_fp} \
--verbose \
- --values-file "${tmpfile}"
+ --values-file "${tmpfile}" \
+ --search-path "/"
err=$?
rm $tmpfile
@@ -325,8 +341,6 @@ $settings"
print_err_msg_exit "${message_txt}"
fi
fi
-
-
#
#-----------------------------------------------------------------------
#
diff --git a/scripts/exregional_run_met_pb2nc_obs.sh b/scripts/exregional_run_met_pb2nc_obs.sh
index 2528c32ced..985cd33c7f 100755
--- a/scripts/exregional_run_met_pb2nc_obs.sh
+++ b/scripts/exregional_run_met_pb2nc_obs.sh
@@ -272,20 +272,17 @@ settings="\
'obtype': '${OBTYPE}'
'accum_hh': '${ACCUM_HH:-}'
'accum_no_pad': '${ACCUM_NO_PAD:-}'
- 'field_thresholds': '${FIELD_THRESHOLDS:-}'
"
# Render the template to create a METplus configuration file
tmpfile=$( $READLINK -f "$(mktemp ./met_plus_settings.XXXXXX.yaml)")
-cat > $tmpfile << EOF
-$settings
-EOF
-
+printf "%s" "$settings" > "$tmpfile"
uw template render \
-i ${metplus_config_tmpl_fp} \
-o ${metplus_config_fp} \
--verbose \
- --values-file "${tmpfile}"
+ --values-file "${tmpfile}" \
+ --search-path "/"
err=$?
rm $tmpfile
@@ -299,7 +296,6 @@ $settings"
print_err_msg_exit "${message_txt}"
fi
fi
-
#
#-----------------------------------------------------------------------
#
diff --git a/scripts/exregional_run_met_pcpcombine.sh b/scripts/exregional_run_met_pcpcombine.sh
index fb495a6145..6e64d102e6 100755
--- a/scripts/exregional_run_met_pcpcombine.sh
+++ b/scripts/exregional_run_met_pcpcombine.sh
@@ -120,7 +120,7 @@ set_vx_params \
#-----------------------------------------------------------------------
#
time_lag="0"
-if [ "${obs_or_fcst}" = "fcst" ]; then
+if [ "${FCST_OR_OBS}" = "FCST" ]; then
i="0"
if [ "${DO_ENSEMBLE}" = "TRUE" ]; then
i=$( bc -l <<< "${ENSMEM_INDX}-1" )
@@ -137,10 +137,9 @@ fi
#
vx_fcst_input_basedir=$( eval echo "${VX_FCST_INPUT_BASEDIR}" )
vx_output_basedir=$( eval echo "${VX_OUTPUT_BASEDIR}" )
-if [ "${obs_or_fcst}" = "fcst" ]; then
+if [ "${FCST_OR_OBS}" = "FCST" ]; then
ensmem_indx=$(printf "%0${VX_NDIGITS_ENSMEM_NAMES}d" $(( 10#${ENSMEM_INDX})))
ensmem_name="mem${ensmem_indx}"
-
if [ "${RUN_ENVIR}" = "nco" ]; then
slash_cdate_or_null=""
slash_ensmem_subdir_or_null=""
@@ -171,17 +170,7 @@ OBS_INPUT_FN_TEMPLATE=""
FCST_INPUT_DIR=""
FCST_INPUT_FN_TEMPLATE=""
-if [ "${obs_or_fcst}" = "obs" ]; then
-
- OBS_INPUT_DIR="${OBS_DIR}"
- OBS_INPUT_FN_TEMPLATE=$( eval echo ${OBS_CCPA_APCP_FN_TEMPLATE} )
-
- OUTPUT_BASE="${vx_output_basedir}"
- OUTPUT_DIR="${OUTPUT_BASE}/metprd/${MetplusToolName}_obs"
- OUTPUT_FN_TEMPLATE=$( eval echo ${OBS_CCPA_APCP_FN_TEMPLATE_PCPCOMBINE_OUTPUT} )
- STAGING_DIR="${OUTPUT_BASE}/stage/${FIELDNAME_IN_MET_FILEDIR_NAMES}"
-
-elif [ "${obs_or_fcst}" = "fcst" ]; then
+if [ "${FCST_OR_OBS}" = "FCST" ]; then
FCST_INPUT_DIR="${vx_fcst_input_basedir}"
FCST_INPUT_FN_TEMPLATE=$( eval echo ${FCST_SUBDIR_TEMPLATE:+${FCST_SUBDIR_TEMPLATE}/}${FCST_FN_TEMPLATE} )
@@ -191,22 +180,36 @@ elif [ "${obs_or_fcst}" = "fcst" ]; then
OUTPUT_FN_TEMPLATE=$( eval echo ${FCST_FN_TEMPLATE_PCPCOMBINE_OUTPUT} )
STAGING_DIR="${OUTPUT_BASE}/stage/${FIELDNAME_IN_MET_FILEDIR_NAMES}"
+elif [ "${FCST_OR_OBS}" = "OBS" ]; then
+
+ OBS_INPUT_DIR="${OBS_DIR}"
+ OBS_INPUT_FN_TEMPLATE=$( eval echo ${OBS_CCPA_APCP_FN_TEMPLATE} )
+
+ OUTPUT_BASE="${vx_output_basedir}"
+ OUTPUT_DIR="${OUTPUT_BASE}/metprd/${MetplusToolName}_obs"
+ OUTPUT_FN_TEMPLATE=$( eval echo ${OBS_CCPA_APCP_FN_TEMPLATE_PCPCOMBINE_OUTPUT} )
+ STAGING_DIR="${OUTPUT_BASE}/stage/${FIELDNAME_IN_MET_FILEDIR_NAMES}"
+
fi
#
#-----------------------------------------------------------------------
#
# Set the array of forecast hours for which to run the MET/METplus tool.
+# This is done by starting with the full list of forecast hours for which
+# there is forecast output and then removing from that list any forecast
+# hours for which there is no corresponding observation data (if combining
+# observed APCP) or forecast data (if combining forecast APCP).
#
#-----------------------------------------------------------------------
#
-if [ "${obs_or_fcst}" = "obs" ]; then
- base_dir="${OBS_INPUT_DIR}"
- fn_template="${OBS_INPUT_FN_TEMPLATE}"
- num_missing_files_max="${NUM_MISSING_OBS_FILES_MAX}"
-elif [ "${obs_or_fcst}" = "fcst" ]; then
+if [ "${FCST_OR_OBS}" = "FCST" ]; then
base_dir="${FCST_INPUT_DIR}"
fn_template="${FCST_INPUT_FN_TEMPLATE}"
num_missing_files_max="${NUM_MISSING_FCST_FILES_MAX}"
+elif [ "${FCST_OR_OBS}" = "OBS" ]; then
+ base_dir="${OBS_INPUT_DIR}"
+ fn_template="${OBS_INPUT_FN_TEMPLATE}"
+ num_missing_files_max="${NUM_MISSING_OBS_FILES_MAX}"
fi
set_vx_fhr_list \
@@ -273,8 +276,8 @@ fi
#
# First, set the base file names.
#
-metplus_config_tmpl_fn="${MetplusToolName}_${obs_or_fcst}"
-metplus_config_fn="${metplus_config_tmpl_fn}_${FIELDNAME_IN_MET_FILEDIR_NAMES}${ENSMEM_INDX:+_${ensmem_name}}"
+metplus_config_tmpl_fn="${MetplusToolName}"
+metplus_config_fn="${metplus_config_tmpl_fn}_$(echo_lowercase ${FCST_OR_OBS})_${FIELDNAME_IN_MET_FILEDIR_NAMES}${ENSMEM_INDX:+_${ensmem_name}}"
metplus_log_fn="${metplus_config_fn}_$CDATE"
#
# If operating on observation files, append the cycle date to the name
@@ -283,13 +286,13 @@ metplus_log_fn="${metplus_config_fn}_$CDATE"
# necessary to associate the configuration file with the cycle for which
# it is used).
#
-if [ "${obs_or_fcst}" = "obs" ]; then
+if [ "${FCST_OR_OBS}" = "OBS" ]; then
metplus_config_fn="${metplus_log_fn}"
fi
#
# Add prefixes and suffixes (extensions) to the base file names.
#
-metplus_config_tmpl_fn="${metplus_config_tmpl_fn}_${field}.conf"
+metplus_config_tmpl_fn="${metplus_config_tmpl_fn}.conf"
metplus_config_fn="${metplus_config_fn}.conf"
metplus_log_fn="metplus.log.${metplus_log_fn}"
#
@@ -326,10 +329,8 @@ settings="\
#
'metplus_config_fn': '${metplus_config_fn:-}'
'metplus_log_fn': '${metplus_log_fn:-}'
- 'obs_input_dir': '${OBS_INPUT_DIR:-}'
- 'obs_input_fn_template': '${OBS_INPUT_FN_TEMPLATE:-}'
- 'fcst_input_dir': '${FCST_INPUT_DIR:-}'
- 'fcst_input_fn_template': '${FCST_INPUT_FN_TEMPLATE:-}'
+ 'input_dir': '${FCST_INPUT_DIR:-${OBS_INPUT_DIR}}'
+ 'input_fn_template': '${FCST_INPUT_FN_TEMPLATE:-${OBS_INPUT_FN_TEMPLATE}}'
'output_base': '${OUTPUT_BASE}'
'output_dir': '${OUTPUT_DIR}'
'output_fn_template': '${OUTPUT_FN_TEMPLATE:-}'
@@ -349,21 +350,24 @@ settings="\
'fieldname_in_met_output': '${FIELDNAME_IN_MET_OUTPUT}'
'fieldname_in_met_filedir_names': '${FIELDNAME_IN_MET_FILEDIR_NAMES}'
'obtype': '${OBTYPE}'
+ 'FCST_OR_OBS': '${FCST_OR_OBS}'
'accum_hh': '${ACCUM_HH:-}'
'accum_no_pad': '${ACCUM_NO_PAD:-}'
- 'field_thresholds': '${FIELD_THRESHOLDS:-}'
+ 'metplus_templates_dir': '${METPLUS_CONF:-}'
+ 'input_field_group': '${VAR:-}'
+ 'input_level_fcst': '${FCST_LEVEL:-}'
+ 'input_thresh_fcst': '${FCST_THRESH:-}'
"
+
# Render the template to create a METplus configuration file
tmpfile=$( $READLINK -f "$(mktemp ./met_plus_settings.XXXXXX.yaml)")
-cat > $tmpfile << EOF
-$settings
-EOF
-
+printf "%s" "$settings" > "$tmpfile"
uw template render \
-i ${metplus_config_tmpl_fp} \
-o ${metplus_config_fp} \
--verbose \
- --values-file "${tmpfile}"
+ --values-file "${tmpfile}" \
+ --search-path "/"
err=$?
rm $tmpfile
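
A small aside on the '${FCST_INPUT_DIR:-${OBS_INPUT_DIR}}' idiom used in the
settings above: bash substitutes the forecast directory when that variable is
set and non-empty, and falls back to the observation directory otherwise. In
Python terms (with made-up paths):

    # Python analogue of bash's ${VAR:-fallback} expansion (hypothetical paths).
    fcst_input_dir = ''              # empty when FCST_OR_OBS is "OBS"
    obs_input_dir = '/path/to/obs'   # made-up example path
    input_dir = fcst_input_dir or obs_input_dir
    assert input_dir == '/path/to/obs'
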
diff --git a/ush/metplus/decouple_fcst_obs_vx_config.py b/ush/metplus/decouple_fcst_obs_vx_config.py
new file mode 100755
index 0000000000..afa001859c
--- /dev/null
+++ b/ush/metplus/decouple_fcst_obs_vx_config.py
@@ -0,0 +1,436 @@
+#!/usr/bin/env python3
+
+import os
+import sys
+import argparse
+import yaml
+
+import logging
+from textwrap import indent, dedent
+
+import pprint
+
+from pathlib import Path
+file = Path(__file__).resolve()
+home_dir = file.parents[2]
+ush_dir = Path(os.path.join(home_dir, 'ush')).resolve()
+sys.path.append(str(ush_dir))
+
+from python_utils import load_config_file
+
+
+def get_pprint_str(var, indent_str=''):
+ """
+ Function to format a python variable as a pretty-printed string and add
+ indentation.
+
+ Arguments:
+ ---------
+ var:
+ A variable.
+
+ indent_str:
+ String to be added to the beginning of each line of the pretty-printed
+ form of var. This usually consists of multiple space characters.
+
+ Returns:
+ -------
+ var_str:
+ Formatted string containing contents of variable.
+ """
+
+ var_str = pprint.pformat(var, compact=True, sort_dicts=False)
+ var_str = var_str.splitlines(True)
+ var_str = [indent_str + s for s in var_str]
+ var_str = ''.join(var_str)
+
+ return var_str
+
+
+def create_pprinted_msg(vars_dict, indent_str='', add_nl_after_varname=False):
+ """
+    Function to create an output message (string) containing one or more
+    variables' names, with each name followed (possibly after a newline) by
+    an equal sign and the pretty-printed value of the variable. Each variable
+    name starts on a new line.
+
+ Arguments:
+ ---------
+ vars_dict:
+ Dictionary containing the variable names (the keys) and their values
+ (the values).
+
+ indent_str:
+ String to be added to the beginning of each line of the string before
+ returning it. This usually consists of multiple space characters.
+
+ add_nl_after_varname:
+ Flag indicating whether to add a newline after the variable name (and
+ before the equal sign).
+
+ Returns:
+ -------
+ vars_str:
+    Formatted string containing the names and values of the variables.
+ """
+
+ space_or_nl = ' '
+ one_or_zero = 1
+ if add_nl_after_varname:
+ space_or_nl = '\n'
+ one_or_zero = 0
+
+ vars_str = ''
+ for var_name, var_value in vars_dict.items():
+ pprint_indent_str = ' '*(2 + one_or_zero*(1 + len(var_name)))
+ tmp = f'{var_name}' + space_or_nl + '= ' + \
+ get_pprint_str(var_value, pprint_indent_str).lstrip()
+ vars_str = '\n'.join([vars_str, tmp])
+
+ vars_str = indent(vars_str, indent_str)
+
+ return vars_str
+
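+# A quick illustration of create_pprinted_msg() behavior (a sketch, not part
+# of the interface): with the default arguments,
+#
+#   create_pprinted_msg({'fhr': [1, 2, 3]})
+#
+# returns the string '\nfhr = [1, 2, 3]', i.e. each variable name starts on a
+# new line and is followed by an equal sign and its pretty-printed value.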
+
+def extract_fcst_obs_vals_from_cpld(item_cpld):
+ """
+ Function to parse the "coupled" value of an item (obtained from the coupled
+ verification (vx) configuration dictionary) to extract from it the item's
+ value for forecasts and its value for observations. The coupled item
+ (item_cpld) is a string that may correspond to a field name, a level, or
+ a threshold. If item_cpld has the form
+
+ item_cpld = str1 + delim_str + str2
+
+ where delim_str is a delimiter string (e.g. delim_str may be set to '%%'),
+ then the forecast and observation values of the item are given by
+
+ item_fcst = str1
+ item_obs = str2
+
+ For example, if delim_str = '%%' and
+
+ item_cpld = 'ABCD%%EFGH'
+
+ then
+
+ item_fcst = 'ABCD'
+ item_obs = 'EFGH'
+
+    Alternatively, if delim_str is not a substring within item_cpld, both
+ return values will be identical to the input.
+
+ Arguments:
+ ---------
+    item_cpld:
+    String representing a "coupled" item (field name, level, or threshold)
+    containing both the item's forecast value and its observation value.
+
+ Returns:
+ -------
+ item_fcst, item_obs:
+ Strings containing the values of the item for forecasts and observations,
+ respectively.
+ """
+
+ # Set the delimiter string.
+ delim_str = '%%'
+
+ # Parse the string containing the coupled value of the item to extract
+ # its forecast and observation values.
+ if delim_str in item_cpld:
+ if item_cpld.count(delim_str) == 1:
+ item_fcst, item_obs = item_cpld.split(delim_str)
+ else:
+ msg = dedent(f"""
+ The delimiter string (delim_str) appears more than once in the current
+ coupled item value (item_cpld):
+ delim_str = {get_pprint_str(delim_str)}
+ item_cpld = {get_pprint_str(item_cpld)}
+ Stopping.
+ """)
+ logging.error(msg)
+ raise ValueError(msg)
+ else:
+ item_fcst = item_cpld
+ item_obs = item_cpld
+
+ return item_fcst, item_obs
+
+
+def decouple_fcst_obs_vx_config(vx_type, outfile_type, outdir='./', log_lvl='info', log_fp=''):
+ """
+ This function reads from a yaml configuration file the coupled verification
+ (vx) configuration dictionary and parses it (i.e. decouples its contents)
+ to produce two new configuration dictionaries -- one for forecasts and
+ another for observations. Here, by "coupled" dictionary, we mean one that
+ contains items (keys and values) that store the forecast and observation
+ values for various quantities (field names, levels, and thresholds) in
+ combined/coupled form. (See the documentation for the function
+ extract_fcst_obs_vals_from_cpld() for more details of this coupled form.)
+ This function then writes the two separate (decoupled) vx configuration
+ dictionaries (one for forecasts and the other for observations) to a file.
+
+ Arguments:
+ ---------
+ vx_type:
+ Type of verification for which the coupled dictionary to be read in
+ applies. This can be 'det' (for deterministic verification) or 'ens'
+ (for ensemble verification).
+ outfile_type:
+ Type of the output file. This can be 'txt' (for the output to be saved
+ in a pretty-printed text file) or 'yaml' (for the output to be saved in
+    a yaml-formatted file). Here, the "output" consists of the two separate
+    vx configuration dictionaries (one for forecasts and another for observations).
+ outdir:
+ The directory in which to save the output file.
+ log_lvl:
+ The logging level to use.
+ log_fp:
+ Path to the log file. Default is an empty string, so that logging output
+ is sent to stdout.
+
+ Returns:
+ -------
+ None
+ """
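+
+    # Illustrative (hypothetical) shape of the coupled configuration read in
+    # below: field group -> coupled field name -> coupled level -> list of
+    # coupled thresholds, with '%%' as the delimiter between forecast and
+    # observation values. Schematic values only; the real contents live in
+    # parm/metplus/vx_config_<vx_type>.yaml:
+    #
+    #   ADPSFC:
+    #     CRAIN%%PRWE:
+    #       <level_fcst>%%<level_obs>:
+    #         - '<thresh_fcst>%%<thresh_obs>'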
+
+ # Set up logging.
+    log_level = log_lvl.upper()
+ fmt = "[%(levelname)s:%(name)s: %(filename)s, line %(lineno)s: %(funcName)s()] %(message)s"
+ if log_fp:
+ logging.basicConfig(level=log_level, format=fmt, filename=log_fp, filemode='w')
+ else:
+        logging.basicConfig(level=log_level, format=fmt)
+
+ # Load the yaml file containing the coupled forecast-and-observations
+ # verification (vx) configuration dictionary.
+ metplus_conf_dir = Path(os.path.join(home_dir, 'parm', 'metplus')).resolve()
+ config_fn = ''.join(['vx_config_', vx_type, '.yaml'])
+ config_fp = Path(os.path.join(metplus_conf_dir, config_fn)).resolve()
+ fgs_fields_levels_threshes_cpld = load_config_file(config_fp)
+
+ msg = create_pprinted_msg(
+ vars_dict = {'fgs_fields_levels_threshes_cpld': fgs_fields_levels_threshes_cpld},
+ indent_str = ' '*0,
+ add_nl_after_varname = True)
+ logging.debug(msg)
+
+ # Loop through the field groups in the coupled vx configuration dictionary
+ # and generate two separate vx configuration dictionaries, one for forecasts
+ # and another for observations.
+ fgs_fields_levels_threshes_fcst = {}
+ fgs_fields_levels_threshes_obs = {}
+ indent_incr = 4
+ indent_size = indent_incr
+ indent_str = ' '*indent_size
+ for field_group, fields_levels_threshes_cpld in fgs_fields_levels_threshes_cpld.items():
+
+ msg = create_pprinted_msg(
+ vars_dict = {'field_group': field_group},
+ indent_str = indent_str)
+ logging.debug(msg)
+
+ # Loop over the field names associated with the current field group.
+ #
+ # Note that the following variables have to be lists of dictionaries
+ # (where each dictionary contains only one key-value pair) instead of
+ # dictionaries because the field names might be repeated and thus cannot
+ # be used as dictionary keys. For example, in the ADPSFC field group,
+ # the forecast fields CRAIN, CSNOW, CFRZR, and CICEP all have the
+ # corresponding observation field PRWE but with different thresholds,
+ # so although fields_levels_threshes_fcst could be a dictionary with
+ # CRAIN, CSNOW, CFRZR, and CICEP as keys, fields_levels_threshes_obs
+ # cannot be a dictionary because the string PRWE cannot be used as a key
+ # more than once.
+ fields_levels_threshes_fcst = []
+ fields_levels_threshes_obs = []
+ indent_size += indent_incr
+ indent_str = ' '*indent_size
+ for field_cpld, levels_threshes_cpld in fields_levels_threshes_cpld.items():
+
+ msg = create_pprinted_msg(
+ vars_dict = {'field_cpld': field_cpld},
+ indent_str = indent_str)
+ logging.debug(msg)
+
+ # Parse the current coupled field name to extract the forecast and
+ # observation field names.
+ field_fcst, field_obs = extract_fcst_obs_vals_from_cpld(field_cpld)
+
+ msg = create_pprinted_msg(
+ vars_dict = {'field_fcst': field_fcst, 'field_obs': field_obs},
+ indent_str = indent_str)
+ logging.debug(msg)
+
+ # Loop over the levels associated with the current field.
+ levels_threshes_fcst = {}
+ levels_threshes_obs = {}
+ indent_size += indent_incr
+ indent_str = ' '*indent_size
+ for level_cpld, threshes_cpld in levels_threshes_cpld.items():
+
+ msg = create_pprinted_msg(
+ vars_dict = {'level_cpld': level_cpld},
+ indent_str = indent_str)
+ logging.debug(msg)
+
+ # Parse the current coupled level to extract the forecast and observation
+ # levels.
+ level_fcst, level_obs = extract_fcst_obs_vals_from_cpld(level_cpld)
+
+ msg = create_pprinted_msg(
+ vars_dict = {'level_fcst': level_fcst, 'level_obs': level_obs},
+ indent_str = indent_str)
+ logging.debug(msg)
+
+ # Loop over the thresholds associated with the current level.
+ threshes_fcst = []
+ threshes_obs = []
+ indent_size += indent_incr
+ indent_str = ' '*indent_size
+ for thresh_cpld in threshes_cpld:
+
+ msg = create_pprinted_msg(
+ vars_dict = {'thresh_cpld': thresh_cpld},
+ indent_str = indent_str)
+ logging.debug(msg)
+
+ # Parse the current coupled threshold to extract the forecast and
+ # observation thresholds.
+ thresh_fcst, thresh_obs = extract_fcst_obs_vals_from_cpld(thresh_cpld)
+
+ msg = create_pprinted_msg(
+ vars_dict = {'thresh_fcst': thresh_fcst, 'thresh_obs': thresh_obs},
+ indent_str = indent_str)
+ logging.debug(msg)
+
+ threshes_fcst.append(thresh_fcst)
+ threshes_obs.append(thresh_obs)
+
+ indent_size -= indent_incr
+ indent_str = ' '*indent_size
+ msg = create_pprinted_msg(
+ vars_dict = {'threshes_fcst': threshes_fcst,
+ 'threshes_obs': threshes_obs},
+ indent_str = indent_str,
+ add_nl_after_varname = True)
+ logging.debug(msg)
+
+ levels_threshes_fcst[level_fcst] = threshes_fcst
+ levels_threshes_obs[level_obs] = threshes_obs
+
+ indent_size -= indent_incr
+ indent_str = ' '*indent_size
+ msg = create_pprinted_msg(
+ vars_dict = {'levels_threshes_fcst': levels_threshes_fcst,
+ 'levels_threshes_obs': levels_threshes_obs},
+ indent_str = indent_str,
+ add_nl_after_varname = True)
+ logging.debug(msg)
+
+ fields_levels_threshes_fcst.append({field_fcst: levels_threshes_fcst})
+ fields_levels_threshes_obs.append({field_obs: levels_threshes_obs})
+
+ indent_size -= indent_incr
+ indent_str = ' '*indent_size
+ msg = create_pprinted_msg(
+ vars_dict = {'fields_levels_threshes_fcst': fields_levels_threshes_fcst,
+ 'fields_levels_threshes_obs': fields_levels_threshes_obs},
+ indent_str = indent_str,
+ add_nl_after_varname = True)
+ logging.debug(msg)
+
+ fgs_fields_levels_threshes_fcst[field_group] = fields_levels_threshes_fcst
+ fgs_fields_levels_threshes_obs[field_group] = fields_levels_threshes_obs
+
+ indent_size -= indent_incr
+ indent_str = ' '*indent_size
+ msg = create_pprinted_msg(
+ vars_dict = {'fgs_fields_levels_threshes_fcst': fgs_fields_levels_threshes_fcst,
+ 'fgs_fields_levels_threshes_obs': fgs_fields_levels_threshes_obs},
+ indent_str = indent_str,
+ add_nl_after_varname = True)
+ logging.debug(msg)
+
+ # We now have a verification configuration dictionary for forecasts and
+ # a separate one for the observations. To conveniently write these to a
+ # file, first place (wrap) them in a higher-level dictionary.
+ vx_config_dict = {'fcst': fgs_fields_levels_threshes_fcst,
+ 'obs': fgs_fields_levels_threshes_obs}
+
+ # Write the contents of the higher-level dictionary to file.
+ output_fn = ''.join(['vx_config_', vx_type, '.', outfile_type])
+ output_fp = Path(os.path.join(outdir, output_fn)).resolve()
+    with open(output_fp, 'w') as f:
+        if outfile_type == 'txt':
+            dict_to_str = get_pprint_str(vx_config_dict, ' ')
+            f.write(dict_to_str)
+        elif outfile_type == 'yaml':
+            yaml.dump(vx_config_dict, f)
+
+ return None
+#
+# -----------------------------------------------------------------------
+#
+# Call the function defined above.
+#
+# -----------------------------------------------------------------------
+#
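+# Example invocation (illustrative; the flags follow the argparse setup
+# below, and the output directory shown is hypothetical):
+#
+#   ./decouple_fcst_obs_vx_config.py --vx_type det --outfile_type yaml \
+#       --outdir /path/to/expt_dir
+#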
+if __name__ == "__main__":
+
+ parser = argparse.ArgumentParser(
+ description='Read in and process verification configuration file'
+ )
+
+    parser.add_argument('--vx_type',
+                        type=str,
+                        required=True,
+                        choices=['det', 'ens'],
+ help=dedent(f"""
+ String that determines whether to read in the deterministic or ensemble
+ verification configuration file.
+ """))
+
+    parser.add_argument('--outfile_type',
+                        type=str,
+                        required=True,
+                        choices=['txt', 'yaml'],
+ help=dedent(f"""
+ Type of output file. The output consists of a high-level dictionary
+ containing two keys: 'fcst' and 'obs'. The value of 'fcst' is the vx
+ configuration dictionary for forecasts, and the value of 'obs' is the vx
+    dictionary for observations. If outfile_type is set to 'txt', this
+    high-level dictionary is saved to a text file in a form that can be read in by
+ the SRW App's ex-scripts for the verification tasks. In particular, this
+ form contains the curly braces and brackets that define dictionaries and
+ lists in python code (but that would normally not appear in a yaml file).
+ If outfile_type is set to 'yaml', then the high-level dictionary is saved
+ to a yaml-formatted file.
+ """))
+
+ parser.add_argument('--outdir',
+ type=str,
+ required=False,
+ default='./',
+ help=dedent(f"""
+ Directory in which to place the output file containing the decoupled
+ (i.e. with forecast and observation information placed in separate data
+    structures) verification configuration information.
+ """))
+
+ args = parser.parse_args()
+
+ decouple_fcst_obs_vx_config(vx_type=args.vx_type, outfile_type=args.outfile_type, outdir=args.outdir)
+
diff --git a/ush/set_vx_fhr_list.sh b/ush/set_vx_fhr_list.sh
index 8a1c9735a5..5cefc78365 100644
--- a/ush/set_vx_fhr_list.sh
+++ b/ush/set_vx_fhr_list.sh
@@ -253,7 +253,7 @@ METplus configuration file.
#
fhr_list=$( echo "${fhr_list}" | $SED "s/^,//g" )
print_info_msg "$VERBOSE" "\
-Final (i.e. after filtering for missing files) set of foreast hours is
+Final (i.e. after filtering for missing files) set of forecast hours is
(written as a single string):
fhr_list = \"${fhr_list}\"
"
From fc10bdbe140b74ffca8e4894ed4bae3321326f99 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Tue, 7 May 2024 09:22:12 -0400
Subject: [PATCH 10/39] [develop] Bump jinja2 from 3.1.3 to 3.1.4 in /doc
(#1080)
Bumps [jinja2](https://github.com/pallets/jinja) from 3.1.3 to 3.1.4.
- [Release notes](https://github.com/pallets/jinja/releases)
- [Changelog](https://github.com/pallets/jinja/blob/main/CHANGES.rst)
- [Commits](https://github.com/pallets/jinja/compare/3.1.3...3.1.4)
---
updated-dependencies:
- dependency-name: jinja2
dependency-type: indirect
...
Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
doc/requirements.txt | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/doc/requirements.txt b/doc/requirements.txt
index eadc94dcaf..a2f32cd83f 100644
--- a/doc/requirements.txt
+++ b/doc/requirements.txt
@@ -22,7 +22,7 @@ idna==3.7
# via requests
imagesize==1.4.1
# via sphinx
-jinja2==3.1.3
+jinja2==3.1.4
# via sphinx
latexcodec==2.0.1
# via pybtex
From a712ef15fc42c88012f2cd926712dedbd3a8f91e Mon Sep 17 00:00:00 2001
From: gsketefian <31046882+gsketefian@users.noreply.github.com>
Date: Mon, 13 May 2024 07:16:35 -0600
Subject: [PATCH 11/39] [develop] Simplify the way the configuration of the vx
is handled (#1082)
The parse_vx_config_[det|ens] tasks and the decouple_fcst_obs_vx_config.py script are removed (so that the intermediate configuration files are no longer created). The separation into forecast and observation values of the "coupled" information in the vx configuration files is now performed in the jinja2 templates for the METplus configuration files, hiding these details from the user.
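
For reference, a minimal sketch of the decoupling pattern the jinja2
templates now use (the names below match the template code in this patch;
the '%%' delimiter comes from metplus_macros.set_delim_str()):

    {%- set delim_str = metplus_macros.set_delim_str() %}
    {%- if delim_str in field_cpld %}
      {%- set field_fcst, field_obs = field_cpld.split(delim_str) %}
    {%- else %}
      {%- set field_fcst = field_cpld %}
      {%- set field_obs = field_cpld %}
    {%- endif %}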
---
jobs/JREGIONAL_PARSE_VX_CONFIG | 97 ----
parm/metplus/EnsembleStat.conf | 215 ++++-----
parm/metplus/GenEnsProd.conf | 142 ++++--
parm/metplus/GridStat_ensmean.conf | 236 +++++-----
parm/metplus/GridStat_ensprob.conf | 208 ++++-----
parm/metplus/GridStat_or_PointStat.conf | 399 +++++++---------
parm/metplus/PointStat_ensmean.conf | 228 +++++----
parm/metplus/PointStat_ensprob.conf | 198 ++++----
parm/metplus/metplus_macros.jinja | 122 +----
parm/metplus/vx_config_det.yaml | 204 ++++----
parm/wflow/verify_det.yaml | 24 -
parm/wflow/verify_ens.yaml | 24 -
scripts/exregional_parse_vx_config.sh | 94 ----
...onal_run_met_genensprod_or_ensemblestat.sh | 77 ++--
...gional_run_met_gridstat_or_pointstat_vx.sh | 77 ++--
...un_met_gridstat_or_pointstat_vx_ensmean.sh | 77 ++--
...un_met_gridstat_or_pointstat_vx_ensprob.sh | 77 ++--
ush/metplus/decouple_fcst_obs_vx_config.py | 436 ------------------
18 files changed, 1061 insertions(+), 1874 deletions(-)
delete mode 100755 jobs/JREGIONAL_PARSE_VX_CONFIG
delete mode 100755 scripts/exregional_parse_vx_config.sh
delete mode 100755 ush/metplus/decouple_fcst_obs_vx_config.py
diff --git a/jobs/JREGIONAL_PARSE_VX_CONFIG b/jobs/JREGIONAL_PARSE_VX_CONFIG
deleted file mode 100755
index c1cbba8e34..0000000000
--- a/jobs/JREGIONAL_PARSE_VX_CONFIG
+++ /dev/null
@@ -1,97 +0,0 @@
-#!/usr/bin/env bash
-
-#
-#-----------------------------------------------------------------------
-#
-#
-#
-#-----------------------------------------------------------------------
-#
-
-#
-#-----------------------------------------------------------------------
-#
-# Source the variable definitions file and the bash utility functions.
-#
-#-----------------------------------------------------------------------
-#
-. $USHdir/source_util_funcs.sh
-source_config_for_task "task_parse_vx_config" ${GLOBAL_VAR_DEFNS_FP}
-. $USHdir/job_preamble.sh
-#
-#-----------------------------------------------------------------------
-#
-# Save current shell options (in a global array). Then set new options
-# for this script/function.
-#
-#-----------------------------------------------------------------------
-#
-{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1
-#
-#-----------------------------------------------------------------------
-#
-# Get the full path to the file in which this script/function is located
-# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in
-# which the file is located (scrfunc_dir).
-#
-#-----------------------------------------------------------------------
-#
-scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" )
-scrfunc_fn=$( basename "${scrfunc_fp}" )
-scrfunc_dir=$( dirname "${scrfunc_fp}" )
-#
-#-----------------------------------------------------------------------
-#
-# Print message indicating entry into script.
-#
-#-----------------------------------------------------------------------
-#
-print_info_msg "
-========================================================================
-Entering script: \"${scrfunc_fn}\"
-In directory: \"${scrfunc_dir}\"
-
-This is the J-job script for the task that reads in the \"coupled\" yaml
-verification (vx) configuration file (python dictionary) and generates
-from it two \"decoupled\" vx configuration dictionaries, one for forecasts
-and another for observations. The task then writes these two decoupled
-dictionaries to a new configuration file in the experiment directory
-that can be read by downstream vx tasks.
-
-Note:
-The \"coupled\" vx configuration file contains items (dictionary keys and
-values representing field names, levels, and thresholds) that consist of
-both the forecast and the observation value for that item separated by a
-delimiter string. Thus, they first need to be separated (decoupled) into
-a value for forecasts and one for the observations before they can be
-further processed.
-========================================================================"
-#
-#-----------------------------------------------------------------------
-#
-# Call the ex-script for this J-job and pass to it the necessary varia-
-# bles.
-#
-#-----------------------------------------------------------------------
-#
-$SCRIPTSdir/exregional_parse_vx_config.sh || \
-print_err_msg_exit "\
-Call to ex-script corresponding to J-job \"${scrfunc_fn}\" failed."
-#
-#-----------------------------------------------------------------------
-#
-# Run job postamble.
-#
-#-----------------------------------------------------------------------
-#
-job_postamble
-#
-#-----------------------------------------------------------------------
-#
-# Restore the shell options saved at the beginning of this script/func-
-# tion.
-#
-#-----------------------------------------------------------------------
-#
-{ restore_shell_opts; } > /dev/null 2>&1
-
diff --git a/parm/metplus/EnsembleStat.conf b/parm/metplus/EnsembleStat.conf
index 1ca46b961e..2caeda1521 100644
--- a/parm/metplus/EnsembleStat.conf
+++ b/parm/metplus/EnsembleStat.conf
@@ -242,136 +242,123 @@ Import the file containing jinja macros.
{#-
Jinja requires certain variables to be defined globally within the template
-before they can be used in if-statements and other scopes (see Jinja
-scoping rules). Define such variables.
+before they can be used in if-statements and other scopes (see Jinja scoping
+rules). Define such variables.
#}
+{%- set indx_level_fcst = '' %}
+{%- set indx_input_thresh_fcst = '' %}
+{%- set error_msg = '' %}
+{%- set opts_indent = '' %}
+{%- set opts_indent_len = '' %}
+
+{%- set field_fcst = '' %}
+{%- set field_obs = '' %}
{%- set level_fcst = '' %}
{%- set level_obs = '' %}
-{%- set indx_level_fcst = '' %}
+{%- set thresh_fcst = '' %}
+{%- set thresh_obs = '' %}
-{%- set valid_threshes_fcst = [] %}
-{%- set valid_threshes_obs = [] %}
{%- set threshes_fcst = [] %}
{%- set threshes_obs = [] %}
-{%- set indx_input_thresh_fcst = '' %}
-
-{%- set opts_indent = '' %}
-{%- set opts_indent_len = '' %}
-{%- set tmp = '' %}
-{%- set error_msg = '' %}
-{#-
-Make sure that the set of field groups for forecasts and observations
-are identical.
-#}
-{%- set fgs_fcst = vx_config_dict['fcst'].keys()|list %}
-{%- set fgs_obs = vx_config_dict['obs'].keys()|list %}
-{%- if (fgs_fcst != fgs_obs) %}
- {%- set error_msg = '\n' ~
-'The list of valid field groups for forecasts (fgs_fcst) must be identical\n' ~
-'to that for observations (fgs_obs) but isn\'t:\n' ~
-' fgs_fcst = ' ~ fgs_fcst ~ '\n' ~
-' fgs_obs = ' ~ fgs_obs %}
- {{metplus_macros.print_err_and_quit(error_msg)}}
-{%- endif %}
{#-
-Extract the lists of forecast and observation dictionaries containing
-the valid fields, levels, and thresholds corresponding to the specified
-field group (input_field_group). Note that it would be simpler to have
-these be just dictionaries in which the keys are the field names (instead
-of them being LISTS of dictionaries in which each dictionary contains a
-single key that is the field name), but that approach cannot be used here
-because it is possible for field names to be repeated (for both forecasts
-and observations). For example, in the observations, the field name
-'PRWE' appears more than once, each time with a different threshold, and
-the combination of name and threshold is what constitutes a unique field,
-not just the name by itself.
+Get the set of valid field groups and ensure that the specified input
+field group appears in this list.
#}
-{%- set fields_levels_threshes_fcst = vx_config_dict['fcst'][input_field_group] %}
-{%- set fields_levels_threshes_obs = vx_config_dict['obs'][input_field_group] %}
+{%- set valid_field_groups = vx_config_dict.keys()|list %}
+{{- metplus_macros.check_field_group(valid_field_groups, input_field_group) }}
{#-
-Reset the specified forecast level so that if it happens to be an
-accumulation (e.g. 'A03'), the leading zeros in front of the hour are
-stipped out (e.g. reset to 'A3').
+Reset the input forecast level so that if it happens to be an accumulation
+(e.g. 'A03'), the leading zeros in front of the hour are stripped out (e.g.
+reset to 'A3').
#}
{%- set input_level_fcst = metplus_macros.get_accumulation_no_zero_pad(input_level_fcst) %}
{#-
-Ensure that the specified input forecast level(s) (input_level_fcst) and
-threshold(s) (input_thresh_fcst) are valid, i.e. that they are in the
-set(s) of valid forecast levels and thresholds, respectively, specified
-in fields_levels_threshes_fcst.
+Extract from the configuration dictionary the set (which itself is a
+dictionary) of fields, levels, and thresholds corresponding to the input
+field group. Then set the delimiter string that separates forecast and
+observation values in the various items (i.e. dictionary keys and values
+representing field names, levels, and thresholds) in this dictionary.
#}
-{{- metplus_macros.check_level(fields_levels_threshes_fcst, input_level_fcst) }}
-{{- metplus_macros.check_thresh(fields_levels_threshes_fcst, input_level_fcst, input_thresh_fcst) }}
+{%- set fields_levels_threshes_cpld = vx_config_dict[input_field_group] %}
+{%- set delim_str = metplus_macros.set_delim_str() %}
{#-
-For convenience, create lists of valid forecast and observation field
-names.
+Loop over the fields and set field names, levels, thresholds, and/or
+options for each field, both for forecasts and for observations, in the
+METplus configuration file.
#}
-{%- set num_valid_fields_fcst = fields_levels_threshes_fcst|length %}
-{%- set valid_fields_fcst = [] %}
-{%- for i in range(0,num_valid_fields_fcst) %}
- {%- set field = fields_levels_threshes_fcst[i].keys()|list|join('') %}
- {%- set tmp = valid_fields_fcst.append(field) %}
-{%- endfor %}
+{%- set ns = namespace(var_count = 0) %}
+{%- for field_cpld, levels_threshes_cpld in fields_levels_threshes_cpld.items() %}
-{%- set valid_fields_obs = [] %}
-{%- set num_valid_fields_obs = fields_levels_threshes_obs|length %}
-{%- for i in range(0,num_valid_fields_obs) %}
- {%- set field = fields_levels_threshes_obs[i].keys()|list|join('') %}
- {%- set tmp = valid_fields_obs.append(field) %}
-{%- endfor %}
+ {%- if delim_str in field_cpld %}
+ {%- set field_fcst, field_obs = field_cpld.split(delim_str) %}
+ {%- else %}
+ {%- set field_fcst = field_cpld %}
+ {%- set field_obs = field_cpld %}
+ {%- endif %}
{#-
-Ensure that the number of valid fields for forecasts is equal to that
-for the observations.
+For convenience, create lists of valid forecast and observation levels
+for the current field.
#}
-{%- set num_valid_fields = 0 %}
-{%- if (num_valid_fields_fcst != num_valid_fields_obs) %}
- {%- set error_msg = '\n' ~
-'The number of valid forecast fields (num_valid_fields_fcst) must be\n' ~
-'equal to the number of valid observation fields (num_valid_fields_obs)\n' ~
-'but isn\'t:\n' ~
-' num_valid_fields_fcst = ' ~ num_valid_fields_fcst ~ '\n' ~
-' num_valid_fields_obs = ' ~ num_valid_fields_obs ~ '\n' ~
-'The lists of valid forecast and observation fields are:\n' ~
-' valid_fields_fcst = ' ~ valid_fields_fcst ~ '\n' ~
-' valid_fields_obs = ' ~ valid_fields_obs ~ '\n' %}
- {{metplus_macros.print_err_and_quit(error_msg)}}
-{%- else %}
- {%- set num_valid_fields = num_valid_fields_fcst %}
-{%- endif %}
+ {%- set valid_levels_fcst = [] %}
+ {%- set valid_levels_obs = [] %}
+ {%- for level_cpld, threshes_cpld in levels_threshes_cpld.items() %}
+ {%- if delim_str in level_cpld %}
+ {%- set level_fcst, level_obs = level_cpld.split(delim_str) %}
+ {%- else %}
+ {%- set level_fcst = level_cpld %}
+ {%- set level_obs = level_cpld %}
+ {%- endif %}
+ {%- set tmp = valid_levels_fcst.append(level_fcst) %}
+ {%- set tmp = valid_levels_obs.append(level_obs) %}
+ {%- endfor %}
{#-
-Loop over the valid fields and set field names, levels, thresholds, and/
-or options for each field, both for forecasts and for obseratiions, in
-the METplus configuration file.
+Make sure that the input forecast level (input_level_fcst) is set to a
+valid value.
#}
-{%- set ns = namespace(var_count = 0) %}
-{%- for i in range(0,num_valid_fields) %}
-
- {%- set field_fcst = valid_fields_fcst[i] %}
- {%- set field_obs = valid_fields_obs[i] %}
+ {%- if (input_level_fcst != 'all') and (input_level_fcst not in valid_levels_fcst) %}
+ {%- set error_msg = '\n' ~
+'The input forecast level (input_level_fcst) must be set either to \'all\'\n' ~
+'or to one of the elements in the list of valid levels (valid_levels_fcst)\n' ~
+'for the current forecast field (field_fcst). This is not the case:\n' ~
+' field_fcst = ' ~ field_fcst ~ '\n' ~
+' valid_levels_fcst = ' ~ valid_levels_fcst ~ '\n' ~
+' input_level_fcst = ' ~ input_level_fcst ~ '\n' %}
+ {{metplus_macros.print_err_and_quit(error_msg)}}
+ {%- endif %}
{#-
-For convenience, create lists of valid forecast and observation levels
-for the current field. Then check that the number of valid levels for
-forecasts is the same as that for observations.
+Loop over the (coupled) levels and corresponding lists of thresholds.
+Extract from these the level values for forecasts and observations and
+use them to set the forecast and observation field names, levels,
+thresholds, and/or options in the METplus configuration file.
#}
- {%- set valid_levels_fcst = fields_levels_threshes_fcst[i][field_fcst].keys()|list %}
- {%- set valid_levels_obs = fields_levels_threshes_obs[i][field_obs].keys()|list %}
+ {%- for level_cpld, threshes_cpld in levels_threshes_cpld.items() %}
-{#-
-Extract dictionary of valid forecast levels (the dictionary keys) and
-corresponding lists of valid thresholds (the values) for each level.
-Then loop over these levels and corresponding lists of thresholds to set
-both the forecast and observation field names, levels, thresholds, and/or
-options.
-#}
- {%- set valid_levels_threshes_fcst = fields_levels_threshes_fcst[i][field_fcst] %}
- {%- for level_fcst, valid_threshes_fcst in valid_levels_threshes_fcst.items() %}
+ {%- if delim_str in level_cpld %}
+ {%- set level_fcst, level_obs = level_cpld.split(delim_str) %}
+ {%- else %}
+ {%- set level_fcst = level_cpld %}
+ {%- set level_obs = level_cpld %}
+ {%- endif %}
+
+ {%- set valid_threshes_fcst = [] %}
+ {%- set valid_threshes_obs = [] %}
+ {%- for thresh_cpld in threshes_cpld %}
+ {%- if delim_str in thresh_cpld %}
+ {%- set thresh_fcst, thresh_obs = thresh_cpld.split(delim_str) %}
+ {%- else %}
+ {%- set thresh_fcst = thresh_cpld %}
+ {%- set thresh_obs = thresh_cpld %}
+ {%- endif %}
+ {%- set tmp = valid_threshes_fcst.append(thresh_fcst) %}
+ {%- set tmp = valid_threshes_obs.append(thresh_obs) %}
+ {%- endfor %}
{%- if (input_level_fcst == 'all') or (input_level_fcst == level_fcst) %}
{#-
@@ -415,17 +402,19 @@ to the full set of valid values.
{%- set threshes_fcst = valid_threshes_fcst %}
{#-
If input_thresh_fcst is set to a specific value:
- 1) Ensure that input_thresh_fcst exists in the list of valid forecast
- thresholds.
- 2) Get the index of input_thresh_fcst in the list of valid forecast
- thresholds. This will be needed later below when setting the
- observation threshold(s).
- 3) Use this index to set the forecast threshold to a one-element list
- containing the specified forecast threshold.
+* If that value is valid, i.e. it exists in the list of valid forecast
+ thresholds, get its index in that list and use it to set the forecast
+ threshold to a one-element list containing that value. Note that the
+ index will be needed later below when setting the observation threshold(s).
+* If the input forecast threshold is not valid, print out an error message
+ and exit.
#}
{%- else %}
- {%- if input_thresh_fcst not in valid_threshes_fcst %}
+ {%- if input_thresh_fcst in valid_threshes_fcst %}
+ {%- set indx_input_thresh_fcst = valid_threshes_fcst.index(input_thresh_fcst) %}
+ {%- set threshes_fcst = [valid_threshes_fcst[indx_input_thresh_fcst]] %}
+ {%- else %}
{%- set error_msg = '\n' ~
'For the current forecast field (field_fcst) and forecast level (level_fcst),\n' ~
'the input forecast threshold (input_thresh_fcst) does not exist in the list\n' ~
@@ -436,8 +425,6 @@ If input_thresh_fcst is set to a specific value:
' input_thresh_fcst = ' ~ input_thresh_fcst ~ '\n' %}
{{metplus_macros.print_err_and_quit(error_msg)}}
{%- endif %}
- {%- set indx_input_thresh_fcst = valid_threshes_fcst.index(input_thresh_fcst) %}
- {%- set threshes_fcst = [valid_threshes_fcst[indx_input_thresh_fcst]] %}
{%- endif %}
{#-
@@ -525,7 +512,7 @@ Set observation field name. Note that this has to exactly match the name
of the field in the input observation file.
For accumulated fields, the input observation file is generated by MET's
-PcpCombine tool. In that file, the field name consists of the observation
+PcpCombine tool. In that file, the field name consists of the observation
field name here (field_obs) with the accumulation period appended to it
(separated by an underscore), so we must do the same here to get an exact
match.
@@ -557,11 +544,6 @@ set to 'none'.
#}
{%- if (input_thresh_fcst != 'none') %}
{#-
-Set the list of valid observation thresholds to the one corresponding to
-the current observation level (level_obs).
-#}
- {%- set valid_threshes_obs = fields_levels_threshes_obs[i][field_obs][level_obs] %}
-{#-
If input_thresh_fcst is set to 'all', set the list of observation thresholds
to the full set of valid values.
#}
@@ -653,6 +635,7 @@ OBS_VAR{{ns.var_count}}_OPTIONS = desc = "TKE";
{%- endif %}
{%- endif %}
+
{#-
Print out a newline to separate the settings for the current field (both
forecast and observation settings) from those for the next field.
diff --git a/parm/metplus/GenEnsProd.conf b/parm/metplus/GenEnsProd.conf
index 7291ce02fa..6c47cedb0d 100644
--- a/parm/metplus/GenEnsProd.conf
+++ b/parm/metplus/GenEnsProd.conf
@@ -124,68 +124,110 @@ Import the file containing jinja macros.
{#-
Jinja requires certain variables to be defined globally within the template
-before they can be used in if-statements and other scopes (see Jinja
-scoping rules). Define such variables.
+before they can be used in if-statements and other scopes (see Jinja scoping
+rules). Define such variables.
#}
-{%- set threshes_fcst = [] %}
{%- set indx_input_thresh_fcst = '' %}
-
+{%- set error_msg = '' %}
{%- set opts_indent = '' %}
{%- set opts_indent_len = '' %}
-{%- set tmp = '' %}
-{%- set error_msg = '' %}
+
+{%- set field_fcst = '' %}
+{%- set level_fcst = '' %}
+{%- set thresh_fcst = '' %}
+
+{%- set threshes_fcst = [] %}
{#-
-Extract the list of forecast dictionaries containing the valid fields,
-levels, and thresholds corresponding to the specified field group
-(input_field_group).
+Get the set of valid field groups and ensure that the specified input
+field group appears in this list.
#}
-{%- set fields_levels_threshes_fcst = vx_config_dict['fcst'][input_field_group] %}
+{%- set valid_field_groups = vx_config_dict.keys()|list %}
+{{- metplus_macros.check_field_group(valid_field_groups, input_field_group) }}
{#-
-Reset the specified forecast level so that if it happens to be an
-accumulation (e.g. 'A03'), the leading zeros in front of the hour are
-stipped out (e.g. reset to 'A3').
+Reset the input forecast level so that if it happens to be an accumulation
+(e.g. 'A03'), the leading zeros in front of the hour are stripped out (e.g.
+reset to 'A3').
#}
{%- set input_level_fcst = metplus_macros.get_accumulation_no_zero_pad(input_level_fcst) %}
{#-
-Ensure that the specified input forecast level(s) (input_level_fcst) and
-threshold(s) (input_thresh_fcst) are valid, i.e. that they are in the
-set(s) of valid forecast levels and thresholds, respectively, specified
-in fields_levels_threshes_fcst.
+Extract from the configuration dictionary the set (which itself is a
+dictionary) of fields, levels, and thresholds corresponding to the input
+field group. Then set the delimiter string that separates forecast and
+observation values in the various items (i.e. dictionary keys and values
+representing field names, levels, and thresholds) in this dictionary.
#}
-{{- metplus_macros.check_level(fields_levels_threshes_fcst, input_level_fcst) }}
-{{- metplus_macros.check_thresh(fields_levels_threshes_fcst, input_level_fcst, input_thresh_fcst) }}
+{%- set fields_levels_threshes_cpld = vx_config_dict[input_field_group] %}
+{%- set delim_str = metplus_macros.set_delim_str() %}
{#-
-For convenience, create lists of valid forecast field names.
+Loop over the fields and set field names, levels, thresholds, and/or
+options for each forecast field in the METplus configuration file. Note
+that GenEnsProd only deals with forecasts; it does not consider observations.
#}
-{%- set num_valid_fields_fcst = fields_levels_threshes_fcst|length %}
-{%- set valid_fields_fcst = [] %}
-{%- for i in range(0,num_valid_fields_fcst) %}
- {%- set field = fields_levels_threshes_fcst[i].keys()|list|join('') %}
- {%- set tmp = valid_fields_fcst.append(field) %}
-{%- endfor %}
+{%- set ns = namespace(var_count = 0) %}
+{%- for field_cpld, levels_threshes_cpld in fields_levels_threshes_cpld.items() %}
+
+ {%- if delim_str in field_cpld %}
+ {%- set field_fcst = field_cpld.split(delim_str)[0] %}
+ {%- else %}
+ {%- set field_fcst = field_cpld %}
+ {%- endif %}
{#-
-Loop over the valid fields and set field names, levels, thresholds, and/
-or options for each forecast field. Note that GenEnsProd only deals with
-forecasts; it does not need observations.
+For convenience, create list of valid forecast levels for the current
+field.
#}
-{%- set ns = namespace(var_count = 0) %}
-{%- for i in range(0,num_valid_fields_fcst) %}
+ {%- set valid_levels_fcst = [] %}
+ {%- for level_cpld, threshes_cpld in levels_threshes_cpld.items() %}
+ {%- if delim_str in level_cpld %}
+ {%- set level_fcst = level_cpld.split(delim_str)[0] %}
+ {%- else %}
+ {%- set level_fcst = level_cpld %}
+ {%- endif %}
+ {%- set tmp = valid_levels_fcst.append(level_fcst) %}
+ {%- endfor %}
- {%- set field_fcst = valid_fields_fcst[i] %}
+{#-
+Make sure that the input forecast level (input_level_fcst) is set to a
+valid value.
+#}
+ {%- if (input_level_fcst != 'all') and (input_level_fcst not in valid_levels_fcst) %}
+ {%- set error_msg = '\n' ~
+'The input forecast level (input_level_fcst) must be set either to \'all\'\n' ~
+'or to one of the elements in the list of valid levels (valid_levels_fcst)\n' ~
+'for the current forecast field (field_fcst). This is not the case:\n' ~
+' field_fcst = ' ~ field_fcst ~ '\n' ~
+' valid_levels_fcst = ' ~ valid_levels_fcst ~ '\n' ~
+' input_level_fcst = ' ~ input_level_fcst ~ '\n' %}
+ {{metplus_macros.print_err_and_quit(error_msg)}}
+ {%- endif %}
{#-
-Extract dictionary of valid forecast levels (the dictionary keys) and
-corresponding lists of valid thresholds (the values) for each level.
-Then loop over these levels and corresponding lists of thresholds to set
-the forecast field names, levels, thresholds, and/or options.
+Loop over the (coupled) levels and corresponding lists of thresholds.
+Extract from these the level values for forecasts and use them to set the
+forecast field names, levels, thresholds, and/or options in the METplus
+configuration file.
#}
- {%- set valid_levels_threshes_fcst = fields_levels_threshes_fcst[i][field_fcst] %}
- {%- for level_fcst, valid_threshes_fcst in valid_levels_threshes_fcst.items() %}
+ {%- for level_cpld, threshes_cpld in levels_threshes_cpld.items() %}
+
+ {%- if delim_str in level_cpld %}
+ {%- set level_fcst = level_cpld.split(delim_str)[0] %}
+ {%- else %}
+ {%- set level_fcst = level_cpld %}
+ {%- endif %}
+
+ {%- set valid_threshes_fcst = [] %}
+ {%- for thresh_cpld in threshes_cpld %}
+ {%- if delim_str in thresh_cpld %}
+ {%- set thresh_fcst = thresh_cpld.split(delim_str)[0] %}
+ {%- else %}
+ {%- set thresh_fcst = thresh_cpld %}
+ {%- endif %}
+ {%- set tmp = valid_threshes_fcst.append(thresh_fcst) %}
+ {%- endfor %}
{%- if (input_level_fcst == 'all') or (input_level_fcst == level_fcst) %}
{#-
@@ -229,17 +271,18 @@ to the full set of valid values.
{%- set threshes_fcst = valid_threshes_fcst %}
{#-
If input_thresh_fcst is set to a specific value:
- 1) Ensure that input_thresh_fcst exists in the list of valid forecast
- thresholds.
- 2) Get the index of input_thresh_fcst in the list of valid forecast
- thresholds. This will be needed later below when setting the
- observation threshold(s).
- 3) Use this index to set the forecast threshold to a one-element list
- containing the specified forecast threshold.
+* If that value is valid, i.e. it exists in the list of valid forecast
+ thresholds, get its index in that list and use it to set the forecast
+ threshold to a one-element list containing that value.
+* If the input forecast threshold is not valid, print out an error message
+ and exit.
#}
{%- else %}
- {%- if input_thresh_fcst not in valid_threshes_fcst %}
+ {%- if input_thresh_fcst in valid_threshes_fcst %}
+ {%- set indx_input_thresh_fcst = valid_threshes_fcst.index(input_thresh_fcst) %}
+ {%- set threshes_fcst = [valid_threshes_fcst[indx_input_thresh_fcst]] %}
+ {%- else %}
{%- set error_msg = '\n' ~
'For the current forecast field (field_fcst) and forecast level (level_fcst),\n' ~
'the input forecast threshold (input_thresh_fcst) does not exist in the list\n' ~
@@ -250,8 +293,6 @@ If input_thresh_fcst is set to a specific value:
' input_thresh_fcst = ' ~ input_thresh_fcst ~ '\n' %}
{{metplus_macros.print_err_and_quit(error_msg)}}
{%- endif %}
- {%- set indx_input_thresh_fcst = valid_threshes_fcst.index(input_thresh_fcst) %}
- {%- set threshes_fcst = [valid_threshes_fcst[indx_input_thresh_fcst]] %}
{%- endif %}
{#-
@@ -310,9 +351,10 @@ ENS_VAR{{ns.var_count}}_OPTIONS = cnt_thresh = [ >0 ];
{%- endif %}
{%- endif %}
+
{#-
-Print out a newline to separate the settings for the current field (both
-forecast and observation settings) from those for the next field.
+Print out a newline to separate the settings for the current field from
+those for the next field.
#}
{{- '\n' }}
diff --git a/parm/metplus/GridStat_ensmean.conf b/parm/metplus/GridStat_ensmean.conf
index 4b8c71ddab..6bbc20e3f8 100644
--- a/parm/metplus/GridStat_ensmean.conf
+++ b/parm/metplus/GridStat_ensmean.conf
@@ -119,70 +119,49 @@ script instead of a hard-coded value as below.
{#-
Jinja requires certain variables to be defined globally within the template
-before they can be used in if-statements and other scopes (see Jinja
-scoping rules). Define such variables.
+before they can be used in if-statements and other scopes (see Jinja scoping
+rules). Define such variables.
#}
-{%- set level_fcst = '' %}
-{%- set level_obs = '' %}
{%- set indx_level_fcst = '' %}
-
-{%- set valid_threshes_fcst = [] %}
-{%- set valid_threshes_obs = [] %}
-{%- set threshes_fcst = '' %}
-{%- set threshes_obs = '' %}
{%- set indx_input_thresh_fcst = '' %}
-
+{%- set error_msg = '' %}
{%- set opts_indent = '' %}
{%- set opts_indent_len = '' %}
{%- set tmp = '' %}
-{%- set error_msg = '' %}
-{#-
-Make sure that the set of field groups for forecasts and observations
-are identical.
-#}
-{%- set fgs_fcst = vx_config_dict['fcst'].keys()|list %}
-{%- set fgs_obs = vx_config_dict['obs'].keys()|list %}
-{%- if (fgs_fcst != fgs_obs) %}
- {%- set error_msg = '\n' ~
-'The list of valid field groups for forecasts (fgs_fcst) must be identical\n' ~
-'to that for observations (fgs_obs) but isn\'t:\n' ~
-' fgs_fcst = ' ~ fgs_fcst ~ '\n' ~
-' fgs_obs = ' ~ fgs_obs %}
- {{metplus_macros.print_err_and_quit(error_msg)}}
-{%- endif %}
+{%- set field_fcst = '' %}
+{%- set field_obs = '' %}
+{%- set level_fcst = '' %}
+{%- set level_obs = '' %}
+{%- set thresh_fcst = '' %}
+{%- set thresh_obs = '' %}
+
+{%- set threshes_fcst = [] %}
+{%- set threshes_obs = [] %}
{#-
-Extract the lists of forecast and observation dictionaries containing
-the valid fields, levels, and thresholds corresponding to the specified
-field group (input_field_group). Note that it would be simpler to have
-these be just dictionaries in which the keys are the field names (instead
-of them being LISTS of dictionaries in which each dictionary contains a
-single key that is the field name), but that approach cannot be used here
-because it is possible for field names to be repeated (for both forecasts
-and observations). For example, in the observations, the field name
-'PRWE' appears more than once, each time with a different threshold, and
-the combination of name and threshold is what constitutes a unique field,
-not just the name by itself.
+Get the set of valid field groups and ensure that the specified input
+field group appears in this list.
#}
-{%- set fields_levels_threshes_fcst = vx_config_dict['fcst'][input_field_group] %}
-{%- set fields_levels_threshes_obs = vx_config_dict['obs'][input_field_group] %}
+{%- set valid_field_groups = vx_config_dict.keys()|list %}
+{{- metplus_macros.check_field_group(valid_field_groups, input_field_group) }}
{#-
-Reset the specified forecast level so that if it happens to be an
-accumulation (e.g. 'A03'), the leading zeros in front of the hour are
-stipped out (e.g. reset to 'A3').
+Reset the input forecast level so that if it happens to be an accumulation
+(e.g. 'A03'), the leading zeros in front of the hour are stripped out (e.g.
+reset to 'A3').
#}
{%- set input_level_fcst = metplus_macros.get_accumulation_no_zero_pad(input_level_fcst) %}
{#-
-Ensure that the specified input forecast level(s) (input_level_fcst) and
-threshold(s) (input_thresh_fcst) are valid, i.e. that they are in the
-set(s) of valid forecast levels and thresholds, respectively, specified
-in fields_levels_threshes_fcst.
+Extract from the configuration dictionary the set (which itself is a
+dictionary) of fields, levels, and thresholds corresponding to the input
+field group. Then set the delimiter string that separates forecast and
+observation values in the various items (i.e. dictionary keys and values
+representing field names, levels, and thresholds) in this dictionary.
#}
-{{- metplus_macros.check_level(fields_levels_threshes_fcst, input_level_fcst) }}
-{{- metplus_macros.check_thresh(fields_levels_threshes_fcst, input_level_fcst, input_thresh_fcst) }}
+{%- set fields_levels_threshes_cpld = vx_config_dict[input_field_group] %}
+{%- set delim_str = metplus_macros.set_delim_str() %}
{#-
Some fields in the specified field group (input_field_group) may need to
@@ -200,72 +179,98 @@ following dictionary.
{%- set fields_fcst_to_exclude = fields_fcst_to_exclude_by_field_group[input_field_group] %}
{#-
-For convenience, create lists of valid forecast and observation field
-names.
+Remove from the dictionary fields_levels_threshes_cpld any fields that
+are in the list to be excluded.
#}
-{%- set num_valid_fields_fcst = fields_levels_threshes_fcst|length %}
-{%- set valid_fields_fcst = [] %}
-{%- for i in range(0,num_valid_fields_fcst) %}
- {%- set field = fields_levels_threshes_fcst[i].keys()|list|join('') %}
- {%- set tmp = valid_fields_fcst.append(field) %}
-{%- endfor %}
+{%- for field_cpld in fields_levels_threshes_cpld.copy() %}
-{%- set valid_fields_obs = [] %}
-{%- set num_valid_fields_obs = fields_levels_threshes_obs|length %}
-{%- for i in range(0,num_valid_fields_obs) %}
- {%- set field = fields_levels_threshes_obs[i].keys()|list|join('') %}
- {%- set tmp = valid_fields_obs.append(field) %}
-{%- endfor %}
+ {%- if delim_str in field_cpld %}
+ {%- set field_fcst, field_obs = field_cpld.split(delim_str) %}
+ {%- else %}
+ {%- set field_fcst = field_cpld %}
+ {%- set field_obs = field_cpld %}
+ {%- endif %}
-{#-
-Ensure that the number of valid fields for forecasts is equal to that
-for the observations.
-#}
-{%- set num_valid_fields = 0 %}
-{%- if (num_valid_fields_fcst != num_valid_fields_obs) %}
- {%- set error_msg = '\n' ~
-'The number of valid forecast fields (num_valid_fields_fcst) must be\n' ~
-'equal to the number of valid observation fields (num_valid_fields_obs)\n' ~
-'but isn\'t:\n' ~
-' num_valid_fields_fcst = ' ~ num_valid_fields_fcst ~ '\n' ~
-' num_valid_fields_obs = ' ~ num_valid_fields_obs ~ '\n' ~
-'The lists of valid forecast and observation fields are:\n' ~
-' valid_fields_fcst = ' ~ valid_fields_fcst ~ '\n' ~
-' valid_fields_obs = ' ~ valid_fields_obs ~ '\n' %}
- {{metplus_macros.print_err_and_quit(error_msg)}}
-{%- else %}
- {%- set num_valid_fields = num_valid_fields_fcst %}
-{%- endif %}
+ {%- if field_fcst in fields_fcst_to_exclude %}
+ {%- set tmp = fields_levels_threshes_cpld.pop(field_cpld) %}
+ {%- endif %}
+
+{%- endfor %}
{#-
-Loop over the valid fields and set field names, levels, thresholds, and/
-or options for each field, both for forecasts and for obseratiions, in
-the METplus configuration file.
+Loop over the fields and set field names, levels, thresholds, and/or
+options for each field, both for forecasts and for observations, in the
+METplus configuration file.
#}
{%- set ns = namespace(var_count = 0) %}
+{%- for field_cpld, levels_threshes_cpld in fields_levels_threshes_cpld.items() %}
-{%- for i in range(0,num_valid_fields) if valid_fields_fcst[i] not in fields_fcst_to_exclude %}
-
- {%- set field_fcst = valid_fields_fcst[i] %}
- {%- set field_obs = valid_fields_obs[i] %}
+ {%- if delim_str in field_cpld %}
+ {%- set field_fcst, field_obs = field_cpld.split(delim_str) %}
+ {%- else %}
+ {%- set field_fcst = field_cpld %}
+ {%- set field_obs = field_cpld %}
+ {%- endif %}
{#-
For convenience, create lists of valid forecast and observation levels
-for the current field. Then check that the number of valid levels for
-forecasts is the same as that for observations.
+for the current field.
+#}
+ {%- set valid_levels_fcst = [] %}
+ {%- set valid_levels_obs = [] %}
+ {%- for level_cpld, threshes_cpld in levels_threshes_cpld.items() %}
+ {%- if delim_str in level_cpld %}
+ {%- set level_fcst, level_obs = level_cpld.split(delim_str) %}
+ {%- else %}
+ {%- set level_fcst = level_cpld %}
+ {%- set level_obs = level_cpld %}
+ {%- endif %}
+ {%- set tmp = valid_levels_fcst.append(level_fcst) %}
+ {%- set tmp = valid_levels_obs.append(level_obs) %}
+ {%- endfor %}
+
+{#-
+Make sure that the input forecast level (input_level_fcst) is set to a
+valid value.
#}
- {%- set valid_levels_fcst = fields_levels_threshes_fcst[i][field_fcst].keys()|list %}
- {%- set valid_levels_obs = fields_levels_threshes_obs[i][field_obs].keys()|list %}
+ {%- if (input_level_fcst != 'all') and (input_level_fcst not in valid_levels_fcst) %}
+ {%- set error_msg = '\n' ~
+'The input forecast level (input_level_fcst) must be set either to \'all\'\n' ~
+'or to one of the elements in the list of valid levels (valid_levels_fcst)\n' ~
+'for the current forecast field (field_fcst). This is not the case:\n' ~
+' field_fcst = ' ~ field_fcst ~ '\n' ~
+' valid_levels_fcst = ' ~ valid_levels_fcst ~ '\n' ~
+' input_level_fcst = ' ~ input_level_fcst ~ '\n' %}
+ {{metplus_macros.print_err_and_quit(error_msg)}}
+ {%- endif %}
{#-
-Extract dictionary of valid forecast levels (the dictionary keys) and
-corresponding lists of valid thresholds (the values) for each level.
-Then loop over these levels and corresponding lists of thresholds to set
-both the forecast and observation field names, levels, thresholds, and/or
-options.
+Loop over the (coupled) levels and corresponding lists of thresholds.
+Extract from these the level values for forecasts and observations and
+use them to set the forecast and observation field names, levels,
+thresholds, and/or options in the METplus configuration file.
#}
- {%- set valid_levels_threshes_fcst = fields_levels_threshes_fcst[i][field_fcst] %}
- {%- for level_fcst, valid_threshes_fcst in valid_levels_threshes_fcst.items() %}
+ {%- for level_cpld, threshes_cpld in levels_threshes_cpld.items() %}
+
+ {%- if delim_str in level_cpld %}
+ {%- set level_fcst, level_obs = level_cpld.split(delim_str) %}
+ {%- else %}
+ {%- set level_fcst = level_cpld %}
+ {%- set level_obs = level_cpld %}
+ {%- endif %}
+
+ {%- set valid_threshes_fcst = [] %}
+ {%- set valid_threshes_obs = [] %}
+ {%- for thresh_cpld in threshes_cpld %}
+ {%- if delim_str in thresh_cpld %}
+ {%- set thresh_fcst, thresh_obs = thresh_cpld.split(delim_str) %}
+ {%- else %}
+ {%- set thresh_fcst = thresh_cpld %}
+ {%- set thresh_obs = thresh_cpld %}
+ {%- endif %}
+ {%- set tmp = valid_threshes_fcst.append(thresh_fcst) %}
+ {%- set tmp = valid_threshes_obs.append(thresh_obs) %}
+ {%- endfor %}
{%- if (input_level_fcst == 'all') or (input_level_fcst == level_fcst) %}
{#-
@@ -284,20 +289,20 @@ For example, if the same field, say APCP, is output at two different
levels, say at A3 and A6 (for APCP, "levels" are really accumulation
periods), there need to be two variables in the output file, and they
obviously can't both be named "APCP", so GenEnsProd names one "APCP_A3"
-and the other "APCP_A6". Here, the level is stored in the variable
+and the other "APCP_A6". Here, the level is stored in the variable
level_fcst and, below, is included in the name of the forecast field.
For accumulated fields, the field name in the input forecast file contains
TWO references to the accumulation period. The first is the level of the
forecast field added by GenEnsProd as described above. The second is
-another reference to this same level (accumulation period) but added by
+another reference to this same level (accumulation period) but added by
the MET/METplus's PcpCombine tool (whose output file is the input into
GenEnsProd). PcpCombine adds this reference to the level (really the
accumulation period) to the field's name for the same reason that
GenEnsProd does, i.e. to ensure that the names of variables in the output
-file are distinct. Here, this accumulation period is stored in the
+file are distinct. Here, this accumulation period is stored in the
variable accum_hh. Thus, for accumulated fields, below we add both
-accum_hh and level_fcst to the field name to get an exact field name
+accum_hh and level_fcst to the field name to get an exact field name
match.
#}
{%- if (input_field_group in ['APCP', 'ASNOW']) %}
@@ -326,17 +331,19 @@ to the full set of valid values.
{%- set threshes_fcst = valid_threshes_fcst %}
{#-
If input_thresh_fcst is set to a specific value:
- 1) Ensure that input_thresh_fcst exists in the list of valid forecast
- thresholds.
- 2) Get the index of input_thresh_fcst in the list of valid forecast
- thresholds. This will be needed later below when setting the
- observation threshold(s).
- 3) Use this index to set the forecast threshold to a one-element list
- containing the specified forecast threshold.
+* If that value is valid, i.e. it exists in the list of valid forecast
+ thresholds, get its index in that list and use it to set the forecast
+ threshold to a one-element list containing that value. Note that the
+ index will be needed later below when setting the observation threshold(s).
+* If the input forecast threshold is not valid, print out an error message
+ and exit.
#}
{%- else %}
- {%- if input_thresh_fcst not in valid_threshes_fcst %}
+ {%- if input_thresh_fcst in valid_threshes_fcst %}
+ {%- set indx_input_thresh_fcst = valid_threshes_fcst.index(input_thresh_fcst) %}
+ {%- set threshes_fcst = [valid_threshes_fcst[indx_input_thresh_fcst]] %}
+ {%- else %}
{%- set error_msg = '\n' ~
'For the current forecast field (field_fcst) and forecast level (level_fcst),\n' ~
'the input forecast threshold (input_thresh_fcst) does not exist in the list\n' ~
@@ -347,8 +354,6 @@ If input_thresh_fcst is set to a specific value:
' input_thresh_fcst = ' ~ input_thresh_fcst ~ '\n' %}
{{metplus_macros.print_err_and_quit(error_msg)}}
{%- endif %}
- {%- set indx_input_thresh_fcst = valid_threshes_fcst.index(input_thresh_fcst) %}
- {%- set threshes_fcst = [valid_threshes_fcst[indx_input_thresh_fcst]] %}
{%- endif %}
{#-
@@ -391,7 +396,7 @@ Set observation field name. Note that this has to exactly match the name
of the field in the input observation file.
For accumulated fields, the input observation file is generated by MET's
-PcpCombine tool. In that file, the field name consists of the observation
+PcpCombine tool. In that file, the field name consists of the observation
field name here (field_obs) with the accumulation period appended to it
(separated by an underscore), so we must do the same here to get an exact
match.
@@ -423,11 +428,6 @@ set to 'none'.
#}
{%- if (input_thresh_fcst != 'none') %}
{#-
-Set the list of valid observation thresholds to the one corresponding to
-the current observation level (level_obs).
-#}
- {%- set valid_threshes_obs = fields_levels_threshes_obs[i][field_obs][level_obs] %}
-{#-
If input_thresh_fcst is set to 'all', set the list of observation thresholds
to the full set of valid values.
#}
diff --git a/parm/metplus/GridStat_ensprob.conf b/parm/metplus/GridStat_ensprob.conf
index 6a4873e446..a43b8ed340 100644
--- a/parm/metplus/GridStat_ensprob.conf
+++ b/parm/metplus/GridStat_ensprob.conf
@@ -133,121 +133,96 @@ Jinja requires certain variables to be defined globally within the template
before they can be used in if-statements and other scopes (see Jinja
scoping rules). Define such variables.
#}
-{%- set level_fcst = '' %}
-{%- set level_obs = '' %}
{%- set indx_level_fcst = '' %}
+{%- set indx_thresh_fcst = '' %}
+{%- set error_msg = '' %}
+{%- set opts_indent = '' %}
+{%- set opts_indent_len = '' %}
-{%- set valid_threshes_fcst = [] %}
-{%- set valid_threshes_obs = [] %}
+{%- set field_fcst = '' %}
+{%- set field_obs = '' %}
+{%- set level_fcst = '' %}
+{%- set level_obs = '' %}
{%- set thresh_fcst = '' %}
{%- set thresh_obs = '' %}
-{%- set indx_thresh_fcst = '' %}
{%- set thresh_fcst_and_or = '' %}
-{%- set opts_indent = '' %}
-{%- set opts_indent_len = '' %}
-{%- set tmp = '' %}
-{%- set error_msg = '' %}
-
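
A minimal, self-contained sketch of the Jinja scoping behavior that these
global definitions (and the namespace used further below) work around; the
variable names here are generic, not from these templates:

    {%- set count = 0 %}
    {%- for x in [1, 2, 3] %}
      {%- set count = count + 1 %}  {#- rebinds a loop-local 'count' #}
    {%- endfor %}
    {#- count is still 0 here. Mutable loop state needs a namespace: #}
    {%- set ns = namespace(count=0) %}
    {%- for x in [1, 2, 3] %}
      {%- set ns.count = ns.count + 1 %}
    {%- endfor %}
    {#- ns.count is now 3, which is why var_count lives in a namespace. #}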
{#-
-Make sure that the set of field groups for forecasts and observations
-are identical.
+Get the set of valid field groups and ensure that the specified input
+field group appears in this list.
#}
-{%- set fgs_fcst = vx_config_dict['fcst'].keys()|list %}
-{%- set fgs_obs = vx_config_dict['obs'].keys()|list %}
-{%- if (fgs_fcst != fgs_obs) %}
- {%- set error_msg = '\n' ~
-'The list of valid field groups for forecasts (fgs_fcst) must be identical\n' ~
-'to that for observations (fgs_obs) but isn\'t:\n' ~
-' fgs_fcst = ' ~ fgs_fcst ~ '\n' ~
-' fgs_obs = ' ~ fgs_obs %}
- {{metplus_macros.print_err_and_quit(error_msg)}}
-{%- endif %}
+{%- set valid_field_groups = vx_config_dict.keys()|list %}
+{{- metplus_macros.check_field_group(valid_field_groups, input_field_group) }}
{#-
-Extract the lists of forecast and observation dictionaries containing
-the valid fields, levels, and thresholds corresponding to the specified
-field group (input_field_group). Note that it would be simpler to have
-these be just dictionaries in which the keys are the field names (instead
-of them being LISTS of dictionaries in which each dictionary contains a
-single key that is the field name), but that approach cannot be used here
-because it is possible for field names to be repeated (for both forecasts
-and observations). For example, in the observations, the field name
-'PRWE' appears more than once, each time with a different threshold, and
-the combination of name and threshold is what constitutes a unique field,
-not just the name by itself.
+Reset the input forecast level so that if it happens to be an accumulation
+(e.g. 'A03'), the leading zeros in front of the hour are stripped out (e.g.
+reset to 'A3').
#}
-{%- set fields_levels_threshes_fcst = vx_config_dict['fcst'][input_field_group] %}
-{%- set fields_levels_threshes_obs = vx_config_dict['obs'][input_field_group] %}
+{%- set input_level_fcst = metplus_macros.get_accumulation_no_zero_pad(input_level_fcst) %}
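
Illustrative behavior of this reset (the macro body is defined in
metplus_macros.jinja; the examples here are assumptions about typical
inputs):

    {#- get_accumulation_no_zero_pad('A03') -> 'A3'
        get_accumulation_no_zero_pad('A24') -> 'A24'
        get_accumulation_no_zero_pad('Z2')  -> 'Z2'
        (non-accumulation levels are assumed to pass through unchanged) #}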
{#-
-Reset the specified forecast level so that if it happens to be an
-accumulation (e.g. 'A03'), the leading zeros in front of the hour are
-stipped out (e.g. reset to 'A3').
+Extract from the configuration dictionary the set (which itself is a
+dictionary) of fields, levels, and thresholds corresponding to the input
+field group. Then set the delimiter string that separates forecast and
+observation values in the various items (i.e. dictionary keys and values
+representing field names, levels, and thresholds) in this dictionary.
#}
-{%- set input_level_fcst = metplus_macros.get_accumulation_no_zero_pad(input_level_fcst) %}
+{%- set fields_levels_threshes_cpld = vx_config_dict[input_field_group] %}
+{%- set delim_str = metplus_macros.set_delim_str() %}
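
A sketch of the coupled-key convention (here '%%' merely stands in for
whatever set_delim_str() actually returns, and the field names are
illustrative):

    {%- set delim_str = '%%' %}
    {%- set field_cpld = 'APCP%%APCP_01' %}
    {%- if delim_str in field_cpld %}
      {%- set field_fcst, field_obs = field_cpld.split(delim_str) %}
    {%- else %}
      {%- set field_fcst = field_cpld %}
      {%- set field_obs = field_cpld %}
    {%- endif %}
    {#- field_fcst = 'APCP', field_obs = 'APCP_01'; an uncoupled key
        (no delimiter) is used verbatim for both sides. #}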
{#-
-Ensure that the specified input forecast level(s) (input_level_fcst) and
-threshold(s) (input_thresh_fcst) are valid, i.e. that they are in the
-set(s) of valid forecast levels and thresholds, respectively, specified
-in fields_levels_threshes_fcst.
+Loop over the fields and set field names, levels, thresholds, and/or
+options for each field, both for forecasts and for observations, in the
+METplus configuration file.
#}
-{{- metplus_macros.check_level(fields_levels_threshes_fcst, input_level_fcst) }}
-{{- metplus_macros.check_thresh(fields_levels_threshes_fcst, input_level_fcst, input_thresh_fcst) }}
-
+{%- set ns = namespace(var_count = 0) %}
{#-
-For convenience, create lists of valid forecast and observation field
-names.
+Loop over each field twice, the first time treating the forecast field as
+probabilistic and the second time as a scalar.
#}
-{%- set num_valid_fields_fcst = fields_levels_threshes_fcst|length %}
-{%- set valid_fields_fcst = [] %}
-{%- for i in range(0,num_valid_fields_fcst) %}
- {%- set field = fields_levels_threshes_fcst[i].keys()|list|join('') %}
- {%- set tmp = valid_fields_fcst.append(field) %}
-{%- endfor %}
+{%- for treat_fcst_as_prob in [True, False] %}
-{%- set valid_fields_obs = [] %}
-{%- set num_valid_fields_obs = fields_levels_threshes_obs|length %}
-{%- for i in range(0,num_valid_fields_obs) %}
- {%- set field = fields_levels_threshes_obs[i].keys()|list|join('') %}
- {%- set tmp = valid_fields_obs.append(field) %}
-{%- endfor %}
+ {%- for field_cpld, levels_threshes_cpld in fields_levels_threshes_cpld.items() %}
-{#-
-Ensure that the number of valid fields for forecasts is equal to that
-for the observations.
-#}
-{%- set num_valid_fields = 0 %}
-{%- if (num_valid_fields_fcst != num_valid_fields_obs) %}
- {%- set error_msg = '\n' ~
-'The number of valid forecast fields (num_valid_fields_fcst) must be\n' ~
-'equal to the number of valid observation fields (num_valid_fields_obs)\n' ~
-'but isn\'t:\n' ~
-' num_valid_fields_fcst = ' ~ num_valid_fields_fcst ~ '\n' ~
-' num_valid_fields_obs = ' ~ num_valid_fields_obs ~ '\n' ~
-'The lists of valid forecast and observation fields are:\n' ~
-' valid_fields_fcst = ' ~ valid_fields_fcst ~ '\n' ~
-' valid_fields_obs = ' ~ valid_fields_obs ~ '\n' %}
- {{metplus_macros.print_err_and_quit(error_msg)}}
-{%- else %}
- {%- set num_valid_fields = num_valid_fields_fcst %}
-{%- endif %}
+ {%- if delim_str in field_cpld %}
+ {%- set field_fcst, field_obs = field_cpld.split(delim_str) %}
+ {%- else %}
+ {%- set field_fcst = field_cpld %}
+ {%- set field_obs = field_cpld %}
+ {%- endif %}
{#-
-Loop over the valid fields and set field names, levels, thresholds, and/
-or options for each field, both for forecasts and for obseratiions, in
-the METplus configuration file.
+For convenience, create lists of valid forecast and observation levels
+for the current field.
#}
-{%- set ns = namespace(var_count = 0) %}
+ {%- set valid_levels_fcst = [] %}
+ {%- set valid_levels_obs = [] %}
+ {%- for level_cpld, threshes_cpld in levels_threshes_cpld.items() %}
+ {%- if delim_str in level_cpld %}
+ {%- set level_fcst, level_obs = level_cpld.split(delim_str) %}
+ {%- else %}
+ {%- set level_fcst = level_cpld %}
+ {%- set level_obs = level_cpld %}
+ {%- endif %}
+ {%- set tmp = valid_levels_fcst.append(level_fcst) %}
+ {%- set tmp = valid_levels_obs.append(level_obs) %}
+ {%- endfor %}
{#-
-Loop over each field twice, the first treating the forecast field as
-probabilistic and the second time as a scalar.
+Make sure that the input forecast level (input_level_fcst) is set to a
+valid value.
#}
-{%- for treat_fcst_as_prob in [True, False] %}
-
- {%- for i in range(0,num_valid_fields) %}
+ {%- if (input_level_fcst != 'all') and (input_level_fcst not in valid_levels_fcst) %}
+ {%- set error_msg = '\n' ~
+'The input forecast level (input_level_fcst) must be set either to \'all\'\n' ~
+'or to one of the elements in the list of valid levels (valid_levels_fcst)\n' ~
+'for the current forecast field (field_fcst). This is not the case:\n' ~
+' field_fcst = ' ~ field_fcst ~ '\n' ~
+' valid_levels_fcst = ' ~ valid_levels_fcst ~ '\n' ~
+' input_level_fcst = ' ~ input_level_fcst ~ '\n' %}
+ {{metplus_macros.print_err_and_quit(error_msg)}}
+ {%- endif %}
{#-
Add comment depending on whether or not the field is being treated
@@ -265,26 +240,33 @@ probabilistically.
#
{%- endif %}
- {%- set field_fcst = valid_fields_fcst[i] %}
- {%- set field_obs = valid_fields_obs[i] %}
-
{#-
-For convenience, create lists of valid forecast and observation levels
-for the current field. Then check that the number of valid levels for
-forecasts is the same as that for observations.
+Loop over the (coupled) levels and corresponding lists of thresholds.
+Extract from these the level values for forecasts and observations and
+use them to set the forecast and observation field names, levels,
+thresholds, and/or options in the METplus configuration file.
#}
- {%- set valid_levels_fcst = fields_levels_threshes_fcst[i][field_fcst].keys()|list %}
- {%- set valid_levels_obs = fields_levels_threshes_obs[i][field_obs].keys()|list %}
+ {%- for level_cpld, threshes_cpld in levels_threshes_cpld.items() %}
-{#-
-Extract dictionary of valid forecast levels (the dictionary keys) and
-corresponding lists of valid thresholds (the values) for each level.
-Then loop over these levels and corresponding lists of thresholds to set
-both the forecast and observation field names, levels, thresholds, and/or
-options.
-#}
- {%- set valid_levels_threshes_fcst = fields_levels_threshes_fcst[i][field_fcst] %}
- {%- for level_fcst, valid_threshes_fcst in valid_levels_threshes_fcst.items() %}
+ {%- if delim_str in level_cpld %}
+ {%- set level_fcst, level_obs = level_cpld.split(delim_str) %}
+ {%- else %}
+ {%- set level_fcst = level_cpld %}
+ {%- set level_obs = level_cpld %}
+ {%- endif %}
+
+ {%- set valid_threshes_fcst = [] %}
+ {%- set valid_threshes_obs = [] %}
+ {%- for thresh_cpld in threshes_cpld %}
+ {%- if delim_str in thresh_cpld %}
+ {%- set thresh_fcst, thresh_obs = thresh_cpld.split(delim_str) %}
+ {%- else %}
+ {%- set thresh_fcst = thresh_cpld %}
+ {%- set thresh_obs = thresh_cpld %}
+ {%- endif %}
+ {%- set tmp = valid_threshes_fcst.append(thresh_fcst) %}
+ {%- set tmp = valid_threshes_obs.append(thresh_obs) %}
+ {%- endfor %}
{%- if (input_level_fcst == 'all') or (input_level_fcst == level_fcst) %}
@@ -307,20 +289,20 @@ For example, if the same field, say APCP, is output at two different
levels, say at A3 and A6 (for APCP, "levels" are really accumulation
periods), there need to be two variables in the output file, and they
obviously can't both be named "APCP", so GenEnsProd names one "APCP_A3"
-and the other "APCP_A6". Here, the level is stored in the variable
+and the other "APCP_A6". Here, the level is stored in the variable
level_fcst and, below, is included in the name of the forecast field.
For accumulated fields, the field name in the input forecast file contains
TWO references to the accumulation period. The first is the level of the
forecast field added by GenEnsProd as described above. The second is
-another reference to this same level (accumulation period) but added by
+another reference to this same level (accumulation period) but added by
the MET/METplus's PcpCombine tool (whose output file is the input into
GenEnsProd). PcpCombine adds this reference to the level (really the
accumulation period) to the field's name for the same reason that
GenEnsProd does, i.e. to ensure that the names of variables in the output
-file are distinct. Here, this accumulation period is stored in the
+file are distinct. Here, this accumulation period is stored in the
variable accum_hh. Thus, for accumulated fields, below we add both
-accum_hh and level_fcst to the field name to get an exact field name
+accum_hh and level_fcst to the field name to get an exact field name
match.
#}
{%- set thresh_fcst_and_or = thresh_fcst|replace("&&", ".and.") %}
@@ -368,7 +350,7 @@ Set observation field name. Note that this has to exactly match the name
of the field in the input observation file.
For accumulated fields, the input observation file is generated by MET's
-PcpCombine tool. In that file, the field name consists of the observation
+PcpCombine tool. In that file, the field name consists of the observation
field name here (field_obs) with the accumulation period appended to it
(separated by an underscore), so we must do the same here to get an exact
match.
@@ -400,11 +382,6 @@ set to 'none'.
#}
{%- if (input_thresh_fcst != 'none') %}
{#-
-Set the list of valid observation thresholds to the one corresponding to
-the current observation level (level_obs).
-#}
- {%- set valid_threshes_obs = fields_levels_threshes_obs[i][field_obs][level_obs] %}
-{#-
Set the observation threshold. This is given by the element in the list
of valid observation thresholds that has the same index as that of the
current forecast threshold (thresh_fcst) in the list of valid forecast
@@ -466,6 +443,7 @@ OBS_VAR{{ns.var_count}}_OPTIONS = censor_thresh = lt-20.0;
{%- endif %}
{%- endif %}
+
{#-
Print out a newline to separate the settings for the current field (both
forecast and observation settings) from those for the next field.
diff --git a/parm/metplus/GridStat_or_PointStat.conf b/parm/metplus/GridStat_or_PointStat.conf
index c90783862b..39d34eb24f 100644
--- a/parm/metplus/GridStat_or_PointStat.conf
+++ b/parm/metplus/GridStat_or_PointStat.conf
@@ -189,7 +189,7 @@ OBTYPE = {{obtype}}
{%- if input_field_group in ['APCP', 'ASNOW'] %}
# Note that for accumulated fields such as APCP and ASNOW, in the input
# forecast and observation files (which are generated by MET's PcpCombine
-# tool) the accumulation period is appended to the field name, so the
+# tool) the accumulation period is appended to the field name, so the
# same is done here.
#
{%- endif %}
@@ -216,154 +216,99 @@ Import the file containing jinja macros.
{#-
Jinja requires certain variables to be defined globally within the template
-before they can be used in if-statements and other scopes (see Jinja
-scoping rules). Define such variables.
+before they can be used in if-statements and other scopes (see Jinja scoping
+rules). Define such variables.
#}
-{%- set levels_fcst = '' %}
-{%- set levels_obs = '' %}
{%- set indx_input_level_fcst = '' %}
+{%- set indx_input_thresh_fcst = '' %}
+{%- set error_msg = '' %}
+{%- set field_fcst = '' %}
+{%- set field_obs = '' %}
+{%- set level_fcst = '' %}
+{%- set level_obs = '' %}
+{%- set thresh_fcst = '' %}
+{%- set thresh_obs = '' %}
+
+{%- set levels_fcst = '' %}
+{%- set levels_obs = '' %}
+{%- set threshes_cpld = [] %}
{%- set valid_threshes_fcst = [] %}
{%- set valid_threshes_obs = [] %}
{%- set threshes_fcst = [] %}
{%- set threshes_obs = [] %}
-{%- set indx_input_thresh_fcst = '' %}
-
-{%- set opts_indent = '' %}
-{%- set opts_indent_len = '' %}
-{%- set tmp = '' %}
-{%- set error_msg = '' %}
-{#-
-Make sure that the set of field groups for forecasts and observations
-are identical.
-#}
-{%- set fgs_fcst = vx_config_dict['fcst'].keys()|list %}
-{%- set fgs_obs = vx_config_dict['obs'].keys()|list %}
-{%- if (fgs_fcst != fgs_obs) %}
- {%- set error_msg = '\n' ~
-'The list of valid field groups for forecasts (fgs_fcst) must be identical\n' ~
-'to that for observations (fgs_obs) but isn\'t:\n' ~
-' fgs_fcst = ' ~ fgs_fcst ~ '\n' ~
-' fgs_obs = ' ~ fgs_obs %}
- {{metplus_macros.print_err_and_quit(error_msg)}}
-{%- endif %}
{#-
-Extract the lists of forecast and observation dictionaries containing
-the valid fields, levels, and thresholds corresponding to the specified
-field group (input_field_group). Note that it would be simpler to have
-these be just dictionaries in which the keys are the field names (instead
-of them being LISTS of dictionaries in which each dictionary contains a
-single key that is the field name), but that approach cannot be used here
-because it is possible for field names to be repeated (for both forecasts
-and observations). For example, in the observations, the field name
-'PRWE' appears more than once, each time with a different threshold, and
-the combination of name and threshold is what constitutes a unique field,
-not just the name by itself.
+Get the set of valid field groups and ensure that the specified input
+field group appears in this list.
#}
-{%- set fields_levels_threshes_fcst = vx_config_dict['fcst'][input_field_group] %}
-{%- set fields_levels_threshes_obs = vx_config_dict['obs'][input_field_group] %}
+{%- set valid_field_groups = vx_config_dict.keys()|list %}
+{{- metplus_macros.check_field_group(valid_field_groups, input_field_group) }}
{#-
-Reset the specified forecast level so that if it happens to be an
-accumulation (e.g. 'A03'), the leading zeros in front of the hour are
-stipped out (e.g. reset to 'A3').
+Reset the input forecast level so that if it happens to be an accumulation
+(e.g. 'A03'), the leading zeros in front of the hour are stripped out (e.g.
+reset to 'A3').
#}
{%- set input_level_fcst = metplus_macros.get_accumulation_no_zero_pad(input_level_fcst) %}
{#-
-Ensure that the specified input forecast level(s) (input_level_fcst) and
-threshold(s) (input_thresh_fcst) are valid, i.e. that they are in the
-set(s) of valid forecast levels and thresholds, respectively, specified
-in fields_levels_threshes_fcst.
+Extract from the configuration dictionary the set (which itself is a
+dictionary) of fields, levels, and thresholds corresponding to the input
+field group. Then set the delimiter string that separates forecast and
+observation values in the various items (i.e. dictionary keys and values
+representing field names, levels, and thresholds) in this dictionary.
#}
-{{- metplus_macros.check_level(fields_levels_threshes_fcst, input_level_fcst) }}
-{{- metplus_macros.check_thresh(fields_levels_threshes_fcst, input_level_fcst, input_thresh_fcst) }}
+{%- set fields_levels_threshes_cpld = vx_config_dict[input_field_group] %}
+{%- set delim_str = metplus_macros.set_delim_str() %}
{#-
-For convenience, create lists of valid forecast and observation field
-names.
-#}
-{%- set num_valid_fields_fcst = fields_levels_threshes_fcst|length %}
-{%- set valid_fields_fcst = [] %}
-{%- for i in range(0,num_valid_fields_fcst) %}
- {%- set field = fields_levels_threshes_fcst[i].keys()|list|join('') %}
- {%- set tmp = valid_fields_fcst.append(field) %}
-{%- endfor %}
-
-{%- set valid_fields_obs = [] %}
-{%- set num_valid_fields_obs = fields_levels_threshes_obs|length %}
-{%- for i in range(0,num_valid_fields_obs) %}
- {%- set field = fields_levels_threshes_obs[i].keys()|list|join('') %}
- {%- set tmp = valid_fields_obs.append(field) %}
-{%- endfor %}
-
-{#-
-Ensure that the number of valid fields for forecasts is equal to that
-for the observations.
-#}
-{%- set num_valid_fields = 0 %}
-{%- if (num_valid_fields_fcst != num_valid_fields_obs) %}
- {%- set error_msg = '\n' ~
-'The number of valid forecast fields (num_valid_fields_fcst) must be\n' ~
-'equal to the number of valid observation fields (num_valid_fields_obs)\n' ~
-'but isn\'t:\n' ~
-' num_valid_fields_fcst = ' ~ num_valid_fields_fcst ~ '\n' ~
-' num_valid_fields_obs = ' ~ num_valid_fields_obs ~ '\n' ~
-'The lists of valid forecast and observation fields are:\n' ~
-' valid_fields_fcst = ' ~ valid_fields_fcst ~ '\n' ~
-' valid_fields_obs = ' ~ valid_fields_obs ~ '\n' %}
- {{metplus_macros.print_err_and_quit(error_msg)}}
-{%- else %}
- {%- set num_valid_fields = num_valid_fields_fcst %}
-{%- endif %}
-
-{#-
-Loop over the valid fields and set field names, levels, thresholds, and/
-or options for each field, both for forecasts and for obseratiions, in
-the METplus configuration file.
+Loop over the fields and set field names, levels, thresholds, and/or
+options for each field, both for forecasts and for observations, in the
+METplus configuration file.
#}
{%- set ns = namespace(var_count = 0) %}
-{%- for i in range(0,num_valid_fields) %}
+{%- for field_cpld, levels_threshes_cpld in fields_levels_threshes_cpld.items() %}
- {%- set field_fcst = valid_fields_fcst[i] %}
- {%- set field_obs = valid_fields_obs[i] %}
+ {%- if delim_str in field_cpld %}
+ {%- set field_fcst, field_obs = field_cpld.split(delim_str) %}
+ {%- else %}
+ {%- set field_fcst = field_cpld %}
+ {%- set field_obs = field_cpld %}
+ {%- endif %}
+
+ {%- set levels_cpld = levels_threshes_cpld.keys()|list %}
+ {%- set num_levels = levels_cpld|length %}
{#-
For convenience, create lists of valid forecast and observation levels
-for the current field. Then check that the number of valid levels for
-forecasts is the same as that for observations.
+for the current field.
#}
- {%- set valid_levels_fcst = fields_levels_threshes_fcst[i][field_fcst].keys()|list %}
- {%- set valid_levels_obs = fields_levels_threshes_obs[i][field_obs].keys()|list %}
-
- {%- set num_valid_levels = 0 %}
- {%- set num_valid_levels_fcst = valid_levels_fcst|length %}
- {%- set num_valid_levels_obs = valid_levels_obs|length %}
- {%- if (num_valid_levels_fcst != num_valid_levels_obs) %}
- {%- set error_msg = '\n' ~
-'The number of valid forecast levels (num_valid_levels_fcst) must be\n' ~
-'equal to the number of valid observation levels (num_valid_levels_obs)\n' ~
-'but isn\'t:\n' ~
-' num_valid_levels_fcst = ' ~ num_valid_levels_fcst ~ '\n' ~
-' num_valid_levels_obs = ' ~ num_valid_levels_obs ~ '\n' %}
- {{metplus_macros.print_err_and_quit(error_msg)}}
- {%- else %}
- {%- set num_valid_levels = num_valid_levels_fcst %}
- {%- endif %}
+ {%- set valid_levels_fcst = [] %}
+ {%- set valid_levels_obs = [] %}
+ {%- for level_cpld, threshes_cpld in levels_threshes_cpld.items() %}
+ {%- if delim_str in level_cpld %}
+ {%- set level_fcst, level_obs = level_cpld.split(delim_str) %}
+ {%- else %}
+ {%- set level_fcst = level_cpld %}
+ {%- set level_obs = level_cpld %}
+ {%- endif %}
+ {%- set tmp = valid_levels_fcst.append(level_fcst) %}
+ {%- set tmp = valid_levels_obs.append(level_obs) %}
+ {%- endfor %}
{#-
-Make sure that input_level_fcst is set to a valid value.
+Make sure that the input forecast level (input_level_fcst) is set to a
+valid value.
#}
{%- if (input_level_fcst != 'all') and (input_level_fcst not in valid_levels_fcst) %}
{%- set error_msg = '\n' ~
-'The input forecast level (input_level_fcst) must either be set to \'all\',\n' ~
-'or it must be set to one of the elements in the list of valid levels\n' ~
-'(valid_levels_fcst) for the current forecast field (field_fcst). This\n' ~
-'is not the case:\n' ~
+'The input forecast level (input_level_fcst) must be set either to \'all\'\n' ~
+'or to one of the elements in the list of valid levels (valid_levels_fcst)\n' ~
+'for the current forecast field (field_fcst). This is not the case:\n' ~
' field_fcst = ' ~ field_fcst ~ '\n' ~
-' input_level_fcst = ' ~ input_level_fcst ~ '\n' ~
-' valid_levels_fcst = ' ~ valid_levels_fcst ~ '\n' %}
+' valid_levels_fcst = ' ~ valid_levels_fcst ~ '\n' ~
+' input_level_fcst = ' ~ input_level_fcst ~ '\n' %}
{{metplus_macros.print_err_and_quit(error_msg)}}
{%- endif %}
@@ -373,38 +318,43 @@ Increment the METplus variable counter.
{%- set ns.var_count = ns.var_count+1 %}
{#-
-Set forecast field name. Note that this has to exactly match the name
-of the field in the input forecast file.
-
-For accumulated fields, the input forecast file is generated by MET's
-PcpCombine tool. In that file, the field name consists of the forecast
-field name here (field_fcst) with the accumulation period appended to
-it (separated by an underscore), so we must do the same here to get an
-exact match.
-#}
- {%- if (input_field_group in ['APCP', 'ASNOW']) %}
-FCST_VAR{{ns.var_count}}_NAME = {{field_fcst}}_{{accum_hh}}
- {%- else %}
-FCST_VAR{{ns.var_count}}_NAME = {{field_fcst}}
- {%- endif %}
-
-{#-
-Set forecast field level(s).
+Set jinja parameters needed in setting the forecast and observation field
+level(s).
#}
{%- if (input_level_fcst == 'all') %}
{%- set levels_fcst = valid_levels_fcst %}
+ {%- set levels_obs = valid_levels_obs %}
+{#-
+If input_level_fcst is set to 'all' and there is more than one level to
+be verified for the current field, then the list of forecast thresholds
+for each forecast level must be identical to every other. Check for this.
+Note that this restriction includes the order of the thresholds, i.e. the
+set of thresholds for each level must be in the same order as for all
+other levels. Once this is verified, we can set the index of the level
+to use when obtaining thresholds to that of the first (index 0), which
+will be valid both for the case of num_levels = 1 and num_levels > 1.
+#}
+ {%- if (num_levels > 1) %}
+ {{- metplus_macros.check_for_identical_threshes_by_level(
+ field_cpld, levels_threshes_cpld) }}
+ {%- endif %}
+ {%- set indx_input_level_fcst = 0 %}
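
As a worked example of the restriction being checked (the levels and
thresholds are illustrative):

    {#- Illustrative only: with input_level_fcst = 'all' and a field
        verified at levels 'P850' and 'P700', both levels must list,
        say, ['gt273.0', 'gt283.0']: the same thresholds in the same
        order. If 'P700' instead listed ['gt283.0', 'gt273.0'], the
        check above would fail, since index 0 is used below to fetch
        a single threshold list on behalf of every level. #}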
{#-
If input_level_fcst is set to a specific value:
1) Ensure that input_level_fcst exists in the list of valid forecast
levels.
2) Get the index of input_level_fcst in the list of valid forecast
- levels. This will be needed later below when setting the observation
- level(s).
- 3) Use this index to set the forecast level to a one-element list
- containing the specified forecast level.
+ levels.
+ 3) Use this index to set the forecast and observation levels to one-
+ element lists containing the appropriate level values.
#}
{%- else %}
- {%- if input_level_fcst not in valid_levels_fcst %}
+
+ {%- if input_level_fcst in valid_levels_fcst %}
+ {%- set indx_input_level_fcst = valid_levels_fcst.index(input_level_fcst) %}
+ {%- set levels_fcst = [valid_levels_fcst[indx_input_level_fcst]] %}
+ {%- set levels_obs = [valid_levels_obs[indx_input_level_fcst]] %}
+ {%- else %}
{%- set error_msg = '\n' ~
'For the current forecast field (field_fcst), the input forecast level\n' ~
'(input_level_fcst) does not exist in the list of valid forecast levels\n' ~
@@ -414,31 +364,15 @@ If input_level_fcst is set to a specific value:
' valid_levels_fcst = ' ~ valid_levels_fcst ~ '\n' %}
{{metplus_macros.print_err_and_quit(error_msg)}}
{%- endif %}
- {%- set indx_input_level_fcst = valid_levels_fcst.index(input_level_fcst) %}
- {%- set levels_fcst = [valid_levels_fcst[indx_input_level_fcst]] %}
+
{%- endif %}
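
As a worked example of the index pairing above (the level values are
illustrative):

    {#- Illustrative values only:
          valid_levels_fcst = ['L0', 'Z2']
          valid_levels_obs  = ['L0', 'Z02']
          input_level_fcst  = 'Z2'
        then indx_input_level_fcst = 1, levels_fcst = ['Z2'], and
        levels_obs = ['Z02']: the shared index selects the observation
        level that was coupled to the requested forecast level. #}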
-FCST_VAR{{ns.var_count}}_LEVELS = {{levels_fcst|join(', ')}}
{#-
-Set forecast field threshold(s). Note that no forecast thresholds are
-included in the METplus configuration file if input_thresh_fcst is set
-to 'none'.
+Set jinja parameters needed in setting the forecast and observation field
+threshold(s).
#}
{%- if (input_thresh_fcst != 'none') %}
{#-
-If input_level_fcst is set to 'all' and there is more than one (forecast
-or observation) level to be verified for the current (forecast or
-observation) field, then the list of forecast thresholds for each forecast
-level must be identical to every other. Check for this. Note that this
-restriction includes the order of the thresholds, i.e. the set of
-thresholds for each level must be in the same order as for all other
-levels.
-#}
- {%- if (input_level_fcst == 'all') and (num_valid_levels > 1) %}
- {{- metplus_macros.check_for_identical_threshes_by_level(
- field_fcst, fields_levels_threshes_fcst[i]) }}
- {%- endif %}
-{#-
Now set the list of valid forecast thresholds to the one corresponding
to the first (zeroth) forecast level in the list of forecast levels set
above. We can do this because, for the case of a single forecast level,
@@ -446,29 +380,45 @@ there is only one list of forecast thresholds to consider (the first
one), and for the case of all levels, all levels have the same set of
thresholds (as verified by the check above).
#}
- {%- set valid_threshes_fcst = fields_levels_threshes_fcst[i][field_fcst][levels_fcst[0]] %}
+ {%- set threshes_cpld = levels_threshes_cpld[levels_cpld[indx_input_level_fcst]] %}
+ {%- set valid_threshes_fcst = [] %}
+ {%- set valid_threshes_obs = [] %}
+ {%- for thresh_cpld in threshes_cpld %}
+ {%- if delim_str in thresh_cpld %}
+ {%- set thresh_fcst, thresh_obs = thresh_cpld.split(delim_str) %}
+ {%- else %}
+ {%- set thresh_fcst = thresh_cpld %}
+ {%- set thresh_obs = thresh_cpld %}
+ {%- endif %}
+ {%- set tmp = valid_threshes_fcst.append(thresh_fcst) %}
+ {%- set tmp = valid_threshes_obs.append(thresh_obs) %}
+ {%- endfor %}
{#-
-If input_thresh_fcst is set to 'all', set the list of forecast thresholds
-to the full set of valid values.
+If input_thresh_fcst is set to 'all', set the list of forecast and
+observation thresholds to the full set of valid values.
#}
{%- if (input_thresh_fcst == 'all') %}
{%- set threshes_fcst = valid_threshes_fcst %}
+ {%- set threshes_obs = valid_threshes_obs %}
{#-
If input_thresh_fcst is set to a specific value:
1) Ensure that input_thresh_fcst exists in the list of valid forecast
thresholds.
2) Get the index of input_thresh_fcst in the list of valid forecast
- thresholds. This will be needed later below when setting the
- observation threshold(s).
- 3) Use this index to set the forecast threshold to a one-element list
- containing the specified forecast threshold.
+ thresholds.
+ 3) Use this index to set the forecast and observation thresholds to one-
+ element lists containing the appropriate threshold values.
#}
{%- else %}
- {%- if input_thresh_fcst not in valid_threshes_fcst %}
+ {%- if input_thresh_fcst in valid_threshes_fcst %}
+ {%- set indx_input_thresh_fcst = valid_threshes_fcst.index(input_thresh_fcst) %}
+ {%- set threshes_fcst = [valid_threshes_fcst[indx_input_thresh_fcst]] %}
+ {%- set threshes_obs = [valid_threshes_obs[indx_input_thresh_fcst]] %}
+ {%- else %}
{%- set error_msg = '\n' ~
-'For the current forecast field (field_fcst) and list of forecast level(s)\n' ~
+'For the current forecast field (field_fcst) and list of forecast levels\n' ~
'(levels_fcst), the input forecast threshold (input_thresh_fcst) does not\n' ~
'exist in the list of valid forecast thresholds (valid_threshes_fcst):\n' ~
' field_fcst = ' ~ field_fcst ~ '\n' ~
@@ -477,22 +427,47 @@ If input_thresh_fcst is set to a specific value:
' input_thresh_fcst = ' ~ input_thresh_fcst ~ '\n' %}
{{metplus_macros.print_err_and_quit(error_msg)}}
{%- endif %}
- {%- set indx_input_thresh_fcst = valid_threshes_fcst.index(input_thresh_fcst) %}
- {%- set threshes_fcst = [valid_threshes_fcst[indx_input_thresh_fcst]] %}
{%- endif %}
+
+ {%- endif %}
+
+{#-
+Set forecast field name. Note that this has to exactly match the name
+of the field in the input forecast file.
+
+For accumulated fields, the input forecast file is generated by MET's
+PcpCombine tool. In that file, the field name consists of the forecast
+field name here (field_fcst) with the accumulation period appended to
+it (separated by an underscore), so we must do the same here to get an
+exact match.
+#}
+ {%- if (input_field_group in ['APCP', 'ASNOW']) %}
+FCST_VAR{{ns.var_count}}_NAME = {{field_fcst}}_{{accum_hh}}
+ {%- else %}
+FCST_VAR{{ns.var_count}}_NAME = {{field_fcst}}
+ {%- endif %}
+
+{#-
+Set forecast field level(s).
+#}
+FCST_VAR{{ns.var_count}}_LEVELS = {{levels_fcst|join(', ')}}
+
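
Illustrative rendered output for these name and level settings (the field,
accumulation, and level values are assumptions, not from a particular run):

    {#- For an accumulated field with field_fcst = 'APCP', accum_hh = '03',
        and levels_fcst = ['A3']:
          FCST_VAR1_NAME = APCP_03
          FCST_VAR1_LEVELS = A3
        For a non-accumulated field such as TMP with
        levels_fcst = ['P850', 'P700']:
          FCST_VAR1_NAME = TMP
          FCST_VAR1_LEVELS = P850, P700 #}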
{#-
-If threshes_fcst has been reset to something other than its default
-value of an empty list, then set the forecast thresholds in the METplus
-configuration file because that implies threshes_fcst was set above to
-a non-empty value. Then reset threshes_fcst to its default value for
-proper processing of thresholds for the next field.
+Set forecast field threshold(s). Note that:
+1) No forecast thresholds are included in the METplus configuration file
+ if input_thresh_fcst is set to 'none'.
+2) If threshes_fcst has been reset to something other than its default
+ value of an empty list, then set the forecast thresholds in the METplus
+ configuration file because that implies threshes_fcst was set above to
+ a non-empty value. Then reset threshes_fcst to its default value for
+ proper processing of thresholds for the next field.
#}
+ {%- if (input_thresh_fcst != 'none') %}
{%- if (threshes_fcst != []) %}
FCST_VAR{{ns.var_count}}_THRESH = {{threshes_fcst|join(', ')}}
{%- endif %}
{%- set threshes_fcst = [] %}
-
{%- endif %}
{#-
@@ -560,7 +535,7 @@ Set observation field name. Note that this has to exactly match the name
of the field in the input observation file.
For accumulated fields, the input observation file is generated by MET's
-PcpCombine tool. In that file, the field name consists of the observation
+PcpCombine tool. In that file, the field name consists of the observation
field name here (field_obs) with the accumulation period appended to it
(separated by an underscore), so we must do the same here to get an exact
match.
@@ -581,77 +556,23 @@ OBS_VAR{{ns.var_count}}_NAME = {{field_obs}}
{#-
Set observation field level(s).
#}
- {%- if (input_level_fcst == 'all') %}
- {%- set levels_obs = valid_levels_obs %}
-{#-
-If input_level_fcst is set to a specific forecast level, then the
-observation level is given by the element in the list of valid observation
-levels that has the same index as that of input_level_fcst in the list
-of valid forecast levels.
-#}
- {%- else %}
- {%- set levels_obs = [valid_levels_obs[indx_input_level_fcst]] %}
- {%- endif %}
OBS_VAR{{ns.var_count}}_LEVELS = {{levels_obs|join(', ')}}
{#-
-Set observation field threshold(s). Note that no observation thresholds
-are included in the METplus configuration file if input_thresh_fcst is
-set to 'none'.
+Set observation field threshold(s). Note that:
+1) No observation thresholds are included in the METplus configuration
+ file if input_thresh_fcst is set to 'none'.
+2) If threshes_obs has been reset to something other than its default value
+ of an empty list, then we set the observation thresholds in the METplus
+ configuration file because that implies threshes_obs was set above to
+ a non-empty value. Then reset threshes_obs to its default value for
+ proper processing of thresholds for the next field.
#}
{%- if (input_thresh_fcst != 'none') %}
-{#-
-If input_level_fcst is set to 'all' and there is more than one (forecast
-or observation) level to be verified for the current (forecast or
-observation) field, then the list of observation thresholds for each
-observation level must be identical to every other. Check for this.
-Note that this restriction includes the order of the thresholds, i.e.
-the set of thresholds for each level must be in the same order as for
-all other levels.
-#}
- {%- if (input_level_fcst == 'all') and (num_valid_levels > 1) %}
- {{- metplus_macros.check_for_identical_threshes_by_level(
- field_obs, fields_levels_threshes_obs[i]) }}
- {%- endif %}
-{#-
-Now set the list of valid observation thresholds to the one corresponding
-to the first (zeroth) observation level in the list of observation levels
-set above. We can do this because, for the case of a single observaton
-level, there is only one list of observation thresholds to consider (the
-first one), and for the case of all levels, all levels have the same set
-of thresholds (as verified by the check above).
-#}
- {%- set valid_threshes_obs = fields_levels_threshes_obs[i][field_obs][levels_obs[0]] %}
-{#-
-If input_thresh_fcst is set to 'all', set the list of observation thresholds
-to the full set of valid values.
-#}
- {%- if (input_thresh_fcst == 'all') %}
-
- {%- set threshes_obs = valid_threshes_obs %}
-{#-
-If input_thresh_fcst is set to a specific forecast threshold, then the
-observation threshold is given by the element in the list of valid
-observation thresholds that has the same index as that of input_thresh_fcst
-in the list of valid forecast thresholds.
-#}
- {%- else %}
-
- {%- set threshes_obs = [valid_threshes_obs[indx_input_thresh_fcst]] %}
-
- {%- endif %}
-{#-
-If threshes_obs has been reset to something other than its default value
-of an empty list, then set the observation thresholds in the METplus
-configuration file because that implies threshes_obs was set above to
-a non-empty value. Then reset threshes_obs to its default value for
-proper processing of thresholds for the next field.
-#}
{%- if (threshes_obs != []) %}
OBS_VAR{{ns.var_count}}_THRESH = {{threshes_obs|join(', ')}}
{%- endif %}
{%- set threshes_obs = [] %}
-
{%- endif %}
{#-
@@ -721,6 +642,8 @@ forecast and observation settings) from those for the next field.
{%- endfor %}
+
+
{%- if (METPLUS_TOOL_NAME == 'GRID_STAT') %}
{%- if (input_field_group in ['APCP', 'ASNOW']) %}
#
diff --git a/parm/metplus/PointStat_ensmean.conf b/parm/metplus/PointStat_ensmean.conf
index 67a20034df..b16a481dbd 100644
--- a/parm/metplus/PointStat_ensmean.conf
+++ b/parm/metplus/PointStat_ensmean.conf
@@ -183,70 +183,49 @@ script instead of a hard-coded value as below.
{#-
Jinja requires certain variables to be defined globally within the template
-before they can be used in if-statements and other scopes (see Jinja
-scoping rules). Define such variables.
+before they can be used in if-statements and other scopes (see Jinja scoping
+rules). Define such variables.
#}
-{%- set level_fcst = '' %}
-{%- set level_obs = '' %}
{%- set indx_level_fcst = '' %}
-
-{%- set valid_threshes_fcst = [] %}
-{%- set valid_threshes_obs = [] %}
-{%- set threshes_fcst = '' %}
-{%- set threshes_obs = '' %}
{%- set indx_input_thresh_fcst = '' %}
-
+{%- set error_msg = '' %}
{%- set opts_indent = '' %}
{%- set opts_indent_len = '' %}
{%- set tmp = '' %}
-{%- set error_msg = '' %}
-{#-
-Make sure that the set of field groups for forecasts and observations
-are identical.
-#}
-{%- set fgs_fcst = vx_config_dict['fcst'].keys()|list %}
-{%- set fgs_obs = vx_config_dict['obs'].keys()|list %}
-{%- if (fgs_fcst != fgs_obs) %}
- {%- set error_msg = '\n' ~
-'The list of valid field groups for forecasts (fgs_fcst) must be identical\n' ~
-'to that for observations (fgs_obs) but isn\'t:\n' ~
-' fgs_fcst = ' ~ fgs_fcst ~ '\n' ~
-' fgs_obs = ' ~ fgs_obs %}
- {{metplus_macros.print_err_and_quit(error_msg)}}
-{%- endif %}
+{%- set field_fcst = '' %}
+{%- set field_obs = '' %}
+{%- set level_fcst = '' %}
+{%- set level_obs = '' %}
+{%- set thresh_fcst = '' %}
+{%- set thresh_obs = '' %}
+
+{%- set threshes_fcst = [] %}
+{%- set threshes_obs = [] %}
{#-
-Extract the lists of forecast and observation dictionaries containing
-the valid fields, levels, and thresholds corresponding to the specified
-field group (input_field_group). Note that it would be simpler to have
-these be just dictionaries in which the keys are the field names (instead
-of them being LISTS of dictionaries in which each dictionary contains a
-single key that is the field name), but that approach cannot be used here
-because it is possible for field names to be repeated (for both forecasts
-and observations). For example, in the observations, the field name
-'PRWE' appears more than once, each time with a different threshold, and
-the combination of name and threshold is what constitutes a unique field,
-not just the name by itself.
+Get the set of valid field groups and ensure that the specified input
+field group appears in this list.
#}
-{%- set fields_levels_threshes_fcst = vx_config_dict['fcst'][input_field_group] %}
-{%- set fields_levels_threshes_obs = vx_config_dict['obs'][input_field_group] %}
+{%- set valid_field_groups = vx_config_dict.keys()|list %}
+{{- metplus_macros.check_field_group(valid_field_groups, input_field_group) }}
{#-
-Reset the specified forecast level so that if it happens to be an
-accumulation (e.g. 'A03'), the leading zeros in front of the hour are
-stipped out (e.g. reset to 'A3').
+Reset the input forecast level so that if it happens to be an accumulation
+(e.g. 'A03'), the leading zeros in front of the hour are stripped out (e.g.
+reset to 'A3').
#}
{%- set input_level_fcst = metplus_macros.get_accumulation_no_zero_pad(input_level_fcst) %}
{#-
-Ensure that the specified input forecast level(s) (input_level_fcst) and
-threshold(s) (input_thresh_fcst) are valid, i.e. that they are in the
-set(s) of valid forecast levels and thresholds, respectively, specified
-in fields_levels_threshes_fcst.
+Extract from the configuration dictionary the set (which itself is a
+dictionary) of fields, levels, and thresholds corresponding to the input
+field group. Then set the delimiter string that separates forecast and
+observation values in the various items (i.e. dictionary keys and values
+representing field names, levels, and thresholds) in this dictionary.
#}
-{{- metplus_macros.check_level(fields_levels_threshes_fcst, input_level_fcst) }}
-{{- metplus_macros.check_thresh(fields_levels_threshes_fcst, input_level_fcst, input_thresh_fcst) }}
+{%- set fields_levels_threshes_cpld = vx_config_dict[input_field_group] %}
+{%- set delim_str = metplus_macros.set_delim_str() %}
{#-
Some fields in the specified field group (input_field_group) may need to
@@ -264,72 +243,98 @@ following dictionary.
{%- set fields_fcst_to_exclude = fields_fcst_to_exclude_by_field_group[input_field_group] %}
{#-
-For convenience, create lists of valid forecast and observation field
-names.
+Remove from the dictionary fields_levels_threshes_cpld any fields that
+are in the list to be excluded.
#}
-{%- set num_valid_fields_fcst = fields_levels_threshes_fcst|length %}
-{%- set valid_fields_fcst = [] %}
-{%- for i in range(0,num_valid_fields_fcst) %}
- {%- set field = fields_levels_threshes_fcst[i].keys()|list|join('') %}
- {%- set tmp = valid_fields_fcst.append(field) %}
-{%- endfor %}
+{%- for field_cpld in fields_levels_threshes_cpld.copy() %}
-{%- set valid_fields_obs = [] %}
-{%- set num_valid_fields_obs = fields_levels_threshes_obs|length %}
-{%- for i in range(0,num_valid_fields_obs) %}
- {%- set field = fields_levels_threshes_obs[i].keys()|list|join('') %}
- {%- set tmp = valid_fields_obs.append(field) %}
-{%- endfor %}
+ {%- if delim_str in field_cpld %}
+ {%- set field_fcst, field_obs = field_cpld.split(delim_str) %}
+ {%- else %}
+ {%- set field_fcst = field_cpld %}
+ {%- set field_obs = field_cpld %}
+ {%- endif %}
-{#-
-Ensure that the number of valid fields for forecasts is equal to that
-for the observations.
-#}
-{%- set num_valid_fields = 0 %}
-{%- if (num_valid_fields_fcst != num_valid_fields_obs) %}
- {%- set error_msg = '\n' ~
-'The number of valid forecast fields (num_valid_fields_fcst) must be\n' ~
-'equal to the number of valid observation fields (num_valid_fields_obs)\n' ~
-'but isn\'t:\n' ~
-' num_valid_fields_fcst = ' ~ num_valid_fields_fcst ~ '\n' ~
-' num_valid_fields_obs = ' ~ num_valid_fields_obs ~ '\n' ~
-'The lists of valid forecast and observation fields are:\n' ~
-' valid_fields_fcst = ' ~ valid_fields_fcst ~ '\n' ~
-' valid_fields_obs = ' ~ valid_fields_obs ~ '\n' %}
- {{metplus_macros.print_err_and_quit(error_msg)}}
-{%- else %}
- {%- set num_valid_fields = num_valid_fields_fcst %}
-{%- endif %}
+ {%- if field_fcst in fields_fcst_to_exclude %}
+ {%- set tmp = fields_levels_threshes_cpld.pop(field_cpld) %}
+ {%- endif %}
+
+{%- endfor %}
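
A minimal sketch of this filtering pattern (the field names and the '%%'
delimiter stand-in are illustrative): iterating over a copy of the
dictionary allows entries to be popped from the original without
invalidating the loop.

    {%- set fields_levels_threshes_cpld = {'TMP': {}, 'DPT%%TD': {}} %}
    {%- set fields_fcst_to_exclude = ['DPT'] %}
    {%- for field_cpld in fields_levels_threshes_cpld.copy() %}
      {%- set field_fcst = field_cpld.split('%%')[0] %}
      {%- if field_fcst in fields_fcst_to_exclude %}
        {%- set tmp = fields_levels_threshes_cpld.pop(field_cpld) %}
      {%- endif %}
    {%- endfor %}
    {#- fields_levels_threshes_cpld now contains only the 'TMP' entry. #}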
{#-
-Loop over the valid fields and set field names, levels, thresholds, and/
-or options for each field, both for forecasts and for obseratiions, in
-the METplus configuration file.
+Loop over the fields and set field names, levels, thresholds, and/or
+options for each field, both for forecasts and for observations, in the
+METplus configuration file.
#}
{%- set ns = namespace(var_count = 0) %}
+{%- for field_cpld, levels_threshes_cpld in fields_levels_threshes_cpld.items() %}
-{%- for i in range(0,num_valid_fields) if valid_fields_fcst[i] not in fields_fcst_to_exclude %}
-
- {%- set field_fcst = valid_fields_fcst[i] %}
- {%- set field_obs = valid_fields_obs[i] %}
+ {%- if delim_str in field_cpld %}
+ {%- set field_fcst, field_obs = field_cpld.split(delim_str) %}
+ {%- else %}
+ {%- set field_fcst = field_cpld %}
+ {%- set field_obs = field_cpld %}
+ {%- endif %}
{#-
For convenience, create lists of valid forecast and observation levels
-for the current field. Then check that the number of valid levels for
-forecasts is the same as that for observations.
+for the current field.
+#}
+ {%- set valid_levels_fcst = [] %}
+ {%- set valid_levels_obs = [] %}
+ {%- for level_cpld, threshes_cpld in levels_threshes_cpld.items() %}
+ {%- if delim_str in level_cpld %}
+ {%- set level_fcst, level_obs = level_cpld.split(delim_str) %}
+ {%- else %}
+ {%- set level_fcst = level_cpld %}
+ {%- set level_obs = level_cpld %}
+ {%- endif %}
+ {%- set tmp = valid_levels_fcst.append(level_fcst) %}
+ {%- set tmp = valid_levels_obs.append(level_obs) %}
+ {%- endfor %}
+
+{#-
+Make sure that the input forecast level (input_level_fcst) is set to a
+valid value.
#}
- {%- set valid_levels_fcst = fields_levels_threshes_fcst[i][field_fcst].keys()|list %}
- {%- set valid_levels_obs = fields_levels_threshes_obs[i][field_obs].keys()|list %}
+ {%- if (input_level_fcst != 'all') and (input_level_fcst not in valid_levels_fcst) %}
+ {%- set error_msg = '\n' ~
+'The input forecast level (input_level_fcst) must be set either to \'all\'\n' ~
+'or to one of the elements in the list of valid levels (valid_levels_fcst)\n' ~
+'for the current forecast field (field_fcst). This is not the case:\n' ~
+' field_fcst = ' ~ field_fcst ~ '\n' ~
+' valid_levels_fcst = ' ~ valid_levels_fcst ~ '\n' ~
+' input_level_fcst = ' ~ input_level_fcst ~ '\n' %}
+ {{metplus_macros.print_err_and_quit(error_msg)}}
+ {%- endif %}
{#-
-Extract dictionary of valid forecast levels (the dictionary keys) and
-corresponding lists of valid thresholds (the values) for each level.
-Then loop over these levels and corresponding lists of thresholds to set
-both the forecast and observation field names, levels, thresholds, and/or
-options.
+Loop over the (coupled) levels and corresponding lists of thresholds.
+Extract from these the level values for forecasts and observations and
+use them to set the forecast and observation field names, levels,
+thresholds, and/or options in the METplus configuration file.
#}
- {%- set valid_levels_threshes_fcst = fields_levels_threshes_fcst[i][field_fcst] %}
- {%- for level_fcst, valid_threshes_fcst in valid_levels_threshes_fcst.items() %}
+ {%- for level_cpld, threshes_cpld in levels_threshes_cpld.items() %}
+
+ {%- if delim_str in level_cpld %}
+ {%- set level_fcst, level_obs = level_cpld.split(delim_str) %}
+ {%- else %}
+ {%- set level_fcst = level_cpld %}
+ {%- set level_obs = level_cpld %}
+ {%- endif %}
+
+ {%- set valid_threshes_fcst = [] %}
+ {%- set valid_threshes_obs = [] %}
+ {%- for thresh_cpld in threshes_cpld %}
+ {%- if delim_str in thresh_cpld %}
+ {%- set thresh_fcst, thresh_obs = thresh_cpld.split(delim_str) %}
+ {%- else %}
+ {%- set thresh_fcst = thresh_cpld %}
+ {%- set thresh_obs = thresh_cpld %}
+ {%- endif %}
+ {%- set tmp = valid_threshes_fcst.append(thresh_fcst) %}
+ {%- set tmp = valid_threshes_obs.append(thresh_obs) %}
+ {%- endfor %}
{%- if (input_level_fcst == 'all') or (input_level_fcst == level_fcst) %}
{#-
@@ -362,17 +367,19 @@ to the full set of valid values.
{%- set threshes_fcst = valid_threshes_fcst %}
{#-
If input_thresh_fcst is set to a specific value:
- 1) Ensure that input_thresh_fcst exists in the list of valid forecast
- thresholds.
- 2) Get the index of input_thresh_fcst in the list of valid forecast
- thresholds. This will be needed later below when setting the
- observation threshold(s).
- 3) Use this index to set the forecast threshold to a one-element list
- containing the specified forecast threshold.
+* If that value is valid, i.e. it exists in the list of valid forecast
+ thresholds, get its index in that list and use it to set the forecast
+ threshold to a one-element list containing that value. Note that the
+ index will be needed later below when setting the observation threshold(s).
+* If the input forecast threshold is not valid, print out an error message
+ and exit.
#}
{%- else %}
- {%- if input_thresh_fcst not in valid_threshes_fcst %}
+ {%- if input_thresh_fcst in valid_threshes_fcst %}
+ {%- set indx_input_thresh_fcst = valid_threshes_fcst.index(input_thresh_fcst) %}
+ {%- set threshes_fcst = [valid_threshes_fcst[indx_input_thresh_fcst]] %}
+ {%- else %}
{%- set error_msg = '\n' ~
'For the current forecast field (field_fcst) and forecast level (level_fcst),\n' ~
'the input forecast threshold (input_thresh_fcst) does not exist in the list\n' ~
@@ -383,8 +390,6 @@ If input_thresh_fcst is set to a specific value:
' input_thresh_fcst = ' ~ input_thresh_fcst ~ '\n' %}
{{metplus_macros.print_err_and_quit(error_msg)}}
{%- endif %}
- {%- set indx_input_thresh_fcst = valid_threshes_fcst.index(input_thresh_fcst) %}
- {%- set threshes_fcst = [valid_threshes_fcst[indx_input_thresh_fcst]] %}
{%- endif %}
{#-
@@ -441,11 +446,6 @@ set to 'none'.
#}
{%- if (input_thresh_fcst != 'none') %}
{#-
-Set the list of valid observation thresholds to the one corresponding to
-the current observation level (level_obs).
-#}
- {%- set valid_threshes_obs = fields_levels_threshes_obs[i][field_obs][level_obs] %}
-{#-
If input_thresh_fcst is set to 'all', set the list of observation thresholds
to the full set of valid values.
#}
@@ -459,9 +459,7 @@ observation thresholds that has the same index as that of input_thresh_fcst
in the list of valid forecast thresholds.
#}
{%- else %}
-
{%- set threshes_obs = [valid_threshes_obs[indx_input_thresh_fcst]] %}
-
{%- endif %}
{#-
If threshes_obs has been reset to something other than its default value
diff --git a/parm/metplus/PointStat_ensprob.conf b/parm/metplus/PointStat_ensprob.conf
index 69ef9fd5db..84b9f3954d 100644
--- a/parm/metplus/PointStat_ensprob.conf
+++ b/parm/metplus/PointStat_ensprob.conf
@@ -185,117 +185,53 @@ script instead of a hard-coded value as below.
{#-
Jinja requires certain variables to be defined globally within the template
-before they can be used in if-statements and other scopes (see Jinja
-scoping rules). Define such variables.
+before they can be used in if-statements and other scopes (see Jinja scoping
+rules). Define such variables.
#}
-{%- set level_fcst = '' %}
-{%- set level_obs = '' %}
{%- set indx_level_fcst = '' %}
-
-{%- set valid_threshes_fcst = [] %}
-{%- set valid_threshes_obs = [] %}
-{%- set thresh_fcst = '' %}
-{%- set thresh_obs = '' %}
{%- set indx_thresh_fcst = '' %}
-{%- set thresh_fcst_and_or = '' %}
-
+{%- set error_msg = '' %}
{%- set opts_indent = '' %}
{%- set opts_indent_len = '' %}
-{%- set tmp = '' %}
-{%- set error_msg = '' %}
-{#-
-Make sure that the set of field groups for forecasts and observations
-are identical.
-#}
-{%- set fgs_fcst = vx_config_dict['fcst'].keys()|list %}
-{%- set fgs_obs = vx_config_dict['obs'].keys()|list %}
-{%- if (fgs_fcst != fgs_obs) %}
- {%- set error_msg = '\n' ~
-'The list of valid field groups for forecasts (fgs_fcst) must be identical\n' ~
-'to that for observations (fgs_obs) but isn\'t:\n' ~
-' fgs_fcst = ' ~ fgs_fcst ~ '\n' ~
-' fgs_obs = ' ~ fgs_obs %}
- {{metplus_macros.print_err_and_quit(error_msg)}}
-{%- endif %}
+{%- set field_fcst = '' %}
+{%- set field_obs = '' %}
+{%- set level_fcst = '' %}
+{%- set level_obs = '' %}
+{%- set thresh_fcst = '' %}
+{%- set thresh_obs = '' %}
+{%- set thresh_fcst_and_or = '' %}
{#-
-Extract the lists of forecast and observation dictionaries containing
-the valid fields, levels, and thresholds corresponding to the specified
-field group (input_field_group). Note that it would be simpler to have
-these be just dictionaries in which the keys are the field names (instead
-of them being LISTS of dictionaries in which each dictionary contains a
-single key that is the field name), but that approach cannot be used here
-because it is possible for field names to be repeated (for both forecasts
-and observations). For example, in the observations, the field name
-'PRWE' appears more than once, each time with a different threshold, and
-the combination of name and threshold is what constitutes a unique field,
-not just the name by itself.
+Get the set of valid field groups and ensure that the specified input
+field group appears in this list.
#}
-{%- set fields_levels_threshes_fcst = vx_config_dict['fcst'][input_field_group] %}
-{%- set fields_levels_threshes_obs = vx_config_dict['obs'][input_field_group] %}
+{%- set valid_field_groups = vx_config_dict.keys()|list %}
+{{- metplus_macros.check_field_group(valid_field_groups, input_field_group) }}
{#-
-Reset the specified forecast level so that if it happens to be an
-accumulation (e.g. 'A03'), the leading zeros in front of the hour are
-stipped out (e.g. reset to 'A3').
+Reset the input forecast level so that if it happens to be an accumulation
+(e.g. 'A03'), the leading zeros in front of the hour are stripped out (e.g.
+reset to 'A3').
#}
{%- set input_level_fcst = metplus_macros.get_accumulation_no_zero_pad(input_level_fcst) %}
{#-
-Ensure that the specified input forecast level(s) (input_level_fcst) and
-threshold(s) (input_thresh_fcst) are valid, i.e. that they are in the
-set(s) of valid forecast levels and thresholds, respectively, specified
-in fields_levels_threshes_fcst.
+Extract from the configuration dictionary the set (which itself is a
+dictionary) of fields, levels, and thresholds corresponding to the input
+field group. Then set the delimiter string that separates forecast and
+observation values in the various items (i.e. dictionary keys and values
+representing field names, levels, and thresholds) in this dictionary.
#}
-{{- metplus_macros.check_level(fields_levels_threshes_fcst, input_level_fcst) }}
-{{- metplus_macros.check_thresh(fields_levels_threshes_fcst, input_level_fcst, input_thresh_fcst) }}
+{%- set fields_levels_threshes_cpld = vx_config_dict[input_field_group] %}
+{%- set delim_str = metplus_macros.set_delim_str() %}
{#-
-For convenience, create lists of valid forecast and observation field
-names.
-#}
-{%- set num_valid_fields_fcst = fields_levels_threshes_fcst|length %}
-{%- set valid_fields_fcst = [] %}
-{%- for i in range(0,num_valid_fields_fcst) %}
- {%- set field = fields_levels_threshes_fcst[i].keys()|list|join('') %}
- {%- set tmp = valid_fields_fcst.append(field) %}
-{%- endfor %}
-
-{%- set valid_fields_obs = [] %}
-{%- set num_valid_fields_obs = fields_levels_threshes_obs|length %}
-{%- for i in range(0,num_valid_fields_obs) %}
- {%- set field = fields_levels_threshes_obs[i].keys()|list|join('') %}
- {%- set tmp = valid_fields_obs.append(field) %}
-{%- endfor %}
-
-{#-
-Ensure that the number of valid fields for forecasts is equal to that
-for the observations.
-#}
-{%- set num_valid_fields = 0 %}
-{%- if (num_valid_fields_fcst != num_valid_fields_obs) %}
- {%- set error_msg = '\n' ~
-'The number of valid forecast fields (num_valid_fields_fcst) must be\n' ~
-'equal to the number of valid observation fields (num_valid_fields_obs)\n' ~
-'but isn\'t:\n' ~
-' num_valid_fields_fcst = ' ~ num_valid_fields_fcst ~ '\n' ~
-' num_valid_fields_obs = ' ~ num_valid_fields_obs ~ '\n' ~
-'The lists of valid forecast and observation fields are:\n' ~
-' valid_fields_fcst = ' ~ valid_fields_fcst ~ '\n' ~
-' valid_fields_obs = ' ~ valid_fields_obs ~ '\n' %}
- {{metplus_macros.print_err_and_quit(error_msg)}}
-{%- else %}
- {%- set num_valid_fields = num_valid_fields_fcst %}
-{%- endif %}
-
-{#-
-Loop over the valid fields and set field names, levels, thresholds, and/
-or options for each field, both for forecasts and for obseratiions, in
-the METplus configuration file.
+Loop over the fields and set field names, levels, thresholds, and/or
+options for each field, both for forecasts and for observations, in the
+METplus configuration file.
#}
{%- set ns = namespace(var_count = 0) %}
-
{#-
This outer for-loop is included to make this code as similar as possible
to the one in GridStat_ensprob.conf. There, treat_fcst_as_prob takes on
@@ -305,28 +241,74 @@ need to be set to False. This is being investigated (12/13/2023).
#}
{%- for treat_fcst_as_prob in [True] %}
- {%- for i in range(0,num_valid_fields) %}
+ {%- for field_cpld, levels_threshes_cpld in fields_levels_threshes_cpld.items() %}
- {%- set field_fcst = valid_fields_fcst[i] %}
- {%- set field_obs = valid_fields_obs[i] %}
+ {%- if delim_str in field_cpld %}
+ {%- set field_fcst, field_obs = field_cpld.split(delim_str) %}
+ {%- else %}
+ {%- set field_fcst = field_cpld %}
+ {%- set field_obs = field_cpld %}
+ {%- endif %}
{#-
For convenience, create lists of valid forecast and observation levels
-for the current field. Then check that the number of valid levels for
-forecasts is the same as that for observations.
+for the current field.
+#}
+ {%- set valid_levels_fcst = [] %}
+ {%- set valid_levels_obs = [] %}
+ {%- for level_cpld, threshes_cpld in levels_threshes_cpld.items() %}
+ {%- if delim_str in level_cpld %}
+ {%- set level_fcst, level_obs = level_cpld.split(delim_str) %}
+ {%- else %}
+ {%- set level_fcst = level_cpld %}
+ {%- set level_obs = level_cpld %}
+ {%- endif %}
+ {%- set tmp = valid_levels_fcst.append(level_fcst) %}
+ {%- set tmp = valid_levels_obs.append(level_obs) %}
+ {%- endfor %}
+
+{#-
+Make sure that the input forecast level (input_level_fcst) is set to a
+valid value.
#}
- {%- set valid_levels_fcst = fields_levels_threshes_fcst[i][field_fcst].keys()|list %}
- {%- set valid_levels_obs = fields_levels_threshes_obs[i][field_obs].keys()|list %}
+ {%- if (input_level_fcst != 'all') and (input_level_fcst not in valid_levels_fcst) %}
+ {%- set error_msg = '\n' ~
+'The input forecast level (input_level_fcst) must be set either to \'all\'\n' ~
+'or to one of the elements in the list of valid levels (valid_levels_fcst)\n' ~
+'for the current forecast field (field_fcst). This is not the case:\n' ~
+' field_fcst = ' ~ field_fcst ~ '\n' ~
+' valid_levels_fcst = ' ~ valid_levels_fcst ~ '\n' ~
+' input_level_fcst = ' ~ input_level_fcst ~ '\n' %}
+ {{metplus_macros.print_err_and_quit(error_msg)}}
+ {%- endif %}
{#-
-Extract dictionary of valid forecast levels (the dictionary keys) and
-corresponding lists of valid thresholds (the values) for each level.
-Then loop over these levels and corresponding lists of thresholds to set
-both the forecast and observation field names, levels, thresholds, and/or
-options.
+Loop over the (coupled) levels and corresponding lists of thresholds.
+Extract from these the level values for forecasts and observations and
+use them to set the forecast and observation field names, levels,
+thresholds, and/or options in the METplus configuration file.
#}
- {%- set valid_levels_threshes_fcst = fields_levels_threshes_fcst[i][field_fcst] %}
- {%- for level_fcst, valid_threshes_fcst in valid_levels_threshes_fcst.items() %}
+ {%- for level_cpld, threshes_cpld in levels_threshes_cpld.items() %}
+
+ {%- if delim_str in level_cpld %}
+ {%- set level_fcst, level_obs = level_cpld.split(delim_str) %}
+ {%- else %}
+ {%- set level_fcst = level_cpld %}
+ {%- set level_obs = level_cpld %}
+ {%- endif %}
+
+ {%- set valid_threshes_fcst = [] %}
+ {%- set valid_threshes_obs = [] %}
+ {%- for thresh_cpld in threshes_cpld %}
+ {%- if delim_str in thresh_cpld %}
+ {%- set thresh_fcst, thresh_obs = thresh_cpld.split(delim_str) %}
+ {%- else %}
+ {%- set thresh_fcst = thresh_cpld %}
+ {%- set thresh_obs = thresh_cpld %}
+ {%- endif %}
+ {%- set tmp = valid_threshes_fcst.append(thresh_fcst) %}
+ {%- set tmp = valid_threshes_obs.append(thresh_obs) %}
+ {%- endfor %}
{%- if (input_level_fcst == 'all') or (input_level_fcst == level_fcst) %}
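For reference, a minimal Python sketch of the coupled-item convention the
hunks above implement for field names, levels, and thresholds (split_cpld
and the sample values are illustrative; the template performs the same
branching and the level check inline in Jinja):

    DELIM = '%%'   # the value returned by the new set_delim_str() macro

    def split_cpld(item_cpld):
        # Mirror the Jinja branches above; at most one delimiter is assumed.
        if DELIM in item_cpld:
            fcst, obs = item_cpld.split(DELIM)
            return fcst, obs
        return item_cpld, item_cpld

    print(split_cpld('CAPE%%MLCAPE'))  # ('CAPE', 'MLCAPE')
    print(split_cpld('P1000'))         # ('P1000', 'P1000')

    # input_level_fcst must be 'all' or one of the valid forecast levels.
    valid_levels_fcst = ['P1000', 'P925']
    input_level_fcst = 'P925'
    assert input_level_fcst == 'all' or input_level_fcst in valid_levels_fcst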
@@ -402,11 +384,6 @@ set to 'none'.
#}
{%- if (input_thresh_fcst != 'none') %}
{#-
-Set the list of valid observation thresholds to the one corresponding to
-the current observation level (level_obs).
-#}
- {%- set valid_threshes_obs = fields_levels_threshes_obs[i][field_obs][level_obs] %}
-{#-
Set the observation threshold. This is given by the element in the list
of valid observation thresholds that has the same index as that of the
current forecast threshold (thresh_fcst) in the list of valid forecast
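Stated as Python (with illustrative sample lists), the index matching that
the comment above describes reduces to:

    # The observation threshold is the element of valid_threshes_obs at the
    # same position that thresh_fcst occupies within valid_threshes_fcst.
    valid_threshes_fcst = ['ge20', 'ge30', 'ge40']
    valid_threshes_obs = ['ge20', 'ge33', 'ge44']
    thresh_fcst = 'ge30'
    indx = valid_threshes_fcst.index(thresh_fcst)
    thresh_obs = valid_threshes_obs[indx]  # 'ge33'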
@@ -442,6 +419,7 @@ OBS_VAR{{ns.var_count}}_OPTIONS = desc = "TKE";
{%- endif %}
{%- endif %}
+
{#-
Print out a newline to separate the settings for the current field (both
forecast and observation settings) from those for the next field.
diff --git a/parm/metplus/metplus_macros.jinja b/parm/metplus/metplus_macros.jinja
index 94ac5d9485..4dc8c599ce 100644
--- a/parm/metplus/metplus_macros.jinja
+++ b/parm/metplus/metplus_macros.jinja
@@ -1,3 +1,13 @@
+{#-
+Set the string delimiter that separates the forecast value of an item
+(e.g. a field name, level, or threshold) from its observation value in the
+various items in the deterministic and ensemble verification configuration
+files.
+#}
+{%- macro set_delim_str() %}
+ {{-'%%'}}
+{%- endmacro %}
+
{#-
This macro prints out an error message and quits the jinja templater.
#}
@@ -19,114 +29,32 @@ prints out 'A3'.
{{- level }}
{%- endif %}
{%- endmacro %}
-{#-
-This macro checks whether the specified level (input_level) has a valid
-value. input_level may be set to 'all' or to a specific level. If set
-to 'all', input_level is not checked because in this case, whatever valid/
-available levels are found will be included in the METplus configuration
-file for all specified fields. input_level IS checked if it is set to
-any other value because in this case, all the specified fields will use
-only that specific level in the METplus configuration file, which implies
-that the level must be valid for all such fields.
-#}
-{%- macro check_level(fields_levels_threshes, input_level) %}
- {%- if input_level != 'all' %}
-
- {%- set num_valid_fields = fields_levels_threshes|length %}
- {%- set valid_fields = [] %}
- {%- for i in range(0,num_valid_fields) %}
- {%- set field = fields_levels_threshes[i].keys()|list|join('') %}
- {%- set tmp = valid_fields.append(field) %}
- {%- endfor %}
-
- {%- for i in range(0,num_valid_fields) %}
- {%- set field = valid_fields[i] %}
- {%- set valid_levels = fields_levels_threshes[i][field].keys()|list %}
- {%- if input_level not in valid_levels %}
- {%- set error_msg = '\n' ~
- 'The specified level (input_level) is not in the list of valid levels\n' ~
- '(valid_levels) for the current field (field):\n' ~
- ' field = \'' ~ field ~ '\'\n' ~
- ' valid_levels = ' ~ valid_levels ~ '\n'
- ' input_level = \'' ~ input_level ~ '\'\n'
- 'input_level must either be set to the string \'all\' (to include all valid\n' ~
- 'values in the verification) or to one of the elements in valid_levels.' %}
- {{print_err_and_quit(error_msg)}}
- {%- endif %}
- {%- endfor %}
-
- {%- endif %}
-
-{%- endmacro %}
{#-
-This macro checks whether the specified threshold (input_thresh) has a
+This macro checks whether the specified field group (input_field_group)
+is in the list of valid field groups (valid_field_groups).
-valid value. input_thresh may be set to 'none', 'all', or a specific
-threshold. If set to 'none', input_thresh is not checked for a valid
-value since threshold information will not be included in the METplus
-configuration file. input_thresh is also not checked for a valid value
-if it set to 'all' because in this case, whatever valid/available thresholds
-are found will be included in the METplus configuration file for all
-specified field and level combination. Finally, input_thresh IS checked
-for a valid value if it is set to something other than 'none' and 'all'
-because in this case, all specified field and level combinations (where
-the latter, depending on the value of input_level, may be either all
-valid/available levels or a single one) will use only that specific
-threshold in the METplus configuration file, which implies that the
-threshold must be valid for all such field and level combinations.
#}
-{%- macro check_thresh(fields_levels_threshes, input_level, input_thresh) %}
-
- {%- if (input_thresh != 'none') and (input_thresh != 'all') %}
-
- {%- set num_valid_fields = fields_levels_threshes|length %}
- {%- set valid_fields = [] %}
- {%- for i in range(0,num_valid_fields) %}
- {%- set field = fields_levels_threshes[i].keys()|list|join('') %}
- {%- set tmp = valid_fields.append(field) %}
- {%- endfor %}
-
- {%- for i in range(0,num_valid_fields) %}
- {%- set field = valid_fields[i] %}
- {%- set valid_levels = fields_levels_threshes[i][field].keys()|list %}
- {%- set valid_levels_threshes = fields_levels_threshes[i][field] %}
-
- {%- for level, valid_threshes in valid_levels_threshes.items() %}
- {%- if (input_level == 'all') or (input_level == level) %}
- {%- if input_thresh not in valid_threshes %}
- {%- set error_msg = '\n' ~
-'The specified threshold (input_thresh) is not in the list of valid\n' ~
-'thresholds (valid_threshes) for the current field (field) and level\n' ~
-'(level) combination:\n' ~
-' field = \'' ~ field ~ '\'\n' ~
-' level = \'' ~ level ~ '\'\n' ~
-' valid_threshes = ' ~ valid_threshes ~ '\n'
-' input_thresh = \'' ~ input_thresh ~ '\'' %}
-'input_thresh must be set to the string \'all\' (to include in the METplus\n' ~
-'configuration file all thresholds for each valid combination of field and\n' ~
-'level), to the string \'none\' (to include no threshold information in the\n' ~
-'METplus configuration file), or to one of the elements in valid_threshes\n' ~
-'(to include only that specific threshold in the METplus configuration file).' %}
- {{print_err_and_quit(error_msg)}}
- {%- endif %}
- {%- endif %}
-
- {%- endfor %}
-
- {%- endfor %}
-
+{%- macro check_field_group(valid_field_groups, input_field_group) %}
+ {%- if input_field_group not in valid_field_groups %}
+ {%- set error_msg = '\n' ~
+ 'The specified input field group (input_field_group) is not in the list of\n' ~
+ 'valid field groups (valid_field_groups):\n' ~
+ ' input_field_group = \'' ~ input_field_group ~ '\'\n' ~
+ ' valid_field_groups = ' ~ valid_field_groups ~ '\n' ~
+ 'Reset input_field_group to one of the elements in valid_field_groups and\n' ~
+ 'rerun.' %}
+ {{print_err_and_quit(error_msg)}}
{%- endif %}
-
{%- endmacro %}
+
{#-
This macro checks whether, for the given field, the lists of thresholds
for all levels are identical. If not, it prints out an error message
and errors out.
#}
{%- macro check_for_identical_threshes_by_level(field, levels_threshes) %}
- {%- set avail_levels = levels_threshes[field].keys()|list %}
+ {%- set avail_levels = levels_threshes.keys()|list %}
{%- set num_avail_levels = avail_levels|length %}
- {%- set threshes_by_avail_level = levels_threshes[field].values()|list %}
+ {%- set threshes_by_avail_level = levels_threshes.values()|list %}
{%- for i in range(1,num_avail_levels) %}
{%- set level = avail_levels[i-1] %}
{%- set threshes = threshes_by_avail_level[i-1] %}
@@ -135,8 +63,8 @@ and errors out.
{%- if (threshes_next != threshes) %}
{%- set error_msg = '\n\n' ~
'For the given field (field), the set of thresholds for the next level\n' ~
-'(threshes_next, level_next) is not equal to that of the current level\n' ~
-'(threshes, level) (note that order of thresholds matters here):\n' ~
+'(level_next, threshes_next) is not equal to that of the current level\n' ~
+'(level, threshes) (note that order of thresholds matters here):\n' ~
' field = \'' ~ field ~ '\'\n' ~
' num_avail_levels = ' ~ num_avail_levels ~ '\n' ~
' level = \'' ~ level ~ '\'\n' ~
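A minimal Python sketch of the invariant that the refactored
check_for_identical_threshes_by_level macro above enforces, now that it
receives the per-field levels dictionary directly instead of a one-entry
wrapper (the function and sample values below are illustrative):

    def check_identical_threshes_by_level(field, levels_threshes):
        # Every level of the given field must carry the same ordered list
        # of thresholds; order matters, so compare the lists directly.
        levels = list(levels_threshes.keys())
        threshes_by_level = list(levels_threshes.values())
        for i in range(1, len(levels)):
            if threshes_by_level[i] != threshes_by_level[i - 1]:
                raise ValueError(
                    f"Field {field!r}: thresholds for level {levels[i]!r} "
                    f"differ from those for level {levels[i - 1]!r}.")

    # Passes: both levels carry the identical ordered list.
    check_identical_threshes_by_level(
        'WIND', {'P1000': ['ge2.572'], 'P925': ['ge2.572']})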
diff --git a/parm/metplus/vx_config_det.yaml b/parm/metplus/vx_config_det.yaml
index 4c721176c6..8ea3fd5e13 100644
--- a/parm/metplus/vx_config_det.yaml
+++ b/parm/metplus/vx_config_det.yaml
@@ -91,114 +91,118 @@ ADPSFC:
L0%%Z0: ['ge1.0%%ge174&&le176']
ADPUPA:
TMP:
- P1000: []
- P925: []
- P850: []
- P700: []
- P500: []
- P400: []
- P300: []
- P250: []
- P200: []
- P150: []
- P100: []
- P50: []
- P20: []
- P10: []
+ P1000: &adpupa_tmp_threshes
+ []
+ P925: *adpupa_tmp_threshes
+ P850: *adpupa_tmp_threshes
+ P700: *adpupa_tmp_threshes
+ P500: *adpupa_tmp_threshes
+ P400: *adpupa_tmp_threshes
+ P300: *adpupa_tmp_threshes
+ P250: *adpupa_tmp_threshes
+ P200: *adpupa_tmp_threshes
+ P150: *adpupa_tmp_threshes
+ P100: *adpupa_tmp_threshes
+ P50: *adpupa_tmp_threshes
+ P20: *adpupa_tmp_threshes
+ P10: *adpupa_tmp_threshes
RH:
- P1000: []
- P925: []
- P850: []
- P700: []
- P500: []
- P400: []
- P300: []
- P250: []
+ P1000: &adpupa_rh_threshes
+ []
+ P925: *adpupa_rh_threshes
+ P850: *adpupa_rh_threshes
+ P700: *adpupa_rh_threshes
+ P500: *adpupa_rh_threshes
+ P400: *adpupa_rh_threshes
+ P300: *adpupa_rh_threshes
+ P250: *adpupa_rh_threshes
DPT:
- P1000: []
- P925: []
- P850: []
- P700: []
- P500: []
- P400: []
- P300: []
+ P1000: &adpupa_dpt_threshes
+ []
+ P925: *adpupa_dpt_threshes
+ P850: *adpupa_dpt_threshes
+ P700: *adpupa_dpt_threshes
+ P500: *adpupa_dpt_threshes
+ P400: *adpupa_dpt_threshes
+ P300: *adpupa_dpt_threshes
UGRD:
- P1000: ['ge2.572']
- P925: ['ge2.572']
- P850: ['ge2.572']
- P700: ['ge2.572']
- P500: ['ge2.572']
- P400: ['ge2.572']
- P300: ['ge2.572']
- P250: ['ge2.572']
- P200: ['ge2.572']
- P150: ['ge2.572']
- P100: ['ge2.572']
- P50: ['ge2.572']
- P20: ['ge2.572']
- P10: ['ge2.572']
+ P1000: &adpupa_ugrd_threshes
+ ['ge2.572']
+ P925: *adpupa_ugrd_threshes
+ P850: *adpupa_ugrd_threshes
+ P700: *adpupa_ugrd_threshes
+ P500: *adpupa_ugrd_threshes
+ P400: *adpupa_ugrd_threshes
+ P300: *adpupa_ugrd_threshes
+ P250: *adpupa_ugrd_threshes
+ P200: *adpupa_ugrd_threshes
+ P150: *adpupa_ugrd_threshes
+ P100: *adpupa_ugrd_threshes
+ P50: *adpupa_ugrd_threshes
+ P20: *adpupa_ugrd_threshes
+ P10: *adpupa_ugrd_threshes
VGRD:
- P1000: ['ge2.572']
- P925: ['ge2.572']
- P850: ['ge2.572']
- P700: ['ge2.572']
- P500: ['ge2.572']
- P400: ['ge2.572']
- P300: ['ge2.572']
- P250: ['ge2.572']
- P200: ['ge2.572']
- P150: ['ge2.572']
- P100: ['ge2.572']
- P50: ['ge2.572']
- P20: ['ge2.572']
- P10: ['ge2.572']
+ P1000: &adpupa_vgrd_threshes
+ ['ge2.572']
+ P925: *adpupa_vgrd_threshes
+ P850: *adpupa_vgrd_threshes
+ P700: *adpupa_vgrd_threshes
+ P500: *adpupa_vgrd_threshes
+ P400: *adpupa_vgrd_threshes
+ P300: *adpupa_vgrd_threshes
+ P250: *adpupa_vgrd_threshes
+ P200: *adpupa_vgrd_threshes
+ P150: *adpupa_vgrd_threshes
+ P100: *adpupa_vgrd_threshes
+ P50: *adpupa_vgrd_threshes
+ P20: *adpupa_vgrd_threshes
+ P10: *adpupa_vgrd_threshes
WIND:
- P1000: ['ge2.572', 'ge2.572&<5.144', 'ge5.144', 'ge10.288', 'ge15.433', 'ge20.577', 'ge25.722']
- P925: ['ge2.572', 'ge2.572&<5.144', 'ge5.144', 'ge10.288', 'ge15.433', 'ge20.577', 'ge25.722']
- P850: ['ge2.572', 'ge2.572&<5.144', 'ge5.144', 'ge10.288', 'ge15.433', 'ge20.577', 'ge25.722']
- P700: ['ge2.572', 'ge2.572&<5.144', 'ge5.144', 'ge10.288', 'ge15.433', 'ge20.577', 'ge25.722']
- P500: ['ge2.572', 'ge2.572&<5.144', 'ge5.144', 'ge10.288', 'ge15.433', 'ge20.577', 'ge25.722']
- P400: ['ge2.572', 'ge2.572&<5.144', 'ge5.144', 'ge10.288', 'ge15.433', 'ge20.577', 'ge25.722']
- P300: ['ge2.572', 'ge2.572&<5.144', 'ge5.144', 'ge10.288', 'ge15.433', 'ge20.577', 'ge25.722']
- P250: ['ge2.572', 'ge2.572&<5.144', 'ge5.144', 'ge10.288', 'ge15.433', 'ge20.577', 'ge25.722']
- P200: ['ge2.572', 'ge2.572&<5.144', 'ge5.144', 'ge10.288', 'ge15.433', 'ge20.577', 'ge25.722']
- P150: ['ge2.572', 'ge2.572&<5.144', 'ge5.144', 'ge10.288', 'ge15.433', 'ge20.577', 'ge25.722']
- P100: ['ge2.572', 'ge2.572&<5.144', 'ge5.144', 'ge10.288', 'ge15.433', 'ge20.577', 'ge25.722']
- P50: ['ge2.572', 'ge2.572&<5.144', 'ge5.144', 'ge10.288', 'ge15.433', 'ge20.577', 'ge25.722']
- P20: ['ge2.572', 'ge2.572&<5.144', 'ge5.144', 'ge10.288', 'ge15.433', 'ge20.577', 'ge25.722']
- P10: ['ge2.572', 'ge2.572&<5.144', 'ge5.144', 'ge10.288', 'ge15.433', 'ge20.577', 'ge25.722']
+ P1000: &adpupa_wind_threshes
+ ['ge2.572', 'ge2.572&<5.144', 'ge5.144', 'ge10.288', 'ge15.433', 'ge20.577', 'ge25.722']
+ P925: *adpupa_wind_threshes
+ P850: *adpupa_wind_threshes
+ P700: *adpupa_wind_threshes
+ P500: *adpupa_wind_threshes
+ P400: *adpupa_wind_threshes
+ P300: *adpupa_wind_threshes
+ P250: *adpupa_wind_threshes
+ P200: *adpupa_wind_threshes
+ P150: *adpupa_wind_threshes
+ P100: *adpupa_wind_threshes
+ P50: *adpupa_wind_threshes
+ P20: *adpupa_wind_threshes
+ P10: *adpupa_wind_threshes
HGT:
- P1000: []
- P950: []
- P925: []
- P850: []
- P700: []
- P500: []
- P400: []
- P300: []
- P250: []
- P200: []
- P150: []
- P100: []
- P50: []
- P20: []
- P10: []
+ P1000: &adpupa_hgt_threshes
+ []
+ P950: *adpupa_hgt_threshes
+ P925: *adpupa_hgt_threshes
+ P850: *adpupa_hgt_threshes
+ P700: *adpupa_hgt_threshes
+ P500: *adpupa_hgt_threshes
+ P400: *adpupa_hgt_threshes
+ P300: *adpupa_hgt_threshes
+ P250: *adpupa_hgt_threshes
+ P200: *adpupa_hgt_threshes
+ P150: *adpupa_hgt_threshes
+ P100: *adpupa_hgt_threshes
+ P50: *adpupa_hgt_threshes
+ P20: *adpupa_hgt_threshes
+ P10: *adpupa_hgt_threshes
SPFH:
- P1000: []
- P850: []
- P700: []
- P500: []
- P400: []
- P300: []
+ P1000: &adpupa_spfh_threshes
+ []
+ P850: *adpupa_spfh_threshes
+ P700: *adpupa_spfh_threshes
+ P500: *adpupa_spfh_threshes
+ P400: *adpupa_spfh_threshes
+ P300: *adpupa_spfh_threshes
CAPE:
- L0%%L0-100000:
- ['gt500', 'gt1000', 'gt1500', 'gt2000', 'gt3000', 'gt4000']
+ L0%%L0-100000: ['gt500', 'gt1000', 'gt1500', 'gt2000', 'gt3000', 'gt4000']
HPBL%%PBL:
- Z0%%L0:
- []
+ Z0%%L0: []
HGT%%PBL:
- L0:
- []
+ L0: []
CAPE%%MLCAPE:
- L0-90%%L0:
- ['gt500', 'gt1000', 'gt1500', 'gt2000', 'gt3000', 'gt4000']
+ L0-90%%L0: ['gt500', 'gt1000', 'gt1500', 'gt2000', 'gt3000', 'gt4000']
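The anchor/alias refactor above is behavior-preserving: every aliased level
resolves to the same threshold list once the YAML is loaded. A quick check,
assuming PyYAML (which the vx tooling already relies on); the anchor name
and two-level excerpt are illustrative:

    import yaml

    doc = (
        "UGRD:\n"
        "  P1000: &u ['ge2.572']\n"
        "  P925: *u\n"
    )
    cfg = yaml.safe_load(doc)
    # Both levels resolve to the same threshold list.
    assert cfg['UGRD']['P925'] == cfg['UGRD']['P1000'] == ['ge2.572']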
diff --git a/parm/wflow/verify_det.yaml b/parm/wflow/verify_det.yaml
index 79f04eeaaa..e82d7c61e1 100644
--- a/parm/wflow/verify_det.yaml
+++ b/parm/wflow/verify_det.yaml
@@ -21,18 +21,6 @@ default_task_verify_det: &default_task_verify_det
queue: '&QUEUE_DEFAULT;'
walltime: 00:30:00
-task_parse_vx_config_det:
- <<: *default_task_verify_det
- command: '&LOAD_MODULES_RUN_TASK_FP; "run_vx" "&JOBSdir;/JREGIONAL_PARSE_VX_CONFIG"'
- envars:
- <<: *default_vars
- DET_OR_ENS: 'det'
- join: !cycstr '&LOGDIR;/{{ jobname }}&LOGEXT;'
- walltime: 00:05:00
- # No dependencies are needed for this task because as long as any deterministic
- # verification tasks are going to be run (i.e. as long as this configuration
- # file is included in the workflow), then this task must be launched.
-
metatask_GridStat_CCPA_all_accums_all_mems:
var:
ACCUM_HH: '{% for ah in verification.VX_APCP_ACCUMS_HRS %}{% if workflow.FCST_LEN_HRS >= ah %}{{ "%02d " % ah }}{% endif %}{% endfor %}'
@@ -64,9 +52,6 @@ metatask_GridStat_CCPA_all_accums_all_mems:
taskdep_pcpcombine_fcst:
attrs:
task: run_MET_PcpCombine_fcst_APCP#ACCUM_HH#h_mem#mem#
- taskdep_parse_vx_config_det:
- attrs:
- task: parse_vx_config_det
metatask_GridStat_NOHRSC_all_accums_all_mems:
var:
@@ -99,9 +84,6 @@ metatask_GridStat_NOHRSC_all_accums_all_mems:
taskdep_pcpcombine_fcst:
attrs:
task: run_MET_PcpCombine_fcst_ASNOW#ACCUM_HH#h_mem#mem#
- taskdep_parse_vx_config_det:
- attrs:
- task: parse_vx_config_det
metatask_GridStat_MRMS_all_mems:
var:
@@ -133,9 +115,6 @@ metatask_GridStat_MRMS_all_mems:
attrs:
age: 00:00:00:30
text: !cycstr '{{ workflow.EXPTDIR }}/@Y@m@d@H/post_files_exist_mem#mem#.txt'
- taskdep_parse_vx_config_det:
- attrs:
- task: parse_vx_config_det
metatask_PointStat_NDAS_all_mems:
var:
@@ -167,6 +146,3 @@ metatask_PointStat_NDAS_all_mems:
attrs:
age: 00:00:00:30
text: !cycstr '{{ workflow.EXPTDIR }}/@Y@m@d@H/post_files_exist_mem#mem#.txt'
- taskdep_parse_vx_config_det:
- attrs:
- task: parse_vx_config_det
diff --git a/parm/wflow/verify_ens.yaml b/parm/wflow/verify_ens.yaml
index 3f7638587d..18b23a1eb0 100644
--- a/parm/wflow/verify_ens.yaml
+++ b/parm/wflow/verify_ens.yaml
@@ -21,18 +21,6 @@ default_task_verify_ens: &default_task_verify_ens
queue: '&QUEUE_DEFAULT;'
walltime: 01:00:00
-task_parse_vx_config_ens:
- <<: *default_task_verify_ens
- command: '&LOAD_MODULES_RUN_TASK_FP; "run_vx" "&JOBSdir;/JREGIONAL_PARSE_VX_CONFIG"'
- envars:
- <<: *default_vars
- DET_OR_ENS: 'ens'
- join: !cycstr '&LOGDIR;/{{ jobname }}&LOGEXT;'
- walltime: 00:05:00
- # No dependencies are needed for this task because as long as any ensemble
- # verification tasks are going to be run (i.e. as long as this configuration
- # file is included in the workflow), then this task must be launched.
-
metatask_GenEnsProd_EnsembleStat_CCPA:
var:
ACCUM_HH: '{% for ah in verification.VX_APCP_ACCUMS_HRS %}{% if workflow.FCST_LEN_HRS >= ah %}{{ "%02d " % ah }}{% endif %}{% endfor %}'
@@ -59,9 +47,6 @@ metatask_GenEnsProd_EnsembleStat_CCPA:
metataskdep_pcpcombine_fcst:
attrs:
metatask: PcpCombine_fcst_APCP#ACCUM_HH#h_all_mems
- taskdep_parse_vx_config_ens:
- attrs:
- task: parse_vx_config_ens
task_run_MET_EnsembleStat_vx_APCP#ACCUM_HH#h:
<<: *task_GenEnsProd_CCPA
envars:
@@ -96,9 +81,6 @@ metatask_GenEnsProd_EnsembleStat_NOHRSC:
metataskdep_pcpcombine_fcst:
attrs:
metatask: PcpCombine_fcst_ASNOW#ACCUM_HH#h_all_mems
- taskdep_parse_vx_config_ens:
- attrs:
- task: parse_vx_config_ens
task_run_MET_EnsembleStat_vx_ASNOW#ACCUM_HH#h:
<<: *task_GenEnsProd_NOHRSC
envars:
@@ -134,9 +116,6 @@ metatask_GenEnsProd_EnsembleStat_MRMS:
metataskdep_check_post_output: &check_post_output
attrs:
metatask: check_post_output_all_mems
- taskdep_parse_vx_config_ens:
- attrs:
- task: parse_vx_config_ens
task_run_MET_EnsembleStat_vx_#VAR#:
<<: *task_GenEnsProd_MRMS
envars:
@@ -179,9 +158,6 @@ metatask_GenEnsProd_EnsembleStat_NDAS:
task: run_MET_Pb2nc_obs
metataskdep_check_post_output:
<<: *check_post_output
- taskdep_parse_vx_config_ens:
- attrs:
- task: parse_vx_config_ens
task_run_MET_EnsembleStat_vx_#VAR#:
<<: *task_GenEnsProd_NDAS
envars:
diff --git a/scripts/exregional_parse_vx_config.sh b/scripts/exregional_parse_vx_config.sh
deleted file mode 100755
index 13632c7e53..0000000000
--- a/scripts/exregional_parse_vx_config.sh
+++ /dev/null
@@ -1,94 +0,0 @@
-#!/usr/bin/env bash
-
-#
-#-----------------------------------------------------------------------
-#
-# Source the variable definitions file and the bash utility functions.
-#
-#-----------------------------------------------------------------------
-#
-. $USHdir/source_util_funcs.sh
-source_config_for_task "task_run_met_pcpcombine|task_run_post" ${GLOBAL_VAR_DEFNS_FP}
-#
-#-----------------------------------------------------------------------
-#
-# Source files defining auxiliary functions for verification.
-#
-#-----------------------------------------------------------------------
-#
-. $USHdir/set_vx_fhr_list.sh
-#
-#-----------------------------------------------------------------------
-#
-# Save current shell options (in a global array). Then set new options
-# for this script/function.
-#
-#-----------------------------------------------------------------------
-#
-{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1
-#
-#-----------------------------------------------------------------------
-#
-# Get the full path to the file in which this script/function is located
-# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in
-# which the file is located (scrfunc_dir).
-#
-#-----------------------------------------------------------------------
-#
-scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" )
-scrfunc_fn=$( basename "${scrfunc_fp}" )
-scrfunc_dir=$( dirname "${scrfunc_fp}" )
-#
-#-----------------------------------------------------------------------
-#
-# Print message indicating entry into script.
-#
-#-----------------------------------------------------------------------
-#
-print_info_msg "
-========================================================================
-Entering script: \"${scrfunc_fn}\"
-In directory: \"${scrfunc_dir}\"
-
-This is the ex-script for the task that reads in the \"coupled\" yaml
-verification (vx) configuration file (python dictionary) and generates
-from it two \"decoupled\" vx configuration dictionaries, one for forecasts
-and another for observations. The task then writes these two decoupled
-dictionaries to a new configuration file in the experiment directory
-that can be read by downstream vx tasks.
-========================================================================"
-#
-#-----------------------------------------------------------------------
-#
-# Call python script to generate vx configuration file containing
-# separate vx configuration dictionaries for forecasts and observations.
-#
-#-----------------------------------------------------------------------
-#
-python3 ${USHdir}/metplus/decouple_fcst_obs_vx_config.py \
- --vx_type "${DET_OR_ENS}" \
- --outfile_type "txt" \
- --outdir "${EXPTDIR}"
-#
-#-----------------------------------------------------------------------
-#
-# Print message indicating successful completion of script.
-#
-#-----------------------------------------------------------------------
-#
-print_info_msg "
-========================================================================
-Done extracting vx configuration.
-
-Exiting script: \"${scrfunc_fn}\"
-In directory: \"${scrfunc_dir}\"
-========================================================================"
-#
-#-----------------------------------------------------------------------
-#
-# Restore the shell options saved at the beginning of this script/func-
-# tion.
-#
-#-----------------------------------------------------------------------
-#
-{ restore_shell_opts; } > /dev/null 2>&1
diff --git a/scripts/exregional_run_met_genensprod_or_ensemblestat.sh b/scripts/exregional_run_met_genensprod_or_ensemblestat.sh
index 529d8d92cc..93caeaa7f2 100755
--- a/scripts/exregional_run_met_genensprod_or_ensemblestat.sh
+++ b/scripts/exregional_run_met_genensprod_or_ensemblestat.sh
@@ -296,9 +296,12 @@ metplus_log_fn="metplus.log.${metplus_log_bn}"
#-----------------------------------------------------------------------
#
det_or_ens="ens"
-vx_config_output_fn="vx_config_${det_or_ens}.txt"
-vx_config_output_fp="${EXPTDIR}/${vx_config_output_fn}"
-vx_config_dict=$(<"${vx_config_output_fp}")
+vx_config_fn="vx_config_${det_or_ens}.yaml"
+vx_config_fp="${METPLUS_CONF}/${vx_config_fn}"
+vx_config_dict=$(<"${vx_config_fp}")
+# Indent each line of vx_config_dict so that it is aligned properly when
+# included in the yaml-formatted variable "settings" below.
+vx_config_dict=$( printf "%s\n" "${vx_config_dict}" | sed 's/^/ /' )
#
#-----------------------------------------------------------------------
#
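A rough Python equivalent of the sed indentation step above, showing why the
two-space prefix is needed before the block is nested under the
'vx_config_dict': key of the yaml-formatted "settings" string below (the
file name matches this script's vx_config_fn; everything else is an
illustrative sketch):

    import textwrap

    # Read the coupled vx configuration and prefix every line, blank lines
    # included (matching sed 's/^/  /'), with two spaces.
    with open('vx_config_ens.yaml') as f:
        vx_config_dict = f.read()
    vx_config_dict = textwrap.indent(
        vx_config_dict, '  ', predicate=lambda _line: True)

    settings = "'vx_config_dict':\n" + vx_config_dict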
@@ -319,50 +322,54 @@ settings="\
#
# MET/METplus information.
#
- 'metplus_tool_name': '${metplus_tool_name}'
- 'MetplusToolName': '${MetplusToolName}'
- 'METPLUS_TOOL_NAME': '${METPLUS_TOOL_NAME}'
- 'metplus_verbosity_level': '${METPLUS_VERBOSITY_LEVEL}'
+'metplus_tool_name': '${metplus_tool_name}'
+'MetplusToolName': '${MetplusToolName}'
+'METPLUS_TOOL_NAME': '${METPLUS_TOOL_NAME}'
+'metplus_verbosity_level': '${METPLUS_VERBOSITY_LEVEL}'
#
# Date and forecast hour information.
#
- 'cdate': '$CDATE'
- 'fhr_list': '${FHR_LIST}'
+'cdate': '$CDATE'
+'fhr_list': '${FHR_LIST}'
#
# Input and output directory/file information.
#
- 'metplus_config_fn': '${metplus_config_fn:-}'
- 'metplus_log_fn': '${metplus_log_fn:-}'
- 'obs_input_dir': '${OBS_INPUT_DIR:-}'
- 'obs_input_fn_template': '${OBS_INPUT_FN_TEMPLATE:-}'
- 'fcst_input_dir': '${FCST_INPUT_DIR:-}'
- 'fcst_input_fn_template': '${FCST_INPUT_FN_TEMPLATE:-}'
- 'output_base': '${OUTPUT_BASE}'
- 'output_dir': '${OUTPUT_DIR}'
- 'output_fn_template': '${OUTPUT_FN_TEMPLATE:-}'
- 'staging_dir': '${STAGING_DIR}'
- 'vx_fcst_model_name': '${VX_FCST_MODEL_NAME}'
+'metplus_config_fn': '${metplus_config_fn:-}'
+'metplus_log_fn': '${metplus_log_fn:-}'
+'obs_input_dir': '${OBS_INPUT_DIR:-}'
+'obs_input_fn_template': '${OBS_INPUT_FN_TEMPLATE:-}'
+'fcst_input_dir': '${FCST_INPUT_DIR:-}'
+'fcst_input_fn_template': '${FCST_INPUT_FN_TEMPLATE:-}'
+'output_base': '${OUTPUT_BASE}'
+'output_dir': '${OUTPUT_DIR}'
+'output_fn_template': '${OUTPUT_FN_TEMPLATE:-}'
+'staging_dir': '${STAGING_DIR}'
+'vx_fcst_model_name': '${VX_FCST_MODEL_NAME}'
#
# Ensemble and member-specific information.
#
- 'num_ens_members': '${NUM_ENS_MEMBERS}'
- 'ensmem_name': '${ensmem_name:-}'
- 'time_lag': '${time_lag:-}'
+'num_ens_members': '${NUM_ENS_MEMBERS}'
+'ensmem_name': '${ensmem_name:-}'
+'time_lag': '${time_lag:-}'
#
# Field information.
#
- 'fieldname_in_obs_input': '${FIELDNAME_IN_OBS_INPUT}'
- 'fieldname_in_fcst_input': '${FIELDNAME_IN_FCST_INPUT}'
- 'fieldname_in_met_output': '${FIELDNAME_IN_MET_OUTPUT}'
- 'fieldname_in_met_filedir_names': '${FIELDNAME_IN_MET_FILEDIR_NAMES}'
- 'obtype': '${OBTYPE}'
- 'accum_hh': '${ACCUM_HH:-}'
- 'accum_no_pad': '${ACCUM_NO_PAD:-}'
- 'metplus_templates_dir': '${METPLUS_CONF:-}'
- 'input_field_group': '${VAR:-}'
- 'input_level_fcst': '${FCST_LEVEL:-}'
- 'input_thresh_fcst': '${FCST_THRESH:-}'
- 'vx_config_dict': ${vx_config_dict:-}
+'fieldname_in_obs_input': '${FIELDNAME_IN_OBS_INPUT}'
+'fieldname_in_fcst_input': '${FIELDNAME_IN_FCST_INPUT}'
+'fieldname_in_met_output': '${FIELDNAME_IN_MET_OUTPUT}'
+'fieldname_in_met_filedir_names': '${FIELDNAME_IN_MET_FILEDIR_NAMES}'
+'obtype': '${OBTYPE}'
+'accum_hh': '${ACCUM_HH:-}'
+'accum_no_pad': '${ACCUM_NO_PAD:-}'
+'metplus_templates_dir': '${METPLUS_CONF:-}'
+'input_field_group': '${VAR:-}'
+'input_level_fcst': '${FCST_LEVEL:-}'
+'input_thresh_fcst': '${FCST_THRESH:-}'
+#
+# Verification configuration dictionary.
+#
+'vx_config_dict':
+${vx_config_dict:-}
"
# Render the template to create a METplus configuration file
diff --git a/scripts/exregional_run_met_gridstat_or_pointstat_vx.sh b/scripts/exregional_run_met_gridstat_or_pointstat_vx.sh
index b8f0c49fec..4f871e6e1b 100755
--- a/scripts/exregional_run_met_gridstat_or_pointstat_vx.sh
+++ b/scripts/exregional_run_met_gridstat_or_pointstat_vx.sh
@@ -293,9 +293,12 @@ metplus_log_fn="metplus.log.${metplus_log_bn}"
#-----------------------------------------------------------------------
#
det_or_ens="det"
-vx_config_output_fn="vx_config_${det_or_ens}.txt"
-vx_config_output_fp="${EXPTDIR}/${vx_config_output_fn}"
-vx_config_dict=$(<"${vx_config_output_fp}")
+vx_config_fn="vx_config_${det_or_ens}.yaml"
+vx_config_fp="${METPLUS_CONF}/${vx_config_fn}"
+vx_config_dict=$(<"${vx_config_fp}")
+# Indent each line of vx_config_dict so that it is aligned properly when
+# included in the yaml-formatted variable "settings" below.
+vx_config_dict=$( printf "%s\n" "${vx_config_dict}" | sed 's/^/ /' )
#
#-----------------------------------------------------------------------
#
@@ -316,50 +319,54 @@ settings="\
#
# MET/METplus information.
#
- 'metplus_tool_name': '${metplus_tool_name}'
- 'MetplusToolName': '${MetplusToolName}'
- 'METPLUS_TOOL_NAME': '${METPLUS_TOOL_NAME}'
- 'metplus_verbosity_level': '${METPLUS_VERBOSITY_LEVEL}'
+'metplus_tool_name': '${metplus_tool_name}'
+'MetplusToolName': '${MetplusToolName}'
+'METPLUS_TOOL_NAME': '${METPLUS_TOOL_NAME}'
+'metplus_verbosity_level': '${METPLUS_VERBOSITY_LEVEL}'
#
# Date and forecast hour information.
#
- 'cdate': '$CDATE'
- 'fhr_list': '${FHR_LIST}'
+'cdate': '$CDATE'
+'fhr_list': '${FHR_LIST}'
#
# Input and output directory/file information.
#
- 'metplus_config_fn': '${metplus_config_fn:-}'
- 'metplus_log_fn': '${metplus_log_fn:-}'
- 'obs_input_dir': '${OBS_INPUT_DIR:-}'
- 'obs_input_fn_template': '${OBS_INPUT_FN_TEMPLATE:-}'
- 'fcst_input_dir': '${FCST_INPUT_DIR:-}'
- 'fcst_input_fn_template': '${FCST_INPUT_FN_TEMPLATE:-}'
- 'output_base': '${OUTPUT_BASE}'
- 'output_dir': '${OUTPUT_DIR}'
- 'output_fn_template': '${OUTPUT_FN_TEMPLATE:-}'
- 'staging_dir': '${STAGING_DIR}'
- 'vx_fcst_model_name': '${VX_FCST_MODEL_NAME}'
+'metplus_config_fn': '${metplus_config_fn:-}'
+'metplus_log_fn': '${metplus_log_fn:-}'
+'obs_input_dir': '${OBS_INPUT_DIR:-}'
+'obs_input_fn_template': '${OBS_INPUT_FN_TEMPLATE:-}'
+'fcst_input_dir': '${FCST_INPUT_DIR:-}'
+'fcst_input_fn_template': '${FCST_INPUT_FN_TEMPLATE:-}'
+'output_base': '${OUTPUT_BASE}'
+'output_dir': '${OUTPUT_DIR}'
+'output_fn_template': '${OUTPUT_FN_TEMPLATE:-}'
+'staging_dir': '${STAGING_DIR}'
+'vx_fcst_model_name': '${VX_FCST_MODEL_NAME}'
#
# Ensemble and member-specific information.
#
- 'num_ens_members': '${NUM_ENS_MEMBERS}'
- 'ensmem_name': '${ensmem_name:-}'
- 'time_lag': '${time_lag:-}'
+'num_ens_members': '${NUM_ENS_MEMBERS}'
+'ensmem_name': '${ensmem_name:-}'
+'time_lag': '${time_lag:-}'
#
# Field information.
#
- 'fieldname_in_obs_input': '${FIELDNAME_IN_OBS_INPUT}'
- 'fieldname_in_fcst_input': '${FIELDNAME_IN_FCST_INPUT}'
- 'fieldname_in_met_output': '${FIELDNAME_IN_MET_OUTPUT}'
- 'fieldname_in_met_filedir_names': '${FIELDNAME_IN_MET_FILEDIR_NAMES}'
- 'obtype': '${OBTYPE}'
- 'accum_hh': '${ACCUM_HH:-}'
- 'accum_no_pad': '${ACCUM_NO_PAD:-}'
- 'metplus_templates_dir': '${METPLUS_CONF:-}'
- 'input_field_group': '${VAR:-}'
- 'input_level_fcst': '${FCST_LEVEL:-}'
- 'input_thresh_fcst': '${FCST_THRESH:-}'
- 'vx_config_dict': ${vx_config_dict:-}
+'fieldname_in_obs_input': '${FIELDNAME_IN_OBS_INPUT}'
+'fieldname_in_fcst_input': '${FIELDNAME_IN_FCST_INPUT}'
+'fieldname_in_met_output': '${FIELDNAME_IN_MET_OUTPUT}'
+'fieldname_in_met_filedir_names': '${FIELDNAME_IN_MET_FILEDIR_NAMES}'
+'obtype': '${OBTYPE}'
+'accum_hh': '${ACCUM_HH:-}'
+'accum_no_pad': '${ACCUM_NO_PAD:-}'
+'metplus_templates_dir': '${METPLUS_CONF:-}'
+'input_field_group': '${VAR:-}'
+'input_level_fcst': '${FCST_LEVEL:-}'
+'input_thresh_fcst': '${FCST_THRESH:-}'
+#
+# Verification configuration dictionary.
+#
+'vx_config_dict':
+${vx_config_dict:-}
"
# Render the template to create a METplus configuration file
diff --git a/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensmean.sh b/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensmean.sh
index 9939daaf76..6e4a4ff33f 100755
--- a/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensmean.sh
+++ b/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensmean.sh
@@ -251,9 +251,12 @@ metplus_log_fn="metplus.log.${metplus_log_bn}"
#-----------------------------------------------------------------------
#
det_or_ens="ens"
-vx_config_output_fn="vx_config_${det_or_ens}.txt"
-vx_config_output_fp="${EXPTDIR}/${vx_config_output_fn}"
-vx_config_dict=$(<"${vx_config_output_fp}")
+vx_config_fn="vx_config_${det_or_ens}.yaml"
+vx_config_fp="${METPLUS_CONF}/${vx_config_fn}"
+vx_config_dict=$(<"${vx_config_fp}")
+# Indent each line of vx_config_dict so that it is aligned properly when
+# included in the yaml-formatted variable "settings" below.
+vx_config_dict=$( printf "%s\n" "${vx_config_dict}" | sed 's/^/ /' )
#
#-----------------------------------------------------------------------
#
@@ -274,50 +277,54 @@ settings="\
#
# MET/METplus information.
#
- 'metplus_tool_name': '${metplus_tool_name}'
- 'MetplusToolName': '${MetplusToolName}'
- 'METPLUS_TOOL_NAME': '${METPLUS_TOOL_NAME}'
- 'metplus_verbosity_level': '${METPLUS_VERBOSITY_LEVEL}'
+'metplus_tool_name': '${metplus_tool_name}'
+'MetplusToolName': '${MetplusToolName}'
+'METPLUS_TOOL_NAME': '${METPLUS_TOOL_NAME}'
+'metplus_verbosity_level': '${METPLUS_VERBOSITY_LEVEL}'
#
# Date and forecast hour information.
#
- 'cdate': '$CDATE'
- 'fhr_list': '${FHR_LIST}'
+'cdate': '$CDATE'
+'fhr_list': '${FHR_LIST}'
#
# Input and output directory/file information.
#
- 'metplus_config_fn': '${metplus_config_fn:-}'
- 'metplus_log_fn': '${metplus_log_fn:-}'
- 'obs_input_dir': '${OBS_INPUT_DIR:-}'
- 'obs_input_fn_template': '${OBS_INPUT_FN_TEMPLATE:-}'
- 'fcst_input_dir': '${FCST_INPUT_DIR:-}'
- 'fcst_input_fn_template': '${FCST_INPUT_FN_TEMPLATE:-}'
- 'output_base': '${OUTPUT_BASE}'
- 'output_dir': '${OUTPUT_DIR}'
- 'output_fn_template': '${OUTPUT_FN_TEMPLATE:-}'
- 'staging_dir': '${STAGING_DIR}'
- 'vx_fcst_model_name': '${VX_FCST_MODEL_NAME}'
+'metplus_config_fn': '${metplus_config_fn:-}'
+'metplus_log_fn': '${metplus_log_fn:-}'
+'obs_input_dir': '${OBS_INPUT_DIR:-}'
+'obs_input_fn_template': '${OBS_INPUT_FN_TEMPLATE:-}'
+'fcst_input_dir': '${FCST_INPUT_DIR:-}'
+'fcst_input_fn_template': '${FCST_INPUT_FN_TEMPLATE:-}'
+'output_base': '${OUTPUT_BASE}'
+'output_dir': '${OUTPUT_DIR}'
+'output_fn_template': '${OUTPUT_FN_TEMPLATE:-}'
+'staging_dir': '${STAGING_DIR}'
+'vx_fcst_model_name': '${VX_FCST_MODEL_NAME}'
#
# Ensemble and member-specific information.
#
- 'num_ens_members': '${NUM_ENS_MEMBERS}'
- 'ensmem_name': '${ensmem_name:-}'
- 'time_lag': '${time_lag:-}'
+'num_ens_members': '${NUM_ENS_MEMBERS}'
+'ensmem_name': '${ensmem_name:-}'
+'time_lag': '${time_lag:-}'
#
# Field information.
#
- 'fieldname_in_obs_input': '${FIELDNAME_IN_OBS_INPUT}'
- 'fieldname_in_fcst_input': '${FIELDNAME_IN_FCST_INPUT}'
- 'fieldname_in_met_output': '${FIELDNAME_IN_MET_OUTPUT}'
- 'fieldname_in_met_filedir_names': '${FIELDNAME_IN_MET_FILEDIR_NAMES}'
- 'obtype': '${OBTYPE}'
- 'accum_hh': '${ACCUM_HH:-}'
- 'accum_no_pad': '${ACCUM_NO_PAD:-}'
- 'metplus_templates_dir': '${METPLUS_CONF:-}'
- 'input_field_group': '${VAR:-}'
- 'input_level_fcst': '${FCST_LEVEL:-}'
- 'input_thresh_fcst': '${FCST_THRESH:-}'
- 'vx_config_dict': ${vx_config_dict:-}
+'fieldname_in_obs_input': '${FIELDNAME_IN_OBS_INPUT}'
+'fieldname_in_fcst_input': '${FIELDNAME_IN_FCST_INPUT}'
+'fieldname_in_met_output': '${FIELDNAME_IN_MET_OUTPUT}'
+'fieldname_in_met_filedir_names': '${FIELDNAME_IN_MET_FILEDIR_NAMES}'
+'obtype': '${OBTYPE}'
+'accum_hh': '${ACCUM_HH:-}'
+'accum_no_pad': '${ACCUM_NO_PAD:-}'
+'metplus_templates_dir': '${METPLUS_CONF:-}'
+'input_field_group': '${VAR:-}'
+'input_level_fcst': '${FCST_LEVEL:-}'
+'input_thresh_fcst': '${FCST_THRESH:-}'
+#
+# Verification configuration dictionary.
+#
+'vx_config_dict':
+${vx_config_dict:-}
"
# Render the template to create a METplus configuration file
diff --git a/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensprob.sh b/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensprob.sh
index 33d00b1d37..924d321ec3 100755
--- a/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensprob.sh
+++ b/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensprob.sh
@@ -250,9 +250,12 @@ metplus_log_fn="metplus.log.${metplus_log_bn}"
#-----------------------------------------------------------------------
#
det_or_ens="ens"
-vx_config_output_fn="vx_config_${det_or_ens}.txt"
-vx_config_output_fp="${EXPTDIR}/${vx_config_output_fn}"
-vx_config_dict=$(<"${vx_config_output_fp}")
+vx_config_fn="vx_config_${det_or_ens}.yaml"
+vx_config_fp="${METPLUS_CONF}/${vx_config_fn}"
+vx_config_dict=$(<"${vx_config_fp}")
+# Indent each line of vx_config_dict so that it is aligned properly when
+# included in the yaml-formatted variable "settings" below.
+vx_config_dict=$( printf "%s\n" "${vx_config_dict}" | sed 's/^/ /' )
#
#-----------------------------------------------------------------------
#
@@ -273,50 +276,54 @@ settings="\
#
# MET/METplus information.
#
- 'metplus_tool_name': '${metplus_tool_name}'
- 'MetplusToolName': '${MetplusToolName}'
- 'METPLUS_TOOL_NAME': '${METPLUS_TOOL_NAME}'
- 'metplus_verbosity_level': '${METPLUS_VERBOSITY_LEVEL}'
+'metplus_tool_name': '${metplus_tool_name}'
+'MetplusToolName': '${MetplusToolName}'
+'METPLUS_TOOL_NAME': '${METPLUS_TOOL_NAME}'
+'metplus_verbosity_level': '${METPLUS_VERBOSITY_LEVEL}'
#
# Date and forecast hour information.
#
- 'cdate': '$CDATE'
- 'fhr_list': '${FHR_LIST}'
+'cdate': '$CDATE'
+'fhr_list': '${FHR_LIST}'
#
# Input and output directory/file information.
#
- 'metplus_config_fn': '${metplus_config_fn:-}'
- 'metplus_log_fn': '${metplus_log_fn:-}'
- 'obs_input_dir': '${OBS_INPUT_DIR:-}'
- 'obs_input_fn_template': '${OBS_INPUT_FN_TEMPLATE:-}'
- 'fcst_input_dir': '${FCST_INPUT_DIR:-}'
- 'fcst_input_fn_template': '${FCST_INPUT_FN_TEMPLATE:-}'
- 'output_base': '${OUTPUT_BASE}'
- 'output_dir': '${OUTPUT_DIR}'
- 'output_fn_template': '${OUTPUT_FN_TEMPLATE:-}'
- 'staging_dir': '${STAGING_DIR}'
- 'vx_fcst_model_name': '${VX_FCST_MODEL_NAME}'
+'metplus_config_fn': '${metplus_config_fn:-}'
+'metplus_log_fn': '${metplus_log_fn:-}'
+'obs_input_dir': '${OBS_INPUT_DIR:-}'
+'obs_input_fn_template': '${OBS_INPUT_FN_TEMPLATE:-}'
+'fcst_input_dir': '${FCST_INPUT_DIR:-}'
+'fcst_input_fn_template': '${FCST_INPUT_FN_TEMPLATE:-}'
+'output_base': '${OUTPUT_BASE}'
+'output_dir': '${OUTPUT_DIR}'
+'output_fn_template': '${OUTPUT_FN_TEMPLATE:-}'
+'staging_dir': '${STAGING_DIR}'
+'vx_fcst_model_name': '${VX_FCST_MODEL_NAME}'
#
# Ensemble and member-specific information.
#
- 'num_ens_members': '${NUM_ENS_MEMBERS}'
- 'ensmem_name': '${ensmem_name:-}'
- 'time_lag': '${time_lag:-}'
+'num_ens_members': '${NUM_ENS_MEMBERS}'
+'ensmem_name': '${ensmem_name:-}'
+'time_lag': '${time_lag:-}'
#
# Field information.
#
- 'fieldname_in_obs_input': '${FIELDNAME_IN_OBS_INPUT}'
- 'fieldname_in_fcst_input': '${FIELDNAME_IN_FCST_INPUT}'
- 'fieldname_in_met_output': '${FIELDNAME_IN_MET_OUTPUT}'
- 'fieldname_in_met_filedir_names': '${FIELDNAME_IN_MET_FILEDIR_NAMES}'
- 'obtype': '${OBTYPE}'
- 'accum_hh': '${ACCUM_HH:-}'
- 'accum_no_pad': '${ACCUM_NO_PAD:-}'
- 'metplus_templates_dir': '${METPLUS_CONF:-}'
- 'input_field_group': '${VAR:-}'
- 'input_level_fcst': '${FCST_LEVEL:-}'
- 'input_thresh_fcst': '${FCST_THRESH:-}'
- 'vx_config_dict': ${vx_config_dict:-}
+'fieldname_in_obs_input': '${FIELDNAME_IN_OBS_INPUT}'
+'fieldname_in_fcst_input': '${FIELDNAME_IN_FCST_INPUT}'
+'fieldname_in_met_output': '${FIELDNAME_IN_MET_OUTPUT}'
+'fieldname_in_met_filedir_names': '${FIELDNAME_IN_MET_FILEDIR_NAMES}'
+'obtype': '${OBTYPE}'
+'accum_hh': '${ACCUM_HH:-}'
+'accum_no_pad': '${ACCUM_NO_PAD:-}'
+'metplus_templates_dir': '${METPLUS_CONF:-}'
+'input_field_group': '${VAR:-}'
+'input_level_fcst': '${FCST_LEVEL:-}'
+'input_thresh_fcst': '${FCST_THRESH:-}'
+#
+# Verification configuration dictionary.
+#
+'vx_config_dict':
+${vx_config_dict:-}
"
# Render the template to create a METplus configuration file
diff --git a/ush/metplus/decouple_fcst_obs_vx_config.py b/ush/metplus/decouple_fcst_obs_vx_config.py
deleted file mode 100755
index afa001859c..0000000000
--- a/ush/metplus/decouple_fcst_obs_vx_config.py
+++ /dev/null
@@ -1,436 +0,0 @@
-#!/usr/bin/env python3
-
-import os
-import sys
-import glob
-import argparse
-import yaml
-
-import logging
-import textwrap
-from textwrap import indent, dedent
-
-import pprint
-import subprocess
-
-from pathlib import Path
-file = Path(__file__).resolve()
-home_dir = file.parents[2]
-ush_dir = Path(os.path.join(home_dir, 'ush')).resolve()
-sys.path.append(str(ush_dir))
-
-from python_utils import (
- log_info,
- load_config_file,
-)
-
-
-def get_pprint_str(var, indent_str=''):
- """
- Function to format a python variable as a pretty-printed string and add
- indentation.
-
- Arguments:
- ---------
- var:
- A variable.
-
- indent_str:
- String to be added to the beginning of each line of the pretty-printed
- form of var. This usually consists of multiple space characters.
-
- Returns:
- -------
- var_str:
- Formatted string containing contents of variable.
- """
-
- var_str = pprint.pformat(var, compact=True, sort_dicts=False)
- var_str = var_str.splitlines(True)
- var_str = [indent_str + s for s in var_str]
- var_str = ''.join(var_str)
-
- return var_str
-
-
-def create_pprinted_msg(vars_dict, indent_str='', add_nl_after_varname=False):
- """
- Function to create an output message (string) containing one or more
- variables' names, with each name followed possibly by a newline, an equal
- sign, and the pretty-printed value of the variable. Each variable name
- starts on a new line.
-
- Arguments:
- ---------
- vars_dict:
- Dictionary containing the variable names (the keys) and their values
- (the values).
-
- indent_str:
- String to be added to the beginning of each line of the string before
- returning it. This usually consists of multiple space characters.
-
- add_nl_after_varname:
- Flag indicating whether to add a newline after the variable name (and
- before the equal sign).
-
- Returns:
- -------
- vars_str:
- Formatted string containing contents of variable.
- """
-
- space_or_nl = ' '
- one_or_zero = 1
- if add_nl_after_varname:
- space_or_nl = '\n'
- one_or_zero = 0
-
- vars_str = ''
- for var_name, var_value in vars_dict.items():
- pprint_indent_str = ' '*(2 + one_or_zero*(1 + len(var_name)))
- tmp = f'{var_name}' + space_or_nl + '= ' + \
- get_pprint_str(var_value, pprint_indent_str).lstrip()
- vars_str = '\n'.join([vars_str, tmp])
-
- vars_str = indent(vars_str, indent_str)
-
- return vars_str
-
-
-def extract_fcst_obs_vals_from_cpld(item_cpld):
- """
- Function to parse the "coupled" value of an item (obtained from the coupled
- verification (vx) configuration dictionary) to extract from it the item's
- value for forecasts and its value for observations. The coupled item
- (item_cpld) is a string that may correspond to a field name, a level, or
- a threshold. If item_cpld has the form
-
- item_cpld = str1 + delim_str + str2
-
- where delim_str is a delimiter string (e.g. delim_str may be set to '%%'),
- then the forecast and observation values of the item are given by
-
- item_fcst = str1
- item_obs = str2
-
- For example, if delim_str = '%%' and
-
- item_cpld = 'ABCD%%EFGH'
-
- then
-
- item_fcst = 'ABCD'
- item_obs = 'EFGH'
-
- Alternatively, if delim_str is not be a substring within item_cpld, both
- return values will be identical to the input.
-
- Arguments:
- ---------
- item_cpld
- String representing a "coupled" item (field name, level, or threshold).
- containing both the item's forecast value and its observations value.
-
- Returns:
- -------
- item_fcst, item_obs:
- Strings containing the values of the item for forecasts and observations,
- respectively.
- """
-
- # Set the delimiter string.
- delim_str = '%%'
-
- # Parse the string containing the coupled value of the item to extract
- # its forecast and observation values.
- if delim_str in item_cpld:
- if item_cpld.count(delim_str) == 1:
- item_fcst, item_obs = item_cpld.split(delim_str)
- else:
- msg = dedent(f"""
- The delimiter string (delim_str) appears more than once in the current
- coupled item value (item_cpld):
- delim_str = {get_pprint_str(delim_str)}
- item_cpld = {get_pprint_str(item_cpld)}
- Stopping.
- """)
- logging.error(msg)
- raise ValueError(msg)
- else:
- item_fcst = item_cpld
- item_obs = item_cpld
-
- return item_fcst, item_obs
-
-
-def decouple_fcst_obs_vx_config(vx_type, outfile_type, outdir='./', log_lvl='info', log_fp=''):
- """
- This function reads from a yaml configuration file the coupled verification
- (vx) configuration dictionary and parses it (i.e. decouples its contents)
- to produce two new configuration dictionaries -- one for forecasts and
- another for observations. Here, by "coupled" dictionary, we mean one that
- contains items (keys and values) that store the forecast and observation
- values for various quantities (field names, levels, and thresholds) in
- combined/coupled form. (See the documentation for the function
- extract_fcst_obs_vals_from_cpld() for more details of this coupled form.)
- This function then writes the two separate (decoupled) vx configuration
- dictionaries (one for forecasts and the other for observations) to a file.
-
- Arguments:
- ---------
- vx_type:
- Type of verification for which the coupled dictionary to be read in
- applies. This can be 'det' (for deterministic verification) or 'ens'
- (for ensemble verification).
- outfile_type:
- Type of the output file. This can be 'txt' (for the output to be saved
- in a pretty-printed text file) or 'yaml' (for the output to be saved in
- a yaml-formatted file. Here, the "output" consists of the two separate
- vx configuration files (one for forecasts and another for observations).
- outdir:
- The directory in which to save the output file.
- log_lvl:
- The logging level to use.
- log_fp:
- Path to the log file. Default is an empty string, so that logging output
- is sent to stdout.
-
- Returns:
- -------
- None
- """
-
- # Set up logging.
- log_level = str.upper(log_lvl)
- fmt = "[%(levelname)s:%(name)s: %(filename)s, line %(lineno)s: %(funcName)s()] %(message)s"
- if log_fp:
- logging.basicConfig(level=log_level, format=fmt, filename=log_fp, filemode='w')
- else:
- logging.basicConfig(level=log_level, format=fmt)
- logging.basicConfig(level=log_level)
-
- # Load the yaml file containing the coupled forecast-and-observations
- # verification (vx) configuration dictionary.
- metplus_conf_dir = Path(os.path.join(home_dir, 'parm', 'metplus')).resolve()
- config_fn = ''.join(['vx_config_', vx_type, '.yaml'])
- config_fp = Path(os.path.join(metplus_conf_dir, config_fn)).resolve()
- fgs_fields_levels_threshes_cpld = load_config_file(config_fp)
-
- msg = create_pprinted_msg(
- vars_dict = {'fgs_fields_levels_threshes_cpld': fgs_fields_levels_threshes_cpld},
- indent_str = ' '*0,
- add_nl_after_varname = True)
- logging.debug(msg)
-
- # Loop through the field groups in the coupled vx configuration dictionary
- # and generate two separate vx configuration dictionaries, one for forecasts
- # and another for observations.
- fgs_fields_levels_threshes_fcst = {}
- fgs_fields_levels_threshes_obs = {}
- indent_incr = 4
- indent_size = indent_incr
- indent_str = ' '*indent_size
- for field_group, fields_levels_threshes_cpld in fgs_fields_levels_threshes_cpld.items():
-
- msg = create_pprinted_msg(
- vars_dict = {'field_group': field_group},
- indent_str = indent_str)
- logging.debug(msg)
-
- # Loop over the field names associated with the current field group.
- #
- # Note that the following variables have to be lists of dictionaries
- # (where each dictionary contains only one key-value pair) instead of
- # dictionaries because the field names might be repeated and thus cannot
- # be used as dictionary keys. For example, in the ADPSFC field group,
- # the forecast fields CRAIN, CSNOW, CFRZR, and CICEP all have the
- # corresponding observation field PRWE but with different thresholds,
- # so although fields_levels_threshes_fcst could be a dictionary with
- # CRAIN, CSNOW, CFRZR, and CICEP as keys, fields_levels_threshes_obs
- # cannot be a dictionary because the string PRWE cannot be used as a key
- # more than once.
- fields_levels_threshes_fcst = []
- fields_levels_threshes_obs = []
- indent_size += indent_incr
- indent_str = ' '*indent_size
- for field_cpld, levels_threshes_cpld in fields_levels_threshes_cpld.items():
-
- msg = create_pprinted_msg(
- vars_dict = {'field_cpld': field_cpld},
- indent_str = indent_str)
- logging.debug(msg)
-
- # Parse the current coupled field name to extract the forecast and
- # observation field names.
- field_fcst, field_obs = extract_fcst_obs_vals_from_cpld(field_cpld)
-
- msg = create_pprinted_msg(
- vars_dict = {'field_fcst': field_fcst, 'field_obs': field_obs},
- indent_str = indent_str)
- logging.debug(msg)
-
- # Loop over the levels associated with the current field.
- levels_threshes_fcst = {}
- levels_threshes_obs = {}
- indent_size += indent_incr
- indent_str = ' '*indent_size
- for level_cpld, threshes_cpld in levels_threshes_cpld.items():
-
- msg = create_pprinted_msg(
- vars_dict = {'level_cpld': level_cpld},
- indent_str = indent_str)
- logging.debug(msg)
-
- # Parse the current coupled level to extract the forecast and observation
- # levels.
- level_fcst, level_obs = extract_fcst_obs_vals_from_cpld(level_cpld)
-
- msg = create_pprinted_msg(
- vars_dict = {'level_fcst': level_fcst, 'level_obs': level_obs},
- indent_str = indent_str)
- logging.debug(msg)
-
- # Loop over the thresholds associated with the current level.
- threshes_fcst = []
- threshes_obs = []
- indent_size += indent_incr
- indent_str = ' '*indent_size
- for thresh_cpld in threshes_cpld:
-
- msg = create_pprinted_msg(
- vars_dict = {'thresh_cpld': thresh_cpld},
- indent_str = indent_str)
- logging.debug(msg)
-
- # Parse the current coupled threshold to extract the forecast and
- # observation thresholds.
- thresh_fcst, thresh_obs = extract_fcst_obs_vals_from_cpld(thresh_cpld)
-
- msg = create_pprinted_msg(
- vars_dict = {'thresh_fcst': thresh_fcst, 'thresh_obs': thresh_obs},
- indent_str = indent_str)
- logging.debug(msg)
-
- threshes_fcst.append(thresh_fcst)
- threshes_obs.append(thresh_obs)
-
- indent_size -= indent_incr
- indent_str = ' '*indent_size
- msg = create_pprinted_msg(
- vars_dict = {'threshes_fcst': threshes_fcst,
- 'threshes_obs': threshes_obs},
- indent_str = indent_str,
- add_nl_after_varname = True)
- logging.debug(msg)
-
- levels_threshes_fcst[level_fcst] = threshes_fcst
- levels_threshes_obs[level_obs] = threshes_obs
-
- indent_size -= indent_incr
- indent_str = ' '*indent_size
- msg = create_pprinted_msg(
- vars_dict = {'levels_threshes_fcst': levels_threshes_fcst,
- 'levels_threshes_obs': levels_threshes_obs},
- indent_str = indent_str,
- add_nl_after_varname = True)
- logging.debug(msg)
-
- fields_levels_threshes_fcst.append({field_fcst: levels_threshes_fcst})
- fields_levels_threshes_obs.append({field_obs: levels_threshes_obs})
-
- indent_size -= indent_incr
- indent_str = ' '*indent_size
- msg = create_pprinted_msg(
- vars_dict = {'fields_levels_threshes_fcst': fields_levels_threshes_fcst,
- 'fields_levels_threshes_obs': fields_levels_threshes_obs},
- indent_str = indent_str,
- add_nl_after_varname = True)
- logging.debug(msg)
-
- fgs_fields_levels_threshes_fcst[field_group] = fields_levels_threshes_fcst
- fgs_fields_levels_threshes_obs[field_group] = fields_levels_threshes_obs
-
- indent_size -= indent_incr
- indent_str = ' '*indent_size
- msg = create_pprinted_msg(
- vars_dict = {'fgs_fields_levels_threshes_fcst': fgs_fields_levels_threshes_fcst,
- 'fgs_fields_levels_threshes_obs': fgs_fields_levels_threshes_obs},
- indent_str = indent_str,
- add_nl_after_varname = True)
- logging.debug(msg)
-
- # We now have a verification configuration dictionary for forecasts and
- # a separate one for the observations. To conveniently write these to a
- # file, first place (wrap) them in a higher-level dictionary.
- vx_config_dict = {'fcst': fgs_fields_levels_threshes_fcst,
- 'obs': fgs_fields_levels_threshes_obs}
-
- # Write the contents of the higher-level dictionary to file.
- output_fn = ''.join(['vx_config_', vx_type, '.', outfile_type])
- output_fp = Path(os.path.join(outdir, output_fn)).resolve()
- with open(f'{output_fp}', 'w') as fn:
- if outfile_type == 'txt':
- dict_to_str = get_pprint_str(vx_config_dict, ' ')
- fn.write(dict_to_str)
- elif outfile_type == 'yaml':
- yaml_vars = yaml.dump(vx_config_dict, fn)
-
- return None
-#
-# -----------------------------------------------------------------------
-#
-# Call the function defined above.
-#
-# -----------------------------------------------------------------------
-#
-if __name__ == "__main__":
-
- parser = argparse.ArgumentParser(
- description='Read in and process verification configuration file'
- )
-
- default_vx_type = 'det'
- parser.add_argument('--vx_type',
- type=str,
- required=True,
- choices=['det', 'ens'],
- default=default_vx_type,
- help=dedent(f"""
- String that determines whether to read in the deterministic or ensemble
- verification configuration file.
- """))
-
- default_outfile_type = 'txt'
- parser.add_argument('--outfile_type',
- type=str,
- required=True,
- choices=['txt', 'yaml'],
- default=default_outfile_type,
- help=dedent(f"""
- Type of output file. The output consists of a high-level dictionary
- containing two keys: 'fcst' and 'obs'. The value of 'fcst' is the vx
- configuration dictionary for forecasts, and the value of 'obs' is the vx
- dictionary for observations. If outfile_type is set to 'txt', this high-
- level dictionary is saved to a text file in a form that can be read in by
- the SRW App's ex-scripts for the verification tasks. In particular, this
- form contains the curly braces and brackets that define dictionaries and
- lists in python code (but that would normally not appear in a yaml file).
- If outfile_type is set to 'yaml', then the high-level dictionary is saved
- to a yaml-formatted file.
- """))
-
- parser.add_argument('--outdir',
- type=str,
- required=False,
- default='./',
- help=dedent(f"""
- Directory in which to place the output file containing the decoupled
- (i.e. with forecast and observation information placed in separate data
- structures) verifcation configuration information.
- """))
-
- args = parser.parse_args()
-
- decouple_fcst_obs_vx_config(vx_type=args.vx_type, outfile_type=args.outfile_type, outdir=args.outdir)
-
From 59c78fb14736e9f69938bfa2d74ded6f0f227832 Mon Sep 17 00:00:00 2001
From: EdwardSnyder-NOAA <96196752+EdwardSnyder-NOAA@users.noreply.github.com>
Date: Wed, 15 May 2024 08:57:11 -0500
Subject: [PATCH 12/39] [develop] Add the remaining UFS Case Studies (#1081)
Add the remaining UFS Case Studies to the SRW App as WE2E tests. These new tests were added to the comprehensive and coverage files as well.
---
tests/WE2E/machine_suites/comprehensive | 5 +++
.../WE2E/machine_suites/comprehensive.derecho | 5 +++
.../machine_suites/comprehensive.noaacloud | 5 +++
tests/WE2E/machine_suites/comprehensive.orion | 5 +++
tests/WE2E/machine_suites/coverage.derecho | 2 +
tests/WE2E/machine_suites/coverage.gaea | 1 +
.../WE2E/machine_suites/coverage.hera.gnu.com | 1 +
tests/WE2E/machine_suites/coverage.hercules | 1 +
.../config.2019_hurricane_lorenzo.yaml | 38 +++++++++++++++++++
.../config.2019_memorial_day_heat_wave.yaml | 36 ++++++++++++++++++
...onfig.2020_denver_radiation_inversion.yaml | 38 +++++++++++++++++++
.../config.2020_easter_storm.yaml | 38 +++++++++++++++++++
.../config.2020_jan_cold_blast.yaml | 38 +++++++++++++++++++
13 files changed, 213 insertions(+)
create mode 100644 tests/WE2E/test_configs/ufs_case_studies/config.2019_hurricane_lorenzo.yaml
create mode 100644 tests/WE2E/test_configs/ufs_case_studies/config.2019_memorial_day_heat_wave.yaml
create mode 100644 tests/WE2E/test_configs/ufs_case_studies/config.2020_denver_radiation_inversion.yaml
create mode 100644 tests/WE2E/test_configs/ufs_case_studies/config.2020_easter_storm.yaml
create mode 100644 tests/WE2E/test_configs/ufs_case_studies/config.2020_jan_cold_blast.yaml
diff --git a/tests/WE2E/machine_suites/comprehensive b/tests/WE2E/machine_suites/comprehensive
index 3af6ae0db4..8c546918a0 100644
--- a/tests/WE2E/machine_suites/comprehensive
+++ b/tests/WE2E/machine_suites/comprehensive
@@ -2,6 +2,11 @@
2020_CAPE
2019_hurricane_barry
2019_halloween_storm
+2019_hurricane_lorenzo
+2019_memorial_day_heat_wave
+2020_denver_radiation_inversion
+2020_easter_storm
+2020_jan_cold_blast
community
custom_ESGgrid
custom_ESGgrid_Central_Asia_3km
diff --git a/tests/WE2E/machine_suites/comprehensive.derecho b/tests/WE2E/machine_suites/comprehensive.derecho
index 9ce8d067ac..a28718a10a 100644
--- a/tests/WE2E/machine_suites/comprehensive.derecho
+++ b/tests/WE2E/machine_suites/comprehensive.derecho
@@ -2,6 +2,11 @@
2020_CAPE
2019_hurricane_barry
2019_halloween_storm
+2019_hurricane_lorenzo
+2019_memorial_day_heat_wave
+2020_denver_radiation_inversion
+2020_easter_storm
+2020_jan_cold_blast
community
custom_ESGgrid
#custom_ESGgrid_Central_Asia_3km
diff --git a/tests/WE2E/machine_suites/comprehensive.noaacloud b/tests/WE2E/machine_suites/comprehensive.noaacloud
index 23c0aa8456..6c01bd70a8 100644
--- a/tests/WE2E/machine_suites/comprehensive.noaacloud
+++ b/tests/WE2E/machine_suites/comprehensive.noaacloud
@@ -56,6 +56,11 @@ specify_template_filenames
2020_CAPE
2019_hurricane_barry
2019_halloween_storm
+2019_hurricane_lorenzo
+2019_memorial_day_heat_wave
+2020_denver_radiation_inversion
+2020_easter_storm
+2020_jan_cold_blast
get_from_AWS_ics_GEFS_lbcs_GEFS_fmt_grib2_2022040400_ensemble_2mems
get_from_NOMADS_ics_FV3GFS_lbcs_FV3GFS
long_fcst
diff --git a/tests/WE2E/machine_suites/comprehensive.orion b/tests/WE2E/machine_suites/comprehensive.orion
index 739b4fff8e..ce71fe05db 100644
--- a/tests/WE2E/machine_suites/comprehensive.orion
+++ b/tests/WE2E/machine_suites/comprehensive.orion
@@ -2,6 +2,11 @@
2020_CAPE
2019_hurricane_barry
2019_halloween_storm
+2019_hurricane_lorenzo
+2019_memorial_day_heat_wave
+2020_denver_radiation_inversion
+2020_easter_storm
+2020_jan_cold_blast
community
custom_ESGgrid
custom_ESGgrid_Central_Asia_3km
diff --git a/tests/WE2E/machine_suites/coverage.derecho b/tests/WE2E/machine_suites/coverage.derecho
index c2a770672e..a948c76033 100644
--- a/tests/WE2E/machine_suites/coverage.derecho
+++ b/tests/WE2E/machine_suites/coverage.derecho
@@ -7,3 +7,5 @@ grid_SUBCONUS_Ind_3km_ics_HRRR_lbcs_HRRR_suite_HRRR
pregen_grid_orog_sfc_climo
specify_template_filenames
2019_hurricane_barry
+2019_memorial_day_heat_wave
+2020_denver_radiation_inversion
diff --git a/tests/WE2E/machine_suites/coverage.gaea b/tests/WE2E/machine_suites/coverage.gaea
index e6aba6ea3d..970fdf4086 100644
--- a/tests/WE2E/machine_suites/coverage.gaea
+++ b/tests/WE2E/machine_suites/coverage.gaea
@@ -8,3 +8,4 @@ grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_HRRR_suite_HRRR
grid_RRFS_CONUScompact_3km_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta
grid_SUBCONUS_Ind_3km_ics_RAP_lbcs_RAP_suite_RRFS_v1beta_plot
2020_CAPE
+2020_easter_storm
diff --git a/tests/WE2E/machine_suites/coverage.hera.gnu.com b/tests/WE2E/machine_suites/coverage.hera.gnu.com
index 4c802781f9..c2018a6e78 100644
--- a/tests/WE2E/machine_suites/coverage.hera.gnu.com
+++ b/tests/WE2E/machine_suites/coverage.hera.gnu.com
@@ -8,3 +8,4 @@ long_fcst
MET_verification_only_vx
MET_ensemble_verification_only_vx_time_lag
2019_halloween_storm
+2020_jan_cold_blast
diff --git a/tests/WE2E/machine_suites/coverage.hercules b/tests/WE2E/machine_suites/coverage.hercules
index 273de3108e..ec37d81a56 100644
--- a/tests/WE2E/machine_suites/coverage.hercules
+++ b/tests/WE2E/machine_suites/coverage.hercules
@@ -9,3 +9,4 @@ grid_RRFS_NA_13km_ics_FV3GFS_lbcs_FV3GFS_suite_RAP
grid_SUBCONUS_Ind_3km_ics_NAM_lbcs_NAM_suite_GFS_v16
MET_verification_only_vx
specify_EXTRN_MDL_SYSBASEDIR_ICS_LBCS
+2019_hurricane_lorenzo
diff --git a/tests/WE2E/test_configs/ufs_case_studies/config.2019_hurricane_lorenzo.yaml b/tests/WE2E/test_configs/ufs_case_studies/config.2019_hurricane_lorenzo.yaml
new file mode 100644
index 0000000000..557607d810
--- /dev/null
+++ b/tests/WE2E/test_configs/ufs_case_studies/config.2019_hurricane_lorenzo.yaml
@@ -0,0 +1,38 @@
+metadata:
+ description: |-
+ This test is to ensure that the workflow running in community mode
+ completes successfully on the RRFS_CONUS_13km grid using the FV3_GFS_v16
+ physics suite with ICs and LBCs derived from the UFS-CASE-STUDIES platforms
+ for 2019 Hurricane Lorenzo.
+ # NOTE: FCST_LEN_HRS is set to 6 because it can take a very long time to
+ # download these files, which can delay the WE2E testing process.
+ # To capture the event, extend the FCST_LEN_HRS from 6 to 90.
+user:
+ RUN_ENVIR: community
+platform:
+ EXTRN_MDL_DATA_STORES: aws
+workflow:
+ CCPP_PHYS_SUITE: FV3_GFS_v16
+ PREDEF_GRID_NAME: RRFS_CONUS_13km
+ DATE_FIRST_CYCL: '2019092512'
+ DATE_LAST_CYCL: '2019092512'
+ FCST_LEN_HRS: 6
+ PREEXISTING_DIR_METHOD: rename
+task_get_extrn_ics:
+ EXTRN_MDL_NAME_ICS: UFS-CASE-STUDY
+ FV3GFS_FILE_FMT_ICS: nemsio
+task_get_extrn_lbcs:
+ EXTRN_MDL_NAME_LBCS: UFS-CASE-STUDY
+ LBC_SPEC_INTVL_HRS: 3
+ FV3GFS_FILE_FMT_LBCS: nemsio
+rocoto:
+ tasks:
+ task_get_extrn_ics:
+ walltime: 06:00:00
+ task_get_extrn_lbcs:
+ walltime: 06:00:00
+ metatask_run_ensemble:
+ task_make_lbcs_mem#mem#:
+ walltime: 06:00:00
+ task_run_fcst_mem#mem#:
+ walltime: 06:00:00
diff --git a/tests/WE2E/test_configs/ufs_case_studies/config.2019_memorial_day_heat_wave.yaml b/tests/WE2E/test_configs/ufs_case_studies/config.2019_memorial_day_heat_wave.yaml
new file mode 100644
index 0000000000..fcba9c7924
--- /dev/null
+++ b/tests/WE2E/test_configs/ufs_case_studies/config.2019_memorial_day_heat_wave.yaml
@@ -0,0 +1,36 @@
+metadata:
+ description: |-
+ This test is to ensure that the workflow running in community mode
+ completes successfully on the RRFS_CONUS_13km grid using the FV3_GFS_v16
+ physics suite with ICs and LBCs derived from the UFS-CASE-STUDIES platforms
+ for 2019 Memorial Day Heat Wave.
+ # NOTE: FCST_LEN_HRS is set to 6 because it can take a very long time to
+ # download these files, which can delay the WE2E testing process.
+ # To capture the event, extend the FCST_LEN_HRS from 6 to 24.
+user:
+ RUN_ENVIR: community
+platform:
+ EXTRN_MDL_DATA_STORES: aws
+workflow:
+ CCPP_PHYS_SUITE: FV3_GFS_v16
+ PREDEF_GRID_NAME: RRFS_CONUS_13km
+ DATE_FIRST_CYCL: '2019052300'
+ DATE_LAST_CYCL: '2019052300'
+ FCST_LEN_HRS: 6
+ PREEXISTING_DIR_METHOD: rename
+task_get_extrn_ics:
+ EXTRN_MDL_NAME_ICS: UFS-CASE-STUDY
+ FV3GFS_FILE_FMT_ICS: nemsio
+task_get_extrn_lbcs:
+ EXTRN_MDL_NAME_LBCS: UFS-CASE-STUDY
+ LBC_SPEC_INTVL_HRS: 6
+ FV3GFS_FILE_FMT_LBCS: nemsio
+rocoto:
+ tasks:
+ task_get_extrn_ics:
+ walltime: 06:00:00
+ task_get_extrn_lbcs:
+ walltime: 06:00:00
+ metatask_run_ensemble:
+ task_make_lbcs_mem#mem#:
+ walltime: 06:00:00
diff --git a/tests/WE2E/test_configs/ufs_case_studies/config.2020_denver_radiation_inversion.yaml b/tests/WE2E/test_configs/ufs_case_studies/config.2020_denver_radiation_inversion.yaml
new file mode 100644
index 0000000000..8bf5ece9ee
--- /dev/null
+++ b/tests/WE2E/test_configs/ufs_case_studies/config.2020_denver_radiation_inversion.yaml
@@ -0,0 +1,38 @@
+metadata:
+ description: |-
+ This test is to ensure that the workflow running in community mode
+ completes successfully on the RRFS_CONUS_13km grid using the FV3_GFS_v16
+ physics suite with ICs and LBCs derived from the UFS-CASE-STUDIES platforms
+ for 2020 Denver Radiation Inversion.
+ # NOTE: FCST_LEN_HRS is set to 6 because it can take a very long time to
+ # download these files, which can delay the WE2E testing process.
+ # To capture the event, extend the FCST_LEN_HRS from 6 to 90.
+user:
+ RUN_ENVIR: community
+platform:
+ EXTRN_MDL_DATA_STORES: aws
+workflow:
+ CCPP_PHYS_SUITE: FV3_GFS_v16
+ PREDEF_GRID_NAME: RRFS_CONUS_13km
+ DATE_FIRST_CYCL: '2020042912'
+ DATE_LAST_CYCL: '2020042912'
+ FCST_LEN_HRS: 6
+ PREEXISTING_DIR_METHOD: rename
+task_get_extrn_ics:
+ EXTRN_MDL_NAME_ICS: UFS-CASE-STUDY
+ FV3GFS_FILE_FMT_ICS: nemsio
+task_get_extrn_lbcs:
+ EXTRN_MDL_NAME_LBCS: UFS-CASE-STUDY
+ LBC_SPEC_INTVL_HRS: 3
+ FV3GFS_FILE_FMT_LBCS: nemsio
+rocoto:
+ tasks:
+ task_get_extrn_ics:
+ walltime: 06:00:00
+ task_get_extrn_lbcs:
+ walltime: 06:00:00
+ metatask_run_ensemble:
+ task_make_lbcs_mem#mem#:
+ walltime: 06:00:00
+ task_run_fcst_mem#mem#:
+ walltime: 06:00:00
diff --git a/tests/WE2E/test_configs/ufs_case_studies/config.2020_easter_storm.yaml b/tests/WE2E/test_configs/ufs_case_studies/config.2020_easter_storm.yaml
new file mode 100644
index 0000000000..3c619c06bb
--- /dev/null
+++ b/tests/WE2E/test_configs/ufs_case_studies/config.2020_easter_storm.yaml
@@ -0,0 +1,38 @@
+metadata:
+ description: |-
+ This test is to ensure that the workflow running in community mode
+ completes successfully on the RRFS_CONUS_13km grid using the FV3_GFS_v16
+ physics suite with ICs and LBCs derived from the UFS-CASE-STUDIES platforms
+ for 2020 Easter Sunday Storm.
+ # NOTE: FCST_LEN_HRS is set to 6 because it can take a very long time to
+ # download these files, which can delay the WE2E testing process.
+ # To capture the event, extend the FCST_LEN_HRS from 6 to 90.
+user:
+ RUN_ENVIR: community
+platform:
+ EXTRN_MDL_DATA_STORES: aws
+workflow:
+ CCPP_PHYS_SUITE: FV3_GFS_v16
+ PREDEF_GRID_NAME: RRFS_CONUS_13km
+ DATE_FIRST_CYCL: '2020040912'
+ DATE_LAST_CYCL: '2020040912'
+ FCST_LEN_HRS: 6
+ PREEXISTING_DIR_METHOD: rename
+task_get_extrn_ics:
+ EXTRN_MDL_NAME_ICS: UFS-CASE-STUDY
+ FV3GFS_FILE_FMT_ICS: nemsio
+task_get_extrn_lbcs:
+ EXTRN_MDL_NAME_LBCS: UFS-CASE-STUDY
+ LBC_SPEC_INTVL_HRS: 3
+ FV3GFS_FILE_FMT_LBCS: nemsio
+rocoto:
+ tasks:
+ task_get_extrn_ics:
+ walltime: 06:00:00
+ task_get_extrn_lbcs:
+ walltime: 06:00:00
+ metatask_run_ensemble:
+ task_make_lbcs_mem#mem#:
+ walltime: 06:00:00
+ task_run_fcst_mem#mem#:
+ walltime: 06:00:00
diff --git a/tests/WE2E/test_configs/ufs_case_studies/config.2020_jan_cold_blast.yaml b/tests/WE2E/test_configs/ufs_case_studies/config.2020_jan_cold_blast.yaml
new file mode 100644
index 0000000000..6121228cb8
--- /dev/null
+++ b/tests/WE2E/test_configs/ufs_case_studies/config.2020_jan_cold_blast.yaml
@@ -0,0 +1,38 @@
+metadata:
+ description: |-
+ This test is to ensure that the workflow running in community mode
+ completes successfully on the RRFS_CONUS_13km grid using the FV3_GFS_v16
+ physics suite with ICs and LBCs derived from the UFS-CASE-STUDIES platforms
+ for 2020 January Cold Blast.
+ # NOTE: FCST_LEN_HRS is set to 6 because it can take a very long time to
+ # download these files, which can delay the WE2E testing process.
+ # To capture the event, extend the FCST_LEN_HRS from 6 to 90.
+user:
+ RUN_ENVIR: community
+platform:
+ EXTRN_MDL_DATA_STORES: aws
+workflow:
+ CCPP_PHYS_SUITE: FV3_GFS_v16
+ PREDEF_GRID_NAME: RRFS_CONUS_13km
+ DATE_FIRST_CYCL: '2020011812'
+ DATE_LAST_CYCL: '2020011812'
+ FCST_LEN_HRS: 6
+ PREEXISTING_DIR_METHOD: rename
+task_get_extrn_ics:
+ EXTRN_MDL_NAME_ICS: UFS-CASE-STUDY
+ FV3GFS_FILE_FMT_ICS: nemsio
+task_get_extrn_lbcs:
+ EXTRN_MDL_NAME_LBCS: UFS-CASE-STUDY
+ LBC_SPEC_INTVL_HRS: 3
+ FV3GFS_FILE_FMT_LBCS: nemsio
+rocoto:
+ tasks:
+ task_get_extrn_ics:
+ walltime: 06:00:00
+ task_get_extrn_lbcs:
+ walltime: 06:00:00
+ metatask_run_ensemble:
+ task_make_lbcs_mem#mem#:
+ walltime: 06:00:00
+ task_run_fcst_mem#mem#:
+ walltime: 06:00:00
From 6ddf61b371fd3952cd6164fca1dad8432ed11dfe Mon Sep 17 00:00:00 2001
From: Michael Lueken <63728921+MichaelLueken@users.noreply.github.com>
Date: Wed, 15 May 2024 14:34:41 -0400
Subject: [PATCH 13/39] [develop] Update WM and UPP hashes (#1083)
* Update weather model to 26cb9e6 from May 2 and UPP to 5faac75 from April 9
* Increase walltime from 1 hour to 2 hours for the grid_SUBCONUS_Ind_3km_ics_FV3GFS_lbcs_FV3GFS_suite_WoFS_v0 WE2E test configuration
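A hash bump in Externals.cfg only takes effect once the externals are re-fetched; a minimal sketch of the usual sequence, assuming the manage_externals tool bundled with the App and the documented devbuild.sh options (platform/compiler values are illustrative):

    # Re-check-out externals at the new pinned hashes, then rebuild
    ./manage_externals/checkout_externals
    ./devbuild.sh --platform=hera --compiler=intel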
---
Externals.cfg | 4 ++--
...SUBCONUS_Ind_3km_ics_FV3GFS_lbcs_FV3GFS_suite_WoFS_v0.yaml | 2 +-
2 files changed, 3 insertions(+), 3 deletions(-)
diff --git a/Externals.cfg b/Externals.cfg
index c76f7d8845..6a05d66e94 100644
--- a/Externals.cfg
+++ b/Externals.cfg
@@ -12,7 +12,7 @@ protocol = git
repo_url = https://github.com/ufs-community/ufs-weather-model
# Specify either a branch name or a hash but not both.
#branch = develop
-hash = 4f32a4b
+hash = 26cb9e6
local_path = sorc/ufs-weather-model
required = True
@@ -21,7 +21,7 @@ protocol = git
repo_url = https://github.com/NOAA-EMC/UPP
# Specify either a branch name or a hash but not both.
#branch = develop
-hash = 945cb2c
+hash = 5faac75
local_path = sorc/UPP
required = True
diff --git a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_SUBCONUS_Ind_3km_ics_FV3GFS_lbcs_FV3GFS_suite_WoFS_v0.yaml b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_SUBCONUS_Ind_3km_ics_FV3GFS_lbcs_FV3GFS_suite_WoFS_v0.yaml
index 120a38291e..0d850b0147 100644
--- a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_SUBCONUS_Ind_3km_ics_FV3GFS_lbcs_FV3GFS_suite_WoFS_v0.yaml
+++ b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_SUBCONUS_Ind_3km_ics_FV3GFS_lbcs_FV3GFS_suite_WoFS_v0.yaml
@@ -19,7 +19,7 @@ rocoto:
taskgroups: '{{ ["parm/wflow/prep.yaml", "parm/wflow/coldstart.yaml", "parm/wflow/post.yaml", "parm/wflow/verify_pre.yaml", "parm/wflow/verify_det.yaml", "parm/wflow/test.yaml"]|include }}'
metatask_run_ensemble:
task_run_fcst_mem#mem#:
- walltime: 01:00:00
+ walltime: 02:00:00
task_get_extrn_ics:
EXTRN_MDL_NAME_ICS: FV3GFS
FV3GFS_FILE_FMT_ICS: grib2
From 28cbbc8cae87f6147f346cd9b94c11aca9c02e37 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Tue, 21 May 2024 10:17:29 -0400
Subject: [PATCH 14/39] [develop] Bump requests from 2.31.0 to 2.32.0 in /doc
(#1085)
updated-dependencies:
- dependency-name: requests
dependency-type: indirect
...
Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
doc/requirements.txt | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/doc/requirements.txt b/doc/requirements.txt
index a2f32cd83f..f4dbdfb2a9 100644
--- a/doc/requirements.txt
+++ b/doc/requirements.txt
@@ -40,7 +40,7 @@ pygments==2.17.2
# via sphinx
pyyaml==6.0.1
# via pybtex
-requests==2.31.0
+requests==2.32.0
# via sphinx
six==1.16.0
# via
From 51f4981943337ea54277f4412834ff995b30806a Mon Sep 17 00:00:00 2001
From: Bruce Kropp - Raytheon
<104453151+BruceKropp-Raytheon@users.noreply.github.com>
Date: Fri, 31 May 2024 05:50:32 -0700
Subject: [PATCH 15/39] [develop] Fix CI scripts to save log file names that the
Jenkinsfile needs for PW cloud platform builds (#1087)
Make sure the log file names match what the Jenkinsfile expects, specifically for the PW cloud platforms (Azure, AWS, GCP).
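The rename matters because the Jenkinsfile globs for artifacts named with the exported ${SRW_PLATFORM} value, while ${platform} is a locally derived name that can differ on the PW clouds. A hedged illustration (the variable values below are invented for the example):

    # Values exported by the pipeline (illustrative):
    SRW_PLATFORM=azclusternoaav2eastus
    SRW_COMPILER=intel
    # The Jenkinsfile expects artifacts named from the exported variable:
    echo "srw_build-${SRW_PLATFORM}-${SRW_COMPILER}.txt"
    echo "we2e_test_results-${SRW_PLATFORM}-${SRW_COMPILER}.txt"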
---
.cicd/scripts/srw_build.sh | 2 +-
.cicd/scripts/srw_test.sh | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/.cicd/scripts/srw_build.sh b/.cicd/scripts/srw_build.sh
index 4733c4a4ca..25546561eb 100755
--- a/.cicd/scripts/srw_build.sh
+++ b/.cicd/scripts/srw_build.sh
@@ -36,6 +36,6 @@ cd -
# Create combined log file for upload to s3
build_dir="${workspace}/build_${SRW_COMPILER}"
cat ${build_dir}/log.cmake ${build_dir}/log.make \
- >${build_dir}/srw_build-${platform}-${SRW_COMPILER}.txt
+ >${build_dir}/srw_build-${SRW_PLATFORM}-${SRW_COMPILER}.txt
exit $build_exit
diff --git a/.cicd/scripts/srw_test.sh b/.cicd/scripts/srw_test.sh
index 8ed4756987..90273f2730 100755
--- a/.cicd/scripts/srw_test.sh
+++ b/.cicd/scripts/srw_test.sh
@@ -45,7 +45,7 @@ fi
cd ${we2e_test_dir}
# Progress file
-progress_file="${workspace}/we2e_test_results-${platform}-${SRW_COMPILER}.txt"
+progress_file="${workspace}/we2e_test_results-${SRW_PLATFORM}-${SRW_COMPILER}.txt"
/usr/bin/time -p -f '{\n "cpu": "%P"\n, "memMax": "%M"\n, "mem": {"text": "%X", "data": "%D", "swaps": "%W", "context": "%c", "waits": "%w"}\n, "pagefaults": {"major": "%F", "minor": "%R"}\n, "filesystem": {"inputs": "%I", "outputs": "%O"}\n, "time": {"real": "%e", "user": "%U", "sys": "%S"}\n}' -o ${WORKSPACE}/${SRW_PLATFORM}-${SRW_COMPILER}-time-srw_test.json \
./setup_WE2E_tests.sh ${platform} ${SRW_PROJECT} ${SRW_COMPILER} ${test_type} \
--expt_basedir=${we2e_experiment_base_dir} | tee ${progress_file}; \
From 4c2cedea792b6f37ac22923d7f8d2844ea0ba7c3 Mon Sep 17 00:00:00 2001
From: Michael Lueken <63728921+MichaelLueken@users.noreply.github.com>
Date: Wed, 5 Jun 2024 08:54:55 -0400
Subject: [PATCH 16/39] [develop] Update WM hash to 1c6b4d4 (May 16) and UPP
hash to be0410e (April 23) (#1086)
* Updated the UFS-WM hash to 1c6b4d4 (May 16) and the UPP hash to be0410e (April 23).
* Increased walltime from 01:00:00 to 02:30:00 for the custom_ESGgrid_SF_1p1km WE2E test to allow it to properly run on Hera using executables built with GNU compilers.
---
Externals.cfg | 4 ++--
.../custom_grids/config.custom_ESGgrid_SF_1p1km.yaml | 2 +-
2 files changed, 3 insertions(+), 3 deletions(-)
diff --git a/Externals.cfg b/Externals.cfg
index 6a05d66e94..9b2b544ffd 100644
--- a/Externals.cfg
+++ b/Externals.cfg
@@ -12,7 +12,7 @@ protocol = git
repo_url = https://github.com/ufs-community/ufs-weather-model
# Specify either a branch name or a hash but not both.
#branch = develop
-hash = 26cb9e6
+hash = 1c6b4d4
local_path = sorc/ufs-weather-model
required = True
@@ -21,7 +21,7 @@ protocol = git
repo_url = https://github.com/NOAA-EMC/UPP
# Specify either a branch name or a hash but not both.
#branch = develop
-hash = 5faac75
+hash = be0410e
local_path = sorc/UPP
required = True
diff --git a/tests/WE2E/test_configs/custom_grids/config.custom_ESGgrid_SF_1p1km.yaml b/tests/WE2E/test_configs/custom_grids/config.custom_ESGgrid_SF_1p1km.yaml
index 6d9e2e0d6d..867b4675a0 100644
--- a/tests/WE2E/test_configs/custom_grids/config.custom_ESGgrid_SF_1p1km.yaml
+++ b/tests/WE2E/test_configs/custom_grids/config.custom_ESGgrid_SF_1p1km.yaml
@@ -57,7 +57,7 @@ rocoto:
tasks:
metatask_run_ensemble:
task_run_fcst_mem#mem#:
- walltime: 01:00:00
+ walltime: 02:30:00
task_make_ics_mem#mem#:
nnodes: 16
ppn: 12
From 81be59e608c130b2c488356097db539ee1523bb0 Mon Sep 17 00:00:00 2001
From: "Chan-Hoo.Jeon-NOAA" <60152248+chan-hoo@users.noreply.github.com>
Date: Fri, 7 Jun 2024 08:53:51 -0400
Subject: [PATCH 17/39] [SRW-AQM] Port SRW-AQM to Derecho (#1090)
* Port SRW-AQM to Derecho
---------
Co-authored-by: Chan-Hoo Jeon
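Since the Derecho run commands are now pinned to an absolute PALS launcher path, a quick pre-run sanity check is possible (the path is the one set in ush/machine/derecho.yaml below):

    # Confirm the pinned PALS launcher still exists on Derecho
    pals_mpiexec=/opt/cray/pe/pals/1.2.11/bin/mpiexec
    [ -x "${pals_mpiexec}" ] || echo "PALS launcher moved; update ush/machine/derecho.yaml"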
---
Externals.cfg | 4 ++--
.../CustomizingTheWorkflow/ConfigWorkflow.rst | 10 ++++++++--
modulefiles/tasks/derecho/aqm_ics.local.lua | 2 +-
modulefiles/tasks/derecho/aqm_lbcs.local.lua | 2 +-
modulefiles/tasks/derecho/fire_emission.local.lua | 1 -
.../tasks/derecho/nexus_emission.local.lua | 4 +---
modulefiles/tasks/derecho/nexus_gfs_sfc.local.lua | 1 -
.../tasks/derecho/nexus_post_split.local.lua | 4 +---
modulefiles/tasks/derecho/point_source.local.lua | 1 -
modulefiles/tasks/derecho/pre_post_stat.local.lua | 2 +-
modulefiles/wflow_derecho.lua | 2 --
ush/config.aqm.yaml | 2 +-
ush/config_defaults.yaml | 15 +++++++++++----
ush/machine/derecho.yaml | 10 ++++++++--
14 files changed, 35 insertions(+), 25 deletions(-)
diff --git a/Externals.cfg b/Externals.cfg
index 9b2b544ffd..25ec5f79b9 100644
--- a/Externals.cfg
+++ b/Externals.cfg
@@ -30,7 +30,7 @@ protocol = git
repo_url = https://github.com/noaa-oar-arl/NEXUS
# Specify either a branch name or a hash but not both.
#branch = develop
-hash = 40346b6
+hash = e153072
local_path = sorc/arl_nexus
required = True
@@ -39,7 +39,7 @@ protocol = git
repo_url = https://github.com/NOAA-EMC/AQM-utils
# Specify either a branch name or a hash but not both.
#branch = develop
-hash = d953bd1
+hash = e236acd
local_path = sorc/AQM-utils
required = True
diff --git a/doc/UsersGuide/CustomizingTheWorkflow/ConfigWorkflow.rst b/doc/UsersGuide/CustomizingTheWorkflow/ConfigWorkflow.rst
index 3bfa5bdf7d..960275d2bb 100644
--- a/doc/UsersGuide/CustomizingTheWorkflow/ConfigWorkflow.rst
+++ b/doc/UsersGuide/CustomizingTheWorkflow/ConfigWorkflow.rst
@@ -156,8 +156,8 @@ These settings define platform-specific run commands. Users should set run comma
``RUN_CMD_SERIAL``: (Default: "")
The run command for some serial jobs.
-``RUN_CMD_AQM``: (Default: "")
- The run command for some AQM tasks.
+``RUN_CMD_NEXUS``: (Default: "")
+ The run command for the AQM NEXUS tasks.
``RUN_CMD_AQMLBC``: (Default: "")
The run command for the ``aqm_lbcs`` task.
@@ -271,6 +271,12 @@ These parameters are associated with the fixed (i.e., static) files. On :srw-wik
``FIXshp``: (Default: "")
System directory containing the graphics shapefiles. On Level 1 systems, these are set within the machine files. Users on other systems will need to provide the path to the directory that contains the *Natural Earth* shapefiles.
+``FIXaqm``: (Default: "")
+ Path to system directory containing AQM fixed files.
+
+``FIXemis``: (Default: "")
+ Path to system directory containing AQM emission data files.
+
``FIXcrtm``: (Default: "")
Path to system directory containing CRTM fixed files.
diff --git a/modulefiles/tasks/derecho/aqm_ics.local.lua b/modulefiles/tasks/derecho/aqm_ics.local.lua
index 30f1157fbb..9b519c10f6 100644
--- a/modulefiles/tasks/derecho/aqm_ics.local.lua
+++ b/modulefiles/tasks/derecho/aqm_ics.local.lua
@@ -1,2 +1,2 @@
-load("nco/5.0.6")
+load("nco/5.1.9")
load("python_srw_aqm")
diff --git a/modulefiles/tasks/derecho/aqm_lbcs.local.lua b/modulefiles/tasks/derecho/aqm_lbcs.local.lua
index 30f1157fbb..9b519c10f6 100644
--- a/modulefiles/tasks/derecho/aqm_lbcs.local.lua
+++ b/modulefiles/tasks/derecho/aqm_lbcs.local.lua
@@ -1,2 +1,2 @@
-load("nco/5.0.6")
+load("nco/5.1.9")
load("python_srw_aqm")
diff --git a/modulefiles/tasks/derecho/fire_emission.local.lua b/modulefiles/tasks/derecho/fire_emission.local.lua
index 86252a9a4f..df0e35d5da 100644
--- a/modulefiles/tasks/derecho/fire_emission.local.lua
+++ b/modulefiles/tasks/derecho/fire_emission.local.lua
@@ -1,2 +1 @@
-load("ncarenv")
load("python_srw_aqm")
diff --git a/modulefiles/tasks/derecho/nexus_emission.local.lua b/modulefiles/tasks/derecho/nexus_emission.local.lua
index e7f216375c..9b519c10f6 100644
--- a/modulefiles/tasks/derecho/nexus_emission.local.lua
+++ b/modulefiles/tasks/derecho/nexus_emission.local.lua
@@ -1,4 +1,2 @@
-load("nco/5.0.6")
-
-load("ncarenv")
+load("nco/5.1.9")
load("python_srw_aqm")
diff --git a/modulefiles/tasks/derecho/nexus_gfs_sfc.local.lua b/modulefiles/tasks/derecho/nexus_gfs_sfc.local.lua
index 86252a9a4f..df0e35d5da 100644
--- a/modulefiles/tasks/derecho/nexus_gfs_sfc.local.lua
+++ b/modulefiles/tasks/derecho/nexus_gfs_sfc.local.lua
@@ -1,2 +1 @@
-load("ncarenv")
load("python_srw_aqm")
diff --git a/modulefiles/tasks/derecho/nexus_post_split.local.lua b/modulefiles/tasks/derecho/nexus_post_split.local.lua
index e7f216375c..9b519c10f6 100644
--- a/modulefiles/tasks/derecho/nexus_post_split.local.lua
+++ b/modulefiles/tasks/derecho/nexus_post_split.local.lua
@@ -1,4 +1,2 @@
-load("nco/5.0.6")
-
-load("ncarenv")
+load("nco/5.1.9")
load("python_srw_aqm")
diff --git a/modulefiles/tasks/derecho/point_source.local.lua b/modulefiles/tasks/derecho/point_source.local.lua
index 86252a9a4f..df0e35d5da 100644
--- a/modulefiles/tasks/derecho/point_source.local.lua
+++ b/modulefiles/tasks/derecho/point_source.local.lua
@@ -1,2 +1 @@
-load("ncarenv")
load("python_srw_aqm")
diff --git a/modulefiles/tasks/derecho/pre_post_stat.local.lua b/modulefiles/tasks/derecho/pre_post_stat.local.lua
index 30f1157fbb..9b519c10f6 100644
--- a/modulefiles/tasks/derecho/pre_post_stat.local.lua
+++ b/modulefiles/tasks/derecho/pre_post_stat.local.lua
@@ -1,2 +1,2 @@
-load("nco/5.0.6")
+load("nco/5.1.9")
load("python_srw_aqm")
diff --git a/modulefiles/wflow_derecho.lua b/modulefiles/wflow_derecho.lua
index d9a3e24e2f..28bc7ec2f6 100644
--- a/modulefiles/wflow_derecho.lua
+++ b/modulefiles/wflow_derecho.lua
@@ -5,8 +5,6 @@ on the CISL machine Derecho (Cray)
whatis([===[Loads libraries for running the UFS SRW Workflow on Derecho ]===])
-load("ncarenv")
-
append_path("MODULEPATH","/glade/work/epicufsrt/contrib/derecho/rocoto/modulefiles")
load("rocoto")
diff --git a/ush/config.aqm.yaml b/ush/config.aqm.yaml
index 155f846add..21a73591ee 100644
--- a/ush/config.aqm.yaml
+++ b/ush/config.aqm.yaml
@@ -24,7 +24,7 @@ workflow:
COLDSTART: false # set to true for cold start
WARMSTART_CYCLE_DIR: '/scratch2/NAGAPE/epic/SRW-AQM_DATA/aqm_data/restart/2023111000' # for hera
# WARMSTART_CYCLE_DIR: '/work/noaa/epic/SRW-AQM_DATA/aqm_data/restart/2023111000' # for orion/hercules
-# WARMSTART_CYCLE_DIR: '' # for derecho
+# WARMSTART_CYCLE_DIR: '/glade/work/chanhooj/SRW-AQM_DATA/aqm_data/restart/2023111000' # for derecho
nco:
envir_default: test_aqm_warmstart
NET_default: aqm
diff --git a/ush/config_defaults.yaml b/ush/config_defaults.yaml
index 6a403754cb..c9c0fc7cb8 100644
--- a/ush/config_defaults.yaml
+++ b/ush/config_defaults.yaml
@@ -245,8 +245,8 @@ platform:
# RUN_CMD_SERIAL:
# The run command for some serial jobs
#
- # RUN_CMD_AQM:
- # The run command for some AQM tasks.
+ # RUN_CMD_NEXUS:
+ # The run command for the AQM NEXUS tasks.
#
# RUN_CMD_AQMLBC:
# The run command for the AQM_LBCS task.
@@ -258,9 +258,8 @@ platform:
RUN_CMD_FCST: ""
RUN_CMD_POST: ""
RUN_CMD_PRDGEN: ""
- RUN_CMD_AQM: ""
+ RUN_CMD_NEXUS: ""
RUN_CMD_AQMLBC: ""
-
#
#-----------------------------------------------------------------------
#
@@ -421,6 +420,12 @@ platform:
# FIXshp:
# System directory where the graphics shapefiles are located.
#
+ # FIXaqm:
+ # System directory where AQM data files are located
+ #
+ # FIXemis:
+ # System directory where AQM emission data files are located.
+ #
# FIXcrtm:
# System directory where CRTM fixed files are located
#
@@ -435,6 +440,8 @@ platform:
FIXorg: ""
FIXsfc: ""
FIXshp: ""
+ FIXaqm: ""
+ FIXemis: ""
FIXcrtm: ""
FIXcrtmupp: ""
#
diff --git a/ush/machine/derecho.yaml b/ush/machine/derecho.yaml
index b12e65513c..8bc768732f 100644
--- a/ush/machine/derecho.yaml
+++ b/ush/machine/derecho.yaml
@@ -15,8 +15,8 @@ platform:
RUN_CMD_PRDGEN: mpiexec -n $nprocs
RUN_CMD_SERIAL: time
RUN_CMD_UTILS: mpiexec -n $nprocs
- RUN_CMD_NEXUS: mpiexec -n $nprocs
- RUN_CMD_AQMLBC: mpiexec -n ${numts}
+ RUN_CMD_NEXUS: /opt/cray/pe/pals/1.2.11/bin/mpiexec -n $nprocs
+ RUN_CMD_AQMLBC: /opt/cray/pe/pals/1.2.11/bin/mpiexec -n ${numts}
PRE_TASK_CMDS: '{ ulimit -s unlimited; ulimit -a; }'
TEST_EXTRN_MDL_SOURCE_BASEDIR: /glade/work/epicufsrt/contrib/UFS_SRW_data/develop/input_model_data
TEST_AQM_INPUT_BASEDIR: /glade/work/epicufsrt/contrib/UFS_SRW_data/develop/aqm_data
@@ -31,6 +31,8 @@ platform:
FIXorg: /glade/work/epicufsrt/contrib/UFS_SRW_data/develop/fix/fix_orog
FIXsfc: /glade/work/epicufsrt/contrib/UFS_SRW_data/develop/fix/fix_sfc_climo
FIXshp: /glade/work/epicufsrt/contrib/UFS_SRW_data/develop/NaturalEarth
+ FIXaqm: /glade/work/chanhooj/SRW-AQM_DATA/fix_aqm
+ FIXemis: /glade/work/chanhooj/SRW-AQM_DATA/fix_emis
EXTRN_MDL_DATA_STORES: aws
data:
ics_lbcs:
@@ -42,3 +44,7 @@ data:
HRRR: /glade/work/epicufsrt/contrib/UFS_SRW_data/develop/input_model_data/HRRR/${yyyymmdd}${hh}
RAP: /glade/work/epicufsrt/contrib/UFS_SRW_data/develop/input_model_data/RAP/${yyyymmdd}${hh}
GSMGFS: /glade/work/epicufsrt/contrib/UFS_SRW_data/develop/input_model_data/GSMGFS/${yyyymmdd}${hh}
+cpl_aqm_parm:
+ COMINfire_default: /glade/work/chanhooj/SRW-AQM_DATA/aqm_data/RAVE_fire
+ COMINgefs_default: /glade/work/chanhooj/SRW-AQM_DATA/aqm_data/GEFS_DATA
+ NEXUS_GFS_SFC_DIR: /glade/work/chanhooj/SRW-AQM_DATA/aqm_data/GFS_SFC_DATA
From dbfed17f6c89b52196bb9cf8fab0873e65989fb4 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Tue, 18 Jun 2024 10:13:44 -0400
Subject: [PATCH 18/39] Bump urllib3 from 2.2.0 to 2.2.2 in /doc (#1097)
Bumps [urllib3](https://github.com/urllib3/urllib3) from 2.2.0 to 2.2.2.
- [Release notes](https://github.com/urllib3/urllib3/releases)
- [Changelog](https://github.com/urllib3/urllib3/blob/main/CHANGES.rst)
- [Commits](https://github.com/urllib3/urllib3/compare/2.2.0...2.2.2)
---
updated-dependencies:
- dependency-name: urllib3
dependency-type: indirect
...
Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
doc/requirements.txt | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/doc/requirements.txt b/doc/requirements.txt
index f4dbdfb2a9..90efd3211e 100644
--- a/doc/requirements.txt
+++ b/doc/requirements.txt
@@ -72,5 +72,5 @@ sphinxcontrib-qthelp==1.0.7
# via sphinx
sphinxcontrib-serializinghtml==1.1.10
# via sphinx
-urllib3==2.2.0
+urllib3==2.2.2
# via requests
From 94dc192a033a5e5752f43a2d898aca2b2f0c3b56 Mon Sep 17 00:00:00 2001
From: RatkoVasic-NOAA <37597874+RatkoVasic-NOAA@users.noreply.github.com>
Date: Fri, 21 Jun 2024 10:59:31 -0400
Subject: [PATCH 19/39] [develop] Upgrade SRW to spack-stack 1.6.0 from 1.5.1
(#1093)
Since the ufs-weather-model was upgraded to spack-stack 1.6.0, the SRW App has been upgraded as well.
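One way to confirm a platform has picked up the 1.6.0 stack is to probe the new module tree before building; a minimal interactive sketch for Hera, using the paths and versions set in the diff below (a sanity check, not part of the build itself):

    # Point Lmod at the spack-stack 1.6.0 unified environment and spot-check modules
    module use /scratch1/NCEPDEV/nems/role.epic/spack-stack/spack-stack-1.6.0/envs/unified-env-rocky8/install/modulefiles/Core
    module load stack-intel/2021.5.0
    module avail prod_util   # should now list prod_util/2.1.1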
---------
Co-authored-by: EdwardSnyder-NOAA
---
modulefiles/build_derecho_intel.lua | 6 ++---
modulefiles/build_gaea_intel.lua | 4 ++--
modulefiles/build_hera_gnu.lua | 23 +++++++++++--------
modulefiles/build_hera_intel.lua | 7 +++---
modulefiles/build_hercules_intel.lua | 9 ++++----
modulefiles/build_jet_intel.lua | 5 ++--
modulefiles/build_noaacloud_intel.lua | 2 +-
modulefiles/build_orion_intel.lua | 19 +++++++--------
modulefiles/srw_common.lua | 12 +++++-----
.../tasks/noaacloud/plot_allvars.local.lua | 7 ++----
modulefiles/tasks/noaacloud/python_srw.lua | 5 ++++
modulefiles/tasks/noaacloud/run_vx.local.lua | 5 ++++
modulefiles/tasks/orion/run_vx.local.lua | 6 +----
modulefiles/wflow_noaacloud.lua | 10 ++++----
modulefiles/wflow_orion.lua | 5 ++--
15 files changed, 65 insertions(+), 60 deletions(-)
diff --git a/modulefiles/build_derecho_intel.lua b/modulefiles/build_derecho_intel.lua
index e057c9e5dc..491a94f912 100644
--- a/modulefiles/build_derecho_intel.lua
+++ b/modulefiles/build_derecho_intel.lua
@@ -6,15 +6,15 @@ the CISL machine Derecho (Cray) using Intel@2021.10.0
whatis([===[Loads libraries needed for building the UFS SRW App on Derecho ]===])
prepend_path("MODULEPATH","/lustre/desc1/scratch/epicufsrt/contrib/modulefiles_extra")
-prepend_path("MODULEPATH", "/glade/work/epicufsrt/contrib/spack-stack/derecho/spack-stack-1.5.1/envs/unified-env/install/modulefiles/Core")
+prepend_path("MODULEPATH", "/glade/work/epicufsrt/contrib/spack-stack/derecho/spack-stack-1.6.0/envs/unified-env/install/modulefiles/Core")
load(pathJoin("stack-intel", os.getenv("stack_intel_ver") or "2021.10.0"))
load(pathJoin("stack-cray-mpich", os.getenv("stack_cray_mpich_ver") or "8.1.25"))
-load(pathJoin("cmake", os.getenv("cmake_ver") or "3.26.3"))
+load(pathJoin("cmake", os.getenv("cmake_ver") or "3.23.1"))
load("srw_common")
-load(pathJoin("prod_util", os.getenv("prod_util_ver") or "1.2.2"))
+load(pathJoin("prod_util", os.getenv("prod_util_ver") or "2.1.1"))
setenv("CMAKE_Platform","derecho.intel")
diff --git a/modulefiles/build_gaea_intel.lua b/modulefiles/build_gaea_intel.lua
index 0eca20b5e1..b47209194c 100644
--- a/modulefiles/build_gaea_intel.lua
+++ b/modulefiles/build_gaea_intel.lua
@@ -5,14 +5,14 @@ the NOAA RDHPC machine Gaea C5 using Intel-2023.1.0
whatis([===[Loads libraries needed for building the UFS SRW App on Gaea C5 ]===])
-prepend_path("MODULEPATH","/ncrc/proj/epic/spack-stack/spack-stack-1.5.1/envs/unified-env/install/modulefiles/Core")
+prepend_path("MODULEPATH","/ncrc/proj/epic/spack-stack/spack-stack-1.6.0/envs/unified-env/install/modulefiles/Core")
stack_intel_ver=os.getenv("stack_intel_ver") or "2023.1.0"
load(pathJoin("stack-intel", stack_intel_ver))
stack_mpich_ver=os.getenv("stack_mpich_ver") or "8.1.25"
load(pathJoin("stack-cray-mpich", stack_mpich_ver))
-stack_python_ver=os.getenv("stack_python_ver") or "3.10.8"
+stack_python_ver=os.getenv("stack_python_ver") or "3.10.13"
load(pathJoin("stack-python", stack_python_ver))
cmake_ver=os.getenv("cmake_ver") or "3.23.1"
diff --git a/modulefiles/build_hera_gnu.lua b/modulefiles/build_hera_gnu.lua
index 7defa36bbf..8854108966 100644
--- a/modulefiles/build_hera_gnu.lua
+++ b/modulefiles/build_hera_gnu.lua
@@ -1,23 +1,28 @@
help([[
This module loads libraries for building the UFS SRW App on
-the NOAA RDHPC machine Hera using GNU 9.2.0
+the NOAA RDHPC machine Hera using GNU 13.3.0
]])
-whatis([===[Loads libraries needed for building the UFS SRW App on Hera using GNU 9.2.0 ]===])
+whatis([===[Loads libraries needed for building the UFS SRW App on Hera using GNU 13.3.0 ]===])
-prepend_path("MODULEPATH", "/scratch1/NCEPDEV/nems/role.epic/spack-stack/spack-stack-1.5.1/envs/unified-env-rocky8/install/modulefiles/Core")
-prepend_path("MODULEPATH", "/scratch1/NCEPDEV/jcsda/jedipara/spack-stack/modulefiles")
+prepend_path("MODULEPATH", "/scratch2/NCEPDEV/stmp1/role.epic/installs/gnu/modulefiles")
+prepend_path("MODULEPATH", "/scratch2/NCEPDEV/stmp1/role.epic/installs/openmpi/modulefiles")
+prepend_path("MODULEPATH", "/scratch2/NCEPDEV/stmp1/role.epic/spack-stack/spack-stack-1.6.0_gnu13/envs/ufs-wm-srw-rocky8/install/modulefiles/Core")
-load("stack-gcc/9.2.0")
-load("stack-openmpi/4.1.5")
-load("stack-python/3.10.8")
+load("stack-gcc/13.3.0")
+load("stack-openmpi/4.1.6")
+load("stack-python/3.10.13")
load("cmake/3.23.1")
load("srw_common")
load(pathJoin("nccmp", os.getenv("nccmp_ver") or "1.9.0.1"))
-load(pathJoin("nco", os.getenv("nco_ver") or "5.0.6"))
-load(pathJoin("openblas", os.getenv("openblas_ver") or "0.3.19"))
+load(pathJoin("nco", os.getenv("nco_ver") or "5.1.6"))
+load(pathJoin("openblas", os.getenv("openblas_ver") or "0.3.24"))
+
+prepend_path("CPPFLAGS", " -I/apps/slurm_hera/23.11.3/include/slurm"," ")
+prepend_path("LD_LIBRARY_PATH", "/apps/slurm_hera/23.11.3/lib")
+setenv("LD_PRELOAD", "/scratch2/NCEPDEV/stmp1/role.epic/installs/gnu/13.3.0/lib64/libstdc++.so.6")
setenv("CC", "mpicc")
setenv("CXX", "mpic++")
diff --git a/modulefiles/build_hera_intel.lua b/modulefiles/build_hera_intel.lua
index 72a90d9f47..d8e793044c 100644
--- a/modulefiles/build_hera_intel.lua
+++ b/modulefiles/build_hera_intel.lua
@@ -8,8 +8,7 @@ whatis([===[Loads libraries needed for building the UFS SRW App on Hera ]===])
prepend_path("MODULEPATH","/contrib/sutils/modulefiles")
load("sutils")
-prepend_path("MODULEPATH", "/scratch1/NCEPDEV/nems/role.epic/spack-stack/spack-stack-1.5.1/envs/unified-env-rocky8/install/modulefiles/Core")
-prepend_path("MODULEPATH", "/scratch1/NCEPDEV/jcsda/jedipara/spack-stack/modulefiles")
+prepend_path("MODULEPATH", "/scratch1/NCEPDEV/nems/role.epic/spack-stack/spack-stack-1.6.0/envs/unified-env-rocky8/install/modulefiles/Core")
stack_intel_ver=os.getenv("stack_intel_ver") or "2021.5.0"
load(pathJoin("stack-intel", stack_intel_ver))
@@ -17,7 +16,7 @@ load(pathJoin("stack-intel", stack_intel_ver))
stack_impi_ver=os.getenv("stack_impi_ver") or "2021.5.1"
load(pathJoin("stack-intel-oneapi-mpi", stack_impi_ver))
-stack_python_ver=os.getenv("stack_python_ver") or "3.10.8"
+stack_python_ver=os.getenv("stack_python_ver") or "3.10.13"
load(pathJoin("stack-python", stack_python_ver))
cmake_ver=os.getenv("cmake_ver") or "3.23.1"
@@ -27,7 +26,7 @@ load("srw_common")
load(pathJoin("nccmp", os.getenv("nccmp_ver") or "1.9.0.1"))
load(pathJoin("nco", os.getenv("nco_ver") or "5.0.6"))
-load(pathJoin("prod_util", os.getenv("prod_util_ver") or "1.2.2"))
+load(pathJoin("prod_util", os.getenv("prod_util_ver") or "2.1.1"))
setenv("CMAKE_C_COMPILER","mpiicc")
setenv("CMAKE_CXX_COMPILER","mpiicpc")
diff --git a/modulefiles/build_hercules_intel.lua b/modulefiles/build_hercules_intel.lua
index 531f48a080..b65890f1c4 100644
--- a/modulefiles/build_hercules_intel.lua
+++ b/modulefiles/build_hercules_intel.lua
@@ -5,19 +5,18 @@ the MSU machine Hercules using intel-oneapi-compilers/2022.2.1
whatis([===[Loads libraries needed for building the UFS SRW App on Hercules ]===])
-prepend_path("MODULEPATH", "/work/noaa/epic/role-epic/spack-stack/hercules/spack-stack-1.5.1/envs/unified-env/install/modulefiles/Core")
-prepend_path("MODULEPATH", "/work/noaa/da/role-da/spack-stack/modulefiles")
+prepend_path("MODULEPATH", "/work/noaa/epic/role-epic/spack-stack/hercules/spack-stack-1.6.0/envs/unified-env/install/modulefiles/Core")
load("stack-intel/2021.9.0")
load("stack-intel-oneapi-mpi/2021.9.0")
-load("stack-python/3.10.8")
-load("cmake/3.26.3")
+load("stack-python/3.10.13")
+load("cmake/3.23.1")
load("srw_common")
load("nccmp/1.9.0.1")
load("nco/5.0.6")
-load(pathJoin("prod_util", os.getenv("prod_util_ver") or "1.2.2"))
+load(pathJoin("prod_util", os.getenv("prod_util_ver") or "2.1.1"))
setenv("CFLAGS","-diag-disable=10441")
setenv("FFLAGS","-diag-disable=10441")
diff --git a/modulefiles/build_jet_intel.lua b/modulefiles/build_jet_intel.lua
index 925fef3853..854b4404cb 100644
--- a/modulefiles/build_jet_intel.lua
+++ b/modulefiles/build_jet_intel.lua
@@ -5,12 +5,11 @@ the NOAA RDHPC machine Jet using Intel-2021.5.0
whatis([===[Loads libraries needed for building the UFS SRW App on Jet ]===])
-prepend_path("MODULEPATH","/mnt/lfs4/HFIP/hfv3gfs/role.epic/spack-stack/spack-stack-1.5.1/envs/unified-env-rocky8/install/modulefiles/Core")
-prepend_path("MODULEPATH", "/lfs4/HFIP/hfv3gfs/spack-stack/modulefiles")
+prepend_path("MODULEPATH","/mnt/lfs4/HFIP/hfv3gfs/role.epic/spack-stack/spack-stack-1.6.0/envs/unified-env-rocky8/install/modulefiles/Core")
load("stack-intel/2021.5.0")
load("stack-intel-oneapi-mpi/2021.5.1")
-load("stack-python/3.10.8")
+load("stack-python/3.10.13")
load("cmake/3.23.1")
load("srw_common")
diff --git a/modulefiles/build_noaacloud_intel.lua b/modulefiles/build_noaacloud_intel.lua
index 0b6a9c1ca4..dd774e8ed9 100644
--- a/modulefiles/build_noaacloud_intel.lua
+++ b/modulefiles/build_noaacloud_intel.lua
@@ -5,7 +5,7 @@ the NOAA cloud using Intel-oneapi
whatis([===[Loads libraries needed for building the UFS SRW App on NOAA cloud ]===])
-prepend_path("MODULEPATH", "/contrib/spack-stack/spack-stack-1.5.1/envs/unified-env/install/modulefiles/Core")
+prepend_path("MODULEPATH", "/contrib/spack-stack/spack-stack-1.6.0/envs/unified-env/install/modulefiles/Core")
prepend_path("MODULEPATH", "/apps/modules/modulefiles")
prepend_path("PATH", "/contrib/EPIC/bin")
load("stack-intel")
diff --git a/modulefiles/build_orion_intel.lua b/modulefiles/build_orion_intel.lua
index 8e895c5bee..b2f3d85c00 100644
--- a/modulefiles/build_orion_intel.lua
+++ b/modulefiles/build_orion_intel.lua
@@ -1,24 +1,25 @@
help([[
This module loads libraries for building the UFS SRW App on
-the MSU machine Orion using Intel-2022.1.2
+the MSU machine Orion using intel-oneapi-compilers/2021.9.0
]])
whatis([===[Loads libraries needed for building the UFS SRW App on Orion ]===])
-prepend_path("MODULEPATH", "/work/noaa/epic/role-epic/spack-stack/orion/spack-stack-1.5.1/envs/unified-env/install/modulefiles/Core")
-prepend_path("MODULEPATH", "/work/noaa/da/role-da/spack-stack/modulefiles")
+prepend_path("MODULEPATH", "/work/noaa/epic/role-epic/spack-stack/orion/spack-stack-1.6.0/envs/unified-env-rocky9/install/modulefiles/Core")
-load("stack-intel/2022.0.2")
-load("stack-intel-oneapi-mpi/2021.5.1")
-load("stack-python/3.10.8")
-load("cmake/3.22.1")
+load("stack-intel/2021.9.0")
+load("stack-intel-oneapi-mpi/2021.9.0")
+load("stack-python/3.10.13")
+load("cmake/3.23.1")
load("srw_common")
load("nccmp/1.9.0.1")
load("nco/5.0.6")
-load("wget")
-load(pathJoin("prod_util", os.getenv("prod_util_ver") or "1.2.2"))
+load(pathJoin("prod_util", os.getenv("prod_util_ver") or "2.1.1"))
+
+setenv("CFLAGS","-diag-disable=10441")
+setenv("FFLAGS","-diag-disable=10441")
setenv("CMAKE_C_COMPILER","mpiicc")
setenv("CMAKE_CXX_COMPILER","mpiicpc")
diff --git a/modulefiles/srw_common.lua b/modulefiles/srw_common.lua
index 79c67283f9..cb2047cbe1 100644
--- a/modulefiles/srw_common.lua
+++ b/modulefiles/srw_common.lua
@@ -3,21 +3,21 @@ load("zlib/1.2.13")
load("libpng/1.6.37")
load("netcdf-c/4.9.2")
-load("netcdf-fortran/4.6.0")
+load("netcdf-fortran/4.6.1")
load("parallelio/2.5.10")
-load("esmf/8.5.0")
-load("fms/2023.02.01")
+load("esmf/8.6.0")
+load("fms/2023.04")
load("bacio/2.4.1")
-load("crtm/2.4.0")
+load("crtm/2.4.0.1")
load("g2/3.4.5")
load("g2tmpl/1.10.2")
load("ip/4.3.0")
-load("sp/2.3.3")
+load("sp/2.5.0")
load("w3emc/2.10.0")
load("gftl-shared/1.6.1")
-load("mapl/2.40.3-esmf-8.5.0")
+load("mapl/2.40.3-esmf-8.6.0")
load("nemsio/2.5.4")
load("sfcio/1.4.1")
diff --git a/modulefiles/tasks/noaacloud/plot_allvars.local.lua b/modulefiles/tasks/noaacloud/plot_allvars.local.lua
index cc122f69b2..b7e9528710 100644
--- a/modulefiles/tasks/noaacloud/plot_allvars.local.lua
+++ b/modulefiles/tasks/noaacloud/plot_allvars.local.lua
@@ -1,5 +1,2 @@
-unload("python")
-append_path("MODULEPATH","/contrib/EPIC/miniconda3/modulefiles")
-load(pathJoin("miniconda3", os.getenv("miniconda3_ver") or "4.12.0"))
-
-setenv("SRW_ENV", "regional_workflow")
+load("conda")
+setenv("SRW_ENV", "srw_graphics")
diff --git a/modulefiles/tasks/noaacloud/python_srw.lua b/modulefiles/tasks/noaacloud/python_srw.lua
index a2dd45084c..e6e4268c35 100644
--- a/modulefiles/tasks/noaacloud/python_srw.lua
+++ b/modulefiles/tasks/noaacloud/python_srw.lua
@@ -1,2 +1,7 @@
load("conda")
setenv("SRW_ENV", "srw_app")
+
+-- Add missing libstdc binary for Azure
+if os.getenv("PW_CSP") == "azure" then
+ setenv("LD_PRELOAD","/opt/nvidia/nsight-systems/2023.1.2/host-linux-x64/libstdc++.so.6")
+end
diff --git a/modulefiles/tasks/noaacloud/run_vx.local.lua b/modulefiles/tasks/noaacloud/run_vx.local.lua
index 737fc4f7cc..67b1b98ad6 100644
--- a/modulefiles/tasks/noaacloud/run_vx.local.lua
+++ b/modulefiles/tasks/noaacloud/run_vx.local.lua
@@ -25,3 +25,8 @@ end
load("ufs-pyenv")
load("conda")
setenv("SRW_ENV", "srw_app")
+
+-- Add missing libstdc binary for Azure
+if os.getenv("PW_CSP") == "azure" then
+ setenv("LD_PRELOAD","/opt/nvidia/nsight-systems/2023.1.2/host-linux-x64/libstdc++.so.6")
+end
diff --git a/modulefiles/tasks/orion/run_vx.local.lua b/modulefiles/tasks/orion/run_vx.local.lua
index 5bafb4d46b..737fc4f7cc 100644
--- a/modulefiles/tasks/orion/run_vx.local.lua
+++ b/modulefiles/tasks/orion/run_vx.local.lua
@@ -1,8 +1,6 @@
--[[
Compiler-specific modules are used for met and metplus libraries
--]]
---load("build_orion_intel")
-
local met_ver = (os.getenv("met_ver") or "11.1.0")
local metplus_ver = (os.getenv("metplus_ver") or "5.1.0")
if (mode() == "load") then
@@ -20,12 +18,10 @@ setenv("METPLUS_VERSION", metplus_ver)
setenv("METPLUS_ROOT", base_metplus)
setenv("METPLUS_PATH", base_metplus)
-
if (mode() == "unload") then
unload(pathJoin("met", met_ver))
unload(pathJoin("metplus",metplus_ver))
end
---load("ufs-pyenv")
-load("stack-python/3.10.8")
+load("ufs-pyenv")
load("conda")
setenv("SRW_ENV", "srw_app")
diff --git a/modulefiles/wflow_noaacloud.lua b/modulefiles/wflow_noaacloud.lua
index ebf907545b..5e0c0ca50a 100644
--- a/modulefiles/wflow_noaacloud.lua
+++ b/modulefiles/wflow_noaacloud.lua
@@ -8,15 +8,15 @@ whatis([===[Loads libraries needed for running the UFS SRW App on NOAA cloud ]==
prepend_path("MODULEPATH","/apps/modules/modulefiles")
load("rocoto")
-
-
load("conda")
-setenv("PROJ_LIB","/contrib/EPIC/miniconda3/4.12.0/envs/regional_workflow/share/proj")
-setenv("OPT","/contrib/EPIC/hpc-modules")
-append_path("PATH","/contrib/EPIC/miniconda3/4.12.0/envs/regional_workflow/bin")
prepend_path("PATH","/contrib/EPIC/bin")
+-- Add missing libstdc binary for Azure
+if os.getenv("PW_CSP") == "azure" then
+ setenv("LD_PRELOAD","/opt/nvidia/nsight-systems/2023.1.2/host-linux-x64/libstdc++.so.6")
+end
+
if mode() == "load" then
LmodMsgRaw([===[Please do the following to activate conda:
> conda activate srw_app
diff --git a/modulefiles/wflow_orion.lua b/modulefiles/wflow_orion.lua
index 711991bb09..8bbc5663da 100644
--- a/modulefiles/wflow_orion.lua
+++ b/modulefiles/wflow_orion.lua
@@ -6,9 +6,8 @@ the MSU machine Orion
whatis([===[Loads libraries needed for running SRW on Orion ]===])
load("contrib")
-load("rocoto")
-load("wget")
-
+load("ruby/3.2.3")
+load("rocoto/1.3.7")
unload("python")
load("conda")
From fe8fc68b0c22ccbd2181b28b88a0e77a9f6b3ba5 Mon Sep 17 00:00:00 2001
From: jdkublnick <47824899+jdkublnick@users.noreply.github.com>
Date: Fri, 21 Jun 2024 11:07:42 -0400
Subject: [PATCH 20/39] [develop]: Updated ConfigWorkflow.rst to reflect
changes to config_defaults.yaml (PI12) (#1095)
Updated ConfigWorkflow.rst to reflect recent changes to config_defaults.yaml in order to keep documentation up to date.
---------
Co-authored-by: Gillian Petro <96886803+gspetro-NOAA@users.noreply.github.com>
Co-authored-by: Michael Lueken <63728921+MichaelLueken@users.noreply.github.com>
---
.../CustomizingTheWorkflow/ConfigWorkflow.rst | 94 ++++++-------------
1 file changed, 27 insertions(+), 67 deletions(-)
diff --git a/doc/UsersGuide/CustomizingTheWorkflow/ConfigWorkflow.rst b/doc/UsersGuide/CustomizingTheWorkflow/ConfigWorkflow.rst
index 960275d2bb..52cce90c2c 100644
--- a/doc/UsersGuide/CustomizingTheWorkflow/ConfigWorkflow.rst
+++ b/doc/UsersGuide/CustomizingTheWorkflow/ConfigWorkflow.rst
@@ -538,7 +538,7 @@ CCPP Parameter
``CCPP_PHYS_SUITE_FP``: (Default: ``'{{ [workflow.EXPTDIR, CCPP_PHYS_SUITE_FN]|path_join }}'``)
The full path to the suite definition file (SDF) in the experiment directory.
-``CCPP_PHYS_DIR``: (Default: ``'{{ [user.UFS_WTHR_MDL_DIR, "FV3", "ccpp", "physics", "physics"] |path_join }}'``)
+``CCPP_PHYS_DIR``: (Default: ``'{{ [user.UFS_WTHR_MDL_DIR, "FV3", "ccpp", "physics", "physics", "SFC_Models", "Land", "Noahmp"] |path_join }}'``)
The directory containing the CCPP physics source code. This is needed to link table(s) contained in that repository.
Field Dictionary Parameters
@@ -707,7 +707,7 @@ A standard set of environment variables has been established for *nco* mode to s
``envir_default, NET_default, model_ver_default, RUN_default``:
Standard environment variables defined in the NCEP Central Operations WCOSS Implementation Standards document. These variables are used in forming the path to various directories containing input, output, and workflow files. The variables are defined in the :nco:`WCOSS Implementation Standards ` document (pp. 4-5) as follows:
- ``envir_default``: (Default: "para")
+ ``envir_default``: (Default: "test")
Set to "test" during the initial testing phase, "para" when running in parallel (on a schedule), and "prod" in production.
``NET_default``: (Default: "srw")
@@ -719,46 +719,28 @@ A standard set of environment variables has been established for *nco* mode to s
``RUN_default``: (Default: "srw")
Name of model run (third level of ``com`` directory structure). In general, same as ``${NET_default}``.
-``OPSROOT_default``: (Default: ``'{{ workflow.EXPT_BASEDIR }}/../nco_dirs'``)
- The operations root directory in *nco* mode.
-
-``COMROOT_default``: (Default: ``'{{ OPSROOT_default }}/com'``)
- The ``com`` root directory for input/output data that is located on the current system (typically ``$OPSROOT_default/com``).
-
-``DATAROOT_default``: (Default: ``'{{OPSROOT_default }}/tmp'``)
- Directory containing the (temporary) working directory for running jobs; typically named ``$OPSROOT_default/tmp`` in production.
-
-``DCOMROOT_default``: (Default: ``'{{OPSROOT_default }}/dcom'``)
- ``dcom`` root directory, typically ``$OPSROOT_default/dcom``. This directory contains input/incoming data that is retrieved from outside WCOSS.
-
-``LOGBASEDIR_default``: (Default: ``'{% if user.RUN_ENVIR == "nco" %}{{ [OPSROOT_default, "output"]|path_join }}{% else %}{{ [workflow.EXPTDIR, "log"]|path_join }}{% endif %}'``)
- Directory in which the log files from the workflow tasks will be placed.
-
-``COMIN_BASEDIR``: (Default: ``'{{ COMROOT_default }}/{{ NET_default }}/{{ model_ver_default }}'``)
- ``com`` directory for current model's input data, typically ``$COMROOT/$NET/$model_ver/$RUN.$PDY``.
-
-``COMOUT_BASEDIR``: (Default: ``'{{ COMROOT_default }}/{{ NET_default }}/{{ model_ver_default }}'``)
- ``com`` directory for current model's output data, typically ``$COMROOT/$NET/$model_ver/$RUN.$PDY``.
+``PTMP``: (Default: ``'{{ workflow.EXPT_BASEDIR }}/../nco_dirs'``)
+ User-defined path to the com type directories (``OPSROOT=$PTMP/$envir``).
``DBNROOT_default``: (Default: "")
Root directory for the data-alerting utilities.
-``SENDECF_default``: (Default: false)
+``SENDECF_default``: (Default: "NO")
Boolean variable used to control ``ecflow_client`` child commands.
-``SENDDBN_default``: (Default: false)
+``SENDDBN_default``: (Default: "NO")
Boolean variable used to control sending products off WCOSS2.
-``SENDDBN_NTC_default``: (Default: false)
+``SENDDBN_NTC_default``: (Default: "NO")
Boolean variable used to control sending products with WMO headers off WCOSS2.
-``SENDCOM_default``: (Default: false)
+``SENDCOM_default``: (Default: "YES")
Boolean variable to control data copies to ``$COMOUT``.
-``SENDWEB_default``: (Default: false)
+``SENDWEB_default``: (Default: "NO")
Boolean variable used to control sending products to a web server, often ``ncorzdm``.
-``KEEPDATA_default``: (Default: true)
+``KEEPDATA_default``: (Default: "YES")
Boolean variable used to specify whether or not the working directory should be kept upon successful job completion.
``MAILTO_default``: (Default: "")
@@ -1382,6 +1364,9 @@ Non-default parameters for the ``nexus_emission_*`` tasks are set in the ``task_
``PPN_NEXUS_EMISSION``: (Default: ``'{{ platform.NCORES_PER_NODE // OMP_NUM_THREADS_NEXUS_EMISSION }}'``)
Processes per node for the ``nexus_emission_*`` tasks.
+``NNODES_NEXUS_EMISSION``: (Default: 4)
+ The number of nodes to request from the job scheduler for the NEXUS emission task.
+
``KMP_AFFINITY_NEXUS_EMISSION``: (Default: "scatter")
Intel Thread Affinity Interface for the ``nexus_emission_*`` tasks. See :ref:`this note ` for more information on thread affinity.
@@ -1391,12 +1376,20 @@ Non-default parameters for the ``nexus_emission_*`` tasks are set in the ``task_
``OMP_STACKSIZE_NEXUS_EMISSION``: (Default: "1024m")
Controls the size of the stack for threads created by the OpenMP implementation.
+POINT_SOURCE Configuration Parameters
+------------------------------------------------
+Non-default parameters for the ``task_point_source`` tasks are set in the ``task_point_source:`` section of the ``config.yaml`` file.
+
+``PT_SRC_SUBDIR``: (Default: ``"NEI2016v1/v2023-01-PT"``)
+ Subdirectory structure of point source data under ``FIXemis``.
+ Full path: ``FIXemis/PT_SRC_SUBDIR``
+
BIAS_CORRECTION_O3 Configuration Parameters
-------------------------------------------------
Non-default parameters for the ``bias_correction_o3`` tasks are set in the ``task_bias_correction_o3:`` section of the ``config.yaml`` file.
-``KMP_AFFINITY_BIAS_CORRECTION_O3``: "scatter"
+``KMP_AFFINITY_BIAS_CORRECTION_O3``: (Default: "scatter")
Intel Thread Affinity Interface for the ``bias_correction_o3`` task. See :ref:`this note ` for more information on thread affinity.
``OMP_NUM_THREADS_BIAS_CORRECTION_O3``: (Default: 32)
@@ -1750,38 +1743,14 @@ Non-default parameters for coupled Air Quality Modeling (AQM) tasks are set in t
``DO_AQM_SAVE_FIRE``: (Default: false)
Archive fire emission file to HPSS.
-``DCOMINbio_default``: (Default: "")
- Path to the directory containing AQM bio files.
-
-``DCOMINdust_default``: (Default: "/path/to/dust/dir")
- Path to the directory containing AQM dust file.
-
-``DCOMINcanopy_default``: (Default: "/path/to/canopy/dir")
- Path to the directory containing AQM canopy files.
-
-``DCOMINfire_default``: (Default: "")
- Path to the directory containing AQM fire files.
-
-``DCOMINchem_lbcs_default``: (Default: "")
- Path to the directory containing chemical LBC files.
-
-``DCOMINgefs_default``: (Default: "")
- Path to the directory containing GEFS aerosol LBC files.
-
-``DCOMINpt_src_default``: (Default: "/path/to/point/source/base/directory")
- Parent directory containing point source files.
-
-``DCOMINairnow_default``: (Default: "/path/to/airnow/obaservation/data")
+``COMINairnow_default``: (Default: "/path/to/airnow/observation/data")
Path to the directory containing AIRNOW observation data.
-``COMINbicor``: (Default: "/path/to/historical/airnow/data/dir")
- Path of reading in historical training data for bias correction.
-
-``COMOUTbicor``: (Default: "/path/to/historical/airnow/data/dir")
- Path to save the current cycle's model output and AirNow observations as training data for future use. ``$COMINbicor`` and ``$COMOUTbicor`` can be distinguished by the ``${yyyy}${mm}${dd}`` under the same location.
+``COMINfire_default``: (Default: "")
+ Path to the directory containing AQM fire files.
-``AQM_CONFIG_DIR``: (Default: "")
- Configuration directory for AQM.
+``COMINgefs_default``: (Default: "")
+ Path to the directory containing GEFS aerosol LBC files.
``AQM_BIO_FILE``: (Default: "BEIS_SARC401.ncf")
File name of AQM BIO file.
@@ -1807,9 +1776,6 @@ Non-default parameters for coupled Air Quality Modeling (AQM) tasks are set in t
``AQM_FIRE_FILE_OFFSET_HRS``: (Default: 0)
Time offset when retrieving fire emission data files. In a real-time run, the data files for :term:`ICs/LBCs` are not ready for use until the case starts. To resolve this issue, a real-time run uses the input data files in the previous cycle. For example, if the experiment run cycle starts at 12z, and ``AQM_FIRE_FILE_OFFSET_HRS: 6``, the fire emission data file from the previous cycle (06z) is used.
-``AQM_FIRE_ARCHV_DIR``: (Default: "/path/to/archive/dir/for/RAVE/on/HPSS")
- Path to the archive directory for RAVE emission files on :term:`HPSS`.
-
``AQM_RC_FIRE_FREQUENCY``: (Default: "static")
Fire frequency in ``aqm.rc``.
@@ -1828,12 +1794,6 @@ Non-default parameters for coupled Air Quality Modeling (AQM) tasks are set in t
``AQM_GEFS_FILE_CYC``: (Default: "")
Cycle of the GEFS aerosol LBC files only if it is fixed.
-``NEXUS_INPUT_DIR``: (Default: "")
- Same as ``GRID_DIR`` but for the the air quality emission generation task. Should be blank for the default value specified in ``setup.sh``.
-
-``NEXUS_FIX_DIR``: (Default: "")
- Directory containing ``grid_spec`` files as the input file of NEXUS.
-
``NEXUS_GRID_FN``: (Default: "grid_spec_GSD_HRRR_25km.nc")
File name of the input ``grid_spec`` file of NEXUS.
From e5832d184575985f5bbc613a427696eb76cf31d1 Mon Sep 17 00:00:00 2001
From: gsketefian <31046882+gsketefian@users.noreply.github.com>
Date: Fri, 12 Jul 2024 06:57:23 -0600
Subject: [PATCH 21/39] [develop] Bug fix to support the %H format in METplus
via printf. (#1102)
This bug was encountered when verifying forecast output that has a 2-digit forecast hour in its name. Specifying the METplus format %H (to obtain a 2-digit forecast hour) in the workflow/verification configuration variable FCST_FN_TEMPLATE (and others) causes an error in the shell script eval_METplus_timestr_tmpl.sh because bash's printf utility does not support the %H format. This fixes the error using an approach similar to the one already used for the %HHH format, which yields 3-digit hours.
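The failure mode is easy to reproduce in a shell, and the fix maps %H onto a numeric format that printf does understand when the time type is a "lead" (elapsed) hour:

    # bash's printf has no %H conversion, so this errors out:
    printf "%H\n" 6
    # ...lead hours are therefore formatted numerically instead:
    printf "%02.0f\n" 6                  # prints 06
    # "init"/"vald" times still go through GNU date, which does support %H:
    date -d "2019-09-25 06:00" "+%H"     # prints 06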
---
ush/bash_utils/eval_METplus_timestr_tmpl.sh | 16 +++++++++++++++-
1 file changed, 15 insertions(+), 1 deletion(-)
diff --git a/ush/bash_utils/eval_METplus_timestr_tmpl.sh b/ush/bash_utils/eval_METplus_timestr_tmpl.sh
index 245369509b..572f7c68c4 100644
--- a/ush/bash_utils/eval_METplus_timestr_tmpl.sh
+++ b/ush/bash_utils/eval_METplus_timestr_tmpl.sh
@@ -163,9 +163,23 @@ cannot be empty:
#-----------------------------------------------------------------------
#
case "${METplus_time_fmt}" in
- "%Y%m%d%H"|"%Y%m%d"|"%H%M%S"|"%H")
+ "%Y%m%d%H"|"%Y%m%d"|"%H%M%S")
fmt="${METplus_time_fmt}"
;;
+ "%H")
+#
+# The "%H" format needs to be treated differently depending on if it's
+# formatting a "lead" time type or another (e.g. "init" or "vald") because
+# for "lead", the printf function is used below (which doesn't understand
+# the "%H" format) whereas for the others, the date utility is used (which
+# does understand "%H").
+#
+ if [ "${METplus_time_type}" = "lead" ]; then
+ fmt="%02.0f"
+ else
+ fmt="${METplus_time_fmt}"
+ fi
+ ;;
"%HHH")
#
# Print format assumes that the argument to printf (i.e. the number to
From 29429fedec7155c4815bcf4f7083e3dbadafa7d3 Mon Sep 17 00:00:00 2001
From: Gillian Petro <96886803+gspetro-NOAA@users.noreply.github.com>
Date: Mon, 15 Jul 2024 12:53:25 -0400
Subject: [PATCH 22/39] [develop]: Update requests and certifi in
requirements.txt (#1103)
* The Dependabot PR #1101 identified the need to update the certifi version, but requests should also be updated from the current (yanked) version in the requirements file.
* The README.md and doc/README files have also been updated.
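Note that doc/requirements.txt is generated rather than hand-edited; per the header comment visible in the diff below, the pins are refreshed with:

    # Regenerate the pinned requirements from the top-level spec (run in doc/)
    cd doc
    pip-compile --strip-extras requirements.in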
---
README.md | 6 +++---
doc/README | 5 +++--
doc/requirements.txt | 6 +++---
3 files changed, 9 insertions(+), 8 deletions(-)
diff --git a/README.md b/README.md
index 3bf56f4c21..bdda52279d 100644
--- a/README.md
+++ b/README.md
@@ -1,13 +1,13 @@
# UFS Short-Range Weather Application
-The Unified Forecast System (UFS) is a community-based, coupled, comprehensive Earth modeling system. NOAA's operational model suite for numerical weather prediction (NWP) is quickly transitioning to the UFS from a number of legacy modeling systems. The UFS enables research, development, and contribution opportunities within the broader Weather Enterprise (including government, industry, and academia). For more information about the UFS, visit the UFS Portal at https://ufscommunity.org/.
+The Unified Forecast System (UFS) is a community-based, coupled, comprehensive Earth modeling system. NOAA's operational model suite for numerical weather prediction (NWP) is quickly transitioning to the UFS from a number of legacy modeling systems. The UFS enables research, development, and contribution opportunities within the broader Weather Enterprise (including government, industry, and academia). For more information about the UFS, visit the UFS Portal at https://ufs.epic.noaa.gov/.
-The UFS includes multiple applications (see a complete list at https://ufscommunity.org/science/aboutapps/) that support different forecast durations and spatial domains. This documentation describes the development branch of the UFS Short-Range Weather (SRW) Application, which targets predictions of atmospheric behavior on a limited spatial domain and on time scales from minutes to several days. The development branch of the application is continually evolving as the system undergoes open development. The latest SRW App release (v2.2.0) represents a snapshot of this continuously evolving system.
+The UFS includes multiple applications (see a complete list at https://ufs.epic.noaa.gov/applications/) that support different forecast durations and spatial domains. This documentation describes the development branch of the UFS Short-Range Weather (SRW) Application, which targets predictions of atmospheric behavior on a limited spatial domain and on time scales from minutes to several days. The development branch of the application is continually evolving as the system undergoes open development. The latest SRW App release (v2.2.0) represents a snapshot of this continuously evolving system.
The UFS SRW App User's Guide associated with the development branch is at: https://ufs-srweather-app.readthedocs.io/en/develop/, while the guide specific to the SRW App v2.2.0 release can be found at: https://ufs-srweather-app.readthedocs.io/en/release-public-v2.2.0/. The repository is at: https://github.com/ufs-community/ufs-srweather-app.
For instructions on how to clone the repository, build the code, and run the workflow, see:
-- https://ufs-srweather-app.readthedocs.io/en/develop/BuildingRunningTesting/Quickstart.html
+- https://ufs-srweather-app.readthedocs.io/en/develop/UsersGuide/BuildingRunningTesting/Quickstart.html
For a debugging guide for users and developers in the field of Earth System Modeling, please see:
https://epic.noaa.gov/wp-content/uploads/2022/12/Debugging-Guide.pdf
diff --git a/doc/README b/doc/README
index 0ad8948eda..017f865384 100644
--- a/doc/README
+++ b/doc/README
@@ -20,10 +20,11 @@ Steps to build and use the Sphinx documentation tool:
To build html:
-$ cd ufs-srweather-app/docs/UsersGuide
-$ make clean && sphinx-build -b html source build
+$ cd ufs-srweather-app/doc
+$ make clean && sphinx-build -b html . build
The "make html" command can often be used in place of the previous command.
+"make doc" will both build the html and run the linkchecker.
Sphinx uses Latex to export the documentation as a PDF file. To build pdf:
diff --git a/doc/requirements.txt b/doc/requirements.txt
index 90efd3211e..e6d38a4eb8 100644
--- a/doc/requirements.txt
+++ b/doc/requirements.txt
@@ -2,13 +2,13 @@
# This file is autogenerated by pip-compile with Python 3.11
# by the following command:
#
-# pip-compile requirements.in
+# pip-compile --strip-extras requirements.in
#
alabaster==0.7.16
# via sphinx
babel==2.14.0
# via sphinx
-certifi==2024.2.2
+certifi==2024.7.4
# via requests
charset-normalizer==3.3.2
# via requests
@@ -40,7 +40,7 @@ pygments==2.17.2
# via sphinx
pyyaml==6.0.1
# via pybtex
-requests==2.32.0
+requests==2.32.2
# via sphinx
six==1.16.0
# via
From c377164582ee071ce8b3921e10b2d0f100141887 Mon Sep 17 00:00:00 2001
From: Christina Holt <56881914+christinaholtNOAA@users.noreply.github.com>
Date: Fri, 26 Jul 2024 11:26:41 -0600
Subject: [PATCH 23/39] [develop] Transition the var_defns bash file to YAML.
(#1098)
Use YAML as the configuration language at run time.
---------
Co-authored-by: Michael Lueken <63728921+MichaelLueken@users.noreply.github.com>
Co-authored-by: Michael Kavulich
Co-authored-by: michael.lueken
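
The new run-time pattern is visible throughout the J-Job diffs below:
each job sources only the YAML sections it needs from the generated
var_defns file. A minimal sketch of the idea, assuming python3 with
PyYAML is available; this is an illustration of the approach, not the
actual ush/bash_utils/source_yaml.sh:

# Illustrative helper: export the key/value pairs of one section of a
# YAML config into the current shell (naive quoting; sketch only).
source_yaml_sketch() {
  local yaml_fp="$1" sect="$2"
  eval "$(python3 - "${yaml_fp}" "${sect}" <<'EOF'
import sys, yaml
cfg = yaml.safe_load(open(sys.argv[1])) or {}
for key, val in (cfg.get(sys.argv[2]) or {}).items():
    print(f"export {key}='{val}'")
EOF
)"
}

# Usage mirrors the J-Jobs below:
for sect in user nco workflow ; do
  source_yaml_sketch "${GLOBAL_VAR_DEFNS_FP}" "${sect}"
done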
---
.cicd/scripts/wrapper_srw_ftest.sh | 3 +-
aqm_environment.yml | 2 +-
.../CustomizingTheWorkflow/ConfigWorkflow.rst | 6 +-
environment.yml | 2 +-
jobs/JREGIONAL_CHECK_POST_OUTPUT | 19 +-
jobs/JREGIONAL_GET_EXTRN_MDL_FILES | 64 +++--
jobs/JREGIONAL_GET_VERIF_OBS | 18 +-
jobs/JREGIONAL_INTEGRATION_TEST | 31 ++-
jobs/JREGIONAL_MAKE_GRID | 114 ++------
jobs/JREGIONAL_MAKE_ICS | 30 +-
jobs/JREGIONAL_MAKE_LBCS | 28 +-
jobs/JREGIONAL_MAKE_OROG | 26 +-
jobs/JREGIONAL_MAKE_SFC_CLIMO | 29 +-
jobs/JREGIONAL_PLOT_ALLVARS | 49 +++-
jobs/JREGIONAL_RUN_FCST | 25 +-
...EGIONAL_RUN_MET_GENENSPROD_OR_ENSEMBLESTAT | 15 +-
...JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX | 18 +-
...L_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX_ENSMEAN | 16 +-
...L_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX_ENSPROB | 16 +-
jobs/JREGIONAL_RUN_MET_PB2NC_OBS | 16 +-
jobs/JREGIONAL_RUN_MET_PCPCOMBINE | 17 +-
jobs/JREGIONAL_RUN_POST | 42 ++-
jobs/JREGIONAL_RUN_PRDGEN | 33 ++-
jobs/JSRW_AQM_ICS | 6 +-
jobs/JSRW_AQM_LBCS | 7 +-
jobs/JSRW_BIAS_CORRECTION_O3 | 7 +-
jobs/JSRW_BIAS_CORRECTION_PM25 | 7 +-
jobs/JSRW_FIRE_EMISSION | 6 +-
jobs/JSRW_NEXUS_EMISSION | 6 +-
jobs/JSRW_NEXUS_GFS_SFC | 8 +-
jobs/JSRW_NEXUS_POST_SPLIT | 6 +-
jobs/JSRW_POINT_SOURCE | 7 +-
jobs/JSRW_POST_STAT_O3 | 7 +-
jobs/JSRW_POST_STAT_PM25 | 7 +-
jobs/JSRW_PRE_POST_STAT | 6 +-
.../tasks/cheyenne/plot_allvars.local.lua | 2 +-
.../tasks/derecho/plot_allvars.local.lua | 2 +-
modulefiles/tasks/gaea/plot_allvars.local.lua | 2 +-
modulefiles/tasks/hera/plot_allvars.local.lua | 2 +-
.../tasks/hercules/plot_allvars.local.lua | 2 +-
modulefiles/tasks/jet/plot_allvars.local.lua | 2 +-
.../tasks/noaacloud/plot_allvars.local.lua | 7 +-
.../tasks/orion/plot_allvars.local.lua | 2 +-
parm/wflow/aqm_post.yaml | 10 +-
parm/wflow/aqm_prep.yaml | 16 +-
parm/wflow/coldstart.yaml | 10 +-
parm/wflow/default_workflow.yaml | 2 +-
parm/wflow/plot.yaml | 2 +-
parm/wflow/post.yaml | 2 +-
parm/wflow/prdgen.yaml | 2 +-
parm/wflow/prep.yaml | 6 +-
parm/wflow/test.yaml | 2 +-
parm/wflow/verify_det.yaml | 8 +-
parm/wflow/verify_ens.yaml | 16 +-
parm/wflow/verify_pre.yaml | 18 +-
scripts/exregional_check_post_output.sh | 44 ++-
scripts/exregional_get_extrn_mdl_files.sh | 72 ++++-
scripts/exregional_get_verif_obs.sh | 27 +-
scripts/exregional_integration_test.py | 6 +-
scripts/exregional_make_grid.sh | 111 +++++++-
scripts/exregional_make_ics.sh | 92 +++++-
scripts/exregional_make_lbcs.sh | 88 +++++-
scripts/exregional_make_orog.sh | 262 ++++++++++--------
scripts/exregional_make_sfc_climo.sh | 51 +++-
scripts/exregional_run_fcst.sh | 169 +++++++++--
...onal_run_met_genensprod_or_ensemblestat.sh | 6 +-
...gional_run_met_gridstat_or_pointstat_vx.sh | 10 +-
...un_met_gridstat_or_pointstat_vx_ensmean.sh | 6 +-
...un_met_gridstat_or_pointstat_vx_ensprob.sh | 6 +-
scripts/exregional_run_met_pb2nc_obs.sh | 6 +-
scripts/exregional_run_met_pcpcombine.sh | 10 +-
scripts/exregional_run_post.sh | 76 ++++-
scripts/exregional_run_prdgen.sh | 8 +-
scripts/exsrw_aqm_ics.sh | 5 +-
scripts/exsrw_aqm_lbcs.sh | 12 +-
scripts/exsrw_bias_correction_o3.sh | 8 +-
scripts/exsrw_bias_correction_pm25.sh | 8 +-
scripts/exsrw_fire_emission.sh | 5 +-
scripts/exsrw_nexus_emission.sh | 6 +-
scripts/exsrw_nexus_gfs_sfc.sh | 7 +-
scripts/exsrw_nexus_post_split.sh | 5 +-
scripts/exsrw_point_source.sh | 6 +-
scripts/exsrw_post_stat_o3.sh | 6 +-
scripts/exsrw_post_stat_pm25.sh | 6 +-
scripts/exsrw_pre_post_stat.sh | 6 +-
tests/WE2E/utils.py | 6 +-
tests/test_python/test_retrieve_data.py | 58 ----
ush/bash_utils/check_var_valid_value.sh | 2 +-
ush/bash_utils/create_symlink_to_file.sh | 1 +
ush/bash_utils/print_msg.sh | 2 +-
ush/bash_utils/source_config.sh | 53 ----
ush/bash_utils/source_yaml.sh | 36 +++
ush/config_defaults.yaml | 16 +-
ush/create_aqm_rc_file.py | 4 +-
ush/create_diag_table_file.py | 4 +-
ush/create_model_configure_file.py | 4 +-
ush/create_ufs_configure_file.py | 4 +-
ush/generate_FV3LAM_wflow.py | 26 +-
ush/job_preamble.sh | 7 +-
ush/launch_FV3LAM_wflow.sh | 50 +---
ush/link_fix.py | 4 +-
ush/load_modules_run_task.sh | 129 +++++----
ush/machine/hera.yaml | 4 +-
ush/set_fv3nml_ens_stoch_seeds.py | 8 +-
ush/set_fv3nml_sfc_climo_filenames.py | 8 +-
ush/setup.py | 11 +-
ush/source_util_funcs.sh | 6 +-
ush/update_input_nml.py | 4 +-
ush/wrappers/run_fcst.sh | 7 +-
ush/wrappers/run_get_ics.sh | 7 +-
ush/wrappers/run_get_lbcs.sh | 7 +-
ush/wrappers/run_make_grid.sh | 7 +-
ush/wrappers/run_make_ics.sh | 7 +-
ush/wrappers/run_make_lbcs.sh | 7 +-
ush/wrappers/run_make_orog.sh | 7 +-
ush/wrappers/run_make_sfc_climo.sh | 7 +-
ush/wrappers/run_post.sh | 7 +-
117 files changed, 1748 insertions(+), 706 deletions(-)
delete mode 100644 ush/bash_utils/source_config.sh
create mode 100644 ush/bash_utils/source_yaml.sh
mode change 100755 => 100644 ush/launch_FV3LAM_wflow.sh
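
Several hunks below also wrap boolean tests in a boolify helper so that
YAML-style booleans (True/true/yes) compare consistently against
"TRUE". A hedged sketch of such a helper (illustrative; the real one
lives in the SRW bash utilities):

# Normalize common boolean spellings to "TRUE"/"FALSE" (sketch only).
boolify() {
  case "${1,,}" in    # ${1,,} lowercases the argument (bash 4+)
    true|yes)  echo "TRUE" ;;
    false|no)  echo "FALSE" ;;
    *)         echo "$1" ;;   # pass anything else through unchanged
  esac
}

# Makes tests like this tolerant of "True", "TRUE", or "yes":
if [ "$(boolify "${SUB_HOURLY_POST}")" = "TRUE" ]; then
  echo "sub-hourly post is enabled"
fi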
diff --git a/.cicd/scripts/wrapper_srw_ftest.sh b/.cicd/scripts/wrapper_srw_ftest.sh
index 950ceb7a34..ee26edadaf 100755
--- a/.cicd/scripts/wrapper_srw_ftest.sh
+++ b/.cicd/scripts/wrapper_srw_ftest.sh
@@ -24,7 +24,8 @@ fi
if [[ "${SRW_PLATFORM}" == gaea ]]; then
sed -i '15i #SBATCH --clusters=c5' ${WORKSPACE}/${SRW_PLATFORM}/.cicd/scripts/${workflow_cmd}_srw_ftest.sh
sed -i 's|qos=batch|qos=normal|g' ${WORKSPACE}/${SRW_PLATFORM}/.cicd/scripts/${workflow_cmd}_srw_ftest.sh
- sed -i 's|${JOBSdir}/JREGIONAL_RUN_POST|$USHdir/load_modules_run_task.sh "run_post" ${JOBSdir}/JREGIONAL_RUN_POST|g' ${WORKSPACE}/${SRW_PLATFORM}/ush/wrappers/run_post.sh
+ sed -i 's|00:30:00|00:45:00|g' ${WORKSPACE}/${SRW_PLATFORM}/.cicd/scripts/${workflow_cmd}_srw_ftest.sh
+ sed -i 's|${JOBSdir}/JREGIONAL_RUN_POST|$USHdir/load_modules_run_task.sh "gaea" "run_post" ${JOBSdir}/JREGIONAL_RUN_POST|g' ${WORKSPACE}/${SRW_PLATFORM}/ush/wrappers/run_post.sh
fi
if [[ "${SRW_PLATFORM}" == hera ]]; then
diff --git a/aqm_environment.yml b/aqm_environment.yml
index afd8a7b634..11bf9e57e3 100644
--- a/aqm_environment.yml
+++ b/aqm_environment.yml
@@ -9,5 +9,5 @@ dependencies:
- pylint=2.17*
- pytest=7.2*
- scipy=1.10.*
- - uwtools=2.1*
+ - uwtools=2.3*
- xarray=2022.11.*
diff --git a/doc/UsersGuide/CustomizingTheWorkflow/ConfigWorkflow.rst b/doc/UsersGuide/CustomizingTheWorkflow/ConfigWorkflow.rst
index 52cce90c2c..5161268980 100644
--- a/doc/UsersGuide/CustomizingTheWorkflow/ConfigWorkflow.rst
+++ b/doc/UsersGuide/CustomizingTheWorkflow/ConfigWorkflow.rst
@@ -293,7 +293,7 @@ WORKFLOW Configuration Parameters
If non-default parameters are selected for the variables in this section, they should be added to the ``workflow:`` section of the ``config.yaml`` file.
-``WORKFLOW_ID``: (Default: ``!nowtimestamp ''``)
+``WORKFLOW_ID``: (Default: ``''``)
Unique ID for the workflow run that will be set in ``setup.py``.
``RELATIVE_LINK_FLAG``: (Default: "--relative")
@@ -458,8 +458,8 @@ This section contains files and paths to files that are staged in the experiment
``WFLOW_XML_FN``: (Default: "FV3LAM_wflow.xml")
Name of the Rocoto workflow XML file that the experiment generation script creates. This file defines the workflow for the experiment.
-``GLOBAL_VAR_DEFNS_FN``: (Default: "var_defns.sh")
- Name of the file (a shell script) containing definitions of the primary and secondary experiment variables (parameters). This file is sourced by many scripts (e.g., the J-job scripts corresponding to each workflow task) in order to make all the experiment variables available in those scripts. The primary variables are defined in the default configuration file (``config_defaults.yaml``) and in the user configuration file (``config.yaml``). The secondary experiment variables are generated by the experiment generation script.
+``GLOBAL_VAR_DEFNS_FN``: (Default: "var_defns.yaml")
+ Name of the auto-generated experiment configuration file. It contains the primary experiment variables defined in the default configuration file (``config_defaults.yaml``) and in the user-specified configuration, as well as secondary experiment variables generated by the experiment generation script from machine files and other settings. This file is the primary source of configuration information used by the scripts at run time.
``ROCOTO_YAML_FN``: (Default: "rocoto_defns.yaml")
Name of the YAML file containing the YAML workflow definition from which the Rocoto XML file is created.
diff --git a/environment.yml b/environment.yml
index e2dd6b8300..a735213198 100644
--- a/environment.yml
+++ b/environment.yml
@@ -5,4 +5,4 @@ channels:
dependencies:
- pylint=2.17*
- pytest=7.2*
- - uwtools=2.2*
+ - uwtools=2.3*
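Both environment files are consumed in the standard conda way; a quick
sketch (the environment name comes from each file's name: field, which
is outside this hunk, so "srw_app" below is an assumption):

# Create or refresh the conda environment described by environment.yml:
conda env create -f environment.yml           # first-time creation
conda env update -f environment.yml --prune   # after edits like the one above
conda activate srw_app                        # assumed environment name
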
diff --git a/jobs/JREGIONAL_CHECK_POST_OUTPUT b/jobs/JREGIONAL_CHECK_POST_OUTPUT
index f55f730cf4..358b1fad72 100755
--- a/jobs/JREGIONAL_CHECK_POST_OUTPUT
+++ b/jobs/JREGIONAL_CHECK_POST_OUTPUT
@@ -3,7 +3,22 @@
#
#-----------------------------------------------------------------------
#
+# The J-Job script for checking the post output.
#
+# Run-time environment variables:
+#
+# CDATE
+# ENSMEM_INDX
+# GLOBAL_VAR_DEFNS_FP
+#
+# Experiment variables
+#
+# user:
+# SCRIPTSdir
+# USHdir
+#
+# workflow:
+# EXPTDIR
#
#-----------------------------------------------------------------------
#
@@ -16,7 +31,9 @@
#-----------------------------------------------------------------------
#
. $USHdir/source_util_funcs.sh
-source_config_for_task "" ${GLOBAL_VAR_DEFNS_FP}
+for sect in user nco workflow ; do
+ source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
. $USHdir/job_preamble.sh
#
#-----------------------------------------------------------------------
diff --git a/jobs/JREGIONAL_GET_EXTRN_MDL_FILES b/jobs/JREGIONAL_GET_EXTRN_MDL_FILES
index 80366f0ddc..fbd582201a 100755
--- a/jobs/JREGIONAL_GET_EXTRN_MDL_FILES
+++ b/jobs/JREGIONAL_GET_EXTRN_MDL_FILES
@@ -3,20 +3,48 @@
#
#-----------------------------------------------------------------------
#
-# This script gets either from the system directory or from mass store
-# (HPSS) the files generated by the external model (specified by the
-# variable EXTRN_MDL_NAME) for either the initial conditions (ICs) or the
-# lateral boundary conditions (LBCs). Which of these we are considering
-# depends on the value of the variable ICS_OR_LBCS, which should be defined
-# in the environment (when calling this script from a rocoto workflow,
-# the workflow should define this variable, e.g. using rocoto's
-# tag).
-#
-# Note that when we refer to ICs, we are referring to not only the atmospheric
-# fields at the initial time but also various surface fields (which are
-# for now time-independent) as well as the 0-th forecast hour LBCs. Also,
-# when we refer to LBCs, we are referring to the LBCs excluding the one
-# at the 0-th hour.
+# The J-Job script for getting the model files that will be used for
+# either initial conditions or lateral boundary conditions for the
+# experiment.
+#
+# Run-time environment variables:
+#
+# CDATE
+# COMIN
+# cyc
+# DATA
+# EXTRN_MDL_STAGING_DIR
+# GLOBAL_VAR_DEFNS_FP
+# ICS_OR_LBCS
+# PDY
+#
+# Experiment variables
+#
+# user:
+# SCRIPTSdir
+# USHdir
+#
+# workflow:
+# EXPTDIR
+#
+# task_get_extrn_lbcs:
+# EXTRN_MDL_FILES_LBCS
+# EXTRN_MDL_LBCS_OFFSET_HRS
+# EXTRN_MDL_NAME_LBCS
+# EXTRN_MDL_SOURCE_BASEDIR_LBCS
+# EXTRN_MDL_SYSBASEDIR_LBCS
+# FV3GFS_FILE_FMT_LBCS
+# LBC_SPEC_INTVL_HRS
+# USE_USER_STAGED_EXTRN_FILES
+#
+# task_get_extrn_ics:
+# EXTRN_MDL_FILES_ICS
+# EXTRN_MDL_ICS_OFFSET_HRS
+# EXTRN_MDL_NAME_ICS
+# EXTRN_MDL_SOURCE_BASEDIR_ICS
+# EXTRN_MDL_SYSBASEDIR_ICS
+# FV3GFS_FILE_FMT_ICS
+# USE_USER_STAGED_EXTRN_FILES
#
#-----------------------------------------------------------------------
#
@@ -29,8 +57,12 @@
#-----------------------------------------------------------------------
#
. $USHdir/source_util_funcs.sh
-source_config_for_task "task_get_extrn_ics|task_get_extrn_lbcs" ${GLOBAL_VAR_DEFNS_FP}
-. $USHdir/job_preamble.sh "TRUE"
+for sect in user nco workflow task_get_extrn_lbcs task_get_extrn_ics ; do
+ source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
+. $USHdir/job_preamble.sh
+
+
#
#-----------------------------------------------------------------------
#
diff --git a/jobs/JREGIONAL_GET_VERIF_OBS b/jobs/JREGIONAL_GET_VERIF_OBS
index 3820a739db..7c083e96c6 100755
--- a/jobs/JREGIONAL_GET_VERIF_OBS
+++ b/jobs/JREGIONAL_GET_VERIF_OBS
@@ -3,7 +3,19 @@
#
#-----------------------------------------------------------------------
#
-# This script checks, pulls, and stages observation data for model verification.
+# The J-Job script that checks, pulls, and stages observation data for
+# model verification.
+#
+# Run-time environment variables:
+#
+# CDATE
+# GLOBAL_VAR_DEFNS_FP
+#
+# Experiment variables
+#
+# user:
+# SCRIPTSdir
+# USHdir
#
#-----------------------------------------------------------------------
#
@@ -16,7 +28,9 @@
#-----------------------------------------------------------------------
#
. $USHdir/source_util_funcs.sh
-source_config_for_task " " ${GLOBAL_VAR_DEFNS_FP}
+for sect in user nco workflow ; do
+ source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
. $USHdir/job_preamble.sh
#
#-----------------------------------------------------------------------
diff --git a/jobs/JREGIONAL_INTEGRATION_TEST b/jobs/JREGIONAL_INTEGRATION_TEST
index cbb93e86cf..983981ecf3 100755
--- a/jobs/JREGIONAL_INTEGRATION_TEST
+++ b/jobs/JREGIONAL_INTEGRATION_TEST
@@ -1,5 +1,31 @@
#!/bin/bash
+
+#
+#-----------------------------------------------------------------------
+#
+# This J-Job script runs a set of tests at the end of WE2E tests.
+#
+# Run-time environment variables:
+#
+# GLOBAL_VAR_DEFNS_FP
+# CDATE
+# FCST_DIR
+# SLASH_ENSMEM_SUBDIR
+#
+# Experiment variables
+#
+# user:
+# RUN_ENVIR
+# SCRIPTSdir
+# USHdir
+#
+# workflow:
+# FCST_LEN_HRS
+#
+#-----------------------------------------------------------------------
+#
+
#
#-----------------------------------------------------------------------
#
@@ -8,8 +34,11 @@
#-----------------------------------------------------------------------
#
. $USHdir/source_util_funcs.sh
-source_config_for_task "task_integration_test|task_run_fcst" ${GLOBAL_VAR_DEFNS_FP}
+for sect in user nco workflow ; do
+ source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
. $USHdir/job_preamble.sh
+
#
#-----------------------------------------------------------------------
#
diff --git a/jobs/JREGIONAL_MAKE_GRID b/jobs/JREGIONAL_MAKE_GRID
index 8d65540d1c..01484041e9 100755
--- a/jobs/JREGIONAL_MAKE_GRID
+++ b/jobs/JREGIONAL_MAKE_GRID
@@ -3,97 +3,25 @@
#
#-----------------------------------------------------------------------
#
-# This script generates grid and orography files in NetCDF format that
-# are required as inputs for running the FV3-LAM model (i.e. the FV3 mo-
-# del on a regional domain). It in turn calls three other scripts whose
-# file names are specified in the variables grid_gen_scr, orog_gen_scr,
-# and orog_fltr_scr and then calls the executable defined in the varia-
-# ble shave_exec. These scripts/executable perform the following tasks:
-#
-# 1) grid_gen_scr:
-#
-# This script generates grid files that will be used by subsequent
-# preprocessing steps. It places its output in the directory defined
-# by GRID_DIR. Note that:
-#
-# a) This script creates grid files for each of the 7 tiles of the
-# cubed sphere grid (where tiles 1 through 6 cover the globe, and
-# tile 7 is the regional grid located somewhere within tile 6)
-# even though the forecast will be performed only on tile 7.
-#
-# b) The tile 7 grid file that this script creates includes a halo,
-# i.e. a layer of cells beyond the boundary of tile 7). The width
-# of this halo (i.e. the number of cells in the halo in the direc-
-# tion perpendicular to the boundary of the tile) must be made
-# large enough such that the "shave" steps later below (which take
-# this file as input and generate grid files with thinner halos)
-# have a wide enough starting halo to work with. More specifical-
-# ly, the FV3-LAM model needs as inputs two grid files: one with a
-# halo that is 3 cells and another with a halo that is 4 cells
-# wide. Thus, the halo in the grid file that the grid_gen_scr
-# script generates must be greater than 4 since otherwise, the
-# shave steps would shave off cells from within the interior of
-# tile 7. We will let NHW denote the width of the halo in the
-# grid file generated by grid_gen_scr. The "n" in this variable
-# name denotes number of cells, the "h" is used to indicate that
-# it refers to a halo region, the "w" is used to indicate that it
-# refers to a wide halo (i.e. wider than the 3-cell and 4-cell ha-
-# los that the FV3-LAM model requires as inputs, and the "T7" is
-# used to indicate that the cell count is on tile 7.
-#
-# 2) orog_gen_scr:
-#
-# This script generates the orography file. It places its output in
-# the directory defined by OROG_DIR. Note that:
-#
-# a) This script generates an orography file only on tile 7.
-#
-# b) This orography file contains a halo of the same width (NHW)
-# as the grid file for tile 7 generated by the grid_gen_scr script
-# in the previous step.
-#
-# 3) orog_fltr_scr:
-#
-# This script generates a filtered version of the orography file ge-
-# nerated by the script orog_gen_scr. This script places its output
-# in the temporary directory defined in WORKDIR_FLTR. Note that:
-#
-# a) The filtered orography file generated by this script contains a
-# halo of the same width (NHW) as the (unfiltered) orography file
-# generated by script orog_gen_scr (and the grid file generated by
-# grid_gen_scr).
-#
-# b) In analogy with the input grid files, the FV3-LAM model needs as
-# input two (filtered) orography files -- one with no halo cells
-# and another with 3. These are obtained later below by "shaving"
-# off layers of halo cells from the (filtered) orography file ge-
-# nerated in this step.
-#
-# 4) shave_exec:
-#
-# This "shave" executable is called 4 times to generate 4 files from
-# the tile 7 grid file generated by grid_gen_scr and the tile 7 fil-
-# tered orography file generated by orog_fltr_scr (both of which have
-# a halo of width NHW cells). The 4 output files are placed in the
-# temporary directory defined in WORKDIR_SHVE. More specifically:
-#
-# a) shave_exec is called to shave the halo in the tile 7 grid file
-# generated by grid_gen_scr down to a width of 3 cells and store
-# the result in a new grid file in WORKDIR_SHVE.
-#
-# b) shave_exec is called to shave the halo in the tile 7 grid file
-# generated by grid_gen_scr down to a width of 4 cells and store
-# the result in a new grid file in WORKDIR_SHVE.
-#
-# c) shave_exec is called to shave the halo in the tile 7 filtered
-# orography file generated by orog_fltr_scr down to a width of 0
-# cells (i.e. no halo) and store the result in a new filtered oro-
-# graphy file in WORKDIR_SHVE.
-#
-# d) shave_exec is called to shave the halo in the tile 7 filtered
-# orography file generated by orog_fltr_scr down to a width of 4
-# cells and store the result in a new filtered orography file in
-# WORKDIR_SHVE.
+# The J-Job that generates input NetCDF grid files for running the
+# regional configuration of FV3
+#
+# Run-time environment variables:
+#
+# GLOBAL_VAR_DEFNS_FP
+# DATA
+#
+# Experiment variables
+#
+# user:
+# USHdir
+# SCRIPTSdir
+#
+# workflow:
+# PREEXISTING_DIR_METHOD
+#
+# task_make_grid:
+# GRID_DIR
#
#-----------------------------------------------------------------------
#
@@ -106,7 +34,9 @@
#-----------------------------------------------------------------------
#
. $USHdir/source_util_funcs.sh
-source_config_for_task "task_make_grid" ${GLOBAL_VAR_DEFNS_FP}
+for sect in user nco workflow task_make_grid ; do
+ source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
. $USHdir/job_preamble.sh
#
#-----------------------------------------------------------------------
diff --git a/jobs/JREGIONAL_MAKE_ICS b/jobs/JREGIONAL_MAKE_ICS
index c4fb429f1b..10a3b36fb7 100755
--- a/jobs/JREGIONAL_MAKE_ICS
+++ b/jobs/JREGIONAL_MAKE_ICS
@@ -1,5 +1,31 @@
#!/usr/bin/env bash
+#
+#-----------------------------------------------------------------------
+#
+# The J-job to run chgres_cube for preparing initial conditions for the
+# FV3 forecast
+#
+# Run-time environment variables:
+#
+# COMIN
+# DATA
+# GLOBAL_VAR_DEFNS_FP
+# SLASH_ENSMEM_SUBDIR
+#
+# Experiment variables
+#
+# user:
+# RUN_ENVIR
+# SCRIPTSdir
+# USHdir
+#
+# workflow:
+# EXPTDIR
+#
+#-----------------------------------------------------------------------
+#
+
#
#-----------------------------------------------------------------------
#
@@ -8,7 +34,9 @@
#-----------------------------------------------------------------------
#
. $USHdir/source_util_funcs.sh
-source_config_for_task "task_make_ics" ${GLOBAL_VAR_DEFNS_FP}
+for sect in user nco workflow ; do
+ source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
. $USHdir/job_preamble.sh
#
#-----------------------------------------------------------------------
diff --git a/jobs/JREGIONAL_MAKE_LBCS b/jobs/JREGIONAL_MAKE_LBCS
index 81e2578fd4..91d9d3edbe 100755
--- a/jobs/JREGIONAL_MAKE_LBCS
+++ b/jobs/JREGIONAL_MAKE_LBCS
@@ -1,5 +1,29 @@
#!/usr/bin/env bash
+#
+#-----------------------------------------------------------------------
+#
+# The J-job to run chgres_cube for preparing lateral boundary conditions
+# for the FV3 forecast
+#
+# Run-time environment variables:
+#
+# CDATE
+# COMIN
+# DATA
+# GLOBAL_VAR_DEFNS_FP
+# SLASH_ENSMEM_SUBDIR
+#
+# Experiment variables
+#
+# user:
+# RUN_ENVIR
+# SCRIPTSdir
+# USHdir
+#
+#-----------------------------------------------------------------------
+#
+
#
#-----------------------------------------------------------------------
#
@@ -8,7 +32,9 @@
#-----------------------------------------------------------------------
#
. $USHdir/source_util_funcs.sh
-source_config_for_task "task_make_lbcs" ${GLOBAL_VAR_DEFNS_FP}
+for sect in user nco workflow ; do
+ source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
. $USHdir/job_preamble.sh
#
#-----------------------------------------------------------------------
diff --git a/jobs/JREGIONAL_MAKE_OROG b/jobs/JREGIONAL_MAKE_OROG
index b6f674e5ee..28e2f965a5 100755
--- a/jobs/JREGIONAL_MAKE_OROG
+++ b/jobs/JREGIONAL_MAKE_OROG
@@ -1,5 +1,27 @@
#!/usr/bin/env bash
+#
+#-----------------------------------------------------------------------
+#
+# The J-Job that generates input NetCDF orography files for running the
+# regional configuration of FV3
+#
+# Run-time environment variables:
+#
+# GLOBAL_VAR_DEFNS_FP
+#
+# Experiment variables
+#
+# user:
+# SCRIPTSdir
+# USHdir
+#
+# task_make_orog:
+# OROG_DIR
+#
+#-----------------------------------------------------------------------
+#
+
#
#-----------------------------------------------------------------------
#
@@ -8,7 +30,9 @@
#-----------------------------------------------------------------------
#
. $USHdir/source_util_funcs.sh
-source_config_for_task "task_make_orog" ${GLOBAL_VAR_DEFNS_FP}
+for sect in user nco workflow task_make_orog ; do
+ source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
. $USHdir/job_preamble.sh
#
#-----------------------------------------------------------------------
diff --git a/jobs/JREGIONAL_MAKE_SFC_CLIMO b/jobs/JREGIONAL_MAKE_SFC_CLIMO
index 7cbd0cc23e..30b2d2c346 100755
--- a/jobs/JREGIONAL_MAKE_SFC_CLIMO
+++ b/jobs/JREGIONAL_MAKE_SFC_CLIMO
@@ -1,5 +1,30 @@
#!/usr/bin/env bash
+#
+#-----------------------------------------------------------------------
+#
+# The J-job to run sfc_climo_gen for preparing surface climatology files
+# for the FV3 forecast
+#
+# Run-time environment variables:
+#
+# GLOBAL_VAR_DEFNS_FP
+#
+# Experiment variables
+#
+# user:
+# SCRIPTSdir
+# USHdir
+#
+# workflow:
+# PREEXISTING_DIR_METHOD
+#
+# task_make_sfc_climo:
+# SFC_CLIMO_DIR
+#
+#-----------------------------------------------------------------------
+#
+
#
#-----------------------------------------------------------------------
#
@@ -8,7 +33,9 @@
#-----------------------------------------------------------------------
#
. $USHdir/source_util_funcs.sh
-source_config_for_task "task_make_sfc_climo" ${GLOBAL_VAR_DEFNS_FP}
+for sect in user nco workflow task_make_sfc_climo ; do
+ source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
. $USHdir/job_preamble.sh
#
#-----------------------------------------------------------------------
diff --git a/jobs/JREGIONAL_PLOT_ALLVARS b/jobs/JREGIONAL_PLOT_ALLVARS
index 5e59abd93d..be5ee10f82 100755
--- a/jobs/JREGIONAL_PLOT_ALLVARS
+++ b/jobs/JREGIONAL_PLOT_ALLVARS
@@ -1,5 +1,45 @@
#!/usr/bin/env bash
+#
+#-----------------------------------------------------------------------
+#
+# The J-job to plot the forecast output
+#
+# Run-time environment variables:
+#
+# CDATE
+# COMOUT
+# GLOBAL_VAR_DEFNS_FP
+# SLASH_ENSMEM_SUBDIR
+#
+# Experiment variables
+#
+# user:
+# RUN_ENVIR
+# SCRIPTSdir
+# USHdir
+#
+# platform:
+# FIXshp
+#
+# workflow:
+# EXPT_SUBDIR
+# PREEXISTING_DIR_METHOD
+# PREDEF_GRID_NAME
+#
+# task_plot_allvars:
+# COMOUT_REF
+# PLOT_DOMAINS
+# PLOT_FCST_END
+# PLOT_FCST_INC
+# PLOT_FCST_START
+#
+# task_run_fcst:
+# FCST_LEN_HRS
+#
+#-----------------------------------------------------------------------
+#
+
#
#-----------------------------------------------------------------------
#
@@ -8,7 +48,9 @@
#-----------------------------------------------------------------------
#
. $USHdir/source_util_funcs.sh
-source_config_for_task "task_plot_allvars|task_run_fcst" ${GLOBAL_VAR_DEFNS_FP}
+for sect in user nco platform workflow task_plot_allvars task_run_fcst ; do
+ source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
. $USHdir/job_preamble.sh
#
#-----------------------------------------------------------------------
@@ -67,6 +109,11 @@ COMOUT_REF=$(eval echo ${COMOUT_REF})
#-----------------------------------------------------------------------
#
+if [ -n "${SRW_GRAPHICS_ENV:-}" ] ; then
+ set +u
+ conda activate ${SRW_GRAPHICS_ENV}
+ set -u
+fi
# plot all variables
$SCRIPTSdir/exregional_plot_allvars.py \
--cycle ${CDATE} \
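The guard added above pairs with the modulefile renames later in this
patch: each plot_allvars.local.lua now sets SRW_GRAPHICS_ENV instead of
SRW_ENV, and the J-job activates that conda environment only when the
variable is present. A sketch of the interaction (illustrative):

# In the task modulefile (Lua): setenv("SRW_GRAPHICS_ENV", "srw_graphics")
# Once the module is loaded, the J-job's guard sees an ordinary env var:
if [ -n "${SRW_GRAPHICS_ENV:-}" ] ; then
  set +u                                # conda activate may touch unset vars
  conda activate "${SRW_GRAPHICS_ENV}"
  set -u
fi
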
diff --git a/jobs/JREGIONAL_RUN_FCST b/jobs/JREGIONAL_RUN_FCST
index 45f826c0d7..2542ab32f8 100755
--- a/jobs/JREGIONAL_RUN_FCST
+++ b/jobs/JREGIONAL_RUN_FCST
@@ -3,9 +3,24 @@
#
#-----------------------------------------------------------------------
#
-# This script copies files from various directories into the experiment
-# directory, creates links to some of them, and modifies others (e.g.
-# templates) to customize them for the current experiment setup.
+# The J-Job that runs the forecast
+#
+# Run-time environment variables:
+#
+# CDATE
+# COMIN
+# DATA
+# GLOBAL_VAR_DEFNS_FP
+# SLASH_ENSMEM_SUBDIR
+#
+# Experiment variables
+#
+# user:
+# SCRIPTSdir
+# USHdir
+#
+# workflow:
+# RUN_ENVIR
#
#-----------------------------------------------------------------------
#
@@ -18,7 +33,9 @@
#-----------------------------------------------------------------------
#
. $USHdir/source_util_funcs.sh
-source_config_for_task "task_run_fcst" ${GLOBAL_VAR_DEFNS_FP}
+for sect in user nco workflow ; do
+ source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
. $USHdir/job_preamble.sh "TRUE"
#
#-----------------------------------------------------------------------
diff --git a/jobs/JREGIONAL_RUN_MET_GENENSPROD_OR_ENSEMBLESTAT b/jobs/JREGIONAL_RUN_MET_GENENSPROD_OR_ENSEMBLESTAT
index 707697b5ab..c7aee12df1 100755
--- a/jobs/JREGIONAL_RUN_MET_GENENSPROD_OR_ENSEMBLESTAT
+++ b/jobs/JREGIONAL_RUN_MET_GENENSPROD_OR_ENSEMBLESTAT
@@ -3,7 +3,18 @@
#
#-----------------------------------------------------------------------
#
+# The J-Job that runs either METplus's gen_ens_prod tool or its
+# ensemble_stat tool for ensemble verification.
#
+# Run-time environment variables:
+#
+# GLOBAL_VAR_DEFNS_FP
+#
+# Experiment variables
+#
+# user:
+# SCRIPTSdir
+# USHdir
#
#-----------------------------------------------------------------------
#
@@ -16,7 +27,9 @@
#-----------------------------------------------------------------------
#
. $USHdir/source_util_funcs.sh
-source_config_for_task "task_run_vx_ensgrid|task_run_vx_enspoint" ${GLOBAL_VAR_DEFNS_FP}
+for sect in user nco workflow ; do
+ source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
. $USHdir/job_preamble.sh
#
#-----------------------------------------------------------------------
diff --git a/jobs/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX b/jobs/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX
index 0301e9946a..e1207e0a81 100755
--- a/jobs/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX
+++ b/jobs/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX
@@ -3,8 +3,18 @@
#
#-----------------------------------------------------------------------
#
-# This script runs the METplus GridStat or PointStat tool for deterministic
-# verification.
+# This script runs the METplus GridStat or PointStat tool for
+# deterministic verification.
+#
+# Run-time environment variables:
+#
+# GLOBAL_VAR_DEFNS_FP
+#
+# Experiment variables
+#
+# user:
+# SCRIPTSdir
+# USHdir
#
#-----------------------------------------------------------------------
#
@@ -17,7 +27,9 @@
#-----------------------------------------------------------------------
#
. $USHdir/source_util_funcs.sh
-source_config_for_task "task_run_vx_gridstat" ${GLOBAL_VAR_DEFNS_FP}
+for sect in user nco workflow ; do
+ source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
. $USHdir/job_preamble.sh
#
#-----------------------------------------------------------------------
diff --git a/jobs/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX_ENSMEAN b/jobs/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX_ENSMEAN
index ab08320f33..29b22502a4 100755
--- a/jobs/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX_ENSMEAN
+++ b/jobs/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX_ENSMEAN
@@ -3,7 +3,19 @@
#
#-----------------------------------------------------------------------
#
+# The J-Job that runs MET/METplus's GridStat or PointStat tool to
+# perform verification on the ensemble mean of a specified field (or
+# group of fields).
#
+# Run-time environment variables:
+#
+# GLOBAL_VAR_DEFNS_FP
+#
+# Experiment variables
+#
+# user:
+# SCRIPTSdir
+# USHdir
#
#-----------------------------------------------------------------------
#
@@ -16,7 +28,9 @@
#-----------------------------------------------------------------------
#
. $USHdir/source_util_funcs.sh
-source_config_for_task "task_run_vx_ensgrid_mean" ${GLOBAL_VAR_DEFNS_FP}
+for sect in user nco workflow ; do
+ source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
. $USHdir/job_preamble.sh
#
#-----------------------------------------------------------------------
diff --git a/jobs/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX_ENSPROB b/jobs/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX_ENSPROB
index 7da98212ac..731cf575a5 100755
--- a/jobs/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX_ENSPROB
+++ b/jobs/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX_ENSPROB
@@ -3,7 +3,19 @@
#
#-----------------------------------------------------------------------
#
+# The J-Job that runs METplus's GridStat or PointStat tool to perform
+# verification on the ensemble frequencies/probabilities of a specified
+# field (or group of fields).
#
+# Run-time environment variables:
+#
+# GLOBAL_VAR_DEFNS_FP
+#
+# Experiment variables
+#
+# user:
+# SCRIPTSdir
+# USHdir
#
#-----------------------------------------------------------------------
#
@@ -16,7 +28,9 @@
#-----------------------------------------------------------------------
#
. $USHdir/source_util_funcs.sh
-source_config_for_task "task_run_vx_ensgrid_prob" ${GLOBAL_VAR_DEFNS_FP}
+for sect in user nco workflow ; do
+ source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
. $USHdir/job_preamble.sh
#
#-----------------------------------------------------------------------
diff --git a/jobs/JREGIONAL_RUN_MET_PB2NC_OBS b/jobs/JREGIONAL_RUN_MET_PB2NC_OBS
index 2767ae1146..89c9bb73f4 100755
--- a/jobs/JREGIONAL_RUN_MET_PB2NC_OBS
+++ b/jobs/JREGIONAL_RUN_MET_PB2NC_OBS
@@ -4,6 +4,18 @@
#-----------------------------------------------------------------------
#
#
+# The J-Job that runs METplus's Pb2nc tool to convert observation files
+# to NetCDF, by initialization time, for all forecast hours.
+#
+# Run-time environment variables:
+#
+# GLOBAL_VAR_DEFNS_FP
+#
+# Experiment variables
+#
+# user:
+# SCRIPTSdir
+# USHdir
#
#-----------------------------------------------------------------------
#
@@ -16,7 +28,9 @@
#-----------------------------------------------------------------------
#
. $USHdir/source_util_funcs.sh
-source_config_for_task "task_run_met_pb2nc_obs" ${GLOBAL_VAR_DEFNS_FP}
+for sect in user nco workflow ; do
+ source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
. $USHdir/job_preamble.sh
#
#-----------------------------------------------------------------------
diff --git a/jobs/JREGIONAL_RUN_MET_PCPCOMBINE b/jobs/JREGIONAL_RUN_MET_PCPCOMBINE
index 7364ed96c9..8ac29887e8 100755
--- a/jobs/JREGIONAL_RUN_MET_PCPCOMBINE
+++ b/jobs/JREGIONAL_RUN_MET_PCPCOMBINE
@@ -3,7 +3,20 @@
#
#-----------------------------------------------------------------------
#
+# The J-job that runs the MET/METplus PcpCombine tool on hourly
+# accumulated precipitation (APCP) data to obtain APCP for multi-hour
+# accumulation periods. The data can be from CCPA observations or a
+# forecast.
#
+# Run-time environment variables:
+#
+# GLOBAL_VAR_DEFNS_FP
+#
+# Experiment variables
+#
+# user:
+# SCRIPTSdir
+# USHdir
#
#-----------------------------------------------------------------------
#
@@ -16,7 +29,9 @@
#-----------------------------------------------------------------------
#
. $USHdir/source_util_funcs.sh
-source_config_for_task "task_run_met_pcpcombine" ${GLOBAL_VAR_DEFNS_FP}
+for sect in user nco workflow ; do
+ source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
. $USHdir/job_preamble.sh
#
#-----------------------------------------------------------------------
diff --git a/jobs/JREGIONAL_RUN_POST b/jobs/JREGIONAL_RUN_POST
index 692b3ae65d..58c469fc6d 100755
--- a/jobs/JREGIONAL_RUN_POST
+++ b/jobs/JREGIONAL_RUN_POST
@@ -3,8 +3,38 @@
#
#-----------------------------------------------------------------------
#
-# This script runs the post-processor (UPP) on the NetCDF output files
-# of the write component of the FV3-LAM model.
+# The J-Job that runs the Unified Post-processor (UPP) on the NetCDF
+# output from FV3.
+#
+# Run-time environment variables:
+#
+# COMIN
+# COMOUT
+# cyc
+# DATA
+# DATAROOT
+# GLOBAL_VAR_DEFNS_FP
+# PDY
+# SLASH_ENSMEM_SUBDIR
+#
+# Experiment variables
+#
+# user:
+# SCRIPTSdir
+# USHdir
+#
+# platform:
+# WORKFLOW_MANAGER
+#
+# workflow:
+# DATE_FIRST_CYCL
+# FCST_LEN_CYCL
+# FCST_LEN_HRS
+# INCR_CYCL_FREQ
+# RUN_ENVIR
+#
+# task_run_post:
+# SUB_HOURLY_POST
#
#-----------------------------------------------------------------------
#
@@ -17,7 +47,9 @@
#-----------------------------------------------------------------------
#
. $USHdir/source_util_funcs.sh
-source_config_for_task "task_run_post|task_run_fcst" ${GLOBAL_VAR_DEFNS_FP}
+for sect in user nco platform workflow task_run_post ; do
+ source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
. $USHdir/job_preamble.sh
#
#-----------------------------------------------------------------------
@@ -60,7 +92,7 @@ on the output files corresponding to a specified forecast hour.
# minutes (fmn) are set to "00". This is necessary in order to pass
# "fmn" into the post ex-script for the calculation of post_time.
#
-if [ "${SUB_HOURLY_POST}" != "TRUE" ]; then
+if [ $(boolify "${SUB_HOURLY_POST}") != "TRUE" ]; then
export fmn="00"
fi
#
@@ -88,7 +120,7 @@ if [ "${RUN_ENVIR}" = "community" ]; then
mkdir -p "${COMOUT}"
fi
-if [ "${SUB_HOURLY_POST}" = "TRUE" ]; then
+if [ $(boolify "${SUB_HOURLY_POST}") = "TRUE" ]; then
export DATA_FHR="${DATA:-$COMOUT}/$fhr$fmn"
else
export DATA_FHR="${DATA:-$COMOUT}/$fhr"
diff --git a/jobs/JREGIONAL_RUN_PRDGEN b/jobs/JREGIONAL_RUN_PRDGEN
index 24479cb62d..1cf933b666 100755
--- a/jobs/JREGIONAL_RUN_PRDGEN
+++ b/jobs/JREGIONAL_RUN_PRDGEN
@@ -3,10 +3,33 @@
#
#-----------------------------------------------------------------------
#
-# This script runs wgrib2 to create various subdomain GRIB2 files from
-# the raw UPP-generated GRIB2 output from the run_post task of the
+# The J-Job that runs wgrib2 to create various subdomain GRIB2 files
+# from the raw UPP-generated GRIB2 output from the run_post task of the
# FV3-LAM model.
#
+# Run-time environment variables:
+#
+# COMIN
+# COMOUT
+# DATA
+# GLOBAL_VAR_DEFNS_FP
+# SLASH_ENSMEM_SUBDIR
+#
+# Experiment variables
+#
+# user:
+# SCRIPTSdir
+# USHdir
+#
+# platform:
+# WORKFLOW_MANAGER
+#
+# workflow:
+# RUN_ENVIR
+#
+# task_run_post:
+# SUB_HOURLY_POST
+#
#-----------------------------------------------------------------------
#
@@ -18,7 +41,9 @@
#-----------------------------------------------------------------------
#
. $USHdir/source_util_funcs.sh
-source_config_for_task "task_run_prdgen|task_run_post" ${GLOBAL_VAR_DEFNS_FP}
+for sect in user nco platform workflow task_run_post ; do
+ source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
. $USHdir/job_preamble.sh
#
#-----------------------------------------------------------------------
@@ -87,7 +112,7 @@ fi
mkdir -p "${COMOUT}"
# subhourly post
-if [ "${SUB_HOURLY_POST}" = "TRUE" ]; then
+if [ $(boolify "${SUB_HOURLY_POST}") = "TRUE" ]; then
export DATA_FHR="${DATA:-$COMOUT}/$fhr$fmn"
else
export DATA_FHR="${DATA:-$COMOUT}/$fhr"
diff --git a/jobs/JSRW_AQM_ICS b/jobs/JSRW_AQM_ICS
index 0c4df8aa5b..5d5f6d970e 100755
--- a/jobs/JSRW_AQM_ICS
+++ b/jobs/JSRW_AQM_ICS
@@ -31,7 +31,9 @@ export SCRIPTSsrw="${HOMEdir}/scripts"
#
export USHdir="${USHsrw}" # should be removed later
. ${USHsrw}/source_util_funcs.sh
-source_config_for_task "task_aqm_ics" ${GLOBAL_VAR_DEFNS_FP}
+for sect in user nco platform workflow global cpl_aqm_parm ; do
+ source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
#
#-----------------------------------------------------------------------
#
@@ -130,7 +132,7 @@ setpdy.sh
if [ ${subcyc} -ne 0 ]; then
export cycle="t${cyc}${subcyc}z"
fi
-if [ "${DO_ENSEMBLE}" = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then
+if [ $(boolify "${DO_ENSEMBLE}") = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then
export dot_ensmem=".mem${ENSMEM_INDX}"
else
export dot_ensmem=
diff --git a/jobs/JSRW_AQM_LBCS b/jobs/JSRW_AQM_LBCS
index 11a1420d5e..9279dbe190 100755
--- a/jobs/JSRW_AQM_LBCS
+++ b/jobs/JSRW_AQM_LBCS
@@ -31,7 +31,10 @@ export SCRIPTSsrw="${HOMEdir}/scripts"
#
export USHdir="${USHsrw}" # should be removed later
. ${USHsrw}/source_util_funcs.sh
-source_config_for_task "task_get_extrn_lbcs|task_make_orog|task_make_lbcs|cpl_aqm_parm|task_aqm_lbcs" ${GLOBAL_VAR_DEFNS_FP}
+for sect in user nco platform workflow global cpl_aqm_parm task_get_extrn_lbcs \
+ task_make_orog task_make_lbcs ; do
+ source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
#
#-----------------------------------------------------------------------
#
@@ -131,7 +134,7 @@ setpdy.sh
if [ ${subcyc} -ne 0 ]; then
export cycle="t${cyc}${subcyc}z"
fi
-if [ "${DO_ENSEMBLE}" = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then
+if [ $(boolify "${DO_ENSEMBLE}") = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then
export dot_ensmem=".mem${ENSMEM_INDX}"
else
export dot_ensmem=
diff --git a/jobs/JSRW_BIAS_CORRECTION_O3 b/jobs/JSRW_BIAS_CORRECTION_O3
index 3ab2f2d40f..0849614840 100755
--- a/jobs/JSRW_BIAS_CORRECTION_O3
+++ b/jobs/JSRW_BIAS_CORRECTION_O3
@@ -30,7 +30,10 @@ export SCRIPTSsrw="${HOMEdir}/scripts"
#
export USHdir="${USHsrw}" # should be removed later
. ${USHsrw}/source_util_funcs.sh
-source_config_for_task "cpl_aqm_parm|task_run_post|task_bias_correction_o3" ${GLOBAL_VAR_DEFNS_FP}
+for sect in user nco platform workflow global cpl_aqm_parm task_run_post \
+ task_bias_correction_o3 ; do
+ source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
#
#-----------------------------------------------------------------------
#
@@ -123,7 +126,7 @@ setpdy.sh
if [ ${subcyc} -ne 0 ]; then
export cycle="t${cyc}${subcyc}z"
fi
-if [ "${DO_ENSEMBLE}" = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then
+if [ $(boolify "${DO_ENSEMBLE}") = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then
export dot_ensmem=".mem${ENSMEM_INDX}"
else
export dot_ensmem=
diff --git a/jobs/JSRW_BIAS_CORRECTION_PM25 b/jobs/JSRW_BIAS_CORRECTION_PM25
index 42210e7f29..a0a7f76dad 100755
--- a/jobs/JSRW_BIAS_CORRECTION_PM25
+++ b/jobs/JSRW_BIAS_CORRECTION_PM25
@@ -30,7 +30,10 @@ export SCRIPTSsrw="${HOMEdir}/scripts"
#
export USHdir="${USHsrw}" # should be removed later
. ${USHsrw}/source_util_funcs.sh
-source_config_for_task "cpl_aqm_parm|task_run_post|task_bias_correction_pm25" ${GLOBAL_VAR_DEFNS_FP}
+for sect in user nco platform workflow global cpl_aqm_parm task_run_post \
+ task_bias_correction_pm25 ; do
+ source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
#
#-----------------------------------------------------------------------
#
@@ -123,7 +126,7 @@ setpdy.sh
if [ ${subcyc} -ne 0 ]; then
export cycle="t${cyc}${subcyc}z"
fi
-if [ "${DO_ENSEMBLE}" = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then
+if [ $(boolify "${DO_ENSEMBLE}") = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then
export dot_ensmem=".mem${ENSMEM_INDX}"
else
export dot_ensmem=
diff --git a/jobs/JSRW_FIRE_EMISSION b/jobs/JSRW_FIRE_EMISSION
index ae0343e60e..8a2b581274 100755
--- a/jobs/JSRW_FIRE_EMISSION
+++ b/jobs/JSRW_FIRE_EMISSION
@@ -30,7 +30,9 @@ export SCRIPTSsrw="${HOMEdir}/scripts"
#
export USHdir="${USHsrw}" # should be removed later
. ${USHsrw}/source_util_funcs.sh
-source_config_for_task "cpl_aqm_parm|task_fire_emission" ${GLOBAL_VAR_DEFNS_FP}
+for sect in user nco platform workflow global cpl_aqm_parm ; do
+ source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
#
#-----------------------------------------------------------------------
#
@@ -126,7 +128,7 @@ setpdy.sh
if [ ${subcyc} -ne 0 ]; then
export cycle="t${cyc}${subcyc}z"
fi
-if [ "${DO_ENSEMBLE}" = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then
+if [ $(boolify "${DO_ENSEMBLE}") = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then
export dot_ensmem=".mem${ENSMEM_INDX}"
else
export dot_ensmem=
diff --git a/jobs/JSRW_NEXUS_EMISSION b/jobs/JSRW_NEXUS_EMISSION
index 33f1aca757..aab5869cff 100755
--- a/jobs/JSRW_NEXUS_EMISSION
+++ b/jobs/JSRW_NEXUS_EMISSION
@@ -30,7 +30,9 @@ export SCRIPTSsrw="${HOMEdir}/scripts"
#
export USHdir="${USHsrw}" # should be removed later
. ${USHsrw}/source_util_funcs.sh
-source_config_for_task "task_run_fcst|cpl_aqm_parm|task_nexus_emission" ${GLOBAL_VAR_DEFNS_FP}
+for sect in user nco platform workflow global cpl_aqm_parm task_nexus_emission ; do
+ source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
#
#-----------------------------------------------------------------------
#
@@ -128,7 +130,7 @@ setpdy.sh
if [ ${subcyc} -ne 0 ]; then
export cycle="t${cyc}${subcyc}z"
fi
-if [ "${DO_ENSEMBLE}" = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then
+if [ $(boolify "${DO_ENSEMBLE}") = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then
export dot_ensmem=".mem${ENSMEM_INDX}"
else
export dot_ensmem=
diff --git a/jobs/JSRW_NEXUS_GFS_SFC b/jobs/JSRW_NEXUS_GFS_SFC
index 89d84c740d..ceed6be32a 100755
--- a/jobs/JSRW_NEXUS_GFS_SFC
+++ b/jobs/JSRW_NEXUS_GFS_SFC
@@ -30,7 +30,9 @@ export SCRIPTSsrw="${HOMEdir}/scripts"
#
export USHdir="${USHsrw}" # should be removed later
. ${USHsrw}/source_util_funcs.sh
-source_config_for_task "cpl_aqm_parm|task_nexus_gfs_sfc" ${GLOBAL_VAR_DEFNS_FP}
+for sect in user nco platform workflow global cpl_aqm_parm ; do
+ source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
#
#-----------------------------------------------------------------------
#
@@ -128,7 +130,7 @@ setpdy.sh
if [ ${subcyc} -ne 0 ]; then
export cycle="t${cyc}${subcyc}z"
fi
-if [ "${DO_ENSEMBLE}" = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then
+if [ $(boolify "${DO_ENSEMBLE}") = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then
export dot_ensmem=".mem${ENSMEM_INDX}"
else
export dot_ensmem=
@@ -143,7 +145,7 @@ fi
if [ ${subcyc} -ne 0 ]; then
export cycle="t${cyc}${subcyc}z"
fi
-if [ "${DO_ENSEMBLE}" = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then
+if [ $(boolify "${DO_ENSEMBLE}") = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then
export dot_ensmem=".mem${ENSMEM_INDX}"
else
export dot_ensmem=
diff --git a/jobs/JSRW_NEXUS_POST_SPLIT b/jobs/JSRW_NEXUS_POST_SPLIT
index 6e5a0a259a..10f4101d5c 100755
--- a/jobs/JSRW_NEXUS_POST_SPLIT
+++ b/jobs/JSRW_NEXUS_POST_SPLIT
@@ -30,7 +30,9 @@ export SCRIPTSsrw="${HOMEdir}/scripts"
#
export USHdir="${USHsrw}" # should be removed later
. ${USHsrw}/source_util_funcs.sh
-source_config_for_task "cpl_aqm_parm|task_nexus_post_split" ${GLOBAL_VAR_DEFNS_FP}
+for sect in user nco platform workflow global cpl_aqm_parm ; do
+ source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
#
#-----------------------------------------------------------------------
#
@@ -128,7 +130,7 @@ setpdy.sh
if [ ${subcyc} -ne 0 ]; then
export cycle="t${cyc}${subcyc}z"
fi
-if [ "${DO_ENSEMBLE}" = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then
+if [ $(boolify "${DO_ENSEMBLE}") = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then
export dot_ensmem=".mem${ENSMEM_INDX}"
else
export dot_ensmem=
diff --git a/jobs/JSRW_POINT_SOURCE b/jobs/JSRW_POINT_SOURCE
index a112a2d275..6218acaa99 100755
--- a/jobs/JSRW_POINT_SOURCE
+++ b/jobs/JSRW_POINT_SOURCE
@@ -30,7 +30,10 @@ export SCRIPTSsrw="${HOMEdir}/scripts"
#
export USHdir="${USHsrw}" # should be removed later
. ${USHsrw}/source_util_funcs.sh
-source_config_for_task "task_run_fcst|cpl_aqm_parm|task_point_source" ${GLOBAL_VAR_DEFNS_FP}
+for sect in user nco platform workflow global cpl_aqm_parm task_point_source \
+ task_run_fcst ; do
+ source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
#
#-----------------------------------------------------------------------
#
@@ -123,7 +126,7 @@ setpdy.sh
if [ ${subcyc} -ne 0 ]; then
export cycle="t${cyc}${subcyc}z"
fi
-if [ "${DO_ENSEMBLE}" = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then
+if [ $(boolify "${DO_ENSEMBLE}") = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then
export dot_ensmem=".mem${ENSMEM_INDX}"
else
export dot_ensmem=
diff --git a/jobs/JSRW_POST_STAT_O3 b/jobs/JSRW_POST_STAT_O3
index 8924cba9e5..5fadd70d30 100755
--- a/jobs/JSRW_POST_STAT_O3
+++ b/jobs/JSRW_POST_STAT_O3
@@ -30,7 +30,10 @@ export SCRIPTSsrw="${HOMEdir}/scripts"
#
export USHdir="${USHsrw}" # should be removed later
. ${USHsrw}/source_util_funcs.sh
-source_config_for_task "cpl_aqm_parm|task_run_post|task_post_stat_o3" ${GLOBAL_VAR_DEFNS_FP}
+for sect in user nco platform workflow global cpl_aqm_parm \
+ task_run_post ; do
+ source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
. $USHdir/job_preamble.sh
#
#-----------------------------------------------------------------------
@@ -124,7 +127,7 @@ setpdy.sh
if [ ${subcyc} -ne 0 ]; then
export cycle="t${cyc}${subcyc}z"
fi
-if [ "${DO_ENSEMBLE}" = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then
+if [ $(boolify "${DO_ENSEMBLE}") = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then
export dot_ensmem=".mem${ENSMEM_INDX}"
else
export dot_ensmem=
diff --git a/jobs/JSRW_POST_STAT_PM25 b/jobs/JSRW_POST_STAT_PM25
index 83434fa8c7..2d7d6e9e88 100755
--- a/jobs/JSRW_POST_STAT_PM25
+++ b/jobs/JSRW_POST_STAT_PM25
@@ -30,7 +30,10 @@ export SCRIPTSsrw="${HOMEdir}/scripts"
#
export USHdir="${USHsrw}" # should be removed later
. ${USHsrw}/source_util_funcs.sh
-source_config_for_task "cpl_aqm_parm|task_run_post|task_post_stat_pm25" ${GLOBAL_VAR_DEFNS_FP}
+for sect in user nco platform workflow global cpl_aqm_parm \
+ task_run_post ; do
+ source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
#
#-----------------------------------------------------------------------
#
@@ -123,7 +126,7 @@ setpdy.sh
if [ ${subcyc} -ne 0 ]; then
export cycle="t${cyc}${subcyc}z"
fi
-if [ "${DO_ENSEMBLE}" = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then
+if [ $(boolify "${DO_ENSEMBLE}") = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then
export dot_ensmem=".mem${ENSMEM_INDX}"
else
export dot_ensmem=
diff --git a/jobs/JSRW_PRE_POST_STAT b/jobs/JSRW_PRE_POST_STAT
index 12561085c2..8c51e18510 100755
--- a/jobs/JSRW_PRE_POST_STAT
+++ b/jobs/JSRW_PRE_POST_STAT
@@ -30,7 +30,9 @@ export SCRIPTSsrw="${HOMEdir}/scripts"
#
export USHdir="${USHsrw}" # should be removed later
. ${USHsrw}/source_util_funcs.sh
-source_config_for_task "task_pre_post_stat" ${GLOBAL_VAR_DEFNS_FP}
+for sect in user nco platform workflow global cpl_aqm_parm ; do
+ source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
#
#-----------------------------------------------------------------------
#
@@ -127,7 +129,7 @@ setpdy.sh
if [ ${subcyc} -ne 0 ]; then
export cycle="t${cyc}${subcyc}z"
fi
-if [ "${DO_ENSEMBLE}" = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then
+if [ $(boolify "${DO_ENSEMBLE}") = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then
export dot_ensmem=".mem${ENSMEM_INDX}"
else
export dot_ensmem=
diff --git a/modulefiles/tasks/cheyenne/plot_allvars.local.lua b/modulefiles/tasks/cheyenne/plot_allvars.local.lua
index b49b8bb863..7cee04231e 100644
--- a/modulefiles/tasks/cheyenne/plot_allvars.local.lua
+++ b/modulefiles/tasks/cheyenne/plot_allvars.local.lua
@@ -1,3 +1,3 @@
unload("python")
load("conda")
-setenv("SRW_ENV", "srw_graphics")
+setenv("SRW_GRAPHICS_ENV", "srw_graphics")
diff --git a/modulefiles/tasks/derecho/plot_allvars.local.lua b/modulefiles/tasks/derecho/plot_allvars.local.lua
index b49b8bb863..7cee04231e 100644
--- a/modulefiles/tasks/derecho/plot_allvars.local.lua
+++ b/modulefiles/tasks/derecho/plot_allvars.local.lua
@@ -1,3 +1,3 @@
unload("python")
load("conda")
-setenv("SRW_ENV", "srw_graphics")
+setenv("SRW_GRAPHICS_ENV", "srw_graphics")
diff --git a/modulefiles/tasks/gaea/plot_allvars.local.lua b/modulefiles/tasks/gaea/plot_allvars.local.lua
index 104da06f5c..41da34ecca 100644
--- a/modulefiles/tasks/gaea/plot_allvars.local.lua
+++ b/modulefiles/tasks/gaea/plot_allvars.local.lua
@@ -1,4 +1,4 @@
unload("python")
load("conda")
-setenv("SRW_ENV", "srw_graphics")
+setenv("SRW_GRAPHICS_ENV", "srw_graphics")
diff --git a/modulefiles/tasks/hera/plot_allvars.local.lua b/modulefiles/tasks/hera/plot_allvars.local.lua
index b7e9528710..85291013c7 100644
--- a/modulefiles/tasks/hera/plot_allvars.local.lua
+++ b/modulefiles/tasks/hera/plot_allvars.local.lua
@@ -1,2 +1,2 @@
load("conda")
-setenv("SRW_ENV", "srw_graphics")
+setenv("SRW_GRAPHICS_ENV", "srw_graphics")
diff --git a/modulefiles/tasks/hercules/plot_allvars.local.lua b/modulefiles/tasks/hercules/plot_allvars.local.lua
index b49b8bb863..7cee04231e 100644
--- a/modulefiles/tasks/hercules/plot_allvars.local.lua
+++ b/modulefiles/tasks/hercules/plot_allvars.local.lua
@@ -1,3 +1,3 @@
unload("python")
load("conda")
-setenv("SRW_ENV", "srw_graphics")
+setenv("SRW_GRAPHICS_ENV", "srw_graphics")
diff --git a/modulefiles/tasks/jet/plot_allvars.local.lua b/modulefiles/tasks/jet/plot_allvars.local.lua
index b7e9528710..85291013c7 100644
--- a/modulefiles/tasks/jet/plot_allvars.local.lua
+++ b/modulefiles/tasks/jet/plot_allvars.local.lua
@@ -1,2 +1,2 @@
load("conda")
-setenv("SRW_ENV", "srw_graphics")
+setenv("SRW_GRAPHICS_ENV", "srw_graphics")
diff --git a/modulefiles/tasks/noaacloud/plot_allvars.local.lua b/modulefiles/tasks/noaacloud/plot_allvars.local.lua
index b7e9528710..2fd9b41eb5 100644
--- a/modulefiles/tasks/noaacloud/plot_allvars.local.lua
+++ b/modulefiles/tasks/noaacloud/plot_allvars.local.lua
@@ -1,2 +1,5 @@
-load("conda")
-setenv("SRW_ENV", "srw_graphics")
+unload("python")
+append_path("MODULEPATH","/contrib/EPIC/miniconda3/modulefiles")
+load(pathJoin("miniconda3", os.getenv("miniconda3_ver") or "4.12.0"))
+
+setenv("SRW_GRAPHICS_ENV", "regional_workflow")
diff --git a/modulefiles/tasks/orion/plot_allvars.local.lua b/modulefiles/tasks/orion/plot_allvars.local.lua
index b49b8bb863..7cee04231e 100644
--- a/modulefiles/tasks/orion/plot_allvars.local.lua
+++ b/modulefiles/tasks/orion/plot_allvars.local.lua
@@ -1,3 +1,3 @@
unload("python")
load("conda")
-setenv("SRW_ENV", "srw_graphics")
+setenv("SRW_GRAPHICS_ENV", "srw_graphics")
diff --git a/parm/wflow/aqm_post.yaml b/parm/wflow/aqm_post.yaml
index 5f307184d3..48a0761fef 100644
--- a/parm/wflow/aqm_post.yaml
+++ b/parm/wflow/aqm_post.yaml
@@ -22,7 +22,7 @@ default_aqm_task: &default_aqm
task_pre_post_stat:
<<: *default_aqm
- command: '&LOAD_MODULES_RUN_TASK_FP; "pre_post_stat" "&HOMEdir;/jobs/JSRW_PRE_POST_STAT"'
+ command: '&LOAD_MODULES_RUN_TASK; "pre_post_stat" "&HOMEdir;/jobs/JSRW_PRE_POST_STAT"'
join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;'
dependency:
or:
@@ -36,7 +36,7 @@ task_pre_post_stat:
task_post_stat_o3:
<<: *default_aqm
- command: '&LOAD_MODULES_RUN_TASK_FP; "post_stat_o3" "&HOMEdir;/jobs/JSRW_POST_STAT_O3"'
+ command: '&LOAD_MODULES_RUN_TASK; "post_stat_o3" "&HOMEdir;/jobs/JSRW_POST_STAT_O3"'
join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;'
memory: 120G
dependency:
@@ -46,7 +46,7 @@ task_post_stat_o3:
task_post_stat_pm25:
<<: *default_aqm
- command: '&LOAD_MODULES_RUN_TASK_FP; "post_stat_pm25" "&HOMEdir;/jobs/JSRW_POST_STAT_PM25"'
+ command: '&LOAD_MODULES_RUN_TASK; "post_stat_pm25" "&HOMEdir;/jobs/JSRW_POST_STAT_PM25"'
join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;'
memory: 120G
dependency:
@@ -56,7 +56,7 @@ task_post_stat_pm25:
task_bias_correction_o3:
<<: *default_aqm
- command: '&LOAD_MODULES_RUN_TASK_FP; "bias_correction_o3" "&HOMEdir;/jobs/JSRW_BIAS_CORRECTION_O3"'
+ command: '&LOAD_MODULES_RUN_TASK; "bias_correction_o3" "&HOMEdir;/jobs/JSRW_BIAS_CORRECTION_O3"'
join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;'
memory: 120G
dependency:
@@ -66,7 +66,7 @@ task_bias_correction_o3:
task_bias_correction_pm25:
<<: *default_aqm
- command: '&LOAD_MODULES_RUN_TASK_FP; "bias_correction_pm25" "&HOMEdir;/jobs/JSRW_BIAS_CORRECTION_PM25"'
+ command: '&LOAD_MODULES_RUN_TASK; "bias_correction_pm25" "&HOMEdir;/jobs/JSRW_BIAS_CORRECTION_PM25"'
join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;'
memory: 120G
dependency:
diff --git a/parm/wflow/aqm_prep.yaml b/parm/wflow/aqm_prep.yaml
index c57d2198f0..d90bbde60f 100644
--- a/parm/wflow/aqm_prep.yaml
+++ b/parm/wflow/aqm_prep.yaml
@@ -29,7 +29,7 @@ default_aqm_task: &default_aqm
task_nexus_gfs_sfc:
<<: *default_aqm
- command: '&LOAD_MODULES_RUN_TASK_FP; "nexus_gfs_sfc" "&HOMEdir;/jobs/JSRW_NEXUS_GFS_SFC"'
+ command: '&LOAD_MODULES_RUN_TASK; "nexus_gfs_sfc" "&HOMEdir;/jobs/JSRW_NEXUS_GFS_SFC"'
native: '{% if platform.get("SCHED_NATIVE_CMD_HPSS") %}{{ platform.SCHED_NATIVE_CMD_HPSS }}{% else %}{{ platform.SCHED_NATIVE_CMD}}{% endif %}'
partition: '{% if platform.get("PARTITION_HPSS") %}&PARTITION_HPSS;{% else %}None{% endif %}'
join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;'
@@ -53,7 +53,7 @@ metatask_nexus_emission:
nspt: '{% for h in range(0, cpl_aqm_parm.NUM_SPLIT_NEXUS) %}{{ " %02d" % h }}{% endfor %}'
task_nexus_emission_#nspt#:
<<: *default_aqm
- command: '&LOAD_MODULES_RUN_TASK_FP; "nexus_emission" "&HOMEdir;/jobs/JSRW_NEXUS_EMISSION"'
+ command: '&LOAD_MODULES_RUN_TASK; "nexus_emission" "&HOMEdir;/jobs/JSRW_NEXUS_EMISSION"'
join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;'
nnodes: '{{ task_nexus_emission.NNODES_NEXUS_EMISSION }}'
ppn: '{{ task_nexus_emission.PPN_NEXUS_EMISSION // 1 }}'
@@ -68,7 +68,7 @@ metatask_nexus_emission:
task_nexus_post_split:
<<: *default_aqm
- command: '&LOAD_MODULES_RUN_TASK_FP; "nexus_post_split" "&HOMEdir;/jobs/JSRW_NEXUS_POST_SPLIT"'
+ command: '&LOAD_MODULES_RUN_TASK; "nexus_post_split" "&HOMEdir;/jobs/JSRW_NEXUS_POST_SPLIT"'
join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;'
dependency:
metataskdep:
@@ -77,13 +77,13 @@ task_nexus_post_split:
task_fire_emission:
<<: *default_aqm
- command: '&LOAD_MODULES_RUN_TASK_FP; "fire_emission" "&HOMEdir;/jobs/JSRW_FIRE_EMISSION"'
+ command: '&LOAD_MODULES_RUN_TASK; "fire_emission" "&HOMEdir;/jobs/JSRW_FIRE_EMISSION"'
join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;'
memory: 2G
task_point_source:
<<: *default_aqm
- command: '&LOAD_MODULES_RUN_TASK_FP; "point_source" "&HOMEdir;/jobs/JSRW_POINT_SOURCE"'
+ command: '&LOAD_MODULES_RUN_TASK; "point_source" "&HOMEdir;/jobs/JSRW_POINT_SOURCE"'
join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;'
walltime: 01:00:00
dependency:
@@ -101,7 +101,7 @@ task_aqm_ics_ext:
attrs:
cycledefs: at_start
maxtries: '2'
- command: '&LOAD_MODULES_RUN_TASK_FP; "aqm_ics" "&HOMEdir;/jobs/JSRW_AQM_ICS"'
+ command: '&LOAD_MODULES_RUN_TASK; "aqm_ics" "&HOMEdir;/jobs/JSRW_AQM_ICS"'
envars:
<<: *default_vars
PREV_CYCLE_DIR: '&WARMSTART_CYCLE_DIR;'
@@ -127,7 +127,7 @@ task_aqm_ics:
attrs:
cycledefs: cycled_from_second
maxtries: '2'
- command: '&LOAD_MODULES_RUN_TASK_FP; "aqm_ics" "&HOMEdir;/jobs/JSRW_AQM_ICS"'
+ command: '&LOAD_MODULES_RUN_TASK; "aqm_ics" "&HOMEdir;/jobs/JSRW_AQM_ICS"'
envars:
<<: *default_vars
PREV_CYCLE_DIR: '&COMIN_DIR;'
@@ -150,7 +150,7 @@ task_aqm_ics:
task_aqm_lbcs:
<<: *default_aqm
- command: '&LOAD_MODULES_RUN_TASK_FP; "aqm_lbcs" "&HOMEdir;/jobs/JSRW_AQM_LBCS"'
+ command: '&LOAD_MODULES_RUN_TASK; "aqm_lbcs" "&HOMEdir;/jobs/JSRW_AQM_LBCS"'
join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;'
ppn: 24
dependency:
diff --git a/parm/wflow/coldstart.yaml b/parm/wflow/coldstart.yaml
index ceefe865e6..6fad0b8d83 100644
--- a/parm/wflow/coldstart.yaml
+++ b/parm/wflow/coldstart.yaml
@@ -20,7 +20,7 @@ default_task: &default_task
task_get_extrn_ics:
<<: *default_task
- command: '&LOAD_MODULES_RUN_TASK_FP; "get_extrn_ics" "&JOBSdir;/JREGIONAL_GET_EXTRN_MDL_FILES"'
+ command: '&LOAD_MODULES_RUN_TASK; "get_extrn_ics" "&JOBSdir;/JREGIONAL_GET_EXTRN_MDL_FILES"'
attrs:
cycledefs: forecast
maxtries: '2'
@@ -51,7 +51,7 @@ task_get_extrn_ics:
task_get_extrn_lbcs:
<<: *default_task
- command: '&LOAD_MODULES_RUN_TASK_FP; "get_extrn_lbcs" "&JOBSdir;/JREGIONAL_GET_EXTRN_MDL_FILES"'
+ command: '&LOAD_MODULES_RUN_TASK; "get_extrn_lbcs" "&JOBSdir;/JREGIONAL_GET_EXTRN_MDL_FILES"'
attrs:
cycledefs: forecast
maxtries: '2'
@@ -85,7 +85,7 @@ metatask_run_ensemble:
mem: '{% if global.DO_ENSEMBLE %}{%- for m in range(1, global.NUM_ENS_MEMBERS+1) -%}{{ "%03d "%m }}{%- endfor -%} {% else %}{{ "000"|string }}{% endif %}'
task_make_ics_mem#mem#:
<<: *default_task
- command: '&LOAD_MODULES_RUN_TASK_FP; "make_ics" "&JOBSdir;/JREGIONAL_MAKE_ICS"'
+ command: '&LOAD_MODULES_RUN_TASK; "make_ics" "&JOBSdir;/JREGIONAL_MAKE_ICS"'
envars:
<<: *default_vars
SLASH_ENSMEM_SUBDIR: '&SLASH_ENSMEM_SUBDIR;'
@@ -124,7 +124,7 @@ metatask_run_ensemble:
task_make_lbcs_mem#mem#:
<<: *default_task
- command: '&LOAD_MODULES_RUN_TASK_FP; "make_lbcs" "&JOBSdir;/JREGIONAL_MAKE_LBCS"'
+ command: '&LOAD_MODULES_RUN_TASK; "make_lbcs" "&JOBSdir;/JREGIONAL_MAKE_LBCS"'
envars:
<<: *default_vars
SLASH_ENSMEM_SUBDIR: '&SLASH_ENSMEM_SUBDIR;'
@@ -142,7 +142,7 @@ metatask_run_ensemble:
task_run_fcst_mem#mem#:
<<: *default_task
- command: '&LOAD_MODULES_RUN_TASK_FP; "run_fcst" "&JOBSdir;/JREGIONAL_RUN_FCST"'
+ command: '&LOAD_MODULES_RUN_TASK; "run_fcst" "&JOBSdir;/JREGIONAL_RUN_FCST"'
envars:
<<: *default_vars
SLASH_ENSMEM_SUBDIR: '&SLASH_ENSMEM_SUBDIR;'
diff --git a/parm/wflow/default_workflow.yaml b/parm/wflow/default_workflow.yaml
index c79415b3be..e37fdae1ea 100644
--- a/parm/wflow/default_workflow.yaml
+++ b/parm/wflow/default_workflow.yaml
@@ -11,7 +11,7 @@ rocoto:
HOMEdir: '{{ user.HOMEdir }}'
JOBSdir: '{{ user.JOBSdir }}'
KEEPDATA: '{{ nco.KEEPDATA_default }}'
- LOAD_MODULES_RUN_TASK_FP: '{{ workflow.LOAD_MODULES_RUN_TASK_FP }}'
+ LOAD_MODULES_RUN_TASK: '{{ workflow.LOAD_MODULES_RUN_TASK_FP }} {{ user.MACHINE }}'
LOGEXT: ".log"
NET: '{{ nco.NET_default }}'
MRMS_OBS_DIR: '{{ platform.MRMS_OBS_DIR }}'
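
The entity change above folds the machine name into the command prefix, so
every Rocoto <command> that references &LOAD_MODULES_RUN_TASK; now passes the
machine as the wrapper's first argument. A rendered example with hypothetical
paths and machine (the wrapper script is whatever LOAD_MODULES_RUN_TASK_FP
points to):

# '&LOAD_MODULES_RUN_TASK; "run_fcst" "&JOBSdir;/JREGIONAL_RUN_FCST"'
# might render to something like:
/path/to/ush/load_modules_run_task.sh hera "run_fcst" "/path/to/jobs/JREGIONAL_RUN_FCST"
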
diff --git a/parm/wflow/plot.yaml b/parm/wflow/plot.yaml
index 6dad3e0dfa..445d238c15 100644
--- a/parm/wflow/plot.yaml
+++ b/parm/wflow/plot.yaml
@@ -26,7 +26,7 @@ default_task_plot: &default_task
task_plot_allvars:
<<: *default_task
- command: '&LOAD_MODULES_RUN_TASK_FP; "plot_allvars" "&JOBSdir;/JREGIONAL_PLOT_ALLVARS"'
+ command: '&LOAD_MODULES_RUN_TASK; "plot_allvars" "&JOBSdir;/JREGIONAL_PLOT_ALLVARS"'
join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;'
dependency:
or_do_post: &post_files_exist
diff --git a/parm/wflow/post.yaml b/parm/wflow/post.yaml
index 5672e7343f..114e5de377 100644
--- a/parm/wflow/post.yaml
+++ b/parm/wflow/post.yaml
@@ -3,7 +3,7 @@ default_task_post: &default_task
attrs:
cycledefs: '#cycledef#'
maxtries: '2'
- command: '&LOAD_MODULES_RUN_TASK_FP; "run_post" "&JOBSdir;/JREGIONAL_RUN_POST"'
+ command: '&LOAD_MODULES_RUN_TASK; "run_post" "&JOBSdir;/JREGIONAL_RUN_POST"'
envars: &default_vars
GLOBAL_VAR_DEFNS_FP: '&GLOBAL_VAR_DEFNS_FP;'
USHdir: '&USHdir;'
diff --git a/parm/wflow/prdgen.yaml b/parm/wflow/prdgen.yaml
index 6b9f7cd4f6..3f2026a45f 100644
--- a/parm/wflow/prdgen.yaml
+++ b/parm/wflow/prdgen.yaml
@@ -10,7 +10,7 @@ metatask_run_prdgen:
attrs:
cycledefs: '#cycledef#'
maxtries: 1
- command: '&LOAD_MODULES_RUN_TASK_FP; "run_prdgen" "&JOBSdir;/JREGIONAL_RUN_PRDGEN"'
+ command: '&LOAD_MODULES_RUN_TASK; "run_prdgen" "&JOBSdir;/JREGIONAL_RUN_PRDGEN"'
envars:
GLOBAL_VAR_DEFNS_FP: '&GLOBAL_VAR_DEFNS_FP;'
USHdir: '&USHdir;'
diff --git a/parm/wflow/prep.yaml b/parm/wflow/prep.yaml
index c9d5549909..a0c6e3119a 100644
--- a/parm/wflow/prep.yaml
+++ b/parm/wflow/prep.yaml
@@ -24,12 +24,12 @@ default_task_prep: &default_task
task_make_grid:
<<: *default_task
- command: '&LOAD_MODULES_RUN_TASK_FP; "make_grid" "&JOBSdir;/JREGIONAL_MAKE_GRID"'
+ command: '&LOAD_MODULES_RUN_TASK; "make_grid" "&JOBSdir;/JREGIONAL_MAKE_GRID"'
join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;'
task_make_orog:
<<: *default_task
- command: '&LOAD_MODULES_RUN_TASK_FP; "make_orog" "&JOBSdir;/JREGIONAL_MAKE_OROG"'
+ command: '&LOAD_MODULES_RUN_TASK; "make_orog" "&JOBSdir;/JREGIONAL_MAKE_OROG"'
join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;'
dependency:
or: &make_grid_satisfied
@@ -47,7 +47,7 @@ task_make_orog:
task_make_sfc_climo:
<<: *default_task
- command: '&LOAD_MODULES_RUN_TASK_FP; "make_sfc_climo" "&JOBSdir;/JREGIONAL_MAKE_SFC_CLIMO"'
+ command: '&LOAD_MODULES_RUN_TASK; "make_sfc_climo" "&JOBSdir;/JREGIONAL_MAKE_SFC_CLIMO"'
envars:
<<: *default_envars
join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;'
diff --git a/parm/wflow/test.yaml b/parm/wflow/test.yaml
index 716665b228..9c084d6875 100644
--- a/parm/wflow/test.yaml
+++ b/parm/wflow/test.yaml
@@ -29,7 +29,7 @@ metatask_integration_test:
mem: '{% if global.DO_ENSEMBLE %}{%- for m in range(1, global.NUM_ENS_MEMBERS+1) -%}{{ "%03d "%m }}{%- endfor -%} {% else %}{{ "000"|string }}{% endif %}'
task_integration_test_mem#mem#:
<<: *default_task
- command: '&LOAD_MODULES_RUN_TASK_FP; "integration_test" "&JOBSdir;/JREGIONAL_INTEGRATION_TEST"'
+ command: '&LOAD_MODULES_RUN_TASK; "integration_test" "&JOBSdir;/JREGIONAL_INTEGRATION_TEST"'
join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;'
dependency:
and_run_fcst:
diff --git a/parm/wflow/verify_det.yaml b/parm/wflow/verify_det.yaml
index e82d7c61e1..a62adb4481 100644
--- a/parm/wflow/verify_det.yaml
+++ b/parm/wflow/verify_det.yaml
@@ -31,7 +31,7 @@ metatask_GridStat_CCPA_all_accums_all_mems:
<<: *default_task_verify_det
attrs:
maxtries: '2'
- command: '&LOAD_MODULES_RUN_TASK_FP; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX"'
+ command: '&LOAD_MODULES_RUN_TASK; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX"'
envars:
<<: *default_vars
OBS_DIR: '&CCPA_OBS_DIR;'
@@ -63,7 +63,7 @@ metatask_GridStat_NOHRSC_all_accums_all_mems:
<<: *default_task_verify_det
attrs:
maxtries: '2'
- command: '&LOAD_MODULES_RUN_TASK_FP; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX"'
+ command: '&LOAD_MODULES_RUN_TASK; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX"'
envars:
<<: *default_vars
OBS_DIR: '&NOHRSC_OBS_DIR;'
@@ -93,7 +93,7 @@ metatask_GridStat_MRMS_all_mems:
VAR: '{% for var in verification.VX_FIELDS %}{% if var in ["REFC", "RETOP"] %}{{ "%s " % var }}{% endif %}{% endfor %}'
task_run_MET_GridStat_vx_#VAR#_mem#mem#:
<<: *default_task_verify_det
- command: '&LOAD_MODULES_RUN_TASK_FP; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX"'
+ command: '&LOAD_MODULES_RUN_TASK; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX"'
envars:
<<: *default_vars
OBS_DIR: '&MRMS_OBS_DIR;'
@@ -124,7 +124,7 @@ metatask_PointStat_NDAS_all_mems:
VAR: '{% for var in verification.VX_FIELDS %}{% if var in ["ADPSFC", "ADPUPA"] %}{{ "%s " % var }}{% endif %}{% endfor %}'
task_run_MET_PointStat_vx_#VAR#_mem#mem#:
<<: *default_task_verify_det
- command: '&LOAD_MODULES_RUN_TASK_FP; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX"'
+ command: '&LOAD_MODULES_RUN_TASK; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX"'
envars:
<<: *default_vars
OBS_DIR: '&NDAS_OBS_DIR;'
diff --git a/parm/wflow/verify_ens.yaml b/parm/wflow/verify_ens.yaml
index 18b23a1eb0..71bc20b3b0 100644
--- a/parm/wflow/verify_ens.yaml
+++ b/parm/wflow/verify_ens.yaml
@@ -26,7 +26,7 @@ metatask_GenEnsProd_EnsembleStat_CCPA:
ACCUM_HH: '{% for ah in verification.VX_APCP_ACCUMS_HRS %}{% if workflow.FCST_LEN_HRS >= ah %}{{ "%02d " % ah }}{% endif %}{% endfor %}'
task_run_MET_GenEnsProd_vx_APCP#ACCUM_HH#h: &task_GenEnsProd_CCPA
<<: *default_task_verify_ens
- command: '&LOAD_MODULES_RUN_TASK_FP; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GENENSPROD_OR_ENSEMBLESTAT"'
+ command: '&LOAD_MODULES_RUN_TASK; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GENENSPROD_OR_ENSEMBLESTAT"'
envars: &envars_GenEnsProd_CCPA
<<: *default_vars
ACCUM_HH: '#ACCUM_HH#'
@@ -63,7 +63,7 @@ metatask_GenEnsProd_EnsembleStat_NOHRSC:
ACCUM_HH: '{% for ah in verification.VX_ASNOW_ACCUMS_HRS %}{% if workflow.FCST_LEN_HRS >= ah %}{{ "%02d " % ah }}{% endif %}{% endfor %}'
task_run_MET_GenEnsProd_vx_ASNOW#ACCUM_HH#h: &task_GenEnsProd_NOHRSC
<<: *default_task_verify_ens
- command: '&LOAD_MODULES_RUN_TASK_FP; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GENENSPROD_OR_ENSEMBLESTAT"'
+ command: '&LOAD_MODULES_RUN_TASK; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GENENSPROD_OR_ENSEMBLESTAT"'
envars: &envars_GenEnsProd_NOHRSC
<<: *default_vars
ACCUM_HH: '#ACCUM_HH#'
@@ -101,7 +101,7 @@ metatask_GenEnsProd_EnsembleStat_MRMS:
VAR: '{% for var in verification.VX_FIELDS %}{% if var in ["REFC", "RETOP"] %}{{ "%s " % var }}{% endif %}{% endfor %}'
task_run_MET_GenEnsProd_vx_#VAR#: &task_GenEnsProd_MRMS
<<: *default_task_verify_ens
- command: '&LOAD_MODULES_RUN_TASK_FP; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GENENSPROD_OR_ENSEMBLESTAT"'
+ command: '&LOAD_MODULES_RUN_TASK; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GENENSPROD_OR_ENSEMBLESTAT"'
envars: &envars_GenEnsProd_MRMS
<<: *default_vars
ACCUM_HH: '01'
@@ -137,7 +137,7 @@ metatask_GenEnsProd_EnsembleStat_NDAS:
VAR: '{% for var in verification.VX_FIELDS %}{% if var in ["ADPSFC", "ADPUPA"] %}{{ "%s " % var }}{% endif %}{% endfor %}'
task_run_MET_GenEnsProd_vx_#VAR#: &task_GenEnsProd_NDAS
<<: *default_task_verify_ens
- command: '&LOAD_MODULES_RUN_TASK_FP; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GENENSPROD_OR_ENSEMBLESTAT"'
+ command: '&LOAD_MODULES_RUN_TASK; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GENENSPROD_OR_ENSEMBLESTAT"'
envars: &envars_GenEnsProd_NDAS
<<: *default_vars
OBS_DIR: '&NDAS_OBS_DIR;'
@@ -178,7 +178,7 @@ metatask_GridStat_CCPA_ensmeanprob_all_accums:
ACCUM_HH: '{% for ah in verification.VX_APCP_ACCUMS_HRS %}{% if workflow.FCST_LEN_HRS >= ah %}{{ "%02d " % ah }}{% endif %}{% endfor %}'
task_run_MET_GridStat_vx_ens#statlc#_APCP#ACCUM_HH#h:
<<: *default_task_verify_ens
- command: '&LOAD_MODULES_RUN_TASK_FP; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX_ENS#stat#"'
+ command: '&LOAD_MODULES_RUN_TASK; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX_ENS#stat#"'
envars:
<<: *default_vars
OBS_DIR: '&CCPA_OBS_DIR;'
@@ -202,7 +202,7 @@ metatask_GridStat_NOHRSC_ensmeanprob_all_accums:
ACCUM_HH: '{% for ah in verification.VX_ASNOW_ACCUMS_HRS %}{% if workflow.FCST_LEN_HRS >= ah %}{{ "%02d " % ah }}{% endif %}{% endfor %}'
task_run_MET_GridStat_vx_ens#statlc#_ASNOW#ACCUM_HH#h:
<<: *default_task_verify_ens
- command: '&LOAD_MODULES_RUN_TASK_FP; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX_ENS#stat#"'
+ command: '&LOAD_MODULES_RUN_TASK; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX_ENS#stat#"'
envars:
<<: *default_vars
OBS_DIR: '&NOHRSC_OBS_DIR;'
@@ -222,7 +222,7 @@ metatask_GridStat_MRMS_ensprob:
VAR: '{% for var in verification.VX_FIELDS %}{% if var in ["REFC", "RETOP"] %}{{ "%s " % var }}{% endif %}{% endfor %}'
task_run_MET_GridStat_vx_ensprob_#VAR#:
<<: *default_task_verify_ens
- command: '&LOAD_MODULES_RUN_TASK_FP; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX_ENSPROB"'
+ command: '&LOAD_MODULES_RUN_TASK; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX_ENSPROB"'
envars:
<<: *default_vars
ACCUM_HH: '01'
@@ -246,7 +246,7 @@ metatask_PointStat_NDAS_ensmeanprob:
VAR: '{% for var in verification.VX_FIELDS %}{% if var in ["ADPSFC", "ADPUPA"] %}{{ "%s " % var }}{% endif %}{% endfor %}'
task_run_MET_PointStat_vx_ens#statlc#_#VAR#:
<<: *default_task_verify_ens
- command: '&LOAD_MODULES_RUN_TASK_FP; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX_ENS#stat#"'
+ command: '&LOAD_MODULES_RUN_TASK; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX_ENS#stat#"'
envars:
<<: *default_vars
OBS_DIR: '&NDAS_OBS_DIR;'
diff --git a/parm/wflow/verify_pre.yaml b/parm/wflow/verify_pre.yaml
index b7511bf63f..0d4e1c2448 100644
--- a/parm/wflow/verify_pre.yaml
+++ b/parm/wflow/verify_pre.yaml
@@ -23,7 +23,7 @@ default_task_verify_pre: &default_task_verify_pre
task_get_obs_ccpa:
<<: *default_task_verify_pre
- command: '&LOAD_MODULES_RUN_TASK_FP; "get_obs" "&JOBSdir;/JREGIONAL_GET_VERIF_OBS"'
+ command: '&LOAD_MODULES_RUN_TASK; "get_obs" "&JOBSdir;/JREGIONAL_GET_VERIF_OBS"'
envars:
<<: *default_vars
ACCUM_HH: '01'
@@ -37,7 +37,7 @@ task_get_obs_ccpa:
task_get_obs_nohrsc:
<<: *default_task_verify_pre
- command: '&LOAD_MODULES_RUN_TASK_FP; "get_obs" "&JOBSdir;/JREGIONAL_GET_VERIF_OBS"'
+ command: '&LOAD_MODULES_RUN_TASK; "get_obs" "&JOBSdir;/JREGIONAL_GET_VERIF_OBS"'
envars:
<<: *default_vars
OBS_DIR: '&NOHRSC_OBS_DIR;'
@@ -50,7 +50,7 @@ task_get_obs_nohrsc:
task_get_obs_mrms:
<<: *default_task_verify_pre
- command: '&LOAD_MODULES_RUN_TASK_FP; "get_obs" "&JOBSdir;/JREGIONAL_GET_VERIF_OBS"'
+ command: '&LOAD_MODULES_RUN_TASK; "get_obs" "&JOBSdir;/JREGIONAL_GET_VERIF_OBS"'
envars:
<<: *default_vars
OBS_DIR: '&MRMS_OBS_DIR;'
@@ -69,7 +69,7 @@ task_get_obs_ndas:
OBS_DIR: '&NDAS_OBS_DIR;'
OBTYPE: 'NDAS'
FHR: '{% for h in range(0, workflow.FCST_LEN_HRS+1) %}{{ " %02d" % h }}{% endfor %}'
- command: '&LOAD_MODULES_RUN_TASK_FP; "get_obs" "&JOBSdir;/JREGIONAL_GET_VERIF_OBS"'
+ command: '&LOAD_MODULES_RUN_TASK; "get_obs" "&JOBSdir;/JREGIONAL_GET_VERIF_OBS"'
queue: "&QUEUE_HPSS;"
native: '{% if platform.get("SCHED_NATIVE_CMD_HPSS") %}{{ platform.SCHED_NATIVE_CMD_HPSS }}{% else %}{{ platform.SCHED_NATIVE_CMD}}{% endif %}'
partition: '{% if platform.get("PARTITION_HPSS") %}&PARTITION_HPSS;{% else %}None{% endif %}'
@@ -80,7 +80,7 @@ task_run_MET_Pb2nc_obs:
attrs:
cycledefs: forecast
maxtries: '2'
- command: '&LOAD_MODULES_RUN_TASK_FP; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_PB2NC_OBS"'
+ command: '&LOAD_MODULES_RUN_TASK; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_PB2NC_OBS"'
envars:
<<: *default_vars
VAR: ADPSFC
@@ -110,7 +110,7 @@ metatask_PcpCombine_obs:
attrs:
cycledefs: forecast
maxtries: '2'
- command: '&LOAD_MODULES_RUN_TASK_FP; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_PCPCOMBINE"'
+ command: '&LOAD_MODULES_RUN_TASK; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_PCPCOMBINE"'
envars:
<<: *default_vars
VAR: APCP
@@ -140,7 +140,7 @@ metatask_check_post_output_all_mems:
attrs:
cycledefs: forecast
maxtries: '1'
- command: '&LOAD_MODULES_RUN_TASK_FP; "run_vx" "&JOBSdir;/JREGIONAL_CHECK_POST_OUTPUT"'
+ command: '&LOAD_MODULES_RUN_TASK; "run_vx" "&JOBSdir;/JREGIONAL_CHECK_POST_OUTPUT"'
envars:
<<: *default_vars
VAR: APCP
@@ -221,7 +221,7 @@ metatask_PcpCombine_fcst_APCP_all_accums_all_mems:
attrs:
cycledefs: forecast
maxtries: '2'
- command: '&LOAD_MODULES_RUN_TASK_FP; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_PCPCOMBINE"'
+ command: '&LOAD_MODULES_RUN_TASK; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_PCPCOMBINE"'
envars:
<<: *default_vars
VAR: APCP
@@ -249,7 +249,7 @@ metatask_PcpCombine_fcst_ASNOW_all_accums_all_mems:
attrs:
cycledefs: forecast
maxtries: '2'
- command: '&LOAD_MODULES_RUN_TASK_FP; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_PCPCOMBINE"'
+ command: '&LOAD_MODULES_RUN_TASK; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_PCPCOMBINE"'
envars:
<<: *default_vars
VAR: ASNOW
diff --git a/scripts/exregional_check_post_output.sh b/scripts/exregional_check_post_output.sh
index ba0d141c5d..320311cc94 100755
--- a/scripts/exregional_check_post_output.sh
+++ b/scripts/exregional_check_post_output.sh
@@ -1,5 +1,43 @@
#!/usr/bin/env bash
+#
+#-----------------------------------------------------------------------
+#
+# The ex-script for checking the post-processing output.
+#
+# Run-time environment variables:
+#
+# ACCUM_HH
+# CDATE
+# ENSMEM_INDX
+# GLOBAL_VAR_DEFNS_FP
+# VAR
+#
+# Experiment variables
+#
+# user:
+# USHdir
+#
+# workflow:
+# FCST_LEN_HRS
+#
+# global:
+# DO_ENSEMBLE
+# ENS_TIME_LAG_HRS
+#
+# verification:
+# FCST_FN_TEMPLATE
+# FCST_SUBDIR_TEMPLATE
+# NUM_MISSING_FCST_FILES_MAX
+# VX_FCST_INPUT_BASEDIR
+# VX_NDIGITS_ENSMEM_NAMES
+#
+# constants:
+# SECS_PER_HOUR
+#
+#-----------------------------------------------------------------------
+#
+
#
#-----------------------------------------------------------------------
#
@@ -8,7 +46,9 @@
#-----------------------------------------------------------------------
#
. $USHdir/source_util_funcs.sh
-source_config_for_task "task_run_met_pcpcombine|task_run_post" ${GLOBAL_VAR_DEFNS_FP}
+for sect in user nco workflow global verification constants task_run_post ; do
+ source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
#
#-----------------------------------------------------------------------
#
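
The loop above replaces a single pipe-delimited source_config_for_task call
with one source_yaml call per named section of GLOBAL_VAR_DEFNS_FP. As a
rough, hypothetical stand-in for what per-section sourcing accomplishes (the
real source_yaml in ush/source_util_funcs.sh may be implemented differently):

# Illustration only: export the scalar top-level keys of one YAML
# section as shell variables (requires PyYAML).
source_yaml_sketch() {
  local yaml_fp="$1" sect="$2"
  eval "$(python3 - "$yaml_fp" "$sect" << 'PYEOF'
import shlex, sys, yaml
cfg = yaml.safe_load(open(sys.argv[1]))[sys.argv[2]]
for k, v in cfg.items():
    if isinstance(v, (str, int, float, bool)):
        print(f"export {k}={shlex.quote(str(v))}")
PYEOF
)"
}
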
@@ -64,7 +104,7 @@ user-staged.
#-----------------------------------------------------------------------
#
i="0"
-if [ "${DO_ENSEMBLE}" = "TRUE" ]; then
+if [ $(boolify "${DO_ENSEMBLE}") = "TRUE" ]; then
i=$( bc -l <<< "${ENSMEM_INDX}-1" )
fi
time_lag=$( bc -l <<< "${ENS_TIME_LAG_HRS[$i]}*${SECS_PER_HOUR}" )
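
The boolify wrapper introduced throughout these scripts normalizes the truthy
spellings YAML permits (True, true, yes, ...) to a canonical string before
comparison. A minimal hypothetical sketch of such a normalizer; the real
function is defined in the workflow's utility scripts and may differ:

# Hypothetical boolify-style helper (illustration only).
boolify_sketch() {
  local val
  val=$(printf "%s" "$1" | tr '[:upper:]' '[:lower:]')
  case "$val" in
    true|yes)  echo "TRUE" ;;
    false|no)  echo "FALSE" ;;
    *)         echo "$1" ;;  # pass through anything unrecognized
  esac
}

# Usage mirroring the pattern above:
# [ "$(boolify_sketch "${DO_ENSEMBLE}")" = "TRUE" ] && echo "ensemble run"
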
diff --git a/scripts/exregional_get_extrn_mdl_files.sh b/scripts/exregional_get_extrn_mdl_files.sh
index 018a30c285..96c3136e33 100755
--- a/scripts/exregional_get_extrn_mdl_files.sh
+++ b/scripts/exregional_get_extrn_mdl_files.sh
@@ -1,5 +1,65 @@
#!/usr/bin/env bash
+
+#
+#-----------------------------------------------------------------------
+#
+# The ex-script for getting the model files that will be used for either
+# initial conditions or lateral boundary conditions for the experiment.
+#
+# Run-time environment variables:
+#
+# CDATE
+# COMIN
+# cyc
+# DATA
+# EXTRN_MDL_CDATE
+# EXTRN_MDL_NAME
+# EXTRN_MDL_STAGING_DIR
+# GLOBAL_VAR_DEFNS_FP
+# ICS_OR_LBCS
+# NET
+# PDY
+# TIME_OFFSET_HRS
+#
+# Experiment variables
+#
+# user:
+# MACHINE
+# PARMdir
+# RUN_ENVIR
+# USHdir
+#
+# platform:
+# EXTRN_MDL_DATA_STORES
+#
+# workflow:
+# DATE_FIRST_CYCL
+# EXTRN_MDL_VAR_DEFNS_FN
+# FCST_LEN_CYCL
+# INCR_CYCL_FREQ
+# SYMLINK_FIX_FILES
+#
+# task_get_extrn_lbcs:
+# EXTRN_MDL_FILES_LBCS
+# EXTRN_MDL_SOURCE_BASEDIR_LBCS
+# EXTRN_MDL_SYSBASEDIR_LBCS
+# FV3GFS_FILE_FMT_LBCS
+# LBC_SPEC_INTVL_HRS
+#
+# task_get_extrn_ics:
+# EXTRN_MDL_FILES_ICS
+# EXTRN_MDL_SOURCE_BASEDIR_ICS
+# EXTRN_MDL_SYSBASEDIR_ICS
+# FV3GFS_FILE_FMT_ICS
+#
+# global:
+# DO_ENSEMBLE
+# NUM_ENS_MEMBERS
+#
+#-----------------------------------------------------------------------
+#
+
#
#-----------------------------------------------------------------------
#
@@ -8,7 +68,11 @@
#-----------------------------------------------------------------------
#
. $USHdir/source_util_funcs.sh
-source_config_for_task "task_get_extrn_ics|task_get_extrn_lbcs" ${GLOBAL_VAR_DEFNS_FP}
+
+for sect in user nco platform workflow global task_get_extrn_lbcs \
+ task_get_extrn_ics ; do
+ source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
#
#-----------------------------------------------------------------------
#
@@ -147,12 +211,12 @@ if [ -n "${input_file_path:-}" ] ; then
--input_file_path ${input_file_path}"
fi
-if [ $SYMLINK_FIX_FILES = "TRUE" ]; then
+if [ $(boolify $SYMLINK_FIX_FILES) = "TRUE" ]; then
additional_flags="$additional_flags \
--symlink"
fi
-if [ $DO_ENSEMBLE == "TRUE" ] ; then
+if [ $(boolify $DO_ENSEMBLE) = "TRUE" ] ; then
mem_dir="/mem{mem:03d}"
member_list=(1 ${NUM_ENS_MEMBERS})
additional_flags="$additional_flags \
@@ -222,7 +286,7 @@ if [ "${EXTRN_MDL_NAME}" = "GEFS" ]; then
for num in $(seq -f "%02g" ${NUM_ENS_MEMBERS}); do
sorted_fn=( )
for fcst_hr in "${all_fcst_hrs_array[@]}"; do
- # Read in filenames from $EXTRN_MDL_FNS and sort them
+ # Read in filenames from EXTRN_MDL_FNS and sort them
base_path="${EXTRN_MDL_STAGING_DIR}/mem`printf %03d $num`"
filenames_array=`awk -F= '/EXTRN_MDL_FNS/{print $2}' $base_path/${EXTRN_DEFNS}`
for filename in ${filenames_array[@]}; do
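
The awk call above splits each matching line of the staged var-defns file on
"=" and keeps the right-hand side. Against a hypothetical EXTRN_MDL_FNS entry:

# Illustrative input line (file contents are hypothetical):
echo 'EXTRN_MDL_FNS=( "gep01.f000.grib2" "gep01.f006.grib2" )' \
  | awk -F= '/EXTRN_MDL_FNS/{print $2}'
# prints: ( "gep01.f000.grib2" "gep01.f006.grib2" )
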
diff --git a/scripts/exregional_get_verif_obs.sh b/scripts/exregional_get_verif_obs.sh
index a74f11cd3a..6ad6aaed0e 100755
--- a/scripts/exregional_get_verif_obs.sh
+++ b/scripts/exregional_get_verif_obs.sh
@@ -1,5 +1,28 @@
#!/usr/bin/env bash
+#
+#-----------------------------------------------------------------------
+#
+# The ex-script that checks, pulls, and stages observation data for
+# model verification.
+#
+# Run-time environment variables:
+#
+# FHR
+# GLOBAL_VAR_DEFNS_FP
+# OBS_DIR
+# OBTYPE
+# PDY
+# VAR
+#
+# Experiment variables
+#
+# user:
+# USHdir
+# PARMdir
+#
+#-----------------------------------------------------------------------
+#
+
#
#-----------------------------------------------------------------------
#
@@ -8,7 +31,9 @@
#-----------------------------------------------------------------------
#
. $USHdir/source_util_funcs.sh
-source_config_for_task " " ${GLOBAL_VAR_DEFNS_FP}
+for sect in user nco ; do
+ source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
#
#-----------------------------------------------------------------------
#
diff --git a/scripts/exregional_integration_test.py b/scripts/exregional_integration_test.py
index f0ac3d9af6..996cf6320e 100755
--- a/scripts/exregional_integration_test.py
+++ b/scripts/exregional_integration_test.py
@@ -4,16 +4,16 @@
#### Python Script Documentation Block
#
# Script name: exregional_integration_test.py
-# Script description: Ensures the correct number of netcdf files are generated
+# Script description: Ensures the correct number of netcdf files are generated
# for each experiment
#
# Author: Eddie Snyder Org: NOAA EPIC Date: 2024-02-05
-#
+#
# Instructions: 1. Pass the appropriate info for the required arguments:
# --fcst_dir=/path/to/forecast/files
# --fcst_len=
# 2. Run script with arguments
-#
+#
# Notes/future work: - Currently SRW App only accepts netcdf as the UFS WM
# output file format. If that changes, then additional
# logic is needed to address the other file formats.
diff --git a/scripts/exregional_make_grid.sh b/scripts/exregional_make_grid.sh
index c1876651d8..104875f8dc 100755
--- a/scripts/exregional_make_grid.sh
+++ b/scripts/exregional_make_grid.sh
@@ -1,5 +1,99 @@
#!/usr/bin/env bash
+
+#
+#-----------------------------------------------------------------------
+#
+# This script generates NetCDF-formatted grid files required as input
+# to the FV3 model configured for the regional domain.
+#
+# The output of this script is placed in a directory defined by GRID_DIR.
+#
+# More about the grid for regional configurations of FV3:
+#
+# a) This script creates grid files for tile 7 (reserved for the
+#    regional grid located somewhere within tile 6 of the 6 global
+#    tiles).
+#
+# b) Regional configurations of FV3 need two grid files, one with 3
+# halo cells and one with 4 halo cells. The width of the halo is
+# the number of cells in the direction perpendicular to the
+# boundary.
+#
+# c) The tile 7 grid file that this script creates includes a halo
+#    with at least 4 cells to accommodate this requirement. The halo
+# is made thinner in a subsequent step called "shave".
+#
+# d) We will let NHW denote the width of the wide halo that is wider
+# than the required 3- or 4-cell halos. (NHW; N=number of cells,
+# H=halo, W=wide halo)
+#
+# e) T7 indicates the cell count on tile 7.
+#
+#
+# This script does the following:
+#
+# - Create the grid, either an ESGgrid with the regional_esg_grid
+# executable or a GFDL-type grid with the hgrid executable
+# - Calculate the regional grid's global uniform cubed-sphere grid
+# equivalent resolution with the global_equiv_resol executable
+# - Use the shave executable to reduce the halo to 3 and 4 cells
+# - Call a ush script that runs the make_solo_mosaic executable
+#
+# Run-time environment variables:
+#
+# DATA
+# GLOBAL_VAR_DEFNS_FP
+# REDIRECT_OUT_ERR
+#
+# Experiment variables
+#
+# user:
+# EXECdir
+# USHdir
+#
+# platform:
+# PRE_TASK_CMDS
+# RUN_CMD_SERIAL
+#
+# workflow:
+# DOT_OR_USCORE
+# GRID_GEN_METHOD
+# RES_IN_FIXLAM_FILENAMES
+# RGNL_GRID_NML_FN
+# VERBOSE
+#
+# task_make_grid:
+# GFDLgrid_NUM_CELLS
+# GFDLgrid_USE_NUM_CELLS_IN_FILENAMES
+# GRID_DIR
+#
+# constants:
+# NH3
+# NH4
+# TILE_RGNL
+#
+# grid_params:
+# DEL_ANGLE_X_SG
+# DEL_ANGLE_Y_SG
+# GFDLgrid_REFINE_RATIO
+# IEND_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG
+# ISTART_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG
+# JEND_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG
+# JSTART_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG
+# LAT_CTR
+# LON_CTR
+# NEG_NX_OF_DOM_WITH_WIDE_HALO
+# NEG_NY_OF_DOM_WITH_WIDE_HALO
+# NHW
+# NX
+# NY
+# PAZI
+# STRETCH_FAC
+#
+#-----------------------------------------------------------------------
+#
+
#
#-----------------------------------------------------------------------
#
@@ -8,7 +102,9 @@
#-----------------------------------------------------------------------
#
. $USHdir/source_util_funcs.sh
-source_config_for_task "task_make_grid" ${GLOBAL_VAR_DEFNS_FP}
+for sect in user nco platform workflow constants grid_params task_make_grid ; do
+ source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
#
#-----------------------------------------------------------------------
#
@@ -276,6 +372,7 @@ generation executable (exec_fp):
'pazi': ${PAZI}
"
+ # UW takes input from stdin when no -i/--input-config flag is provided
(cat << EOF
$settings
EOF
@@ -372,7 +469,7 @@ res_equiv=${res_equiv//$'\n'/}
#-----------------------------------------------------------------------
#
if [ "${GRID_GEN_METHOD}" = "GFDLgrid" ]; then
- if [ "${GFDLgrid_USE_NUM_CELLS_IN_FILENAMES}" = "TRUE" ]; then
+ if [ $(boolify "${GFDLgrid_USE_NUM_CELLS_IN_FILENAMES}") = "TRUE" ]; then
CRES="C${GFDLgrid_NUM_CELLS}"
else
CRES="C${res_equiv}"
@@ -380,7 +477,15 @@ if [ "${GRID_GEN_METHOD}" = "GFDLgrid" ]; then
elif [ "${GRID_GEN_METHOD}" = "ESGgrid" ]; then
CRES="C${res_equiv}"
fi
-set_file_param "${GLOBAL_VAR_DEFNS_FP}" "CRES" "'$CRES'"
+
+# UW takes the update values from stdin when no --update-file flag is
+# provided, but it still needs --update-format to parse them correctly.
+echo "workflow: {CRES: ${CRES}}" | uw config realize \
+ --input-file $GLOBAL_VAR_DEFNS_FP \
+ --update-format yaml \
+ --output-file $GLOBAL_VAR_DEFNS_FP \
+ --verbose
+
#
#-----------------------------------------------------------------------
#
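
For reference, the update pattern above can be exercised standalone: uw merges
the YAML document arriving on stdin into the input file and writes the result
back out. The value and paths below are hypothetical:

# Merge a one-key update into an experiment config in place.
echo "workflow: {CRES: C403}" | uw config realize \
  --input-file /path/to/var_defns.yaml \
  --update-format yaml \
  --output-file /path/to/var_defns.yaml \
  --verbose
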
diff --git a/scripts/exregional_make_ics.sh b/scripts/exregional_make_ics.sh
index 875249b107..8cd49076b0 100755
--- a/scripts/exregional_make_ics.sh
+++ b/scripts/exregional_make_ics.sh
@@ -1,5 +1,83 @@
#!/usr/bin/env bash
+#
+#-----------------------------------------------------------------------
+#
+# The ex-script that sets up and runs chgres_cube for preparing initial
+# conditions for the FV3 forecast.
+#
+# Run-time environment variables:
+#
+# COMIN
+# COMOUT
+# COMROOT
+# DATA
+# DATAROOT
+# DATA_SHARE
+# EXTRN_MDL_CDATE
+# GLOBAL_VAR_DEFNS_FP
+# INPUT_DATA
+# NET
+# PDY
+# REDIRECT_OUT_ERR
+# SLASH_ENSMEM_SUBDIR
+#
+# Experiment variables
+#
+# user:
+# EXECdir
+# MACHINE
+# PARMdir
+# RUN_ENVIR
+# USHdir
+#
+# platform:
+# FIXgsm
+# PRE_TASK_CMDS
+# RUN_CMD_UTILS
+#
+# workflow:
+# CCPP_PHYS_SUITE
+# COLDSTART
+# CRES
+# DATE_FIRST_CYCL
+# DOT_OR_USCORE
+# EXTRN_MDL_VAR_DEFNS_FN
+# FIXlam
+# SDF_USES_RUC_LSM
+# SDF_USES_THOMPSON_MP
+# THOMPSON_MP_CLIMO_FP
+# VERBOSE
+#
+# task_make_ics:
+# FVCOM_DIR
+# FVCOM_FILE
+# FVCOM_WCSTART
+# KMP_AFFINITY_MAKE_ICS
+# OMP_NUM_THREADS_MAKE_ICS
+# OMP_STACKSIZE_MAKE_ICS
+# USE_FVCOM
+# VCOORD_FILE
+#
+# task_get_extrn_ics:
+# EXTRN_MDL_NAME_ICS
+# FV3GFS_FILE_FMT_ICS
+#
+# global:
+# HALO_BLEND
+#
+# cpl_aqm_parm:
+# CPL_AQM
+#
+# constants:
+# NH0
+# NH4
+# TILE_RGNL
+#
+#-----------------------------------------------------------------------
+#
+
#
#-----------------------------------------------------------------------
#
@@ -8,7 +86,9 @@
#-----------------------------------------------------------------------
#
. $USHdir/source_util_funcs.sh
-source_config_for_task "task_make_ics|task_get_extrn_ics" ${GLOBAL_VAR_DEFNS_FP}
+for sect in user nco platform workflow global cpl_aqm_parm constants task_get_extrn_ics task_make_ics ; do
+ source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
#
#-----------------------------------------------------------------------
#
@@ -306,7 +386,7 @@ convert_nst=""
nsoill_out="4"
if [ "${EXTRN_MDL_NAME_ICS}" = "HRRR" -o \
"${EXTRN_MDL_NAME_ICS}" = "RAP" ] && \
- [ "${SDF_USES_RUC_LSM}" = "TRUE" ]; then
+ [ $(boolify "${SDF_USES_RUC_LSM}") = "TRUE" ]; then
nsoill_out="9"
fi
#
@@ -326,7 +406,7 @@ fi
thomp_mp_climo_file=""
if [ "${EXTRN_MDL_NAME_ICS}" != "HRRR" -a \
"${EXTRN_MDL_NAME_ICS}" != "RAP" ] && \
- [ "${SDF_USES_THOMPSON_MP}" = "TRUE" ]; then
+ [ $(boolify "${SDF_USES_THOMPSON_MP}") = "TRUE" ]; then
thomp_mp_climo_file="${THOMPSON_MP_CLIMO_FP}"
fi
#
@@ -643,9 +723,9 @@ POST_STEP
#
#-----------------------------------------------------------------------
#
-if [ "${CPL_AQM}" = "TRUE" ]; then
+if [ $(boolify "${CPL_AQM}") = "TRUE" ]; then
COMOUT="${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}" #temporary path, should be removed later
- if [ "${COLDSTART}" = "TRUE" ] && [ "${PDY}${cyc}" = "${DATE_FIRST_CYCL:0:10}" ]; then
+ if [ $(boolify "${COLDSTART}") = "TRUE" ] && [ "${PDY}${cyc}" = "${DATE_FIRST_CYCL:0:10}" ]; then
data_trans_path="${COMOUT}"
else
data_trans_path="${DATA_SHARE}"
@@ -667,7 +747,7 @@ fi
#
#-----------------------------------------------------------------------
#
-if [ "${USE_FVCOM}" = "TRUE" ]; then
+if [ $(boolify "${USE_FVCOM}") = "TRUE" ]; then
#Format for fvcom_time: YYYY-MM-DDTHH:00:00.000000
fvcom_exec_fn="fvcom_to_FV3"
diff --git a/scripts/exregional_make_lbcs.sh b/scripts/exregional_make_lbcs.sh
index 5a2d24bcea..35b4da388a 100755
--- a/scripts/exregional_make_lbcs.sh
+++ b/scripts/exregional_make_lbcs.sh
@@ -1,5 +1,83 @@
#!/usr/bin/env bash
+#
+#-----------------------------------------------------------------------
+#
+# The ex-script that sets up and runs chgres_cube for preparing lateral
+# boundary conditions for the FV3 forecast.
+#
+# Run-time environment variables:
+#
+# COMIN
+# COMOUT
+# COMROOT
+# DATA
+# DATAROOT
+# DATA_SHARE
+# EXTRN_MDL_CDATE
+# INPUT_DATA
+# GLOBAL_VAR_DEFNS_FP
+# NET
+# PDY
+# REDIRECT_OUT_ERR
+# SLASH_ENSMEM_SUBDIR
+#
+# Experiment variables
+#
+# user:
+# EXECdir
+# MACHINE
+# PARMdir
+# RUN_ENVIR
+# USHdir
+#
+# platform:
+# FIXgsm
+# PRE_TASK_CMDS
+# RUN_CMD_UTILS
+#
+# workflow:
+# CCPP_PHYS_SUITE
+# COLDSTART
+# CRES
+# DATE_FIRST_CYCL
+# DOT_OR_USCORE
+# EXTRN_MDL_VAR_DEFNS_FN
+# FIXlam
+# SDF_USES_RUC_LSM
+# SDF_USES_THOMPSON_MP
+# THOMPSON_MP_CLIMO_FP
+# VERBOSE
+#
+# task_get_extrn_lbcs:
+# EXTRN_MDL_NAME_LBCS
+# FV3GFS_FILE_FMT_LBCS
+#
+# task_make_lbcs:
+# FVCOM_DIR
+# FVCOM_FILE
+# FVCOM_WCSTART
+# KMP_AFFINITY_MAKE_LBCS
+# OMP_NUM_THREADS_MAKE_LBCS
+# OMP_STACKSIZE_MAKE_LBCS
+# USE_FVCOM
+# VCOORD_FILE
+#
+# global:
+# HALO_BLEND
+#
+# cpl_aqm_parm:
+# CPL_AQM
+#
+# constants:
+# NH0
+# NH4
+# TILE_RGNL
+#
+#-----------------------------------------------------------------------
+#
+
#
#-----------------------------------------------------------------------
#
@@ -8,7 +86,10 @@
#-----------------------------------------------------------------------
#
. $USHdir/source_util_funcs.sh
-source_config_for_task "task_make_lbcs|task_get_extrn_lbcs" ${GLOBAL_VAR_DEFNS_FP}
+set -x
+for sect in user nco platform workflow global cpl_aqm_parm constants task_get_extrn_lbcs task_make_lbcs ; do
+ source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
#
#-----------------------------------------------------------------------
#
@@ -250,7 +331,7 @@ tracers="\"\""
thomp_mp_climo_file=""
if [ "${EXTRN_MDL_NAME_LBCS}" != "HRRR" -a \
"${EXTRN_MDL_NAME_LBCS}" != "RAP" ] && \
- [ "${SDF_USES_THOMPSON_MP}" = "TRUE" ]; then
+ [ $(boolify "${SDF_USES_THOMPSON_MP}") = "TRUE" ]; then
thomp_mp_climo_file="${THOMPSON_MP_CLIMO_FP}"
fi
#
@@ -495,6 +576,7 @@ FORTRAN namelist file has not specified for this external LBC model (EXTRN_MDL_N
"
nml_fn="fort.41"
+ # UW takes input from stdin when no -i/--input-config flag is provided
(cat << EOF
$settings
EOF
@@ -559,7 +641,7 @@ located in the following directory:
lbc_spec_fhrs=( "${EXTRN_MDL_FHRS[$i]}" )
fcst_hhh=$(( ${lbc_spec_fhrs} - ${EXTRN_MDL_LBCS_OFFSET_HRS} ))
fcst_hhh_FV3LAM=$( printf "%03d" "$fcst_hhh" )
- if [ "${CPL_AQM}" = "TRUE" ]; then
+ if [ $(boolify "${CPL_AQM}") = "TRUE" ]; then
cp -p gfs.bndy.nc ${DATA_SHARE}/${NET}.${cycle}${dot_ensmem}.gfs_bndy.tile7.f${fcst_hhh_FV3LAM}.nc
else
mv gfs.bndy.nc ${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.gfs_bndy.tile7.f${fcst_hhh_FV3LAM}.nc
diff --git a/scripts/exregional_make_orog.sh b/scripts/exregional_make_orog.sh
index 9a3d5da7fc..34b1675d8c 100755
--- a/scripts/exregional_make_orog.sh
+++ b/scripts/exregional_make_orog.sh
@@ -1,5 +1,86 @@
#!/usr/bin/env bash
+#
+#-----------------------------------------------------------------------
+#
+# This ex-script is responsible for creating orography files for the FV3
+# forecast.
+#
+# The output of this script is placed in a directory defined by OROG_DIR.
+#
+# More about the orog for the regional configuration of the FV3:
+#
+# a) Only the tile 7 orography file is created.
+#
+# b) This orography file contains a halo of the same width (NHW)
+#    as the grid file for tile 7 generated by the make_grid script.
+#
+# c) Filtered versions of the orography files are created with the
+# same width (NHW) as the unfiltered orography file and the grid
+# file. FV3 requires two filtered orography files, one with no
+# halo cells and one with 4 halo cells.
+#
+# This script does the following:
+#
+# - Create the raw orography files by running the orog executable.
+# - Run the orog_gsl executable if any of several GSL-developed
+# physics suites is chosen by the user.
+# - Run the filter_topo executable on the raw orography files
+# - Run the shave executable for the 0- and 4-cell halo orography
+# files
+#
+# Run-time environment variables:
+#
+# DATA
+# GLOBAL_VAR_DEFNS_FP
+# REDIRECT_OUT_ERR
+#
+# Experiment variables
+#
+# user:
+# EXECdir
+# USHdir
+#
+# platform:
+# FIXorg
+# PRE_TASK_CMDS
+# RUN_CMD_SERIAL
+#
+# workflow:
+# CCPP_PHYS_SUITE
+# CRES
+# DOT_OR_USCORE
+# FIXam
+# FIXlam
+# GRID_GEN_METHOD
+# PREEXISTING_DIR_METHOD
+# VERBOSE
+#
+# task_make_orog:
+# KMP_AFFINITY_MAKE_OROG
+# OMP_NUM_THREADS_MAKE_OROG
+# OMP_STACKSIZE_MAKE_OROG
+# OROG_DIR
+#
+# task_make_grid:
+# GFDLgrid_NUM_CELLS
+# GFDLgrid_STRETCH_FAC
+# GFDLgrid_REFINE_RATIO
+#
+# constants:
+# NH0
+# NH4
+# TILE_RGNL
+#
+# grid_params:
+# NHW
+# NX
+# NY
+# STRETCH_FAC
+#
+#-----------------------------------------------------------------------
+#
+
#
#-----------------------------------------------------------------------
#
@@ -8,7 +89,10 @@
#-----------------------------------------------------------------------
#
. $USHdir/source_util_funcs.sh
-source_config_for_task "task_make_orog|task_make_grid" ${GLOBAL_VAR_DEFNS_FP}
+for sect in user nco platform workflow constants grid_params task_make_grid task_make_orog ; do
+ source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
+
#
#-----------------------------------------------------------------------
#
@@ -30,13 +114,7 @@ source_config_for_task "task_make_orog|task_make_grid" ${GLOBAL_VAR_DEFNS_FP}
scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" )
scrfunc_fn=$( basename "${scrfunc_fp}" )
scrfunc_dir=$( dirname "${scrfunc_fp}" )
-#
-#-----------------------------------------------------------------------
-#
-# Print message indicating entry into script.
-#
-#-----------------------------------------------------------------------
-#
+
print_info_msg "
========================================================================
Entering script: \"${scrfunc_fn}\"
@@ -54,17 +132,7 @@ This is the ex-script for the task that generates orography files.
export KMP_AFFINITY=${KMP_AFFINITY_MAKE_OROG}
export OMP_NUM_THREADS=${OMP_NUM_THREADS_MAKE_OROG}
export OMP_STACKSIZE=${OMP_STACKSIZE_MAKE_OROG}
-#
-#-----------------------------------------------------------------------
-#
-# Load modules and set various computational parameters and directories.
-#
-# Note:
-# These module loads should all be moved to modulefiles. This has been
-# done for Hera but must still be done for other machines.
-#
-#-----------------------------------------------------------------------
-#
+
eval ${PRE_TASK_CMDS}
if [ -z "${RUN_CMD_SERIAL:-}" ] ; then
@@ -103,9 +171,6 @@ mkdir -p "${shave_dir}"
#
#-----------------------------------------------------------------------
#
-# Set the name and path to the executable that generates the raw orography
-# file and make sure that it exists.
-#
exec_fn="orog"
exec_fp="$EXECdir/${exec_fn}"
if [ ! -f "${exec_fp}" ]; then
@@ -114,10 +179,7 @@ The executable (exec_fp) for generating the orography file does not exist:
exec_fp = \"${exec_fp}\"
Please ensure that you've built this executable."
fi
-#
-# Create a temporary (work) directory in which to generate the raw orography
-# file and change location to it.
-#
+
DATA="${DATA:-${raw_dir}/tmp}"
mkdir -p "${DATA}"
cd "${DATA}"
@@ -131,15 +193,7 @@ cp ${FIXorg}/gmted2010.30sec.int fort.235
#
#-----------------------------------------------------------------------
#
-# The orography filtering code reads in from the grid mosaic file the
-# the number of tiles, the name of the grid file for each tile, and the
-# dimensions (nx and ny) of each tile. Next, set the name of the grid
-# mosaic file and create a symlink to it in filter_dir.
-#
-# Note that in the namelist file for the orography filtering code (created
-# later below), the mosaic file name is saved in a variable called
-# "grid_file". It would have been better to call this "mosaic_file"
-# instead so it doesn't get confused with the grid file for a given tile...
+# Get the grid file info from the mosaic file
#
#-----------------------------------------------------------------------
#
@@ -152,21 +206,15 @@ grid_fp="${FIXlam}/${grid_fn}"
#
#-----------------------------------------------------------------------
#
-# Set input parameters for the orography generation executable and write
-# them to a text file.
+# Set input parameters for the orog executable in a formatted text file.
+# The file is then redirected to the executable's standard input.
#
-# Note that it doesn't matter what lonb and latb are set to below because
-# if we specify an input grid file to the executable read in (which is
-# what we do below), then if lonb and latb are not set to the dimensions
-# of the grid specified in that file (divided by 2 since the grid file
-# specifies a "supergrid"), then lonb and latb effectively get reset to
-# the dimensions specified in the grid file.
+# Note: lonb and latb are placeholders in this case since the program
+# uses the ones obtained from the grid file.
#
#-----------------------------------------------------------------------
#
mtnres=1
-#lonb=$res
-#latb=$res
lonb=0
latb=0
jcap=0
@@ -195,15 +243,13 @@ cat "${input_redirect_fn}"
# Call the executable to generate the raw orography file corresponding
# to tile 7 (the regional domain) only.
#
-# The following will create an orography file named
+# The script moves the output file from its temporary directory to the
+# OROG_DIR and names it:
#
-# oro.${CRES}.tile7.nc
+# ${CRES}_raw_orog.tile7.halo${NHW}.nc
#
-# and will place it in OROG_DIR. Note that this file will include
-# orography for a halo of width NHW cells around tile 7. The follow-
-# ing will also create a work directory called tile7 under OROG_DIR.
-# This work directory can be removed after the orography file has been
-# created (it is currently not deleted).
+# Note that this file will include orography for a halo of width NHW
+# cells around tile 7.
#
#-----------------------------------------------------------------------
#
@@ -225,9 +271,7 @@ cd -
#
#-----------------------------------------------------------------------
#
-# Move the raw orography file from the temporary directory to raw_dir.
-# In the process, rename it such that its name includes CRES and the halo
-# width.
+# Move the raw orography file and rename it.
#
#-----------------------------------------------------------------------
#
@@ -240,9 +284,9 @@ mv "${raw_orog_fp_orig}" "${raw_orog_fp}"
#
#-----------------------------------------------------------------------
#
-# Call the code to generate the two orography statistics files (large-
-# and small-scale) needed for the drag suite in the FV3_HRRR physics
-# suite.
+# Call the orog_gsl executable to generate the two orography statistics
+# files (large- and small-scale) needed for the drag suite in certain
+# GSL physics suites.
#
#-----------------------------------------------------------------------
#
@@ -321,14 +365,14 @@ fi
# resolution of res_regional. These interpolated/extrapolated values are
# then used to perform the orography filtering.
#
-# The above approach works for a GFDLgrid type of grid. To handle ESGgrid
-# type grids, we set res in the namelist to the orography filtering code
-# the equivalent global uniform cubed-sphere resolution of the regional
-# grid, we set stretch_fac to 1 (since the equivalent resolution assumes
-# a uniform global grid), and we set refine_ratio to 1. This will cause
-# res_regional above to be set to the equivalent global uniform cubed-
-# sphere resolution, so the filtering parameter values will be interpolated/
-# extrapolated to that resolution value.
+# To handle ESGgrid type grids, we set res in the namelist for the
+# orography filtering code to the equivalent global uniform cubed-sphere
+# resolution of the regional grid, we set stretch_fac to 1 (since the
+# equivalent resolution assumes a uniform global grid), and we set
+# refine_ratio to 1. This will cause res_regional above to be set to
+# the equivalent global uniform cubed-sphere resolution, so the
+# filtering parameter values will be interpolated/extrapolated to that
+# resolution value.
#
#-----------------------------------------------------------------------
#
@@ -346,13 +390,11 @@ if [ "${GRID_GEN_METHOD}" = "GFDLgrid" ]; then
# Really depends on what EMC wants to do.
res="${GFDLgrid_NUM_CELLS}"
-# stretch_fac="${GFDLgrid_STRETCH_FAC}"
refine_ratio="${GFDLgrid_REFINE_RATIO}"
elif [ "${GRID_GEN_METHOD}" = "ESGgrid" ]; then
res="${CRES:1}"
-# stretch_fac="${STRETCH_FAC}"
refine_ratio="1"
fi
@@ -368,17 +410,12 @@ The executable (exec_fp) for filtering the raw orography does not exist:
Please ensure that you've built this executable."
fi
#
-# The orography filtering executable replaces the contents of the given
-# raw orography file with a file containing the filtered orography. The
-# name of the input raw orography file is in effect specified by the
-# namelist variable topo_file; the orography filtering code assumes that
-# this name is constructed by taking the value of topo_file and appending
-# to it the string ".tile${N}.nc", where N is the tile number (which for
-# a regional grid, is always 7). (Note that topo_file may start with a
-# a path to the orography file that the filtering code will read in and
-# replace.) Thus, we now copy the raw orography file (whose full path is
-# specified by raw_orog_fp) to filter_dir and in the process rename it
-# such that its new name:
+# The filter_topo program overwrites its input file with filtered
+# output. The input file is specified by topo_file in the namelist,
+# with the suffix ".tile7.nc" appended for the regional configuration.
+# To avoid overwriting the output of the orog program, copy its output
+# file to the filter_topo working directory and rename it. Here, the
+# name is chosen such that it:
#
# (1) indicates that it contains filtered orography data (because that
# is what it will contain once the orography filtering executable
@@ -392,21 +429,20 @@ filtered_orog_fp_prefix="${filter_dir}/${filtered_orog_fn_prefix}"
filtered_orog_fp="${filtered_orog_fp_prefix}.${fn_suffix_without_halo}"
cp "${raw_orog_fp}" "${filtered_orog_fp}"
#
-# The orography filtering executable looks for the grid file specified
-# in the grid mosaic file (more specifically, specified by the gridfiles
-# variable in the mosaic file) in the directory in which the executable
-# is running. Recall that above, we already extracted the name of the
-# grid file from the mosaic file and saved it in the variable grid_fn,
-# and we saved the full path to this grid file in the variable grid_fp.
-# Thus, we now create a symlink in the filter_dir directory (where the
-# filtering executable will run) with the same name as the grid file and
-# point it to the actual grid file specified by grid_fp.
+# The filter_topo program looks for the grid file (named by the
+# gridfiles variable in the mosaic file) in its own run directory.
+# Make a symlink to it.
#
create_symlink_to_file ${grid_fp} ${filter_dir}/${grid_fn} TRUE
#
# Create the namelist file (in the filter_dir directory) that the orography
# filtering executable will read in.
#
+# Note that in the namelist file for the orography filtering code
+# (created below), the mosaic file name is saved in a variable called
+# "grid_file". It would have been better to call this "mosaic_file"
+# instead so it doesn't get confused with the grid file for a given tile.
cat > "${filter_dir}/input.nml" < "${filter_dir}/input.nml" < ${nml_fn}
+ > ${ascii_fn}
PREP_STEP
-eval ${RUN_CMD_SERIAL} ${exec_fp} < ${nml_fn} ${REDIRECT_OUT_ERR} || \
+eval ${RUN_CMD_SERIAL} ${exec_fp} < ${ascii_fn} ${REDIRECT_OUT_ERR} || \
print_err_msg_exit "\
Call to executable (exec_fp) to generate a (filtered) orography file with
a ${NH0}-cell-wide halo from the orography file with a {NHW}-cell-wide halo
returned with nonzero exit code:
exec_fp = \"${exec_fp}\"
-The namelist file (nml_fn) used in this call is in directory shave_dir:
- nml_fn = \"${nml_fn}\"
+The config file (ascii_fn) used in this call is in directory shave_dir:
+ ascii_fn = \"${ascii_fn}\"
shave_dir = \"${shave_dir}\""
POST_STEP
mv ${shaved_fp} ${OROG_DIR}
#
-# Create an input namelist file for the shave executable to generate an
+# Create an input config file for the shave executable to generate an
# orography file with a 4-cell-wide halo from the one with a wide halo.
# Then call the shave executable. Finally, move the resultant file to
# the OROG_DIR directory.
@@ -524,21 +554,21 @@ print_info_msg "$VERBOSE" "
\"Shaving\" filtered orography file with a ${NHW}-cell-wide halo to obtain
a filtered orography file with a ${NH4}-cell-wide halo..."
-nml_fn="input.shave.orog.halo${NH4}"
+ascii_fn="input.shave.orog.halo${NH4}"
shaved_fp="${shave_dir}/${CRES}${DOT_OR_USCORE}oro_data.tile${TILE_RGNL}.halo${NH4}.nc"
printf "%s %s %s %s %s\n" \
$NX $NY ${NH4} \"${unshaved_fp}\" \"${shaved_fp}\" \
- > ${nml_fn}
+ > ${ascii_fn}
PREP_STEP
-eval ${RUN_CMD_SERIAL} ${exec_fp} < ${nml_fn} ${REDIRECT_OUT_ERR} || \
+eval ${RUN_CMD_SERIAL} ${exec_fp} < ${ascii_fn} ${REDIRECT_OUT_ERR} || \
print_err_msg_exit "\
Call to executable (exec_fp) to generate a (filtered) orography file with
a ${NH4}-cell-wide halo from the orography file with a {NHW}-cell-wide halo
returned with nonzero exit code:
exec_fp = \"${exec_fp}\"
-The namelist file (nml_fn) used in this call is in directory shave_dir:
- nml_fn = \"${nml_fn}\"
+The config file (ascii_fn) used in this call is in directory shave_dir:
+ ascii_fn = \"${ascii_fn}\"
shave_dir = \"${shave_dir}\""
POST_STEP
mv "${shaved_fp}" "${OROG_DIR}"
@@ -549,8 +579,8 @@ cd -
#
#-----------------------------------------------------------------------
#
-# Add link in ORIG_DIR directory to the orography file with a 4-cell-wide
-# halo such that the link name do not contain the halo width. These links
+# Add a link in the OROG_DIR directory to the orography file with a 4-cell-wide
+# halo such that the link name does not contain the halo width. These links
# are needed by the make_sfc_climo task.
#
# NOTE: It would be nice to modify the sfc_climo_gen_code to read in
@@ -563,13 +593,7 @@ python3 $USHdir/link_fix.py \
--file-group "orog" || \
print_err_msg_exit "\
Call to function to create links to orography files failed."
-#
-#-----------------------------------------------------------------------
-#
-# Print message indicating successful completion of script.
-#
-#-----------------------------------------------------------------------
-#
+
print_info_msg "
========================================================================
Orography files with various halo widths generated successfully!!!
diff --git a/scripts/exregional_make_sfc_climo.sh b/scripts/exregional_make_sfc_climo.sh
index c4ee8f25b1..a916228b1f 100755
--- a/scripts/exregional_make_sfc_climo.sh
+++ b/scripts/exregional_make_sfc_climo.sh
@@ -1,5 +1,52 @@
#!/usr/bin/env bash
+#
+#-----------------------------------------------------------------------
+#
+# This ex-script generates surface climatology files needed to run FV3
+# forecasts.
+#
+# The script runs the sfc_climo_gen UFS Utils program and links the
+# output to the SFC_CLIMO_DIR directory.
+#
+# Run-time environment variables:
+#
+# DATA
+# GLOBAL_VAR_DEFNS_FP
+# REDIRECT_OUT_ERR
+#
+# Experiment variables
+#
+# user:
+# EXECdir
+# USHdir
+#
+# platform:
+# FIXsfc
+# PRE_TASK_CMDS
+# RUN_CMD_UTILS
+#
+# workflow:
+# CRES
+# DOT_OR_USCORE
+# FIXlam
+# VERBOSE
+#
+# task_make_sfc_climo:
+# KMP_AFFINITY_MAKE_SFC_CLIMO
+# OMP_NUM_THREADS_MAKE_SFC_CLIMO
+# OMP_STACKSIZE_MAKE_SFC_CLIMO
+# SFC_CLIMO_DIR
+#
+# constants:
+# GTYPE
+# NH0
+# NH4
+# TILE_RGNL
+#
+#-----------------------------------------------------------------------
+#
+
#
#-----------------------------------------------------------------------
#
@@ -8,7 +55,9 @@
#-----------------------------------------------------------------------
#
. $USHdir/source_util_funcs.sh
-source_config_for_task "task_make_sfc_climo" ${GLOBAL_VAR_DEFNS_FP}
+for sect in user nco platform workflow constants task_make_sfc_climo ; do
+ source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
#
#-----------------------------------------------------------------------
#
diff --git a/scripts/exregional_run_fcst.sh b/scripts/exregional_run_fcst.sh
index f769d4e225..0241dbd728 100755
--- a/scripts/exregional_run_fcst.sh
+++ b/scripts/exregional_run_fcst.sh
@@ -1,5 +1,113 @@
#!/usr/bin/env bash
+
+#
+#-----------------------------------------------------------------------
+#
+# This ex-script is responsible for running the FV3 regional forecast.
+#
+# Run-time environment variables:
+#
+# CDATE
+# COMIN
+# COMOUT
+# COMROOT
+# DATA
+# DBNROOT
+# GLOBAL_VAR_DEFNS_FP
+# INPUT_DATA
+# NET
+# PDY
+# REDIRECT_OUT_ERR
+# RUN
+# SENDDBN
+# SLASH_ENSMEM_SUBDIR
+#
+# Experiment variables
+#
+# user:
+# MACHINE
+# PARMdir
+# RUN_ENVIR
+# USHdir
+#
+# platform:
+# PRE_TASK_CMDS
+# RUN_CMD_FCST
+#
+# workflow:
+# CCPP_PHYS_DIR
+# CCPP_PHYS_SUITE
+# COLDSTART
+# CRES
+# DATA_TABLE_FN
+# DATA_TABLE_FP
+# DATE_FIRST_CYCL
+# DOT_OR_USCORE
+# EXPTDIR
+# FCST_LEN_CYCL
+# FCST_LEN_HRS
+# FIELD_DICT_FP
+# FIELD_DICT_FN
+# FIELD_TABLE_FN
+# FIELD_TABLE_FP
+# FIXam
+# FIXclim
+# FIXlam
+# FV3_NML_FN
+# FV3_NML_FP
+# FV3_NML_STOCH_FP
+# INCR_CYCL_FREQ
+# PREDEF_GRID_NAME
+# SYMLINK_FIX_FILES
+# VERBOSE
+#
+# task_get_extrn_lbcs:
+# LBC_SPEC_INTVL_HRS
+#
+# task_run_fcst:
+# DO_FCST_RESTART
+# DT_ATMOS
+# FV3_EXEC_FP
+# KMP_AFFINITY_RUN_FCST
+# OMP_NUM_THREADS_RUN_FCST
+# OMP_STACKSIZE_RUN_FCST
+# PRINT_ESMF
+# RESTART_INTERVAL
+# USE_MERRA_CLIMO
+# WRITE_DOPOST
+#
+# task_run_post:
+# CUSTOM_POST_CONFIG_FP
+# DT_SUBHOURLY_POST_MNTS
+# POST_OUTPUT_DOMAIN_NAME
+# SUB_HOURLY_POST
+# USE_CUSTOM_POST_CONFIG_FILE
+#
+# global:
+# DO_ENSEMBLE
+# DO_LSM_SPP
+# DO_SHUM
+# DO_SKEB
+# DO_SPP
+# DO_SPPT
+#
+# cpl_aqm_parm:
+# AQM_RC_PRODUCT_FN
+# CPL_AQM
+#
+# constants:
+# NH0
+# NH3
+# NH4
+# TILE_RGNL
+#
+# fixed_files:
+# CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING
+#
+#-----------------------------------------------------------------------
+#
+
#
#-----------------------------------------------------------------------
#
@@ -8,7 +116,11 @@
#-----------------------------------------------------------------------
#
. $USHdir/source_util_funcs.sh
-source_config_for_task "task_run_fcst|task_run_post|task_get_extrn_ics|task_get_extrn_lbcs" ${GLOBAL_VAR_DEFNS_FP}
+for sect in user nco platform workflow global cpl_aqm_parm constants fixed_files \
+ task_get_extrn_lbcs task_run_fcst task_run_post ; do
+ source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
+
#
#-----------------------------------------------------------------------
#
@@ -57,7 +169,7 @@ export OMP_NUM_THREADS=${OMP_NUM_THREADS_RUN_FCST}
export OMP_STACKSIZE=${OMP_STACKSIZE_RUN_FCST}
export MPI_TYPE_DEPTH=20
export ESMF_RUNTIME_COMPLIANCECHECK=OFF:depth=4
-if [ "${PRINT_ESMF}" = "TRUE" ]; then
+if [ $(boolify "${PRINT_ESMF}") = "TRUE" ]; then
export ESMF_RUNTIME_PROFILE=ON
export ESMF_RUNTIME_PROFILE_OUTPUT="SUMMARY"
fi
@@ -227,7 +339,7 @@ cd ${DATA}/INPUT
#
relative_link_flag="FALSE"
-if [ "${CPL_AQM}" = "TRUE" ]; then
+if [ $(boolify "${CPL_AQM}") = "TRUE" ]; then
  COMIN="${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}" # temporary path; should be removed later
target="${COMIN}/${NET}.${cycle}${dot_ensmem}.gfs_data.tile${TILE_RGNL}.halo${NH0}.nc"
@@ -307,7 +419,7 @@ static) files in the FIXam directory:
# isn't really an advantage to using relative symlinks, so we use symlinks
# with absolute paths.
#
-if [ "${SYMLINK_FIX_FILES}" == "FALSE" ]; then
+if [ $(boolify "${SYMLINK_FIX_FILES}") = "FALSE" ]; then
relative_link_flag="TRUE"
else
relative_link_flag="FALSE"
@@ -336,7 +448,7 @@ done
#
#-----------------------------------------------------------------------
#
-if [ "${USE_MERRA_CLIMO}" = "TRUE" ]; then
+if [ $(boolify "${USE_MERRA_CLIMO}") = "TRUE" ]; then
for f_nm_path in ${FIXclim}/*; do
f_nm=$( basename "${f_nm_path}" )
pre_f="${f_nm%%.*}"
@@ -397,16 +509,16 @@ create_symlink_to_file ${FIELD_TABLE_FP} ${DATA}/${FIELD_TABLE_FN} ${relative_li
create_symlink_to_file ${FIELD_DICT_FP} ${DATA}/${FIELD_DICT_FN} ${relative_link_flag}
-if [ ${WRITE_DOPOST} = "TRUE" ]; then
+if [ $(boolify ${WRITE_DOPOST}) = "TRUE" ]; then
cp ${PARMdir}/upp/nam_micro_lookup.dat ./eta_micro_lookup.dat
- if [ ${USE_CUSTOM_POST_CONFIG_FILE} = "TRUE" ]; then
+ if [ $(boolify ${USE_CUSTOM_POST_CONFIG_FILE}) = "TRUE" ]; then
post_config_fp="${CUSTOM_POST_CONFIG_FP}"
print_info_msg "
====================================================================
CUSTOM_POST_CONFIG_FP = \"${CUSTOM_POST_CONFIG_FP}\"
===================================================================="
else
- if [ "${CPL_AQM}" = "TRUE" ]; then
+ if [ $(boolify "${CPL_AQM}") = "TRUE" ]; then
post_config_fp="${PARMdir}/upp/postxconfig-NT-AQM.txt"
else
post_config_fp="${PARMdir}/upp/postxconfig-NT-fv3lam.txt"
@@ -420,7 +532,7 @@ if [ ${WRITE_DOPOST} = "TRUE" ]; then
cp ${post_config_fp} ./postxconfig-NT.txt
cp ${PARMdir}/upp/params_grib2_tbl_new .
# Set itag for inline-post:
- if [ "${CPL_AQM}" = "TRUE" ]; then
+ if [ $(boolify "${CPL_AQM}") = "TRUE" ]; then
post_itag_add="aqf_on=.true.,"
else
post_itag_add=""
@@ -455,11 +567,14 @@ cp ${CCPP_PHYS_DIR}/noahmptable.tbl .
#-----------------------------------------------------------------------
#
STOCH="FALSE"
-if ([ "${DO_SPP}" = "TRUE" ] || [ "${DO_SPPT}" = "TRUE" ] || [ "${DO_SHUM}" = "TRUE" ] || \
- [ "${DO_SKEB}" = "TRUE" ] || [ "${DO_LSM_SPP}" = "TRUE" ]); then
+if ([ $(boolify "${DO_SPP}") = "TRUE" ] || \
+ [ $(boolify "${DO_SPPT}") = "TRUE" ] || \
+ [ $(boolify "${DO_SHUM}") = "TRUE" ] || \
+ [ $(boolify "${DO_SKEB}") = "TRUE" ] || \
+ [ $(boolify "${DO_LSM_SPP}") = "TRUE" ]); then
STOCH="TRUE"
fi
-if [ "${STOCH}" == "TRUE" ]; then
+if [ "${STOCH}" = "TRUE" ]; then
cp ${FV3_NML_STOCH_FP} ${DATA}/${FV3_NML_FN}
else
ln -sf ${FV3_NML_FP} ${DATA}/${FV3_NML_FN}
@@ -472,7 +587,7 @@ fi
#
#-----------------------------------------------------------------------
#
-if ([ "$STOCH" == "TRUE" ] && [ "${DO_ENSEMBLE}" = "TRUE" ]); then
+if ([ "$STOCH" == "TRUE" ] && [ $(boolify "${DO_ENSEMBLE}") = "TRUE" ]); then
python3 $USHdir/set_fv3nml_ens_stoch_seeds.py \
--path-to-defns ${GLOBAL_VAR_DEFNS_FP} \
--cdate "$CDATE" || print_err_msg_exit "\
@@ -489,7 +604,7 @@ fi
#
#-----------------------------------------------------------------------
#
-if [ "${CPL_AQM}" = "TRUE" ] && [ "${PREDEF_GRID_NAME}" = "AQM_NA_13km" ]; then
+if [ $(boolify "${CPL_AQM}") = "TRUE" ] && [ "${PREDEF_GRID_NAME}" = "AQM_NA_13km" ]; then
python3 $USHdir/update_input_nml.py \
--namelist "${DATA}/${FV3_NML_FN}" \
--aqm_na_13km || print_err_msg_exit "\
@@ -507,10 +622,10 @@ fi
#-----------------------------------------------------------------------
#
flag_fcst_restart="FALSE"
-if [ "${DO_FCST_RESTART}" = "TRUE" ] && [ "$(ls -A ${DATA}/RESTART )" ]; then
+if [ $(boolify "${DO_FCST_RESTART}") = "TRUE" ] && [ "$(ls -A ${DATA}/RESTART )" ]; then
cp input.nml input.nml_orig
cp model_configure model_configure_orig
- if [ "${CPL_AQM}" = "TRUE" ]; then
+ if [ $(boolify "${CPL_AQM}") = "TRUE" ]; then
cp aqm.rc aqm.rc_orig
fi
relative_link_flag="FALSE"
@@ -574,8 +689,10 @@ fi
#
#-----------------------------------------------------------------------
#
-if [ "${CPL_AQM}" = "TRUE" ]; then
- if [ "${COLDSTART}" = "TRUE" ] && [ "${PDY}${cyc}" = "${DATE_FIRST_CYCL:0:10}" ] && [ "${flag_fcst_restart}" = "FALSE" ]; then
+if [ $(boolify "${CPL_AQM}") = "TRUE" ]; then
+ if [ $(boolify "${COLDSTART}") = "TRUE" ] && \
+ [ "${PDY}${cyc}" = "${DATE_FIRST_CYCL:0:10}" ] && \
+ [ $(boolify "${flag_fcst_restart}") = "FALSE" ]; then
init_concentrations="true"
else
init_concentrations="false"
@@ -666,7 +783,7 @@ fi
#
#-----------------------------------------------------------------------
#
-if [ "${RUN_ENVIR}" = "nco" ] && [ "${CPL_AQM}" = "TRUE" ]; then
+if [ "${RUN_ENVIR}" = "nco" ] && [ $(boolify "${CPL_AQM}") = "TRUE" ]; then
# create an intermediate symlink to RESTART
ln -sf "${DATA}/RESTART" "${COMIN}/RESTART"
fi
@@ -725,7 +842,7 @@ POST_STEP
#
#-----------------------------------------------------------------------
#
-if [ "${CPL_AQM}" = "TRUE" ]; then
+if [ $(boolify "${CPL_AQM}") = "TRUE" ]; then
if [ "${RUN_ENVIR}" = "nco" ]; then
if [ -d "${COMIN}/RESTART" ] && [ "$(ls -A ${DATA}/RESTART)" ]; then
rm -rf "${COMIN}/RESTART"
@@ -758,8 +875,8 @@ fi
#
#-----------------------------------------------------------------------
#
-if [ ${WRITE_DOPOST} = "TRUE" ]; then
-
+if [ $(boolify ${WRITE_DOPOST}) = "TRUE" ]; then
+
yyyymmdd=${PDY}
hh=${cyc}
fmn="00"
@@ -785,7 +902,7 @@ if [ ${WRITE_DOPOST} = "TRUE" ]; then
post_fn_suffix="GrbF${fhr_d}"
post_renamed_fn_suffix="f${fhr}${post_mn_or_null}.${POST_OUTPUT_DOMAIN_NAME}.grib2"
- if [ "${CPL_AQM}" = "TRUE" ]; then
+ if [ $(boolify "${CPL_AQM}") = "TRUE" ]; then
fids=( "cmaq" )
else
fids=( "prslev" "natlev" )
@@ -800,15 +917,15 @@ if [ ${WRITE_DOPOST} = "TRUE" ]; then
if [ $RUN_ENVIR != "nco" ]; then
basetime=$( $DATE_UTIL --date "$yyyymmdd $hh" +%y%j%H%M )
symlink_suffix="_${basetime}f${fhr}${post_mn}"
- create_symlink_to_file ${post_renamed_fn} ${FID}${symlink_suffix} TRUE
+ create_symlink_to_file ${post_renamed_fn} ${FID}${symlink_suffix} TRUE
fi
# DBN alert
- if [ $SENDDBN = "TRUE" ]; then
+ if [ "$SENDDBN" = "TRUE" ]; then
$DBNROOT/bin/dbn_alert MODEL rrfs_post ${job} ${COMOUT}/${post_renamed_fn}
fi
done
- if [ "${CPL_AQM}" = "TRUE" ]; then
+ if [ $(boolify "${CPL_AQM}") = "TRUE" ]; then
mv ${DATA}/dynf${fhr}.nc ${COMIN}/${NET}.${cycle}${dot_ensmem}.dyn.f${fhr}.nc
mv ${DATA}/phyf${fhr}.nc ${COMIN}/${NET}.${cycle}${dot_ensmem}.phy.f${fhr}.nc
fi
diff --git a/scripts/exregional_run_met_genensprod_or_ensemblestat.sh b/scripts/exregional_run_met_genensprod_or_ensemblestat.sh
index 93caeaa7f2..05503bb963 100755
--- a/scripts/exregional_run_met_genensprod_or_ensemblestat.sh
+++ b/scripts/exregional_run_met_genensprod_or_ensemblestat.sh
@@ -8,7 +8,11 @@
#-----------------------------------------------------------------------
#
. $USHdir/source_util_funcs.sh
-source_config_for_task "task_run_vx_ensgrid|task_run_post" ${GLOBAL_VAR_DEFNS_FP}
+for sect in user nco platform workflow global verification cpl_aqm_parm \
+ constants fixed_files grid_params \
+ task_run_post task_run_vx_ensgrid ; do
+ source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
#
#-----------------------------------------------------------------------
#
diff --git a/scripts/exregional_run_met_gridstat_or_pointstat_vx.sh b/scripts/exregional_run_met_gridstat_or_pointstat_vx.sh
index 4f871e6e1b..03c6093943 100755
--- a/scripts/exregional_run_met_gridstat_or_pointstat_vx.sh
+++ b/scripts/exregional_run_met_gridstat_or_pointstat_vx.sh
@@ -8,7 +8,11 @@
#-----------------------------------------------------------------------
#
. $USHdir/source_util_funcs.sh
-source_config_for_task "task_run_vx_gridstat|task_run_vx_pointstat|task_run_post" ${GLOBAL_VAR_DEFNS_FP}
+for sect in user nco platform workflow global verification cpl_aqm_parm \
+ constants fixed_files grid_params \
+ task_run_post task_run_vx_gridstat task_run_vx_pointstat ; do
+ source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
#
#-----------------------------------------------------------------------
#
@@ -118,7 +122,7 @@ set_vx_params \
#-----------------------------------------------------------------------
#
i="0"
-if [ "${DO_ENSEMBLE}" = "TRUE" ]; then
+if [ $(boolify "${DO_ENSEMBLE}") = "TRUE" ]; then
i=$( bc -l <<< "${ENSMEM_INDX}-1" )
fi
time_lag=$( bc -l <<< "${ENS_TIME_LAG_HRS[$i]}*${SECS_PER_HOUR}" )
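
[Editor's note] A worked instance of the indexing above, with hypothetical values: for ensemble member 2 and a per-member time-lag table of (0 3) hours, the member's lag comes out to 10800 seconds.

    ENSMEM_INDX=2
    ENS_TIME_LAG_HRS=( 0 3 )
    SECS_PER_HOUR=3600
    i=$( bc -l <<< "${ENSMEM_INDX}-1" )                                  # i=1
    time_lag=$( bc -l <<< "${ENS_TIME_LAG_HRS[$i]}*${SECS_PER_HOUR}" )   # 10800
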
@@ -151,7 +155,7 @@ else
# or, better, just remove this variable and hard-code "/${ensmem_name}" where
# slash_ensmem_subdir_or_null currently appears below.
#
- if [ "${DO_ENSEMBLE}" = "TRUE" ]; then
+ if [ $(boolify "${DO_ENSEMBLE}") = "TRUE" ]; then
slash_ensmem_subdir_or_null="/${ensmem_name}"
else
slash_ensmem_subdir_or_null=""
diff --git a/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensmean.sh b/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensmean.sh
index 6e4a4ff33f..12a54dc21b 100755
--- a/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensmean.sh
+++ b/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensmean.sh
@@ -8,7 +8,11 @@
#-----------------------------------------------------------------------
#
. $USHdir/source_util_funcs.sh
-source_config_for_task "task_run_vx_ensgrid_mean|task_run_vx_enspoint_mean|task_run_post" ${GLOBAL_VAR_DEFNS_FP}
+for sect in user nco platform workflow global verification cpl_aqm_parm \
+ constants fixed_files grid_params \
+ task_run_post task_run_vx_ensgrid_mean task_run_vx_enspoint_mean ; do
+ source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
#
#-----------------------------------------------------------------------
#
diff --git a/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensprob.sh b/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensprob.sh
index 924d321ec3..8fd4a59dfe 100755
--- a/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensprob.sh
+++ b/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensprob.sh
@@ -8,7 +8,11 @@
#-----------------------------------------------------------------------
#
. $USHdir/source_util_funcs.sh
-source_config_for_task "task_run_vx_ensgrid_prob|task_run_vx_enspoint_prob|task_run_post" ${GLOBAL_VAR_DEFNS_FP}
+for sect in user nco platform workflow global verification cpl_aqm_parm \
+ constants fixed_files grid_params \
+ task_run_vx_ensgrid_prob task_run_vx_enspoint_prob task_run_post ; do
+ source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
#
#-----------------------------------------------------------------------
#
diff --git a/scripts/exregional_run_met_pb2nc_obs.sh b/scripts/exregional_run_met_pb2nc_obs.sh
index 985cd33c7f..5281021f01 100755
--- a/scripts/exregional_run_met_pb2nc_obs.sh
+++ b/scripts/exregional_run_met_pb2nc_obs.sh
@@ -8,7 +8,11 @@
#-----------------------------------------------------------------------
#
. $USHdir/source_util_funcs.sh
-source_config_for_task "task_run_met_pb2nc_obs" ${GLOBAL_VAR_DEFNS_FP}
+for sect in user nco platform workflow global verification cpl_aqm_parm \
+ constants fixed_files grid_params \
+ task_run_met_pb2nc_obs ; do
+ source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
#
#-----------------------------------------------------------------------
#
diff --git a/scripts/exregional_run_met_pcpcombine.sh b/scripts/exregional_run_met_pcpcombine.sh
index 6e64d102e6..ce9e78ab17 100755
--- a/scripts/exregional_run_met_pcpcombine.sh
+++ b/scripts/exregional_run_met_pcpcombine.sh
@@ -8,7 +8,11 @@
#-----------------------------------------------------------------------
#
. $USHdir/source_util_funcs.sh
-source_config_for_task "task_run_met_pcpcombine|task_run_post" ${GLOBAL_VAR_DEFNS_FP}
+for sect in user nco platform workflow global verification cpl_aqm_parm \
+ constants fixed_files grid_params \
+ task_run_met_pcpcombine task_run_post ; do
+ source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
#
#-----------------------------------------------------------------------
#
@@ -122,7 +126,7 @@ set_vx_params \
time_lag="0"
if [ "${FCST_OR_OBS}" = "FCST" ]; then
i="0"
- if [ "${DO_ENSEMBLE}" = "TRUE" ]; then
+ if [ $(boolify "${DO_ENSEMBLE}") = "TRUE" ]; then
i=$( bc -l <<< "${ENSMEM_INDX}-1" )
fi
time_lag=$( bc -l <<< "${ENS_TIME_LAG_HRS[$i]}*${SECS_PER_HOUR}" )
@@ -157,7 +161,7 @@ if [ "${FCST_OR_OBS}" = "FCST" ]; then
# or, better, just remove this variable and hard-code "/${ensmem_name}" where
# slash_ensmem_subdir_or_null currently appears below.
#
- if [ "${DO_ENSEMBLE}" = "TRUE" ]; then
+ if [ $(boolify "${DO_ENSEMBLE}") = "TRUE" ]; then
slash_ensmem_subdir_or_null="/${ensmem_name}"
else
slash_ensmem_subdir_or_null=""
diff --git a/scripts/exregional_run_post.sh b/scripts/exregional_run_post.sh
index 1bf45bd965..3f0ca93df9 100755
--- a/scripts/exregional_run_post.sh
+++ b/scripts/exregional_run_post.sh
@@ -1,5 +1,62 @@
#!/usr/bin/env bash
+#
+#-----------------------------------------------------------------------
+#
+# The ex-script that runs UPP.
+#
+# Run-time environment variables:
+#
+# CDATE
+# COMOUT
+# DATA_FHR
+# DBNROOT
+# ENSMEM_INDX
+# GLOBAL_VAR_DEFNS_FP
+# NET
+# PDY
+# REDIRECT_OUT_ERR
+# SENDDBN
+#
+# Experiment variables
+#
+# user:
+# EXECdir
+# MACHINE
+# PARMdir
+# RUN_ENVIR
+# USHdir
+#
+# platform:
+# PRE_TASK_CMDS
+# RUN_CMD_POST
+#
+# workflow:
+# VERBOSE
+#
+# task_run_fcst:
+# DT_ATMOS
+#
+# task_run_post:
+# CUSTOM_POST_CONFIG_FP
+# KMP_AFFINITY_RUN_POST
+# OMP_NUM_THREADS_RUN_POST
+# OMP_STACKSIZE_RUN_POST
+# NUMX
+# POST_OUTPUT_DOMAIN_NAME
+# SUB_HOURLY_POST
+# USE_CUSTOM_POST_CONFIG_FILE
+#
+# global:
+# CRTM_DIR
+# USE_CRTM
+#
+# cpl_aqm_parm:
+# CPL_AQM
+#
+#-----------------------------------------------------------------------
+#
+
#
#-----------------------------------------------------------------------
#
@@ -8,7 +65,10 @@
#-----------------------------------------------------------------------
#
. $USHdir/source_util_funcs.sh
-source_config_for_task "task_run_post" ${GLOBAL_VAR_DEFNS_FP}
+for sect in user nco platform workflow global cpl_aqm_parm \
+ task_run_fcst task_run_post ; do
+ source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
#
#-----------------------------------------------------------------------
#
@@ -82,7 +142,7 @@ fi
#
rm -f fort.*
cp ${PARMdir}/upp/nam_micro_lookup.dat ./eta_micro_lookup.dat
-if [ ${USE_CUSTOM_POST_CONFIG_FILE} = "TRUE" ]; then
+if [ $(boolify ${USE_CUSTOM_POST_CONFIG_FILE}) = "TRUE" ]; then
post_config_fp="${CUSTOM_POST_CONFIG_FP}"
print_info_msg "
====================================================================
@@ -92,7 +152,7 @@ to the temporary work directory (DATA_FHR):
DATA_FHR = \"${DATA_FHR}\"
===================================================================="
else
- if [ "${CPL_AQM}" = "TRUE" ]; then
+ if [ $(boolify "${CPL_AQM}") = "TRUE" ]; then
post_config_fp="${PARMdir}/upp/postxconfig-NT-AQM.txt"
else
post_config_fp="${PARMdir}/upp/postxconfig-NT-fv3lam.txt"
@@ -107,7 +167,7 @@ temporary work directory (DATA_FHR):
fi
cp ${post_config_fp} ./postxconfig-NT.txt
cp ${PARMdir}/upp/params_grib2_tbl_new .
-if [ ${USE_CRTM} = "TRUE" ]; then
+if [ $(boolify ${USE_CRTM}) = "TRUE" ]; then
cp ${CRTM_DIR}/Nalli.IRwater.EmisCoeff.bin ./
cp ${CRTM_DIR}/FAST*.bin ./
cp ${CRTM_DIR}/NPOESS.IRland.EmisCoeff.bin ./
@@ -155,7 +215,7 @@ hh=${cyc}
# must be set to a null string.
#
mnts_secs_str=""
-if [ "${SUB_HOURLY_POST}" = "TRUE" ]; then
+if [ $(boolify "${SUB_HOURLY_POST}") = "TRUE" ]; then
if [ ${fhr}${fmn} = "00000" ]; then
mnts_secs_str=":"$( $DATE_UTIL --utc --date "${yyyymmdd} ${hh} UTC + ${DT_ATMOS} seconds" "+%M:%S" )
else
@@ -185,7 +245,7 @@ post_mn=${post_time:10:2}
#
# Create the input namelist file to the post-processor executable.
#
-if [ "${CPL_AQM}" = "TRUE" ]; then
+if [ $(boolify "${CPL_AQM}") = "TRUE" ]; then
post_itag_add="aqf_on=.true.,"
else
post_itag_add=""
@@ -273,7 +333,7 @@ post_renamed_fn_suffix="f${fhr}${post_mn_or_null}.${POST_OUTPUT_DOMAIN_NAME}.gri
cd "${COMOUT}"
basetime=$( $DATE_UTIL --date "$yyyymmdd $hh" +%y%j%H%M )
symlink_suffix="${dot_ensmem}.${basetime}f${fhr}${post_mn}"
-if [ "${CPL_AQM}" = "TRUE" ]; then
+if [ $(boolify "${CPL_AQM}") = "TRUE" ]; then
fids=( "cmaq" )
else
fids=( "prslev" "natlev" )
@@ -287,7 +347,7 @@ for fid in "${fids[@]}"; do
create_symlink_to_file ${post_renamed_fn} ${FID}${symlink_suffix} TRUE
fi
# DBN alert
- if [ $SENDDBN = "TRUE" ]; then
+ if [ "$SENDDBN" = "TRUE" ]; then
$DBNROOT/bin/dbn_alert MODEL rrfs_post ${job} ${COMOUT}/${post_renamed_fn}
fi
done
diff --git a/scripts/exregional_run_prdgen.sh b/scripts/exregional_run_prdgen.sh
index 5d1bfbf447..5baa779821 100755
--- a/scripts/exregional_run_prdgen.sh
+++ b/scripts/exregional_run_prdgen.sh
@@ -8,7 +8,11 @@
#-----------------------------------------------------------------------
#
. $USHdir/source_util_funcs.sh
-source_config_for_task "task_run_prdgen|task_run_post" ${GLOBAL_VAR_DEFNS_FP}
+for sect in user nco platform workflow global verification cpl_aqm_parm \
+ constants fixed_files grid_params \
+ task_run_post task_run_prdgen ; do
+ source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
#
#-----------------------------------------------------------------------
#
@@ -175,7 +179,7 @@ done
# Remap to additional output grids if requested
#-----------------------------------------------
-if [ ${DO_PARALLEL_PRDGEN} == "TRUE" ]; then
+if [ $(boolify ${DO_PARALLEL_PRDGEN}) = "TRUE" ]; then
#
# parallel run wgrib2 for product generation
#
diff --git a/scripts/exsrw_aqm_ics.sh b/scripts/exsrw_aqm_ics.sh
index efd833b092..4fd040e597 100755
--- a/scripts/exsrw_aqm_ics.sh
+++ b/scripts/exsrw_aqm_ics.sh
@@ -8,7 +8,10 @@
#-----------------------------------------------------------------------
#
. ${USHsrw}/source_util_funcs.sh
-source_config_for_task "task_aqm_ics" ${GLOBAL_VAR_DEFNS_FP}
+for sect in user nco platform workflow global verification cpl_aqm_parm \
+ constants fixed_files grid_params ; do
+ source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
#
#-----------------------------------------------------------------------
#
diff --git a/scripts/exsrw_aqm_lbcs.sh b/scripts/exsrw_aqm_lbcs.sh
index 93dc119ec2..7b3058ef34 100755
--- a/scripts/exsrw_aqm_lbcs.sh
+++ b/scripts/exsrw_aqm_lbcs.sh
@@ -8,7 +8,11 @@
#-----------------------------------------------------------------------
#
. ${USHsrw}/source_util_funcs.sh
-source_config_for_task "task_get_extrn_lbcs|task_make_orog|task_make_lbcs|cpl_aqm_parm|task_aqm_lbcs" ${GLOBAL_VAR_DEFNS_FP}
+for sect in user nco platform workflow global verification cpl_aqm_parm \
+ constants fixed_files grid_params \
+ task_get_extrn_lbcs task_make_lbcs task_make_orog ; do
+ source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
#
#-----------------------------------------------------------------------
#
@@ -100,7 +104,7 @@ for hr in 0 ${LBC_SPEC_FCST_HRS[@]}; do
cp -p "${DATA_SHARE}/${aqm_lbcs_fn}" ${DATA}
done
-if [ "${DO_AQM_CHEM_LBCS}" = "TRUE" ]; then
+if [ $(boolify "${DO_AQM_CHEM_LBCS}") = "TRUE" ]; then
ext_lbcs_file="${AQM_LBCS_FILES}"
chem_lbcs_fn=${ext_lbcs_file///${MM}}
chem_lbcs_fp="${FIXaqm}/chemlbc/${chem_lbcs_fn}"
@@ -141,7 +145,7 @@ fi
#
#-----------------------------------------------------------------------
#
-if [ "${DO_AQM_GEFS_LBCS}" = "TRUE" ]; then
+if [ $(boolify "${DO_AQM_GEFS_LBCS}") = "TRUE" ]; then
AQM_GEFS_FILE_CYC=${AQM_GEFS_FILE_CYC:-"${HH}"}
AQM_GEFS_FILE_CYC=$( printf "%02d" "${AQM_GEFS_FILE_CYC}" )
@@ -153,7 +157,7 @@ if [ "${DO_AQM_GEFS_LBCS}" = "TRUE" ]; then
fi
aqm_mofile_fn="${AQM_GEFS_FILE_PREFIX}.t${AQM_GEFS_FILE_CYC}z.atmf"
- if [ "${DO_REAL_TIME}" = "TRUE" ]; then
+ if [ $(boolify "${DO_REAL_TIME}") = "TRUE" ]; then
aqm_mofile_fp="${COMINgefs}/gefs.${YYYYMMDD}/${AQM_GEFS_FILE_CYC}/chem/sfcsig/${aqm_mofile_fn}"
else
aqm_mofile_fp="${COMINgefs}/${YYYYMMDD}/${AQM_GEFS_FILE_CYC}/${aqm_mofile_fn}"
diff --git a/scripts/exsrw_bias_correction_o3.sh b/scripts/exsrw_bias_correction_o3.sh
index 1ef4012528..343e7e6f2b 100755
--- a/scripts/exsrw_bias_correction_o3.sh
+++ b/scripts/exsrw_bias_correction_o3.sh
@@ -8,7 +8,11 @@
#-----------------------------------------------------------------------
#
. ${USHsrw}/source_util_funcs.sh
-source_config_for_task "cpl_aqm_parm|task_bias_correction_o3" ${GLOBAL_VAR_DEFNS_FP}
+for sect in user nco platform workflow global verification cpl_aqm_parm \
+ constants fixed_files grid_params \
+ task_bias_correction_o3 ; do
+ source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
#
#-----------------------------------------------------------------------
#
@@ -199,7 +203,7 @@ POST_STEP
cp ${DATA}/out/ozone/${yyyy}/*nc ${DATA}/data/bcdata.${yyyymm}/interpolated/ozone/${yyyy}
-if [ "${DO_AQM_SAVE_AIRNOW_HIST}" = "TRUE" ]; then
+if [ $(boolify "${DO_AQM_SAVE_AIRNOW_HIST}") = "TRUE" ]; then
mkdir -p ${COMOUTbicor}/bcdata.${yyyymm}/interpolated/ozone/${yyyy}
cp ${DATA}/out/ozone/${yyyy}/*nc ${COMOUTbicor}/bcdata.${yyyymm}/interpolated/ozone/${yyyy}
diff --git a/scripts/exsrw_bias_correction_pm25.sh b/scripts/exsrw_bias_correction_pm25.sh
index ae1a2d6f65..70cf512589 100755
--- a/scripts/exsrw_bias_correction_pm25.sh
+++ b/scripts/exsrw_bias_correction_pm25.sh
@@ -8,7 +8,11 @@
#-----------------------------------------------------------------------
#
. ${USHsrw}/source_util_funcs.sh
-source_config_for_task "cpl_aqm_parm|task_bias_correction_pm25" ${GLOBAL_VAR_DEFNS_FP}
+for sect in user nco platform workflow global verification cpl_aqm_parm \
+ constants fixed_files grid_params \
+ task_bias_correction_pm25 ; do
+ source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
#
#-----------------------------------------------------------------------
#
@@ -198,7 +202,7 @@ POST_STEP
cp ${DATA}/out/pm25/${yyyy}/*nc ${DATA}/data/bcdata.${yyyymm}/interpolated/pm25/${yyyy}
-if [ "${DO_AQM_SAVE_AIRNOW_HIST}" = "TRUE" ]; then
+if [ $(boolify "${DO_AQM_SAVE_AIRNOW_HIST}") = "TRUE" ]; then
mkdir -p ${COMOUTbicor}/bcdata.${yyyymm}/interpolated/pm25/${yyyy}
cp ${DATA}/out/pm25/${yyyy}/*nc ${COMOUTbicor}/bcdata.${yyyymm}/interpolated/pm25/${yyyy}
fi
diff --git a/scripts/exsrw_fire_emission.sh b/scripts/exsrw_fire_emission.sh
index cb44c99d8d..3ae78422f5 100755
--- a/scripts/exsrw_fire_emission.sh
+++ b/scripts/exsrw_fire_emission.sh
@@ -8,7 +8,10 @@
#-----------------------------------------------------------------------
#
. ${USHsrw}/source_util_funcs.sh
-source_config_for_task "cpl_aqm_parm|task_fire_emission" ${GLOBAL_VAR_DEFNS_FP}
+for sect in user nco platform workflow global verification cpl_aqm_parm \
+ constants fixed_files grid_params ; do
+ source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
#
#-----------------------------------------------------------------------
#
diff --git a/scripts/exsrw_nexus_emission.sh b/scripts/exsrw_nexus_emission.sh
index a5769a6483..0fa8c48754 100755
--- a/scripts/exsrw_nexus_emission.sh
+++ b/scripts/exsrw_nexus_emission.sh
@@ -8,7 +8,11 @@
#-----------------------------------------------------------------------
#
. ${USHsrw}/source_util_funcs.sh
-source_config_for_task "cpl_aqm_parm|task_nexus_emission" ${GLOBAL_VAR_DEFNS_FP}
+for sect in user nco platform workflow global verification cpl_aqm_parm \
+ constants fixed_files grid_params \
+ task_nexus_emission ; do
+ source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
#
#-----------------------------------------------------------------------
#
diff --git a/scripts/exsrw_nexus_gfs_sfc.sh b/scripts/exsrw_nexus_gfs_sfc.sh
index 103842d46f..cadc27b89c 100755
--- a/scripts/exsrw_nexus_gfs_sfc.sh
+++ b/scripts/exsrw_nexus_gfs_sfc.sh
@@ -8,7 +8,10 @@
#-----------------------------------------------------------------------
#
. ${USHsrw}/source_util_funcs.sh
-source_config_for_task "cpl_aqm_parm|task_nexus_gfs_sfc" ${GLOBAL_VAR_DEFNS_FP}
+for sect in user nco platform workflow global verification cpl_aqm_parm \
+ constants fixed_files grid_params ; do
+ source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
#
#-----------------------------------------------------------------------
#
@@ -73,7 +76,7 @@ fcst_len_hrs_offset=$(( FCST_LEN_HRS + TIME_OFFSET_HRS ))
GFS_SFC_TAR_DIR="${NEXUS_GFS_SFC_ARCHV_DIR}/rh${YYYY}/${YYYYMM}/${YYYYMMDD}"
GFS_SFC_TAR_SUB_DIR="gfs.${YYYYMMDD}/${HH}/atmos"
-if [ "${DO_REAL_TIME}" = "TRUE" ]; then
+if [ $(boolify "${DO_REAL_TIME}") = "TRUE" ]; then
GFS_SFC_LOCAL_DIR="${COMINgfs}/${GFS_SFC_TAR_SUB_DIR}"
else
GFS_SFC_LOCAL_DIR="${NEXUS_GFS_SFC_DIR}/${GFS_SFC_TAR_SUB_DIR}"
diff --git a/scripts/exsrw_nexus_post_split.sh b/scripts/exsrw_nexus_post_split.sh
index 517893b5e5..151e0a2ea5 100755
--- a/scripts/exsrw_nexus_post_split.sh
+++ b/scripts/exsrw_nexus_post_split.sh
@@ -8,7 +8,10 @@
#-----------------------------------------------------------------------
#
. ${USHsrw}/source_util_funcs.sh
-source_config_for_task "cpl_aqm_parm|task_nexus_post_split" ${GLOBAL_VAR_DEFNS_FP}
+for sect in user nco platform workflow global verification cpl_aqm_parm \
+ constants fixed_files grid_params ; do
+ source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
#
#-----------------------------------------------------------------------
#
diff --git a/scripts/exsrw_point_source.sh b/scripts/exsrw_point_source.sh
index 7acbc946f7..4cd693506c 100755
--- a/scripts/exsrw_point_source.sh
+++ b/scripts/exsrw_point_source.sh
@@ -8,7 +8,11 @@
#-----------------------------------------------------------------------
#
. ${USHsrw}/source_util_funcs.sh
-source_config_for_task "task_run_fcst|cpl_aqm_parm|task_point_source" ${GLOBAL_VAR_DEFNS_FP}
+for sect in user nco platform workflow global verification cpl_aqm_parm \
+ constants fixed_files grid_params \
+ task_point_source task_run_fcst ; do
+ source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
#
#-----------------------------------------------------------------------
#
diff --git a/scripts/exsrw_post_stat_o3.sh b/scripts/exsrw_post_stat_o3.sh
index 6fa1db7f8f..dfcdd24ffa 100755
--- a/scripts/exsrw_post_stat_o3.sh
+++ b/scripts/exsrw_post_stat_o3.sh
@@ -8,7 +8,11 @@
#-----------------------------------------------------------------------
#
. ${USHsrw}/source_util_funcs.sh
-source_config_for_task "cpl_aqm_parm|task_run_post|task_post_stat_o3" ${GLOBAL_VAR_DEFNS_FP}
+for sect in user nco platform workflow global verification cpl_aqm_parm \
+ constants fixed_files grid_params \
+ task_run_post ; do
+ source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
#
#-----------------------------------------------------------------------
#
diff --git a/scripts/exsrw_post_stat_pm25.sh b/scripts/exsrw_post_stat_pm25.sh
index ea7c1717c3..bdbf1fcbc5 100755
--- a/scripts/exsrw_post_stat_pm25.sh
+++ b/scripts/exsrw_post_stat_pm25.sh
@@ -8,7 +8,11 @@
#-----------------------------------------------------------------------
#
. ${USHsrw}/source_util_funcs.sh
-source_config_for_task "cpl_aqm_parm|task_run_post|task_post_stat_pm25" ${GLOBAL_VAR_DEFNS_FP}
+for sect in user nco platform workflow global verification cpl_aqm_parm \
+ constants fixed_files grid_params \
+ task_run_post ; do
+ source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
#
#-----------------------------------------------------------------------
#
diff --git a/scripts/exsrw_pre_post_stat.sh b/scripts/exsrw_pre_post_stat.sh
index dfb4c2cf9e..f6ec6a9a7d 100755
--- a/scripts/exsrw_pre_post_stat.sh
+++ b/scripts/exsrw_pre_post_stat.sh
@@ -8,7 +8,11 @@
#-----------------------------------------------------------------------
#
. ${USHsrw}/source_util_funcs.sh
-source_config_for_task "task_pre_post|task_run_post" ${GLOBAL_VAR_DEFNS_FP}
+for sect in user nco platform workflow global verification cpl_aqm_parm \
+ constants fixed_files grid_params \
+ task_run_post ; do
+ source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
#
#-----------------------------------------------------------------------
#
diff --git a/tests/WE2E/utils.py b/tests/WE2E/utils.py
index fb96dab004..eb3c49fcba 100755
--- a/tests/WE2E/utils.py
+++ b/tests/WE2E/utils.py
@@ -21,7 +21,7 @@
cfg_to_yaml_str,
flatten_dict,
load_config_file,
- load_shell_config
+ load_yaml_config
)
REPORT_WIDTH = 100
@@ -154,13 +154,13 @@ def calculate_core_hours(expts_dict: dict) -> dict:
for expt in expts_dict:
# Read variable definitions file
- vardefs_file = os.path.join(expts_dict[expt]["expt_dir"],"var_defns.sh")
+ vardefs_file = os.path.join(expts_dict[expt]["expt_dir"],"var_defns.yaml")
if not os.path.isfile(vardefs_file):
logging.warning(f"\nWARNING: For experiment {expt}, variable definitions file")
logging.warning(f"{vardefs_file}\ndoes not exist!\n\nDropping experiment from summary")
continue
logging.debug(f'Reading variable definitions file {vardefs_file}')
- vardefs = load_shell_config(vardefs_file)
+ vardefs = load_yaml_config(vardefs_file)
vdf = flatten_dict(vardefs)
cores_per_node = vdf["NCORES_PER_NODE"]
for task in expts_dict[expt]:
diff --git a/tests/test_python/test_retrieve_data.py b/tests/test_python/test_retrieve_data.py
index 1d54e0904c..2c749c97ac 100644
--- a/tests/test_python/test_retrieve_data.py
+++ b/tests/test_python/test_retrieve_data.py
@@ -493,61 +493,3 @@ def test_ufs_lbcs_from_aws(self):
# Testing that there is no failure
retrieve_data.main(args)
-
- @unittest.skipIf(os.environ.get("CI") == "true", "Skipping HPSS tests")
- def test_rap_obs_from_hpss(self):
-
- """Get RAP observations from hpss for a 06z time"""
-
- with tempfile.TemporaryDirectory(dir=self.path) as tmp_dir:
- os.chdir(tmp_dir)
-
- # fmt: off
- args = [
- '--file_set', 'obs',
- '--config', self.config,
- '--cycle_date', '2023032106',
- '--data_stores', 'hpss',
- '--data_type', 'RAP_obs',
- '--output_path', tmp_dir,
- '--debug',
- ]
- # fmt: on
-
- retrieve_data.main(args)
-
- # Verify files exist in temp dir
-
- path = os.path.join(tmp_dir, "*")
- files_on_disk = glob.glob(path)
- self.assertEqual(len(files_on_disk), 30)
-
- @unittest.skipIf(os.environ.get("CI") == "true", "Skipping HPSS tests")
- def test_rap_e_obs_from_hpss(self):
-
- """Get RAP observations from hpss for a 12z time;
- at 00z and 12z we expect to see additional files
- with the 'rap_e' naming convention"""
-
- with tempfile.TemporaryDirectory(dir=self.path) as tmp_dir:
- os.chdir(tmp_dir)
-
- # fmt: off
- args = [
- '--file_set', 'obs',
- '--config', self.config,
- '--cycle_date', '2023032112',
- '--data_stores', 'hpss',
- '--data_type', 'RAP_obs',
- '--output_path', tmp_dir,
- '--debug',
- ]
- # fmt: on
-
- retrieve_data.main(args)
-
- # Verify files exist in temp dir
-
- path = os.path.join(tmp_dir, "*")
- files_on_disk = glob.glob(path)
- self.assertEqual(len(files_on_disk), 37)
diff --git a/ush/bash_utils/check_var_valid_value.sh b/ush/bash_utils/check_var_valid_value.sh
index 21288184db..5b942c1f73 100644
--- a/ush/bash_utils/check_var_valid_value.sh
+++ b/ush/bash_utils/check_var_valid_value.sh
@@ -96,7 +96,7 @@ where the arguments are defined as follows:
var_value=${!var_name}
valid_var_values_at="$valid_var_values_array_name[@]"
- valid_var_values=("${!valid_var_values_at}")
+ valid_var_values=("${!valid_var_values_at:-}")
if [ "$#" -eq 3 ]; then
err_msg="$3"
diff --git a/ush/bash_utils/create_symlink_to_file.sh b/ush/bash_utils/create_symlink_to_file.sh
index c6a5213326..0cfcdc9fdf 100644
--- a/ush/bash_utils/create_symlink_to_file.sh
+++ b/ush/bash_utils/create_symlink_to_file.sh
@@ -30,6 +30,7 @@ fi
target=$1
symlink=$2
relative=${3:-TRUE}
+relative=$(boolify "${relative}")
#
#-----------------------------------------------------------------------
#
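
[Editor's note] With the third argument now passed through boolify, call sites may use any accepted boolean spelling; a hypothetical example (paths illustrative):

    # Both of these now behave identically:
    create_symlink_to_file "${FIXam}/global_h2o_pltc.f77" "${DATA}/global_h2o_pltc.f77" TRUE
    create_symlink_to_file "${FIXam}/global_h2o_pltc.f77" "${DATA}/global_h2o_pltc.f77" true
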
diff --git a/ush/bash_utils/print_msg.sh b/ush/bash_utils/print_msg.sh
index 28a70d1431..8b032f9698 100644
--- a/ush/bash_utils/print_msg.sh
+++ b/ush/bash_utils/print_msg.sh
@@ -68,7 +68,7 @@ function print_info_msg() {
elif [ "$#" -eq 2 ]; then
- verbose="$1"
+ verbose=$(boolify "$1")
info_msg="$2"
#
#-----------------------------------------------------------------------
diff --git a/ush/bash_utils/source_config.sh b/ush/bash_utils/source_config.sh
deleted file mode 100644
index df5a79a0df..0000000000
--- a/ush/bash_utils/source_config.sh
+++ /dev/null
@@ -1,53 +0,0 @@
-#
-#-----------------------------------------------------------------------
-# This file defines function that sources a config file (yaml/json etc)
-# into the calling shell script
-#-----------------------------------------------------------------------
-#
-
-function config_to_str() {
- $USHdir/config_utils.py -o $1 -c $2 "${@:3}"
-}
-
-#
-#-----------------------------------------------------------------------
-# Define functions for different file formats
-#-----------------------------------------------------------------------
-#
-function config_to_shell_str() {
- config_to_str shell "$@"
-}
-function config_to_ini_str() {
- config_to_str ini "$@"
-}
-function config_to_yaml_str() {
- config_to_str yaml "$@"
-}
-function config_to_json_str() {
- config_to_str json "$@"
-}
-function config_to_xml_str() {
- config_to_str xml "$@"
-}
-
-#
-#-----------------------------------------------------------------------
-# Source contents of a config file to shell script
-#-----------------------------------------------------------------------
-#
-function source_config() {
-
- source <( config_to_shell_str "$@" )
-
-}
-#
-#-----------------------------------------------------------------------
-# Source partial contents of a config file to shell script.
-# Only those variables needed by the task are sourced
-#-----------------------------------------------------------------------
-#
-function source_config_for_task() {
-
- source <( config_to_shell_str "${@:2}" -k "(^(?!task_)|$1).*" )
-
-}
diff --git a/ush/bash_utils/source_yaml.sh b/ush/bash_utils/source_yaml.sh
new file mode 100644
index 0000000000..669408416e
--- /dev/null
+++ b/ush/bash_utils/source_yaml.sh
@@ -0,0 +1,36 @@
+# Define a function that sources a given section of a YAML configuration
+# file into the calling shell.
+function source_yaml () {
+
+ local func_name="${FUNCNAME[0]}"
+
+ if [ "$#" -lt 1 ] ; then
+ print_err_msg_exit "
+Incorrect number of arguments specified:
+
+ Function name: ${func_name}
+ Number of args specified: $#
+
+Usage:
+
+ ${func_name} yaml_file [section]
+
+ yaml_file: path to the YAML file to source
+ section: optional subsection of yaml
+"
+ fi
+  local yaml_file section
+  yaml_file=$1
+  section=${2:-}
+
+ while read -r line ; do
+
+
+ # A regex to match list representations
+ line=$(echo "$line" | sed -E "s/='\[(.*)\]'/=(\1)/")
+ line=${line//,/}
+ line=${line//\"/}
+ line=${line/None/}
+ source <( echo "${line}" )
+  done < <(uw config realize -i "${yaml_file}" --output-format sh ${section:+--key-path "${section}"})
+}
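
[Editor's note] To illustrate the line-by-line rewriting in the while loop above, assume `uw config realize --output-format sh` renders a YAML list as a quoted bracket expression; the sed and substring substitutions then turn it into a bash array (variable name and values hypothetical):

    line="FCST_LEN_CYCL='[6, 12]'"
    line=$(echo "$line" | sed -E "s/='\[(.*)\]'/=(\1)/")   # FCST_LEN_CYCL=(6, 12)
    line=${line//,/}                                       # FCST_LEN_CYCL=(6 12)
    source <( echo "${line}" )
    echo "${FCST_LEN_CYCL[1]}"                             # -> 12
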
diff --git a/ush/config_defaults.yaml b/ush/config_defaults.yaml
index c9c0fc7cb8..90651c1b7f 100644
--- a/ush/config_defaults.yaml
+++ b/ush/config_defaults.yaml
@@ -468,7 +468,7 @@ workflow:
#
#-----------------------------------------------------------------------
#
- WORKFLOW_ID: !nowtimestamp ''
+ WORKFLOW_ID: ""
#
#-----------------------------------------------------------------------
#
@@ -718,13 +718,11 @@ workflow:
# script creates and that defines the workflow for the experiment.
#
# GLOBAL_VAR_DEFNS_FN:
- # Name of file (a shell script) containing the definitions of the primary
- # experiment variables (parameters) defined in this default configuration
- # script and in the user-specified configuration as well as secondary
- # experiment variables generated by the experiment generation script.
- # This file is sourced by many scripts (e.g. the J-job scripts corresponding
- # to each workflow task) in order to make all the experiment variables
- # available in those scripts.
+ # Name of the experiment configuration file. It contains the primary
+ # experiment variables defined in this default configuration script and in the
+ # user-specified configuration as well as secondary experiment variables
+ # generated by the experiment generation script. This file is the primary
+ # source of information used in the scripts at run time.
#
# ROCOTO_YAML_FN:
# Name of the YAML file containing the YAML workflow definition from
@@ -772,7 +770,7 @@ workflow:
FCST_MODEL: "ufs-weather-model"
WFLOW_XML_FN: "FV3LAM_wflow.xml"
- GLOBAL_VAR_DEFNS_FN: "var_defns.sh"
+ GLOBAL_VAR_DEFNS_FN: "var_defns.yaml"
ROCOTO_YAML_FN: "rocoto_defns.yaml"
EXTRN_MDL_VAR_DEFNS_FN: "extrn_mdl_var_defns"
WFLOW_LAUNCH_SCRIPT_FN: "launch_FV3LAM_wflow.sh"
diff --git a/ush/create_aqm_rc_file.py b/ush/create_aqm_rc_file.py
index 739a4d9f18..c37ed05d29 100644
--- a/ush/create_aqm_rc_file.py
+++ b/ush/create_aqm_rc_file.py
@@ -13,7 +13,7 @@
cfg_to_yaml_str,
flatten_dict,
import_vars,
- load_shell_config,
+ load_yaml_config,
print_info_msg,
print_input_args,
str_to_type,
@@ -158,7 +158,7 @@ def parse_args(argv):
if __name__ == "__main__":
args = parse_args(sys.argv[1:])
- cfg = load_shell_config(args.path_to_defns)
+ cfg = load_yaml_config(args.path_to_defns)
cfg = flatten_dict(cfg)
import_vars(dictionary=cfg)
create_aqm_rc_file(
diff --git a/ush/create_diag_table_file.py b/ush/create_diag_table_file.py
index 975165dfe5..113953172d 100644
--- a/ush/create_diag_table_file.py
+++ b/ush/create_diag_table_file.py
@@ -14,7 +14,7 @@
cfg_to_yaml_str,
flatten_dict,
import_vars,
- load_shell_config,
+ load_yaml_config,
print_info_msg,
print_input_args,
)
@@ -102,7 +102,7 @@ def parse_args(argv):
if __name__ == "__main__":
args = parse_args(sys.argv[1:])
- cfg = load_shell_config(args.path_to_defns)
+ cfg = load_yaml_config(args.path_to_defns)
cfg = flatten_dict(cfg)
import_vars(dictionary=cfg)
create_diag_table_file(args.run_dir)
diff --git a/ush/create_model_configure_file.py b/ush/create_model_configure_file.py
index cd39087688..b8767f635a 100644
--- a/ush/create_model_configure_file.py
+++ b/ush/create_model_configure_file.py
@@ -13,7 +13,7 @@
cfg_to_yaml_str,
flatten_dict,
import_vars,
- load_shell_config,
+ load_yaml_config,
lowercase,
print_info_msg,
print_input_args,
@@ -296,7 +296,7 @@ def parse_args(argv):
if __name__ == "__main__":
args = parse_args(sys.argv[1:])
- cfg = load_shell_config(args.path_to_defns)
+ cfg = load_yaml_config(args.path_to_defns)
cfg = flatten_dict(cfg)
import_vars(dictionary=cfg)
create_model_configure_file(
diff --git a/ush/create_ufs_configure_file.py b/ush/create_ufs_configure_file.py
index 9d4ea8afa4..3fd82f488b 100644
--- a/ush/create_ufs_configure_file.py
+++ b/ush/create_ufs_configure_file.py
@@ -15,7 +15,7 @@
cfg_to_yaml_str,
flatten_dict,
import_vars,
- load_shell_config,
+ load_yaml_config,
print_info_msg,
print_input_args,
)
@@ -113,7 +113,7 @@ def parse_args(argv):
if __name__ == "__main__":
args = parse_args(sys.argv[1:])
- cfg = load_shell_config(args.path_to_defns)
+ cfg = load_yaml_config(args.path_to_defns)
cfg = flatten_dict(cfg)
import_vars(dictionary=cfg)
create_ufs_configure_file(
diff --git a/ush/generate_FV3LAM_wflow.py b/ush/generate_FV3LAM_wflow.py
index ba0e9f3a2b..c671a69da8 100755
--- a/ush/generate_FV3LAM_wflow.py
+++ b/ush/generate_FV3LAM_wflow.py
@@ -11,12 +11,15 @@
import logging
import os
import sys
+from stat import S_IXUSR
+from string import Template
from textwrap import dedent
from uwtools.api.config import get_nml_config, get_yaml_config, realize
from uwtools.api.template import render
from python_utils import (
+ list_to_str,
log_info,
import_vars,
export_vars,
@@ -24,7 +27,6 @@
ln_vrfy,
mkdir_vrfy,
mv_vrfy,
- create_symlink_to_file,
check_for_preexist_dir_file,
cfg_to_yaml_str,
find_pattern_in_str,
@@ -137,9 +139,23 @@ def generate_FV3LAM_wflow(
verbose=debug,
)
- create_symlink_to_file(
- wflow_launch_script_fp, os.path.join(exptdir, wflow_launch_script_fn), False
- )
+ with open(wflow_launch_script_fp, "r", encoding='utf-8') as launch_script_file:
+ launch_script_content = launch_script_file.read()
+
+ # Stage an experiment-specific launch file in the experiment directory
+ template = Template(launch_script_content)
+
+ # The script needs several variables from the workflow and user sections
+ template_variables = {**expt_config["user"], **expt_config["workflow"],
+ "valid_vals_BOOLEAN": list_to_str(expt_config["constants"]["valid_vals_BOOLEAN"])}
+ launch_content = template.safe_substitute(template_variables)
+
+ launch_fp = os.path.join(exptdir, wflow_launch_script_fn)
+ with open(launch_fp, "w", encoding='utf-8') as expt_launch_fn:
+ expt_launch_fn.write(launch_content)
+
+ os.chmod(launch_fp, os.stat(launch_fp).st_mode|S_IXUSR)
+
#
# -----------------------------------------------------------------------
#
@@ -639,7 +655,7 @@ def generate_FV3LAM_wflow(
input_format="nml",
output_file=FV3_NML_STOCH_FP,
output_format="nml",
- supplemental_configs=[settings],
+ update_config=get_nml_config(settings),
)
#
diff --git a/ush/job_preamble.sh b/ush/job_preamble.sh
index 16b99393a2..ecfb94fb50 100644
--- a/ush/job_preamble.sh
+++ b/ush/job_preamble.sh
@@ -1,5 +1,7 @@
#!/bin/bash
+set +u
+
#
#-----------------------------------------------------------------------
#
@@ -67,13 +69,13 @@ export COMOUTwmo="${COMOUTwmo:-${COMOUT}/wmo}"
#
#-----------------------------------------------------------------------
#
-if [ ${subcyc} -ne 0 ]; then
+if [ ${subcyc:-0} -ne 0 ]; then
export cycle="t${cyc}${subcyc}z"
else
export cycle="t${cyc}z"
fi
-if [ "${RUN_ENVIR}" = "nco" ] && [ "${DO_ENSEMBLE}" = "TRUE" ] && [ ! -z $ENSMEM_INDX ]; then
+if [ "${RUN_ENVIR}" = "nco" ] && [ $(boolify "${DO_ENSEMBLE}") = "TRUE" ] && [ ! -z $ENSMEM_INDX ]; then
export dot_ensmem=".mem${ENSMEM_INDX}"
else
export dot_ensmem=
@@ -215,4 +217,3 @@ In directory: \"${scrfunc_dir}\"
========================================================================"
}
-
diff --git a/ush/launch_FV3LAM_wflow.sh b/ush/launch_FV3LAM_wflow.sh
old mode 100755
new mode 100644
index 92dd24aee6..7c26511f4f
--- a/ush/launch_FV3LAM_wflow.sh
+++ b/ush/launch_FV3LAM_wflow.sh
@@ -34,43 +34,10 @@ scrfunc_dir=$( dirname "${scrfunc_fp}" )
#
#-----------------------------------------------------------------------
#
-# Get the experiment directory. We assume that there is a symlink to
-# this script in the experiment directory, and this script is called via
-# that symlink. Thus, finding the directory in which the symlink is
-# located will give us the experiment directory. We find this by first
-# obtaining the directory portion (i.e. the portion without the name of
-# this script) of the command that was used to called this script (i.e.
-# "$0") and then use the "readlink -f" command to obtain the corresponding
-# absolute path. This will work for all four of the following ways in
-# which the symlink in the experiment directory pointing to this script
-# may be called:
-#
-# 1) Call this script from the experiment directory:
-# > cd /path/to/experiment/directory
-# > launch_FV3LAM_wflow.sh
-#
-# 2) Call this script from the experiment directory but using "./" before
-# the script name:
-# > cd /path/to/experiment/directory
-# > ./launch_FV3LAM_wflow.sh
-#
-# 3) Call this script from any directory using the absolute path to the
-# symlink in the experiment directory:
-# > /path/to/experiment/directory/launch_FV3LAM_wflow.sh
-#
-# 4) Call this script from a directory that is several levels up from the
-# experiment directory (but not necessarily at the root directory):
-# > cd /path/to
-# > experiment/directory/launch_FV3LAM_wflow.sh
-#
-# Note that given just a file name, e.g. the name of this script without
-# any path before it, the "dirname" command will return a ".", e.g. in
-# bash,
-#
-# > exptdir=$( dirname "launch_FV3LAM_wflow.sh" )
-# > echo $exptdir
-#
-# will print out ".".
+# This script is configured for a specific experiment by
+# generate_FV3LAM_wflow.py, which fills in everything necessary so that
+# the configured copy in the experiment directory needs no additional
+# information at run time.
#
#-----------------------------------------------------------------------
#
@@ -94,7 +61,12 @@ fi
#
#-----------------------------------------------------------------------
#
-. $exptdir/var_defns.sh
+
+# These variables are assumed to exist in the global environment by the
+# bash_utils, which is a Very Bad (TM) thing.
+export USHdir=$USHdir
+export valid_vals_BOOLEAN=${valid_vals_BOOLEAN}
+
. $USHdir/source_util_funcs.sh
#
#-----------------------------------------------------------------------
@@ -369,7 +341,7 @@ by expt_name has completed with the following workflow status (wflow_status):
# Thus, there is no need to try to relaunch it. We also append a message
# to the completion message above to indicate this.
#
- if [ "${USE_CRON_TO_RELAUNCH}" = "TRUE" ]; then
+ if [ $(boolify "${USE_CRON_TO_RELAUNCH}") = "TRUE" ]; then
msg="${msg}\
Thus, there is no need to relaunch the workflow via a cron job. Removing
diff --git a/ush/link_fix.py b/ush/link_fix.py
index fdd9a65f28..f0d103d8ea 100755
--- a/ush/link_fix.py
+++ b/ush/link_fix.py
@@ -18,7 +18,7 @@
cd_vrfy,
mkdir_vrfy,
find_pattern_in_str,
- load_shell_config,
+ load_yaml_config,
)
@@ -403,7 +403,7 @@ def parse_args(argv):
if __name__ == "__main__":
args = parse_args(sys.argv[1:])
- cfg = load_shell_config(args.path_to_defns)
+ cfg = load_yaml_config(args.path_to_defns)
link_fix(
verbose=cfg["workflow"]["VERBOSE"],
file_group=args.file_group,
diff --git a/ush/load_modules_run_task.sh b/ush/load_modules_run_task.sh
index 89f3addf41..5ede278bfd 100755
--- a/ush/load_modules_run_task.sh
+++ b/ush/load_modules_run_task.sh
@@ -3,33 +3,43 @@
#
#-----------------------------------------------------------------------
#
-# Source necessary files.
+# This script loads the appropriate modules for a given task in an
+# experiment.
#
-#-----------------------------------------------------------------------
+# It requires the following global environment variables:
#
-. ${GLOBAL_VAR_DEFNS_FP}
-. $USHdir/source_util_funcs.sh
+# GLOBAL_VAR_DEFNS_FP
#
-#-----------------------------------------------------------------------
+# And uses these variables from the GLOBAL_VAR_DEFNS_FP file
#
-# Save current shell options (in a global array). Then set new options
-# for this script/function.
+# platform:
+# BUILD_MOD_FN
+# RUN_VER_FN
+#
+# workflow:
+# VERBOSE
#
#-----------------------------------------------------------------------
#
-{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1
+
+# Get the location of this file -- it's the USHdir
+scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" )
+scrfunc_fn=$( basename "${scrfunc_fp}" )
+USHdir=$( dirname "${scrfunc_fp}" )
+HOMEdir=$( dirname $USHdir )
+
+source $USHdir/source_util_funcs.sh
+
#
#-----------------------------------------------------------------------
#
-# Get the full path to the file in which this script/function is located
-# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in
-# which the file is located (scrfunc_dir).
+# Save current shell options (in a global array). Then set new options
+# for this script/function.
#
#-----------------------------------------------------------------------
#
-scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" )
-scrfunc_fn=$( basename "${scrfunc_fp}" )
-scrfunc_dir=$( dirname "${scrfunc_fp}" )
+{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1
+
#
#-----------------------------------------------------------------------
#
@@ -37,7 +47,7 @@ scrfunc_dir=$( dirname "${scrfunc_fp}" )
#
#-----------------------------------------------------------------------
#
-if [ "$#" -ne 2 ]; then
+if [ "$#" -ne 3 ]; then
print_err_msg_exit "
Incorrect number of arguments specified:
@@ -46,15 +56,17 @@ Incorrect number of arguments specified:
Usage:
- ${scrfunc_fn} task_name jjob_fp
+ ${scrfunc_fn} machine task_name jjob_fp
where the arguments are defined as follows:
+ machine: The name of the supported platform
+
task_name:
The name of the rocoto task for which this script will load modules
and launch the J-job.
- jjob_fp
+ jjob_fp:
The full path to the J-job script corresponding to task_name. This
script will launch this J-job using the \"exec\" command (which will
first terminate this script and then launch the j-job; see man page of
@@ -65,12 +77,13 @@ fi
#
#-----------------------------------------------------------------------
#
-# Get the task name and the name of the J-job script.
+# Save arguments
#
#-----------------------------------------------------------------------
#
-task_name="$1"
-jjob_fp="$2"
+machine=$(echo_lowercase $1)
+task_name="$2"
+jjob_fp="$3"
#
#-----------------------------------------------------------------------
#
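
[Editor's note] With the machine name promoted to a positional argument, a rocoto task is assumed to invoke the script roughly as follows (paths hypothetical):

    ush/load_modules_run_task.sh hera run_fcst "${HOMEdir}/jobs/JREGIONAL_RUN_FCST"
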
@@ -99,12 +112,38 @@ set -u
#-----------------------------------------------------------------------
#
default_modules_dir="$HOMEdir/modulefiles"
-machine=$(echo_lowercase $MACHINE)
-if [ "${WORKFLOW_MANAGER}" != "ecflow" ]; then
+module is-loaded ecflow > /dev/null 2>&1 && ecflow_loaded=true || ecflow_loaded=false
+
+if [ "$ecflow_loaded" = "false" ] ; then
source "${HOMEdir}/etc/lmod-setup.sh" ${machine}
fi
module use "${default_modules_dir}"
+# Load workflow environment
+
+if [ -f ${default_modules_dir}/python_srw.lua ] ; then
+ module load python_srw || print_err_msg_exit "\
+ Loading SRW common python module failed. Expected python_srw.lua
+ in the modules directory here:
+ modules_dir = \"${default_modules_dir}\""
+fi
+
+# Modules that use conda and need an environment activated will set the
+# SRW_ENV variable to the name of the environment to be activated. That
+# must be done within the script, and not inside the module. Do that
+# now.
+if [ -n "${SRW_ENV:-}" ] ; then
+ set +u
+ conda deactivate
+ conda activate ${SRW_ENV}
+ set -u
+fi
+
+# Source the necessary blocks of the experiment config YAML
+for sect in platform workflow ; do
+ source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
+
if [ "${machine}" != "wcoss2" ]; then
module load "${BUILD_MOD_FN}" || print_err_msg_exit "\
Loading of platform- and compiler-specific module file (BUILD_MOD_FN)
@@ -116,26 +155,15 @@ fi
#
#-----------------------------------------------------------------------
#
-# Set the directory (modules_dir) in which the module files for the va-
-# rious workflow tasks are located. Also, set the name of the module
-# file for the specified task.
-#
-# A module file is a file whose first line is the "magic cookie" string
-# '#%Module'. It is interpreted by the "module load ..." command. It
-# sets environment variables (including prepending/appending to paths)
-# and loads modules.
-#
-# The UFS SRW App repository contains module files for the
-# workflow tasks in the template rocoto XML file for the FV3-LAM work-
-# flow that need modules not loaded in the BUILD_MOD_FN above.
+# Set the directory for the modulefiles included with SRW and the
+# specific module for the requested task.
#
# The full path to a module file for a given task is
#
# $HOMEdir/modulefiles/$machine/${task_name}.local
#
-# where HOMEdir is the base directory of the workflow, machine is the
-# name of the machine that we're running on (in lowercase), and task_-
-# name is the name of the current task (an input to this script).
+# where HOMEdir is the SRW clone, machine is the name of the platform
+# being used, and task_name is the current task to run.
#
#-----------------------------------------------------------------------
#
@@ -154,10 +182,10 @@ Loading modules for task \"${task_name}\" ..."
module use "${modules_dir}" || print_err_msg_exit "\
Call to \"module use\" command failed."
-# source version file (run) only if it is specified in versions directory
-VERSION_FILE="${HOMEdir}/versions/${RUN_VER_FN}"
-if [ -f ${VERSION_FILE} ]; then
- . ${VERSION_FILE}
+# source version file only if it exists in the versions directory
+version_file="${HOMEdir}/versions/${RUN_VER_FN}"
+if [ -f ${version_file} ]; then
+ source ${version_file}
fi
#
# Load the .local module file if available for the given task
@@ -170,20 +198,11 @@ specified task (task_name) failed:
task_name = \"${task_name}\"
modulefile_local = \"${modulefile_local}\"
modules_dir = \"${modules_dir}\""
-elif [ -f ${default_modules_dir}/python_srw.lua ] ; then
- module load python_srw || print_err_msg_exit "\
- Loading SRW common python module failed. Expected python_srw.lua
- in the modules directory here:
- modules_dir = \"${default_modules_dir}\""
fi
-
module list
-# Modules that use conda and need an environment activated will set the
-# SRW_ENV variable to the name of the environment to be activated. That
-# must be done within the script, and not inside the module. Do that
-# now.
-
+# Reactivate the workflow environment to ensure its Python is found
+# first in the path.
if [ -n "${SRW_ENV:-}" ] ; then
set +u
conda deactivate
@@ -204,11 +223,7 @@ Launching J-job (jjob_fp) for task \"${task_name}\" ...
jjob_fp = \"${jjob_fp}\"
"
-if [ "${WORKFLOW_MANAGER}" = "ecflow" ]; then
- /bin/bash "${jjob_fp}"
-else
- exec "${jjob_fp}"
-fi
+source "${jjob_fp}"
#
#-----------------------------------------------------------------------
diff --git a/ush/machine/hera.yaml b/ush/machine/hera.yaml
index 4d836af317..80fbb8fc98 100644
--- a/ush/machine/hera.yaml
+++ b/ush/machine/hera.yaml
@@ -20,8 +20,8 @@ platform:
RUN_CMD_UTILS: srun --export=ALL
RUN_CMD_NEXUS: srun -n ${nprocs} --export=ALL
RUN_CMD_AQMLBC: srun --export=ALL -n ${numts}
- SCHED_NATIVE_CMD: --export=NONE
- SCHED_NATIVE_CMD_HPSS: -n 1 --export=NONE
+ SCHED_NATIVE_CMD: "--export=NONE"
+ SCHED_NATIVE_CMD_HPSS: "-n 1 --export=NONE"
PRE_TASK_CMDS: '{ ulimit -s unlimited; ulimit -a; }'
TEST_EXTRN_MDL_SOURCE_BASEDIR: /scratch1/NCEPDEV/nems/role.epic/UFS_SRW_data/develop/input_model_data
TEST_AQM_INPUT_BASEDIR: /scratch1/NCEPDEV/nems/role.epic/UFS_SRW_data/develop/aqm_data
diff --git a/ush/set_fv3nml_ens_stoch_seeds.py b/ush/set_fv3nml_ens_stoch_seeds.py
index 3459fa8707..0b9b186210 100644
--- a/ush/set_fv3nml_ens_stoch_seeds.py
+++ b/ush/set_fv3nml_ens_stoch_seeds.py
@@ -10,12 +10,12 @@
import sys
from textwrap import dedent
-from uwtools.api.config import realize
+from uwtools.api.config import get_nml_config, realize
from python_utils import (
cfg_to_yaml_str,
import_vars,
- load_shell_config,
+ load_yaml_config,
print_input_args,
print_info_msg,
)
@@ -112,7 +112,7 @@ def set_fv3nml_ens_stoch_seeds(cdate, expt_config):
input_format="nml",
output_file=fv3_nml_ensmem_fp,
output_format="nml",
- supplemental_configs=[settings],
+ update_config=get_nml_config(settings),
)
def parse_args(argv):
@@ -142,5 +142,5 @@ def parse_args(argv):
if __name__ == "__main__":
args = parse_args(sys.argv[1:])
- cfg = load_shell_config(args.path_to_defns)
+ cfg = load_yaml_config(args.path_to_defns)
set_fv3nml_ens_stoch_seeds(args.cdate, cfg)
diff --git a/ush/set_fv3nml_sfc_climo_filenames.py b/ush/set_fv3nml_sfc_climo_filenames.py
index 417aa0b5ee..7251a5b0e6 100644
--- a/ush/set_fv3nml_sfc_climo_filenames.py
+++ b/ush/set_fv3nml_sfc_climo_filenames.py
@@ -10,14 +10,14 @@
import sys
from textwrap import dedent
-from uwtools.api.config import get_yaml_config, realize
+from uwtools.api.config import get_nml_config, get_yaml_config, realize
from python_utils import (
cfg_to_yaml_str,
check_var_valid_value,
flatten_dict,
import_vars,
- load_shell_config,
+ load_yaml_config,
print_info_msg,
)
@@ -105,7 +105,7 @@ def set_fv3nml_sfc_climo_filenames(config, debug=False):
input_format="nml",
output_file=FV3_NML_FP,
output_format="nml",
- supplemental_configs=[settings],
+ update_config=get_nml_config(settings),
)
def parse_args(argv):
@@ -127,6 +127,6 @@ def parse_args(argv):
if __name__ == "__main__":
args = parse_args(sys.argv[1:])
- cfg = load_shell_config(args.path_to_defns)
+ cfg = load_yaml_config(args.path_to_defns)
cfg = flatten_dict(cfg)
set_fv3nml_sfc_climo_filenames(cfg, args.debug)
diff --git a/ush/setup.py b/ush/setup.py
index 0511653fa2..51d5b2a084 100644
--- a/ush/setup.py
+++ b/ush/setup.py
@@ -10,10 +10,12 @@
from textwrap import dedent
import yaml
+from uwtools.api.config import get_yaml_config
from python_utils import (
log_info,
cd_vrfy,
+ date_to_str,
mkdir_vrfy,
rm_vrfy,
check_var_valid_value,
@@ -1499,10 +1501,13 @@ def dict_find(user_dict, substring):
yaml.Dumper.ignore_aliases = lambda *args : True
yaml.dump(expt_config.get("rocoto"), f, sort_keys=False)
- var_defns_cfg = copy.deepcopy(expt_config)
+ var_defns_cfg = get_yaml_config(config=expt_config)
del var_defns_cfg["rocoto"]
- with open(global_var_defns_fp, "a") as f:
- f.write(cfg_to_shell_str(var_defns_cfg))
+
+    # Fix up a couple of data types:
+ for dates in ("DATE_FIRST_CYCL", "DATE_LAST_CYCL"):
+ var_defns_cfg["workflow"][dates] = date_to_str(var_defns_cfg["workflow"][dates])
+ var_defns_cfg.dump(global_var_defns_fp)
#
diff --git a/ush/source_util_funcs.sh b/ush/source_util_funcs.sh
index 7fe3025d6a..9feceaf68e 100644
--- a/ush/source_util_funcs.sh
+++ b/ush/source_util_funcs.sh
@@ -220,15 +220,15 @@ function source_util_funcs() {
#-----------------------------------------------------------------------
#
. ${bashutils_dir}/eval_METplus_timestr_tmpl.sh
+
#
#-----------------------------------------------------------------------
#
-# Source the file containing the function that sources config files.
+# Source the file defining the function that sources YAML sections into bash.
#
#-----------------------------------------------------------------------
#
- . ${bashutils_dir}/source_config.sh
-
+ . ${bashutils_dir}/source_yaml.sh
}
source_util_funcs
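The source_yaml implementation itself is not shown in this patch, but conceptually it flattens one top-level section of var_defns.yaml into shell variables, as the wrapper loops below illustrate. A hedged Python sketch of that idea, using PyYAML rather than the actual implementation:

import shlex
import yaml

def yaml_section_to_exports(path, section):
    """Render one top-level YAML section as 'export KEY=VALUE' shell lines."""
    with open(path) as f:
        cfg = yaml.safe_load(f)
    return "\n".join(
        f"export {key}={shlex.quote(str(value))}"
        for key, value in cfg.get(section, {}).items()
    )

# e.g. consume the output in bash with: eval "$(python this_sketch.py)"
print(yaml_section_to_exports("var_defns.yaml", "workflow"))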
diff --git a/ush/update_input_nml.py b/ush/update_input_nml.py
index e975d9bc08..b85bbacd4a 100644
--- a/ush/update_input_nml.py
+++ b/ush/update_input_nml.py
@@ -9,7 +9,7 @@
import sys
from textwrap import dedent
-from uwtools.api.config import realize
+from uwtools.api.config import get_nml_config, realize
from python_utils import (
print_input_args,
@@ -77,7 +77,7 @@ def update_input_nml(namelist, restart, aqm_na_13km):
input_format="nml",
output_file=namelist,
output_format="nml",
- supplemental_configs=[settings],
+ update_config=get_nml_config(settings),
)
def parse_args(argv):
diff --git a/ush/wrappers/run_fcst.sh b/ush/wrappers/run_fcst.sh
index 7450de7cc5..c875cb16c0 100755
--- a/ush/wrappers/run_fcst.sh
+++ b/ush/wrappers/run_fcst.sh
@@ -1,7 +1,10 @@
#!/usr/bin/env bash
-export GLOBAL_VAR_DEFNS_FP="${EXPTDIR}/var_defns.sh"
+export GLOBAL_VAR_DEFNS_FP="${EXPTDIR}/var_defns.yaml"
+. $USHdir/source_util_funcs.sh
+for sect in workflow ; do
+ source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
set -xa
-source ${GLOBAL_VAR_DEFNS_FP}
export CDATE=${DATE_FIRST_CYCL}
export CYCLE_DIR=${EXPTDIR}/${CDATE}
export cyc=${DATE_FIRST_CYCL:8:2}
diff --git a/ush/wrappers/run_get_ics.sh b/ush/wrappers/run_get_ics.sh
index 0ee521a67d..494eab6850 100755
--- a/ush/wrappers/run_get_ics.sh
+++ b/ush/wrappers/run_get_ics.sh
@@ -1,7 +1,10 @@
#!/usr/bin/env bash
-export GLOBAL_VAR_DEFNS_FP="${EXPTDIR}/var_defns.sh"
+export GLOBAL_VAR_DEFNS_FP="${EXPTDIR}/var_defns.yaml"
+. $USHdir/source_util_funcs.sh
+for sect in workflow task_get_extrn_ics ; do
+ source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
set -xa
-source ${GLOBAL_VAR_DEFNS_FP}
export CDATE=${DATE_FIRST_CYCL}
export CYCLE_DIR=${EXPTDIR}/${CDATE}
export cyc=${DATE_FIRST_CYCL:8:2}
diff --git a/ush/wrappers/run_get_lbcs.sh b/ush/wrappers/run_get_lbcs.sh
index 543ab6e47d..ec6fa23892 100755
--- a/ush/wrappers/run_get_lbcs.sh
+++ b/ush/wrappers/run_get_lbcs.sh
@@ -1,7 +1,10 @@
#!/usr/bin/env bash
-export GLOBAL_VAR_DEFNS_FP="${EXPTDIR}/var_defns.sh"
+export GLOBAL_VAR_DEFNS_FP="${EXPTDIR}/var_defns.yaml"
+. $USHdir/source_util_funcs.sh
+for sect in workflow task_get_extrn_lbcs ; do
+ source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
set -xa
-source ${GLOBAL_VAR_DEFNS_FP}
export CDATE=${DATE_FIRST_CYCL}
export CYCLE_DIR=${EXPTDIR}/${CDATE}
export cyc=${DATE_FIRST_CYCL:8:2}
diff --git a/ush/wrappers/run_make_grid.sh b/ush/wrappers/run_make_grid.sh
index 2d55beaf94..f7a6f8aeed 100755
--- a/ush/wrappers/run_make_grid.sh
+++ b/ush/wrappers/run_make_grid.sh
@@ -1,7 +1,10 @@
#!/usr/bin/env bash
-export GLOBAL_VAR_DEFNS_FP="${EXPTDIR}/var_defns.sh"
+export GLOBAL_VAR_DEFNS_FP="${EXPTDIR}/var_defns.yaml"
+. $USHdir/source_util_funcs.sh
+for sect in workflow ; do
+ source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
set -xa
-source ${GLOBAL_VAR_DEFNS_FP}
export CDATE=${DATE_FIRST_CYCL}
export CYCLE_DIR=${EXPTDIR}/${CDATE}
diff --git a/ush/wrappers/run_make_ics.sh b/ush/wrappers/run_make_ics.sh
index 5c629722fc..adcdc16180 100755
--- a/ush/wrappers/run_make_ics.sh
+++ b/ush/wrappers/run_make_ics.sh
@@ -1,7 +1,10 @@
#!/usr/bin/env bash
-export GLOBAL_VAR_DEFNS_FP="${EXPTDIR}/var_defns.sh"
+export GLOBAL_VAR_DEFNS_FP="${EXPTDIR}/var_defns.yaml"
+. $USHdir/source_util_funcs.sh
+for sect in workflow ; do
+ source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
set -xa
-source ${GLOBAL_VAR_DEFNS_FP}
export CDATE=${DATE_FIRST_CYCL}
export CYCLE_DIR=${EXPTDIR}/${CDATE}
export cyc=${DATE_FIRST_CYCL:8:2}
diff --git a/ush/wrappers/run_make_lbcs.sh b/ush/wrappers/run_make_lbcs.sh
index 27c94c127f..f9fe35d9da 100755
--- a/ush/wrappers/run_make_lbcs.sh
+++ b/ush/wrappers/run_make_lbcs.sh
@@ -1,7 +1,10 @@
#!/usr/bin/env bash
-export GLOBAL_VAR_DEFNS_FP="${EXPTDIR}/var_defns.sh"
+export GLOBAL_VAR_DEFNS_FP="${EXPTDIR}/var_defns.yaml"
+. $USHdir/source_util_funcs.sh
+for sect in workflow ; do
+ source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
set -xa
-source ${GLOBAL_VAR_DEFNS_FP}
export CDATE=${DATE_FIRST_CYCL}
export CYCLE_DIR=${EXPTDIR}/${CDATE}
export cyc=${DATE_FIRST_CYCL:8:2}
diff --git a/ush/wrappers/run_make_orog.sh b/ush/wrappers/run_make_orog.sh
index 5f02ff9599..ebc5259ec1 100755
--- a/ush/wrappers/run_make_orog.sh
+++ b/ush/wrappers/run_make_orog.sh
@@ -1,7 +1,10 @@
#!/usr/bin/env bash
-export GLOBAL_VAR_DEFNS_FP="${EXPTDIR}/var_defns.sh"
+export GLOBAL_VAR_DEFNS_FP="${EXPTDIR}/var_defns.yaml"
+. $USHdir/source_util_funcs.sh
+for sect in workflow ; do
+ source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
set -xa
-source ${GLOBAL_VAR_DEFNS_FP}
export CDATE=${DATE_FIRST_CYCL}
export CYCLE_DIR=${EXPTDIR}/${CDATE}
diff --git a/ush/wrappers/run_make_sfc_climo.sh b/ush/wrappers/run_make_sfc_climo.sh
index fab33f75d6..8024f529fc 100755
--- a/ush/wrappers/run_make_sfc_climo.sh
+++ b/ush/wrappers/run_make_sfc_climo.sh
@@ -1,7 +1,10 @@
#!/usr/bin/env bash
-export GLOBAL_VAR_DEFNS_FP="${EXPTDIR}/var_defns.sh"
+export GLOBAL_VAR_DEFNS_FP="${EXPTDIR}/var_defns.yaml"
+. $USHdir/source_util_funcs.sh
+for sect in workflow ; do
+ source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
set -xa
-source ${GLOBAL_VAR_DEFNS_FP}
export CDATE=${DATE_FIRST_CYCL}
export CYCLE_DIR=${EXPTDIR}/${CDATE}
diff --git a/ush/wrappers/run_post.sh b/ush/wrappers/run_post.sh
index 46ef104365..ca060acb1f 100755
--- a/ush/wrappers/run_post.sh
+++ b/ush/wrappers/run_post.sh
@@ -1,7 +1,10 @@
#!/usr/bin/env bash
-export GLOBAL_VAR_DEFNS_FP="${EXPTDIR}/var_defns.sh"
+export GLOBAL_VAR_DEFNS_FP="${EXPTDIR}/var_defns.yaml"
+. $USHdir/source_util_funcs.sh
+for sect in workflow ; do
+ source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
set -xa
-source ${GLOBAL_VAR_DEFNS_FP}
export CDATE=${DATE_FIRST_CYCL}
export CYCLE_DIR=${EXPTDIR}/${CDATE}
export cyc=${DATE_FIRST_CYCL:8:2}
From 0a933381aa2159cb073e382b113b3e506b518a05 Mon Sep 17 00:00:00 2001
From: EdwardSnyder-NOAA <96196752+EdwardSnyder-NOAA@users.noreply.github.com>
Date: Tue, 30 Jul 2024 09:15:29 -0500
Subject: [PATCH 24/39] [develop] Fixes for PW Jenkins Nightly Builds (#1091)
* Adds logic to deactivate GCP's default conda environment, which conflicts with the SRW App's conda environment. Fixes a Parallel Works naming-convention bug in the srw_metric.sh script.
* It also addresses a known issue with a Ruby warning on PW instances that prevents run_WE2E_tests.py from exiting gracefully. The solution we use in our bootstrap for /contrib does not seem to work for the /lustre directory, which is why the warning is hardcoded into the monitor_jobs.py script.
* The new spack-stack build on Azure is missing a GNU library, so the path to this missing library was added to the relevant run scripts, and the wflow noaacloud Lua file was cleaned up.
* Removed the log and error file directives from the qsub wrapper script so that qsub can generate these files with the job ID in the file names. Also fixed a typo in the wrapper script.
---
.cicd/Jenkinsfile | 6 +++---
.cicd/scripts/qsub_srw_ftest.sh | 2 --
.cicd/scripts/srw_ftest.sh | 5 +++++
.cicd/scripts/srw_metric.sh | 12 +++++++-----
.cicd/scripts/wrapper_srw_ftest.sh | 2 +-
tests/WE2E/utils.py | 2 +-
6 files changed, 17 insertions(+), 12 deletions(-)
diff --git a/.cicd/Jenkinsfile b/.cicd/Jenkinsfile
index 1c92a1bd65..030661bd27 100644
--- a/.cicd/Jenkinsfile
+++ b/.cicd/Jenkinsfile
@@ -235,8 +235,6 @@ pipeline {
sh "SRW_WE2E_COMPREHENSIVE_TESTS=${run_we2e_comprehensive_tests} SRW_WE2E_SINGLE_TEST=${single_test}" + ' bash --login "${WORKSPACE}/${SRW_PLATFORM}/.cicd/scripts/srw_test.sh"'
- // Archive the test log files
- sh "[[ -d ${SRW_WE2E_EXPERIMENT_BASE_DIR} ]] && cd ${SRW_WE2E_EXPERIMENT_BASE_DIR} && tar --create --gzip --verbose --dereference --file ${WORKSPACE}/${SRW_PLATFORM}/we2e_test_logs-${SRW_PLATFORM}-${SRW_COMPILER}.tgz */log.generate_FV3LAM_wflow */log/* ${WORKSPACE}/${SRW_PLATFORM}/tests/WE2E/WE2E_tests_*yaml WE2E_summary*txt ${WORKSPACE}/${SRW_PLATFORM}/tests/WE2E/log.* || cat /dev/null > ${WORKSPACE}/${SRW_PLATFORM}/we2e_test_logs-${SRW_PLATFORM}-${SRW_COMPILER}.tgz"
}
sh "STAGE_NAME=${env.STAGE_NAME} " + 'bash --login "${WORKSPACE}/${SRW_PLATFORM}/.cicd/scripts/disk_usage.sh"'
}
@@ -244,10 +242,12 @@ pipeline {
post {
success {
- s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.SRW_PLATFORM}/*_test_results-*-*.txt", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false], [bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.SRW_PLATFORM}/we2e_test_logs-${env.SRW_PLATFORM}-${env.SRW_COMPILER}.tgz", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: []
s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.SRW_PLATFORM}/*-skill-score.txt", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: []
}
always {
+ // Archive the test log files
+ sh "[[ -d ${SRW_WE2E_EXPERIMENT_BASE_DIR} ]] && cd ${SRW_WE2E_EXPERIMENT_BASE_DIR} && tar --create --gzip --verbose --dereference --file ${env.WORKSPACE}/${env.SRW_PLATFORM}/we2e_test_logs-${env.SRW_PLATFORM}-${env.SRW_COMPILER}.tgz */log.generate_FV3LAM_wflow */log/* ${env.WORKSPACE}/${env.SRW_PLATFORM}/tests/WE2E/WE2E_tests_*yaml WE2E_summary*txt ${env.WORKSPACE}/${env.SRW_PLATFORM}/tests/WE2E/log.* || cat /dev/null > ${env.WORKSPACE}/${env.SRW_PLATFORM}/we2e_test_logs-${env.SRW_PLATFORM}-${env.SRW_COMPILER}.tgz"
+ s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.SRW_PLATFORM}/*_test_results-*-*.txt", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false], [bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.SRW_PLATFORM}/we2e_test_logs-${env.SRW_PLATFORM}-${env.SRW_COMPILER}.tgz", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: []
s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.SRW_PLATFORM}-*-time-srw_test.json", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: []
s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.SRW_PLATFORM}-*-disk-usage${env.STAGE_NAME}.csv", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: []
// Remove the data sets from the experiments directory to conserve disk space
diff --git a/.cicd/scripts/qsub_srw_ftest.sh b/.cicd/scripts/qsub_srw_ftest.sh
index e9f0170a05..8b2569ca69 100644
--- a/.cicd/scripts/qsub_srw_ftest.sh
+++ b/.cicd/scripts/qsub_srw_ftest.sh
@@ -9,7 +9,5 @@
#PBS -l select=1:ncpus=24:mpiprocs=24:ompthreads=1
#PBS -l walltime=00:30:00
#PBS -V
-#PBS -o log_wrap.%j.log
-#PBS -e err_wrap.%j.err
bash ${WORKSPACE}/${SRW_PLATFORM}/.cicd/scripts/srw_ftest.sh
diff --git a/.cicd/scripts/srw_ftest.sh b/.cicd/scripts/srw_ftest.sh
index b77ee767f3..95530a89aa 100755
--- a/.cicd/scripts/srw_ftest.sh
+++ b/.cicd/scripts/srw_ftest.sh
@@ -66,6 +66,9 @@ sed "s|^workflow:|workflow:\n EXEC_SUBDIR: ${workspace}/install_${SRW_COMPILER}
# Decrease forecast length since we are running all the steps
sed "s|^ FCST_LEN_HRS: 12| FCST_LEN_HRS: 6|g" -i ush/config.yaml
+# Update compiler
+sed "s|^ COMPILER: intel| COMPILER: ${SRW_COMPILER}|g" -i ush/config.yaml
+
# DATA_LOCATION differs on each platform ... find it.
export DATA_LOCATION=$(grep TEST_EXTRN_MDL_SOURCE_BASEDIR ${workspace}/ush/machine/${platform,,}.yaml | awk '{printf "%s", $2}')
echo "DATA_LOCATION=${DATA_LOCATION}"
@@ -85,6 +88,8 @@ source etc/lmod-setup.sh ${platform,,}
module use modulefiles
module load build_${platform,,}_${SRW_COMPILER}
module load wflow_${platform,,}
+# Deactivate conflicting conda env on GCP
+[[ "${SRW_PLATFORM}" =~ "gclusternoaa" ]] && conda deactivate
[[ ${FORGIVE_CONDA} == true ]] && set +e +u # Some platforms have incomplete python3 or conda support, but wouldn't necessarily block workflow tests
conda activate srw_app
diff --git a/.cicd/scripts/srw_metric.sh b/.cicd/scripts/srw_metric.sh
index e645a2c916..aec28c2253 100755
--- a/.cicd/scripts/srw_metric.sh
+++ b/.cicd/scripts/srw_metric.sh
@@ -78,6 +78,8 @@ cd ${workspace}
# Activate workflow environment
module load wflow_${platform,,}
+# Deactivate conflicting conda env on GCP
+[[ "${SRW_PLATFORM}" =~ "gclusternoaa" ]] && conda deactivate
[[ ${FORGIVE_CONDA} == true ]] && set +e +u # Some platforms have incomplete python3 or conda support, but would not necessarily block workflow tests
conda activate srw_app
@@ -98,7 +100,7 @@ if [[ ${RUN_STAT_ANLY_OPT} == true ]]; then
# Clear out data
rm -rf ${workspace}/Indy-Severe-Weather/
# Check if metprd data exists locally otherwise get it from S3
- TEST_EXTRN_MDL_SOURCE_BASEDIR=$(grep TEST_EXTRN_MDL_SOURCE_BASEDIR ${workspace}/ush/machine/${SRW_PLATFORM}.yaml | awk '{print $NF}')
+ TEST_EXTRN_MDL_SOURCE_BASEDIR=$(grep TEST_EXTRN_MDL_SOURCE_BASEDIR ${workspace}/ush/machine/${platform}.yaml | awk '{print $NF}')
if [[ -d $(dirname ${TEST_EXTRN_MDL_SOURCE_BASEDIR})/metprd/point_stat ]] ; then
mkdir -p Indy-Severe-Weather/metprd/point_stat
cp -rp $(dirname ${TEST_EXTRN_MDL_SOURCE_BASEDIR})/metprd/point_stat Indy-Severe-Weather/metprd
@@ -108,7 +110,7 @@ if [[ ${RUN_STAT_ANLY_OPT} == true ]]; then
wget https://noaa-ufs-srw-pds.s3.amazonaws.com/sample_cases/release-public-v2.1.0/Indy-Severe-Weather.tgz
tar xvfz Indy-Severe-Weather.tgz
fi
- [[ -f ${platform,,}-${srw_compiler}-skill-score.txt ]] && rm ${platform,,}-${srw_compiler}-skill-score.txt
+ [[ -f ${SRW_PLATFORM,,}-${srw_compiler}-skill-score.txt ]] && rm ${SRW_PLATFORM,,}-${srw_compiler}-skill-score.txt
# Skill score index is computed over several terms that are defined in parm/metplus/STATAnalysisConfig_skill_score.
# It is computed by aggregating the output from earlier runs of the Point-Stat and/or Grid-Stat tools over one or more cases.
# In this example, skill score index is a weighted average of 4 skill scores of RMSE statistics for wind speed, dew point temperature,
@@ -126,15 +128,15 @@ if [[ ${RUN_STAT_ANLY_OPT} == true ]]; then
sed -i 's|--load("conda")|load("conda")|g' ${workspace}/modulefiles/tasks/${platform,,}/run_vx.local.lua
fi
# Run stat_analysis
- stat_analysis -config parm/metplus/STATAnalysisConfig_skill_score -lookin ${workspace}/Indy-Severe-Weather/metprd/point_stat -v 2 -out ${platform,,}-${srw_compiler}-skill-score.txt
+ stat_analysis -config parm/metplus/STATAnalysisConfig_skill_score -lookin ${workspace}/Indy-Severe-Weather/metprd/point_stat -v 2 -out ${SRW_PLATFORM,,}-${srw_compiler}-skill-score.txt
# check skill-score.txt
- cat ${platform,,}-${srw_compiler}-skill-score.txt
+ cat ${SRW_PLATFORM,,}-${srw_compiler}-skill-score.txt
# get skill-score (SS_INDEX) and check if it is significantly smaller than 1.0
# A value greater than 1.0 indicates that the forecast model outperforms the reference,
# while a value less than 1.0 indicates that the reference outperforms the forecast.
- tmp_string=$( tail -2 ${platform,,}-${srw_compiler}-skill-score.txt | head -1 )
+ tmp_string=$( tail -2 ${SRW_PLATFORM,,}-${srw_compiler}-skill-score.txt | head -1 )
SS_INDEX=$(echo $tmp_string | awk -F " " '{print $NF}')
echo "Skill Score: ${SS_INDEX}"
if [[ ${SS_INDEX} < "0.700" ]]; then
diff --git a/.cicd/scripts/wrapper_srw_ftest.sh b/.cicd/scripts/wrapper_srw_ftest.sh
index ee26edadaf..33fd966efa 100755
--- a/.cicd/scripts/wrapper_srw_ftest.sh
+++ b/.cicd/scripts/wrapper_srw_ftest.sh
@@ -67,7 +67,7 @@ do
# Return exit code and check for results file first
results_file="${WORKSPACE}/${SRW_PLATFORM}/functional_test_results_${SRW_PLATFORM}_${SRW_COMPILER}.txt"
if [ ! -f "$results_file" ]; then
- echo "Missing results file! \nexit 1"
+ echo -e "Missing results file! \nexit 1"
exit 1
fi
diff --git a/tests/WE2E/utils.py b/tests/WE2E/utils.py
index eb3c49fcba..0e6629ad17 100755
--- a/tests/WE2E/utils.py
+++ b/tests/WE2E/utils.py
@@ -530,7 +530,7 @@ def compare_rocotostat(expt_dict,name):
continue
line_array = line.split()
# Skip header lines
- if line_array[0] == 'CYCLE':
+ if line_array[0] == 'CYCLE' or line_array[0] == '/apps/rocoto/1.3.3/lib/workflowmgr/launchserver.rb:40:':
continue
# We should now just have lines describing jobs, in the form:
# line_array = ['cycle','task','jobid','status','exit status','num tries','walltime']
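A minimal sketch of the filtering above: both the rocotostat header line and the hardcoded Ruby launchserver warning (emitted on Parallel Works /lustre instances) are skipped before job lines are parsed:

SKIP_TOKENS = {
    "CYCLE",
    "/apps/rocoto/1.3.3/lib/workflowmgr/launchserver.rb:40:",
}

def job_lines(rocotostat_output):
    """Yield the whitespace-split fields of lines that describe jobs."""
    for line in rocotostat_output.splitlines():
        fields = line.split()
        if not fields or fields[0] in SKIP_TOKENS:
            continue
        # fields = ['cycle','task','jobid','status','exit status','num tries','walltime']
        yield fields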
From dc2310cdd1b37f74b4978c8320b9832c1887b708 Mon Sep 17 00:00:00 2001
From: EdwardSnyder-NOAA <96196752+EdwardSnyder-NOAA@users.noreply.github.com>
Date: Thu, 1 Aug 2024 11:38:19 -0500
Subject: [PATCH 25/39] [develop] S3 doc updates (#1104)
As part of the data governance initiative, all S3 buckets will need some form of version control. To meet this need, the AWS S3 bucket was reorganized: develop data is now stored under a dated 'develop-date' folder, and the verification sample case and the documentation case (current_release_data) were moved under a new folder called 'experiment-user-cases'.
---------
Co-authored-by: Michael Lueken <63728921+MichaelLueken@users.noreply.github.com>
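Illustrative only: a small sketch of how paths map onto the reorganized bucket layout described above. The folder names mirror the URLs changed in the diffs below; the helper names are hypothetical.

BUCKET = "https://noaa-ufs-srw-pds.s3.amazonaws.com"

def develop_url(date, relpath):
    # develop data now lives under a dated folder, e.g. develop-20240618
    return f"{BUCKET}/develop-{date}/{relpath}"

def user_case_url(release, case, filename):
    # sample cases now live under experiment-user-cases/<release>/<case>/
    return f"{BUCKET}/experiment-user-cases/{release}/{case}/{filename}"

print(develop_url("20240618", "fix/fix_aer/merra2.aerclim.2003-2014.m01.nc"))
print(user_case_url("release-public-v2.1.0", "METplus-vx-sample", "Indy-Severe-Weather.tgz"))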
---
.cicd/scripts/srw_metric.sh | 2 +-
doc/UsersGuide/BuildingRunningTesting/AQM.rst | 2 +-
.../ContainerQuickstart.rst | 6 +-
.../BuildingRunningTesting/RunSRW.rst | 2 +-
.../BuildingRunningTesting/Tutorial.rst | 6 +-
.../BuildingRunningTesting/VXCases.rst | 8 +-
.../InputOutputFiles.rst | 10 +-
doc/tables/fix_file_list.rst | 1514 ++++++++---------
8 files changed, 775 insertions(+), 775 deletions(-)
diff --git a/.cicd/scripts/srw_metric.sh b/.cicd/scripts/srw_metric.sh
index aec28c2253..8f6eed85b0 100755
--- a/.cicd/scripts/srw_metric.sh
+++ b/.cicd/scripts/srw_metric.sh
@@ -107,7 +107,7 @@ if [[ ${RUN_STAT_ANLY_OPT} == true ]]; then
elif [[ -f Indy-Severe-Weather.tgz ]]; then
tar xvfz Indy-Severe-Weather.tgz
else
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/sample_cases/release-public-v2.1.0/Indy-Severe-Weather.tgz
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/experiment-user-cases/release-public-v2.1.0/METplus-vx-sample/Indy-Severe-Weather.tgz
tar xvfz Indy-Severe-Weather.tgz
fi
[[ -f ${SRW_PLATFORM,,}-${srw_compiler}-skill-score.txt ]] && rm ${SRW_PLATFORM,,}-${srw_compiler}-skill-score.txt
diff --git a/doc/UsersGuide/BuildingRunningTesting/AQM.rst b/doc/UsersGuide/BuildingRunningTesting/AQM.rst
index 6d2ae0f193..7186de6618 100644
--- a/doc/UsersGuide/BuildingRunningTesting/AQM.rst
+++ b/doc/UsersGuide/BuildingRunningTesting/AQM.rst
@@ -123,7 +123,7 @@ The community AQM configuration assumes that users have :term:`HPSS` access and
USE_USER_STAGED_EXTRN_FILES: true
EXTRN_MDL_SOURCE_BASEDIR_LBCS: /path/to/data
-On Level 1 systems, users can find :term:`ICs/LBCs` in the usual :ref:`input data locations ` under ``FV3GFS/netcdf/2023021700`` and ``FV3GFS/netcdf/2023021706``. Users can also download the data required for the community experiment from the `UFS SRW App Data Bucket `__.
+On Level 1 systems, users can find :term:`ICs/LBCs` in the usual :ref:`input data locations ` under ``FV3GFS/netcdf/2023021700`` and ``FV3GFS/netcdf/2023021706``. Users can also download the data required for the community experiment from the `UFS SRW App Data Bucket `__.
Users may also wish to change :term:`cron`-related parameters in ``config.yaml``. In the ``config.aqm.community.yaml`` file, which was copied into ``config.yaml``, cron is used for automatic submission and resubmission of the workflow:
diff --git a/doc/UsersGuide/BuildingRunningTesting/ContainerQuickstart.rst b/doc/UsersGuide/BuildingRunningTesting/ContainerQuickstart.rst
index 9e4f58f0bd..d9dd1a0afc 100644
--- a/doc/UsersGuide/BuildingRunningTesting/ContainerQuickstart.rst
+++ b/doc/UsersGuide/BuildingRunningTesting/ContainerQuickstart.rst
@@ -188,8 +188,8 @@ The SRW App requires input files to run. These include static datasets, initial
.. code-block:: console
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/current_srw_release_data/fix_data.tgz
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/current_srw_release_data/gst_data.tgz
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/experiment-user-cases/release-public-v2.2.0/out-of-the-box/fix_data.tgz
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/experiment-user-cases/release-public-v2.2.0/out-of-the-box/gst_data.tgz
tar -xzf fix_data.tgz
tar -xzf gst_data.tgz
@@ -439,4 +439,4 @@ If users have the PBS resource manager installed on their system, the allocation
For more information on the ``qsub`` command options, see the `PBS Manual §2.59.3 `__, (p. 1416).
-These commands should output a hostname. Users can then run ``ssh ``. After "ssh-ing" to the compute node, they can run the container from that node. To run larger experiments, it may be necessary to allocate multiple compute nodes.
\ No newline at end of file
+These commands should output a hostname. Users can then run ``ssh ``. After "ssh-ing" to the compute node, they can run the container from that node. To run larger experiments, it may be necessary to allocate multiple compute nodes.
diff --git a/doc/UsersGuide/BuildingRunningTesting/RunSRW.rst b/doc/UsersGuide/BuildingRunningTesting/RunSRW.rst
index 9d42aaf0dc..d7fd7407a8 100644
--- a/doc/UsersGuide/BuildingRunningTesting/RunSRW.rst
+++ b/doc/UsersGuide/BuildingRunningTesting/RunSRW.rst
@@ -584,7 +584,7 @@ the same cycle starting date/time and forecast hours. Other parameters may diffe
Cartopy Shapefiles
`````````````````````
-The Python plotting tasks require a path to the directory where the Cartopy Natural Earth shapefiles are located. The medium scale (1:50m) cultural and physical shapefiles are used to create coastlines and other geopolitical borders on the map. On :srw-wiki:`Level 1 ` systems, this path is already set in the system's machine file using the variable ``FIXshp``. Users on other systems will need to download the shapefiles and update the path of ``$FIXshp`` in the machine file they are using (e.g., ``$SRW/ush/machine/macos.yaml`` for a generic MacOS system, where ``$SRW`` is the path to the ``ufs-srweather-app`` directory). The subset of shapefiles required for the plotting task can be obtained from the `SRW Data Bucket `__. The full set of medium-scale (1:50m) Cartopy shapefiles can be downloaded `here `__.
+The Python plotting tasks require a path to the directory where the Cartopy Natural Earth shapefiles are located. The medium scale (1:50m) cultural and physical shapefiles are used to create coastlines and other geopolitical borders on the map. On :srw-wiki:`Level 1 ` systems, this path is already set in the system's machine file using the variable ``FIXshp``. Users on other systems will need to download the shapefiles and update the path of ``$FIXshp`` in the machine file they are using (e.g., ``$SRW/ush/machine/macos.yaml`` for a generic MacOS system, where ``$SRW`` is the path to the ``ufs-srweather-app`` directory). The subset of shapefiles required for the plotting task can be obtained from the `SRW Data Bucket `__. The full set of medium-scale (1:50m) Cartopy shapefiles can be downloaded `here `__.
Task Configuration
`````````````````````
diff --git a/doc/UsersGuide/BuildingRunningTesting/Tutorial.rst b/doc/UsersGuide/BuildingRunningTesting/Tutorial.rst
index 445dee1b8f..a21b7aa9bd 100644
--- a/doc/UsersGuide/BuildingRunningTesting/Tutorial.rst
+++ b/doc/UsersGuide/BuildingRunningTesting/Tutorial.rst
@@ -45,12 +45,12 @@ On :srw-wiki:`Level 1 ` systems, users can fi
* FV3GFS data for the first forecast (``control``) is located at:
- * https://noaa-ufs-srw-pds.s3.amazonaws.com/index.html#input_model_data/FV3GFS/grib2/2019061518/
+ * https://noaa-ufs-srw-pds.s3.amazonaws.com/index.html#develop-20240618/input_model_data/FV3GFS/grib2/2019061518/
* HRRR and RAP data for the second forecast (``test_expt``) is located at:
- * https://noaa-ufs-srw-pds.s3.amazonaws.com/index.html#input_model_data/HRRR/2019061518/
- * https://noaa-ufs-srw-pds.s3.amazonaws.com/index.html#input_model_data/RAP/2019061518/
+ * https://noaa-ufs-srw-pds.s3.amazonaws.com/index.html#develop-20240618/input_model_data/HRRR/2019061518/
+ * https://noaa-ufs-srw-pds.s3.amazonaws.com/index.html#develop-20240618/input_model_data/RAP/2019061518/
Load the Workflow
--------------------
diff --git a/doc/UsersGuide/BuildingRunningTesting/VXCases.rst b/doc/UsersGuide/BuildingRunningTesting/VXCases.rst
index 080e180b14..2bf6f775d0 100644
--- a/doc/UsersGuide/BuildingRunningTesting/VXCases.rst
+++ b/doc/UsersGuide/BuildingRunningTesting/VXCases.rst
@@ -45,21 +45,21 @@ On :srw-wiki:`Level 1 ` systems, users can fi
On other systems, users need to download the ``Indy-Severe-Weather.tgz`` file using any of the following methods:
- #. Download directly from the S3 bucket using a browser. The data is available at https://noaa-ufs-srw-pds.s3.amazonaws.com/index.html#sample_cases/release-public-v2.2.0/.
+ #. Download directly from the S3 bucket using a browser. The data is available at https://noaa-ufs-srw-pds.s3.amazonaws.com/index.html#experiment-user-cases/release-public-v2.2.0/METplus-vx-sample/.
#. Download from a terminal using the AWS command line interface (CLI), if installed:
.. code-block:: console
- aws s3 cp https://noaa-ufs-srw-pds.s3.amazonaws.com/index.html#sample_cases/release-public-v2.2.0/Indy-Severe-Weather.tgz Indy-Severe-Weather.tgz
+ aws s3 cp https://noaa-ufs-srw-pds.s3.amazonaws.com/index.html#experiment-user-cases/release-public-v2.2.0/METplus-vx-sample/Indy-Severe-Weather.tgz Indy-Severe-Weather.tgz
#. Download from a terminal using ``wget``:
.. code-block:: console
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/sample_cases/release-public-v2.2.0/Indy-Severe-Weather.tgz
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/experiment-user-cases/release-public-v2.2.0/METplus-vx-sample/Indy-Severe-Weather.tgz
-This tar file contains :term:`IC/LBC ` files, observation data, model/forecast output, and MET verification output for the sample forecast. Users who have never run the SRW App on their system before will also need to download (1) the fix files required for SRW App forecasts and (2) the NaturalEarth shapefiles required for plotting. Users can download the fix file data from a browser at https://noaa-ufs-srw-pds.s3.amazonaws.com/current_srw_release_data/fix_data.tgz or visit :numref:`Section %s ` for instructions on how to download the data with ``wget``. NaturalEarth files are available at https://noaa-ufs-srw-pds.s3.amazonaws.com/NaturalEarth/NaturalEarth.tgz. See the :numref:`Section %s ` for more information on plotting.
+This tar file contains :term:`IC/LBC ` files, observation data, model/forecast output, and MET verification output for the sample forecast. Users who have never run the SRW App on their system before will also need to download (1) the fix files required for SRW App forecasts and (2) the NaturalEarth shapefiles required for plotting. Users can download the fix file data from a browser at https://noaa-ufs-srw-pds.s3.amazonaws.com/experiment-user-cases/release-public-v2.2.0/out-of-the-box/fix_data.tgz or visit :numref:`Section %s ` for instructions on how to download the data with ``wget``. NaturalEarth files are available at https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/NaturalEarth/NaturalEarth.tgz. See the :numref:`Section %s ` for more information on plotting.
After downloading ``Indy-Severe-Weather.tgz`` using one of the three methods above, untar the downloaded compressed archive file:
diff --git a/doc/UsersGuide/CustomizingTheWorkflow/InputOutputFiles.rst b/doc/UsersGuide/CustomizingTheWorkflow/InputOutputFiles.rst
index 128b080655..40227d7a2b 100644
--- a/doc/UsersGuide/CustomizingTheWorkflow/InputOutputFiles.rst
+++ b/doc/UsersGuide/CustomizingTheWorkflow/InputOutputFiles.rst
@@ -225,14 +225,14 @@ A set of input files, including static (fix) data and raw initial and lateral bo
Static Files
--------------
-Static files are available in the `"fix" directory `__ of the SRW App Data Bucket. Users can download the full set of fix files as a tar file:
+Static files are available in the `"fix" directory `__ of the SRW App Data Bucket. Users can download the full set of fix files as a tar file:
.. code-block:: console
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/current_srw_release_data/fix_data.tgz
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/experiment-user-cases/release-public-v2.2.0/out-of-the-box/fix_data.tgz
tar -xzf fix_data.tgz
-Alternatively, users can download the static files individually from the `"fix" directory `__ of the SRW Data Bucket using the ``wget`` command for each required file. Users will need to create an appropriate directory structure for the files when downloading them individually. The best solution is to download the files into directories that mirror the structure of the `Data Bucket `__.
+Alternatively, users can download the static files individually from the `"fix" directory `__ of the SRW Data Bucket using the ``wget`` command for each required file. Users will need to create an appropriate directory structure for the files when downloading them individually. The best solution is to download the files into directories that mirror the structure of the `Data Bucket `__.
The environment variables ``FIXgsm``, ``FIXorg``, and ``FIXsfc`` indicate the path to the directories where the static files are located. After downloading the experiment data, users must set the paths to the files in ``config.yaml``. Add the following code to the ``task_run_fcst:`` section of the ``config.yaml`` file, and alter the variable paths accordingly:
@@ -252,7 +252,7 @@ To download the model input data for the 12-hour "out-of-the-box" experiment con
.. code-block:: console
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/current_srw_release_data/gst_data.tgz
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/experiment-user-cases/release-public-v2.2.0/out-of-the-box/gst_data.tgz
tar -xzf gst_data.tgz
To download data for different dates, model types, and formats, users can explore the ``input_model_data`` section of the data bucket and replace the links above with ones that fetch their desired data.
@@ -318,7 +318,7 @@ Default Initial and Lateral Boundary Conditions
-----------------------------------------------
The default initial and lateral boundary condition files are set to be a severe weather case from June 15, 2019 (20190615) at 18 UTC. FV3GFS GRIB2 files are the default model and file format. A tar file
-(``gst_data.tgz``) containing the model data for this case is available in the `UFS SRW App Data Bucket `__.
+(``gst_data.tgz``) containing the model data for this case is available in the `UFS SRW App Data Bucket `__.
Running the App for Different Dates
-----------------------------------
diff --git a/doc/tables/fix_file_list.rst b/doc/tables/fix_file_list.rst
index a20bd39245..628c124bc3 100644
--- a/doc/tables/fix_file_list.rst
+++ b/doc/tables/fix_file_list.rst
@@ -11,599 +11,599 @@ Static Files for SRW App Release v2.1.0
.. code-block:: console
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_aer/merra2.aerclim.2003-2014.m01.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_aer/merra2.aerclim.2003-2014.m02.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_aer/merra2.aerclim.2003-2014.m03.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_aer/merra2.aerclim.2003-2014.m04.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_aer/merra2.aerclim.2003-2014.m05.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_aer/merra2.aerclim.2003-2014.m06.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_aer/merra2.aerclim.2003-2014.m07.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_aer/merra2.aerclim.2003-2014.m08.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_aer/merra2.aerclim.2003-2014.m09.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_aer/merra2.aerclim.2003-2014.m10.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_aer/merra2.aerclim.2003-2014.m11.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_aer/merra2.aerclim.2003-2014.m12.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_aer/merra2C.aerclim.2003-2014.m01.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_aer/merra2C.aerclim.2003-2014.m02.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_aer/merra2C.aerclim.2003-2014.m03.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_aer/merra2C.aerclim.2003-2014.m04.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_aer/merra2C.aerclim.2003-2014.m05.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_aer/merra2C.aerclim.2003-2014.m06.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_aer/merra2C.aerclim.2003-2014.m07.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_aer/merra2C.aerclim.2003-2014.m08.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_aer/merra2C.aerclim.2003-2014.m09.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_aer/merra2C.aerclim.2003-2014.m10.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_aer/merra2C.aerclim.2003-2014.m11.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_aer/merra2C.aerclim.2003-2014.m12.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_aer/merra2.aerclim.2003-2014.m01.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_aer/merra2.aerclim.2003-2014.m02.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_aer/merra2.aerclim.2003-2014.m03.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_aer/merra2.aerclim.2003-2014.m04.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_aer/merra2.aerclim.2003-2014.m05.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_aer/merra2.aerclim.2003-2014.m06.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_aer/merra2.aerclim.2003-2014.m07.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_aer/merra2.aerclim.2003-2014.m08.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_aer/merra2.aerclim.2003-2014.m09.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_aer/merra2.aerclim.2003-2014.m10.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_aer/merra2.aerclim.2003-2014.m11.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_aer/merra2.aerclim.2003-2014.m12.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_aer/merra2C.aerclim.2003-2014.m01.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_aer/merra2C.aerclim.2003-2014.m02.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_aer/merra2C.aerclim.2003-2014.m03.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_aer/merra2C.aerclim.2003-2014.m04.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_aer/merra2C.aerclim.2003-2014.m05.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_aer/merra2C.aerclim.2003-2014.m06.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_aer/merra2C.aerclim.2003-2014.m07.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_aer/merra2C.aerclim.2003-2014.m08.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_aer/merra2C.aerclim.2003-2014.m09.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_aer/merra2C.aerclim.2003-2014.m10.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_aer/merra2C.aerclim.2003-2014.m11.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_aer/merra2C.aerclim.2003-2014.m12.nc
``fix_am`` Files
---------------------
.. code-block:: console
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/CCN_ACTIVATE.BIN
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/cfs_ice1x1monclim19822001.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/cfs_oi2sst1x1monclim19822001.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/cfs_v2_soilmcpc.1x1.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/CFSR.OISST.1982.2010.monthly.clim
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/CFSR.OISST.1999.2012.monthly.clim.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/CFSR.SEAICE.1982.2010.monthly.clim
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/CFSR.SEAICE.1982.2012.monthly.clim.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2monthlycyc.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/emcsfc_gland5min.grib2
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/emcsfc_snow_cover.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/emcsfc_snow_cover_climo.grib2
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/freezeH2O.dat
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/geo_em.d01.lat-lon.2.5m.HGT_M.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/geo_em.d01.nc_HRRR_AK
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/geo_em.d01.nc_HRRRX
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/geo_em.d01.nc_RAPX
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_1x1_paramlist
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_1x1_paramlist.anl
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_1x1_paramlist.f00
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_aeroinfo.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_aeropac3a.m01.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_aeropac3a.m02.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_aeropac3a.m03.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_aeropac3a.m04.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_aeropac3a.m05.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_aeropac3a.m06.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_aeropac3a.m07.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_aeropac3a.m08.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_aeropac3a.m09.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_aeropac3a.m10.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_aeropac3a.m11.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_aeropac3a.m12.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_albedo4.1x1.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_cldtune.f77
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_climaeropac_global.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2con.l28.f77
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2con.l42.f77
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2con.l64.f77
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1956.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1957.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1958.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1959.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1960.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1961.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1962.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1963.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1964.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1965.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1966.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1967.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1968.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1969.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1970.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1971.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1972.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1973.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1974.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1975.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1976.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1977.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1978.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1979.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1980.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1981.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1982.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1983.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1984.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1985.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1986.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1987.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1988.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1989.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1990.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1991.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1992.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1993.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1994.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1995.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1996.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1997.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1998.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1999.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_2000.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_2001.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_2002.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_2003.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_2004.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_2005.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_2006.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_2007.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_2008.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_2009.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_2010.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_2011.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_2012.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_2013.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_glob.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2monthlycyc1976_2006.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2monthlycyc1976_2007.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2monthlycyc1976_2009.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_divten.l28.f77
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_divten.l42.f77
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_divten.l64.f77
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_emissivity_coefs.f77
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_gaussian_latitudes.t1148.2304.1152.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_gaussian_latitudes.t1534.3072.1536.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_gaussian_latitudes.t574.1152.576.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_gaussian_latitudes.t670.1344.672.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_gaussian_latitudes.t766.1536.768.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_gaussian_latitudes.t94.192.96.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_glacier.2x2.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_h2o_pltc.f77
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_hd_paramlist
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_hd_paramlist.f00
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_hyblev.l128.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_hyblev.l128C.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_hyblev.l150.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_hyblev.l28.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_hyblev.l42.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_hyblev.l60.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_hyblev.l64.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_hyblev.l64sl.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_hyblev.l65.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_hyblev.l65.txt_0.1hPa
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_hyblev.l91.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_hyblev.l98.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_hyblev3.l28.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_hyblev3.l42.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_hyblev3.l60.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_hyblev3.l64.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_iceclim.2x2.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_idea_coeff_hflux.dat
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_idea_coeff_lflux.dat
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_idea_coeff_lte.150
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_idea_coeff_lte.360
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_idea_coeff_lte.540
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_idea_coeff_lte.720
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_idea_ggww_in1.par
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_idea_ggww_in4.par
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_idea_h2ort_kg7t.par
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_idea_h2ovb_kg7t.par
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_idea_wei96.cofcnts
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_kplist.1d.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_kplist.hd.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_kplist.master.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_latitudes.t1148.2304.1152.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_latitudes.t126.384.190.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_latitudes.t1534.3072.1536.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_latitudes.t170.512.256.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_latitudes.t190.384.192.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_latitudes.t190.576.288.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_latitudes.t254.512.256.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_latitudes.t254.768.384.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_latitudes.t382.1152.576.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_latitudes.t382.768.384.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_latitudes.t574.1152.576.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_latitudes.t574.1760.880.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_latitudes.t62.192.94.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_latitudes.t670.1344.672.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_latitudes.t878.1760.880.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_latitudes.t878.2640.1320.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_latitudes.t92.192.94.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_longitudes.t1148.2304.1152.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_longitudes.t126.384.190.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_longitudes.t1534.3072.1536.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_longitudes.t170.512.256.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_longitudes.t190.384.192.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_longitudes.t190.576.288.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_longitudes.t254.512.256.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_longitudes.t254.768.384.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_longitudes.t382.1152.576.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_longitudes.t382.768.384.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_longitudes.t574.1152.576.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_longitudes.t574.1760.880.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_longitudes.t62.192.94.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_longitudes.t670.1344.672.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_longitudes.t878.1760.880.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_longitudes.t878.2640.1320.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_longitudes.t92.192.94.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_lonsperlat.t1148.2304.1152.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_lonsperlat.t126.384.190.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_lonsperlat.t1534.3072.1536.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_lonsperlat.t170.512.256.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_lonsperlat.t190.384.192.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_lonsperlat.t190.576.288.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_lonsperlat.t254.512.256.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_lonsperlat.t254.768.384.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_lonsperlat.t3070.6144.3072.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_lonsperlat.t382.1152.576.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_lonsperlat.t382.768.384.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_lonsperlat.t574.1152.576.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_lonsperlat.t574.1760.880.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_lonsperlat.t62.192.94.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_lonsperlat.t670.1344.672.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_lonsperlat.t766.1536.768.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_lonsperlat.t878.1760.880.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_lonsperlat.t878.2640.1320.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_lonsperlat.t92.192.94.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_lonsperlat.t94.192.96.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_maskh.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_master-catchup_parmlist
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_maxice.2x2.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mtnvar.t1148.2304.1152.f77
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mtnvar.t126.384.190.f77
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mtnvar.t126.384.190.rg.f77
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mtnvar.t1534.3072.1536.f77
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mtnvar.t1534.3072.1536.rg.f77
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mtnvar.t170.512.256.f77
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mtnvar.t190.384.192.f77
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mtnvar.t190.384.192.rg.f77
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mtnvar.t190.576.288.f77
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mtnvar.t254.512.256.f77
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mtnvar.t254.512.256.rg.f77
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mtnvar.t254.768.384.f77
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mtnvar.t382.1152.576.f77
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mtnvar.t382.768.384.f77
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mtnvar.t382.768.384.rg.f77
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mtnvar.t574.1152.576.f77
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mtnvar.t574.1152.576.rg.f77
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mtnvar.t574.1760.880.f77
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mtnvar.t62.192.94.f77
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mtnvar.t670.1344.672.f77
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mtnvar.t670.1344.672.rg.f77
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mtnvar.t766.1536.768.rg.f77
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mtnvar.t878.1760.880.f77
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mtnvar.t878.2640.1320.f77
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mtnvar.t92.192.94.f77
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mtnvar.t92.192.94.rg.f77
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t1148.2304.1152.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t126.384.190.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t126.384.190.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t1534.3072.1536.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t1534.3072.1536.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t170.512.256.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t190.384.192.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t190.384.192.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t190.576.288.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t190.576.288.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t254.512.256.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t254.512.256.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t254.768.384.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t382.1152.576.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t382.768.384.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t382.768.384.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t574.1152.576.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t574.1152.576.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t574.1760.880.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t62.192.94.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t62.192.94.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t670.1344.672.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t670.1344.672.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t766.1536.768.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t766.1536.768.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t878.1760.880.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t878.2640.1320.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t92.192.94.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t92.192.94.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t94.192.96.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_npoess_paramlist
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_o3clim.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_o3prdlos.f77
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t1148.2304.1152.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t126.384.190.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t126.384.190.rg.f77
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t126.384.190.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t1534.3072.1536.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t1534.3072.1536.rg.f77
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t1534.3072.1536.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t170.512.256.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t190.384.192.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t190.384.192.rg.f77
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t190.384.192.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t190.576.288.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t254.512.256.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t254.512.256.rg.f77
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t254.512.256.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t254.768.384.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t382.1152.576.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t382.768.384.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t382.768.384.rg.f77
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t382.768.384.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t574.1152.576.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t574.1152.576.rg.f77
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t574.1152.576.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t574.1760.880.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t62.192.94.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t62.192.94.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t670.1344.672.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t670.1344.672.rg.f77
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t670.1344.672.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t766.1536.768.rg.f77
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t766.1536.768.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t878.1760.880.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t878.2640.1320.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t92.192.94.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t92.192.94.rg.f77
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t92.192.94.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t1148.2304.1152.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t126.384.190.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t126.384.190.rg.f77
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t126.384.190.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t1534.3072.1536.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t1534.3072.1536.rg.f77
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t1534.3072.1536.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t170.512.256.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t190.384.192.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t190.384.192.rg.f77
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t190.384.192.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t190.576.288.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t254.512.256.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t254.512.256.rg.f77
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t254.512.256.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t254.768.384.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t382.1152.576.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t382.768.384.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t382.768.384.rg.f77
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t382.768.384.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t574.1152.576.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t574.1152.576.rg.f77
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t574.1152.576.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t574.1760.880.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t62.192.94.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t62.192.94.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t670.1344.672.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t670.1344.672.rg.f77
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t670.1344.672.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t766.1536.768.rg.f77
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t766.1536.768.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t878.1760.880.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t878.2640.1320.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t92.192.94.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t92.192.94.rg.f77
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t92.192.94.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_0.5x0.5.dat
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_salclm.t1534.3072.1536.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_sfc_emissivity_idx.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_shdmax.0.144x0.144.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_shdmax.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_shdmin.0.144x0.144.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_shdmin.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_siglevel.l28.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_siglevel.l42.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_siglevel.l64.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t1148.2304.1152.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t126.384.190.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t126.384.190.rg.f77
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t126.384.190.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t1534.3072.1536.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t1534.3072.1536.rg.f77
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t1534.3072.1536.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t170.512.256.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t190.384.192.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t190.384.192.rg.f77
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t190.384.192.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t190.576.288.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t254.512.256.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t254.512.256.rg.f77
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t254.512.256.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t254.768.384.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t382.1152.576.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t382.768.384.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t382.768.384.rg.f77
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t382.768.384.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t574.1152.576.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t574.1152.576.rg.f77
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t574.1152.576.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t574.1760.880.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t62.192.94.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t62.192.94.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t670.1344.672.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t670.1344.672.rg.f77
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t670.1344.672.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t766.1536.768.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t766.1536.768.rg.f77
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t766.1536.768.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t878.1760.880.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t878.2640.1320.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t92.192.94.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t92.192.94.rg.f77
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t92.192.94.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slope.1x1.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slptyp.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snoalb.1x1.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snoalb.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snoclim.1.875.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t1148.2304.1152.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t126.384.190.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t126.384.190.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t1534.3072.1536.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t1534.3072.1536.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t170.512.256.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t190.384.192.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t190.384.192.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t190.576.288.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t190.576.288.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t254.512.256.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t254.512.256.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t254.768.384.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t382.1152.576.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t382.768.384.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t382.768.384.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t574.1152.576.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t574.1152.576.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t574.1760.880.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t62.192.94.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t62.192.94.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t670.1344.672.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t670.1344.672.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t766.1536.768.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t766.1536.768.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t878.1760.880.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t878.2640.1320.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t92.192.94.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t92.192.94.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t94.192.96.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmcpc.1x1.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmgldas.statsgo.t1534.3072.1536.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmgldas.statsgo.t254.512.256.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmgldas.statsgo.t382.1152.576.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmgldas.statsgo.t382.768.384.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmgldas.statsgo.t574.1152.576.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmgldas.statsgo.t766.1536.768.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmgldas.statsgo.t92.192.94.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmgldas.statsgo.t94.192.96.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmgldas.t1148.2304.1152.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmgldas.t126.384.190.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmgldas.t1534.3072.1536.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmgldas.t170.512.256.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmgldas.t190.384.192.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmgldas.t190.576.288.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmgldas.t190.576.288.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmgldas.t254.512.256.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmgldas.t254.768.384.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmgldas.t382.1152.576.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmgldas.t382.768.384.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmgldas.t574.1152.576.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmgldas.t574.1760.880.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmgldas.t62.192.94.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmgldas.t670.1344.672.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmgldas.t766.1536.768.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmgldas.t878.1760.880.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmgldas.t878.2640.1320.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmgldas.t92.192.94.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.1x1.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t1148.2304.1152.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t126.384.190.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t126.384.190.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t1534.3072.1536.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t1534.3072.1536.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t170.512.256.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t190.384.192.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t190.384.192.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t190.576.288.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t190.576.288.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t254.512.256.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t254.512.256.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t254.768.384.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t382.1152.576.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t382.768.384.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t382.768.384.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t574.1152.576.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t574.1152.576.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t574.1760.880.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t62.192.94.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t62.192.94.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t670.1344.672.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t670.1344.672.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t766.1536.768.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t766.1536.768.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t878.1760.880.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t878.2640.1320.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t92.192.94.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t92.192.94.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t94.192.96.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_solarconstant_cmip_an.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_solarconstant_cmip_mn.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_solarconstant_noaa_a0.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_solarconstant_noaa_an.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_solarconstant_noaa_an.txt_v2011
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_solarconstant_noaa_an.txt_v2019
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_solarconstantdata.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_spectral_coefs.f77
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_sstclim.2x2.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_tbthe.f77
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_tg3clim.2.6x1.5.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_transmittance_coefs.f77
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vars.l28.f77
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vars.l42.f77
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vars.l64.f77
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegfrac.0.144.decpercent.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegfrac.1x1.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.1x1.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t1148.2304.1152.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t126.384.190.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t126.384.190.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t1534.3072.1536.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t1534.3072.1536.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t170.512.256.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t190.384.192.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t190.384.192.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t190.576.288.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t190.576.288.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t254.512.256.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t254.512.256.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t254.768.384.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t382.1152.576.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t382.768.384.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t382.768.384.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t574.1152.576.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t574.1152.576.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t574.1760.880.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t62.192.94.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t62.192.94.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t670.1344.672.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t670.1344.672.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t766.1536.768.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t766.1536.768.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t878.1760.880.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t878.2640.1320.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t92.192.94.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t92.192.94.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t94.192.96.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_volcanic_aerosols_1850-1859.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_volcanic_aerosols_1860-1869.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_volcanic_aerosols_1870-1879.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_volcanic_aerosols_1880-1889.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_volcanic_aerosols_1890-1899.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_volcanic_aerosols_1900-1909.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_volcanic_aerosols_1910-1919.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_volcanic_aerosols_1920-1929.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_volcanic_aerosols_1930-1939.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_volcanic_aerosols_1940-1949.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_volcanic_aerosols_1950-1959.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_volcanic_aerosols_1960-1969.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_volcanic_aerosols_1970-1979.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_volcanic_aerosols_1980-1989.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_volcanic_aerosols_1990-1999.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_zorclim.1x1.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/HGT.Beljaars_filtered.lat-lon.30s_res.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/latlon_grid3.32769.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/ozone.clim
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/ozprdlos_2015_new_sbuvO3_tclm15_nuchem.f77
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/qr_acr_qg.dat
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/qr_acr_qgV2.dat
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/qr_acr_qs.dat
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/qr_acr_qsV2.dat
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/rrtmgp-cloud-optics-coeffs-lw.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/rrtmgp-cloud-optics-coeffs-sw.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/rrtmgp-data-lw-g256-2018-12-04.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/rrtmgp-data-sw-g224-2018-12-04.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/rrtmgp-lw-prototype-g128-210413.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/rrtmgp-sw-prototype-g131-210413.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/RTGSST.1982.2012.monthly.clim.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/seaice_newland.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/syndat_fildef.vit
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/syndat_slmask.t126.gaussian
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/syndat_stmnames
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/syndat_stmnames_old
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/syndat_stmnames_old1
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/syndat_stmnames_old2
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/Thompson_MP_MONTHLY_CLIMO.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/ugwp_limb_tau.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/CCN_ACTIVATE.BIN
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/cfs_ice1x1monclim19822001.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/cfs_oi2sst1x1monclim19822001.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/cfs_v2_soilmcpc.1x1.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/CFSR.OISST.1982.2010.monthly.clim
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/CFSR.OISST.1999.2012.monthly.clim.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/CFSR.SEAICE.1982.2010.monthly.clim
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/CFSR.SEAICE.1982.2012.monthly.clim.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2monthlycyc.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/emcsfc_gland5min.grib2
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/emcsfc_snow_cover.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/emcsfc_snow_cover_climo.grib2
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/freezeH2O.dat
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/geo_em.d01.lat-lon.2.5m.HGT_M.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/geo_em.d01.nc_HRRR_AK
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/geo_em.d01.nc_HRRRX
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/geo_em.d01.nc_RAPX
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_1x1_paramlist
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_1x1_paramlist.anl
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_1x1_paramlist.f00
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_aeroinfo.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_aeropac3a.m01.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_aeropac3a.m02.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_aeropac3a.m03.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_aeropac3a.m04.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_aeropac3a.m05.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_aeropac3a.m06.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_aeropac3a.m07.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_aeropac3a.m08.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_aeropac3a.m09.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_aeropac3a.m10.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_aeropac3a.m11.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_aeropac3a.m12.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_albedo4.1x1.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_cldtune.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_climaeropac_global.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2con.l28.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2con.l42.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2con.l64.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1956.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1957.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1958.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1959.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1960.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1961.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1962.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1963.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1964.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1965.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1966.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1967.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1968.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1969.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1970.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1971.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1972.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1973.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1974.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1975.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1976.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1977.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1978.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1979.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1980.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1981.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1982.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1983.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1984.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1985.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1986.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1987.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1988.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1989.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1990.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1991.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1992.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1993.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1994.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1995.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1996.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1997.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1998.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1999.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_2000.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_2001.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_2002.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_2003.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_2004.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_2005.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_2006.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_2007.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_2008.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_2009.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_2010.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_2011.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_2012.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_2013.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_glob.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2monthlycyc1976_2006.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2monthlycyc1976_2007.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2monthlycyc1976_2009.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_divten.l28.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_divten.l42.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_divten.l64.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_emissivity_coefs.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_gaussian_latitudes.t1148.2304.1152.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_gaussian_latitudes.t1534.3072.1536.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_gaussian_latitudes.t574.1152.576.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_gaussian_latitudes.t670.1344.672.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_gaussian_latitudes.t766.1536.768.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_gaussian_latitudes.t94.192.96.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_glacier.2x2.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_h2o_pltc.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_hd_paramlist
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_hd_paramlist.f00
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_hyblev.l128.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_hyblev.l128C.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_hyblev.l150.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_hyblev.l28.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_hyblev.l42.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_hyblev.l60.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_hyblev.l64.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_hyblev.l64sl.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_hyblev.l65.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_hyblev.l65.txt_0.1hPa
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_hyblev.l91.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_hyblev.l98.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_hyblev3.l28.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_hyblev3.l42.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_hyblev3.l60.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_hyblev3.l64.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_iceclim.2x2.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_idea_coeff_hflux.dat
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_idea_coeff_lflux.dat
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_idea_coeff_lte.150
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_idea_coeff_lte.360
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_idea_coeff_lte.540
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_idea_coeff_lte.720
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_idea_ggww_in1.par
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_idea_ggww_in4.par
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_idea_h2ort_kg7t.par
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_idea_h2ovb_kg7t.par
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_idea_wei96.cofcnts
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_kplist.1d.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_kplist.hd.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_kplist.master.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_latitudes.t1148.2304.1152.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_latitudes.t126.384.190.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_latitudes.t1534.3072.1536.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_latitudes.t170.512.256.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_latitudes.t190.384.192.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_latitudes.t190.576.288.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_latitudes.t254.512.256.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_latitudes.t254.768.384.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_latitudes.t382.1152.576.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_latitudes.t382.768.384.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_latitudes.t574.1152.576.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_latitudes.t574.1760.880.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_latitudes.t62.192.94.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_latitudes.t670.1344.672.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_latitudes.t878.1760.880.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_latitudes.t878.2640.1320.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_latitudes.t92.192.94.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_longitudes.t1148.2304.1152.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_longitudes.t126.384.190.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_longitudes.t1534.3072.1536.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_longitudes.t170.512.256.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_longitudes.t190.384.192.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_longitudes.t190.576.288.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_longitudes.t254.512.256.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_longitudes.t254.768.384.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_longitudes.t382.1152.576.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_longitudes.t382.768.384.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_longitudes.t574.1152.576.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_longitudes.t574.1760.880.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_longitudes.t62.192.94.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_longitudes.t670.1344.672.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_longitudes.t878.1760.880.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_longitudes.t878.2640.1320.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_longitudes.t92.192.94.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_lonsperlat.t1148.2304.1152.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_lonsperlat.t126.384.190.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_lonsperlat.t1534.3072.1536.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_lonsperlat.t170.512.256.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_lonsperlat.t190.384.192.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_lonsperlat.t190.576.288.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_lonsperlat.t254.512.256.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_lonsperlat.t254.768.384.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_lonsperlat.t3070.6144.3072.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_lonsperlat.t382.1152.576.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_lonsperlat.t382.768.384.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_lonsperlat.t574.1152.576.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_lonsperlat.t574.1760.880.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_lonsperlat.t62.192.94.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_lonsperlat.t670.1344.672.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_lonsperlat.t766.1536.768.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_lonsperlat.t878.1760.880.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_lonsperlat.t878.2640.1320.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_lonsperlat.t92.192.94.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_lonsperlat.t94.192.96.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_maskh.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_master-catchup_parmlist
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_maxice.2x2.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mtnvar.t1148.2304.1152.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mtnvar.t126.384.190.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mtnvar.t126.384.190.rg.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mtnvar.t1534.3072.1536.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mtnvar.t1534.3072.1536.rg.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mtnvar.t170.512.256.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mtnvar.t190.384.192.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mtnvar.t190.384.192.rg.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mtnvar.t190.576.288.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mtnvar.t254.512.256.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mtnvar.t254.512.256.rg.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mtnvar.t254.768.384.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mtnvar.t382.1152.576.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mtnvar.t382.768.384.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mtnvar.t382.768.384.rg.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mtnvar.t574.1152.576.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mtnvar.t574.1152.576.rg.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mtnvar.t574.1760.880.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mtnvar.t62.192.94.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mtnvar.t670.1344.672.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mtnvar.t670.1344.672.rg.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mtnvar.t766.1536.768.rg.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mtnvar.t878.1760.880.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mtnvar.t878.2640.1320.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mtnvar.t92.192.94.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mtnvar.t92.192.94.rg.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t1148.2304.1152.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t126.384.190.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t126.384.190.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t1534.3072.1536.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t1534.3072.1536.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t170.512.256.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t190.384.192.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t190.384.192.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t190.576.288.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t190.576.288.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t254.512.256.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t254.512.256.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t254.768.384.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t382.1152.576.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t382.768.384.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t382.768.384.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t574.1152.576.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t574.1152.576.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t574.1760.880.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t62.192.94.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t62.192.94.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t670.1344.672.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t670.1344.672.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t766.1536.768.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t766.1536.768.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t878.1760.880.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t878.2640.1320.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t92.192.94.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t92.192.94.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t94.192.96.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_npoess_paramlist
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_o3clim.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_o3prdlos.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t1148.2304.1152.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t126.384.190.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t126.384.190.rg.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t126.384.190.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t1534.3072.1536.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t1534.3072.1536.rg.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t1534.3072.1536.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t170.512.256.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t190.384.192.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t190.384.192.rg.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t190.384.192.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t190.576.288.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t254.512.256.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t254.512.256.rg.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t254.512.256.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t254.768.384.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t382.1152.576.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t382.768.384.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t382.768.384.rg.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t382.768.384.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t574.1152.576.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t574.1152.576.rg.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t574.1152.576.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t574.1760.880.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t62.192.94.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t62.192.94.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t670.1344.672.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t670.1344.672.rg.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t670.1344.672.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t766.1536.768.rg.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t766.1536.768.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t878.1760.880.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t878.2640.1320.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t92.192.94.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t92.192.94.rg.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t92.192.94.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t1148.2304.1152.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t126.384.190.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t126.384.190.rg.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t126.384.190.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t1534.3072.1536.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t1534.3072.1536.rg.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t1534.3072.1536.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t170.512.256.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t190.384.192.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t190.384.192.rg.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t190.384.192.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t190.576.288.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t254.512.256.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t254.512.256.rg.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t254.512.256.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t254.768.384.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t382.1152.576.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t382.768.384.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t382.768.384.rg.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t382.768.384.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t574.1152.576.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t574.1152.576.rg.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t574.1152.576.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t574.1760.880.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t62.192.94.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t62.192.94.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t670.1344.672.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t670.1344.672.rg.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t670.1344.672.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t766.1536.768.rg.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t766.1536.768.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t878.1760.880.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t878.2640.1320.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t92.192.94.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t92.192.94.rg.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t92.192.94.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_0.5x0.5.dat
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_salclm.t1534.3072.1536.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_sfc_emissivity_idx.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_shdmax.0.144x0.144.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_shdmax.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_shdmin.0.144x0.144.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_shdmin.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_siglevel.l28.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_siglevel.l42.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_siglevel.l64.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t1148.2304.1152.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t126.384.190.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t126.384.190.rg.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t126.384.190.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t1534.3072.1536.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t1534.3072.1536.rg.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t1534.3072.1536.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t170.512.256.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t190.384.192.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t190.384.192.rg.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t190.384.192.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t190.576.288.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t254.512.256.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t254.512.256.rg.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t254.512.256.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t254.768.384.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t382.1152.576.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t382.768.384.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t382.768.384.rg.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t382.768.384.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t574.1152.576.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t574.1152.576.rg.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t574.1152.576.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t574.1760.880.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t62.192.94.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t62.192.94.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t670.1344.672.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t670.1344.672.rg.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t670.1344.672.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t766.1536.768.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t766.1536.768.rg.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t766.1536.768.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t878.1760.880.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t878.2640.1320.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t92.192.94.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t92.192.94.rg.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t92.192.94.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slope.1x1.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slptyp.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snoalb.1x1.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snoalb.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snoclim.1.875.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t1148.2304.1152.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t126.384.190.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t126.384.190.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t1534.3072.1536.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t1534.3072.1536.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t170.512.256.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t190.384.192.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t190.384.192.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t190.576.288.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t190.576.288.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t254.512.256.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t254.512.256.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t254.768.384.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t382.1152.576.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t382.768.384.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t382.768.384.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t574.1152.576.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t574.1152.576.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t574.1760.880.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t62.192.94.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t62.192.94.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t670.1344.672.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t670.1344.672.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t766.1536.768.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t766.1536.768.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t878.1760.880.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t878.2640.1320.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t92.192.94.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t92.192.94.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t94.192.96.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmcpc.1x1.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmgldas.statsgo.t1534.3072.1536.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmgldas.statsgo.t254.512.256.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmgldas.statsgo.t382.1152.576.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmgldas.statsgo.t382.768.384.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmgldas.statsgo.t574.1152.576.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmgldas.statsgo.t766.1536.768.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmgldas.statsgo.t92.192.94.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmgldas.statsgo.t94.192.96.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmgldas.t1148.2304.1152.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmgldas.t126.384.190.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmgldas.t1534.3072.1536.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmgldas.t170.512.256.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmgldas.t190.384.192.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmgldas.t190.576.288.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmgldas.t190.576.288.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmgldas.t254.512.256.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmgldas.t254.768.384.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmgldas.t382.1152.576.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmgldas.t382.768.384.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmgldas.t574.1152.576.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmgldas.t574.1760.880.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmgldas.t62.192.94.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmgldas.t670.1344.672.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmgldas.t766.1536.768.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmgldas.t878.1760.880.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmgldas.t878.2640.1320.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmgldas.t92.192.94.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.1x1.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t1148.2304.1152.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t126.384.190.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t126.384.190.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t1534.3072.1536.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t1534.3072.1536.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t170.512.256.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t190.384.192.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t190.384.192.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t190.576.288.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t190.576.288.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t254.512.256.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t254.512.256.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t254.768.384.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t382.1152.576.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t382.768.384.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t382.768.384.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t574.1152.576.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t574.1152.576.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t574.1760.880.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t62.192.94.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t62.192.94.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t670.1344.672.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t670.1344.672.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t766.1536.768.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t766.1536.768.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t878.1760.880.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t878.2640.1320.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t92.192.94.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t92.192.94.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t94.192.96.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_solarconstant_cmip_an.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_solarconstant_cmip_mn.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_solarconstant_noaa_a0.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_solarconstant_noaa_an.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_solarconstant_noaa_an.txt_v2011
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_solarconstant_noaa_an.txt_v2019
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_solarconstantdata.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_spectral_coefs.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_sstclim.2x2.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_tbthe.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_tg3clim.2.6x1.5.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_transmittance_coefs.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vars.l28.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vars.l42.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vars.l64.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegfrac.0.144.decpercent.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegfrac.1x1.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.1x1.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t1148.2304.1152.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t126.384.190.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t126.384.190.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t1534.3072.1536.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t1534.3072.1536.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t170.512.256.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t190.384.192.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t190.384.192.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t190.576.288.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t190.576.288.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t254.512.256.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t254.512.256.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t254.768.384.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t382.1152.576.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t382.768.384.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t382.768.384.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t574.1152.576.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t574.1152.576.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t574.1760.880.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t62.192.94.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t62.192.94.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t670.1344.672.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t670.1344.672.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t766.1536.768.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t766.1536.768.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t878.1760.880.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t878.2640.1320.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t92.192.94.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t92.192.94.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t94.192.96.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_volcanic_aerosols_1850-1859.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_volcanic_aerosols_1860-1869.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_volcanic_aerosols_1870-1879.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_volcanic_aerosols_1880-1889.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_volcanic_aerosols_1890-1899.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_volcanic_aerosols_1900-1909.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_volcanic_aerosols_1910-1919.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_volcanic_aerosols_1920-1929.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_volcanic_aerosols_1930-1939.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_volcanic_aerosols_1940-1949.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_volcanic_aerosols_1950-1959.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_volcanic_aerosols_1960-1969.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_volcanic_aerosols_1970-1979.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_volcanic_aerosols_1980-1989.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_volcanic_aerosols_1990-1999.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_zorclim.1x1.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/HGT.Beljaars_filtered.lat-lon.30s_res.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/latlon_grid3.32769.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/ozone.clim
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/ozprdlos_2015_new_sbuvO3_tclm15_nuchem.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/qr_acr_qg.dat
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/qr_acr_qgV2.dat
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/qr_acr_qs.dat
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/qr_acr_qsV2.dat
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/rrtmgp-cloud-optics-coeffs-lw.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/rrtmgp-cloud-optics-coeffs-sw.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/rrtmgp-data-lw-g256-2018-12-04.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/rrtmgp-data-sw-g224-2018-12-04.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/rrtmgp-lw-prototype-g128-210413.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/rrtmgp-sw-prototype-g131-210413.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/RTGSST.1982.2012.monthly.clim.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/seaice_newland.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/syndat_fildef.vit
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/syndat_slmask.t126.gaussian
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/syndat_stmnames
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/syndat_stmnames_old
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/syndat_stmnames_old1
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/syndat_stmnames_old2
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/Thompson_MP_MONTHLY_CLIMO.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/ugwp_limb_tau.nc
``fix_am/co2dat_4a/`` Files:
@@ -611,102 +611,102 @@ Static Files for SRW App Release v2.1.0
.. code-block:: console
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1956.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1957.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1958.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1959.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1960.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1961.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1962.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1963.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1964.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1965.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1966.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1967.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1968.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1969.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1970.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1971.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1972.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1973.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1974.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1975.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1976.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1977.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1978.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1979.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1980.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1981.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1982.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1983.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1984.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1985.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1986.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1987.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1988.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1989.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1990.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1991.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1992.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1993.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1994.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1995.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1996.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1997.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1998.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1999.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2000.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2001.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2002.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2003.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2004.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2005.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2006.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2007.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2008.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2009.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2009.txt_proj
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2009.txt_proj_u
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2010.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2010.txt_proj
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2010.txt_proj_u
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2011.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2011.txt_proj
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2011.txt_proj_u
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2012.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2012.txt_proj
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2012.txt_proj_u
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2013.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2013.txt_proj
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2013.txt_proj_u
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2014.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2014.txt_proj
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2014.txt_proj_u
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2015.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2015.txt_proj
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2015.txt_proj_u
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2016.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2016.txt_proj
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2016.txt_proj_u
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2017.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2017.txt_proj
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2017.txt_proj_u
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2018.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2018.txt_proj
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2018.txt_proj_u
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2019.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2019.txt_proj
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2019.txt_proj_u
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2020.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2020.txt_proj
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2020.txt_proj_u
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2021.txt_proj
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2021.txt_proj_u
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2022.txt_proj
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_glob.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2monthlycyc1976_2006.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2monthlycyc1976_2009.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/MEMO
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1956.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1957.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1958.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1959.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1960.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1961.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1962.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1963.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1964.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1965.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1966.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1967.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1968.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1969.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1970.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1971.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1972.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1973.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1974.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1975.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1976.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1977.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1978.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1979.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1980.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1981.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1982.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1983.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1984.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1985.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1986.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1987.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1988.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1989.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1990.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1991.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1992.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1993.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1994.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1995.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1996.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1997.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1998.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1999.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2000.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2001.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2002.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2003.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2004.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2005.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2006.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2007.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2008.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2009.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2009.txt_proj
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2009.txt_proj_u
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2010.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2010.txt_proj
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2010.txt_proj_u
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2011.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2011.txt_proj
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2011.txt_proj_u
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2012.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2012.txt_proj
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2012.txt_proj_u
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2013.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2013.txt_proj
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2013.txt_proj_u
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2014.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2014.txt_proj
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2014.txt_proj_u
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2015.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2015.txt_proj
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2015.txt_proj_u
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2016.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2016.txt_proj
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2016.txt_proj_u
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2017.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2017.txt_proj
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2017.txt_proj_u
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2018.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2018.txt_proj
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2018.txt_proj_u
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2019.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2019.txt_proj
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2019.txt_proj_u
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2020.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2020.txt_proj
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2020.txt_proj_u
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2021.txt_proj
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2021.txt_proj_u
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2022.txt_proj
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_glob.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2monthlycyc1976_2006.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2monthlycyc1976_2009.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/MEMO
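As an alternative to the per-file ``wget`` commands above (a sketch, assuming the AWS CLI is available and the bucket permits anonymous access), an entire fix-file directory such as ``co2dat_4a`` can be pulled in one call; the same pattern applies to the other ``fix_*`` directories below:

.. code-block:: console

   aws s3 cp --recursive --no-sign-request \
       s3://noaa-ufs-srw-pds/develop-20240618/fix/fix_am/co2dat_4a/ .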
``fix_am/fix_co2_proj`` Files:
@@ -714,20 +714,20 @@ Static Files for SRW App Release v2.1.0
.. code-block:: console
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/fix_co2_proj/global_co2historicaldata_2009.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/fix_co2_proj/global_co2historicaldata_2010.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/fix_co2_proj/global_co2historicaldata_2011.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/fix_co2_proj/global_co2historicaldata_2012.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/fix_co2_proj/global_co2historicaldata_2013.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/fix_co2_proj/global_co2historicaldata_2014.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/fix_co2_proj/global_co2historicaldata_2015.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/fix_co2_proj/global_co2historicaldata_2016.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/fix_co2_proj/global_co2historicaldata_2017.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/fix_co2_proj/global_co2historicaldata_2018.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/fix_co2_proj/global_co2historicaldata_2019.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/fix_co2_proj/global_co2historicaldata_2020.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/fix_co2_proj/global_co2historicaldata_2021.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/fix_co2_proj/global_co2historicaldata_2022.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/fix_co2_proj/global_co2historicaldata_2009.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/fix_co2_proj/global_co2historicaldata_2010.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/fix_co2_proj/global_co2historicaldata_2011.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/fix_co2_proj/global_co2historicaldata_2012.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/fix_co2_proj/global_co2historicaldata_2013.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/fix_co2_proj/global_co2historicaldata_2014.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/fix_co2_proj/global_co2historicaldata_2015.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/fix_co2_proj/global_co2historicaldata_2016.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/fix_co2_proj/global_co2historicaldata_2017.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/fix_co2_proj/global_co2historicaldata_2018.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/fix_co2_proj/global_co2historicaldata_2019.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/fix_co2_proj/global_co2historicaldata_2020.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/fix_co2_proj/global_co2historicaldata_2021.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/fix_co2_proj/global_co2historicaldata_2022.txt
``fix_am/fix_co2_update`` Files:
@@ -735,19 +735,19 @@ Static Files for SRW App Release v2.1.0
.. code-block:: console
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/fix_co2_update/global_co2historicaldata_2009.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/fix_co2_update/global_co2historicaldata_2010.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/fix_co2_update/global_co2historicaldata_2011.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/fix_co2_update/global_co2historicaldata_2012.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/fix_co2_update/global_co2historicaldata_2013.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/fix_co2_update/global_co2historicaldata_2014.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/fix_co2_update/global_co2historicaldata_2015.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/fix_co2_update/global_co2historicaldata_2016.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/fix_co2_update/global_co2historicaldata_2017.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/fix_co2_update/global_co2historicaldata_2018.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/fix_co2_update/global_co2historicaldata_2019.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/fix_co2_update/global_co2historicaldata_2020.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/fix_co2_update/global_co2historicaldata_2021.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/fix_co2_update/global_co2historicaldata_2009.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/fix_co2_update/global_co2historicaldata_2010.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/fix_co2_update/global_co2historicaldata_2011.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/fix_co2_update/global_co2historicaldata_2012.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/fix_co2_update/global_co2historicaldata_2013.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/fix_co2_update/global_co2historicaldata_2014.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/fix_co2_update/global_co2historicaldata_2015.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/fix_co2_update/global_co2historicaldata_2016.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/fix_co2_update/global_co2historicaldata_2017.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/fix_co2_update/global_co2historicaldata_2018.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/fix_co2_update/global_co2historicaldata_2019.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/fix_co2_update/global_co2historicaldata_2020.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/fix_co2_update/global_co2historicaldata_2021.txt
``fix_lut`` Files
@@ -755,12 +755,12 @@ Static Files for SRW App Release v2.1.0
.. code-block:: console
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_lut/optics_BC.v1_3.dat
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_lut/optics_DU.v15_3.dat
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_lut/optics_DU.v15_3.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_lut/optics_OC.v1_3.dat
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_lut/optics_SS.v3_3.dat
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_lut/optics_SU.v1_3.dat
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_lut/optics_BC.v1_3.dat
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_lut/optics_DU.v15_3.dat
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_lut/optics_DU.v15_3.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_lut/optics_OC.v1_3.dat
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_lut/optics_SS.v3_3.dat
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_lut/optics_SU.v1_3.dat
``fix_orog`` Files
@@ -768,26 +768,26 @@ Static Files for SRW App Release v2.1.0
.. code-block:: console
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_orog/clmgrb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_orog/clmgrb.index
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_orog/convert.f90
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_orog/gmted2010.30sec.flt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_orog/gmted2010.30sec.int
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_orog/gmted2010.30sec.flt.ctl
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_orog/gmted2010.30sec.int.ctl
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_orog/thirty.second.antarctic.new.bin
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_orog/GlobalLakeDepth.dat
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_orog/GlobalLakeDepth.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_orog/GlobalLakeStatus.dat
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_orog/GlobalLakeStatus.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_orog/gtopo30_gg.fine
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_orog/gtopo30_gg.fine.nh
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_orog/landcover30.fixed
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_orog/makefile
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_orog/run.lsf
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_orog/TOP8M_avg.20I4.asc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_orog/TOP8M_max.20I4.asc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_orog/TOP8M_slm.80I1.asc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_orog/clmgrb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_orog/clmgrb.index
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_orog/convert.f90
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_orog/gmted2010.30sec.flt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_orog/gmted2010.30sec.int
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_orog/gmted2010.30sec.flt.ctl
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_orog/gmted2010.30sec.int.ctl
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_orog/thirty.second.antarctic.new.bin
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_orog/GlobalLakeDepth.dat
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_orog/GlobalLakeDepth.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_orog/GlobalLakeStatus.dat
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_orog/GlobalLakeStatus.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_orog/gtopo30_gg.fine
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_orog/gtopo30_gg.fine.nh
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_orog/landcover30.fixed
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_orog/makefile
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_orog/run.lsf
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_orog/TOP8M_avg.20I4.asc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_orog/TOP8M_max.20I4.asc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_orog/TOP8M_slm.80I1.asc
@@ -796,26 +796,26 @@ Static Files for SRW App Release v2.1.0
.. code-block:: console
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_sfc_climo/facsf.1.0.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_sfc_climo/vegetation_type.igbp.0.03.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_sfc_climo/leaf_area_index.0.05.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_sfc_climo/vegetation_type.igbp.0.05.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_sfc_climo/maximum_snow_albedo.0.05.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_sfc_climo/vegetation_type.igbp.conus.0.01.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_sfc_climo/slope_type.1.0.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_sfc_climo/vegetation_type.modis.igbp.0.03.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_sfc_climo/snowfree_albedo.4comp.0.05.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_sfc_climo/vegetation_type.modis.igbp.0.05.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_sfc_climo/soil_type.statsgo.0.03.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_sfc_climo/vegetation_type.modis.igbp.conus.0.01.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_sfc_climo/soil_type.statsgo.0.05.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_sfc_climo/vegetation_type.viirs.igbp.0.03.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_sfc_climo/soil_type.statsgo.conus.0.01.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_sfc_climo/vegetation_type.viirs.igbp.0.05.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_sfc_climo/substrate_temperature.1.0.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_sfc_climo/vegetation_type.viirs.igbp.0.1.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_sfc_climo/substrate_temperature.2.6x1.5.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_sfc_climo/vegetation_type.viirs.igbp.conus.0.01.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_sfc_climo/vegetation_greenness.0.144.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_sfc_climo/facsf.1.0.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_sfc_climo/vegetation_type.igbp.0.03.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_sfc_climo/leaf_area_index.0.05.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_sfc_climo/vegetation_type.igbp.0.05.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_sfc_climo/maximum_snow_albedo.0.05.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_sfc_climo/vegetation_type.igbp.conus.0.01.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_sfc_climo/slope_type.1.0.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_sfc_climo/vegetation_type.modis.igbp.0.03.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_sfc_climo/snowfree_albedo.4comp.0.05.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_sfc_climo/vegetation_type.modis.igbp.0.05.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_sfc_climo/soil_type.statsgo.0.03.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_sfc_climo/vegetation_type.modis.igbp.conus.0.01.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_sfc_climo/soil_type.statsgo.0.05.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_sfc_climo/vegetation_type.viirs.igbp.0.03.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_sfc_climo/soil_type.statsgo.conus.0.01.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_sfc_climo/vegetation_type.viirs.igbp.0.05.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_sfc_climo/substrate_temperature.1.0.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_sfc_climo/vegetation_type.viirs.igbp.0.1.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_sfc_climo/substrate_temperature.2.6x1.5.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_sfc_climo/vegetation_type.viirs.igbp.conus.0.01.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_sfc_climo/vegetation_greenness.0.144.nc
From 7e8213f7aea91f3948a5f98bddcd0b6103a248a2 Mon Sep 17 00:00:00 2001
From: Michael Lueken <63728921+MichaelLueken@users.noreply.github.com>
Date: Mon, 12 Aug 2024 09:29:41 -0400
Subject: [PATCH 26/39] [develop] Update ufs-weather-model hash and further
clean the machines tested in PULL_REQUEST_TEMPLATE (#1096)
* Update ufs-weather-model hash to b5a1976 (July 30)
* Add hera.gnu, remove cheyenne.intel, cheyenne.gnu, and gaeac5.intel, and alphabetize the machines in the TESTS CONDUCTED section of the PULL_REQUEST_TEMPLATE
* Correct the behavior of the Jenkins Functional WorkflowTaskTests. Previously, TASK_DEPTH was set to null, resulting in no tests being run during the Functional WorkflowTaskTests stage. Replaced env with params in the Jenkinsfile for setting TASK_DEPTH; testing shows that this correctly sets TASK_DEPTH to the default value of 9 and allows the tests to run
* Removed extraneous entries from the verification scripts to eliminate KeyError messages in the associated verification log files (see the sourcing sketch below)
* Reapplied necessary modification to modulefiles/tasks/noaacloud/plot_allvars.local.lua to allow plotting tasks to run on NOAA cloud platforms
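For reference, a minimal sketch of the sourcing pattern these ex-scripts share (section names are copied from the diffs below; ``GLOBAL_VAR_DEFNS_FP`` is assumed to be set by the workflow). Each script sources only the YAML sections it needs, so naming a section that is absent from the variable-definitions file is what produced the KeyError messages:

.. code-block:: console

   . $USHdir/source_util_funcs.sh
   for sect in user nco platform workflow global verification cpl_aqm_parm \
               constants fixed_files grid_params task_run_post ; do
     source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
   done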
---
.cicd/Jenkinsfile | 4 ++--
.github/PULL_REQUEST_TEMPLATE | 10 ++++------
Externals.cfg | 2 +-
doc/ContribGuide/contributing.rst | 10 ++++------
modulefiles/tasks/noaacloud/plot_allvars.local.lua | 7 ++-----
.../exregional_run_met_genensprod_or_ensemblestat.sh | 2 +-
scripts/exregional_run_met_gridstat_or_pointstat_vx.sh | 2 +-
...egional_run_met_gridstat_or_pointstat_vx_ensmean.sh | 2 +-
...egional_run_met_gridstat_or_pointstat_vx_ensprob.sh | 2 +-
scripts/exregional_run_met_pb2nc_obs.sh | 3 +--
scripts/exregional_run_met_pcpcombine.sh | 2 +-
11 files changed, 19 insertions(+), 27 deletions(-)
diff --git a/.cicd/Jenkinsfile b/.cicd/Jenkinsfile
index 030661bd27..e6ed9515f2 100644
--- a/.cicd/Jenkinsfile
+++ b/.cicd/Jenkinsfile
@@ -193,9 +193,9 @@ pipeline {
// Try a few Workflow Task scripts to make sure E2E tests can be launched in a follow-on 'Test' stage
stage('Functional WorkflowTaskTests') {
environment {
- TASK_DEPTH = "${env.SRW_WRAPPER_TASK_DEPTH}"
+ TASK_DEPTH = "${params.SRW_WRAPPER_TASK_DEPTH}"
}
-
+
steps {
dir ("${env.SRW_PLATFORM}") {
echo "Running ${TASK_DEPTH} simple workflow script task tests on ${env.SRW_PLATFORM} (using ${env.WORKSPACE}/${env.SRW_PLATFORM})"
diff --git a/.github/PULL_REQUEST_TEMPLATE b/.github/PULL_REQUEST_TEMPLATE
index 1c363c651f..29a878d4a4 100644
--- a/.github/PULL_REQUEST_TEMPLATE
+++ b/.github/PULL_REQUEST_TEMPLATE
@@ -30,15 +30,13 @@
-- [ ] hera.intel
-- [ ] orion.intel
-- [ ] hercules.intel
-- [ ] cheyenne.intel
-- [ ] cheyenne.gnu
- [ ] derecho.intel
- [ ] gaea.intel
-- [ ] gaeac5.intel
+- [ ] hera.gnu
+- [ ] hera.intel
+- [ ] hercules.intel
- [ ] jet.intel
+- [ ] orion.intel
- [ ] wcoss2.intel
- [ ] NOAA Cloud (indicate which platform)
- [ ] Jenkins
diff --git a/Externals.cfg b/Externals.cfg
index 25ec5f79b9..4545cd8ca5 100644
--- a/Externals.cfg
+++ b/Externals.cfg
@@ -12,7 +12,7 @@ protocol = git
repo_url = https://github.com/ufs-community/ufs-weather-model
# Specify either a branch name or a hash but not both.
#branch = develop
-hash = 1c6b4d4
+hash = b5a1976
local_path = sorc/ufs-weather-model
required = True
diff --git a/doc/ContribGuide/contributing.rst b/doc/ContribGuide/contributing.rst
index ed1671363e..eb995efb41 100644
--- a/doc/ContribGuide/contributing.rst
+++ b/doc/ContribGuide/contributing.rst
@@ -227,15 +227,13 @@ Here is the template that is provided when developers click "Create pull request
- - [ ] hera.intel
- - [ ] orion.intel
- - [ ] hercules.intel
- - [ ] cheyenne.intel
- - [ ] cheyenne.gnu
- [ ] derecho.intel
- [ ] gaea.intel
- - [ ] gaeac5.intel
+ - [ ] hera.gnu
+ - [ ] hera.intel
+ - [ ] hercules.intel
- [ ] jet.intel
+ - [ ] orion.intel
- [ ] wcoss2.intel
- [ ] NOAA Cloud (indicate which platform)
- [ ] Jenkins
diff --git a/modulefiles/tasks/noaacloud/plot_allvars.local.lua b/modulefiles/tasks/noaacloud/plot_allvars.local.lua
index 2fd9b41eb5..85291013c7 100644
--- a/modulefiles/tasks/noaacloud/plot_allvars.local.lua
+++ b/modulefiles/tasks/noaacloud/plot_allvars.local.lua
@@ -1,5 +1,2 @@
-unload("python")
-append_path("MODULEPATH","/contrib/EPIC/miniconda3/modulefiles")
-load(pathJoin("miniconda3", os.getenv("miniconda3_ver") or "4.12.0"))
-
-setenv("SRW_GRAPHICS_ENV", "regional_workflow")
+load("conda")
+setenv("SRW_GRAPHICS_ENV", "srw_graphics")
diff --git a/scripts/exregional_run_met_genensprod_or_ensemblestat.sh b/scripts/exregional_run_met_genensprod_or_ensemblestat.sh
index 05503bb963..1c09dc09c6 100755
--- a/scripts/exregional_run_met_genensprod_or_ensemblestat.sh
+++ b/scripts/exregional_run_met_genensprod_or_ensemblestat.sh
@@ -10,7 +10,7 @@
. $USHdir/source_util_funcs.sh
for sect in user nco platform workflow nco global verification cpl_aqm_parm \
constants fixed_files grid_params \
- task_run_post task_run_vx_ensgrid ; do
+ task_run_post ; do
source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
done
#
diff --git a/scripts/exregional_run_met_gridstat_or_pointstat_vx.sh b/scripts/exregional_run_met_gridstat_or_pointstat_vx.sh
index 03c6093943..abe5e3dd31 100755
--- a/scripts/exregional_run_met_gridstat_or_pointstat_vx.sh
+++ b/scripts/exregional_run_met_gridstat_or_pointstat_vx.sh
@@ -10,7 +10,7 @@
. $USHdir/source_util_funcs.sh
for sect in user nco platform workflow nco global verification cpl_aqm_parm \
constants fixed_files grid_params \
- task_run_post task_run_vx_gridstat task_run_vx_pointstat ; do
+ task_run_post ; do
source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
done
#
diff --git a/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensmean.sh b/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensmean.sh
index 12a54dc21b..2c8378c128 100755
--- a/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensmean.sh
+++ b/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensmean.sh
@@ -10,7 +10,7 @@
. $USHdir/source_util_funcs.sh
for sect in user nco platform workflow nco global verification cpl_aqm_parm \
constants fixed_files grid_params \
- task_run_post task_run_vx_ensgrid_mean task_run_vx_enspoint_mean ; do
+ task_run_post ; do
source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
done
#
diff --git a/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensprob.sh b/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensprob.sh
index 8fd4a59dfe..eae1850ad8 100755
--- a/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensprob.sh
+++ b/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensprob.sh
@@ -10,7 +10,7 @@
. $USHdir/source_util_funcs.sh
for sect in user nco platform workflow nco global verification cpl_aqm_parm \
constants fixed_files grid_params \
- task_run_vx_ensgrid_prob task_run_vx_enspoint_prob task_run_post ; do
+ task_run_post ; do
source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
done
#
diff --git a/scripts/exregional_run_met_pb2nc_obs.sh b/scripts/exregional_run_met_pb2nc_obs.sh
index 5281021f01..7e79fb4efb 100755
--- a/scripts/exregional_run_met_pb2nc_obs.sh
+++ b/scripts/exregional_run_met_pb2nc_obs.sh
@@ -9,8 +9,7 @@
#
. $USHdir/source_util_funcs.sh
for sect in user nco platform workflow nco global verification cpl_aqm_parm \
- constants fixed_files grid_params \
- task_run_met_pb2nc_obs ; do
+ constants fixed_files grid_params ; do
source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
done
#
diff --git a/scripts/exregional_run_met_pcpcombine.sh b/scripts/exregional_run_met_pcpcombine.sh
index ce9e78ab17..026afb4eb2 100755
--- a/scripts/exregional_run_met_pcpcombine.sh
+++ b/scripts/exregional_run_met_pcpcombine.sh
@@ -10,7 +10,7 @@
. $USHdir/source_util_funcs.sh
for sect in user nco platform workflow nco global verification cpl_aqm_parm \
constants fixed_files grid_params \
- task_run_met_pcpcombine task_run_post ; do
+ task_run_post ; do
source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
done
#
From 83f173c431c5ac91bf6e3eb7765cd2a6be94f6db Mon Sep 17 00:00:00 2001
From: Natalie Perlin <68030316+natalie-perlin@users.noreply.github.com>
Date: Fri, 23 Aug 2024 08:56:34 -0400
Subject: [PATCH 27/39] [develop] Updates to devclean.sh script and plotting
scripts and tasks (#1100)
* The ./devclean.sh script that cleans SRW builds is updated; all cleaning tasks now operate on directories under the main SRW tree (see the usage sketch below)
* Documentation updated for the devclean.sh script changes
* Plotting scripts updated to have geographical data visible over the colored fields
* Plotting task updated to allow graphics output for individual ensemble members
* Use python3 to check out external submodules in the checkout_externals script; python3 is the default for the other scripts, and some systems, such as macOS, no longer ship with python2
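A brief usage sketch of the reworked script (flags as documented in the devclean.sh diff below; the comments summarize what each invocation removes, relative to the SRW clone):

.. code-block:: console

   ./devclean.sh --build          # remove build/, exec/, share/, include/, lib/, lib64/
   ./devclean.sh -b --container   # remove container-bin/ in place of exec/ (container builds)
   ./devclean.sh --conda          # remove the conda/ directory and the conda_loc file
   ./devclean.sh --all --force    # remove build artifacts, conda, and submodules without prompting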
---------
Co-authored-by: Natalie Perlin
Co-authored-by: Michael Lueken <63728921+MichaelLueken@users.noreply.github.com>
Co-authored-by: gsketefian <31046882+gsketefian@users.noreply.github.com>
Co-authored-by: Gillian Petro <96886803+gspetro-NOAA@users.noreply.github.com>
Co-authored-by: Christina Holt <56881914+christinaholtNOAA@users.noreply.github.com>
Co-authored-by: Michael Kavulich
Co-authored-by: michael.lueken
Co-authored-by: EdwardSnyder-NOAA <96196752+EdwardSnyder-NOAA@users.noreply.github.com>
Co-authored-by: Natalie Perlin
---
devclean.sh | 207 +++++++++---------
doc/UsersGuide/Reference/FAQ.rst | 28 ++-
manage_externals/checkout_externals | 2 +-
parm/wflow/plot.yaml | 53 +++--
scripts/exregional_plot_allvars.py | 4 +
scripts/exregional_plot_allvars_diff.py | 4 +
...S_25km_ics_NAM_lbcs_NAM_suite_GFS_v16.yaml | 6 +-
7 files changed, 167 insertions(+), 137 deletions(-)
diff --git a/devclean.sh b/devclean.sh
index 01ace7a7d9..6cd9bed11f 100755
--- a/devclean.sh
+++ b/devclean.sh
@@ -4,33 +4,31 @@
usage () {
cat << EOF_USAGE
-Clean the UFS-SRW Application build
+Clean the UFS-SRW Application build.
+
+NOTE: If the user included custom directories at build time, those directories must be deleted manually
+
Usage: $0 [OPTIONS] ...
OPTIONS
-h, --help
- show this help guide
+ Show this help guide
-a, --all
- removes "bin", "build" directories, and other build artifacts
- --remove
- removes the "build" directory, keeps the "bin", "lib" and other build artifacts intact
- --clean
- removes "bin", "build" directories, and other build artifacts (same as "-a", "--all")
- --conda
- removes "conda" directory and conda_loc file in SRW
- --install-dir=INSTALL_DIR
- installation directory name (\${SRW_DIR} by default)
- --build-dir=BUILD_DIR
- main build directory, absolute path (\${SRW_DIR}/build/ by default)
- --bin-dir=BIN_DIR
- binary directory name ("exec" by default); full path is \${INSTALL_DIR}/\${BIN_DIR})
- --conda-dir=CONDA_DIR
- directory where conda is installed. caution: if outside the SRW clone, it may have broader use
- --sub-modules
- remove sub-module directories. They will need to be checked out again by sourcing "\${SRW_DIR}/manage_externals/checkout_externals" before attempting subsequent builds
+ Remove all build artifacts, conda and submodules (equivalent to \`-b -c -s\`)
+ -b, --build
+ Remove build directories and artifacts: build/ exec/ share/ include/ lib/ lib64/
+ -c, --conda
+ Remove "conda" directory and conda_loc file in SRW main directory
+ --container
+ For cleaning builds within the SRW containers, will remove the "container-bin"
+ directory rather than "exec". Has no effect if \`-b\` is not specified.
+ -f, --force
+ Remove directories as requested, without asking for user confirmation of their deletion.
+ -s, -sub-modules
+ Remove sub-module directories. They need to be checked out again by sourcing "\${SRW_DIR}/manage_externals/checkout_externals" before attempting subsequent builds
-v, --verbose
- provide more verbose output
-
+ Provide more verbose output
+
EOF_USAGE
}
@@ -39,17 +37,10 @@ settings () {
cat << EOF_SETTINGS
Settings:
- INSTALL_DIR=${INSTALL_DIR}
- BUILD_DIR=${BUILD_DIR}
- BIN_DIR=${BIN_DIR}
- CONDA_DIR=${CONDA_DIR}
- REMOVE=${REMOVE}
+ FORCE=${REMOVE}
VERBOSE=${VERBOSE}
-
-Default cleaning options: (if no arguments provided, then nothing is cleaned)
- REMOVE=${REMOVE}
- CLEAN=${CLEAN}
- INCLUDE_SUB_MODULES=${INCLUDE_SUB_MODULES}
+ REMOVE_SUB_MODULES=${REMOVE_SUB_MODULES}
+ REMOVE_CONDA=${REMOVE_CONDA}
EOF_SETTINGS
}
@@ -63,46 +54,28 @@ usage_error () {
# default settings
SRW_DIR=$(cd "$(dirname "$(readlink -f -n "${BASH_SOURCE[0]}" )" )" && pwd -P)
-INSTALL_DIR=${INSTALL_DIR:-${SRW_DIR}}
-BUILD_DIR=${BUILD_DIR:-"${SRW_DIR}/build"}
-BIN_DIR="exec"
-CONDA_DIR=${CONDA_DIR:-"${SRW_DIR}/conda"}
-REMOVE=false
VERBOSE=false
# default clean options
REMOVE=false
-CLEAN=false
-INCLUDE_SUB_MODULES=false #changes to true if '--sub-modules' option is provided
+REMOVE_BUILD=false
+REMOVE_CONDA=false
+REMOVE_SUB_MODULES=false
+CONTAINER=false
-# process requires arguments
-if [[ ("$1" == "--help") || ("$1" == "-h") ]]; then
- usage
- exit 0
-fi
-
-# process optional arguments
+# process arguments
while :; do
case $1 in
--help|-h) usage; exit 0 ;;
- --all|-a) ALL_CLEAN=true ;;
- --remove) REMOVE=true ;;
- --remove=?*|--remove=) usage_error "$1 argument ignored." ;;
- --clean) CLEAN=true ;;
- --conda) REMOVE_CONDA=true ;;
- --install-dir=?*) INSTALL_DIR=${1#*=} ;;
- --install-dir|--install-dir=) usage_error "$1 requires argument." ;;
- --build-dir=?*) BUILD_DIR=${1#*=} ;;
- --build-dir|--build-dir=) usage_error "$1 requires argument." ;;
- --bin-dir=?*) BIN_DIR=${1#*=} ;;
- --bin-dir|--bin-dir=) usage_error "$1 requires argument." ;;
- --conda-dir=?*) CONDA_DIR=${1#*=} ;;
- --conda-dir|--conda-dir=) usage_error "$1 requires argument." ;;
- --sub-modules) INCLUDE_SUB_MODULES=true ;;
+ --all|-a) REMOVE_BUILD=true; REMOVE_CONDA=true; REMOVE_SUB_MODULES=true ;;
+ --build|-b) REMOVE_BUILD=true ;;
+ --conda|-c) REMOVE_CONDA=true ;;
+ --container) CONTAINER=true ;;
+ --force|-f) REMOVE=true ;;
+ --force=?*|--force=) usage_error "$1 argument ignored." ;;
+ --sub-modules|-s) REMOVE_SUB_MODULES=true ;;
+ --sub-modules=?*|--sub-modules=) usage_error "$1 argument ignored." ;;
--verbose|-v) VERBOSE=true ;;
- --verbose=?*|--verbose=) usage_error "$1 argument ignored." ;;
- # targets
- default) ALL_CLEAN=false ;;
# unknown
-?*|?*) usage_error "Unknown option $1" ;;
*) break ;;
@@ -110,66 +83,94 @@ while :; do
shift
done
-# choose defaults to clean
-if [ "${ALL_CLEAN}" = true ]; then
- CLEAN=true
-fi
# print settings
if [ "${VERBOSE}" = true ] ; then
settings
fi
-# clean if build directory already exists
-if [ "${REMOVE}" = true ] && [ "${CLEAN}" = false ] ; then
- printf '%s\n' "Remove the \"build\" directory only, BUILD_DIR = $BUILD_DIR "
- [[ -d ${BUILD_DIR} ]] && rm -rf ${BUILD_DIR} && printf '%s\n' "rm -rf ${BUILD_DIR}"
-elif [ "${CLEAN}" = true ]; then
- printf '%s\n' "Remove build directory, bin directory, and other build artifacts "
- printf '%s\n' " from the installation directory = ${INSTALL_DIR} "
-
- directories=( \
- "${BUILD_DIR}" \
- "${INSTALL_DIR}/${BIN_DIR}" \
- "${INSTALL_DIR}/share" \
- "${INSTALL_DIR}/include" \
- "${INSTALL_DIR}/lib" \
- "${INSTALL_DIR}/lib64" \
+# Populate "removal_list" as an array of files/directories to remove, based on user selections
+declare -a removal_list='()'
+
+# Clean standard build artifacts
+if [ ${REMOVE_BUILD} == true ]; then
+ removal_list=( \
+ "${SRW_DIR}/build" \
+ "${SRW_DIR}/share" \
+ "${SRW_DIR}/include" \
+ "${SRW_DIR}/lib" \
+ "${SRW_DIR}/lib64" \
)
- if [ ${#directories[@]} -ge 1 ]; then
- for dir in ${directories[@]}; do
- [[ -d "${dir}" ]] && rm -rfv ${dir}
- done
- echo " "
+ if [ ${CONTAINER} == true ]; then
+ removal_list+=("${SRW_DIR}/container-bin")
+ else
+ removal_list+=("${SRW_DIR}/exec")
fi
fi
-# Clean all the submodules if requested. Note: Need to check out them again before attempting subsequent builds, by sourcing ${SRW_DIR}/manage_externals/checkout_externals
-if [ ${INCLUDE_SUB_MODULES} == true ]; then
- printf '%s\n' "Removing submodules ..."
+
+# Clean all the submodules if requested.
+if [ ${REMOVE_SUB_MODULES} == true ]; then
declare -a submodules='()'
- submodules=(${SRW_DIR}/sorc/*)
-# echo " submodules are: ${submodules[@]} (total of ${#submodules[@]}) "
- if [ ${#submodules[@]} -ge 1 ]; then
- for sub in ${submodules[@]}; do [[ -d "${sub}" ]] && ( rm -rf ${sub} && printf '%s\n' "rm -rf ${sub}" ); done
+ submodules=(./sorc/*)
+ # Only add directories to make sure we don't delete CMakeLists.txt
+ for sub in ${submodules[@]}; do [[ -d "${sub}" ]] && removal_list+=( "${sub}" ); done
+ if [ "${VERBOSE}" = true ] ; then
+ printf '%s\n' "Note: Need to check out submodules again for any subsequent builds, " \
+ " by running ${SRW_DIR}/manage_externals/checkout_externals "
fi
- printf '%s\n' "Note: Need to check out submodules again for any subsequent builds, " \
- " by sourcing ${SRW_DIR}/manage_externals/checkout_externals "
fi
-#
# Clean conda if requested
if [ "${REMOVE_CONDA}" = true ] ; then
- printf '%s\n' "Removing conda location file"
- rm -rf ${SRW_DIR}/conda_loc
- printf '%s\n' "Removing conda installation"
- rm -rf ${CONDA_DIR}
+ # Read the "conda_loc" file to determine the location of the conda install; if the user has changed it to a
+ # non-default location, they likely do not want to remove it!
+ conda_location=$(<${SRW_DIR}/conda_loc)
+ if [ "${VERBOSE}" = true ] ; then
+ echo "conda_location=$conda_location"
+ fi
+ if [ "${conda_location}" == "${SRW_DIR}/conda" ]; then
+ removal_list+=("${SRW_DIR}/conda_loc")
+ removal_list+=("${SRW_DIR}/conda")
+ else
+ echo "WARNING: location of conda build in ${SRW_DIR}/conda_loc is not the default location!"
+ echo "Will not attempt to remove conda!"
+ fi
fi
+# If array is empty, that means user has not selected any removal options
+if [ ${#removal_list[@]} -eq 0 ]; then
+ usage_error "No removal options specified"
+fi
+while [ ${REMOVE} == false ]; do
+ # Make user confirm deletion of directories unless '--force' option was provided
+ printf "The following files/directories will be deleted:\n\n"
+ for i in "${removal_list[@]}"; do
+ echo "$i"
+ done
+ echo ""
+ read -p "Confirm that you want to delete these files/directories! (Yes/No): " choice
+ case ${choice} in
+ [Yy]* ) REMOVE=true ;;
+ [Nn]* ) echo "User chose not to delete, exiting..."; exit ;;
+ * ) printf "Invalid option selected.\n" ;;
+ esac
+done
+
+if [ ${REMOVE} == true ]; then
+ for dir in ${removal_list[@]}; do
+ echo "Removing ${dir}"
+ if [ "${VERBOSE}" = true ] ; then
+ rm -rfv ${dir}
+ else
+ rm -rf ${dir}
+ fi
+ done
+ echo " "
+ echo "All the requested cleaning tasks have been completed"
+ echo " "
+fi
-echo " "
-echo "All the requested cleaning tasks have been completed"
-echo " "
exit 0
diff --git a/doc/UsersGuide/Reference/FAQ.rst b/doc/UsersGuide/Reference/FAQ.rst
index 21bef328a3..e8c3df0dec 100644
--- a/doc/UsersGuide/Reference/FAQ.rst
+++ b/doc/UsersGuide/Reference/FAQ.rst
@@ -20,34 +20,48 @@ Building the SRW App
How can I clean up the SRW App code if something went wrong during the build?
===============================================================================
-The ``ufs-srweather-app`` repository contains a ``devclean.sh`` convenience script. This script can be used to clean up code if something goes wrong when checking out externals or building the application. To view usage instructions and to get help, run with the ``-h`` flag:
+The ``ufs-srweather-app`` repository contains a ``devclean.sh`` convenience script. This script can be used to clean up code if something goes wrong when checking out externals or building the application. To view usage instructions and to get help, run with the ``-h`` or ``--help`` flag:
.. code-block:: console
./devclean.sh -h
-To remove the ``build`` directory, run:
+To remove all the build artifacts and directories except the conda installation, use the ``-b`` or ``--build`` flag:
.. code-block:: console
- ./devclean.sh --remove
+ ./devclean.sh --build
-To remove all build artifacts (including ``build``, ``exec``, ``lib``, and ``share``), run:
+When running the SRW App in a container, add the ``--container`` option to remove the ``container-bin`` directory in lieu of ``exec``, e.g.:
.. code-block:: console
- ./devclean.sh --clean
+ ./devclean.sh -b --container
+
+To remove only the ``conda`` directory and the ``conda_loc`` file in the main SRW directory, run with the ``-c`` or ``--conda`` flag:
+
+.. code-block:: console
+
+ ./devclean.sh --conda
OR
- ./devclean.sh -a
+ ./devclean.sh -c
-To remove external submodules, run:
+To remove external submodules, run with the ``-s`` or ``--sub-modules`` flag:
.. code-block:: console
./devclean.sh --sub-modules
+To remove all build artifacts, conda, and submodules (equivalent to ``-b -c -s``), run with the ``-a`` or ``--all`` flag:
+
+.. code-block:: console
+
+ ./devclean.sh --all
+
+
Users will need to check out the external submodules again before building the application.
+
In addition to the options above, many standard terminal commands can be run to remove unwanted files and directories (e.g., ``rm -rf expt_dirs``). A complete explanation of these options is beyond the scope of this User's Guide.
===========================
diff --git a/manage_externals/checkout_externals b/manage_externals/checkout_externals
index a0698baef0..48bce24010 100755
--- a/manage_externals/checkout_externals
+++ b/manage_externals/checkout_externals
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
"""Main driver wrapper around the manic/checkout utility.
diff --git a/parm/wflow/plot.yaml b/parm/wflow/plot.yaml
index 445d238c15..0c98e51711 100644
--- a/parm/wflow/plot.yaml
+++ b/parm/wflow/plot.yaml
@@ -12,10 +12,12 @@ default_task_plot: &default_task
PDY: !cycstr "@Y@m@d"
cyc: !cycstr "@H"
subcyc: !cycstr "@M"
+ fhr: '#fhr#'
LOGDIR: !cycstr "&LOGDIR;"
SLASH_ENSMEM_SUBDIR: '&SLASH_ENSMEM_SUBDIR;'
ENSMEM_INDX: '#mem#'
- nprocs: '{{ nnodes * ppn }}'
+ nprocs: '{{ parent.nnodes * parent.ppn }}'
+ join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;'
native: '{{ platform.SCHED_NATIVE_CMD }}'
nnodes: 1
nodes: '{{ nnodes }}:ppn={{ ppn }}'
@@ -24,25 +26,30 @@ default_task_plot: &default_task
queue: '&QUEUE_DEFAULT;'
walltime: 01:00:00
-task_plot_allvars:
- <<: *default_task
- command: '&LOAD_MODULES_RUN_TASK; "plot_allvars" "&JOBSdir;/JREGIONAL_PLOT_ALLVARS"'
- join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;'
- dependency:
- or_do_post: &post_files_exist
- and_run_post: # If post was meant to run, wait on the whole post metatask
- taskvalid:
- attrs:
- task: run_post_mem000_f000
- metataskdep:
- attrs:
- metatask: run_ens_post
- and_inline_post: # If inline post ran, wait on the forecast task to complete
- not:
- taskvalid:
- attrs:
- task: run_post_mem000_f000
- taskdep:
- attrs:
- task: run_fcst_mem000
-
+metatask_plot_allvars:
+ var:
+ mem: '{% if global.DO_ENSEMBLE %}{%- for m in range(1, global.NUM_ENS_MEMBERS+1) -%}{{ "%03d "%m }}{%- endfor -%} {% else %}{{ "000"|string }}{% endif %}'
+ metatask_plot_allvars_mem#mem#_all_fhrs:
+ var:
+ fhr: '{% for h in range(0, workflow.LONG_FCST_LEN+1) %}{{ " %03d" % h }}{% endfor %}'
+ cycledef: '{% for h in range(0, workflow.LONG_FCST_LEN+1) %}{% if h <= workflow.FCST_LEN_CYCL|min %}forecast {% else %}long_forecast {% endif %}{% endfor %}'
+ task_plot_allvars_mem#mem#_f#fhr#:
+ <<: *default_task
+ command: '&LOAD_MODULES_RUN_TASK; "plot_allvars" "&JOBSdir;/JREGIONAL_PLOT_ALLVARS"'
+ dependency:
+ or_do_post: &post_files_exist
+ and_run_post: # If post was meant to run, wait on the whole post metatask
+ taskvalid:
+ attrs:
+ task: run_post_mem#mem#_f#fhr#
+ metataskdep:
+ attrs:
+ metatask: run_ens_post
+ and_inline_post: # If inline post ran, wait on the forecast task to complete
+ not:
+ taskvalid:
+ attrs:
+ task: run_post_mem#mem#_f#fhr#
+ taskdep:
+ attrs:
+ task: run_post_mem#mem#_f#fhr#
\ No newline at end of file
diff --git a/scripts/exregional_plot_allvars.py b/scripts/exregional_plot_allvars.py
index 27eff0f4b0..040e17b012 100755
--- a/scripts/exregional_plot_allvars.py
+++ b/scripts/exregional_plot_allvars.py
@@ -577,6 +577,7 @@ def plot_all(dom):
facecolor="none",
linewidth=fline_wd,
alpha=falpha,
+ zorder=4,
)
coastline = cfeature.NaturalEarthFeature(
"physical",
@@ -586,6 +587,7 @@ def plot_all(dom):
facecolor="none",
linewidth=fline_wd,
alpha=falpha,
+ zorder=4,
)
states = cfeature.NaturalEarthFeature(
"cultural",
@@ -596,6 +598,7 @@ def plot_all(dom):
linewidth=fline_wd,
linestyle=":",
alpha=falpha,
+ zorder=4,
)
borders = cfeature.NaturalEarthFeature(
"cultural",
@@ -605,6 +608,7 @@ def plot_all(dom):
facecolor="none",
linewidth=fline_wd,
alpha=falpha,
+ zorder=4,
)
# All lat lons are earth relative, so setup the associated projection correct for that data
diff --git a/scripts/exregional_plot_allvars_diff.py b/scripts/exregional_plot_allvars_diff.py
index e51a3a6b57..61efcdb82b 100755
--- a/scripts/exregional_plot_allvars_diff.py
+++ b/scripts/exregional_plot_allvars_diff.py
@@ -652,6 +652,7 @@ def plot_all(dom):
facecolor="none",
linewidth=fline_wd,
alpha=falpha,
+ zorder=4,
)
coastline = cfeature.NaturalEarthFeature(
"physical",
@@ -661,6 +662,7 @@ def plot_all(dom):
facecolor="none",
linewidth=fline_wd,
alpha=falpha,
+ zorder=4,
)
states = cfeature.NaturalEarthFeature(
"cultural",
@@ -671,6 +673,7 @@ def plot_all(dom):
linewidth=fline_wd,
linestyle=":",
alpha=falpha,
+ zorder=4,
)
borders = cfeature.NaturalEarthFeature(
"cultural",
@@ -680,6 +683,7 @@ def plot_all(dom):
facecolor="none",
linewidth=fline_wd,
alpha=falpha,
+ zorder=4,
)
# All lat lons are earth relative, so setup the associated projection correct for that data
diff --git a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUS_25km_ics_NAM_lbcs_NAM_suite_GFS_v16.yaml b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUS_25km_ics_NAM_lbcs_NAM_suite_GFS_v16.yaml
index 933042c82f..8e93259539 100644
--- a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUS_25km_ics_NAM_lbcs_NAM_suite_GFS_v16.yaml
+++ b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUS_25km_ics_NAM_lbcs_NAM_suite_GFS_v16.yaml
@@ -3,8 +3,8 @@ metadata:
This test is to ensure that the workflow running in community mode
completes successfully on the RRFS_CONUS_25km grid using the GFS_v16
physics suite with ICs and LBCs derived from the NAM.
- This test also runs with two ensemble members, and ensures the MET
- ensemble-specific tasks run successfully.
+ This test also runs with two ensemble members, runs plotting tasks for each
+ ensemble member, and ensures the MET ensemble-specific tasks run successfully.
user:
RUN_ENVIR: community
workflow:
@@ -16,7 +16,7 @@ workflow:
PREEXISTING_DIR_METHOD: rename
rocoto:
tasks:
- taskgroups: '{{ ["parm/wflow/prep.yaml", "parm/wflow/coldstart.yaml", "parm/wflow/post.yaml", "parm/wflow/verify_pre.yaml", "parm/wflow/verify_det.yaml", "parm/wflow/verify_ens.yaml", "parm/wflow/test.yaml"]|include }}'
+ taskgroups: '{{ ["parm/wflow/prep.yaml", "parm/wflow/coldstart.yaml", "parm/wflow/post.yaml", "parm/wflow/plot.yaml", "parm/wflow/verify_pre.yaml", "parm/wflow/verify_det.yaml", "parm/wflow/verify_ens.yaml", "parm/wflow/test.yaml"]|include }}'
metatask_run_ensemble:
task_run_fcst_mem#mem#:
walltime: 01:00:00
From d9c5fec963243903f154c0e6225ac6acec2927cf Mon Sep 17 00:00:00 2001
From: Anna Kimball <131040494+ankimball@users.noreply.github.com>
Date: Wed, 4 Sep 2024 08:10:18 -0500
Subject: [PATCH 28/39] [develop] Fix for SonarQube forked repo renaming
failure (#1115)
The SonarQube job fails to find a user's repository if the user renames it when creating a fork. This change to the Jenkinsfile passes the user's URL to the SonarQube job so that the job does not have to construct the URL itself. It also passes the change ID (PR number) so that information on the SonarQube job can be archived to S3 and properly aligned with the corresponding PR.
---
.cicd/Jenkinsfile | 8 +++++++-
1 file changed, 7 insertions(+), 1 deletion(-)
diff --git a/.cicd/Jenkinsfile b/.cicd/Jenkinsfile
index e6ed9515f2..5b90ab1173 100644
--- a/.cicd/Jenkinsfile
+++ b/.cicd/Jenkinsfile
@@ -29,9 +29,15 @@ pipeline {
stage('Launch SonarQube') {
steps {
script {
+ echo "BRANCH_NAME=${env.CHANGE_BRANCH}"
+ echo "FORK_NAME=${env.CHANGE_FORK}"
+ echo "CHANGE_URL=${env.CHANGE_URL}"
+ echo "CHANGE_ID=${env.CHANGE_ID}"
build job: '/ufs-srweather-app/ufs-srw-sonarqube', parameters: [
string(name: 'BRANCH_NAME', value: env.CHANGE_BRANCH ?: 'develop'),
- string(name: 'FORK_NAME', value: env.CHANGE_FORK ?: '')
+ string(name: 'FORK_NAME', value: env.CHANGE_FORK ?: ''),
+ string(name: 'CHANGE_URL', value: env.CHANGE_URL ?: ''),
+ string(name: 'CHANGE_ID', value: env.CHANGE_ID ?: '')
], wait: false
}
}
From 26cdad8e2045612d4c67d201d6870016e8582afd Mon Sep 17 00:00:00 2001
From: Natalie Perlin <68030316+natalie-perlin@users.noreply.github.com>
Date: Thu, 12 Sep 2024 09:01:54 -0400
Subject: [PATCH 29/39] [develop] Added an option for RRFS external model files
used as ICS and LBCS (#1089)
* An option to use RRFS model output (control) files as initial and lateral boundary conditions (ICS and LBCS) is added.
RRFS_a data for the test was retrieved from the NODD registry (https://registry.opendata.aws/noaa-rrfs/): pressure-level GRIB2 files from the control directory, containing RRFS forecasts interpolated onto a regular 3-km grid.
* A new test, grid_RRFS_CONUScompact_25km_ics_RRFS_lbcs_RRFS_suite_RRFS_v1beta, has been added; it uses RRFS input files for the 06/05/2024 event with tornadoes reported in Maryland (see the usage sketch below).
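As a usage sketch (the test name comes from the bullet above; the ``run_WE2E_tests.py`` options follow the App's usual pattern, and the machine and account values are placeholders):

.. code-block:: console

   cd tests/WE2E
   ./run_WE2E_tests.py -t grid_RRFS_CONUScompact_25km_ics_RRFS_lbcs_RRFS_suite_RRFS_v1beta \
                       -m hera -a my_account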
---------
Co-authored-by: Natalie Perlin
Co-authored-by: Natalie Perlin
Co-authored-by: Christina Holt <56881914+christinaholtNOAA@users.noreply.github.com>
Co-authored-by: Michael Lueken <63728921+MichaelLueken@users.noreply.github.com>
Co-authored-by: Gillian Petro <96886803+gspetro-NOAA@users.noreply.github.com>
---
devclean.sh | 2 +-
doc/UsersGuide/BackgroundInfo/Components.rst | 2 +-
.../BuildingRunningTesting/RunSRW.rst | 1 +
.../BuildingRunningTesting/WE2Etests.rst | 1 +
.../CustomizingTheWorkflow/ConfigWorkflow.rst | 4 +--
.../InputOutputFiles.rst | 14 +++++----
.../CustomizingTheWorkflow/LAMGrids.rst | 2 +-
doc/UsersGuide/Reference/Glossary.rst | 3 +-
parm/data_locations.yml | 14 +++++++++
parm/wflow/plot.yaml | 3 +-
scripts/exregional_make_ics.sh | 18 +++++++-----
scripts/exregional_make_lbcs.sh | 13 ++++++---
tests/WE2E/machine_suites/comprehensive | 1 +
.../WE2E/machine_suites/comprehensive.derecho | 1 +
.../machine_suites/comprehensive.noaacloud | 1 +
tests/WE2E/machine_suites/comprehensive.orion | 1 +
tests/WE2E/machine_suites/coverage.orion | 1 +
..._ics_RRFS_lbcs_RRFS_suite_RRFS_v1beta.yaml | 29 +++++++++++++++++++
ush/setup.py | 4 +--
ush/valid_param_vals.yaml | 4 +--
20 files changed, 92 insertions(+), 27 deletions(-)
create mode 100644 tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUScompact_25km_ics_RRFS_lbcs_RRFS_suite_RRFS_v1beta.yaml
diff --git a/devclean.sh b/devclean.sh
index 6cd9bed11f..b26988dd93 100755
--- a/devclean.sh
+++ b/devclean.sh
@@ -24,7 +24,7 @@ OPTIONS
directory rather than "exec". Has no effect if \`-b\` is not specified.
-f, --force
Remove directories as requested, without asking for user confirmation of their deletion.
- -s, -sub-modules
+ -s, --sub-modules
Remove sub-module directories. They need to be checked out again by sourcing "\${SRW_DIR}/manage_externals/checkout_externals" before attempting subsequent builds
-v, --verbose
Provide more verbose output
diff --git a/doc/UsersGuide/BackgroundInfo/Components.rst b/doc/UsersGuide/BackgroundInfo/Components.rst
index 1ba9349d8d..559576725d 100644
--- a/doc/UsersGuide/BackgroundInfo/Components.rst
+++ b/doc/UsersGuide/BackgroundInfo/Components.rst
@@ -22,7 +22,7 @@ UFS Preprocessing Utilities (UFS_UTILS)
The SRW Application includes a number of pre-processing utilities (UFS_UTILS) that initialize and prepare the model. Since the SRW App provides forecast predictions over a limited area (rather than globally), these utilities generate a regional grid (``regional_esg_grid/make_hgrid``) along with :term:`orography` (``orog``) and surface climatology (``sfc_climo_gen``) files on that grid. Grids include a strip, or "halo," of six cells that surround the regional grid and feed in lateral boundary condition data. Since different grid and orography files require different numbers of :term:`halo` cells, additional utilities handle topography filtering and shave the number of halo points (based on downstream workflow component requirements). The pre-processing software :term:`chgres_cube` is used to convert the raw external model data into initial and lateral boundary condition files in :term:`netCDF` format. These are needed as input to the :term:`FV3` limited area model (:term:`LAM`). Additional information about the UFS pre-processing utilities can be found in the :doc:`UFS_UTILS Technical Documentation ` and in the `UFS_UTILS Scientific Documentation `__.
-The SRW Application can be initialized from a range of operational initial condition files. It is possible to initialize the model from the Global Forecast System (:term:`GFS`), North American Mesoscale (:term:`NAM`) Forecast System, Rapid Refresh (:term:`RAP`), and High-Resolution Rapid Refresh (:term:`HRRR`) files in Gridded Binary v2 (:term:`GRIB2`) format. GFS files also come in :term:`NEMSIO` format for past dates.
+The SRW Application can be initialized from a range of operational initial condition files. It is possible to initialize the model from the Global Forecast System (:term:`GFS`), North American Mesoscale (:term:`NAM`) Forecast System, Rapid Refresh (:term:`RAP`), High-Resolution Rapid Refresh (:term:`HRRR`), and Rapid Refresh Forecast System (:term:`RRFS`) files in Gridded Binary v2 (:term:`GRIB2`) format. GFS files also come in :term:`NEMSIO` format for past dates.
.. WARNING::
For GFS data, dates prior to 1 January 2018 may work but are not guaranteed. Public archives of model data can be accessed through the `NOAA Operational Model Archive and Distribution System `__ (NOMADS). Raw external model data may be pre-staged on disk by the user.
diff --git a/doc/UsersGuide/BuildingRunningTesting/RunSRW.rst b/doc/UsersGuide/BuildingRunningTesting/RunSRW.rst
index d7fd7407a8..b9471acd69 100644
--- a/doc/UsersGuide/BuildingRunningTesting/RunSRW.rst
+++ b/doc/UsersGuide/BuildingRunningTesting/RunSRW.rst
@@ -549,6 +549,7 @@ The ``data:`` section of the machine file can point to various data sources that
netcdf: /Users/username/DATA/UFS/FV3GFS/netcdf
RAP: /Users/username/DATA/UFS/RAP/grib2
HRRR: /Users/username/DATA/UFS/HRRR/grib2
+ RRFS: /Users/username/DATA/UFS/RRFS/grib2
This can be helpful when conducting multiple experiments with different types of data.
diff --git a/doc/UsersGuide/BuildingRunningTesting/WE2Etests.rst b/doc/UsersGuide/BuildingRunningTesting/WE2Etests.rst
index 4fca53b575..b3a7bf847b 100644
--- a/doc/UsersGuide/BuildingRunningTesting/WE2Etests.rst
+++ b/doc/UsersGuide/BuildingRunningTesting/WE2Etests.rst
@@ -78,6 +78,7 @@ For convenience, the WE2E tests are currently grouped into the following categor
FV3GFS:
RAP:
HRRR:
+ RRFS:
Some tests are duplicated among the above categories via symbolic links, both for legacy reasons (when tests for different capabilities were consolidated) and for convenience when a user would like to run all tests for a specific category (e.g., verification tests).
diff --git a/doc/UsersGuide/CustomizingTheWorkflow/ConfigWorkflow.rst b/doc/UsersGuide/CustomizingTheWorkflow/ConfigWorkflow.rst
index 5161268980..50835a2451 100644
--- a/doc/UsersGuide/CustomizingTheWorkflow/ConfigWorkflow.rst
+++ b/doc/UsersGuide/CustomizingTheWorkflow/ConfigWorkflow.rst
@@ -912,7 +912,7 @@ Basic Task Parameters
For each workflow task, certain parameter values must be passed to the job scheduler (e.g., Slurm), which submits a job for the task.
``EXTRN_MDL_NAME_ICS``: (Default: "FV3GFS")
- The name of the external model that will provide fields from which initial condition (IC) files, surface files, and 0-th hour boundary condition files will be generated for input into the forecast model. Valid values: ``"GSMGFS"`` | ``"FV3GFS"`` | ``"GEFS"`` | ``"GDAS"`` | ``"RAP"`` | ``"HRRR"`` | ``"NAM"`` | ``"UFS-CASE-STUDY"``
+ The name of the external model that will provide fields from which initial condition (IC) files, surface files, and 0-th hour boundary condition files will be generated for input into the forecast model. Valid values: ``"GSMGFS"`` | ``"FV3GFS"`` | ``"GEFS"`` | ``"GDAS"`` | ``"RAP"`` | ``"HRRR"`` | ``"RRFS"`` | ``"NAM"`` | ``"UFS-CASE-STUDY"``
``EXTRN_MDL_ICS_OFFSET_HRS``: (Default: 0)
Users may wish to start a forecast using forecast data from a previous cycle of an external model. This variable indicates how many hours earlier the external model started than the FV3 forecast configured here. For example, if the forecast should start from a 6-hour forecast of the GFS, then ``EXTRN_MDL_ICS_OFFSET_HRS: "6"``.
@@ -966,7 +966,7 @@ Basic Task Parameters
For each workflow task, certain parameter values must be passed to the job scheduler (e.g., Slurm), which submits a job for the task.
``EXTRN_MDL_NAME_LBCS``: (Default: "FV3GFS")
- The name of the external model that will provide fields from which lateral boundary condition (LBC) files (except for the 0-th hour LBC file) will be generated for input into the forecast model. Valid values: ``"GSMGFS"`` | ``"FV3GFS"`` | ``"GEFS"`` | ``"GDAS"`` | ``"RAP"`` | ``"HRRR"`` | ``"NAM"`` | ``"UFS-CASE-STUDY"``
+ The name of the external model that will provide fields from which lateral boundary condition (LBC) files (except for the 0-th hour LBC file) will be generated for input into the forecast model. Valid values: ``"GSMGFS"`` | ``"FV3GFS"`` | ``"GEFS"`` | ``"GDAS"`` | ``"RAP"`` | ``"HRRR"`` | ``"RRFS"`` | ``"NAM"`` | ``"UFS-CASE-STUDY"``
``LBC_SPEC_INTVL_HRS``: (Default: 6)
The interval (in integer hours) at which LBC files will be generated. This is also referred to as the *boundary update interval*. Note that the model selected in ``EXTRN_MDL_NAME_LBCS`` must have data available at a frequency greater than or equal to that implied by ``LBC_SPEC_INTVL_HRS``. For example, if ``LBC_SPEC_INTVL_HRS`` is set to "6", then the model must have data available at least every 6 hours. It is up to the user to ensure that this is the case.
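A minimal sketch of how these two settings can be sanity-checked before generating an experiment (illustrative Python only; the valid-values list mirrors ush/valid_param_vals.yaml as updated later in this series, and the config dict is a hypothetical stand-in for a parsed config.yaml):

    VALID_EXTRN_MDL_NAMES = ["GSMGFS", "FV3GFS", "UFS-CASE-STUDY", "GEFS",
                             "GDAS", "RAP", "HRRR", "RRFS", "NAM"]

    # Hypothetical parsed user configuration.
    config = {"EXTRN_MDL_NAME_ICS": "RRFS", "EXTRN_MDL_NAME_LBCS": "RRFS"}

    for key in ("EXTRN_MDL_NAME_ICS", "EXTRN_MDL_NAME_LBCS"):
        if config[key] not in VALID_EXTRN_MDL_NAMES:
            raise ValueError(f"{key}={config[key]!r} is not a supported external model")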
diff --git a/doc/UsersGuide/CustomizingTheWorkflow/InputOutputFiles.rst b/doc/UsersGuide/CustomizingTheWorkflow/InputOutputFiles.rst
index 40227d7a2b..bf24055de4 100644
--- a/doc/UsersGuide/CustomizingTheWorkflow/InputOutputFiles.rst
+++ b/doc/UsersGuide/CustomizingTheWorkflow/InputOutputFiles.rst
@@ -20,8 +20,9 @@ The external model files needed for initializing an experiment can be obtained i
ways, including:
* Pulled from the `SRW App Data Bucket `__,
- * Pulled from the NOAA High Performance Storage System (:term:`HPSS`) during the workflow execution (requires user access), or
- * Obtained and staged by the user from a different source.
+ * Pulled from the NOAA High Performance Storage System (:term:`HPSS`) during the workflow execution (requires user access),
+ * Obtained and staged by the user from a different source, or
+ * Pulled from the `RRFS data bucket (rrfs_a data) `_.
The data format for these files can be :term:`GRIB2` or :term:`NEMSIO`. More information on downloading and setting up the external model data can be found in :numref:`Section %s `. Once the data is set up, the end-to-end application will run the system and write output files to disk.
@@ -246,7 +247,7 @@ The environment variables ``FIXgsm``, ``FIXorg``, and ``FIXsfc`` indicate the pa
Initial Condition/Lateral Boundary Condition File Formats and Source
-----------------------------------------------------------------------
-The SRW Application currently supports raw initial and lateral boundary conditions from numerous models (i.e., FV3GFS, GEFS, GDAS, NAM, RAP, HRRR). The data can be provided in three formats: :term:`NEMSIO`, :term:`netCDF`, or :term:`GRIB2`.
+The SRW Application currently supports raw initial and lateral boundary conditions from numerous models (i.e., FV3GFS, GEFS, GDAS, NAM, RAP, HRRR, RRFS). The data can be provided in three formats: :term:`NEMSIO`, :term:`netCDF`, or :term:`GRIB2`.
To download the model input data for the 12-hour "out-of-the-box" experiment configuration in ``config.community.yaml`` file, run:
@@ -273,7 +274,7 @@ The paths to ``EXTRN_MDL_SOURCE_BASEDIR_ICS`` and ``EXTRN_MDL_SOURCE_BASEDIR_LBC
USE_USER_STAGED_EXTRN_FILES: true
EXTRN_MDL_SOURCE_BASEDIR_LBCS: /path/to/ufs-srweather-app/input_model_data/FV3GFS/grib2/YYYYMMDDHH
-The two ``EXTRN_MDL_SOURCE_BASEDIR_*CS`` variables describe where the :term:`IC ` and :term:`LBC ` file directories are located, respectively. For ease of reusing ``config.yaml`` across experiments, it is recommended that users set up the raw :term:`IC/LBC ` file paths to include the model name (e.g., FV3GFS, GEFS, GDAS, NAM, RAP, HRRR), data format (e.g., grib2, nemsio), and date (in ``YYYYMMDDHH`` format). For example: ``/path/to/input_model_data/FV3GFS/grib2/2019061518/``. While there is flexibility to modify these settings, this structure will provide the most reusability for multiple dates when using the SRW Application workflow.
+The two ``EXTRN_MDL_SOURCE_BASEDIR_*CS`` variables describe where the :term:`IC ` and :term:`LBC ` file directories are located, respectively. For ease of reusing ``config.yaml`` across experiments, it is recommended that users set up the raw :term:`IC/LBC ` file paths to include the model name (e.g., FV3GFS, GEFS, GDAS, NAM, RAP, HRRR, RRFS), data format (e.g., grib2, nemsio, netcdf), and date (in ``YYYYMMDDHH`` format). For example: ``/path/to/input_model_data/FV3GFS/grib2/2019061518/``. While there is flexibility to modify these settings, this structure will provide the most reusability for multiple dates when using the SRW Application workflow.
When files are pulled from NOAA :term:`HPSS` (rather than downloaded from the data bucket), the naming convention looks something like this:
@@ -290,11 +291,12 @@ When files are pulled from NOAA :term:`HPSS` (rather than downloaded from the da
* RAP (GRIB2): ``rap.t{cycle}z.wrfprsf{fhr}.grib2``
* HRRR (GRIB2): ``hrrr.t{cycle}z.wrfprsf{fhr}.grib2``
+* RRFS (GRIB2): ``rrfs.t{cycle}z.prslev.f{fhr}.conus.grib2``
where:
* ``{cycle}`` corresponds to the 2-digit hour of the day when the forecast cycle starts, and
- * ``{fhr}`` corresponds to the 2- or 3-digit nth hour of the forecast (3-digits for FV3GFS/GDAS data and 2 digits for RAP/HRRR data).
+ * ``{fhr}`` corresponds to the 2- or 3-digit forecast hour (3 digits for FV3GFS/GDAS/RRFS data, 2 digits for RAP/HRRR data).
For example, a forecast using FV3GFS GRIB2 data that starts at 18h00 UTC would have a ``{cycle}`` value of 18, which is the 000th forecast hour. The LBCS file for 21h00 UTC would be named ``gfs.t18z.pgrb2.0p25.f003``.
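As a quick check of the naming pattern above, the new RRFS template expands as follows (a sketch, not SRW App code):

    # Expand the RRFS file-name template for the 06z cycle, forecast hour 3.
    cycle, fhr = 6, 3
    fn = f"rrfs.t{cycle:02d}z.prslev.f{fhr:03d}.conus.grib2"
    print(fn)  # -> rrfs.t06z.prslev.f003.conus.grib2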
@@ -353,6 +355,8 @@ AWS S3 Data Buckets:
* GDAS: https://registry.opendata.aws/noaa-gfs-bdp-pds/
* HRRR: https://registry.opendata.aws/noaa-hrrr-pds/ (necessary fields for initializing available for dates 2015 and newer)
* A list of the NOAA Open Data Dissemination (NODD) datasets can be found here: https://www.noaa.gov/nodd/datasets
+* RRFS - experimental data are available starting 02/01/2024 for deterministic forecasts initialized hourly. Forecast data are available out to 60 hours for the 00, 06, 12, and 18 UTC starting times (cycles) and out to 18 hours for other cycles. Earlier dates, from 05/01/2023 to 01/31/2024, may contain only forecasts at 00, 06, 12, and 18 UTC; users need to verify that data exist for the needed dates.
+ https://noaa-rrfs-pds.s3.amazonaws.com/index.html#rrfs_a/
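A hedged sketch of how a full object URL is assembled under this bucket layout (the path pattern follows the aws entry added to parm/data_locations.yml below; the cycle and forecast hour are arbitrary examples):

    from datetime import datetime

    cycle = datetime(2024, 6, 5, 17)  # arbitrary example cycle
    fhr = 3                           # arbitrary example forecast hour
    base = "https://noaa-rrfs-pds.s3.amazonaws.com/rrfs_a"
    url = (f"{base}/rrfs_a.{cycle:%Y%m%d}/{cycle:%H}/control/"
           f"rrfs.t{cycle:%H}z.prslev.f{fhr:03d}.conus.grib2")
    print(url)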
NCEI Archive:
diff --git a/doc/UsersGuide/CustomizingTheWorkflow/LAMGrids.rst b/doc/UsersGuide/CustomizingTheWorkflow/LAMGrids.rst
index 1fd163e8c6..482caf8590 100644
--- a/doc/UsersGuide/CustomizingTheWorkflow/LAMGrids.rst
+++ b/doc/UsersGuide/CustomizingTheWorkflow/LAMGrids.rst
@@ -75,7 +75,7 @@ The 3-km CONUS domain is ideal for running the ``FV3_RRFS_v1beta`` physics suite
The boundary of the ``RRFS_CONUS_3km`` domain is shown in :numref:`Figure %s ` (in red), and the boundary of the :ref:`write component grid ` sits just inside the computational domain (in blue). This extra grid is required because the post-processing utility (:term:`UPP`) is unable to process data on the native FV3 gnomonic grid (in red). Therefore, model data are interpolated to a Lambert conformal grid (the write component grid) in order for the :term:`UPP` to read in and correctly process the data.
.. note::
- While it is possible to initialize the FV3-LAM with coarser external model data when using the ``RRFS_CONUS_3km`` domain, it is generally advised to use external model data (such as HRRR or RAP data) that has a resolution similar to that of the native FV3-LAM (predefined) grid.
+ While it is possible to initialize the FV3-LAM with coarser external model data when using the ``RRFS_CONUS_3km`` domain, it is generally advised to use external model data (such as HRRR, RRFS, or RAP data) that has a resolution similar to that of the native FV3-LAM (predefined) grid.
Predefined SUBCONUS Grid Over Indianapolis
diff --git a/doc/UsersGuide/Reference/Glossary.rst b/doc/UsersGuide/Reference/Glossary.rst
index 90f9c8ab89..2612d4fbe8 100644
--- a/doc/UsersGuide/Reference/Glossary.rst
+++ b/doc/UsersGuide/Reference/Glossary.rst
@@ -227,7 +227,8 @@ Glossary
A central location in which files (e.g., data, code, documentation) are stored and managed.
RRFS
- The `Rapid Refresh Forecast System `__ (RRFS) is NOAA's next-generation convection-allowing, rapidly-updated, ensemble-based data assimilation and forecasting system currently scheduled for operational implementation in 2024. It is designed to run forecasts on a 3-km :term:`CONUS` domain.
+    The `Rapid Refresh Forecast System `__ (RRFS) is NOAA's next-generation convection-allowing, rapidly updated, ensemble-based data assimilation and forecasting system, currently scheduled for operational implementation in 2024. It is designed to run forecasts on a 3-km :term:`CONUS` domain; see also the `NOAA Rapid Refresh Forecast System (RRFS) `__ page. Experimental data are currently available from the `AWS S3 NOAA-RRFS `__ bucket for deterministic forecasts out to 60 hours at 00, 06, 12, and 18 UTC. Additionally, hourly forecasts out to 18 hours may be available for more recent RRFS model runs; users need to verify that data exist for the needed dates.
+
SDF
Suite Definition File. An external file containing information about the construction of a physics suite. It describes the schemes that are called, in which order they are called, whether they are subcycled, and whether they are assembled into groups to be called together.
diff --git a/parm/data_locations.yml b/parm/data_locations.yml
index 7901f4c085..e65a796739 100644
--- a/parm/data_locations.yml
+++ b/parm/data_locations.yml
@@ -236,6 +236,20 @@ RAP:
file_names:
<<: *rap_file_names
+RRFS:
+ hpss:
+ protocol: htar
+ file_names: &rrfs_file_names
+ anl:
+ - rrfs.t{hh}z.prslev.f{fcst_hr:03d}.conus.grib2
+ fcst:
+ - rrfs.t{hh}z.prslev.f{fcst_hr:03d}.conus.grib2
+ aws:
+ protocol: download
+ url: https://noaa-rrfs-pds.s3.amazonaws.com/rrfs_a/rrfs_a.{yyyymmdd}/{hh}/control/
+ file_names:
+ <<: *rrfs_file_names
+
HRRR:
hpss:
protocol: htar
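In the RRFS entry above, the &rrfs_file_names anchor and the <<: *rrfs_file_names merge key let the aws section reuse the hpss file-name templates without repeating them. A minimal sketch of the same mechanism, simplified to a plain alias (assumes PyYAML is available):

    import yaml

    doc = """
    hpss:
      file_names: &fns
        fcst:
          - rrfs.t{hh}z.prslev.f{fcst_hr:03d}.conus.grib2
    aws:
      file_names: *fns
    """
    parsed = yaml.safe_load(doc)
    assert parsed["hpss"]["file_names"] == parsed["aws"]["file_names"]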
diff --git a/parm/wflow/plot.yaml b/parm/wflow/plot.yaml
index 0c98e51711..8448bc3f9e 100644
--- a/parm/wflow/plot.yaml
+++ b/parm/wflow/plot.yaml
@@ -52,4 +52,5 @@ metatask_plot_allvars:
task: run_post_mem#mem#_f#fhr#
taskdep:
attrs:
- task: run_post_mem#mem#_f#fhr#
\ No newline at end of file
+ task: run_post_mem#mem#_f#fhr#
+
diff --git a/scripts/exregional_make_ics.sh b/scripts/exregional_make_ics.sh
index 8cd49076b0..debf526798 100755
--- a/scripts/exregional_make_ics.sh
+++ b/scripts/exregional_make_ics.sh
@@ -197,6 +197,7 @@ case "${CCPP_PHYS_SUITE}" in
"FV3_HRRR" | \
"FV3_RAP" )
if [ "${EXTRN_MDL_NAME_ICS}" = "RAP" ] || \
+ [ "${EXTRN_MDL_NAME_ICS}" = "RRFS" ] || \
[ "${EXTRN_MDL_NAME_ICS}" = "HRRR" ]; then
varmap_file="GSDphys_var_map.txt"
elif [ "${EXTRN_MDL_NAME_ICS}" = "NAM" ] || \
@@ -245,7 +246,7 @@ esac
#
# fn_grib2:
# Name (not including path) of the grib2 file generated by the external
-# model. Currently used for NAM, RAP, and HRRR external model data.
+# model. Currently used for NAM, RAP, and HRRR/RRFS external model data.
#
# input_type:
# The "type" of input being provided to chgres_cube. This contains a combi-
@@ -321,7 +322,7 @@ esac
# tracers_input(:), it must also be 3rd in tracers(:). How can this be checked?
#
# NOTE: Really should use a varmap table for GFS, just like we do for
-# RAP/HRRR.
+# RAP/HRRR/RRFS.
#
# A non-prognostic variable that appears in the field_table for GSD physics
# is cld_amt. Why is that in the field_table at all (since it is a non-
@@ -354,7 +355,7 @@ convert_nst=""
#
# If the external model is not one that uses the RUC land surface model
# (LSM) -- which currently includes all valid external models except the
-# HRRR and the RAP -- then we set the number of soil levels to include
+# HRRR/RRFS and the RAP -- then we set the number of soil levels to include
# in the output NetCDF file that chgres_cube generates (nsoill_out; this
# is a variable in the namelist that chgres_cube reads in) to 4. This
# is because FV3 can handle this regardless of the LSM that it is using
@@ -365,7 +366,7 @@ convert_nst=""
# 4 soil layers to the 9 layers that it uses.
#
# On the other hand, if the external model is one that uses the RUC LSM
-# (currently meaning that it is either the HRRR or the RAP), then what
+# (currently meaning that it is either the HRRR/RRFS or the RAP), then what
# we set nsoill_out to depends on whether the RUC or the Noah/Noah MP
# LSM is used in the SDF. If the SDF uses RUC, then both the external
# model and FV3 use RUC (which expects 9 soil levels), so we simply set
@@ -379,12 +380,13 @@ convert_nst=""
# 9 to 4 levels.
#
# In summary, we can set nsoill_out to 4 unless the external model is
-# the HRRR or RAP AND the forecast model is using the RUC LSM.
+# the HRRR/RRFS or RAP AND the forecast model is using the RUC LSM.
#
#-----------------------------------------------------------------------
#
nsoill_out="4"
if [ "${EXTRN_MDL_NAME_ICS}" = "HRRR" -o \
+ "${EXTRN_MDL_NAME_ICS}" = "RRFS" -o \
"${EXTRN_MDL_NAME_ICS}" = "RAP" ] && \
[ $(boolify "${SDF_USES_RUC_LSM}") = "TRUE" ]; then
nsoill_out="9"
@@ -393,7 +395,7 @@ fi
#-----------------------------------------------------------------------
#
# If the external model for ICs is one that does not provide the aerosol
-# fields needed by Thompson microphysics (currently only the HRRR and
+# fields needed by Thompson microphysics (currently only the HRRR/RRFS and
# RAP provide aerosol data) and if the physics suite uses Thompson
# microphysics, set the variable thomp_mp_climo_file in the chgres_cube
# namelist to the full path of the file containing aerosol climatology
@@ -405,6 +407,7 @@ fi
#
thomp_mp_climo_file=""
if [ "${EXTRN_MDL_NAME_ICS}" != "HRRR" -a \
+ "${EXTRN_MDL_NAME_ICS}" != "RRFS" -a \
"${EXTRN_MDL_NAME_ICS}" != "RAP" ] && \
[ $(boolify "${SDF_USES_THOMPSON_MP}") = "TRUE" ]; then
thomp_mp_climo_file="${THOMPSON_MP_CLIMO_FP}"
@@ -519,8 +522,9 @@ case "${EXTRN_MDL_NAME_ICS}" in
tg3_from_soil=False
;;
-"HRRR")
+"HRRR"|"RRFS")
external_model="HRRR"
+
fn_grib2="${EXTRN_MDL_FNS[0]}"
input_type="grib2"
#
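The soil-level rule spelled out in the comments above condenses to a few lines; as an illustrative restatement in Python (not part of the script):

    def nsoill_out(extrn_mdl: str, sdf_uses_ruc_lsm: bool) -> int:
        # 9 soil levels only when the external model uses the RUC LSM
        # (HRRR, RRFS, or RAP) AND the physics suite also uses RUC.
        if extrn_mdl in ("HRRR", "RRFS", "RAP") and sdf_uses_ruc_lsm:
            return 9
        return 4

    assert nsoill_out("RRFS", True) == 9
    assert nsoill_out("RRFS", False) == 4
    assert nsoill_out("FV3GFS", True) == 4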
diff --git a/scripts/exregional_make_lbcs.sh b/scripts/exregional_make_lbcs.sh
index 35b4da388a..acbe97a56b 100755
--- a/scripts/exregional_make_lbcs.sh
+++ b/scripts/exregional_make_lbcs.sh
@@ -196,6 +196,7 @@ case "${CCPP_PHYS_SUITE}" in
"FV3_HRRR" | \
"FV3_RAP")
if [ "${EXTRN_MDL_NAME_LBCS}" = "RAP" ] || \
+ [ "${EXTRN_MDL_NAME_LBCS}" = "RRFS" ] || \
[ "${EXTRN_MDL_NAME_LBCS}" = "HRRR" ]; then
varmap_file="GSDphys_var_map.txt"
elif [ "${EXTRN_MDL_NAME_LBCS}" = "NAM" ] || \
@@ -239,7 +240,7 @@ esac
#
# fn_grib2:
# Name (not including path) of the grib2 file generated by the external
-# model. Currently used for NAM, RAP, and HRRR external model data.
+# model. Currently used for NAM, RAP, and HRRR/RRFS external model data.
#
# input_type:
# The "type" of input being provided to chgres_cube. This contains a combi-
@@ -294,7 +295,7 @@ esac
# tracers_input(:), it must also be 3rd in tracers(:). How can this be checked?
#
# NOTE: Really should use a varmap table for GFS, just like we do for
-# RAP/HRRR.
+# RAP/HRRR/RRFS.
#
# A non-prognostic variable that appears in the field_table for GSD physics
@@ -318,7 +319,7 @@ tracers="\"\""
#-----------------------------------------------------------------------
#
# If the external model for LBCs is one that does not provide the aerosol
-# fields needed by Thompson microphysics (currently only the HRRR and
+# fields needed by Thompson microphysics (currently only the HRRR/RRFS and
# RAP provide aerosol data) and if the physics suite uses Thompson
# microphysics, set the variable thomp_mp_climo_file in the chgres_cube
# namelist to the full path of the file containing aerosol climatology
@@ -330,6 +331,7 @@ tracers="\"\""
#
thomp_mp_climo_file=""
if [ "${EXTRN_MDL_NAME_LBCS}" != "HRRR" -a \
+ "${EXTRN_MDL_NAME_LBCS}" != "RRFS" -a \
"${EXTRN_MDL_NAME_LBCS}" != "RAP" ] && \
[ $(boolify "${SDF_USES_THOMPSON_MP}") = "TRUE" ]; then
thomp_mp_climo_file="${THOMPSON_MP_CLIMO_FP}"
@@ -401,7 +403,7 @@ case "${EXTRN_MDL_NAME_LBCS}" in
input_type="grib2"
;;
-"HRRR")
+"HRRR"|"RRFS")
external_model="HRRR"
input_type="grib2"
;;
@@ -502,6 +504,9 @@ for (( ii=0; ii<${num_fhrs}; ii=ii+bcgrpnum10 )); do
"HRRR")
fn_grib2="${EXTRN_MDL_FNS[$i]}"
;;
+ "RRFS")
+ fn_grib2="${EXTRN_MDL_FNS[$i]}"
+ ;;
"NAM")
fn_grib2="${EXTRN_MDL_FNS[$i]}"
;;
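The Thompson aerosol-climatology rule is the same in the ICs and LBCs scripts; a one-function summary (illustrative Python; climo_fp stands in for THOMPSON_MP_CLIMO_FP):

    def thompson_climo_file(extrn_mdl: str, sdf_uses_thompson_mp: bool,
                            climo_fp: str) -> str:
        # HRRR, RRFS, and RAP already provide the aerosol fields that
        # Thompson microphysics needs; other sources require the climatology.
        if extrn_mdl not in ("HRRR", "RRFS", "RAP") and sdf_uses_thompson_mp:
            return climo_fp
        return ""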
diff --git a/tests/WE2E/machine_suites/comprehensive b/tests/WE2E/machine_suites/comprehensive
index 8c546918a0..8397e5d0c0 100644
--- a/tests/WE2E/machine_suites/comprehensive
+++ b/tests/WE2E/machine_suites/comprehensive
@@ -57,6 +57,7 @@ grid_RRFS_CONUScompact_13km_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta
grid_RRFS_CONUScompact_25km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16
grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_HRRR_suite_HRRR
grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta
+grid_RRFS_CONUScompact_25km_ics_RRFS_lbcs_RRFS_suite_RRFS_v1beta
grid_RRFS_CONUScompact_3km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16
grid_RRFS_CONUScompact_3km_ics_HRRR_lbcs_RAP_suite_HRRR
grid_RRFS_CONUScompact_3km_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta
diff --git a/tests/WE2E/machine_suites/comprehensive.derecho b/tests/WE2E/machine_suites/comprehensive.derecho
index a28718a10a..5464a053d8 100644
--- a/tests/WE2E/machine_suites/comprehensive.derecho
+++ b/tests/WE2E/machine_suites/comprehensive.derecho
@@ -48,6 +48,7 @@ grid_RRFS_CONUScompact_13km_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta
grid_RRFS_CONUScompact_25km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16
grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_HRRR_suite_HRRR
grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta
+grid_RRFS_CONUScompact_25km_ics_RRFS_lbcs_RRFS_suite_RRFS_v1beta
grid_RRFS_CONUScompact_3km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16
grid_RRFS_CONUScompact_3km_ics_HRRR_lbcs_RAP_suite_HRRR
grid_RRFS_CONUScompact_3km_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta
diff --git a/tests/WE2E/machine_suites/comprehensive.noaacloud b/tests/WE2E/machine_suites/comprehensive.noaacloud
index 6c01bd70a8..c9bb96ae64 100644
--- a/tests/WE2E/machine_suites/comprehensive.noaacloud
+++ b/tests/WE2E/machine_suites/comprehensive.noaacloud
@@ -37,6 +37,7 @@ grid_RRFS_CONUScompact_13km_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta
grid_RRFS_CONUScompact_25km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16
grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_HRRR_suite_HRRR
grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta
+grid_RRFS_CONUScompact_25km_ics_RRFS_lbcs_RRFS_suite_RRFS_v1beta
grid_RRFS_CONUScompact_3km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16
grid_RRFS_CONUScompact_3km_ics_HRRR_lbcs_RAP_suite_HRRR
grid_RRFS_CONUScompact_3km_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta
diff --git a/tests/WE2E/machine_suites/comprehensive.orion b/tests/WE2E/machine_suites/comprehensive.orion
index ce71fe05db..5930843582 100644
--- a/tests/WE2E/machine_suites/comprehensive.orion
+++ b/tests/WE2E/machine_suites/comprehensive.orion
@@ -48,6 +48,7 @@ grid_RRFS_CONUScompact_13km_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta
grid_RRFS_CONUScompact_25km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16
grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_HRRR_suite_HRRR
grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta
+grid_RRFS_CONUScompact_25km_ics_RRFS_lbcs_RRFS_suite_RRFS_v1beta
grid_RRFS_CONUScompact_3km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16
grid_RRFS_CONUScompact_3km_ics_HRRR_lbcs_RAP_suite_HRRR
grid_RRFS_CONUScompact_3km_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta
diff --git a/tests/WE2E/machine_suites/coverage.orion b/tests/WE2E/machine_suites/coverage.orion
index c698648b10..5cb4441437 100644
--- a/tests/WE2E/machine_suites/coverage.orion
+++ b/tests/WE2E/machine_suites/coverage.orion
@@ -5,6 +5,7 @@ grid_CONUS_3km_GFDLgrid_ics_FV3GFS_lbcs_FV3GFS_suite_RRFS_v1beta
grid_RRFS_AK_13km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16_plot
grid_RRFS_CONUS_25km_ics_NAM_lbcs_NAM_suite_RRFS_v1beta
grid_RRFS_CONUS_3km_ics_FV3GFS_lbcs_FV3GFS_suite_HRRR
+grid_RRFS_CONUScompact_25km_ics_RRFS_lbcs_RRFS_suite_RRFS_v1beta
grid_RRFS_CONUScompact_13km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16
grid_RRFS_CONUScompact_3km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16
grid_SUBCONUS_Ind_3km_ics_FV3GFS_lbcs_FV3GFS_suite_WoFS_v0
diff --git a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUScompact_25km_ics_RRFS_lbcs_RRFS_suite_RRFS_v1beta.yaml b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUScompact_25km_ics_RRFS_lbcs_RRFS_suite_RRFS_v1beta.yaml
new file mode 100644
index 0000000000..908b79dc43
--- /dev/null
+++ b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUScompact_25km_ics_RRFS_lbcs_RRFS_suite_RRFS_v1beta.yaml
@@ -0,0 +1,29 @@
+metadata:
+ description: |-
+ This test is to ensure that the workflow running in community mode
+ completes successfully on the RRFS_CONUScompact_25km grid using the RRFS_v1beta
+    physics suite. It uses RRFS forecasts mapped onto a 3-km regular grid (rrfs*.conus.grib2) for
+    ICs and LBCs. This test uses the old v1 sfc_data, not the v2 fractional-grid sfc_data.
+user:
+ RUN_ENVIR: community
+workflow:
+ CCPP_PHYS_SUITE: FV3_RRFS_v1beta
+ PREDEF_GRID_NAME: RRFS_CONUScompact_25km
+ DATE_FIRST_CYCL: '2024060517'
+ DATE_LAST_CYCL: '2024060517'
+ FCST_LEN_HRS: 3
+ PREEXISTING_DIR_METHOD: rename
+task_get_extrn_ics:
+ EXTRN_MDL_NAME_ICS: RRFS
+ FV3GFS_FILE_FMT_ICS: grib2
+ USE_USER_STAGED_EXTRN_FILES: true
+task_get_extrn_lbcs:
+ EXTRN_MDL_NAME_LBCS: RRFS
+ LBC_SPEC_INTVL_HRS: 1
+ FV3GFS_FILE_FMT_LBCS: grib2
+ USE_USER_STAGED_EXTRN_FILES: true
+task_plot_allvars:
+ COMOUT_REF: ""
+rocoto:
+ tasks:
+ taskgroups: '{{ ["parm/wflow/prep.yaml", "parm/wflow/coldstart.yaml", "parm/wflow/post.yaml", "parm/wflow/plot.yaml"]|include }}'
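Because this test sets USE_USER_STAGED_EXTRN_FILES: true, the RRFS GRIB2 files must be staged under the model/format/cycle layout recommended in InputOutputFiles.rst; a small hypothetical helper shows the expected directory for this test's cycle:

    def staged_dir(base: str, model: str, fmt: str, cycl: str) -> str:
        # Recommended layout: <base>/input_model_data/<model>/<format>/<YYYYMMDDHH>
        return f"{base}/input_model_data/{model}/{fmt}/{cycl}"

    print(staged_dir("/path/to", "RRFS", "grib2", "2024060517"))
    # -> /path/to/input_model_data/RRFS/grib2/2024060517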
diff --git a/ush/setup.py b/ush/setup.py
index 51d5b2a084..335ce229e1 100644
--- a/ush/setup.py
+++ b/ush/setup.py
@@ -1447,8 +1447,8 @@ def dict_find(user_dict, substring):
# If the model ICs or BCs are not from RAP or HRRR, they will not contain aerosol
# climatology data needed by the Thompson scheme, so we need to provide a separate file
- if (get_extrn_ics["EXTRN_MDL_NAME_ICS"] not in ["HRRR", "RAP"] or
- get_extrn_lbcs["EXTRN_MDL_NAME_LBCS"] not in ["HRRR", "RAP"]):
+ if (get_extrn_ics["EXTRN_MDL_NAME_ICS"] not in ["HRRR", "RRFS", "RAP"] or
+ get_extrn_lbcs["EXTRN_MDL_NAME_LBCS"] not in ["HRRR", "RRFS", "RAP"]):
fixed_files["THOMPSON_FIX_FILES"].append(workflow_config["THOMPSON_MP_CLIMO_FN"])
# Add thompson-specific fix files to CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING and
diff --git a/ush/valid_param_vals.yaml b/ush/valid_param_vals.yaml
index 3530b51ae9..fd21b3e1cf 100644
--- a/ush/valid_param_vals.yaml
+++ b/ush/valid_param_vals.yaml
@@ -37,8 +37,8 @@ valid_vals_CCPP_PHYS_SUITE: [
"FV3_RAP"
]
valid_vals_GFDLgrid_NUM_CELLS: [48, 96, 192, 384, 768, 1152, 3072]
-valid_vals_EXTRN_MDL_NAME_ICS: ["GSMGFS", "FV3GFS", "UFS-CASE-STUDY", "GEFS", "GDAS", "RAP", "HRRR", "NAM"]
-valid_vals_EXTRN_MDL_NAME_LBCS: ["GSMGFS", "FV3GFS", "UFS-CASE-STUDY", "GEFS", "GDAS", "RAP", "HRRR", "NAM"]
+valid_vals_EXTRN_MDL_NAME_ICS: ["GSMGFS", "FV3GFS", "UFS-CASE-STUDY", "GEFS", "GDAS", "RAP", "HRRR", "RRFS", "NAM"]
+valid_vals_EXTRN_MDL_NAME_LBCS: ["GSMGFS", "FV3GFS", "UFS-CASE-STUDY", "GEFS", "GDAS", "RAP", "HRRR", "RRFS", "NAM"]
valid_vals_USE_USER_STAGED_EXTRN_FILES: [True, False]
valid_vals_FV3GFS_FILE_FMT_ICS: ["nemsio", "grib2", "netcdf"]
valid_vals_FV3GFS_FILE_FMT_LBCS: ["nemsio", "grib2", "netcdf"]
From 2308fb57d7e60c8f19ccbe6128eaa585d9881039 Mon Sep 17 00:00:00 2001
From: jdkublnick <47824899+jdkublnick@users.noreply.github.com>
Date: Mon, 16 Sep 2024 16:04:20 -0400
Subject: [PATCH 30/39] [develop]: Updated ConfigWorkflow.rst to reflect
changes to config_defaults.yaml (PI13) (#1133)
Updated ConfigWorkflow.rst to reflect recent changes to config_defaults.yaml to keep the documentation up to date.
---------
Co-authored-by: Gillian Petro <96886803+gspetro-NOAA@users.noreply.github.com>
Co-authored-by: Michael Lueken <63728921+MichaelLueken@users.noreply.github.com>
---
doc/UsersGuide/CustomizingTheWorkflow/ConfigWorkflow.rst | 6 +++---
1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/doc/UsersGuide/CustomizingTheWorkflow/ConfigWorkflow.rst b/doc/UsersGuide/CustomizingTheWorkflow/ConfigWorkflow.rst
index 50835a2451..4d88173028 100644
--- a/doc/UsersGuide/CustomizingTheWorkflow/ConfigWorkflow.rst
+++ b/doc/UsersGuide/CustomizingTheWorkflow/ConfigWorkflow.rst
@@ -393,7 +393,7 @@ Set File Name Parameters
Name of a file that contains settings and configurations for the :term:`NUOPC`/:term:`ESMF` main component. In general, users should not set this variable in their configuration file (see :ref:`note `).
``UFS_CONFIG_FN``: (Default: "ufs.configure")
- Name of a file that contains information about the various :term:`UFS` components and their run sequence. In general, users should not set this variable in their configuration file (see :ref:`note `).
+ Name of a template file that contains information about the various :term:`UFS` components and their run sequence. In general, users should not set this variable in their configuration file (see :ref:`note `).
``AQM_RC_FN``: (Default: "aqm.rc")
Name of resource file for NOAA Air Quality Model (AQM).
@@ -1096,10 +1096,10 @@ For each workflow task, certain parameter values must be passed to the job sched
For more information, see the `Intel Development Reference Guide `__.
-``OMP_NUM_THREADS_RUN_FCST``: (Default: 1)
+``OMP_NUM_THREADS_RUN_FCST``: (Default: 2)
The number of OpenMP threads to use for parallel regions. Corresponds to the ``atmos_nthreads`` value in ``model_configure``.
-``OMP_STACKSIZE_RUN_FCST``: (Default: "512m")
+``OMP_STACKSIZE_RUN_FCST``: (Default: "1024m")
Controls the size of the stack for threads created by the OpenMP implementation.
.. _ModelConfigParams:
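For orientation, these two settings ultimately surface as the standard OpenMP environment variables for the forecast job; a hedged illustration with the new defaults (variable names per the OpenMP specification):

    import os

    # New defaults: 2 threads per parallel region, 1024m stack per OpenMP thread.
    os.environ.setdefault("OMP_NUM_THREADS", "2")
    os.environ.setdefault("OMP_STACKSIZE", "1024m")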
From 4212296724e646de92ed1a3b82409fefb4a0088f Mon Sep 17 00:00:00 2001
From: Michael Lueken <63728921+MichaelLueken@users.noreply.github.com>
Date: Fri, 20 Sep 2024 13:00:10 -0400
Subject: [PATCH 31/39] [develop] Update ufs-weather-model hash and remove
machine/modulefiles/WE2E test suites for decommissioned machines (#1117)
* Update ufs-weather-model hash to a1143cc (August 12)
* Remove machine files, modulefiles, and WE2E test suites for the decommissioned machine, Cheyenne
* Update build_jet_intel.lua modulefile to point to the /contrib spack-stack location, rather than /lfs4. At the end of September, /lfs4 is scheduled to be unmounted.
* Update ush/machine/jet.yaml machine file to point to /lfs5 instead of /lfs4 for staged data on Jet. At the end of September, /lfs4 is scheduled to be unmounted.
* Updated ContainerQuickstart.rst and RunSRW.rst to note the new container location on Jet (/lfs5 instead of /lfs4) and the location of the staged data (/lfs5 instead of /lfs4).
* Uncommented four WE2E tests that had previously failed in make_sfc_climo in the comprehensive WE2E suite for Derecho (comprehensive.derecho).
---
.github/CODEOWNERS | 2 +-
Externals.cfg | 2 +-
.../ContainerQuickstart.rst | 2 +-
.../BuildingRunningTesting/RunSRW.rst | 2 +-
modulefiles/build_cheyenne_gnu.lua | 33 ------------
modulefiles/build_cheyenne_intel.lua | 27 ----------
modulefiles/build_jet_intel.lua | 2 +-
modulefiles/tasks/cheyenne/aqm_ics.local.lua | 3 --
modulefiles/tasks/cheyenne/aqm_lbcs.local.lua | 3 --
.../tasks/cheyenne/fire_emission.local.lua | 2 -
.../tasks/cheyenne/nexus_emission.local.lua | 5 --
.../tasks/cheyenne/nexus_gfs_sfc.local.lua | 2 -
.../tasks/cheyenne/nexus_post_split.local.lua | 3 --
.../tasks/cheyenne/plot_allvars.local.lua | 3 --
.../tasks/cheyenne/point_source.local.lua | 2 -
.../tasks/cheyenne/pre_post_stat.local.lua | 2 -
modulefiles/tasks/cheyenne/python_srw.lua | 3 --
modulefiles/tasks/cheyenne/run_vx.local.lua | 25 ---------
modulefiles/wflow_cheyenne.lua | 23 --------
.../machine_suites/comprehensive.cheyenne | 53 -------------------
.../WE2E/machine_suites/comprehensive.derecho | 8 +--
tests/WE2E/machine_suites/coverage.cheyenne | 8 ---
ush/machine/cheyenne.yaml | 35 ------------
ush/machine/jet.yaml | 32 +++++------
24 files changed, 25 insertions(+), 257 deletions(-)
delete mode 100644 modulefiles/build_cheyenne_gnu.lua
delete mode 100644 modulefiles/build_cheyenne_intel.lua
delete mode 100644 modulefiles/tasks/cheyenne/aqm_ics.local.lua
delete mode 100644 modulefiles/tasks/cheyenne/aqm_lbcs.local.lua
delete mode 100644 modulefiles/tasks/cheyenne/fire_emission.local.lua
delete mode 100644 modulefiles/tasks/cheyenne/nexus_emission.local.lua
delete mode 100644 modulefiles/tasks/cheyenne/nexus_gfs_sfc.local.lua
delete mode 100644 modulefiles/tasks/cheyenne/nexus_post_split.local.lua
delete mode 100644 modulefiles/tasks/cheyenne/plot_allvars.local.lua
delete mode 100644 modulefiles/tasks/cheyenne/point_source.local.lua
delete mode 100644 modulefiles/tasks/cheyenne/pre_post_stat.local.lua
delete mode 100644 modulefiles/tasks/cheyenne/python_srw.lua
delete mode 100644 modulefiles/tasks/cheyenne/run_vx.local.lua
delete mode 100644 modulefiles/wflow_cheyenne.lua
delete mode 100644 tests/WE2E/machine_suites/comprehensive.cheyenne
delete mode 100644 tests/WE2E/machine_suites/coverage.cheyenne
delete mode 100644 ush/machine/cheyenne.yaml
diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
index bdc428c736..700cea255c 100644
--- a/.github/CODEOWNERS
+++ b/.github/CODEOWNERS
@@ -3,7 +3,7 @@
# These owners will be the default owners for everything in the repo.
#* @defunkt
-* @mkavulich @gsketefian @JeffBeck-NOAA @RatkoVasic-NOAA @BenjaminBlake-NOAA @ywangwof @chan-hoo @panll @christinaholtNOAA @christopherwharrop-noaa @danielabdi-noaa @mark-a-potts @jkbk2004 @willmayfield @dmwright526 @gspetro-NOAA @natalie-perlin @EdwardSnyder-NOAA @MichaelLueken
+* @mkavulich @gsketefian @JeffBeck-NOAA @RatkoVasic-NOAA @BenjaminBlake-NOAA @ywangwof @chan-hoo @panll @christinaholtNOAA @christopherwharrop-noaa @danielabdi-noaa @mark-a-potts @jkbk2004 @willmayfield @dmwright526 @gspetro-NOAA @natalie-perlin @EdwardSnyder-NOAA @MichaelLueken @rickgrubin-noaa
# Order is important. The last matching pattern has the most precedence.
# So if a pull request only touches javascript files, only these owners
diff --git a/Externals.cfg b/Externals.cfg
index 4545cd8ca5..b57d63957e 100644
--- a/Externals.cfg
+++ b/Externals.cfg
@@ -12,7 +12,7 @@ protocol = git
repo_url = https://github.com/ufs-community/ufs-weather-model
# Specify either a branch name or a hash but not both.
#branch = develop
-hash = b5a1976
+hash = a1143cc
local_path = sorc/ufs-weather-model
required = True
diff --git a/doc/UsersGuide/BuildingRunningTesting/ContainerQuickstart.rst b/doc/UsersGuide/BuildingRunningTesting/ContainerQuickstart.rst
index d9dd1a0afc..0607a232b5 100644
--- a/doc/UsersGuide/BuildingRunningTesting/ContainerQuickstart.rst
+++ b/doc/UsersGuide/BuildingRunningTesting/ContainerQuickstart.rst
@@ -81,7 +81,7 @@ On most Level 1 systems, a container named ``ubuntu20.04-intel-ue-1.4.1-srw-dev.
* - Hera
- /scratch1/NCEPDEV/nems/role.epic/containers
* - Jet
- - /mnt/lfs4/HFIP/hfv3gfs/role.epic/containers
+ - /mnt/lfs5/HFIP/hfv3gfs/role.epic/containers
* - NOAA Cloud
- /contrib/EPIC/containers
* - Orion/Hercules [#fn]_
diff --git a/doc/UsersGuide/BuildingRunningTesting/RunSRW.rst b/doc/UsersGuide/BuildingRunningTesting/RunSRW.rst
index b9471acd69..831b2a6345 100644
--- a/doc/UsersGuide/BuildingRunningTesting/RunSRW.rst
+++ b/doc/UsersGuide/BuildingRunningTesting/RunSRW.rst
@@ -62,7 +62,7 @@ The SRW App requires input files to run. These include static datasets, initial
* - Hercules
- /work/noaa/epic/role-epic/contrib/UFS_SRW_data/|data|/input_model_data/
* - Jet
- - /mnt/lfs4/HFIP/hfv3gfs/role.epic/UFS_SRW_data/|data|/input_model_data/
+ - /mnt/lfs5/HFIP/hfv3gfs/role.epic/UFS_SRW_data/|data|/input_model_data/
* - NOAA Cloud
- /contrib/EPIC/UFS_SRW_data/|data|/input_model_data/
* - Orion
diff --git a/modulefiles/build_cheyenne_gnu.lua b/modulefiles/build_cheyenne_gnu.lua
deleted file mode 100644
index 58398891b2..0000000000
--- a/modulefiles/build_cheyenne_gnu.lua
+++ /dev/null
@@ -1,33 +0,0 @@
-help([[
-This module loads libraries for building the UFS SRW App on
-the CISL machine Cheyenne using GNU
-]])
-
-whatis([===[Loads libraries needed for building the UFS SRW App on Cheyenne ]===])
-
-load(pathJoin("cmake", os.getenv("cmake_ver") or "3.22.0"))
-load(pathJoin("ncarenv", os.getenv("ncarenv_ver") or "1.3"))
-load(pathJoin("gnu", os.getenv("gnu_ver") or "11.2.0"))
-load(pathJoin("mpt", os.getenv("mpt_ver") or "2.25"))
-setenv("MKLROOT", "/glade/u/apps/opt/intel/2022.1/mkl/latest")
-load(pathJoin("ncarcompilers", os.getenv("ncarcompilers_ver") or "0.5.0"))
-unload("netcdf")
-
-prepend_path("MODULEPATH","/glade/work/epicufsrt/contrib/hpc-stack/gnu11.2.0_ncdf492/modulefiles/stack")
-load(pathJoin("hpc", os.getenv("hpc_ver") or "1.2.0"))
-load(pathJoin("hpc-gnu", os.getenv("hpc_gnu_ver") or "11.2.0"))
-load(pathJoin("hpc-mpt", os.getenv("hpc_mpt_ver") or "2.25"))
-
-load("srw_common")
-
-load(pathJoin("openblas", os.getenv("openblas_ver") or "0.3.23"))
-
-unsetenv("MKLROOT")
-setenv("CMAKE_C_COMPILER","mpicc")
-setenv("CMAKE_CXX_COMPILER","mpicxx")
-setenv("CMAKE_Fortran_COMPILER","mpif90")
-setenv("CMAKE_Platform","cheyenne.gnu")
-setenv("CC", "mpicc")
-setenv("CXX", "mpicxx")
-setenv("FC", "mpif90")
-
diff --git a/modulefiles/build_cheyenne_intel.lua b/modulefiles/build_cheyenne_intel.lua
deleted file mode 100644
index 298c9879ac..0000000000
--- a/modulefiles/build_cheyenne_intel.lua
+++ /dev/null
@@ -1,27 +0,0 @@
-help([[
-This module loads libraries for building the UFS SRW App on
-the CISL machine Cheyenne using Intel-2022.1
-]])
-
-whatis([===[Loads libraries needed for building the UFS SRW App on Cheyenne ]===])
-
-load(pathJoin("cmake", os.getenv("cmake_ver") or "3.22.0"))
-load(pathJoin("ncarenv", os.getenv("ncarenv_ver") or "1.3"))
-load(pathJoin("intel", os.getenv("intel_ver") or "2022.1"))
-load(pathJoin("mpt", os.getenv("mpt_ver") or "2.25"))
-load(pathJoin("mkl", os.getenv("mkl_ver") or "2022.1"))
-load(pathJoin("ncarcompilers", os.getenv("ncarcompilers_ver") or "0.5.0"))
-unload("netcdf")
-
-prepend_path("MODULEPATH","/glade/work/epicufsrt/contrib/hpc-stack/intel2022.1_ncdf492/modulefiles/stack")
-load(pathJoin("hpc", os.getenv("hpc_ver") or "1.2.0"))
-load(pathJoin("hpc-intel", os.getenv("hpc_intel_ver") or "2022.1"))
-load(pathJoin("hpc-mpt", os.getenv("hpc_mpt_ver") or "2.25"))
-
-load("srw_common")
-
-setenv("CMAKE_C_COMPILER","mpicc")
-setenv("CMAKE_CXX_COMPILER","mpicpc")
-setenv("CMAKE_Fortran_COMPILER","mpif90")
-setenv("CMAKE_Platform","cheyenne.intel")
-
diff --git a/modulefiles/build_jet_intel.lua b/modulefiles/build_jet_intel.lua
index 854b4404cb..a0169a684a 100644
--- a/modulefiles/build_jet_intel.lua
+++ b/modulefiles/build_jet_intel.lua
@@ -5,7 +5,7 @@ the NOAA RDHPC machine Jet using Intel-2021.5.0
whatis([===[Loads libraries needed for building the UFS SRW App on Jet ]===])
-prepend_path("MODULEPATH","/mnt/lfs4/HFIP/hfv3gfs/role.epic/spack-stack/spack-stack-1.6.0/envs/unified-env-rocky8/install/modulefiles/Core")
+prepend_path("MODULEPATH","/contrib/spack-stack/spack-stack-1.6.0/envs/unified-env-rocky8/install/modulefiles/Core")
load("stack-intel/2021.5.0")
load("stack-intel-oneapi-mpi/2021.5.1")
diff --git a/modulefiles/tasks/cheyenne/aqm_ics.local.lua b/modulefiles/tasks/cheyenne/aqm_ics.local.lua
deleted file mode 100644
index 9c9f0ca3d5..0000000000
--- a/modulefiles/tasks/cheyenne/aqm_ics.local.lua
+++ /dev/null
@@ -1,3 +0,0 @@
-load(pathJoin("cmake", os.getenv("cmake_ver") or "3.22.0"))
-load("nco/4.9.5")
-load("python_srw_aqm")
diff --git a/modulefiles/tasks/cheyenne/aqm_lbcs.local.lua b/modulefiles/tasks/cheyenne/aqm_lbcs.local.lua
deleted file mode 100644
index 9c9f0ca3d5..0000000000
--- a/modulefiles/tasks/cheyenne/aqm_lbcs.local.lua
+++ /dev/null
@@ -1,3 +0,0 @@
-load(pathJoin("cmake", os.getenv("cmake_ver") or "3.22.0"))
-load("nco/4.9.5")
-load("python_srw_aqm")
diff --git a/modulefiles/tasks/cheyenne/fire_emission.local.lua b/modulefiles/tasks/cheyenne/fire_emission.local.lua
deleted file mode 100644
index 86252a9a4f..0000000000
--- a/modulefiles/tasks/cheyenne/fire_emission.local.lua
+++ /dev/null
@@ -1,2 +0,0 @@
-load("ncarenv")
-load("python_srw_aqm")
diff --git a/modulefiles/tasks/cheyenne/nexus_emission.local.lua b/modulefiles/tasks/cheyenne/nexus_emission.local.lua
deleted file mode 100644
index 3c690fa12a..0000000000
--- a/modulefiles/tasks/cheyenne/nexus_emission.local.lua
+++ /dev/null
@@ -1,5 +0,0 @@
-load("nco/4.9.5")
-load("mpt/2.25")
-
-load("ncarenv")
-load("python_srw_aqm")
diff --git a/modulefiles/tasks/cheyenne/nexus_gfs_sfc.local.lua b/modulefiles/tasks/cheyenne/nexus_gfs_sfc.local.lua
deleted file mode 100644
index 86252a9a4f..0000000000
--- a/modulefiles/tasks/cheyenne/nexus_gfs_sfc.local.lua
+++ /dev/null
@@ -1,2 +0,0 @@
-load("ncarenv")
-load("python_srw_aqm")
diff --git a/modulefiles/tasks/cheyenne/nexus_post_split.local.lua b/modulefiles/tasks/cheyenne/nexus_post_split.local.lua
deleted file mode 100644
index e3f4bbe95d..0000000000
--- a/modulefiles/tasks/cheyenne/nexus_post_split.local.lua
+++ /dev/null
@@ -1,3 +0,0 @@
-load(pathJoin("nco", os.getenv("nco_ver") or "4.9.5"))
-load("ncarenv")
-load("python_srw_aqm")
diff --git a/modulefiles/tasks/cheyenne/plot_allvars.local.lua b/modulefiles/tasks/cheyenne/plot_allvars.local.lua
deleted file mode 100644
index 7cee04231e..0000000000
--- a/modulefiles/tasks/cheyenne/plot_allvars.local.lua
+++ /dev/null
@@ -1,3 +0,0 @@
-unload("python")
-load("conda")
-setenv("SRW_GRAPHICS_ENV", "srw_graphics")
diff --git a/modulefiles/tasks/cheyenne/point_source.local.lua b/modulefiles/tasks/cheyenne/point_source.local.lua
deleted file mode 100644
index 86252a9a4f..0000000000
--- a/modulefiles/tasks/cheyenne/point_source.local.lua
+++ /dev/null
@@ -1,2 +0,0 @@
-load("ncarenv")
-load("python_srw_aqm")
diff --git a/modulefiles/tasks/cheyenne/pre_post_stat.local.lua b/modulefiles/tasks/cheyenne/pre_post_stat.local.lua
deleted file mode 100644
index 042eb2f732..0000000000
--- a/modulefiles/tasks/cheyenne/pre_post_stat.local.lua
+++ /dev/null
@@ -1,2 +0,0 @@
-load("nco/4.9.5")
-load("python_srw_aqm")
diff --git a/modulefiles/tasks/cheyenne/python_srw.lua b/modulefiles/tasks/cheyenne/python_srw.lua
deleted file mode 100644
index fe6c73a7d5..0000000000
--- a/modulefiles/tasks/cheyenne/python_srw.lua
+++ /dev/null
@@ -1,3 +0,0 @@
-unload("python")
-load("conda")
-setenv("SRW_ENV", "srw_app")
diff --git a/modulefiles/tasks/cheyenne/run_vx.local.lua b/modulefiles/tasks/cheyenne/run_vx.local.lua
deleted file mode 100644
index 54cc632c21..0000000000
--- a/modulefiles/tasks/cheyenne/run_vx.local.lua
+++ /dev/null
@@ -1,25 +0,0 @@
---[[
-Compiler-specific modules are used for met and metplus libraries
---]]
-local met_ver = (os.getenv("met_ver") or "10.1.2")
-local metplus_ver = (os.getenv("metplus_ver") or "4.1.3")
-if (mode() == "load") then
- load(pathJoin("met", met_ver))
- load(pathJoin("metplus",metplus_ver))
-end
-local base_met = os.getenv("met_ROOT") or os.getenv("MET_ROOT")
-local base_metplus = os.getenv("metplus_ROOT") or os.getenv("METPLUS_ROOT")
-
-setenv("MET_INSTALL_DIR", base_met)
-setenv("MET_BIN_EXEC", pathJoin(base_met,"bin"))
-setenv("MET_BASE", pathJoin(base_met,"share/met"))
-setenv("MET_VERSION", met_ver)
-setenv("METPLUS_VERSION", metplus_ver)
-setenv("METPLUS_ROOT", base_metplus)
-setenv("METPLUS_PATH", base_metplus)
-
-if (mode() == "unload") then
- unload(pathJoin("met", met_ver))
- unload(pathJoin("metplus",metplus_ver))
-end
-load("python_srw")
diff --git a/modulefiles/wflow_cheyenne.lua b/modulefiles/wflow_cheyenne.lua
deleted file mode 100644
index fd4bc3eae5..0000000000
--- a/modulefiles/wflow_cheyenne.lua
+++ /dev/null
@@ -1,23 +0,0 @@
-help([[
-This module loads python environement for running the UFS SRW App on
-on the CISL machine Cheyenne
-]])
-
-whatis([===[Loads libraries needed for running the UFS SRW App on Cheyenne ]===])
-
-load("ncarenv")
-
-append_path("MODULEPATH","/glade/p/ral/jntp/UFS_SRW_app/modules")
-load("rocoto")
-
-unload("python")
-
-load("conda")
-
-
-if mode() == "load" then
- LmodMsgRaw([===[Please do the following to activate conda:
- > conda activate srw_app
-]===])
-end
-
diff --git a/tests/WE2E/machine_suites/comprehensive.cheyenne b/tests/WE2E/machine_suites/comprehensive.cheyenne
deleted file mode 100644
index 96792e37b0..0000000000
--- a/tests/WE2E/machine_suites/comprehensive.cheyenne
+++ /dev/null
@@ -1,53 +0,0 @@
-community
-custom_ESGgrid
-custom_ESGgrid_Central_Asia_3km
-custom_ESGgrid_IndianOcean_6km
-custom_ESGgrid_NewZealand_3km
-custom_ESGgrid_Peru_12km
-custom_ESGgrid_SF_1p1km
-custom_GFDLgrid__GFDLgrid_USE_NUM_CELLS_IN_FILENAMES_eq_FALSE
-custom_GFDLgrid
-deactivate_tasks
-grid_CONUS_25km_GFDLgrid_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16
-grid_CONUS_3km_GFDLgrid_ics_FV3GFS_lbcs_FV3GFS_suite_RRFS_v1beta
-grid_RRFS_AK_13km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16_plot
-grid_RRFS_AK_3km_ics_FV3GFS_lbcs_FV3GFS_suite_HRRR
-grid_RRFS_CONUS_13km_ics_FV3GFS_lbcs_FV3GFS_suite_RAP
-grid_RRFS_CONUS_13km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16_plot
-grid_RRFS_CONUS_13km_ics_FV3GFS_lbcs_FV3GFS_suite_HRRR
-grid_RRFS_CONUS_13km_ics_FV3GFS_lbcs_FV3GFS_suite_RRFS_v1beta
-grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v15p2
-grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16_plot
-grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v17_p8_plot
-grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_suite_HRRR
-grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_suite_RAP
-grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_suite_RRFS_v1beta
-grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_RAP_suite_RAP
-grid_RRFS_CONUS_25km_ics_GSMGFS_lbcs_GSMGFS_suite_GFS_v15p2
-grid_RRFS_CONUS_25km_ics_NAM_lbcs_NAM_suite_GFS_v16
-grid_RRFS_CONUS_25km_ics_NAM_lbcs_NAM_suite_RRFS_v1beta
-grid_RRFS_CONUS_3km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v15p2
-grid_RRFS_CONUS_3km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v15_thompson_mynn_lam3km
-grid_RRFS_CONUS_3km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16
-grid_RRFS_CONUS_3km_ics_FV3GFS_lbcs_FV3GFS_suite_HRRR
-grid_RRFS_CONUS_3km_ics_FV3GFS_lbcs_FV3GFS_suite_RRFS_v1beta
-grid_RRFS_CONUScompact_13km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16
-grid_RRFS_CONUScompact_13km_ics_HRRR_lbcs_RAP_suite_HRRR
-grid_RRFS_CONUScompact_13km_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta
-grid_RRFS_CONUScompact_25km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16
-grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_HRRR_suite_HRRR
-grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta
-grid_RRFS_CONUScompact_3km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16
-grid_RRFS_CONUScompact_3km_ics_HRRR_lbcs_RAP_suite_HRRR
-grid_RRFS_CONUScompact_3km_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta
-grid_RRFS_NA_13km_ics_FV3GFS_lbcs_FV3GFS_suite_RAP
-grid_SUBCONUS_Ind_3km_ics_FV3GFS_lbcs_FV3GFS_suite_WoFS_v0
-grid_SUBCONUS_Ind_3km_ics_HRRR_lbcs_HRRR_suite_HRRR
-grid_SUBCONUS_Ind_3km_ics_HRRR_lbcs_RAP_suite_WoFS_v0
-grid_SUBCONUS_Ind_3km_ics_NAM_lbcs_NAM_suite_GFS_v16
-grid_SUBCONUS_Ind_3km_ics_RAP_lbcs_RAP_suite_RRFS_v1beta_plot
-MET_ensemble_verification_only_vx
-MET_verification_only_vx
-pregen_grid_orog_sfc_climo
-specify_EXTRN_MDL_SYSBASEDIR_ICS_LBCS
-specify_template_filenames
diff --git a/tests/WE2E/machine_suites/comprehensive.derecho b/tests/WE2E/machine_suites/comprehensive.derecho
index 5464a053d8..5930843582 100644
--- a/tests/WE2E/machine_suites/comprehensive.derecho
+++ b/tests/WE2E/machine_suites/comprehensive.derecho
@@ -9,9 +9,9 @@
2020_jan_cold_blast
community
custom_ESGgrid
-#custom_ESGgrid_Central_Asia_3km
+custom_ESGgrid_Central_Asia_3km
custom_ESGgrid_IndianOcean_6km
-#custom_ESGgrid_NewZealand_3km
+custom_ESGgrid_NewZealand_3km
custom_ESGgrid_Peru_12km
custom_ESGgrid_SF_1p1km
custom_GFDLgrid__GFDLgrid_USE_NUM_CELLS_IN_FILENAMES_eq_FALSE
@@ -20,8 +20,8 @@ deactivate_tasks
get_from_AWS_ics_GEFS_lbcs_GEFS_fmt_grib2_2022040400_ensemble_2mems
get_from_NOMADS_ics_FV3GFS_lbcs_FV3GFS
grid_CONUS_25km_GFDLgrid_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16
-#grid_CONUS_3km_GFDLgrid_ics_FV3GFS_lbcs_FV3GFS_suite_RRFS_v1beta
-#grid_RRFS_AK_13km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16_plot
+grid_CONUS_3km_GFDLgrid_ics_FV3GFS_lbcs_FV3GFS_suite_RRFS_v1beta
+grid_RRFS_AK_13km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16_plot
grid_RRFS_AK_3km_ics_FV3GFS_lbcs_FV3GFS_suite_HRRR
grid_RRFS_CONUS_13km_ics_FV3GFS_lbcs_FV3GFS_suite_RAP
grid_RRFS_CONUS_13km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16_plot
diff --git a/tests/WE2E/machine_suites/coverage.cheyenne b/tests/WE2E/machine_suites/coverage.cheyenne
deleted file mode 100644
index 8f3c3ec78c..0000000000
--- a/tests/WE2E/machine_suites/coverage.cheyenne
+++ /dev/null
@@ -1,8 +0,0 @@
-custom_ESGgrid_IndianOcean_6km
-grid_RRFS_CONUS_13km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16_plot
-grid_RRFS_CONUS_25km_ics_NAM_lbcs_NAM_suite_GFS_v16
-grid_RRFS_CONUScompact_13km_ics_HRRR_lbcs_RAP_suite_HRRR
-grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta
-grid_SUBCONUS_Ind_3km_ics_HRRR_lbcs_HRRR_suite_HRRR
-pregen_grid_orog_sfc_climo
-specify_template_filenames
diff --git a/ush/machine/cheyenne.yaml b/ush/machine/cheyenne.yaml
deleted file mode 100644
index a91c52e170..0000000000
--- a/ush/machine/cheyenne.yaml
+++ /dev/null
@@ -1,35 +0,0 @@
-platform:
- WORKFLOW_MANAGER: rocoto
- NCORES_PER_NODE: 36
- SCHED: pbspro
- TEST_CCPA_OBS_DIR: /glade/work/epicufsrt/contrib/UFS_SRW_data/develop/obs_data/ccpa/proc
- TEST_MRMS_OBS_DIR: /glade/work/epicufsrt/contrib/UFS_SRW_data/develop/obs_data/mrms/proc
- TEST_NDAS_OBS_DIR: /glade/work/epicufsrt/contrib/UFS_SRW_data/develop/obs_data/ndas/proc
- TEST_NOHRSC_OBS_DIR: /glade/work/epicufsrt/contrib/UFS_SRW_data/develop/obs_data/nohrsc/proc
- DOMAIN_PREGEN_BASEDIR: /glade/work/epicufsrt/contrib/UFS_SRW_data/develop/FV3LAM_pregen
- QUEUE_DEFAULT: regular
- QUEUE_FCST: regular
- QUEUE_HPSS: regular
- RUN_CMD_FCST: mpirun -np ${PE_MEMBER01}
- RUN_CMD_POST: mpirun -np $nprocs
- RUN_CMD_PRDGEN: mpirun -np $nprocs
- RUN_CMD_SERIAL: time
- RUN_CMD_UTILS: mpirun -np $nprocs
- RUN_CMD_NEXUS: mpirun -np $nprocs
- RUN_CMD_AQMLBC: mpirun -np ${NUMTS}
- PRE_TASK_CMDS: '{ ulimit -s unlimited; ulimit -a; }'
- TEST_EXTRN_MDL_SOURCE_BASEDIR: /glade/work/epicufsrt/contrib/UFS_SRW_data/develop/input_model_data
- TEST_AQM_INPUT_BASEDIR: /glade/work/epicufsrt/contrib/UFS_SRW_data/develop/aqm_data
- TEST_PREGEN_BASEDIR: /glade/work/epicufsrt/contrib/UFS_SRW_data/develop/FV3LAM_pregen
- TEST_ALT_EXTRN_MDL_SYSBASEDIR_ICS: /glade/work/epicufsrt/contrib/UFS_SRW_data/develop/dummy_FV3GFS_sys_dir
- TEST_ALT_EXTRN_MDL_SYSBASEDIR_LBCS: /glade/work/epicufsrt/contrib/UFS_SRW_data/develop/dummy_FV3GFS_sys_dir
- TEST_VX_FCST_INPUT_BASEDIR: '{{ "/glade/work/epicufsrt/contrib/UFS_SRW_data/develop/output_data/fcst_" }}{{ "ens" if (global.NUM_ENS_MEMBERS > 0) else "det" }}{{ "/{{workflow.PREDEF_GRID_NAME}}" }}{% raw %}{% endraw %}'
- FIXaer: /glade/work/epicufsrt/contrib/UFS_SRW_data/develop/fix/fix_aer
- FIXgsm: /glade/work/epicufsrt/contrib/UFS_SRW_data/develop/fix/fix_am
- FIXlut: /glade/work/epicufsrt/contrib/UFS_SRW_data/develop/fix/fix_lut
- FIXorg: /glade/work/epicufsrt/contrib/UFS_SRW_data/develop/fix/fix_orog
- FIXsfc: /glade/work/epicufsrt/contrib/UFS_SRW_data/develop/fix/fix_sfc_climo
- FIXshp: /glade/work/epicufsrt/contrib/UFS_SRW_data/develop/NaturalEarth
-data:
- ics_lbcs:
- FV3GFS: /glade/p/ral/jntp/UFS_CAM/COMGFS/gfs.${yyyymmdd}/${hh}
diff --git a/ush/machine/jet.yaml b/ush/machine/jet.yaml
index 93d375ee02..375711c61a 100644
--- a/ush/machine/jet.yaml
+++ b/ush/machine/jet.yaml
@@ -2,11 +2,11 @@ platform:
WORKFLOW_MANAGER: rocoto
NCORES_PER_NODE: 24
SCHED: slurm
- TEST_CCPA_OBS_DIR: /mnt/lfs4/HFIP/hfv3gfs/role.epic/UFS_SRW_data/develop/obs_data/ccpa/proc
- TEST_MRMS_OBS_DIR: /mnt/lfs4/HFIP/hfv3gfs/role.epic/UFS_SRW_data/develop/obs_data/mrms/proc
- TEST_NDAS_OBS_DIR: /mnt/lfs4/HFIP/hfv3gfs/role.epic/UFS_SRW_data/develop/obs_data/ndas/proc
- TEST_NOHRSC_OBS_DIR: /mnt/lfs4/HFIP/hfv3gfs/role.epic/UFS_SRW_data/develop/obs_data/nohrsc/proc
- DOMAIN_PREGEN_BASEDIR: /mnt/lfs4/HFIP/hfv3gfs/role.epic/UFS_SRW_data/develop/FV3LAM_pregen
+ TEST_CCPA_OBS_DIR: /mnt/lfs5/HFIP/hfv3gfs/role.epic/UFS_SRW_data/develop/obs_data/ccpa/proc
+ TEST_MRMS_OBS_DIR: /mnt/lfs5/HFIP/hfv3gfs/role.epic/UFS_SRW_data/develop/obs_data/mrms/proc
+ TEST_NDAS_OBS_DIR: /mnt/lfs5/HFIP/hfv3gfs/role.epic/UFS_SRW_data/develop/obs_data/ndas/proc
+ TEST_NOHRSC_OBS_DIR: /mnt/lfs5/HFIP/hfv3gfs/role.epic/UFS_SRW_data/develop/obs_data/nohrsc/proc
+ DOMAIN_PREGEN_BASEDIR: /mnt/lfs5/HFIP/hfv3gfs/role.epic/UFS_SRW_data/develop/FV3LAM_pregen
PARTITION_DEFAULT: sjet,vjet,kjet,xjet
QUEUE_DEFAULT: batch
PARTITION_FCST: xjet
@@ -21,17 +21,17 @@ platform:
SCHED_NATIVE_CMD: --export=NONE
SCHED_NATIVE_CMD_HPSS: -n 1 --export=NONE
PRE_TASK_CMDS: '{ ulimit -s unlimited; ulimit -a; }'
- TEST_EXTRN_MDL_SOURCE_BASEDIR: /mnt/lfs4/HFIP/hfv3gfs/role.epic/UFS_SRW_data/develop/input_model_data
- TEST_PREGEN_BASEDIR: /mnt/lfs4/HFIP/hfv3gfs/role.epic/UFS_SRW_data/develop/FV3LAM_pregen
- TEST_ALT_EXTRN_MDL_SYSBASEDIR_ICS: /mnt/lfs4/HFIP/hfv3gfs/role.epic/UFS_SRW_data/develop/dummy_FV3GFS_sys_dir
- TEST_ALT_EXTRN_MDL_SYSBASEDIR_LBCS: /mnt/lfs4/HFIP/hfv3gfs/role.epic/UFS_SRW_data/develop/dummy_FV3GFS_sys_dir
- TEST_VX_FCST_INPUT_BASEDIR: '{{ "/mnt/lfs4/HFIP/hfv3gfs/role.epic/UFS_SRW_data/develop/output_data/fcst_" }}{{ "ens" if (global.NUM_ENS_MEMBERS > 0) else "det" }}{{ "/{{workflow.PREDEF_GRID_NAME}}" }}{% raw %}{% endraw %}'
- FIXaer: /mnt/lfs4/HFIP/hfv3gfs/role.epic/UFS_SRW_data/develop/fix/fix_aer
- FIXgsm: /mnt/lfs4/HFIP/hfv3gfs/role.epic/UFS_SRW_data/develop/fix/fix_am
- FIXlut: /mnt/lfs4/HFIP/hfv3gfs/role.epic/UFS_SRW_data/develop/fix/fix_lut
- FIXorg: /mnt/lfs4/HFIP/hfv3gfs/role.epic/UFS_SRW_data/develop/fix/fix_orog
- FIXsfc: /mnt/lfs4/HFIP/hfv3gfs/role.epic/UFS_SRW_data/develop/fix/fix_sfc_climo
- FIXshp: /mnt/lfs4/HFIP/hfv3gfs/role.epic/UFS_SRW_data/develop/NaturalEarth
+ TEST_EXTRN_MDL_SOURCE_BASEDIR: /mnt/lfs5/HFIP/hfv3gfs/role.epic/UFS_SRW_data/develop/input_model_data
+ TEST_PREGEN_BASEDIR: /mnt/lfs5/HFIP/hfv3gfs/role.epic/UFS_SRW_data/develop/FV3LAM_pregen
+ TEST_ALT_EXTRN_MDL_SYSBASEDIR_ICS: /mnt/lfs5/HFIP/hfv3gfs/role.epic/UFS_SRW_data/develop/dummy_FV3GFS_sys_dir
+ TEST_ALT_EXTRN_MDL_SYSBASEDIR_LBCS: /mnt/lfs5/HFIP/hfv3gfs/role.epic/UFS_SRW_data/develop/dummy_FV3GFS_sys_dir
+ TEST_VX_FCST_INPUT_BASEDIR: '{{ "/mnt/lfs5/HFIP/hfv3gfs/role.epic/UFS_SRW_data/develop/output_data/fcst_" }}{{ "ens" if (global.NUM_ENS_MEMBERS > 0) else "det" }}{{ "/{{workflow.PREDEF_GRID_NAME}}" }}{% raw %}{% endraw %}'
+ FIXaer: /mnt/lfs5/HFIP/hfv3gfs/role.epic/UFS_SRW_data/develop/fix/fix_aer
+ FIXgsm: /mnt/lfs5/HFIP/hfv3gfs/role.epic/UFS_SRW_data/develop/fix/fix_am
+ FIXlut: /mnt/lfs5/HFIP/hfv3gfs/role.epic/UFS_SRW_data/develop/fix/fix_lut
+ FIXorg: /mnt/lfs5/HFIP/hfv3gfs/role.epic/UFS_SRW_data/develop/fix/fix_orog
+ FIXsfc: /mnt/lfs5/HFIP/hfv3gfs/role.epic/UFS_SRW_data/develop/fix/fix_sfc_climo
+ FIXshp: /mnt/lfs5/HFIP/hfv3gfs/role.epic/UFS_SRW_data/develop/NaturalEarth
EXTRN_MDL_DATA_STORES: hpss aws nomads
data:
ics_lbcs:
From 1b01c644198637e4274638d35537f4d2294a6e00 Mon Sep 17 00:00:00 2001
From: Natalie Perlin <68030316+natalie-perlin@users.noreply.github.com>
Date: Wed, 25 Sep 2024 13:14:15 -0400
Subject: [PATCH 32/39] Modulefile changes for NOAA Cloud Rocky 8 platforms
(#1129)
Updated modulefiles to use NOAA Cloud Rocky8 platforms
Files changed:
* modulefiles/build_noaacloud_intel.lua (loading spack-stack procedure change)
* modulefiles/wflow_noaacloud.lua
Co-authored-by: Natalie Perlin
---
modulefiles/build_noaacloud_intel.lua | 5 +++--
modulefiles/wflow_noaacloud.lua | 7 -------
2 files changed, 3 insertions(+), 9 deletions(-)
diff --git a/modulefiles/build_noaacloud_intel.lua b/modulefiles/build_noaacloud_intel.lua
index dd774e8ed9..b1a6adbbf0 100644
--- a/modulefiles/build_noaacloud_intel.lua
+++ b/modulefiles/build_noaacloud_intel.lua
@@ -5,11 +5,12 @@ the NOAA cloud using Intel-oneapi
whatis([===[Loads libraries needed for building the UFS SRW App on NOAA cloud ]===])
-prepend_path("MODULEPATH", "/contrib/spack-stack/spack-stack-1.6.0/envs/unified-env/install/modulefiles/Core")
+prepend_path("MODULEPATH", "/contrib/spack-stack-rocky8/spack-stack-1.6.0/envs/ue-intel/install/modulefiles/Core")
prepend_path("MODULEPATH", "/apps/modules/modulefiles")
-prepend_path("PATH", "/contrib/EPIC/bin")
+load("gnu")
load("stack-intel")
load("stack-intel-oneapi-mpi")
+unload("gnu")
load("cmake/3.23.1")
load("srw_common")
diff --git a/modulefiles/wflow_noaacloud.lua b/modulefiles/wflow_noaacloud.lua
index 5e0c0ca50a..27fb8296c1 100644
--- a/modulefiles/wflow_noaacloud.lua
+++ b/modulefiles/wflow_noaacloud.lua
@@ -10,13 +10,6 @@ load("rocoto")
load("conda")
-prepend_path("PATH","/contrib/EPIC/bin")
-
--- Add missing libstdc binary for Azure
-if os.getenv("PW_CSP") == "azure" then
- setenv("LD_PRELOAD","/opt/nvidia/nsight-systems/2023.1.2/host-linux-x64/libstdc++.so.6")
-end
-
if mode() == "load" then
LmodMsgRaw([===[Please do the following to activate conda:
> conda activate srw_app
From 13d3ce5932d72ff276a271300888536c7db93e6c Mon Sep 17 00:00:00 2001
From: Gillian Petro <96886803+gspetro-NOAA@users.noreply.github.com>
Date: Wed, 2 Oct 2024 13:35:01 -0400
Subject: [PATCH 33/39] [develop]: Update python docstrings and generate
preliminary technical documentation (#1131)
* Initial implementation of SRW App technical documentation. Adds a section called "Technical Documentation" to the SRW App docs.
* This update also removes outdated/redundant text files in doc (i.e., RUNTIME and INSTALL).
---------
Co-authored-by: gspetro
Co-authored-by: Michael Lueken <63728921+MichaelLueken@users.noreply.github.com>
---
.gitignore | 1 +
.readthedocs.yaml | 4 -
doc/INSTALL | 61 ---
doc/README | 1 +
doc/RUNTIME | 22 --
doc/TechDocs/index.rst | 8 +
doc/TechDocs/tests/WE2E/WE2E_summary.rst | 7 +
doc/TechDocs/tests/WE2E/modules.rst | 11 +
doc/TechDocs/tests/WE2E/monitor_jobs.rst | 7 +
doc/TechDocs/tests/WE2E/print_test_info.rst | 7 +
doc/TechDocs/tests/WE2E/run_WE2E_tests.rst | 7 +
doc/TechDocs/tests/WE2E/utils.rst | 7 +
doc/TechDocs/tests/index.rst | 7 +
doc/TechDocs/ush/UFS_plot_domains.rst | 7 +
doc/TechDocs/ush/calculate_cost.rst | 7 +
doc/TechDocs/ush/check_python_version.rst | 7 +
doc/TechDocs/ush/config_utils.rst | 7 +
doc/TechDocs/ush/create_aqm_rc_file.rst | 7 +
doc/TechDocs/ush/create_diag_table_file.rst | 7 +
.../ush/create_model_configure_file.rst | 7 +
.../ush/create_ufs_configure_file.rst | 7 +
doc/TechDocs/ush/generate_FV3LAM_wflow.rst | 7 +
doc/TechDocs/ush/get_crontab_contents.rst | 7 +
doc/TechDocs/ush/link_fix.rst | 7 +
doc/TechDocs/ush/modules.rst | 29 ++
doc/TechDocs/ush/mrms_pull_topofhour.rst | 7 +
doc/TechDocs/ush/python_utils.rst | 114 ++++++
doc/TechDocs/ush/retrieve_data.rst | 7 +
doc/TechDocs/ush/run_srw_tests.rst | 7 +
.../ush/set_FV3nml_ens_stoch_seeds.rst | 7 +
.../ush/set_FV3nml_sfc_climo_filenames.rst | 7 +
doc/TechDocs/ush/set_cycle_dates.rst | 7 +
doc/TechDocs/ush/set_gridparams_ESGgrid.rst | 7 +
doc/TechDocs/ush/set_gridparams_GFDLgrid.rst | 7 +
doc/TechDocs/ush/set_predef_grid_params.rst | 7 +
doc/TechDocs/ush/setup.rst | 7 +
doc/TechDocs/ush/update_input_nml.rst | 7 +
.../BuildingRunningTesting/Quickstart.rst | 4 +-
.../BuildingRunningTesting/RunSRW.rst | 4 +-
doc/UsersGuide/Reference/Glossary.rst | 2 +-
doc/UsersGuide/index.rst | 1 +
doc/_static/theme_overrides.css | 12 +-
doc/conf.py | 51 ++-
doc/index.rst | 8 +-
doc/make.bat | 2 +-
doc/requirements.in | 1 +
doc/requirements.txt | 5 +
tests/WE2E/WE2E_summary.py | 9 +-
tests/WE2E/monitor_jobs.py | 23 +-
tests/WE2E/print_test_info.py | 2 +
tests/WE2E/run_WE2E_tests.py | 43 ++-
tests/WE2E/utils.py | 128 +++----
ush/UFS_plot_domains.py | 286 +++++++-------
ush/calculate_cost.py | 12 +
ush/check_python_version.py | 9 +-
ush/config_defaults.yaml | 37 +-
ush/config_utils.py | 6 +-
ush/create_aqm_rc_file.py | 123 +++---
ush/create_diag_table_file.py | 15 +-
ush/create_model_configure_file.py | 40 +-
ush/create_ufs_configure_file.py | 14 +-
ush/generate_FV3LAM_wflow.py | 25 +-
ush/get_crontab_contents.py | 52 +--
ush/link_fix.py | 41 +-
ush/mrms_pull_topofhour.py | 14 +
.../check_for_preexist_dir_file.py | 9 +-
ush/python_utils/check_var_valid_value.py | 10 +-
ush/python_utils/config_parser.py | 172 ++++++---
ush/python_utils/create_symlink_to_file.py | 8 +-
ush/python_utils/define_macos_utilities.py | 12 +-
ush/python_utils/environment.py | 98 +++--
ush/python_utils/filesys_cmds_vrfy.py | 57 ++-
ush/python_utils/fv3write_parms_lambert.py | 34 +-
ush/python_utils/misc.py | 32 +-
ush/python_utils/print_input_args.py | 4 +-
ush/python_utils/print_msg.py | 32 +-
ush/python_utils/run_command.py | 4 +-
ush/python_utils/xml_parser.py | 16 +-
ush/retrieve_data.py | 353 +++++++++++-------
ush/run_srw_tests.py | 30 +-
ush/set_cycle_dates.py | 17 +-
ush/set_fv3nml_ens_stoch_seeds.py | 24 +-
ush/set_fv3nml_sfc_climo_filenames.py | 15 +-
ush/set_gridparams_ESGgrid.py | 37 +-
ush/set_gridparams_GFDLgrid.py | 49 ++-
ush/set_predef_grid_params.py | 10 +-
ush/setup.py | 121 +++---
ush/update_input_nml.py | 17 +-
88 files changed, 1616 insertions(+), 961 deletions(-)
delete mode 100644 doc/INSTALL
delete mode 100644 doc/RUNTIME
create mode 100644 doc/TechDocs/index.rst
create mode 100644 doc/TechDocs/tests/WE2E/WE2E_summary.rst
create mode 100644 doc/TechDocs/tests/WE2E/modules.rst
create mode 100644 doc/TechDocs/tests/WE2E/monitor_jobs.rst
create mode 100644 doc/TechDocs/tests/WE2E/print_test_info.rst
create mode 100644 doc/TechDocs/tests/WE2E/run_WE2E_tests.rst
create mode 100644 doc/TechDocs/tests/WE2E/utils.rst
create mode 100644 doc/TechDocs/tests/index.rst
create mode 100644 doc/TechDocs/ush/UFS_plot_domains.rst
create mode 100644 doc/TechDocs/ush/calculate_cost.rst
create mode 100644 doc/TechDocs/ush/check_python_version.rst
create mode 100644 doc/TechDocs/ush/config_utils.rst
create mode 100644 doc/TechDocs/ush/create_aqm_rc_file.rst
create mode 100644 doc/TechDocs/ush/create_diag_table_file.rst
create mode 100644 doc/TechDocs/ush/create_model_configure_file.rst
create mode 100644 doc/TechDocs/ush/create_ufs_configure_file.rst
create mode 100644 doc/TechDocs/ush/generate_FV3LAM_wflow.rst
create mode 100644 doc/TechDocs/ush/get_crontab_contents.rst
create mode 100644 doc/TechDocs/ush/link_fix.rst
create mode 100644 doc/TechDocs/ush/modules.rst
create mode 100644 doc/TechDocs/ush/mrms_pull_topofhour.rst
create mode 100644 doc/TechDocs/ush/python_utils.rst
create mode 100644 doc/TechDocs/ush/retrieve_data.rst
create mode 100644 doc/TechDocs/ush/run_srw_tests.rst
create mode 100644 doc/TechDocs/ush/set_FV3nml_ens_stoch_seeds.rst
create mode 100644 doc/TechDocs/ush/set_FV3nml_sfc_climo_filenames.rst
create mode 100644 doc/TechDocs/ush/set_cycle_dates.rst
create mode 100644 doc/TechDocs/ush/set_gridparams_ESGgrid.rst
create mode 100644 doc/TechDocs/ush/set_gridparams_GFDLgrid.rst
create mode 100644 doc/TechDocs/ush/set_predef_grid_params.rst
create mode 100644 doc/TechDocs/ush/setup.rst
create mode 100644 doc/TechDocs/ush/update_input_nml.rst
diff --git a/.gitignore b/.gitignore
index ed78ca4182..99f71c9590 100644
--- a/.gitignore
+++ b/.gitignore
@@ -23,6 +23,7 @@ ush/python_utils/__pycache__/
ush/python_utils/workflow-tools/
ush/*.swp
conda_loc
+*/.DS_Store
*.swp
__pycache__
diff --git a/.readthedocs.yaml b/.readthedocs.yaml
index c8ce6064b2..8aabbaae76 100644
--- a/.readthedocs.yaml
+++ b/.readthedocs.yaml
@@ -28,8 +28,4 @@ python:
install:
- requirements: doc/requirements.txt
-submodules:
- include:
- - hpc-stack-mod
- recursive: true
diff --git a/doc/INSTALL b/doc/INSTALL
deleted file mode 100644
index 53dc159bbd..0000000000
--- a/doc/INSTALL
+++ /dev/null
@@ -1,61 +0,0 @@
-# Simple setup instructions for the UFS SRW App
-# For more details, see the "Getting Started" guide:
-# https://github.com/ufs-community/ufs-srweather-app/wiki/Getting-Started
-
-# Getting the UFS SRW App code
-#
-# The SRW App can be downloaded directly from github, either by using `git clone` or by downloading
-# from the web.
-
-git clone https://github.com/ufs-community/ufs-srweather-app.git
-
-cd ufs-srweather-app/
-./manage_externals/checkout_externals
-
-# We can build ufs-srweather-app binaries in two ways.
-
-# Method 1
-# ========
-
-# This is the simplest way to build the binaries
-
-./devbuild.sh --platform=PLATFORM
-
-# If compiler auto-detection fails, specify it using
-
-./devbuild.sh --platform=PLATFORM --compiler=COMPILER
-
-# Method 2
-# ========
-
-# The above instructions will work atleast on Tier-1 systems, if not on all supported machines.
-# However, if it fails for some reason, we can build directly with cmake.
-
-# First, we need to make sure that there is a modulefile "build_[PLATFORM]_[COMPILER]" in the
-# "modulefiles" directory. Also, on some systems (e.g. Gaea/Odin) that come with cray module app,
-# we may need to swap that for Lmod instead. Assuming your login shell is bash, run
-
-source etc/lmod-setup.sh PLATFORM
-
-# and if your login schell is csh/tcsh, source etc/lmod-setup.csh instead.
-
-# From here on, we can assume Lmod is loaded and ready to go. Then we load the specific
-# module for a given PLATFORM and COMPILER as follows
-
-module use $PWD/modulefiles #full path to modulefiles directory
-module load build_[PLATFORM]_[COMPILER]
-
-# Supported CMake flags:
-# -DCMAKE_INSTALL_PREFIX Location where the bin/ include/ lib/ and share/ directories containing
-# the various components of the SRW App will be created. Recommended value
-# is "..", one directory up from the build directory
-# -DCCPP_SUITES A comma-separated list of CCPP suites to build with the UFS weather
-# model. See the User's Guide for a full list of available suites. The
-# default is to build with the released supported suites: FV3_GFS_v15p2 and
-# FV3_RRFS_v1beta
-
-mkdir build && cd build
-cmake .. -DCMAKE_INSTALL_PREFIX=..
-make -j 8
-
-
diff --git a/doc/README b/doc/README
index 017f865384..ce2e37f76e 100644
--- a/doc/README
+++ b/doc/README
@@ -9,6 +9,7 @@ Steps to build and use the Sphinx documentation tool:
pip install sphinx
pip install sphinxcontrib-bibtex
pip install sphinx-rtd-theme
+ pip install sphinxcontrib.autoyaml
One approach that has worked to resolve "Module Not Found" errors for users with MacPorts package manager:
$ sudo port install py-six # may not be necessary
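
The extra package is needed because doc/conf.py (later in this patch) enables the ``sphinxcontrib.autoyaml`` extension. A minimal sketch, assuming a standard Python environment, for checking that all four documentation packages are importable before attempting a build:

    # Hypothetical pre-build check: confirm the Sphinx packages listed in
    # doc/README are importable in the current environment.
    import importlib

    required = [
        "sphinx",
        "sphinxcontrib.bibtex",
        "sphinx_rtd_theme",
        "sphinxcontrib.autoyaml",
    ]

    for pkg in required:
        try:
            importlib.import_module(pkg)
            print(f"OK: {pkg}")
        except ImportError as err:
            print(f"MISSING: {pkg} ({err})")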
diff --git a/doc/RUNTIME b/doc/RUNTIME
deleted file mode 100644
index e2ca78894d..0000000000
--- a/doc/RUNTIME
+++ /dev/null
@@ -1,22 +0,0 @@
-# Users should load the appropriate python environment for the workflow.
-# The workflow requires Python 3, with the packages 'PyYAML', 'Jinja2', and 'f90nml' available.
-
-# For users' convenience, the python environment for the workflow can be activated by loading wflow_[PLATFORM] modulefile
-
-# For example, on Hera:
-
-module load wflow_hera
-
-# Due to older version of Lmod, inconsistency with TCL modulefiles etc, you may have to activate
-# conda manually using instructions that the previous module command prints.
-# Hera is one of those systems, so execute:
-
-conda activate regional_workflow
-
-# After that we can setup an experiment in the directory
-
-cd regional_workflow/ush
-
-# Once we prepare experiment file config.sh, we can generate workflow using
-
-./generate_FV3LAM_wflow.sh
diff --git a/doc/TechDocs/index.rst b/doc/TechDocs/index.rst
new file mode 100644
index 0000000000..1ca8ec7309
--- /dev/null
+++ b/doc/TechDocs/index.rst
@@ -0,0 +1,8 @@
+Technical Documentation
+===========================
+
+.. toctree::
+ :maxdepth: 3
+
+ tests/index
+ ush/modules
\ No newline at end of file
diff --git a/doc/TechDocs/tests/WE2E/WE2E_summary.rst b/doc/TechDocs/tests/WE2E/WE2E_summary.rst
new file mode 100644
index 0000000000..d15eb54012
--- /dev/null
+++ b/doc/TechDocs/tests/WE2E/WE2E_summary.rst
@@ -0,0 +1,7 @@
+WE2E\_summary module
+====================
+
+.. automodule:: WE2E_summary
+ :members:
+ :undoc-members:
+ :show-inheritance:
diff --git a/doc/TechDocs/tests/WE2E/modules.rst b/doc/TechDocs/tests/WE2E/modules.rst
new file mode 100644
index 0000000000..5012d5229f
--- /dev/null
+++ b/doc/TechDocs/tests/WE2E/modules.rst
@@ -0,0 +1,11 @@
+WE2E
+====
+
+.. toctree::
+ :maxdepth: 4
+
+ WE2E_summary
+ monitor_jobs
+ print_test_info
+ run_WE2E_tests
+ utils
diff --git a/doc/TechDocs/tests/WE2E/monitor_jobs.rst b/doc/TechDocs/tests/WE2E/monitor_jobs.rst
new file mode 100644
index 0000000000..9630b5310c
--- /dev/null
+++ b/doc/TechDocs/tests/WE2E/monitor_jobs.rst
@@ -0,0 +1,7 @@
+monitor\_jobs module
+====================
+
+.. automodule:: monitor_jobs
+ :members:
+ :undoc-members:
+ :show-inheritance:
diff --git a/doc/TechDocs/tests/WE2E/print_test_info.rst b/doc/TechDocs/tests/WE2E/print_test_info.rst
new file mode 100644
index 0000000000..83e66e9bf8
--- /dev/null
+++ b/doc/TechDocs/tests/WE2E/print_test_info.rst
@@ -0,0 +1,7 @@
+print\_test\_info module
+========================
+
+.. automodule:: print_test_info
+ :members:
+ :undoc-members:
+ :show-inheritance:
diff --git a/doc/TechDocs/tests/WE2E/run_WE2E_tests.rst b/doc/TechDocs/tests/WE2E/run_WE2E_tests.rst
new file mode 100644
index 0000000000..648ae51bd2
--- /dev/null
+++ b/doc/TechDocs/tests/WE2E/run_WE2E_tests.rst
@@ -0,0 +1,7 @@
+run\_WE2E\_tests module
+=======================
+
+.. automodule:: run_WE2E_tests
+ :members:
+ :undoc-members:
+ :show-inheritance:
diff --git a/doc/TechDocs/tests/WE2E/utils.rst b/doc/TechDocs/tests/WE2E/utils.rst
new file mode 100644
index 0000000000..44cef9edab
--- /dev/null
+++ b/doc/TechDocs/tests/WE2E/utils.rst
@@ -0,0 +1,7 @@
+utils module
+============
+
+.. automodule:: utils
+ :members:
+ :undoc-members:
+ :show-inheritance:
diff --git a/doc/TechDocs/tests/index.rst b/doc/TechDocs/tests/index.rst
new file mode 100644
index 0000000000..d3c05fdf84
--- /dev/null
+++ b/doc/TechDocs/tests/index.rst
@@ -0,0 +1,7 @@
+tests
+======
+
+.. toctree::
+ :maxdepth: 3
+
+ WE2E/modules
\ No newline at end of file
diff --git a/doc/TechDocs/ush/UFS_plot_domains.rst b/doc/TechDocs/ush/UFS_plot_domains.rst
new file mode 100644
index 0000000000..bd1b42314a
--- /dev/null
+++ b/doc/TechDocs/ush/UFS_plot_domains.rst
@@ -0,0 +1,7 @@
+UFS\_plot\_domains module
+=========================
+
+.. automodule:: UFS_plot_domains
+ :members:
+ :undoc-members:
+ :show-inheritance:
diff --git a/doc/TechDocs/ush/calculate_cost.rst b/doc/TechDocs/ush/calculate_cost.rst
new file mode 100644
index 0000000000..aa85031292
--- /dev/null
+++ b/doc/TechDocs/ush/calculate_cost.rst
@@ -0,0 +1,7 @@
+calculate\_cost module
+======================
+
+.. automodule:: calculate_cost
+ :members:
+ :undoc-members:
+ :show-inheritance:
diff --git a/doc/TechDocs/ush/check_python_version.rst b/doc/TechDocs/ush/check_python_version.rst
new file mode 100644
index 0000000000..15a22a03c9
--- /dev/null
+++ b/doc/TechDocs/ush/check_python_version.rst
@@ -0,0 +1,7 @@
+check\_python\_version module
+=============================
+
+.. automodule:: check_python_version
+ :members:
+ :undoc-members:
+ :show-inheritance:
diff --git a/doc/TechDocs/ush/config_utils.rst b/doc/TechDocs/ush/config_utils.rst
new file mode 100644
index 0000000000..449200270d
--- /dev/null
+++ b/doc/TechDocs/ush/config_utils.rst
@@ -0,0 +1,7 @@
+config\_utils module
+====================
+
+.. automodule:: config_utils
+ :members:
+ :undoc-members:
+ :show-inheritance:
diff --git a/doc/TechDocs/ush/create_aqm_rc_file.rst b/doc/TechDocs/ush/create_aqm_rc_file.rst
new file mode 100644
index 0000000000..033b996dbc
--- /dev/null
+++ b/doc/TechDocs/ush/create_aqm_rc_file.rst
@@ -0,0 +1,7 @@
+create\_aqm\_rc\_file module
+============================
+
+.. automodule:: create_aqm_rc_file
+ :members:
+ :undoc-members:
+ :show-inheritance:
diff --git a/doc/TechDocs/ush/create_diag_table_file.rst b/doc/TechDocs/ush/create_diag_table_file.rst
new file mode 100644
index 0000000000..5a317526c1
--- /dev/null
+++ b/doc/TechDocs/ush/create_diag_table_file.rst
@@ -0,0 +1,7 @@
+create\_diag\_table\_file module
+================================
+
+.. automodule:: create_diag_table_file
+ :members:
+ :undoc-members:
+ :show-inheritance:
diff --git a/doc/TechDocs/ush/create_model_configure_file.rst b/doc/TechDocs/ush/create_model_configure_file.rst
new file mode 100644
index 0000000000..7a8dd0cd54
--- /dev/null
+++ b/doc/TechDocs/ush/create_model_configure_file.rst
@@ -0,0 +1,7 @@
+create\_model\_configure\_file module
+=====================================
+
+.. automodule:: create_model_configure_file
+ :members:
+ :undoc-members:
+ :show-inheritance:
diff --git a/doc/TechDocs/ush/create_ufs_configure_file.rst b/doc/TechDocs/ush/create_ufs_configure_file.rst
new file mode 100644
index 0000000000..56c17689b7
--- /dev/null
+++ b/doc/TechDocs/ush/create_ufs_configure_file.rst
@@ -0,0 +1,7 @@
+create\_ufs\_configure\_file module
+===================================
+
+.. automodule:: create_ufs_configure_file
+ :members:
+ :undoc-members:
+ :show-inheritance:
diff --git a/doc/TechDocs/ush/generate_FV3LAM_wflow.rst b/doc/TechDocs/ush/generate_FV3LAM_wflow.rst
new file mode 100644
index 0000000000..f8e3dbc91e
--- /dev/null
+++ b/doc/TechDocs/ush/generate_FV3LAM_wflow.rst
@@ -0,0 +1,7 @@
+generate\_FV3LAM\_wflow module
+==============================
+
+.. automodule:: generate_FV3LAM_wflow
+ :members:
+ :undoc-members:
+ :show-inheritance:
diff --git a/doc/TechDocs/ush/get_crontab_contents.rst b/doc/TechDocs/ush/get_crontab_contents.rst
new file mode 100644
index 0000000000..8e62d5a258
--- /dev/null
+++ b/doc/TechDocs/ush/get_crontab_contents.rst
@@ -0,0 +1,7 @@
+get\_crontab\_contents module
+=============================
+
+.. automodule:: get_crontab_contents
+ :members:
+ :undoc-members:
+ :show-inheritance:
diff --git a/doc/TechDocs/ush/link_fix.rst b/doc/TechDocs/ush/link_fix.rst
new file mode 100644
index 0000000000..79a7611115
--- /dev/null
+++ b/doc/TechDocs/ush/link_fix.rst
@@ -0,0 +1,7 @@
+link\_fix module
+================
+
+.. automodule:: link_fix
+ :members:
+ :undoc-members:
+ :show-inheritance:
diff --git a/doc/TechDocs/ush/modules.rst b/doc/TechDocs/ush/modules.rst
new file mode 100644
index 0000000000..2070e75ad0
--- /dev/null
+++ b/doc/TechDocs/ush/modules.rst
@@ -0,0 +1,29 @@
+ush
+===
+
+.. toctree::
+ :maxdepth: 4
+
+ UFS_plot_domains
+ calculate_cost
+ check_python_version
+ config_utils
+ create_aqm_rc_file
+ create_diag_table_file
+ create_model_configure_file
+ create_ufs_configure_file
+ generate_FV3LAM_wflow
+ get_crontab_contents
+ link_fix
+ mrms_pull_topofhour
+ python_utils
+ retrieve_data
+ run_srw_tests
+ set_cycle_dates
+ set_fv3nml_ens_stoch_seeds
+ set_fv3nml_sfc_climo_filenames
+ set_gridparams_ESGgrid
+ set_gridparams_GFDLgrid
+ set_predef_grid_params
+ setup
+ update_input_nml
diff --git a/doc/TechDocs/ush/mrms_pull_topofhour.rst b/doc/TechDocs/ush/mrms_pull_topofhour.rst
new file mode 100644
index 0000000000..c688cd70ca
--- /dev/null
+++ b/doc/TechDocs/ush/mrms_pull_topofhour.rst
@@ -0,0 +1,7 @@
+mrms\_pull\_topofhour module
+============================
+
+.. automodule:: mrms_pull_topofhour
+ :members:
+ :undoc-members:
+ :show-inheritance:
diff --git a/doc/TechDocs/ush/python_utils.rst b/doc/TechDocs/ush/python_utils.rst
new file mode 100644
index 0000000000..51dadd8b35
--- /dev/null
+++ b/doc/TechDocs/ush/python_utils.rst
@@ -0,0 +1,114 @@
+python\_utils package
+=====================
+
+.. automodule:: python_utils
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+Submodules
+----------
+
+python\_utils.check\_for\_preexist\_dir\_file module
+----------------------------------------------------
+
+.. automodule:: python_utils.check_for_preexist_dir_file
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+python\_utils.check\_var\_valid\_value module
+---------------------------------------------
+
+.. automodule:: python_utils.check_var_valid_value
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+python\_utils.config\_parser module
+-----------------------------------
+
+.. automodule:: python_utils.config_parser
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+python\_utils.create\_symlink\_to\_file module
+----------------------------------------------
+
+.. automodule:: python_utils.create_symlink_to_file
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+python\_utils.define\_macos\_utilities module
+---------------------------------------------
+
+.. automodule:: python_utils.define_macos_utilities
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+python\_utils.environment module
+--------------------------------
+
+.. automodule:: python_utils.environment
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+python\_utils.filesys\_cmds\_vrfy module
+----------------------------------------
+
+.. automodule:: python_utils.filesys_cmds_vrfy
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+python\_utils.fv3write\_parms\_lambert module
+---------------------------------------------
+
+.. automodule:: python_utils.fv3write_parms_lambert
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+python\_utils.misc module
+-------------------------
+
+.. automodule:: python_utils.misc
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+python\_utils.print\_input\_args module
+---------------------------------------
+
+.. automodule:: python_utils.print_input_args
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+python\_utils.print\_msg module
+-------------------------------
+
+.. automodule:: python_utils.print_msg
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+python\_utils.run\_command module
+---------------------------------
+
+.. automodule:: python_utils.run_command
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+python\_utils.xml\_parser module
+--------------------------------
+
+.. automodule:: python_utils.xml_parser
+ :members:
+ :undoc-members:
+ :show-inheritance:
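
These stubs follow the layout produced by ``sphinx-apidoc``: one ``automodule`` page per module plus a ``modules.rst`` toctree. A sketch of regenerating them programmatically; the output paths are assumptions based on this patch's directory layout:

    # Hypothetical regeneration of the TechDocs stubs with sphinx-apidoc,
    # run from the repository root; paths assume this patch's layout.
    from sphinx.ext import apidoc

    apidoc.main(["-o", "doc/TechDocs/ush", "ush"])
    apidoc.main(["-o", "doc/TechDocs/tests/WE2E", "tests/WE2E"])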
diff --git a/doc/TechDocs/ush/retrieve_data.rst b/doc/TechDocs/ush/retrieve_data.rst
new file mode 100644
index 0000000000..ff53326f08
--- /dev/null
+++ b/doc/TechDocs/ush/retrieve_data.rst
@@ -0,0 +1,7 @@
+retrieve\_data module
+=====================
+
+.. automodule:: retrieve_data
+ :members:
+ :undoc-members:
+ :show-inheritance:
diff --git a/doc/TechDocs/ush/run_srw_tests.rst b/doc/TechDocs/ush/run_srw_tests.rst
new file mode 100644
index 0000000000..d75e1a765f
--- /dev/null
+++ b/doc/TechDocs/ush/run_srw_tests.rst
@@ -0,0 +1,7 @@
+run\_srw\_tests module
+======================
+
+.. automodule:: run_srw_tests
+ :members:
+ :undoc-members:
+ :show-inheritance:
diff --git a/doc/TechDocs/ush/set_FV3nml_ens_stoch_seeds.rst b/doc/TechDocs/ush/set_FV3nml_ens_stoch_seeds.rst
new file mode 100644
index 0000000000..049c01d059
--- /dev/null
+++ b/doc/TechDocs/ush/set_FV3nml_ens_stoch_seeds.rst
@@ -0,0 +1,7 @@
+set\_fv3nml\_ens\_stoch\_seeds module
+=====================================
+
+.. automodule:: set_fv3nml_ens_stoch_seeds
+ :members:
+ :undoc-members:
+ :show-inheritance:
diff --git a/doc/TechDocs/ush/set_FV3nml_sfc_climo_filenames.rst b/doc/TechDocs/ush/set_FV3nml_sfc_climo_filenames.rst
new file mode 100644
index 0000000000..bc0a942e82
--- /dev/null
+++ b/doc/TechDocs/ush/set_FV3nml_sfc_climo_filenames.rst
@@ -0,0 +1,7 @@
+set\_fv3nml\_sfc\_climo\_filenames module
+=========================================
+
+.. automodule:: set_fv3nml_sfc_climo_filenames
+ :members:
+ :undoc-members:
+ :show-inheritance:
diff --git a/doc/TechDocs/ush/set_cycle_dates.rst b/doc/TechDocs/ush/set_cycle_dates.rst
new file mode 100644
index 0000000000..1af14392fd
--- /dev/null
+++ b/doc/TechDocs/ush/set_cycle_dates.rst
@@ -0,0 +1,7 @@
+set\_cycle\_dates module
+========================
+
+.. automodule:: set_cycle_dates
+ :members:
+ :undoc-members:
+ :show-inheritance:
diff --git a/doc/TechDocs/ush/set_gridparams_ESGgrid.rst b/doc/TechDocs/ush/set_gridparams_ESGgrid.rst
new file mode 100644
index 0000000000..0728326fa0
--- /dev/null
+++ b/doc/TechDocs/ush/set_gridparams_ESGgrid.rst
@@ -0,0 +1,7 @@
+set\_gridparams\_ESGgrid module
+===============================
+
+.. automodule:: set_gridparams_ESGgrid
+ :members:
+ :undoc-members:
+ :show-inheritance:
diff --git a/doc/TechDocs/ush/set_gridparams_GFDLgrid.rst b/doc/TechDocs/ush/set_gridparams_GFDLgrid.rst
new file mode 100644
index 0000000000..b207ffa986
--- /dev/null
+++ b/doc/TechDocs/ush/set_gridparams_GFDLgrid.rst
@@ -0,0 +1,7 @@
+set\_gridparams\_GFDLgrid module
+================================
+
+.. automodule:: set_gridparams_GFDLgrid
+ :members:
+ :undoc-members:
+ :show-inheritance:
diff --git a/doc/TechDocs/ush/set_predef_grid_params.rst b/doc/TechDocs/ush/set_predef_grid_params.rst
new file mode 100644
index 0000000000..23c5d30dfe
--- /dev/null
+++ b/doc/TechDocs/ush/set_predef_grid_params.rst
@@ -0,0 +1,7 @@
+set\_predef\_grid\_params module
+================================
+
+.. automodule:: set_predef_grid_params
+ :members:
+ :undoc-members:
+ :show-inheritance:
diff --git a/doc/TechDocs/ush/setup.rst b/doc/TechDocs/ush/setup.rst
new file mode 100644
index 0000000000..552eb49d6d
--- /dev/null
+++ b/doc/TechDocs/ush/setup.rst
@@ -0,0 +1,7 @@
+setup module
+============
+
+.. automodule:: setup
+ :members:
+ :undoc-members:
+ :show-inheritance:
diff --git a/doc/TechDocs/ush/update_input_nml.rst b/doc/TechDocs/ush/update_input_nml.rst
new file mode 100644
index 0000000000..8f3a51f729
--- /dev/null
+++ b/doc/TechDocs/ush/update_input_nml.rst
@@ -0,0 +1,7 @@
+update\_input\_nml module
+=========================
+
+.. automodule:: update_input_nml
+ :members:
+ :undoc-members:
+ :show-inheritance:
diff --git a/doc/UsersGuide/BuildingRunningTesting/Quickstart.rst b/doc/UsersGuide/BuildingRunningTesting/Quickstart.rst
index 36ea8eb7fb..3e58d6117f 100644
--- a/doc/UsersGuide/BuildingRunningTesting/Quickstart.rst
+++ b/doc/UsersGuide/BuildingRunningTesting/Quickstart.rst
@@ -89,10 +89,10 @@ For a detailed explanation of how to build and run the SRW App on any supported
CRON_RELAUNCH_INTVL_MNTS: 3
task_get_extrn_ics:
USE_USER_STAGED_EXTRN_FILES: true
- EXTRN_MDL_SOURCE_BASEDIR_ICS: /lustre/f2/dev/role.epic/contrib/UFS_SRW_data/v2p2/input_model_data/FV3GFS/grib2/${yyyymmddhh}
+ EXTRN_MDL_SOURCE_BASEDIR_ICS: /gpfs/f5/epic/world-shared/UFS_SRW_data/develop/input_model_data/FV3GFS/grib2/${yyyymmddhh}
task_get_extrn_lbcs:
USE_USER_STAGED_EXTRN_FILES: true
- EXTRN_MDL_SOURCE_BASEDIR_LBCS: /lustre/f2/dev/role.epic/contrib/UFS_SRW_data/v2p2/input_model_data/FV3GFS/grib2/${yyyymmddhh}
+ EXTRN_MDL_SOURCE_BASEDIR_LBCS: /gpfs/f5/epic/world-shared/UFS_SRW_data/develop/input_model_data/FV3GFS/grib2/${yyyymmddhh}
Users on a different system would update the machine, account, and data paths accordingly. Additional changes may be required based on the system and experiment. More detailed guidance is available in :numref:`Section %s `. Parameters and valid values are listed in :numref:`Section %s `.
diff --git a/doc/UsersGuide/BuildingRunningTesting/RunSRW.rst b/doc/UsersGuide/BuildingRunningTesting/RunSRW.rst
index 831b2a6345..0eb10e1519 100644
--- a/doc/UsersGuide/BuildingRunningTesting/RunSRW.rst
+++ b/doc/UsersGuide/BuildingRunningTesting/RunSRW.rst
@@ -55,8 +55,8 @@ The SRW App requires input files to run. These include static datasets, initial
- File location
* - Derecho
- /glade/work/epicufsrt/contrib/UFS_SRW_data/|data|/input_model_data
- * - Gaea (C3/C4/C5)
- - /lustre/f2/dev/role.epic/contrib/UFS_SRW_data/|data|/input_model_data/
+ * - Gaea
+ - /gpfs/f5/epic/world-shared/UFS_SRW_data/|data|/input_model_data/
* - Hera
- /scratch1/NCEPDEV/nems/role.epic/UFS_SRW_data/|data|/input_model_data/
* - Hercules
diff --git a/doc/UsersGuide/Reference/Glossary.rst b/doc/UsersGuide/Reference/Glossary.rst
index 2612d4fbe8..748d0f33b6 100644
--- a/doc/UsersGuide/Reference/Glossary.rst
+++ b/doc/UsersGuide/Reference/Glossary.rst
@@ -78,7 +78,7 @@ Glossary
The radar-indicated top of an area of precipitation. Specifically, it contains the height of the 18 dBZ reflectivity value.
EMC
- The `Environmental Modeling Center `__.
+ The `Environmental Modeling Center `__.
EPIC
The `Earth Prediction Innovation Center `__ seeks to accelerate scientific research and modeling contributions through continuous and sustained community engagement in order to produce the most accurate and reliable operational modeling system in the world.
diff --git a/doc/UsersGuide/index.rst b/doc/UsersGuide/index.rst
index 58c6fe6089..3deb9351e1 100644
--- a/doc/UsersGuide/index.rst
+++ b/doc/UsersGuide/index.rst
@@ -2,6 +2,7 @@ User's Guide
==============
.. toctree::
+ :numbered:
:maxdepth: 3
BackgroundInfo/index
diff --git a/doc/_static/theme_overrides.css b/doc/_static/theme_overrides.css
index a4e5cab82f..fe54c2e2e9 100644
--- a/doc/_static/theme_overrides.css
+++ b/doc/_static/theme_overrides.css
@@ -1,9 +1,10 @@
+/* !important prevents the common CSS stylesheets from overriding
+this as on RTD they are loaded after this stylesheet */
+
.wy-table-responsive table td {
- /* !important prevents the common CSS stylesheets from overriding
- this as on RTD they are loaded after this stylesheet */
white-space: normal !important;
}
-
+
.wy-nav-content {
max-width: 100% !important;
}
@@ -12,11 +13,6 @@
overflow: visible !important;
}
-/* Darken background for contrast with logo */
-.wy-side-nav-search, .wy-nav-top {
- background: #2779B0;
-}
-
/* Darken navbar blue background for contrast with logo */
.wy-side-nav-search, .wy-nav-top {
background: #2779B0;
diff --git a/doc/conf.py b/doc/conf.py
index 6b0f461ba8..0d440a733b 100644
--- a/doc/conf.py
+++ b/doc/conf.py
@@ -16,6 +16,9 @@
import sys
import sphinx
from sphinx.util import logging
+sys.path.insert(0, os.path.abspath('../ush'))
+sys.path.insert(0, os.path.abspath('../tests'))
+sys.path.insert(0, os.path.abspath('../tests/WE2E'))
@@ -33,6 +36,9 @@
numfig = True
+nitpick_ignore = [('py:class', 'obj'),('py:class',
+ 'yaml.dumper.Dumper'),('py:class',
+ 'xml.etree.ElementTree'),]
# -- General configuration ---------------------------------------------------
@@ -40,13 +46,11 @@
extensions = [
'sphinx_rtd_theme',
'sphinx.ext.autodoc',
+ 'sphinxcontrib.autoyaml',
'sphinx.ext.doctest',
'sphinx.ext.intersphinx',
'sphinx.ext.extlinks',
- 'sphinx.ext.coverage',
'sphinx.ext.mathjax',
- 'sphinx.ext.ifconfig',
- 'sphinx.ext.viewcode',
'sphinx.ext.napoleon',
'sphinxcontrib.bibtex',
]
@@ -151,6 +155,7 @@
def setup(app):
app.add_css_file('custom.css') # may also be an URL
app.add_css_file('theme_overrides.css') # may also be a URL
+ app.connect('autodoc-process-docstring', warn_undocumented_members)
# Custom sidebar templates, must be a dictionary that maps document names
# to template names.
@@ -244,6 +249,36 @@ def setup(app):
# -- Extension configuration -------------------------------------------------
+# -- Options for autodoc extension ---------------------------------------
+
+autodoc_mock_imports = ["f90nml","cartopy","mpl_toolkits.basemap","fill_jinja_template",
+ "matplotlib","numpy","uwtools","mpl_toolkits",
+ ]
+
+logger = logging.getLogger(__name__)
+
+members_to_watch = ['function', 'attribute', 'method']
+def warn_undocumented_members(app, what, name, obj, options, lines):
+ if(what in members_to_watch and len(lines)==0):
+ message = what + " is undocumented: " + name + "(%d)"% len(lines)
+ logger.warning(message)
+
+autodoc_default_options = {
+ "members": True,
+ "undoc-members": True,
+ "show-inheritance": True,
+ #"private-members": True
+}
+
+add_module_names = False
+
+# -- Options for napoleon extension ---------------------------------------
+
+napoleon_numpy_docstring = False
+napoleon_google_docstring = True
+napoleon_custom_sections = [('Returns', 'params_style')] # Allows return of multiple values
+
+
# -- Options for intersphinx extension ---------------------------------------
# Example configuration for intersphinx: refer to the Python standard library.
@@ -271,4 +306,12 @@ def setup(app):
'srw-repo': ('https://github.com/ufs-community/ufs-srweather-app/%s', '%s'),
'srw-wiki': ('https://github.com/ufs-community/ufs-srweather-app/wiki/%s','%s'),
'uw': ('https://uwtools.readthedocs.io/en/main/%s', '%s'),
- }
\ No newline at end of file
+ }
+
+# -- Options for autoyaml extension ---------------------------------------
+
+autoyaml_root = "../ush"
+autoyaml_doc_delimiter = "###" # Character(s) which start a documentation comment.
+autoyaml_comment = "#" #Comment start character(s).
+autoyaml_level = 6
+#autoyaml_safe_loader = False
\ No newline at end of file
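
With ``napoleon_google_docstring = True`` and the ``autodoc-process-docstring`` hook above, any function, attribute, or method whose processed docstring comes out empty is flagged by ``warn_undocumented_members`` during the build. An illustrative Google-style docstring (not from this repository) in the format these settings are configured to parse:

    # Illustrative only: a Google-style docstring in the format that the
    # napoleon settings added to doc/conf.py expect.
    def example_task(name: str, debug: bool = False) -> None:
        """Runs a single illustrative task.

        Args:
            name (str): Name of the task; used for logging only
            debug (bool): Set to True to enable extra output for debugging
        Returns:
            None
        """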
diff --git a/doc/index.rst b/doc/index.rst
index c8cf2b32fc..d6a93695df 100644
--- a/doc/index.rst
+++ b/doc/index.rst
@@ -1,9 +1,9 @@
-UFS Short-Range Weather App Documentation (|version|)
-=====================================================
-
+UFS Short-Range Weather (SRW) App Documentation (|version|)
+============================================================
+
.. toctree::
- :numbered:
:maxdepth: 3
UsersGuide/index
+ TechDocs/index
ContribGuide/index
diff --git a/doc/make.bat b/doc/make.bat
index 4d9eb83d9f..295c7e3942 100644
--- a/doc/make.bat
+++ b/doc/make.bat
@@ -7,7 +7,7 @@ REM Command file for Sphinx documentation
if "%SPHINXBUILD%" == "" (
set SPHINXBUILD=sphinx-build
)
-set SOURCEDIR=source
+set SOURCEDIR=doc
set BUILDDIR=build
if "%1" == "" goto help
diff --git a/doc/requirements.in b/doc/requirements.in
index 26c778f4aa..831cdc431d 100644
--- a/doc/requirements.in
+++ b/doc/requirements.in
@@ -1,3 +1,4 @@
sphinx>=6.0.0
sphinx_rtd_theme
sphinxcontrib-bibtex
+sphinxcontrib-autoyaml
diff --git a/doc/requirements.txt b/doc/requirements.txt
index e6d38a4eb8..38fdd2ef01 100644
--- a/doc/requirements.txt
+++ b/doc/requirements.txt
@@ -42,6 +42,8 @@ pyyaml==6.0.1
# via pybtex
requests==2.32.2
# via sphinx
+ruamel-yaml==0.16.13
+ # via sphinxcontrib-autoyaml
six==1.16.0
# via
# latexcodec
@@ -52,12 +54,15 @@ sphinx==7.2.6
# via
# -r requirements.in
# sphinx-rtd-theme
+ # sphinxcontrib-autoyaml
# sphinxcontrib-bibtex
# sphinxcontrib-jquery
sphinx-rtd-theme==2.0.0
# via -r requirements.in
sphinxcontrib-applehelp==1.0.8
# via sphinx
+sphinxcontrib-autoyaml==1.1.1
+ # via -r requirements.in
sphinxcontrib-bibtex==2.6.2
# via -r requirements.in
sphinxcontrib-devhelp==1.0.6
diff --git a/tests/WE2E/WE2E_summary.py b/tests/WE2E/WE2E_summary.py
index de478a0f38..16b94aaf3b 100755
--- a/tests/WE2E/WE2E_summary.py
+++ b/tests/WE2E/WE2E_summary.py
@@ -14,8 +14,13 @@
def setup_logging(debug: bool = False) -> None:
"""
- Sets up logging, printing high-priority (INFO and higher) messages to screen, and printing all
- messages with detailed timing and routine info in the specified text file.
+ Sets up logging to print high-priority (INFO and higher) messages to the console and to print all
+ messages with detailed timing and routine info to the specified text file.
+
+ Args:
+ debug (bool): Set to True to print more verbose output to the console
+ Returns:
+ None
"""
logging.getLogger().setLevel(logging.DEBUG)
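
A minimal sketch of the dual-handler scheme this docstring describes (INFO and above to the console, full detail to the log file); the handler format string here is an assumption, not the script's exact format:

    # Sketch: console gets INFO and above, the file gets everything with
    # timing and routine detail.
    import logging

    def setup_dual_logging(logfile: str = "log.run_WE2E_tests", debug: bool = False) -> None:
        logging.getLogger().setLevel(logging.DEBUG)

        console = logging.StreamHandler()
        console.setLevel(logging.DEBUG if debug else logging.INFO)
        logging.getLogger().addHandler(console)

        fh = logging.FileHandler(logfile, mode="w")
        fh.setLevel(logging.DEBUG)
        fh.setFormatter(logging.Formatter("%(asctime)s %(module)s %(levelname)s: %(message)s"))
        logging.getLogger().addHandler(fh)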
diff --git a/tests/WE2E/monitor_jobs.py b/tests/WE2E/monitor_jobs.py
index 93db4fb1a5..db7f1a1a8e 100755
--- a/tests/WE2E/monitor_jobs.py
+++ b/tests/WE2E/monitor_jobs.py
@@ -18,20 +18,17 @@
def monitor_jobs(expts_dict: dict, monitor_file: str = '', procs: int = 1,
mode: str = 'continuous', debug: bool = False) -> str:
- """Function to monitor and run jobs for the specified experiment using Rocoto
+ """Monitors and runs jobs for the specified experiment using Rocoto
Args:
- expts_dict (dict): A dictionary containing the information needed to run
- one or more experiments. See example file monitor_jobs.yaml
- monitor_file (str): [optional]
- mode (str): [optional] Mode of job monitoring
- continuous (default): monitor jobs continuously until complete
- advance: increment jobs once, then quit
+ expts_dict (dict): A dictionary containing the information needed to run one or more experiments. See example file ``monitor_jobs.yaml``.
+ monitor_file (str): [optional] Name of the file used to monitor experiment results. Default is ``monitor_jobs.yaml``.
+ procs (int): [optional] The number of parallel processes to run
+ mode (str): [optional] Mode of job monitoring. Options: (1) ``'continuous'`` (default): monitor jobs continuously until complete or (2) ``'advance'``: increment jobs once, then quit.
debug (bool): [optional] Enable extra output for debugging
Returns:
- str: The name of the file used for job monitoring (when script is finished, this
- contains results/summary)
+ monitor_file: The name of the file used for job monitoring (when script is finished, this contains results/summary)
"""
monitor_start = datetime.now()
# Write monitor_file, which will contain information on each monitored experiment
@@ -143,6 +140,12 @@ def setup_logging(logfile: str = "log.run_WE2E_tests", debug: bool = False) -> N
"""
Sets up logging, printing high-priority (INFO and higher) messages to screen, and printing all
messages with detailed timing and routine info in the specified text file.
+
+ Args:
+ logfile (str): Name of log file for WE2E tests (default: ``log.run_WE2E_tests``)
+ debug (bool): Set to True to enable extra output for debugging
+ Returns:
+ None
"""
logging.getLogger().setLevel(logging.DEBUG)
@@ -187,7 +190,7 @@ def setup_logging(logfile: str = "log.run_WE2E_tests", debug: bool = False) -> N
'advance: will only advance each experiment one step')
parser.add_argument('-d', '--debug', action='store_true',
help='Script will be run in debug mode with more verbose output. ' +
- 'WARNING: increased verbosity may run very slow on some platforms')
+ 'WARNING: increased verbosity may run very slowly on some platforms')
args = parser.parse_args()
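
A hypothetical invocation based on the signature documented above; the experiment dictionary contents shown here are placeholders, not a real experiment:

    # Hypothetical call matching the documented signature; the experiment
    # dictionary is a stand-in for one built by run_WE2E_tests.
    from monitor_jobs import monitor_jobs

    expts = {"my_experiment": {"expt_dir": "/path/to/expt_dirs/my_experiment"}}
    summary_file = monitor_jobs(expts_dict=expts, procs=2, mode="continuous", debug=False)
    print(f"Results written to {summary_file}")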
diff --git a/tests/WE2E/print_test_info.py b/tests/WE2E/print_test_info.py
index f2301bb690..a46142aa0c 100755
--- a/tests/WE2E/print_test_info.py
+++ b/tests/WE2E/print_test_info.py
@@ -1,5 +1,7 @@
#!/usr/bin/env python3
+"""Script for parsing all test files in the ``test_configs`` directory and printing a pipe-delimited summary file of the details of each test."""
+
import argparse
import sys
diff --git a/tests/WE2E/run_WE2E_tests.py b/tests/WE2E/run_WE2E_tests.py
index 5d4bd81105..d6d0c016e6 100755
--- a/tests/WE2E/run_WE2E_tests.py
+++ b/tests/WE2E/run_WE2E_tests.py
@@ -1,5 +1,6 @@
#!/usr/bin/env python3
# pylint: disable=logging-fstring-interpolation
+
import os
import sys
import glob
@@ -22,11 +23,11 @@
from utils import print_test_info
def run_we2e_tests(homedir, args) -> None:
- """Function to run the WE2E tests selected by the user
+ """Runs the Workflow End-to-End (WE2E) tests selected by the user
Args:
- homedir (str): The full path of the top-level app directory
- args (obj): The argparse.Namespace object containing command-line arguments
+ homedir (str): The full path to the top-level application directory
+ args (argparse.Namespace): Command-line arguments
Returns:
None
@@ -274,13 +275,13 @@ def run_we2e_tests(homedir, args) -> None:
def check_tests(tests: list) -> list:
"""
- Function for checking that all tests in a provided list of tests are valid
+ Checks that all tests in a provided list of tests are valid
Args:
tests (list): List of potentially valid test names
Returns:
- list: List of config files corresponding to test names
+ tests_to_run: List of configuration files corresponding to test names
"""
testfiles = glob.glob('test_configs/**/config*.yaml', recursive=True)
@@ -309,7 +310,7 @@ def check_tests(tests: list) -> list:
if not match:
raise Exception(f"Could not find test {test}")
tests_to_run.append(match)
- # Because some test files are symlinks to other tests, check that we don't
+ # Because some test files are symlinked to other tests, check that we don't
# include the same test twice
for testfile in tests_to_run.copy():
if os.path.islink(testfile):
@@ -328,13 +329,13 @@ def check_tests(tests: list) -> list:
def check_test(test: str) -> str:
"""
- Function for checking that a string corresponds to a valid test name
+ Checks that a string corresponds to a valid test name
Args:
- test (str) : String of potential test name
+ test (str): Potential test name
Returns:
- str: File name of test config file (empty string if no test file found)
+ config: Name of the test configuration file (empty string if no test file is found)
"""
# potential test files
testfiles = glob.glob('test_configs/**/config*.yaml', recursive=True)
@@ -350,17 +351,17 @@ def check_test(test: str) -> str:
def check_task_get_extrn_bcs(cfg: dict, mach: dict, dflt: dict, ics_or_lbcs: str = "") -> dict:
"""
- Function for checking and updating various settings in task_get_extrn_ics or
- task_get_extrn_lbcs section of test config yaml
+ Checks and updates various settings in the ``task_get_extrn_ics`` or
+ ``task_get_extrn_lbcs`` section of the test's configuration YAML file
Args:
- cfg (dict): Dictionary loaded from test config file
- mach (dict): Dictionary loaded from machine settings file
- dflt (dict): Dictionary loaded from default config file
- ics_or_lbcs (bool): Perform checks for ICs task or LBCs task
+ cfg (dict): Contents loaded from test configuration file
+ mach (dict): Contents loaded from machine settings file
+ dflt (dict): Contents loaded from default configuration file (``config_defaults.yaml``)
+ ics_or_lbcs (str): Perform checks for either the ICs task or the LBCs task. Valid values: ``"ics"`` | ``"lbcs"``
Returns:
- dict: Updated dictionary for task_get_extrn_[ics|lbcs] section of test config
+ cfg_bcs: Updated dictionary for ``task_get_extrn_[ics|lbcs]`` section of test configuration file
"""
if ics_or_lbcs not in ["lbcs", "ics"]:
@@ -434,8 +435,14 @@ def check_task_get_extrn_bcs(cfg: dict, mach: dict, dflt: dict, ics_or_lbcs: str
def setup_logging(logfile: str = "log.run_WE2E_tests", debug: bool = False) -> None:
"""
- Sets up logging, printing high-priority (INFO and higher) messages to screen, and printing all
- messages with detailed timing and routine info in the specified text file.
+ Sets up logging, prints high-priority (INFO and higher) messages to screen, and prints all
+ messages with detailed timing and routine info to the specified text file.
+
+ Args:
+ logfile (str): Name of the test logging file (default: ``log.run_WE2E_tests``)
+ debug (bool): Set to True for more detailed output/information
+ Returns:
+ None
"""
logging.getLogger().setLevel(logging.DEBUG)
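
A minimal sketch of the lookup and symlink deduplication that ``check_test()`` and ``check_tests()`` describe, assuming the repository's ``config.<test_name>.yaml`` naming convention:

    # Sketch of the test-name lookup described above: match a short test name
    # to its config file under test_configs/, then resolve symlinks so the
    # same test is not scheduled twice. Naming convention is an assumption.
    import glob
    import os

    def find_test_config(test: str) -> str:
        """Returns the config file for ``test``, or an empty string if not found."""
        for testfile in glob.glob("test_configs/**/config*.yaml", recursive=True):
            if os.path.basename(testfile) == f"config.{test}.yaml":
                return testfile
        return ""

    def dedupe_symlinked(tests_to_run: list) -> list:
        """Drops entries whose resolved path is already in the list."""
        seen, result = set(), []
        for testfile in tests_to_run:
            real = os.path.realpath(testfile)
            if real not in seen:
                seen.add(real)
                result.append(testfile)
        return result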
diff --git a/tests/WE2E/utils.py b/tests/WE2E/utils.py
index 0e6629ad17..33b514d456 100755
--- a/tests/WE2E/utils.py
+++ b/tests/WE2E/utils.py
@@ -28,11 +28,11 @@
EXPT_COLUMN_WIDTH = 65
TASK_COLUMN_WIDTH = 40
def print_WE2E_summary(expts_dict: dict, debug: bool = False):
- """Function that creates a summary for the specified experiment
+ """Creates a summary of the specified experiment
Args:
expts_dict (dict): A dictionary containing the information needed to run
- one or more experiments. See example file WE2E_tests.yaml
+ one or more experiments. See example file ``WE2E_tests.yaml``.
debug (bool): [optional] Enable extra output for debugging
Returns:
@@ -105,16 +105,16 @@ def print_WE2E_summary(expts_dict: dict, debug: bool = False):
for line in expt_details:
f.write(f"{line}\n")
-def create_expts_dict(expt_dir: str) -> dict:
+def create_expts_dict(expt_dir: str):
"""
- Function takes in a directory, searches that directory for subdirectories containing
- experiments, and creates a skeleton dictionary that can be filled out by update_expt_status()
+ Takes in a directory, searches that directory for subdirectories containing
+ experiments, and creates a skeleton dictionary that can be filled out by ``update_expt_status()``
Args:
- expt_dir (str): Experiment directory
+ expt_dir (str): Experiment directory name
Returns:
- dict: Experiment dictionary
+ (summary_file, expts_dict): A tuple including the name of the summary file (``WE2E_tests_YYYYMMDDHHmmSS.yaml``) and the experiment dictionary
"""
contents = sorted(os.listdir(expt_dir))
@@ -141,15 +141,14 @@ def create_expts_dict(expt_dir: str) -> dict:
def calculate_core_hours(expts_dict: dict) -> dict:
"""
- Function takes in an experiment dictionary, reads the var_defns file for necessary information,
- and calculates the core hours used by each task, updating expts_dict with this info
+ Takes in an experiment dictionary, reads the ``var_defns.sh`` file for necessary information,
+ and calculates the core hours used by each task, updating ``expts_dict`` with this information
Args:
- expts_dict (dict): A dictionary containing the information needed to run
- one or more experiments. See example file WE2E_tests.yaml
+ expts_dict (dict): The information needed to run one or more experiments. See example file ``WE2E_tests.yaml``
Returns:
- dict: Experiments dictionary updated with core hours
+ expts_dict: Experiment dictionary updated with core hours
"""
for expt in expts_dict:
@@ -188,6 +187,16 @@ def calculate_core_hours(expts_dict: dict) -> dict:
def write_monitor_file(monitor_file: str, expts_dict: dict):
+ """Writes status of tests to file
+
+ Args:
+ monitor_file (str): File name
+ expts_dict (dict): Experiments being monitored
+ Returns:
+ None
+ Raises:
+ KeyboardInterrupt: If a user attempts to disrupt program execution (e.g., with ``Ctrl+C``) while the program is writing information to ``monitor_file``.
+ """
try:
with open(monitor_file,"w", encoding="utf-8") as f:
f.write("### WARNING ###\n")
@@ -209,55 +218,29 @@ def write_monitor_file(monitor_file: str, expts_dict: dict):
def update_expt_status(expt: dict, name: str, refresh: bool = False, debug: bool = False,
submit: bool = True) -> dict:
"""
- This function reads the dictionary showing the location of a given experiment, runs a
- `rocotorun` command to update the experiment (running new jobs and updating the status of
- previously submitted ones), and reads the rocoto database file to update the status of
- each job for that experiment in the experiment dictionary.
-
- The function then and uses a simple set of rules to combine the statuses of every task
- into a useful "status" for the whole experiment, and returns the updated experiment dictionary.
-
- Experiment "status" levels explained:
- CREATED: The experiments have been created, but the monitor script has not yet processed them.
- This is immediately overwritten at the beginning of the "monitor_jobs" function, so we
- should never see this status in this function. Including just for completeness sake.
- SUBMITTING: All jobs are in status SUBMITTING or SUCCEEDED. This is a normal state; we will
- continue to monitor this experiment.
- DYING: One or more tasks have died (status "DEAD"), so this experiment has had an error.
- We will continue to monitor this experiment until all tasks are either status DEAD or
- status SUCCEEDED (see next entry).
- DEAD: One or more tasks are at status DEAD, and the rest are either DEAD or SUCCEEDED. We
- will no longer monitor this experiment.
- ERROR: Could not read the rocoto database file. This will require manual intervention to
- solve, so we will no longer monitor this experiment.
- This status may also appear if we fail to read the rocoto database file.
- RUNNING: One or more jobs are at status RUNNING, and the rest are either status QUEUED,
- SUBMITTED, or SUCCEEDED. This is a normal state; we will continue to monitor this
- experiment.
- QUEUED: One or more jobs are at status QUEUED, and some others may be at status SUBMITTED or
- SUCCEEDED.
- This is a normal state; we will continue to monitor this experiment.
- SUCCEEDED: All jobs are status SUCCEEDED; we will monitor for one more cycle in case there are
- unsubmitted jobs remaining.
- COMPLETE:All jobs are status SUCCEEDED, and we have monitored this job for an additional cycle
- to ensure there are no un-submitted jobs. We will no longer monitor this experiment.
+ This function reads the dictionary for a given experiment, runs the ``rocotorun`` command to update the experiment (by running new jobs and updating the status of previously submitted ones), and reads the Rocoto database (``.db``) file to update the status of each job in the experiment dictionary. The function then uses a simple set of rules to combine the statuses of every task into a useful summary status for the whole experiment and returns the updated experiment dictionary.
+
+ Experiment status levels explained:
+
+ * **CREATED:** The experiments have been created, but the monitor script has not yet processed them. This is immediately overwritten at the beginning of the ``monitor_jobs()`` function.
+ * **SUBMITTING:** All jobs are in status SUBMITTING or SUCCEEDED. This is a normal state; experiment monitoring will continue.
+ * **DYING:** One or more tasks have died (status DEAD), so this experiment has an error. Experiment monitoring will continue until all previously submitted tasks are in either status DEAD or status SUCCEEDED (see next entry).
+ * **DEAD:** One or more tasks are in status DEAD, and other previously submitted jobs are either DEAD or SUCCEEDED. This experiment will no longer be monitored.
+ * **ERROR:** Could not read the Rocoto database (``.db``) file. This will require manual intervention to solve, so the experiment will no longer be monitored.
+ * **RUNNING:** One or more jobs are in status RUNNING, and other previously submitted jobs are in status QUEUED, SUBMITTED, or SUCCEEDED. This is a normal state; experiment monitoring will continue.
+ * **QUEUED:** One or more jobs are in status QUEUED, and some others may be in status SUBMITTED or SUCCEEDED. This is a normal state; experiment monitoring will continue.
+ * **SUCCEEDED:** All jobs are in status SUCCEEDED; experiment monitoring will continue for one more cycle in case there are unsubmitted jobs remaining.
+ * **COMPLETE:** All jobs are in status SUCCEEDED, and the experiment has been monitored for an additional cycle to ensure that there are no unsubmitted jobs. This experiment will no longer be monitored.
Args:
- expt (dict): A dictionary containing the information for an individual experiment, as
- described in the main monitor_jobs() function.
+ expt (dict): A dictionary containing the information for an individual experiment, as described in the main ``monitor_jobs()`` function.
name (str): Name of the experiment; used for logging only
- refresh (bool): If true, this flag will check an experiment status even if it is listed
- as DEAD, ERROR, or COMPLETE. Used for initial checks for experiments
- that may have been restarted.
- debug (bool): Will capture all output from rocotorun. This will allow information such
- as job cards and job submit messages to appear in the log files, but can
- slow down the process drastically.
- submit (bool): In addition to reading the rocoto database, script will advance the
- workflow by calling rocotorun. If simply generating a report, set this
- to False
+ refresh (bool): If True, this flag will check an experiment status even if it is listed as DEAD, ERROR, or COMPLETE. Used for initial checks for experiments that may have been restarted.
+ debug (bool): Will capture all output from ``rocotorun``. This will allow information such as job cards and job submit messages to appear in the log files, but turning on this option can drastically slow down the testing process.
+ submit (bool): In addition to reading the Rocoto database (``.db``) file, the script will advance the workflow by calling ``rocotorun``. If simply generating a report, set this to False.
Returns:
- dict: The updated experiment dictionary.
+ expt: The updated experiment dictionary
"""
#If we are no longer tracking this experiment, return unchanged
@@ -305,8 +288,8 @@ def update_expt_status(expt: dict, name: str, refresh: bool = False, debug: bool
for task in db:
# For each entry from rocoto database, store that task's info under a dictionary key named
- # TASKNAME_CYCLE; Cycle comes from the database in Unix Time (seconds), so convert to
- # human-readable
+ # TASKNAME_CYCLE; cycle comes from the database in Unix Time (seconds), so convert to
+ # human-readable time
cycle = datetime.utcfromtimestamp(task[1]).strftime('%Y%m%d%H%M')
if f"{task[0]}_{cycle}" not in expt:
expt[f"{task[0]}_{cycle}"] = dict()
@@ -377,20 +360,18 @@ def update_expt_status_parallel(expts_dict: dict, procs: int, refresh: bool = Fa
debug: bool = False) -> dict:
"""
This function updates an entire set of experiments in parallel, drastically speeding up
- the process if given enough parallel processes. Given a dictionary of experiments, it will
- pass each individual experiment dictionary to update_expt_status() to be updated, making use
- of the python multiprocessing starmap functionality to achieve this in parallel
+ the testing if given enough parallel processes. Given a dictionary of experiments, it will
+ pass each individual experiment dictionary to ``update_expt_status()``, making use
+ of the Python multiprocessing ``starmap()`` functionality to update the experiments in parallel.
Args:
expts_dict (dict): A dictionary containing information for all experiments
procs (int): The number of parallel processes
- refresh (bool): "Refresh" flag to pass to update_expt_status()
- debug (bool): Will capture all output from rocotorun. This will allow information such
- as job cards and job submit messages to appear in the log files, but can
- slow down the process drastically.
+ refresh (bool): "Refresh" flag to pass to ``update_expt_status()``. If True, this flag will check an experiment status even if it is listed as DEAD, ERROR, or COMPLETE. Used for initial checks for experiments that may have been restarted.
+ debug (bool): Will capture all output from ``rocotorun``. This will allow information such as job cards and job submit messages to appear in the log files, but can drastically slow down the testing process.
Returns:
- dict: The updated dictionary of experiment dictionaries
+ expts_dict: The updated dictionary of experiment dictionaries
"""
args = []
@@ -413,11 +394,12 @@ def update_expt_status_parallel(expts_dict: dict, procs: int, refresh: bool = Fa
def print_test_info(txtfile: str = "WE2E_test_info.txt") -> None:
- """Prints a pipe ( | ) delimited text file containing summaries of each test defined by a
- config file in test_configs/*
+ """Prints a pipe-delimited ( ``|`` ) text file containing summaries of each test with a configuration file in ``test_configs/*``
Args:
- txtfile (str): File name for test details file
+ txtfile (str): File name for test details file (default: ``WE2E_test_info.txt``)
+ Returns:
+ None
"""
testfiles = glob.glob('test_configs/**/config*.yaml', recursive=True)
@@ -508,9 +490,15 @@ def print_test_info(txtfile: str = "WE2E_test_info.txt") -> None:
def compare_rocotostat(expt_dict,name):
- """Reads the dictionary showing the location of a given experiment, runs a `rocotostat` command
+ """Reads the dictionary showing the location of a given experiment, runs a ``rocotostat`` command
to get the full set of tasks for the experiment, and compares the two to see if there are any
unsubmitted tasks remaining.
+
+ Args:
+ expt_dict (dict): A dictionary containing the information for an individual experiment
+ name (str): Name of the experiment
+ Returns:
+ expt_dict: A dictionary containing the information for an individual experiment
"""
# Call rocotostat and store output
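
A condensed sketch of the status-combination rules listed in the ``update_expt_status()`` docstring above, under the assumption that per-task Rocoto statuses are available as plain strings; the real function also tracks cycles and experiments that are no longer monitored:

    # Condensed sketch of the documented status-combination rules.
    def combine_statuses(task_statuses: list) -> str:
        statuses = set(task_statuses)
        if "DEAD" in statuses:
            # DYING while non-terminal tasks remain; DEAD once all are terminal
            active = statuses - {"DEAD", "SUCCEEDED"}
            return "DYING" if active else "DEAD"
        if "RUNNING" in statuses:
            return "RUNNING"
        if "QUEUED" in statuses:
            return "QUEUED"
        if "SUBMITTING" in statuses and statuses <= {"SUBMITTING", "SUCCEEDED"}:
            return "SUBMITTING"
        if statuses == {"SUCCEEDED"}:
            return "SUCCEEDED"
        return "ERROR"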
diff --git a/ush/UFS_plot_domains.py b/ush/UFS_plot_domains.py
index 847cd96725..827991287a 100755
--- a/ush/UFS_plot_domains.py
+++ b/ush/UFS_plot_domains.py
@@ -1,143 +1,39 @@
#!/usr/bin/env python
+"""This script generates a plot of the ESG grid and the write component grid.
+ To use this script, modify the ESG grid and write component grid parameters in this script
+ manually to reflect the configuration of the grids you wish to plot. Then run the script. """
+
import matplotlib.pyplot as plt
from mpl_toolkits.basemap import Basemap
from matplotlib.path import Path
import matplotlib.patches as patches
import numpy as np
-#### User-defined variables
-
-
-# Computational grid definitions
-ESGgrid_LON_CTR = -153.0
-ESGgrid_LAT_CTR = 61.0
-ESGgrid_DELX = 3000.0
-ESGgrid_DELY = 3000.0
-ESGgrid_NX = 1344
-ESGgrid_NY = 1152
-
-# Write component grid definitions
-
-WRTCMP_nx = 1340
-WRTCMP_ny = 1132
-WRTCMP_lon_lwr_left = 151.5
-WRTCMP_lat_lwr_left = 42.360
-WRTCMP_dx = ESGgrid_DELX
-WRTCMP_dy = ESGgrid_DELY
-
-# Plot-specific definitions
-
-plot_res = "i" # background map resolution
-
-# Note: Resolution can be 'c' (crude), 'l' (low), 'i' (intermediate), 'h' (high), or 'f' (full)
-# To plot maps with higher resolution than low,
-# you will need to download and install the basemap-data-hires package
-
-
-#### END User-defined variables
-
-
-ESGgrid_width = ESGgrid_NX * ESGgrid_DELX
-ESGgrid_height = ESGgrid_NY * ESGgrid_DELY
-
-big_grid_width = np.ceil(ESGgrid_width * 1.25)
-big_grid_height = np.ceil(ESGgrid_height * 1.25)
-
-WRTCMP_width = WRTCMP_nx * WRTCMP_dx
-WRTCMP_height = WRTCMP_ny * WRTCMP_dy
-
-fig = plt.figure()
-
-# ax1 = plt.axes
-ax1 = plt.subplot2grid((1, 1), (0, 0))
-
-map1 = Basemap(
- projection="gnom",
- resolution=plot_res,
- lon_0=ESGgrid_LON_CTR,
- lat_0=ESGgrid_LAT_CTR,
- width=big_grid_width,
- height=big_grid_height,
-)
-
-map1.drawmapboundary(fill_color="#9999FF")
-map1.fillcontinents(color="#ddaa66", lake_color="#9999FF")
-map1.drawcoastlines()
-
-map2 = Basemap(
- projection="gnom",
- lon_0=ESGgrid_LON_CTR,
- lat_0=ESGgrid_LAT_CTR,
- width=ESGgrid_width,
- height=ESGgrid_height,
-)
-
-# map2.drawmapboundary(fill_color='#9999FF')
-# map2.fillcontinents(color='#ddaa66',lake_color='#9999FF')
-# map2.drawcoastlines()
-
-
-map3 = Basemap(
- llcrnrlon=WRTCMP_lon_lwr_left,
- llcrnrlat=WRTCMP_lat_lwr_left,
- width=WRTCMP_width,
- height=WRTCMP_height,
- resolution=plot_res,
- projection="lcc",
- lat_0=ESGgrid_LAT_CTR,
- lon_0=ESGgrid_LON_CTR,
-)
-
-# map3.drawmapboundary(fill_color='#9999FF')
-# map3.fillcontinents(color='#ddaa66',lake_color='#9999FF',alpha=0.5)
-# map3.drawcoastlines()
-
-
-# Draw gnomonic compute grid rectangle:
-
-lbx1, lby1 = map1(*map2(map2.xmin, map2.ymin, inverse=True))
-ltx1, lty1 = map1(*map2(map2.xmin, map2.ymax, inverse=True))
-rtx1, rty1 = map1(*map2(map2.xmax, map2.ymax, inverse=True))
-rbx1, rby1 = map1(*map2(map2.xmax, map2.ymin, inverse=True))
-
-verts1 = [
- (lbx1, lby1), # left, bottom
- (ltx1, lty1), # left, top
- (rtx1, rty1), # right, top
- (rbx1, rby1), # right, bottom
- (lbx1, lby1), # ignored
-]
-
-codes2 = [
- Path.MOVETO,
- Path.LINETO,
- Path.LINETO,
- Path.LINETO,
- Path.CLOSEPOLY,
-]
-
-path = Path(verts1, codes2)
-patch = patches.PathPatch(path, facecolor="r", lw=2, alpha=0.5)
-ax1.add_patch(patch)
-
-
-# Draw lambert write grid rectangle:
-
# Define a function to get the lambert points in the gnomonic space
-
-
+
def get_lambert_points(gnomonic_map, lambert_map, pps):
- # This function takes the lambert domain we have defined, lambert_map, as well as
- # pps (the number of points to interpolate and draw for each side of the lambert "rectangle"),
- # and returns an array of two lists: one a list of tuples of the 4*ppf + 4 vertices mapping the approximate shape
- # of the lambert domain on the gnomonic map, the other a list of "draw" instructions to be used by
- # the PathPatch function
-
- # pps is recommended 10 or less due to time of calculation
-
- # Start array with bottom left point, "MOVETO" instruction
+ """This function takes the lambert domain we have defined, ``lambert_map``, and ``pps``,
+ and returns an array of two lists: one a list of tuples of the ``4*ppf + 4`` vertices mapping
+ the approximate shape of the lambert domain on the gnomonic map, the other a list of "draw"
+ instructions to be used by the PathPatch function.
+
+ Start array with bottom left point, "MOVETO" instruction
+
+ Args:
+ gnomonic_map (Basemap): A map of the ESG grid
+ lambert_map (Basemap): A map of the write component grid
+ pps: The number of points to interpolate and draw for each side of the lambert
+ "rectangle." A value of 10 or less is recommended due to calculation time.
+
+ Returns:
+ vertices, instructions: A tuple of two lists---a list of tuples of the ``4*pps + 4``
+ vertices mapping the approximate shape of the lambert domain
+ on the gnomonic map, the other a list of "draw" instructions
+ to be used by the PathPatch function.
+ """
+
vertices = [
gnomonic_map(*lambert_map(lambert_map.xmin, lambert_map.ymin, inverse=True))
]
@@ -189,15 +85,133 @@ def get_lambert_points(gnomonic_map, lambert_map, pps):
return vertices, instructions
-# Call the function we just defined to generate a polygon roughly approximating the lambert "rectangle" in gnomonic space
-verts3, codes3 = get_lambert_points(map1, map3, 10)
+#### User-defined variables
+
+if __name__ == "__main__":
+
+ # Computational grid definitions
+ ESGgrid_LON_CTR = -153.0
+ ESGgrid_LAT_CTR = 61.0
+ ESGgrid_DELX = 3000.0
+ ESGgrid_DELY = 3000.0
+ ESGgrid_NX = 1344
+ ESGgrid_NY = 1152
+
+ # Write component grid definitions
+
+ WRTCMP_nx = 1340
+ WRTCMP_ny = 1132
+ WRTCMP_lon_lwr_left = 151.5
+ WRTCMP_lat_lwr_left = 42.360
+ WRTCMP_dx = ESGgrid_DELX
+ WRTCMP_dy = ESGgrid_DELY
+
+ # Plot-specific definitions
+
+ plot_res = "i" # background map resolution
+
+ # Note: Resolution can be 'c' (crude), 'l' (low), 'i' (intermediate), 'h' (high), or 'f' (full)
+    # To plot maps at a resolution higher than 'l' (low), you will need to
+    # download and install the basemap-data-hires package
+
+
+ #### END User-defined variables
+
+
+ ESGgrid_width = ESGgrid_NX * ESGgrid_DELX
+ ESGgrid_height = ESGgrid_NY * ESGgrid_DELY
+
+ big_grid_width = np.ceil(ESGgrid_width * 1.25)
+ big_grid_height = np.ceil(ESGgrid_height * 1.25)
+
+ WRTCMP_width = WRTCMP_nx * WRTCMP_dx
+ WRTCMP_height = WRTCMP_ny * WRTCMP_dy
+
+ fig = plt.figure()
+
+ # ax1 = plt.axes
+ ax1 = plt.subplot2grid((1, 1), (0, 0))
+
+ map1 = Basemap(
+ projection="gnom",
+ resolution=plot_res,
+ lon_0=ESGgrid_LON_CTR,
+ lat_0=ESGgrid_LAT_CTR,
+ width=big_grid_width,
+ height=big_grid_height,
+ )
+
+ map1.drawmapboundary(fill_color="#9999FF")
+ map1.fillcontinents(color="#ddaa66", lake_color="#9999FF")
+ map1.drawcoastlines()
+
+ map2 = Basemap(
+ projection="gnom",
+ lon_0=ESGgrid_LON_CTR,
+ lat_0=ESGgrid_LAT_CTR,
+ width=ESGgrid_width,
+ height=ESGgrid_height,
+ )
+
+ # map2.drawmapboundary(fill_color='#9999FF')
+ # map2.fillcontinents(color='#ddaa66',lake_color='#9999FF')
+ # map2.drawcoastlines()
+
+
+ map3 = Basemap(
+ llcrnrlon=WRTCMP_lon_lwr_left,
+ llcrnrlat=WRTCMP_lat_lwr_left,
+ width=WRTCMP_width,
+ height=WRTCMP_height,
+ resolution=plot_res,
+ projection="lcc",
+ lat_0=ESGgrid_LAT_CTR,
+ lon_0=ESGgrid_LON_CTR,
+ )
+
+ # map3.drawmapboundary(fill_color='#9999FF')
+ # map3.fillcontinents(color='#ddaa66',lake_color='#9999FF',alpha=0.5)
+ # map3.drawcoastlines()
+
+
+ # Draw gnomonic compute grid rectangle:
+
+ lbx1, lby1 = map1(*map2(map2.xmin, map2.ymin, inverse=True))
+ ltx1, lty1 = map1(*map2(map2.xmin, map2.ymax, inverse=True))
+ rtx1, rty1 = map1(*map2(map2.xmax, map2.ymax, inverse=True))
+ rbx1, rby1 = map1(*map2(map2.xmax, map2.ymin, inverse=True))
+
+ verts1 = [
+ (lbx1, lby1), # left, bottom
+ (ltx1, lty1), # left, top
+ (rtx1, rty1), # right, top
+ (rbx1, rby1), # right, bottom
+ (lbx1, lby1), # ignored
+ ]
+
+ codes2 = [
+ Path.MOVETO,
+ Path.LINETO,
+ Path.LINETO,
+ Path.LINETO,
+ Path.CLOSEPOLY,
+ ]
+
+ path = Path(verts1, codes2)
+ patch = patches.PathPatch(path, facecolor="r", lw=2, alpha=0.5)
+ ax1.add_patch(patch)
+
+ # Draw lambert write grid rectangle:
+ # Call get_lambert_points() to generate a polygon roughly approximating the lambert "rectangle" in gnomonic space
+
+ verts3, codes3 = get_lambert_points(map1, map3, 10)
-# Now draw!
+ # Now draw!
-path = Path(verts3, codes3)
-patch = patches.PathPatch(path, facecolor="w", lw=2, alpha=0.5)
-ax1.add_patch(patch)
+ path = Path(verts3, codes3)
+ patch = patches.PathPatch(path, facecolor="w", lw=2, alpha=0.5)
+ ax1.add_patch(patch)
-plt.show()
+ plt.show()
diff --git a/ush/calculate_cost.py b/ush/calculate_cost.py
index b5e952b252..5fd22bb5b9 100755
--- a/ush/calculate_cost.py
+++ b/ush/calculate_cost.py
@@ -15,6 +15,18 @@
def calculate_cost(config_fn):
+ """Calculates the cost of running an experiment based on its configuration file details
+
+ Args:
+ config_fn (str): Name of a configuration file containing experiment parameters
+
+ Returns:
+ cost (list): Cost array containing information related to experiment parameters
+ (e.g., time step and grid)
+
+ Raises:
+ ValueError: If ``GRID_GEN_METHOD`` is set to an invalid value
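+
+    A minimal usage sketch (the configuration file name is illustrative):
+
+    .. code-block:: python
+
+        cost = calculate_cost("config.community.yaml")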
+ """
ushdir = os.path.dirname(os.path.abspath(__file__))
cfg_u = load_config_file(config_fn)
diff --git a/ush/check_python_version.py b/ush/check_python_version.py
index afc3dac62f..8c61cba865 100755
--- a/ush/check_python_version.py
+++ b/ush/check_python_version.py
@@ -7,8 +7,13 @@
def check_python_version():
- """Check if python version >= 3.6 and presence of some
- non-standard packages currently jinja2, yaml, f90nml"""
+ """Checks for python version >= 3.6 and for presence of some
+ non-standard packages (currently ``jinja2``, ``yaml``, ``f90nml``)
+
+ Raises:
+ ImportError: If checked packages are missing.
+ Exception: If Python version is less than 3.6
+ """
# Check for non-standard python packages
try:
diff --git a/ush/config_defaults.yaml b/ush/config_defaults.yaml
index 90651c1b7f..488c2d5d3d 100644
--- a/ush/config_defaults.yaml
+++ b/ush/config_defaults.yaml
@@ -5,33 +5,22 @@ metadata:
description: >-
Default configuration for an experiment. The valid values for most of the
parameters are specified in valid_param_vals.yaml
-#----------------------------
-# USER config parameters
-#----------------------------
+###
+# USER-related configuration parameters
+#
user:
+
+ ###
+ # (Default: ``"nco"``)
+ #
+ # This variable determines the workflow mode. The user can choose between two options: "nco" and "community". The "nco" mode uses a directory structure that mimics what is used in operations at NOAA/NCEP Central Operations (NCO) and at the NOAA/NCEP/Environmental Modeling Center (EMC), which works with NCO on pre-implementation testing. Specifics of the conventions used in "nco" mode can be found in the following :nco:`WCOSS Implementation Standards <>` document:
#
- #-----------------------------------------------------------------------
- #
- # Set the RUN_ENVIR variable that is listed and described in the WCOSS
- # Implementation Standards document:
- #
- # NCEP Central Operations
- # WCOSS Implementation Standards
- # April 19, 2022
- # Version 11.0.0
- #
- # RUN_ENVIR is described in this document as follows:
- #
- # Set to "nco" if running in NCO's production environment. Used to
- # distinguish between organizations.
- #
- # Valid values are "nco" and "community". Here, we use it to generate
- # and run the experiment either in NCO mode (if RUN_ENVIR is set to "nco")
- # or in community mode (if RUN_ENVIR is set to "community"). This has
- # implications on the experiment variables that need to be set and the
- # the directory structure used.
+ # | NCEP Central Operations
+ # | WCOSS Implementation Standards
+ # | January 19, 2022
+ # | Version 11.0.0
#
- #-----------------------------------------------------------------------
+ # Setting ``RUN_ENVIR`` to "community" is recommended in most cases for users who are not running in NCO's production environment. Valid values: ``"nco"`` | ``"community"``
#
RUN_ENVIR: "nco"
#
diff --git a/ush/config_utils.py b/ush/config_utils.py
index 84949c7db4..7e5f2d77e4 100755
--- a/ush/config_utils.py
+++ b/ush/config_utils.py
@@ -2,9 +2,11 @@
"""
Interface to configuration file management utilities.
-To see what it can do:
+To see what it can do, run:
- ./config_utils --help
+.. code-block:: console
+
+ ./config_utils --help
"""
diff --git a/ush/create_aqm_rc_file.py b/ush/create_aqm_rc_file.py
index c37ed05d29..c459b6a587 100644
--- a/ush/create_aqm_rc_file.py
+++ b/ush/create_aqm_rc_file.py
@@ -19,74 +19,81 @@
str_to_type,
)
+
def create_aqm_rc_file(cdate, run_dir, init_concentrations):
- """ Creates an aqm.rc file in the specified run directory
+ """Creates an ``aqm.rc`` file in the specified run directory
Args:
- cdate: cycle date
- run_dir: run directory
- init_concentrations
+ cdate (str): Cycle date
+ run_dir (str): Run directory
+ init_concentrations (bool): Flag to reset initial AQM concentrations (tracer values) to
+ zero.
Returns:
- Boolean
+ True
"""
print_input_args(locals())
- #import all environment variables
+ # import all environment variables
import_vars()
- #pylint: disable=undefined-variable
+ # pylint: disable=undefined-variable
#
- #-----------------------------------------------------------------------
+ # -----------------------------------------------------------------------
#
# Create the aqm.rc file in the specified run directory.
#
- #-----------------------------------------------------------------------
+ # -----------------------------------------------------------------------
#
- print_info_msg(f'''
+ print_info_msg(
+ f'''
Creating the aqm.rc file (\"{AQM_RC_FN}\") in the specified
run directory (run_dir):
- run_dir = \"{run_dir}\"''', verbose=VERBOSE)
+ run_dir = \"{run_dir}\"''',
+ verbose=VERBOSE,
+ )
#
# Set output file path
#
- aqm_rc_fp=os.path.join(run_dir, AQM_RC_FN)
+ aqm_rc_fp = os.path.join(run_dir, AQM_RC_FN)
#
# Extract from cdate the starting year, month, and day of the forecast.
#
- yyyymmdd=cdate.strftime('%Y%m%d')
- mm=f"{cdate.month:02d}" # pylint: disable=invalid-name
- hh=f"{cdate.hour:02d}" # pylint: disable=invalid-name
+ yyyymmdd = cdate.strftime("%Y%m%d")
+ mm = f"{cdate.month:02d}" # pylint: disable=invalid-name
+ hh = f"{cdate.hour:02d}" # pylint: disable=invalid-name
#
# Set parameters in the aqm.rc file.
#
- aqm_rc_bio_file_fp=os.path.join(FIXaqm,"bio", AQM_BIO_FILE)
+ aqm_rc_bio_file_fp = os.path.join(FIXaqm, "bio", AQM_BIO_FILE)
# Fire config
- aqm_rc_fire_file_fp=os.path.join(
- COMIN,
- f"{AQM_FIRE_FILE_PREFIX}_{yyyymmdd}_t{hh}z{AQM_FIRE_FILE_SUFFIX}"
- )
+ aqm_rc_fire_file_fp = os.path.join(
+ COMIN, f"{AQM_FIRE_FILE_PREFIX}_{yyyymmdd}_t{hh}z{AQM_FIRE_FILE_SUFFIX}"
+ )
# Dust config
- aqm_rc_dust_file_fp=os.path.join(
- FIXaqm,"dust",
- f"{AQM_DUST_FILE_PREFIX}_{PREDEF_GRID_NAME}{AQM_DUST_FILE_SUFFIX}",
- )
+ aqm_rc_dust_file_fp = os.path.join(
+ FIXaqm,
+ "dust",
+ f"{AQM_DUST_FILE_PREFIX}_{PREDEF_GRID_NAME}{AQM_DUST_FILE_SUFFIX}",
+ )
# Canopy config
- aqm_rc_canopy_file_fp=os.path.join(
- FIXaqm,"canopy",PREDEF_GRID_NAME,
+ aqm_rc_canopy_file_fp = os.path.join(
+ FIXaqm,
+ "canopy",
+ PREDEF_GRID_NAME,
f"{AQM_CANOPY_FILE_PREFIX}.{mm}{AQM_CANOPY_FILE_SUFFIX}",
- )
+ )
#
- #-----------------------------------------------------------------------
+ # -----------------------------------------------------------------------
#
# Create a multiline variable that consists of a yaml-compliant string
# specifying the values that the jinja variables in the template
# AQM_RC_TMPL_FN file should be set to.
#
- #-----------------------------------------------------------------------
+ # -----------------------------------------------------------------------
#
settings = {
"do_aqm_dust": DO_AQM_DUST,
@@ -101,7 +108,7 @@ def create_aqm_rc_file(cdate, run_dir, init_concentrations):
"aqm_rc_dust_file_fp": aqm_rc_dust_file_fp,
"aqm_rc_canopy_file_fp": aqm_rc_canopy_file_fp,
"aqm_rc_product_fn": AQM_RC_PRODUCT_FN,
- "aqm_rc_product_frequency": AQM_RC_PRODUCT_FREQUENCY
+ "aqm_rc_product_frequency": AQM_RC_PRODUCT_FREQUENCY,
}
settings_str = cfg_to_yaml_str(settings)
@@ -116,48 +123,56 @@ def create_aqm_rc_file(cdate, run_dir, init_concentrations):
verbose=VERBOSE,
)
#
- #-----------------------------------------------------------------------
+ # -----------------------------------------------------------------------
#
# Call a python script to generate the experiment's actual AQM_RC_FN
# file from the template file.
#
- #-----------------------------------------------------------------------
+ # -----------------------------------------------------------------------
#
render(
- input_file = AQM_RC_TMPL_FP,
- output_file = aqm_rc_fp,
- values_src = settings,
+ input_file=AQM_RC_TMPL_FP,
+ output_file=aqm_rc_fp,
+ values_src=settings,
)
return True
-def parse_args(argv):
- """ Parse command line arguments"""
+
+def _parse_args(argv):
+ """Parses command line arguments"""
parser = argparse.ArgumentParser(description="Creates aqm.rc file.")
- parser.add_argument("-r", "--run-dir",
- dest="run_dir",
- required=True,
- help="Run directory.")
+ parser.add_argument("-r", "--run-dir", dest="run_dir", required=True, help="Run directory.")
- parser.add_argument("-c", "--cdate",
- dest="cdate",
- required=True,
- help="Date string in YYYYMMDD format.")
+ parser.add_argument(
+ "-c",
+ "--cdate",
+ dest="cdate",
+ required=True,
+ help="Date string in YYYYMMDD format.",
+ )
- parser.add_argument("-i", "--init_concentrations",
- dest="init_concentrations",
- required=True,
- help="Flag for initial concentrations.")
+ parser.add_argument(
+ "-i",
+ "--init_concentrations",
+ dest="init_concentrations",
+ required=True,
+ help="Flag for initial concentrations.",
+ )
- parser.add_argument("-p", "--path-to-defns",
- dest="path_to_defns",
- required=True,
- help="Path to var_defns file.")
+ parser.add_argument(
+ "-p",
+ "--path-to-defns",
+ dest="path_to_defns",
+ required=True,
+ help="Path to var_defns file.",
+ )
return parser.parse_args(argv)
+
if __name__ == "__main__":
- args = parse_args(sys.argv[1:])
+ args = _parse_args(sys.argv[1:])
cfg = load_yaml_config(args.path_to_defns)
cfg = flatten_dict(cfg)
import_vars(dictionary=cfg)
diff --git a/ush/create_diag_table_file.py b/ush/create_diag_table_file.py
index 113953172d..466bd3e94b 100644
--- a/ush/create_diag_table_file.py
+++ b/ush/create_diag_table_file.py
@@ -1,8 +1,7 @@
#!/usr/bin/env python3
"""
-Function to create a diag_table file for the FV3 model using a
-template.
+Creates a ``diag_table`` file for the FV3 model using a template
"""
import argparse
import os
@@ -21,12 +20,12 @@
def create_diag_table_file(run_dir):
- """Creates a diagnostic table file for each cycle to be run
+ """Creates an FV3 diagnostic table (``diag_table``) file for each cycle to be run
Args:
- run_dir: run directory
+ run_dir (str): Run directory
Returns:
- Boolean
+ True
"""
print_input_args(locals())
@@ -81,8 +80,8 @@ def create_diag_table_file(run_dir):
return True
-def parse_args(argv):
- """Parse command line arguments"""
+def _parse_args(argv):
+ """Parses command line arguments"""
parser = argparse.ArgumentParser(description="Creates diagnostic table file.")
parser.add_argument(
@@ -101,7 +100,7 @@ def parse_args(argv):
if __name__ == "__main__":
- args = parse_args(sys.argv[1:])
+ args = _parse_args(sys.argv[1:])
cfg = load_yaml_config(args.path_to_defns)
cfg = flatten_dict(cfg)
import_vars(dictionary=cfg)
diff --git a/ush/create_model_configure_file.py b/ush/create_model_configure_file.py
index b8767f635a..9b430b83ae 100644
--- a/ush/create_model_configure_file.py
+++ b/ush/create_model_configure_file.py
@@ -1,7 +1,6 @@
#!/usr/bin/env python3
"""
-Create a model_configure file for the FV3 forecast model from a
-template.
+Creates a ``model_configure`` file for the FV3 forecast model from a template.
"""
import argparse
import os
@@ -24,19 +23,20 @@
def create_model_configure_file(
cdate, fcst_len_hrs, fhrot, run_dir, sub_hourly_post, dt_subhourly_post_mnts, dt_atmos
): #pylint: disable=too-many-arguments
- """Creates a model configuration file in the specified
- run directory
+ """Creates a model configuration file in the specified run directory
Args:
- cdate: cycle date
- fcst_len_hrs: forecast length in hours
- fhrot: forecast hour at restart
- run_dir: run directory
- sub_hourly_post
- dt_subhourly_post_mnts
- dt_atmos
+ cdate (int): Cycle date in ``YYYYMMDD`` format
+ fcst_len_hrs (int): Forecast length in hours
+ fhrot (int): Forecast hour at restart
+ run_dir (str): Run directory
+ sub_hourly_post (bool): Sets subhourly post to either ``True`` or ``False``
+ dt_subhourly_post_mnts (int): Subhourly forecast model output and post-processing
+ frequency in minutes
+ dt_atmos (int): Atmospheric forecast model's main timestep in seconds
+
Returns:
- Boolean
+ True
"""
print_input_args(locals())
@@ -87,7 +87,7 @@ def create_model_configure_file(
}
#
# If the write-component is to be used, then specify a set of computational
- # parameters and a set of grid parameters. The latter depends on the type
+ # parameters and a set of grid parameters. The latter depends on the type
# (coordinate system) of the grid that the write-component will be using.
#
if QUILTING:
@@ -161,22 +161,22 @@ def create_model_configure_file(
)
#
# If sub_hourly_post is set to "TRUE", then the forecast model must be
- # directed to generate output files on a sub-hourly interval. Do this
+ # directed to generate output files on a sub-hourly interval. Do this
# by specifying the output interval in the model configuration file
    # (MODEL_CONFIG_FN) in units of number of forecast model time steps (nsout).
# nsout is calculated using the user-specified output time interval
# dt_subhourly_post_mnts (in units of minutes) and the forecast model's
- # main time step dt_atmos (in units of seconds). Note that nsout is
+ # main time step dt_atmos (in units of seconds). Note that nsout is
# guaranteed to be an integer because the experiment generation scripts
# require that dt_subhourly_post_mnts (after conversion to seconds) be
- # evenly divisible by dt_atmos. Also, in this case, the variable output_fh
+ # evenly divisible by dt_atmos. Also, in this case, the variable output_fh
# [which specifies the output interval in hours;
# see the jinja model_config template file] is set to 0, although this
- # doesn't matter because any positive of nsout will override output_fh.
+ # doesn't matter because any positive value of nsout will override output_fh.
#
# If sub_hourly_post is set to "FALSE", then the workflow is hard-coded
# (in the jinja model_config template file) to direct the forecast model
- # to output files every hour. This is done by setting (1) output_fh to 1
+ # to output files every hour. This is done by setting (1) output_fh to 1
# here, and (2) nsout to -1 here which turns off output by time step interval.
#
# Note that the approach used here of separating how hourly and subhourly
@@ -227,7 +227,7 @@ def create_model_configure_file(
return True
-def parse_args(argv):
+def _parse_args(argv):
"""Parse command line arguments"""
parser = argparse.ArgumentParser(description="Creates model configuration file.")
@@ -295,7 +295,7 @@ def parse_args(argv):
if __name__ == "__main__":
- args = parse_args(sys.argv[1:])
+ args = _parse_args(sys.argv[1:])
cfg = load_yaml_config(args.path_to_defns)
cfg = flatten_dict(cfg)
import_vars(dictionary=cfg)
diff --git a/ush/create_ufs_configure_file.py b/ush/create_ufs_configure_file.py
index 3fd82f488b..dc6a43420d 100644
--- a/ush/create_ufs_configure_file.py
+++ b/ush/create_ufs_configure_file.py
@@ -1,8 +1,7 @@
#!/usr/bin/env python3
"""
-Function to create a UFS configuration file for the FV3 forecast
-model(s) from a template.
+Creates a UFS configuration file for the FV3 forecast model(s) from a template.
"""
import argparse
@@ -21,13 +20,12 @@
)
def create_ufs_configure_file(run_dir):
- """ Creates a ufs configuration file in the specified
- run directory
+ """ Creates a UFS configuration file in the specified run directory
Args:
- run_dir: run directory
+ run_dir (str): Run directory
Returns:
- Boolean
+ True
"""
print_input_args(locals())
@@ -93,7 +91,7 @@ def create_ufs_configure_file(run_dir):
)
return True
-def parse_args(argv):
+def _parse_args(argv):
""" Parse command line arguments"""
parser = argparse.ArgumentParser(
description='Creates UFS configuration file.'
@@ -112,7 +110,7 @@ def parse_args(argv):
return parser.parse_args(argv)
if __name__ == "__main__":
- args = parse_args(sys.argv[1:])
+ args = _parse_args(sys.argv[1:])
cfg = load_yaml_config(args.path_to_defns)
cfg = flatten_dict(cfg)
import_vars(dictionary=cfg)
diff --git a/ush/generate_FV3LAM_wflow.py b/ush/generate_FV3LAM_wflow.py
index c671a69da8..8990f04c6d 100755
--- a/ush/generate_FV3LAM_wflow.py
+++ b/ush/generate_FV3LAM_wflow.py
@@ -1,8 +1,8 @@
#!/usr/bin/env python3
"""
-User interface to create an experiment directory consistent with the
-user-defined config.yaml file.
+User interface to create an experiment directory consistent with the user-defined ``config.yaml``
+file.
"""
# pylint: disable=invalid-name
@@ -43,11 +43,12 @@ def generate_FV3LAM_wflow(
ushdir,
logfile: str = "log.generate_FV3LAM_wflow",
debug: bool = False) -> str:
- """Function to setup a forecast experiment and create a workflow
- (according to the parameters specified in the config file)
+ """
+ Sets up a forecast experiment and creates a workflow (according to the parameters specified
+ in the configuration file)
Args:
- ushdir (str) : The full path of the ush/ directory where this script is located
+ ushdir (str) : The full path of the ``ush/`` directory where this script is located
logfile (str) : The name of the file where logging is written
debug (bool): Enable extra output for debugging
Returns:
@@ -75,7 +76,7 @@ def generate_FV3LAM_wflow(
#
# -----------------------------------------------------------------------
#
- # Set the full path to the experiment's rocoto workflow xml file. This
+ # Set the full path to the experiment's rocoto workflow xml file. This
# file will be placed at the top level of the experiment directory and
# then used by rocoto to run the workflow.
#
@@ -726,10 +727,16 @@ def generate_FV3LAM_wflow(
def setup_logging(logfile: str = "log.generate_FV3LAM_wflow", debug: bool = False) -> None:
"""
- Sets up logging, printing high-priority (INFO and higher) messages to screen, and printing all
- messages with detailed timing and routine info in the specified text file.
+ Sets up logging, printing high-priority (INFO and higher) messages to screen and printing all
+ messages with detailed timing and routine info in the specified text file. If ``debug = True``,
+ print all messages to both screen and log file.
+
+ Args:
+ logfile (str) : The name of the file where logging information is written
+ debug (bool): Enable extra output for debugging
+ Returns:
+ None
- If debug = True, print all messages to both screen and log file.
"""
logging.getLogger().setLevel(logging.DEBUG)
diff --git a/ush/get_crontab_contents.py b/ush/get_crontab_contents.py
index 6b0548141c..7f08346b5d 100644
--- a/ush/get_crontab_contents.py
+++ b/ush/get_crontab_contents.py
@@ -15,30 +15,19 @@ def get_crontab_contents(called_from_cron, machine, debug):
"""
This function returns the contents of the user's cron table, as well as the command used to
manipulate the cron table. Typically this latter value will be `crontab`, but on some
- platforms the version or location of this may change depending on other circumstances, e.g. on
- Cheyenne, this depends on whether a script that wants to call `crontab` is itself being called
- from a cron job.
+ platforms the version or location of this may change depending on other circumstances.
Args:
- called_from_cron (bool): Set this to True if script is called from within a crontab
+ called_from_cron (bool): Set this value to ``True`` if script is called from within a
+ crontab
machine (str) : The name of the current machine
- debug (bool): True will give more verbose output
+ debug (bool): ``True`` will give more verbose output
Returns:
crontab_cmd (str) : String containing the "crontab" command for this machine
crontab_contents (str) : String containing the contents of the user's cron table.
"""
- #
- # On Cheyenne, simply typing "crontab" will launch the crontab command
- # at "/glade/u/apps/ch/opt/usr/bin/crontab". This is a containerized
- # version of crontab that will not work if called from scripts that are
- # themselves being called as cron jobs. In that case, we must instead
- # call the system version of crontab at /usr/bin/crontab.
- #
crontab_cmd = "crontab"
- if machine == "CHEYENNE":
- if called_from_cron:
- crontab_cmd = "/usr/bin/crontab"
print_info_msg(
f"""
@@ -69,8 +58,17 @@ def get_crontab_contents(called_from_cron, machine, debug):
return crontab_cmd, crontab_contents
-def add_crontab_line(called_from_cron, machine, crontab_line, exptdir, debug):
- """Add crontab line to cron table"""
+def add_crontab_line(called_from_cron, machine, crontab_line, exptdir, debug) -> None:
+ """Adds crontab line to cron table
+
+ Args:
+ called_from_cron (bool): Set this value to ``True`` if script is called from within
+ a crontab.
+ machine (str) : The name of the current machine
+ crontab_line (str) : Line to be added to cron table
+ exptdir (str) : Path to the experiment directory
+ debug (bool): ``True`` will give more verbose output
+ """
#
# Make a backup copy of the user's crontab file and save it in a file.
@@ -135,9 +133,17 @@ def add_crontab_line(called_from_cron, machine, crontab_line, exptdir, debug):
)
-def delete_crontab_line(called_from_cron, machine, crontab_line, debug):
- """Delete crontab line after job is complete i.e. either SUCCESS/FAILURE
- but not IN PROGRESS status"""
+def delete_crontab_line(called_from_cron, machine, crontab_line, debug) -> None:
+ """Deletes crontab line after job is complete i.e., either SUCCESS/FAILURE
+ but not IN PROGRESS status
+
+ Args:
+ called_from_cron (bool): Set this value to ``True`` if script is called from within
+ a crontab
+ machine (str) : The name of the current machine
+ crontab_line (str) : Line to be deleted from cron table
+ debug (bool): ``True`` will give more verbose output
+ """
#
# Get the full contents of the user's cron table.
@@ -176,7 +182,7 @@ def delete_crontab_line(called_from_cron, machine, crontab_line, debug):
)
-def parse_args(argv):
+def _parse_args(argv):
"""Parse command line arguments for deleting crontab line.
This is needed because it is called from shell script.
If 'delete' argument is not passed, print the crontab contents
@@ -218,7 +224,7 @@ def parse_args(argv):
)
# Check that inputs are correct and consistent
     args = parser.parse_args(argv)
if args.remove:
if args.line is None:
@@ -228,7 +234,7 @@ def parse_args(argv):
if __name__ == "__main__":
- args = parse_args(sys.argv[1:])
+ args = _parse_args(sys.argv[1:])
if args.remove:
delete_crontab_line(args.called_from_cron,args.machine,args.line,args.debug)
else:
diff --git a/ush/link_fix.py b/ush/link_fix.py
index f0d103d8ea..1e4a7c6254 100755
--- a/ush/link_fix.py
+++ b/ush/link_fix.py
@@ -35,25 +35,24 @@ def link_fix(
sfc_climo_fields,
**kwargs,
):
- """This file defines a function that links fix files to the target
- directory for a given SRW experiment. Only links files for one group
- at a time.
+ """Links fix files to the target directory for a given SRW experiment.
+ It only links files for one group at a time.
Args:
- cfg_d: dictionary of settings
- file_group: could be on of ["grid", "orog", "sfc_climo"]
- source_dir: the path to directory where the file_group fix files
- are linked from
- target_dir: the directory where the fix files should be linked to
- dot_or_uscore: str containing either a dot or an underscore
- nhw: grid parameter setting
- constants: dict containing the constants used by SRW
- run_task: boolean value indicating whether the task is to be run
- in the experiment
- climo_fields: list of fields needed for climo
+ cfg_d (dict): Dictionary of configuration settings
+ file_group (str) : Choice of [``"grid"``, ``"orog"``, ``"sfc_climo"``]
+ source_dir (str) : Path to directory that the ``file_group`` fix files are linked from
+ target_dir (str) : Directory that the fix files should be linked to
+ dot_or_uscore (str) : Either a dot (``.``) or an underscore (``_``)
+ nhw (int) : Wide halo width (grid parameter setting: N=number of cells,
+ H=halo, W=wide halo)
+ constants (dict): Dictionary containing the constants used by the SRW App
+ run_task (bool): Whether the task is to be run in the experiment
+        sfc_climo_fields (list): List of fields needed for surface climatology (see
+                                 ``fixed_files_mapping.yaml`` for details)
Returns:
- a string: resolution
+ res (str): File/grid resolution
"""
print_input_args(locals())
@@ -99,9 +98,9 @@ def link_fix(
# 1) "C*.mosaic.halo${NHW}.nc"
# This mosaic file for the wide-halo grid (i.e. the grid with a ${NHW}-
# cell-wide halo) is needed as an input to the orography filtering
- # executable in the orography generation task. The filtering code
+ # executable in the orography generation task. The filtering code
# extracts from this mosaic file the name of the file containing the
- # grid on which it will generate filtered topography. Note that the
+ # grid on which it will generate filtered topography. Note that the
# orography generation and filtering are both performed on the wide-
# halo grid. The filtered orography file on the wide-halo grid is then
# shaved down to obtain the filtered orography files with ${NH3}- and
@@ -256,7 +255,7 @@ def link_fix(
if not res:
print_err_msg_exit(
f"""
- The resolution could not be extracted from the current file's name. The
+ The resolution could not be extracted from the current file's name. The
full path to the file (fp) is:
fp = '{fp}'
This may be because fp contains the * globbing character, which would
@@ -376,8 +375,8 @@ def link_fix(
return res
-def parse_args(argv):
- """Parse command line arguments"""
+def _parse_args(argv):
+ """Parses command line arguments"""
parser = argparse.ArgumentParser(
description="Creates symbolic links to FIX directories."
)
@@ -402,7 +401,7 @@ def parse_args(argv):
if __name__ == "__main__":
- args = parse_args(sys.argv[1:])
+ args = _parse_args(sys.argv[1:])
cfg = load_yaml_config(args.path_to_defns)
link_fix(
verbose=cfg["workflow"]["VERBOSE"],
diff --git a/ush/mrms_pull_topofhour.py b/ush/mrms_pull_topofhour.py
index 310c5d97f9..30b6281503 100644
--- a/ush/mrms_pull_topofhour.py
+++ b/ush/mrms_pull_topofhour.py
@@ -7,7 +7,21 @@
import gzip
def main():
+ """Identifies the MRMS file closest to the valid time of the forecast.
+    METplus is configured to look for an MRMS composite reflectivity file
+ for the valid time of the forecast being verified; since MRMS composite
+ reflectivity files do not always exactly match the valid time, this
+ script is used to identify and rename the MRMS composite reflectivity
+ file to match the valid time of the forecast.
+ Returns:
+ None
+
+ Raises:
+ FileNotFoundError: If no valid file was found within 15 minutes of the valid
+ time of the forecast
+
+ """
#Parse input arguments
parser = argparse.ArgumentParser()
parser.add_argument('-v', '--valid_time', type=str, required=True,
diff --git a/ush/python_utils/check_for_preexist_dir_file.py b/ush/python_utils/check_for_preexist_dir_file.py
index 79666f8288..728eb260b3 100644
--- a/ush/python_utils/check_for_preexist_dir_file.py
+++ b/ush/python_utils/check_for_preexist_dir_file.py
@@ -9,14 +9,17 @@
def check_for_preexist_dir_file(path, method):
- """Check for a preexisting directory or file and, if present, deal with it
+ """Checks for a preexisting directory or file and, if present, deals with it
according to the specified method
Args:
- path: path to directory
- method: could be any of [ 'delete', 'reuse', 'rename', 'quit' ]
+ path (str): Path to directory
+ method (str): Could be any of [ ``'delete'``, ``'reuse'``, ``'rename'``, ``'quit'`` ]
Returns:
None
+ Raises:
+ ValueError: If an invalid method for dealing with a pre-existing directory is specified
+ FileExistsError: If the specified directory or file already exists
"""
try:
diff --git a/ush/python_utils/check_var_valid_value.py b/ush/python_utils/check_var_valid_value.py
index 0c9bcc49c6..c862dd2260 100644
--- a/ush/python_utils/check_var_valid_value.py
+++ b/ush/python_utils/check_var_valid_value.py
@@ -2,13 +2,15 @@
def check_var_valid_value(var, values):
- """Check if specified variable has a valid value
+ """Checks whether the specified variable has a valid value
Args:
- var: the variable
- values: list of valid values
+ var: The variable
+ values (list): Valid values
Returns:
- True: if var has valid value, exit(1) otherwise
+        True: If ``var`` has a valid value
+ Raises:
+ ValueError: If ``var`` has an invalid value
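+
+    A minimal usage sketch (the values are illustrative):
+
+    .. code-block:: python
+
+        check_var_valid_value("community", ["nco", "community"])   # returns True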
"""
if not var:
diff --git a/ush/python_utils/config_parser.py b/ush/python_utils/config_parser.py
index ff9a0c277c..eb9efb1c27 100644
--- a/ush/python_utils/config_parser.py
+++ b/ush/python_utils/config_parser.py
@@ -1,8 +1,9 @@
#!/usr/bin/env python3
"""
-This file provides utilities for processing different configuration file formats.
+This file provides utilities for processing different configuration (config) file formats.
Supported formats include:
+
a) YAML
b) JSON
c) SHELL
@@ -10,7 +11,7 @@
e) XML
Typical usage involves first loading the config file, then using the dictionary
-returnded by load_config to make queries.
+returned by ``load_config`` to make queries.
"""
@@ -43,7 +44,14 @@
# YAML
##########
def load_yaml_config(config_file):
- """Safe load a yaml file"""
+ """
+    Safely loads a YAML file
+
+ Args:
+ config_file: Configuration file to parse
+ Returns:
+ cfg: A Python object containing the config file data
+ """
with open(config_file, "r") as f:
cfg = yaml.safe_load(f)
@@ -54,24 +62,34 @@ def load_yaml_config(config_file):
try:
class custom_dumper(yaml.Dumper):
- """Custom yaml dumper to correct list indentation"""
+ """
+ Custom YAML dumper to correct list indentation
+
+ Args:
+ yaml.Dumper: A YAML Dumper object to custom format
+
+ Returns:
+ A custom-formatted Dumper object
+ """
- def increase_indent(self, flow=False, indentless=False):
- return super(custom_dumper, self).increase_indent(flow, False)
+        def increase_indent(self, flow=False, indentless=False):
+            # The name must remain ``increase_indent`` so that this overrides yaml.Dumper's method
+            return super(custom_dumper, self).increase_indent(flow, False)
- def str_presenter(dumper, data):
+ def _str_presenter(dumper, data):
if len(data.splitlines()) > 1:
return dumper.represent_scalar("tag:yaml.org,2002:str", data, style="|")
return dumper.represent_scalar("tag:yaml.org,2002:str", data)
- yaml.add_representer(str, str_presenter)
+ yaml.add_representer(str, _str_presenter)
except NameError:
pass
def cfg_to_yaml_str(cfg):
- """Get contents of config file as a yaml string"""
+ """
+ Gets contents of config file as a YAML string
+ """
return yaml.dump(
cfg, sort_keys=False, default_flow_style=False
@@ -79,16 +97,19 @@ def cfg_to_yaml_str(cfg):
def cycstr(loader, node):
- ''' Returns a cyclestring Element whose content corresponds to the
- input node argument '''
+ """
+ Returns a cyclestring element whose content corresponds to the
+ input node argument
+ """
arg = loader.construct_scalar(node)
return f'{arg}'
def include(filepaths):
- ''' Returns a dictionary that includes the contents of the referenced
- YAML file(s). '''
+ """
+ Returns a dictionary that includes the contents of the referenced YAML file(s).
+ """
srw_path = pathlib.Path(__file__).resolve().parents[0].parents[0]
@@ -104,15 +125,18 @@ def include(filepaths):
return yaml.dump(cfg, sort_keys=False)
def join_str(loader, node):
- """Custom tag hangler to join strings"""
+ """
+    Custom tag handler to join strings
+ """
seq = loader.construct_sequence(node)
return "".join([str(i) for i in seq])
def startstopfreq(loader, node):
-
- ''' Returns a Rocoto-formatted string for the contents of a cycledef
- tag. Assume that the items in the node are env variables, and return
- a Rocoto-formatted string'''
+ """
+ Returns a Rocoto-formatted string for the contents of a ``cycledef``
+ tag. Assumes that the items in the node are environment variables, and returns
+ a Rocoto-formatted string.
+ """
args = loader.construct_sequence(node)
@@ -122,7 +146,7 @@ def startstopfreq(loader, node):
return f'{start}00 {stop}00 {freq}:00:00'
-def nowtimestamp(loader, node):
+def _nowtimestamp(loader, node):
return "id_" + str(int(datetime.datetime.now().timestamp()))
try:
@@ -130,27 +154,31 @@ def nowtimestamp(loader, node):
yaml.add_constructor("!include", include, Loader=yaml.SafeLoader)
yaml.add_constructor("!join_str", join_str, Loader=yaml.SafeLoader)
yaml.add_constructor("!startstopfreq", startstopfreq, Loader=yaml.SafeLoader)
- yaml.add_constructor("!nowtimestamp", nowtimestamp ,Loader=yaml.SafeLoader)
+ yaml.add_constructor("!nowtimestamp", _nowtimestamp ,Loader=yaml.SafeLoader)
except NameError:
pass
def path_join(arg):
- """A filter for jinja2 that joins paths"""
+ """
+ A filter for jinja2 that joins paths
+ """
return os.path.join(*arg)
def days_ago(arg):
- """A filter for jinja2 that gives us a date string for x number of
- days ago"""
+ """
+ A filter for jinja2 that gives us a date string for x number of
+ days ago
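+
+    For example, if today were 2024-04-08, ``days_ago(1)`` would return ``"2024040700"``.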
+ """
return (datetime.date.today() -
datetime.timedelta(days=arg)).strftime("%Y%m%d00")
def extend_yaml(yaml_dict, full_dict=None, parent=None):
"""
- Updates yaml_dict inplace by rendering any existing Jinja2 templates
+ Updates ``yaml_dict`` in place by rendering any existing Jinja2 templates
that exist in a value.
"""
@@ -250,7 +278,9 @@ def extend_yaml(yaml_dict, full_dict=None, parent=None):
# JSON
##########
def load_json_config(config_file):
- """Load json config file"""
+ """
+ Loads JSON config file
+ """
try:
with open(config_file, "r") as f:
@@ -262,7 +292,9 @@ def load_json_config(config_file):
def cfg_to_json_str(cfg):
- """Get contents of config file as a json string"""
+ """
+ Gets contents of config file as a JSON string
+ """
return json.dumps(cfg, sort_keys=False, indent=4) + "\n"
@@ -271,7 +303,9 @@ def cfg_to_json_str(cfg):
# SHELL
##########
def load_shell_as_ini_config(file_name, return_string=1):
- """Load shell config file with embedded structure in comments"""
+ """
+ Loads shell config file with embedded structure in comments
+ """
# read contents and replace comments as sections
with open(file_name, "r") as file:
@@ -294,13 +328,13 @@ def load_shell_as_ini_config(file_name, return_string=1):
def load_shell_config(config_file, return_string=0):
- """Loads old style shell config files.
- We source the config script in a subshell and gets the variables it sets
+ """Loads old-style shell config files.
+ We source the config script in a subshell and get the variables it sets
Args:
- config_file: path to config file script
+ config_file: Path to config file script
Returns:
- dictionary that should be equivalent to one obtained from parsing a yaml file.
+ Dictionary that should be equivalent to one obtained from parsing a YAML file.
"""
# First try to load it as a structured shell config file
@@ -339,7 +373,9 @@ def load_shell_config(config_file, return_string=0):
def cfg_to_shell_str(cfg, kname=None):
- """Get contents of config file as shell script string"""
+ """
+ Gets contents of config file as shell script string
+ """
shell_str = ""
for k, v in cfg.items():
@@ -369,7 +405,9 @@ def cfg_to_shell_str(cfg, kname=None):
# INI
##########
def load_ini_config(config_file, return_string=0):
- """Load a config file with a format similar to Microsoft's INI files"""
+ """
+ Loads a config file with a format similar to Microsoft's INI files
+ """
if not os.path.exists(config_file):
raise FileNotFoundError(
@@ -391,7 +429,9 @@ def load_ini_config(config_file, return_string=0):
def get_ini_value(config, section, key):
- """Finds the value of a property in a given section"""
+ """
+ Finds the value of a property in a given section
+ """
if not section in config:
raise KeyError(f"Section not found: {section}")
@@ -402,7 +442,9 @@ def get_ini_value(config, section, key):
def cfg_to_ini_str(cfg, kname=None):
- """Get contents of config file as ini string"""
+ """
+ Gets contents of config file as INI string
+ """
ini_str = ""
for k, v in cfg.items():
@@ -427,7 +469,9 @@ def cfg_to_ini_str(cfg, kname=None):
# XML
##########
def xml_to_dict(root, return_string):
- """Convert an xml tree to dictionary"""
+ """
+ Converts an XML tree to dictionary
+ """
cfg = {}
for child in root:
@@ -440,7 +484,7 @@ def xml_to_dict(root, return_string):
def dict_to_xml(d, tag):
- """Convert dictionary to an xml tree"""
+ """Converts dictionary to an XML tree"""
elem = ET.Element(tag)
for k, v in d.items():
@@ -456,7 +500,9 @@ def dict_to_xml(d, tag):
def load_xml_config(config_file, return_string=0):
- """Load xml config file"""
+ """
+ Loads XML config file
+ """
tree = ET.parse(config_file)
root = tree.getroot()
@@ -465,7 +511,9 @@ def load_xml_config(config_file, return_string=0):
def cfg_to_xml_str(cfg):
- """Get contents of config file as a xml string"""
+ """
+ Gets contents of config file as a XML string
+ """
root = dict_to_xml(cfg, "root")
r = ET.tostring(root, encoding="unicode")
@@ -479,11 +527,12 @@ def cfg_to_xml_str(cfg):
# CONFIG utils
##################
def flatten_dict(dictionary, keys=None):
- """Flatten a recursive dictionary (e.g.yaml/json) to be one level deep
+ """
+    Flattens a recursive dictionary (e.g., YAML/JSON) to be one level deep
Args:
- dictionary: the source dictionary
- keys: list of keys on top level whose contents to flatten, if None all of them
+ dictionary: The source dictionary
+ keys (list): Keys on top level whose contents to flatten; if ``None``, then all of them
Returns:
A one-level deep dictionary for the selected set of keys
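+
+    For example (an illustrative sketch), flattening ``{"a": {"b": 1}, "c": 2}`` would
+    yield ``{"b": 1, "c": 2}``.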
"""
@@ -499,11 +548,12 @@ def flatten_dict(dictionary, keys=None):
def structure_dict(dict_o, dict_t):
- """Structure a dictionary based on a template dictionary
+ """
+ Structures a dictionary based on a template dictionary
Args:
- dict_o: dictionary to structure (flat one level structure)
- dict_t: template dictionary used for structuring
+ dict_o: Dictionary to structure (flat one level structure)
+ dict_t: Template dictionary used for structuring
Returns:
A dictionary with contents of dict_o following structure of dict_t
"""
@@ -519,7 +569,7 @@ def structure_dict(dict_o, dict_t):
def update_dict(dict_o, dict_t, provide_default=False):
- """Update a dictionary with another
+ """Updates a dictionary with another
Args:
dict_o: flat dictionary used as source
@@ -549,15 +599,16 @@ def update_dict(dict_o, dict_t, provide_default=False):
def check_structure_dict(dict_o, dict_t):
- """Check if a dictionary's structure follows a template.
+ """
+ Checks if a dictionary's structure follows a template.
The invalid entries are returned as a dictionary.
- If all entries are valid, returns an empty dictionary
+ If all entries are valid, returns an empty dictionary.
Args:
- dict_o: target dictionary
- dict_t: template dictionary to compare structure to
+ dict_o (dict): Target dictionary
+ dict_t (dict): Template dictionary to compare structure to
Returns:
- dict: Invalid key-value pairs.
+ dict: Invalid key-value pairs.
"""
inval = {}
for k, v in dict_o.items():
@@ -573,10 +624,12 @@ def check_structure_dict(dict_o, dict_t):
def filter_dict(dict_o, keys_regex):
- """Filter dictionary keys based on a list of keys
+ """
+ Filters dictionary keys based on a list of keys
+
Args:
- dict_o: the source dictionary
- keys_regex: list of keys to retain (could be regex exp.)
+ dict_o: The source dictionary
+        keys_regex (list): Keys to retain (may be regex expressions)
"""
keys = []
@@ -591,7 +644,12 @@ def filter_dict(dict_o, keys_regex):
# CONFIG loader
##################
def load_config_file(file_name, return_string=0):
- """Load config file based on file name extension"""
+ """
+ Loads config file based on file name extension
+
+ Raises:
+ ValueError: If an unrecognized file extension is used.
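+
+    A minimal usage sketch (the file name is illustrative); the loader is chosen by the
+    file extension:
+
+    .. code-block:: python
+
+        cfg = load_config_file("config.yaml")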
+ """
ext = os.path.splitext(file_name)[1][1:]
if ext == "sh":
@@ -619,7 +677,9 @@ def load_config_file(file_name, return_string=0):
# CONFIG main
##################
def cfg_main():
- """Main function for converting and formatting between different config file formats"""
+ """
+    Converts between different config file formats and formats their contents
+ """
parser = argparse.ArgumentParser(
description="Utility for managing different config formats."
diff --git a/ush/python_utils/create_symlink_to_file.py b/ush/python_utils/create_symlink_to_file.py
index 363a49fa40..5d64000ec6 100644
--- a/ush/python_utils/create_symlink_to_file.py
+++ b/ush/python_utils/create_symlink_to_file.py
@@ -8,12 +8,12 @@
def create_symlink_to_file(target, symlink, relative=True):
- """Create a symbolic link to the specified target file.
+ """Creates a symbolic link to the specified target file.
Args:
- target: target file
- symlink: symbolic link to target file
- relative: optional argument to specify relative symoblic link creation
+ target (str) : Target file
+ symlink (str) : Symbolic link to target file
+ relative (bool): Optional argument to specify relative symbolic link creation
Returns:
None
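+
+    A minimal usage sketch (the paths are illustrative):
+
+    .. code-block:: python
+
+        create_symlink_to_file("/path/to/target.nc", "/path/to/link.nc", relative=False)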
"""
diff --git a/ush/python_utils/define_macos_utilities.py b/ush/python_utils/define_macos_utilities.py
index 4a73020850..f171b4c55d 100644
--- a/ush/python_utils/define_macos_utilities.py
+++ b/ush/python_utils/define_macos_utilities.py
@@ -8,7 +8,13 @@
def check_darwin(cmd):
- """Check if darwin command exists"""
+ """Checks if Darwin command exists
+
+ Args:
+ cmd: The command to check (e.g., ``gsed``, ``gln``)
+ Returns:
+ True if successful; otherwise prints error
+ """
(err, _, _) = run_command(f"command -v {cmd}")
if err != 0:
@@ -24,8 +30,8 @@ def check_darwin(cmd):
def define_macos_utilities():
- """Set some environment variables for Darwin systems differently
- The variables are: READLINK, SED, DATE_UTIL and LN_UTIL
+ """Sets some environment variables for Darwin systems differently
+ The variables are: ``READLINK``, ``SED``, ``DATE_UTIL`` and ``LN_UTIL``.
"""
if os.uname()[0] == "Darwin":
diff --git a/ush/python_utils/environment.py b/ush/python_utils/environment.py
index c82c0a4fe9..427e190b9b 100644
--- a/ush/python_utils/environment.py
+++ b/ush/python_utils/environment.py
@@ -8,12 +8,12 @@
def str_to_date(s):
- """Get python datetime object from string.
+ """Gets Python datetime object from string.
Args:
- s: a string
+ s (str): A string
Returns:
- datetime object or None
+ Datetime object or None
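+
+    A minimal usage sketch (the date string is illustrative):
+
+    .. code-block:: python
+
+        str_to_date("202404080930")   # datetime.datetime(2024, 4, 8, 9, 30) if parsed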
"""
v = None
try:
@@ -32,31 +32,30 @@ def str_to_date(s):
def date_to_str(d, format="%Y%m%d%H%M"):
- """Get string from python datetime object.
- By default it converts to YYYYMMDDHHMM format unless
- told otherwise by passing a different format
+ """Gets string from Python datetime object.
+ By default it converts to ``YYYYMMDDHHmm`` format unless told otherwise by passing a different format.
Args:
- d: datetime object
+ d (datetime.datetime): Datetime object
+        format (str): Format of the datetime string; default is ``"%Y%m%d%H%M"``
+                      (see `format codes
+                      <https://docs.python.org/3/library/datetime.html#strftime-and-strptime-format-codes>`_
+                      for other options).
Returns:
- string in YYYYMMDDHHMM or shorter version of it
+        A string in ``YYYYMMDDHHmm`` format or a shorter version of it
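+
+    For example (an illustrative sketch):
+
+    .. code-block:: python
+
+        date_to_str(datetime.datetime(2024, 4, 8, 9, 1))   # returns "202404080901"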
"""
v = d.strftime(format)
return v
def str_to_type(s, return_string=0):
- """Check if the string contains a float, int, boolean, datetime, or just regular string.
+ """Checks whether the string is a float, int, boolean, datetime, or just regular string.
This will be used to automatically convert environment variables to data types
that are more convenient to work with. If you don't want this functionality,
- pass return_string = 1
+ pass ``return_string = 1``.
Args:
- s: a string
- return_string: Set to 1 to return the string itself
- Set to 2 to return the string itself only for a datetime object
+ s (str): A string
+ return_string (int): Set to ``1`` to return the string itself. Set to ``2`` to return the string itself only for a datetime object
Returns:
- a float, int, boolean, datetime, or the string itself when all else fails
+ A float, int, boolean, datetime, or the string itself when all else fails
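+
+    For example (an illustrative sketch; the exact boolean spellings accepted depend on
+    the parser):
+
+    .. code-block:: python
+
+        str_to_type("3.14")   # returns the float 3.14
+        str_to_type("TRUE")   # returns the boolean True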
"""
s = s.strip("\"'")
if return_string != 1:
@@ -91,13 +90,12 @@ def str_to_type(s, return_string=0):
def type_to_str(v):
- """Given a float/int/boolean/date or list of these types, gets a string
- representing their values
+ """Gets a string representing the value of a given float, int, boolean, date or list of these types.
Args:
- v: a variable of the above types
+ v: A variable of the above types
Returns:
- a string
+ A string
"""
if isinstance(v, bool):
return "TRUE" if v else "FALSE"
@@ -111,11 +109,12 @@ def type_to_str(v):
def list_to_str(v, oneline=False):
- """Given a string or list of string, construct a string
- to be used on right hand side of shell environement variables
+ """Given a string or list of strings, constructs a string
+ to be used on right hand side of shell environment variables.
Args:
- v: a string/number, list of strings/numbers, or null string('')
+        v: A string/number, list of strings/numbers, or null string (``''``)
+        oneline (bool): Whether to construct the string on a single line (``True``) or
+                        over multiple lines (``False``)
Returns:
A string
"""
@@ -134,13 +133,13 @@ def list_to_str(v, oneline=False):
def str_to_list(v, return_string=0):
- """Given a string, construct a string or list of strings.
- Basically does the reverse operation of `list_to_string`.
+ """Constructs a string or list of strings based on the given string.
+ Basically does the reverse operation of ``list_to_str``.
Args:
- v: a string
+ v: A string
Returns:
- a string, list of strings or null string('')
+        A string, a list of strings, or a null string (``''``)
"""
if not isinstance(v, str):
@@ -167,11 +166,11 @@ def str_to_list(v, return_string=0):
def set_env_var(param, value):
- """Set an environment variable
+ """Sets an environment variable.
Args:
- param: the variable to set
- value: either a string, list of strings or None
+ param: The variable to set
+ value: A string, a list of strings, or None
Returns:
None
"""
@@ -180,12 +179,12 @@ def set_env_var(param, value):
def get_env_var(param):
- """Get the value of an environement variable
+ """Gets the value of an environment variable
Args:
- param: the environement variable
+ param: The environment variable
Returns:
- Returns either a string, list of strings or None
+ A string, a list of strings, or None
"""
if not param in os.environ:
@@ -195,30 +194,30 @@ def get_env_var(param):
def import_vars(dictionary=None, target_dict=None, env_vars=None):
- """Import all (or select few) environment/dictionary variables as python global
- variables of the caller module. Call this function at the beginning of a function
+ """Imports all (or a select few) environment/dictionary variables as Python global
+    variables of the caller module. Call this function at the beginning of a function
that uses environment variables.
- Note that for read-only environmental variables, calling this function once at the
+ Note that for read-only environment variables, calling this function once at the
beginning should be enough. However, if the variable is modified in the module it is
- called from, the variable should be explicitly tagged as `global`, and then its value
- should be exported back to the environment with a call to export_vars()
+ called from, the variable should be explicitly tagged as ``global``, and then its value
+ should be exported back to the environment with a call to ``export_vars()``:
+
+ .. code-block:: console
import_vars() # import all environment variables
global MY_VAR, MY_LIST_VAR
MY_PATH = "/path/to/somewhere"
MY_LIST_VAR.append("Hello")
- export_vars() # these exports all global variables
+ export_vars() # this exports all global variables
- There doesn't seem to an easier way of imitating the shell script doing way of things, which
- assumes that everything is global unless specifically tagged local, while the opposite is true
- for python.
+    This is because shell scripting assumes that everything is global unless specifically
+    tagged as local, while the opposite is true for Python.
Args:
- dictionary: source dictionary (default=os.environ)
- target_dict: target dictionary (default=caller module's globals())
- env_vars: list of selected environement/dictionary variables to import, or None,
- in which case all environment/dictionary variables are imported
+        dictionary (dict): Source dictionary (defaults to ``os.environ``)
+        target_dict (dict): Target dictionary (defaults to the caller module's ``globals()``)
+        env_vars (list): List of selected environment/dictionary variables to import, or
+                         ``None``, in which case all environment/dictionary variables are
+                         imported
Returns:
None
"""
@@ -240,15 +239,14 @@ def import_vars(dictionary=None, target_dict=None, env_vars=None):
def export_vars(dictionary=None, source_dict=None, env_vars=None):
- """Export all (or select few) global variables of the caller module's
- to either the environement/dictionary. Call this function at the end of
+ """Exports all (or a select few) global variables of the caller modules
+ to the environment/dictionary. Calls this function at the end of
a function that updates environment variables.
Args:
- dictionary: target dictionary to set (default=os.environ)
- source_dict: source dictionary (default=caller modules globals())
- env_vars: list of selected environement/dictionary variables to export, or None,
- in which case all environment/dictionary variables are exported
+        dictionary (dict): Target dictionary to set (defaults to ``os.environ``)
+        source_dict (dict): Source dictionary (defaults to the caller module's ``globals()``)
+        env_vars (list): List of selected environment/dictionary variables to export, or
+                         ``None``, in which case all environment/dictionary variables are
+                         exported
Returns:
None
"""
diff --git a/ush/python_utils/filesys_cmds_vrfy.py b/ush/python_utils/filesys_cmds_vrfy.py
index ca986f9693..c33e9b311c 100644
--- a/ush/python_utils/filesys_cmds_vrfy.py
+++ b/ush/python_utils/filesys_cmds_vrfy.py
@@ -5,13 +5,13 @@
def cmd_vrfy(cmd, *args):
- """Execute system command
+ """Executes system command
Args:
- cmd: the command
- *args: its arguments
+ cmd (str): The command
+ *args: Its arguments
Returns:
- Exit code
+ ret: Exit code
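+
+    A minimal usage sketch (the command and file names are illustrative):
+
+    .. code-block:: python
+
+        cmd_vrfy("cp", "source.txt", "destination.txt")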
"""
cmd += " " + " ".join([str(a) for a in args])
@@ -22,28 +22,77 @@ def cmd_vrfy(cmd, *args):
def cp_vrfy(*args):
+ """Checks that the ``cp`` command executed successfully
+
+ Args:
+        *args: Command-line arguments to pass to ``cp``
+ Returns:
+ Exit code
+ """
return cmd_vrfy("cp", *args)
def rsync_vrfy(*args):
+ """Checks that the ``rsync`` command executed successfully
+
+ Args:
+        *args: Command-line arguments to pass to ``rsync``
+ Returns:
+ Exit code
+ """
return cmd_vrfy("rsync", *args)
def mv_vrfy(*args):
+ """Checks that the ``mv`` command executed successfully
+
+ Args:
+        *args: Command-line arguments to pass to ``mv``
+ Returns:
+ Exit code
+ """
return cmd_vrfy("mv", *args)
def rm_vrfy(*args):
+ """Checks that the ``rm`` command executed successfully
+
+ Args:
+        *args: Command-line arguments to pass to ``rm``
+ Returns:
+ Exit code
+ """
return cmd_vrfy("rm", *args)
def ln_vrfy(*args):
+ """Checks that the ``ln`` command executed successfully
+
+ Args:
+        *args: Command-line arguments to pass to ``ln``
+ Returns:
+ Exit code
+ """
return cmd_vrfy("ln", *args)
def mkdir_vrfy(*args):
+ """Checks that the ``mkdir`` command executed successfully
+
+ Args:
+        *args: Command-line arguments to pass to ``mkdir``
+ Returns:
+ Exit code
+ """
return cmd_vrfy("mkdir", *args)
def cd_vrfy(*args):
+ """Checks that the ``cd`` command executed successfully
+
+ Args:
+ *args: Iterable object containing command with its command line arguments
+ Returns:
+ Exit code
+ """
return os.chdir(*args)
diff --git a/ush/python_utils/fv3write_parms_lambert.py b/ush/python_utils/fv3write_parms_lambert.py
index c94400710a..56c42b10b8 100755
--- a/ush/python_utils/fv3write_parms_lambert.py
+++ b/ush/python_utils/fv3write_parms_lambert.py
@@ -1,21 +1,33 @@
#!/usr/bin/env python
-#
-# To use this tool, you should source the regional workflow environment
-# $> source env/wflow_xxx.env
-# and activate pygraf (or any one with cartopy installation)
-# $> conda activate pygraf
-#
+
+"""
+To use this tool, first source the workflow environment:
+
+.. code-block:: console
+
+ $> module use /path/to/ufs-srweather-app/modulefiles
+   $> module load wflow_<platform>
+ $> conda activate srw_graphics
+
+Make sure to adjust the ``modulefiles`` path and ``<platform>`` to correspond to your system.
+Even though the message printed to the console will direct users to run ``conda activate srw_app``, this script requires an environment (e.g., ``srw_graphics``) that includes ``pygraf`` or ``cartopy``. The ``srw_graphics`` environment uses ``cartopy`` for plotting. If the ``srw_app`` environment is already loaded, users can simply run ``conda activate srw_graphics`` to switch environments.
+
+For usage instructions, run:
+
+.. code-block:: console
+
+ $> python fv3write_parms_lambert.py -h
+
+"""
import argparse
import cartopy.crs as ccrs
-# @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@
-#
-# Main function to return parameters for the FV3 write component.
-#
-
if __name__ == "__main__":
+ """
+ Main function to return parameters for the FV3 write component.
+ """
parser = argparse.ArgumentParser(
description="Determine FV3 write component lat1/lon1 for Lamert Conformal map projection",
diff --git a/ush/python_utils/misc.py b/ush/python_utils/misc.py
index e5e320ae43..a425f6687c 100644
--- a/ush/python_utils/misc.py
+++ b/ush/python_utils/misc.py
@@ -4,36 +4,36 @@
def uppercase(s):
- """Function to convert a given string to uppercase
+ """Converts a given string to uppercase
Args:
- s: the string
- Return:
- Uppercased str
+ s (str): The string to change to uppercase
+ Returns:
+ Uppercased string
"""
return s.upper()
def lowercase(s):
- """Function to convert a given string to lowercase
+ """Converts a given string to lowercase
Args:
- s: the string
- Return:
- Lowercase str
+ s (str): The string to change to lowercase
+ Returns:
+ Lowercased string
"""
return s.lower()
def find_pattern_in_str(pattern, source):
- """Find regex pattern in a string
+ """Finds a regular expression (regex) pattern in a string
Args:
- pattern: regex expression
- source: string
- Return:
+ pattern (str): Regex pattern
+ source (str): Text string to search for regex pattern
+ Returns:
A tuple of matched groups or None
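+
+    A minimal usage sketch (the pattern and string are illustrative):
+
+    .. code-block:: python
+
+        find_pattern_in_str(r"(\d+)", "abc123")   # returns ('123',)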
"""
pattern = re.compile(pattern)
@@ -43,12 +43,12 @@ def find_pattern_in_str(pattern, source):
def find_pattern_in_file(pattern, file_name):
- """Find regex pattern in a file
+ """Finds a regular expression (regex) pattern in a file
Args:
- pattern: regex expression
- file_name: name of text file
- Return:
+ pattern (str): Regex pattern
+ file_name (str): Name of text file
+ Returns:
A tuple of matched groups or None
"""
pattern = re.compile(pattern)
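
A short usage sketch of the documented contract of these search helpers (a tuple of matched groups, or ``None``); the grid name below is only an example:

.. code-block:: python

   import re

   def find_pattern_in_str(pattern, source):
       match = re.compile(pattern).search(source)
       return match.groups() if match else None

   print(find_pattern_in_str(r"RRFS_CONUS_(\d+)km", "RRFS_CONUS_25km"))  # ('25',)
   print(find_pattern_in_str(r"RRFS_CONUS_(\d+)km", "no match here"))    # None
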
diff --git a/ush/python_utils/print_input_args.py b/ush/python_utils/print_input_args.py
index 6d2a6ec2f1..7e7d7cd8a6 100644
--- a/ush/python_utils/print_input_args.py
+++ b/ush/python_utils/print_input_args.py
@@ -12,9 +12,9 @@ def print_input_args(valid_args):
"""Prints function arguments for debugging purposes
Args:
- valid_args: dictionary of arg-value pairs
+ valid_args (dict): Dictionary of argument-value pairs
Returns:
- Number of printed arguments
+ num_valid_args: Number of printed arguments
"""
# get verbosity from environment
diff --git a/ush/python_utils/print_msg.py b/ush/python_utils/print_msg.py
index 078d708ba9..fc173199cb 100644
--- a/ush/python_utils/print_msg.py
+++ b/ush/python_utils/print_msg.py
@@ -7,14 +7,12 @@
def print_err_msg_exit(error_msg="", stack_trace=True):
- """Function to print out an error message to stderr and exit.
+ """Prints out an error message to standard error and exits.
It can optionally print the stack trace as well.
Args:
- error_msg : error message to print
- stack_trace : set to True to print stack trace
- Returns:
- None
+ error_msg (str): Error message to print
+ stack_trace (bool): Set to ``True`` to print stack trace
"""
if stack_trace:
traceback.print_stack(file=sys.stderr)
@@ -25,15 +23,16 @@ def print_err_msg_exit(error_msg="", stack_trace=True):
def print_info_msg(info_msg, verbose=True):
- """Function to print information message to stdout, when verbose
- is set to True. It does proper "dedentation" that is needed for readability
- of python code.
+ """
+ Prints an informational message to standard output when ``verbose``
+ is set to ``True``. It does proper "dedentation"/formatting that is needed for readability
+ of Python code.
Args:
- info_msg : info message to print
- verbose : set to False to silence printing
+ info_msg (str): Info message to print
+ verbose (bool): Set to ``False`` to silence printing
Returns:
- True: if message is successfully printed
+        Boolean value: ``True`` if the message is successfully printed; ``False`` if ``verbose`` is set to ``False``
"""
if verbose:
@@ -43,13 +42,14 @@ def print_info_msg(info_msg, verbose=True):
def log_info(info_msg, verbose=True, dedent_=True):
- """Function to print information message using the logging module. This function
- should not be used if python logging has not been initialized.
+ """
+ Prints information message using the logging module. This function
+ should not be used if Python logging has not been initialized.
Args:
- info_msg : info message to print
- verbose : set to False to silence printing
- dedent_ : set to False to disable "dedenting" (print string as-is)
+ info_msg (str): Info message to print
+ verbose (bool): Set to ``False`` to silence printing
+ dedent_ (bool): Set to ``False`` to disable "dedenting"/formatting and print string as-is
Returns:
None
"""
diff --git a/ush/python_utils/run_command.py b/ush/python_utils/run_command.py
index 3ab0fce898..bf41ec96e2 100644
--- a/ush/python_utils/run_command.py
+++ b/ush/python_utils/run_command.py
@@ -4,10 +4,10 @@
def run_command(cmd):
- """Run system command in a subprocess
+ """Runs system command in a subprocess
Args:
- cmd: command to execute
+ cmd (str): Command to execute
Returns:
Tuple of (exit code, std_out, std_err)
"""
diff --git a/ush/python_utils/xml_parser.py b/ush/python_utils/xml_parser.py
index a6f5add2e5..38637892fa 100644
--- a/ush/python_utils/xml_parser.py
+++ b/ush/python_utils/xml_parser.py
@@ -4,26 +4,26 @@
def load_xml_file(xml_file):
- """Loads xml file
+ """Loads XML file
Args:
- xml_file: path to xml file
+        xml_file (str): Path to the XML file
Returns:
- root of the xml tree
+ tree: Root of the XML tree
"""
tree = ET.parse(xml_file)
return tree
def has_tag_with_value(tree, tag, value):
- """Check if xml tree has a node with tag and value
+ """Checks if XML tree has a node with tag and value
Args:
- tree: the xml tree
- tag: the tag
- value: text of tag
+ tree (xml.etree.ElementTree): The XML tree
+ tag (str): The tag
+ value (str): Text of tag
Returns:
- Boolean
+ Boolean value
"""
for node in tree.iter():
if node.tag == tag and node.text == value:
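
Usage sketch for the two XML helpers; the tag and value here are hypothetical, not taken from an SRW workflow file:

.. code-block:: python

   import xml.etree.ElementTree as ET

   tree = ET.ElementTree(ET.fromstring("<workflow><cyclethrottle>2</cyclethrottle></workflow>"))

   def has_tag_with_value(tree, tag, value):
       for node in tree.iter():
           if node.tag == tag and node.text == value:
               return True
       return False

   print(has_tag_with_value(tree, "cyclethrottle", "2"))  # True
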
diff --git a/ush/retrieve_data.py b/ush/retrieve_data.py
index 5acf9d5ce9..17440441d5 100755
--- a/ush/retrieve_data.py
+++ b/ush/retrieve_data.py
@@ -1,28 +1,19 @@
#!/usr/bin/env python3
# pylint: disable=logging-fstring-interpolation
"""
-This script helps users pull data from known data streams, including
-URLS and HPSS (only on supported NOAA platforms), or from user-supplied
-data locations on disk.
-
-Several supported data streams are included in
-parm/data_locations.yml, which provides locations and naming
-conventions for files commonly used with the SRW App. Provide the file
-to this tool via the --config flag. Users are welcome to provide their
-own file with alternative locations and naming conventions.
-
-When using this script to pull from disk, the user is required to
-provide the path to the data location, which can include Python
-templates. The file names follow those included in the --config file by
-default, or can be user-supplied via the --file_name flag. That flag
-takes a YAML-formatted string that follows the same conventions outlined
-in the parm/data_locations.yml file for naming files.
+This script helps users pull data from known data streams, including URLs and HPSS (only on supported NOAA platforms), or from user-supplied data locations on disk.
+
+Several supported data streams are included in ``parm/data_locations.yml``, which provides locations and naming conventions for files commonly used with the SRW App. Provide the file to this tool via the ``--config`` flag. Users are welcome to provide their own file with alternative locations and naming conventions.
+
+When using this script to pull from disk, the user is required to provide the path to the data location, which can include Python templates. The file names follow those included in the ``--config`` file by default or can be user-supplied via the ``--file_name`` flag. That flag
+takes a YAML-formatted string that follows the same conventions outlined in the ``parm/data_locations.yml`` file for naming files.
To see usage for this script:
+
+.. code-block:: console
+
python retrieve_data.py -h
-Also see the parse_args function below.
"""
import argparse
@@ -44,9 +35,17 @@
def clean_up_output_dir(expected_subdir, local_archive, output_path, source_paths):
- """Remove expected sub-directories and existing_archive files on
- disk once all files have been extracted and put into the specified
- output location."""
+ """Removes expected subdirectories and ``existing_archive`` files on disk once all files have been extracted and put into the specified output location.
+
+ Args:
+        expected_subdir (str): Expected subdirectory
+        local_archive (str): File name of the local archive
+        output_path (str): Path to a location on disk. Path is expected to exist.
+        source_paths (list): Paths to the source files
+
+ Returns:
+ unavailable (dict): A dictionary of unavailable files
+ """
unavailable = {}
expand_source_paths = []
@@ -82,10 +81,16 @@ def clean_up_output_dir(expected_subdir, local_archive, output_path, source_path
def copy_file(source, destination, copy_cmd):
"""
- Copy a file from a source and place it in the destination location.
- Return a boolean value reflecting the state of the copy.
-
+ Copies a file from a source and places it in the destination location.
Assumes destination exists.
+
+ Args:
+ source (str): Directory where file currently resides
+ destination (str): Directory that the file should be moved to
+ copy_cmd (str): Copy command (e.g., ``cp``, ``ln -sf``)
+
+ Returns:
+ A boolean value reflecting whether the copy was successful (True) or unsuccessful (False)
"""
if not os.path.exists(source):
@@ -110,8 +115,13 @@ def copy_file(source, destination, copy_cmd):
def check_file(url):
"""
- Check that a file exists at the expected URL. Return boolean value
- based on the response.
+ Checks that a file exists at the expected URL.
+
+ Args:
+        url (str): URL of the file to check
+
+    Returns:
+        Boolean value (True if ``status_code == 200``; False otherwise)
"""
status_code = urllib.request.urlopen(url).getcode()
return status_code == 200
@@ -119,14 +129,13 @@ def check_file(url):
def download_file(url):
"""
- Download a file from a url source, and place it in a target location
- on disk.
+    Downloads a file from a URL source and places it in a target location on disk.
- Arguments:
- url url to file to be downloaded
+ Args:
+        url (str): URL of the file to be downloaded
- Return:
- boolean value reflecting state of download.
+ Returns:
+        Boolean value reflecting whether the download was successful (True) or unsuccessful (False)
"""
# wget flags:
@@ -154,18 +163,24 @@ def download_file(url):
def arg_list_to_range(args):
"""
- Given an argparse list argument, return the sequence to process.
+ Returns the sequence to process, given an ``argparse`` list.
The length of the list will determine what sequence items are returned:
- Length = 1: A single item is to be processed
- Length = 2: A sequence of start, stop with increment 1
- Length = 3: A sequence of start, stop, increment
- Length > 3: List as is
+ * Length = 1: A single item is to be processed
+ * Length = 2: A sequence of start, stop with increment 1
+ * Length = 3: A sequence of start, stop, increment
+ * Length > 3: List as is
- argparse should provide a list of at least one item (nargs='+').
+ ``argparse`` should provide a list of at least one item (``nargs='+'``).
Must ensure that the list contains integers.
+
+ Args:
+ args (list): An ``argparse`` list argument
+
+ Returns:
+        args: The sequence of items to process
"""
args = args if isinstance(args, list) else list(args)
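
A sketch of the length-based dispatch described above. Whether the stop value is inclusive is an assumption here; the real function in ``ush/retrieve_data.py`` is authoritative:

.. code-block:: python

   def arg_list_to_range(args):
       args = [int(a) for a in args]
       if len(args) == 1:
           return args                                   # single item
       if len(args) == 2:
           return range(args[0], args[1] + 1)            # start, stop, increment 1
       if len(args) == 3:
           return range(args[0], args[1] + 1, args[2])   # start, stop, increment
       return args                                       # length > 3: list as-is

   print(list(arg_list_to_range(["0", "12", "6"])))  # [0, 6, 12]
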
@@ -179,25 +194,25 @@ def arg_list_to_range(args):
def fill_template(template_str, cycle_date, templates_only=False, **kwargs):
- """Fill in the provided template string with date time information,
- and return the resulting string.
+ """Fills in the provided template string with date time information, and returns the
+ resulting string.
- Arguments:
- template_str a string containing Python templates
- cycle_date a datetime object that will be used to fill in
- date and time information
- templates_only boolean value. When True, this function will only
- return the templates available.
+ Args:
+        template_str (str): A string containing Python templates
+        cycle_date (datetime.datetime): A datetime object that will be used to fill in
+                                date and time information
+ templates_only (bool): When ``True``, this function will only return the templates
+ available.
Keyword Args:
- ens_group a number associated with a bin where ensemble
- members are stored in archive files
- fcst_hr an integer forecast hour. string formatting should
- be included in the template_str
- mem a single ensemble member. should be a positive integer value
+ ens_group (int): A number associated with a bin where ensemble members are stored in
+ archive files.
+ fcst_hr (int): An integer forecast hour. String formatting should be included in the
+ ``template_str``.
+ mem (int): A single ensemble member. Should be a positive integer value.
- Return:
- filled template string
+ Returns:
+ Filled template string
"""
# Parse keyword args
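
At its core, template filling of this kind can be done with ``str.format``, which ignores unused keyword arguments. The template and values below are illustrative only; real templates come from ``parm/data_locations.yml``:

.. code-block:: python

   from datetime import datetime

   template_str = "gfs.t{hh}z.pgrb2.0p25.f{fcst_hr:03d}"
   cycle_date = datetime(2019, 10, 28, 12)

   filled = template_str.format(
       yyyymmdd=cycle_date.strftime("%Y%m%d"),  # unused keys are simply ignored
       hh=cycle_date.strftime("%H"),
       fcst_hr=6,
   )
   print(filled)  # gfs.t12z.pgrb2.0p25.f006
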
@@ -246,7 +261,13 @@ def fill_template(template_str, cycle_date, templates_only=False, **kwargs):
def create_target_path(target_path):
"""
- Append target path and create directory for ensemble members
+ Appends target path and creates directory for ensemble members
+
+ Args:
+ target_path (str): Target path
+
+ Returns:
+        target_path: The updated target path (the directory is created if it does not already exist)
"""
if not os.path.exists(target_path):
os.makedirs(target_path)
@@ -258,7 +279,18 @@ def find_archive_files(paths, file_names, cycle_date, ens_group):
"""Given an equal-length set of archive paths and archive file
names, and a cycle date, check HPSS via hsi to make sure at least
one set exists. Return a dict of the paths of the existing archive, along with
- the item in set of paths that was found."""
+ the item in set of paths that was found.
+
+ Args:
+ paths (list): Archive paths
+ file_names (list): Archive file names
+        cycle_date (datetime.datetime): Cycle date, parsed from a ``YYYYMMDDHH`` or ``YYYYMMDDHHmm`` string
+ ens_group (int): A number associated with a bin where ensemble members are stored
+ in archive files
+
+ Returns:
+ A tuple containing (existing_archives, list_item) or ("", 0)
+ """
zipped_archive_file_paths = zip(paths, file_names)
@@ -291,19 +323,18 @@ def find_archive_files(paths, file_names, cycle_date, ens_group):
def get_file_templates(cla, known_data_info, data_store, use_cla_tmpl=False):
- """Returns the file templates requested by user input, either from
- the command line, or from the known data information dict.
+ """Returns the file templates requested by user input, either from the command line,
+ or from the known data information dictionary.
- Arguments:
+ Args:
- cla command line arguments Namespace object
- known_data_info dict from data_locations yaml file
- data_store string corresponding to a key in the
- known_data_info dict
- use_cla_tmpl boolean on whether to check cla for templates
+        cla (argparse.Namespace): Command line arguments
+ known_data_info (dict): Dictionary from ``data_locations.yml`` file
+ data_store (str) : String corresponding to a key in the ``known_data_info`` dictionary
+ use_cla_tmpl (bool): Whether to check command line arguments for templates
Returns:
- file_templates a list of file templates
+ file_templates: A list of file templates
"""
file_templates = known_data_info.get(data_store, {}).get("file_names")
@@ -326,7 +357,7 @@ def get_file_templates(cla, known_data_info, data_store, use_cla_tmpl=False):
file_templates = file_templates[cla.file_set]
if not file_templates:
msg = "No file naming convention found. They must be provided \
- either on the command line or on in a config file."
+ either on the command line or in a config file."
raise argparse.ArgumentTypeError(msg)
return file_templates
@@ -335,29 +366,25 @@ def get_requested_files(cla, file_templates, input_locs, method="disk", **kwargs
# pylint: disable=too-many-locals
- """This function copies files from disk locations
- or downloads files from a url, depending on the option specified for
- user.
-
- This function expects that the output directory exists and is
- writeable.
+ """Copies files from disk locations or downloads files from a URL, depending on the option
+ specified by the user.
- Arguments:
+ This function expects that the output directory exists and is writeable.
- cla Namespace object containing command line arguments
- file_templates a list of file templates
- input_locs A string containing a single data location, either a url
- or disk path, or a list of paths/urls.
- method Choice of disk or download to indicate protocol for
- retrieval
+ Args:
+        cla (argparse.Namespace): Command line arguments
+ file_templates (list): A list of file templates
+ input_locs (str) : A string containing a single data location, either a URL or disk
+ path, or a list of paths/URLs.
+ method (str) : Choice of ``"disk"`` or ``"download"`` to indicate protocol for
+ retrieval
- Keyword args:
- members a list integers corresponding to the ensemble members
- check_all boolean flag that indicates all urls should be
- checked for all files
+ Keyword Args:
+ members (list): A list of integers corresponding to the ensemble members
+ check_all (bool): Flag that indicates whether all URLs should be checked for all files
Returns:
- unavailable a list of locations/files that were unretrievable
+ unavailable (list): A list of locations/files that were unretrievable
"""
members = kwargs.get("members", "")
@@ -444,14 +471,13 @@ def get_requested_files(cla, file_templates, input_locs, method="disk", **kwargs
def hsi_single_file(file_path, mode="ls"):
- """Call hsi as a subprocess for Python and return information about
- whether the file_path was found.
-
- Arguments:
- file_path path on HPSS
- mode the hsi command to run. ls is default. may also
- pass "get" to retrieve the file path
+ """Calls ``hsi`` as a subprocess for Python and returns information about whether the
+    ``file_path`` was found.
+
+ Args:
+ file_path (str): File path on HPSS
+ mode (str): The ``hsi`` command to run. ``ls`` is default. May also pass ``get``
+ to retrieve the file path.
"""
cmd = f"hsi {mode} {file_path}"
@@ -473,17 +499,29 @@ def hpss_requested_files(cla, file_names, store_specs, members=-1, ens_group=-1)
# pylint: disable=too-many-locals
- """This function interacts with the "hpss" protocol in a provided
- data store specs file to download a set of files requested by the
- user. Depending on the type of archive file (zip or tar), it will
- either pull the entire file and unzip it, or attempt to pull
- individual files from a tar file.
+ """This function interacts with the "hpss" protocol in a provided data store specs file to
+ download a set of files requested by the user. Depending on the type of archive file (``zip``
+ or ``tar``), it will either pull the entire file and unzip it or attempt to pull individual
+ files from a tar file.
- It cleans up local disk after files are deemed available to remove
- any empty subdirectories that may still be present.
+ It cleans up the local disk after files are deemed available in order to remove any empty
+ subdirectories that may still be present.
- This function exepcts that the output directory exists and is
- writable.
+    This function expects that the output directory exists and is writable.
+
+ Args:
+        cla (argparse.Namespace): Command line arguments
+ file_names (list): List of file names
+ store_specs (dict): Data-store specifications (specs) file
+ members (list): A list of integers corresponding to the ensemble members
+ ens_group (int): A number associated with a bin where ensemble members are stored in
+ archive files
+
+ Returns:
+ A Python set of unavailable files
+
+ Raises:
+ Exception: If there is an error running the archive extraction command
"""
members = [-1] if members == -1 else members
@@ -629,15 +667,28 @@ def hpss_requested_files(cla, file_names, store_specs, members=-1, ens_group=-1)
def load_str(arg):
- """Load a dict string safely using YAML. Return the resulting dict."""
+ """Loads a dictionary string safely using YAML.
+
+ Args:
+ arg (str): A string representation of a dictionary
+
+ Returns:
+        The resulting dictionary
+ """
return yaml.load(arg, Loader=yaml.SafeLoader)
def config_exists(arg):
"""
- Check to ensure that the provided config file exists. If it does,
- load it with YAML's safe loader and return the resulting dict.
+    Checks to ensure that the provided config file exists. If it does, loads it with YAML's safe
+    loader and returns the resulting dictionary.
+
+ Args:
+ arg (str): Path to a configuration file
+
+ Returns:
+ cfg: A dictionary representation of the configuration file contents
"""
# Check for existence of file
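
For example, a YAML flow-style dictionary string of the kind accepted by ``--file_name`` (the key and file template here are illustrative):

.. code-block:: python

   import yaml

   arg = '{anl: ["gfs.t{hh}z.pgrb2.0p25.f000"]}'
   print(yaml.load(arg, Loader=yaml.SafeLoader))
   # {'anl': ['gfs.t{hh}z.pgrb2.0p25.f000']}
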
@@ -657,19 +708,24 @@ def pair_locs_with_files(input_locs, file_templates, check_all):
contains the multiple locations and file templates for files that
should be searched in those locations.
- check_all indicates that all locations should be paired with all
- avaiable file templates.
-
The different possibilities:
- 1. Get one or more files from a single path/url
- 2. Get multiple files from multiple corresponding
- paths/urls
- 3. Check all paths for all file templates until files are
- found
- The default will be to handle #1 and #2. #3 will be
- indicated by a flag in the yaml: "check_all: True"
+ #. Get one or more files from a single path/URL
+ #. Get multiple files from multiple corresponding paths/URLs
+ #. Check all paths for all file templates until files are found
+
+ The default will be to handle #1 and #2. #3 will be indicated by a flag in the YAML:
+    ``check_all: True``
+
+ Args:
+ input_locs (list): Input locations
+ file_templates (list): File templates
+ check_all (bool): Flag that indicates whether all input locations should be checked
+ for all available file templates
+
+ Returns:
+ locs_files (list): Iterable containing multiple locations and file templates for files
+ that should be searched in those locations
"""
if not check_all:
@@ -697,7 +753,16 @@ def pair_locs_with_files(input_locs, file_templates, check_all):
def path_exists(arg):
- """Check whether the supplied path exists and is writeable"""
+ """
+    Checks whether the supplied path exists and is writeable
+
+ Args:
+ arg (str): File path
+ Returns:
+        The file path, if it exists and is writeable
+ Raises:
+ argparse.ArgumentTypeError: If the path does not exist or is not writable
+ """
if not os.path.exists(arg):
msg = f"{arg} does not exist!"
@@ -710,10 +775,10 @@ def path_exists(arg):
return arg
-def setup_logging(debug=False):
+def _setup_logging(debug=False):
- """Calls initialization functions for logging package, and sets the
- user-defined level for logging in the script."""
+ """Calls initialization functions for logging package, and sets the user-defined level for
+ logging in the script."""
level = logging.INFO
if debug:
@@ -724,11 +789,11 @@ def setup_logging(debug=False):
logging.info("Logging level set to DEBUG")
-def write_summary_file(cla, data_store, file_templates):
+def _write_summary_file(cla, data_store, file_templates) -> None:
- """Given the command line arguments and the data store from which
- the data was retrieved, write a bash summary file that is needed by
- the workflow elements downstream."""
+ """Given the command line arguments and the data store from which the data was retrieved,
+ write a bash summary file that is needed by the workflow elements downstream.
+ """
members = cla.members if isinstance(cla.members, list) else [-1]
for mem in members:
@@ -757,8 +822,13 @@ def write_summary_file(cla, data_store, file_templates):
def to_datetime(arg):
- """Return a datetime object give a string like YYYYMMDDHH or
- YYYYMMDDHHmm."""
+ """Converts a string to a datetime object
+
+ Args:
+ arg (str): String like ``YYYYMMDDHH`` or ``YYYYMMDDHHmm``
+ Returns:
+ A datetime object
+ """
if len(arg) == 10:
fmt_str = "%Y%m%d%H"
elif len(arg) == 12:
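
The length-based format selection visible in this hunk can be completed as follows; the error-handling branch is an assumption (the real function may raise an ``argparse`` error instead):

.. code-block:: python

   from datetime import datetime

   def to_datetime(arg):
       if len(arg) == 10:
           fmt_str = "%Y%m%d%H"
       elif len(arg) == 12:
           fmt_str = "%Y%m%d%H%M"
       else:
           raise ValueError(f"Unrecognized date string: {arg}")
       return datetime.strptime(arg, fmt_str)

   print(to_datetime("2019102812"))  # 2019-10-28 12:00:00
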
@@ -771,20 +841,28 @@ def to_datetime(arg):
def to_lower(arg):
- """Return a string provided by arg into all lower case."""
+ """Converts a string to lowercase
+
+ Args:
+ arg (str): Any string
+ Returns:
+ An all-lowercase string
+ """
return arg.lower()
def main(argv):
# pylint: disable=too-many-branches, too-many-statements
"""
- Uses known location information to try the known locations and file
- paths in priority order.
+ Uses known location information to try the known locations and file paths in priority order.
+
+ Args:
+ argv (list): List of command line arguments
"""
cla = parse_args(argv)
- setup_logging(cla.debug)
+ _setup_logging(cla.debug)
print("Running script retrieve_data.py with args:", f"\n{('-' * 80)}\n{('-' * 80)}")
for name, val in cla.__dict__.items():
if name not in ["config"]:
@@ -885,7 +963,7 @@ def main(argv):
# All files are found. Stop looking!
# Write a variable definitions file for the data, if requested
if cla.summary_file and not cla.check_file:
- write_summary_file(cla, data_store, file_templates)
+ _write_summary_file(cla, data_store, file_templates)
break
logging.debug(f"Some unavailable files: {unavailable}")
@@ -898,9 +976,14 @@ def main(argv):
def get_ens_groups(members):
- """Given a list of ensemble members, return a dict with keys for
- the ensemble group, and values are lists of ensemble members
- requested in that group."""
+ """Gets ensemble groups with the corresponding list of ensemble members in that group.
+
+ Args:
+ members (list): List of ensemble members.
+ Returns:
+ ens_groups: A dictionary where keys are the ensemble group and values are lists of
+ ensemble members requested in that group
+ """
if members is None:
return {-1: [-1]}
@@ -918,9 +1001,15 @@ def get_ens_groups(members):
def parse_args(argv):
"""
- Function maintains the arguments accepted by this script. Please see
- Python's argparse documenation for more information about settings of each
- argument.
+ Maintains the arguments accepted by this script. Please see Python's
+    `argparse <https://docs.python.org/3/library/argparse.html>`_ documentation for more
+ information about settings of each argument.
+
+ Args:
+ argv (list): Command line arguments to parse
+
+ Returns:
+ args: An argparse.Namespace object (``parser.parse_args(argv)``)
"""
description = (
diff --git a/ush/run_srw_tests.py b/ush/run_srw_tests.py
index 9e77be14b8..8a3c5731b8 100755
--- a/ush/run_srw_tests.py
+++ b/ush/run_srw_tests.py
@@ -5,17 +5,19 @@
import time
import argparse
-# Python class to handle the launching of a set of SRW tests
-# The expectation is to have a "clean" experiment directory with only new experiments
-# that are ready to run (e.g. no _old* experiments left around from previous tests
-# This script takes only one parameter "-e" or "--exptdir" which points to the
-# expt_basedir specified when the run_WE2E_tests.sh is run to set up the tests.
-# The script will work sequentially through each of the test directories and
-# launch the workflow for each with a call to launch_FV3LAM_wflow.sh
-# After the initial launch, the checkTests method is called to monitor the
-# status of each test and call the launch_FV3LAM_wflow.sh script repeatedly
-# in each uncompleted workflow until all workflows are done.
class SRWTest:
+
+ """Python class to handle the launching of a set of SRW tests.
+ The expectation is to have a "clean" experiment directory with only new experiments
+ that are ready to run (e.g., no ``_old*`` experiments left around from previous tests).
+ This script takes only one parameter (``-e`` or ``--exptdir``) which points to the
+ ``expt_basedir`` specified when the ``run_WE2E_tests.py`` script is run to set up the tests.
+ The script will work sequentially through each of the test directories and
+ launch the workflow for each with a call to ``launch_FV3LAM_wflow.sh``.
+ After the initial launch, the ``checkTests`` method is called to monitor the
+ status of each test and to call the ``launch_FV3LAM_wflow.sh`` script repeatedly
+ in each uncompleted workflow until all workflows are done."""
+
def __init__(self, exptdir):
self.exptdir=exptdir
# Get a list of test directories
@@ -34,9 +36,13 @@ def __init__(self, exptdir):
self.checkTests()
def checkTests(self):
+ """Check status of workflows/experiments; remove any that have failed or completed,
+ and continue running the launch command for those that aren't complete.
+
+ Returns:
+ None
+ """
while(len(self.testDirectories) > 0):
- # Only continue running launch command for workflows that aren't complete
- # so check for any that have failed or completed and cull them from the list
cmdstring="grep -L 'wflow_status =' */log.launch_FV3LAM_wflow | xargs dirname"
try:
status= subprocess.check_output(cmdstring,shell=True).strip().decode('utf-8')
diff --git a/ush/set_cycle_dates.py b/ush/set_cycle_dates.py
index 0c63a87e49..cb386407b6 100644
--- a/ush/set_cycle_dates.py
+++ b/ush/set_cycle_dates.py
@@ -6,18 +6,17 @@
def set_cycle_dates(date_start, date_end, incr_cycl_freq):
- """This file defines a function that, given the start and end dates
- as date time objects, and a cycling frequency, returns an array of
- cycle date-hours whose elements have the form YYYYMMDDHH. Here,
- YYYY is a four-digit year, MM is a two- digit month, DD is a
- two-digit day of the month, and HH is a two-digit hour of the day.
+ """
+ Sets the cycle date(s).
Args:
- date_start: start date, datetime object
- date_end: end date, datetime object
- incr_cycl_freq: cycle frequency increment in hours, an int
+ date_start (datetime.datetime): Start date
+ date_end (datetime.datetime): End date
+ incr_cycl_freq (int): Cycle frequency increment in hours
Returns:
- A list of dates in a format YYYYMMDDHH
+        all_cdates: An array of cycle date-hours whose elements have the form ``YYYYMMDDHH``,
+                    where ``YYYY`` is a four-digit year, ``MM`` is a two-digit month, ``DD``
+                    is a two-digit day of the month, and ``HH`` is a two-digit hour of the day
"""
print_input_args(locals())
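
A minimal sketch of the documented contract (start date, end date, and an hourly increment in; ``YYYYMMDDHH`` strings out):

.. code-block:: python

   from datetime import datetime, timedelta

   def set_cycle_dates(date_start, date_end, incr_cycl_freq):
       all_cdates = []
       cdate = date_start
       while cdate <= date_end:
           all_cdates.append(cdate.strftime("%Y%m%d%H"))
           cdate += timedelta(hours=incr_cycl_freq)
       return all_cdates

   print(set_cycle_dates(datetime(2019, 10, 28, 0), datetime(2019, 10, 29, 0), 12))
   # ['2019102800', '2019102812', '2019102900']
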
diff --git a/ush/set_fv3nml_ens_stoch_seeds.py b/ush/set_fv3nml_ens_stoch_seeds.py
index 0b9b186210..9160dec0a2 100644
--- a/ush/set_fv3nml_ens_stoch_seeds.py
+++ b/ush/set_fv3nml_ens_stoch_seeds.py
@@ -23,19 +23,17 @@
def set_fv3nml_ens_stoch_seeds(cdate, expt_config):
"""
- This function, for an ensemble-enabled experiment
- (i.e. for an experiment for which the workflow configuration variable
- DO_ENSEMBLE has been set to "TRUE"), creates new namelist files with
- unique stochastic "seed" parameters, using a base namelist file in the
- ${EXPTDIR} directory as a template. These new namelist files are stored
- within each member directory housed within each cycle directory. Files
- of any two ensemble members differ only in their stochastic "seed"
- parameter values. These namelist files are generated when this file is
- called as part of the TN_RUN_FCST task.
+ Creates new namelist files with unique stochastic "seed" parameters for an ensemble-enabled
+ experiment (i.e., where ``DO_ENSEMBLE: True``), using a base namelist file in ``${EXPTDIR}``
+ as a template. These new namelist files are stored within each member directory housed within
+ each cycle directory. Files of any two ensemble members differ only in their stochastic "seed"
+ parameter values. These namelist files are generated when this file is called as part of the
+ ``run_fcst`` task.
Args:
- cdate the cycle
- expt_config the in-memory dict representing the experiment configuration
+ cdate (datetime.datetime): The cycle date
+ expt_config (dict): The in-memory dictionary representing the experiment
+ configuration file
Returns:
None
"""
@@ -115,7 +113,7 @@ def set_fv3nml_ens_stoch_seeds(cdate, expt_config):
update_config=get_nml_config(settings),
)
-def parse_args(argv):
+def _parse_args(argv):
"""Parse command line arguments"""
parser = argparse.ArgumentParser(
description="Creates stochastic seeds for an ensemble experiment."
@@ -141,6 +139,6 @@ def parse_args(argv):
if __name__ == "__main__":
- args = parse_args(sys.argv[1:])
+ args = _parse_args(sys.argv[1:])
cfg = load_yaml_config(args.path_to_defns)
set_fv3nml_ens_stoch_seeds(args.cdate, cfg)
diff --git a/ush/set_fv3nml_sfc_climo_filenames.py b/ush/set_fv3nml_sfc_climo_filenames.py
index 7251a5b0e6..7b18399b00 100644
--- a/ush/set_fv3nml_sfc_climo_filenames.py
+++ b/ush/set_fv3nml_sfc_climo_filenames.py
@@ -38,15 +38,16 @@
def set_fv3nml_sfc_climo_filenames(config, debug=False):
"""
- This function sets the values of the variables in
- the forecast model's namelist file that specify the paths to the surface
- climatology files on the FV3LAM native grid (which are either pregenerated
- or created by the TN_MAKE_SFC_CLIMO task). Note that the workflow
+ Sets the values of the variables in the forecast model's namelist file that specify the paths
+ to the surface climatology files on the FV3LAM native grid (which are either pregenerated
+ or created by the ``make_sfc_climo`` task). Note that the workflow
generation scripts create symlinks to these surface climatology files
- in the FIXlam directory, and the values in the namelist file that get
+ in the ``FIXlam`` directory, and the values in the namelist file that get
set by this function are relative or full paths to these links.
Args:
+ config (dict): Section of configuration file specifying surface climatology fields
+ (as a flattened dictionary)
debug (bool): Enable extra output for debugging
Returns:
None
@@ -108,7 +109,7 @@ def set_fv3nml_sfc_climo_filenames(config, debug=False):
update_config=get_nml_config(settings),
)
-def parse_args(argv):
+def _parse_args(argv):
"""Parse command line arguments"""
parser = argparse.ArgumentParser(description="Set surface climatology fields.")
@@ -126,7 +127,7 @@ def parse_args(argv):
if __name__ == "__main__":
- args = parse_args(sys.argv[1:])
+ args = _parse_args(sys.argv[1:])
cfg = load_yaml_config(args.path_to_defns)
cfg = flatten_dict(cfg)
set_fv3nml_sfc_climo_filenames(cfg, args.debug)
diff --git a/ush/set_gridparams_ESGgrid.py b/ush/set_gridparams_ESGgrid.py
index abafd24ae4..4b070e9e50 100644
--- a/ush/set_gridparams_ESGgrid.py
+++ b/ush/set_gridparams_ESGgrid.py
@@ -15,21 +15,26 @@
def set_gridparams_ESGgrid(
lon_ctr, lat_ctr, nx, ny, halo_width, delx, dely, pazi, constants
):
- """Sets the parameters for a grid that is to be generated using the "ESGgrid"
- grid generation method (i.e. GRID_GEN_METHOD set to "ESGgrid").
+ """
+ Sets the parameters for a grid that is to be generated using the "ESGgrid"
+ grid generation method (i.e., when ``GRID_GEN_METHOD: "ESGgrid"``).
Args:
- lon_ctr
- lat_ctr
- nx
- ny
- halo_width
- delx
- dely
- pazi
- constants: dictionary of SRW constants
+ lon_ctr (float): The longitude of the center of the grid (in degrees).
+ lat_ctr (float): The latitude of the center of the grid (in degrees).
+ nx (int): The number of cells in the zonal direction on the regional grid.
+ ny (int): The number of cells in the meridional direction on the regional grid.
+ halo_width (int): The width (in number of grid cells) of the wide :term:`halo` to add
+ around the regional grid before shaving the halo down to the width(s)
+ expected by the forecast model. For predefined grids, this value is
+ set in ``setup.py`` based on the ``ESGgrid_WIDE_HALO_WIDTH`` value in
+ ``predef_grid_params.yaml``.
+ delx (float): The cell size in the zonal direction of the regional grid (in meters).
+ dely (float): The cell size in the meridional direction of the regional grid (in meters).
+        pazi (float): The rotational parameter for the "ESGgrid" (in degrees).
+ constants (dict): Dictionary of SRW constants
Returns:
- Tuple of inputs, and 4 outputs (see return statement)
+ Dictionary of inputs and 4 outputs (see return statement in code for details)
"""
print_input_args(locals())
@@ -43,12 +48,12 @@ def set_gridparams_ESGgrid(
#
# For a ESGgrid-type grid, the orography filtering is performed by pass-
# ing to the orography filtering the parameters for an "equivalent" glo-
- # bal uniform cubed-sphere grid. These are the parameters that a global
+ # bal uniform cubed-sphere grid. These are the parameters that a global
# uniform cubed-sphere grid needs to have in order to have a nominal
# grid cell size equal to that of the (average) cell size on the region-
- # al grid. These globally-equivalent parameters include a resolution
+ # al grid. These globally-equivalent parameters include a resolution
# (in units of number of cells in each of the two horizontal directions)
- # and a stretch factor. The equivalent resolution is calculated in the
+ # and a stretch factor. The equivalent resolution is calculated in the
# script that generates the grid, and the stretch factor needs to be set
# to 1 because we are considering an equivalent globally UNIFORM grid.
# However, it turns out that with a non-symmetric regional grid (one in
@@ -56,7 +61,7 @@ def set_gridparams_ESGgrid(
# cause the orography filtering program is designed for a global cubed-
# sphere grid and thus assumes that nx and ny for a given tile are equal
# when stretch_factor is exactly equal to 1.
- # ^^-- Why is this? Seems like symmetry btwn x and y should still hold when the stretch factor is not equal to 1.
+ # ^^-- Why is this? Seems like symmetry between x and y should still hold when the stretch factor is not equal to 1.
# It turns out that the program will work if we set stretch_factor to a
# value that is not exactly 1. This is what we do below.
#
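
A back-of-envelope version of the "equivalent global resolution" idea in the comment above, assuming a spherical Earth of radius 6371 km: a C``res`` cubed-sphere grid has ``4*res`` cells around the equator, so its nominal cell size is ``2*pi*R/(4*res)``; inverting that for a given regional cell size gives the equivalent resolution:

.. code-block:: python

   import math

   RADIUS_EARTH = 6371.0e3   # m (assumed value)
   delx = 3000.0             # regional cell size in m, e.g., a 3-km grid

   res_equiv = 2.0 * math.pi * RADIUS_EARTH / (4.0 * delx)
   print(round(res_equiv))   # ~3336, i.e., roughly a C3336 equivalent global grid
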
diff --git a/ush/set_gridparams_GFDLgrid.py b/ush/set_gridparams_GFDLgrid.py
index 64dda5b951..f200bb6f83 100644
--- a/ush/set_gridparams_GFDLgrid.py
+++ b/ush/set_gridparams_GFDLgrid.py
@@ -13,7 +13,7 @@
)
-def prime_factors(n):
+def _prime_factors(n):
i = 2
factors = []
while i * i <= n:
@@ -41,24 +41,35 @@ def set_gridparams_GFDLgrid(
nh4,
run_envir,
):
- """Sets the parameters for a grid that is to be generated using the "GFDLgrid"
- grid generation method (i.e. GRID_GEN_METHOD set to "ESGgrid").
+ """Sets the parameters for a grid that is to be generated using the legacy "GFDLgrid"
+    grid generation method (i.e., when ``GRID_GEN_METHOD: "GFDLgrid"``).
Args:
- lon_of_t6_ctr
- lat_of_t6_ctr
- res_of_t6g
- stretch_factor
- refine_ratio_t6g_to_t7g
- istart_of_t7_on_t6g
- iend_of_t7_on_t6g
- jstart_of_t7_on_t6g
- jend_of_t7_on_t6g
- verbose
- nh4
- run_envir
+ lon_of_t6_ctr (float): Longitude of the center of tile 6 (in degrees).
+ lat_of_t6_ctr (float): Latitude of the center of tile 6 (in degrees).
+ res_of_t6g (int): Number of grid cells in either of the two horizontal
+ directions (x and y) on each of the six tiles of the
+ parent global cubed-sphere grid (e.g., 48, 96, 192, 384,
+ 768, 1152, 3072).
+ stretch_factor (float): Stretching factor used in the Schmidt transformation
+ applied to the parent cubed-sphere grid. Setting the
+ Schmidt stretching factor to a value greater than 1
+ shrinks tile 6, while setting it to a value less than 1
+ (but still greater than 0) expands it.
+ refine_ratio_t6g_to_t7g (int): Cell refinement ratio for the regional grid. It refers to
+ the number of cells in either the x or y direction on the
+ regional grid (tile 7) that abut one cell on its parent
+ tile (tile 6).
+ istart_of_t7_on_t6g (int): i-index on tile 6 at which the regional grid (tile 7) starts.
+ iend_of_t7_on_t6g (int): i-index on tile 6 at which the regional grid (tile 7) ends.
+ jstart_of_t7_on_t6g (int): j-index on tile 6 at which the regional grid (tile 7) starts.
+ jend_of_t7_on_t6g (int): j-index on tile 6 at which the regional grid (tile 7) ends.
+ verbose (bool): Flag to print out additional informational messages
+        nh4 (int): The width (in number of cells) of the 4-cell-wide halo on
+                   tile 7, i.e., NH4 = 4.
+ run_envir (str): Workflow mode (*community* or *nco*)
Returns:
- Tuple of inputs and outputs (see return statement)
+ Dictionary of inputs and outputs (see return statement in code for more detail)
"""
print_input_args(locals())
@@ -67,7 +78,7 @@ def set_gridparams_GFDLgrid(
# -----------------------------------------------------------------------
#
# To simplify the grid setup, we require that tile 7 be centered on tile
- # 6. Note that this is not really a restriction because tile 6 can al-
+ # 6. Note that this is not really a restriction because tile 6 can al-
# ways be moved so that it is centered on tile 7 [the location of tile 6
# doesn't really matter because for a regional setup, the forecast model
# will only run on tile 7 (not on tiles 1-6)].
@@ -369,8 +380,8 @@ def set_gridparams_GFDLgrid(
nx_of_t6_on_t6sg = 2 * nx_of_t6_on_t6g
ny_of_t6_on_t6sg = 2 * ny_of_t6_on_t6g
- prime_factors_nx_of_t7_on_t7g = prime_factors(nx_of_t7_on_t7g)
- prime_factors_ny_of_t7_on_t7g = prime_factors(ny_of_t7_on_t7g)
+ prime_factors_nx_of_t7_on_t7g = _prime_factors(nx_of_t7_on_t7g)
+ prime_factors_ny_of_t7_on_t7g = _prime_factors(ny_of_t7_on_t7g)
logging.debug(
f"""
diff --git a/ush/set_predef_grid_params.py b/ush/set_predef_grid_params.py
index 5d3bd0ffed..820e3fc659 100644
--- a/ush/set_predef_grid_params.py
+++ b/ush/set_predef_grid_params.py
@@ -13,11 +13,13 @@ def set_predef_grid_params(USHdir, grid_name, quilting):
"""Sets grid parameters for the specified predefined grid
Args:
- USHdir: path to the SRW ush directory
- grid_name str specifying the predefined grid name.
- quilting: bool whether quilting should be used for output
+ USHdir (str) : Path to the SRW App ``ush`` directory
+ grid_name (str) : String specifying the predefined grid name
+ quilting (bool): Whether quilting should be used for output
Returns:
- Dictionary of grid parameters
+ params_dict: A dictionary of grid parameters
+ Raises:
+ KeyError: If a selected predefined grid is not found in ``predef_grid_params.yaml``.
"""
params_dict = load_config_file(os.path.join(USHdir, "predef_grid_params.yaml"))
diff --git a/ush/setup.py b/ush/setup.py
index 335ce229e1..ab5eac8205 100644
--- a/ush/setup.py
+++ b/ush/setup.py
@@ -46,16 +46,24 @@
from link_fix import link_fix
def load_config_for_setup(ushdir, default_config, user_config):
- """Load in the default, machine, and user configuration files into
- Python dictionaries. Return the combined experiment dictionary.
+ """Updates a Python dictionary in place with experiment configuration settings from the
+ default, machine, and user configuration files.
Args:
- ushdir (str): Path to the ush directory for SRW
- default_config (str): Path to the default config YAML
- user_config (str): Path to the user-provided config YAML
+ ushdir (str): Path to the ``ush`` directory for the SRW App
+ default_config (str): Path to ``config_defaults.yaml``
+ user_config (str): Path to the user-provided config YAML (usually named
+ ``config.yaml``)
Returns:
- Python dict of configuration settings from YAML files.
+ None
+
+ Raises:
+ FileNotFoundError: If the user-provided configuration file or the machine file does not
+ exist.
+        Exception: If (1) the user-provided configuration file cannot be loaded, (2) it contains
+                   invalid sections/keys, (3) it does not contain mandatory information, or (4)
+                   an invalid datetime format is used.
"""
# Load the default config.
@@ -170,8 +178,8 @@ def load_config_for_setup(ushdir, default_config, user_config):
# the "null" settings are removed, i.e., tasks turned off.
update_dict(cfg_u.get('rocoto', {}), cfg_wflow["rocoto"])
- def add_jobname(tasks):
- """ Add the jobname entry for all the tasks in the workflow """
+ def _add_jobname(tasks):
+ """ Adds the jobname entry for all the tasks in the workflow """
if not isinstance(tasks, dict):
return
@@ -184,11 +192,11 @@ def add_jobname(tasks):
task_settings.get("attrs", {}).get("name") or \
task.split("_", maxsplit=1)[1]
elif task_type == "metatask":
- add_jobname(task_settings)
+ _add_jobname(task_settings)
# Add jobname entry to each remaining task
- add_jobname(cfg_wflow["rocoto"]["tasks"])
+ _add_jobname(cfg_wflow["rocoto"]["tasks"])
# Update default config with the constants, the machine config, and
# then the user_config
@@ -267,10 +275,10 @@ def add_jobname(tasks):
raise Exception(
dedent(
f"""
- Date variable {val}={cfg_d['workflow'][val]} is not in a valid date format.
+ Date variable {val}={cfg_d['workflow'][val]} is not in a valid date format.
- For examples of valid formats, see the Users' Guide.
- """
+ For examples of valid formats, see the Users' Guide.
+ """
)
)
@@ -280,20 +288,25 @@ def add_jobname(tasks):
def set_srw_paths(ushdir, expt_config):
"""
- Generate a dictionary of directories that describe the SRW
- structure, i.e., where SRW is installed, and the paths to
- external repositories managed via the manage_externals tool.
+ Generates a dictionary of directories that describe the SRW App
+ structure, i.e., where the SRW App is installed and the paths to
+ external repositories managed via the ``manage_externals`` tool.
- Other paths for SRW are set as defaults in config_defaults.yaml
+ Other paths for the SRW App are set as defaults in ``config_defaults.yaml``.
Args:
- ushdir: (str) path to the system location of the ush/ directory
- under the SRW clone
- expt_config: (dict) contains the configuration settings for the
- user-defined experiment
+ ushdir (str) : Path to the system location of the ``ush`` directory under the
+ SRW App clone
+ expt_config (dict): Contains the configuration settings for the user-defined experiment
Returns:
- dictionary of config settings and system paths as keys/values
+ Dictionary of configuration settings and system paths as keys/values
+
+ Raises:
+        KeyError: If a required external repository is not listed in the externals
+ configuration file (e.g., ``Externals.cfg``)
+ FileNotFoundError: If the ``ufs-weather-model`` code containing the FV3 source code has
+ not been cloned properly
"""
# HOMEdir is the location of the SRW clone, one directory above ush/
@@ -342,24 +355,36 @@ def set_srw_paths(ushdir, expt_config):
def setup(USHdir, user_config_fn="config.yaml", debug: bool = False):
- """Function that validates user-provided configuration, and derives
- a secondary set of parameters needed to configure a Rocoto-based SRW
- workflow. The derived parameters use a set of required user-defined
- parameters defined by either config_defaults.yaml, a user-provided
- configuration file (config.yaml), or a YAML machine file.
+ """Validates user-provided configuration settings and derives
+ a secondary set of parameters needed to configure a Rocoto-based SRW App
+ workflow. The secondary parameters are derived from a set of required
+ parameters defined in ``config_defaults.yaml``, a user-provided
+ configuration file (e.g., ``config.yaml``), or a YAML machine file.
A set of global variable definitions is saved to the experiment
directory as a bash configure file that is sourced by scripts at run
time.
Args:
- USHdir (str): The full path of the ush/ directory where
- this script is located
- user_config_fn (str): The name of a user-provided config YAML
- debug (bool): Enable extra output for debugging
+ USHdir (str): The full path of the ``ush/`` directory where this script
+ (``setup.py``) is located
+ user_config_fn (str): The name of a user-provided configuration YAML (usually
+ ``config.yaml``)
+ debug (bool): Enable extra output for debugging
Returns:
- None
+ None
+
+ Raises:
+ ValueError: If checked configuration values are invalid (e.g., forecast length,
+ ``EXPTDIR`` path)
+ FileExistsError: If ``EXPTDIR`` already exists, and ``PREEXISTING_DIR_METHOD`` is not
+ set to a compatible handling method
+ FileNotFoundError: If the path to a particular file does not exist or if the file itself
+ does not exist at the expected path
+        TypeError: If ``USE_CUSTOM_POST_CONFIG_FILE`` or ``USE_CRTM`` is set to true but no
+                   corresponding custom configuration file or CRTM fix file directory is set
+        KeyError: If an invalid value is provided (e.g., for ``GRID_GEN_METHOD``)
"""
logger = logging.getLogger(__name__)
@@ -513,7 +538,7 @@ def setup(USHdir, user_config_fn="config.yaml", debug: bool = False):
)
)
- def remove_tag(tasks, tag):
+ def _remove_tag(tasks, tag):
""" Remove the tag for all the tasks in the workflow """
if not isinstance(tasks, dict):
@@ -523,17 +548,17 @@ def remove_tag(tasks, tag):
if task_type == "task":
task_settings.pop(tag, None)
elif task_type == "metatask":
- remove_tag(task_settings, tag)
+ _remove_tag(task_settings, tag)
# Remove all memory tags for platforms that do not support them
remove_memory = expt_config["platform"].get("REMOVE_MEMORY")
if remove_memory:
- remove_tag(rocoto_tasks, "memory")
+ _remove_tag(rocoto_tasks, "memory")
for part in ['PARTITION_HPSS', 'PARTITION_DEFAULT', 'PARTITION_FCST']:
partition = expt_config["platform"].get(part)
if not partition:
- remove_tag(rocoto_tasks, 'partition')
+ _remove_tag(rocoto_tasks, 'partition')
# When not running subhourly post, remove those tasks, if they exist
if not expt_config.get("task_run_post", {}).get("SUB_HOURLY_POST"):
@@ -607,7 +632,7 @@ def remove_tag(tasks, tag):
#
# -----------------------------------------------------------------------
#
- def get_location(xcs, fmt, expt_cfg):
+ def _get_location(xcs, fmt, expt_cfg):
ics_lbcs = expt_cfg.get("data", {}).get("ics_lbcs")
if ics_lbcs is not None:
v = ics_lbcs.get(xcs)
@@ -620,7 +645,7 @@ def get_location(xcs, fmt, expt_cfg):
# Get the paths to any platform-supported data streams
get_extrn_ics = expt_config.get("task_get_extrn_ics", {})
- extrn_mdl_sysbasedir_ics = get_location(
+ extrn_mdl_sysbasedir_ics = _get_location(
get_extrn_ics.get("EXTRN_MDL_NAME_ICS"),
get_extrn_ics.get("FV3GFS_FILE_FMT_ICS"),
expt_config,
@@ -628,7 +653,7 @@ def get_location(xcs, fmt, expt_cfg):
get_extrn_ics["EXTRN_MDL_SYSBASEDIR_ICS"] = extrn_mdl_sysbasedir_ics
get_extrn_lbcs = expt_config.get("task_get_extrn_lbcs", {})
- extrn_mdl_sysbasedir_lbcs = get_location(
+ extrn_mdl_sysbasedir_lbcs = _get_location(
get_extrn_lbcs.get("EXTRN_MDL_NAME_LBCS"),
get_extrn_lbcs.get("FV3GFS_FILE_FMT_LBCS"),
expt_config,
@@ -1287,7 +1312,7 @@ def get_location(xcs, fmt, expt_cfg):
# the same resolution input.
#
- def dict_find(user_dict, substring):
+ def _dict_find(user_dict, substring):
if not isinstance(user_dict, dict):
return False
@@ -1296,14 +1321,14 @@ def dict_find(user_dict, substring):
if substring in key:
return True
if isinstance(value, dict):
- if dict_find(value, substring):
+ if _dict_find(value, substring):
return True
return False
- run_make_ics = dict_find(rocoto_tasks, "task_make_ics")
- run_make_lbcs = dict_find(rocoto_tasks, "task_make_lbcs")
- run_run_fcst = dict_find(rocoto_tasks, "task_run_fcst")
+ run_make_ics = _dict_find(rocoto_tasks, "task_make_ics")
+ run_make_lbcs = _dict_find(rocoto_tasks, "task_make_lbcs")
+ run_run_fcst = _dict_find(rocoto_tasks, "task_run_fcst")
run_any_coldstart_task = run_make_ics or \
run_make_lbcs or \
run_run_fcst
@@ -1547,10 +1572,14 @@ def dict_find(user_dict, substring):
return expt_config
def clean_rocoto_dict(rocotodict):
- """Removes any invalid entries from rocotodict. Examples of invalid entries are:
+ """Removes any invalid entries from ``rocotodict``. Examples of invalid entries are:
1. A task dictionary containing no "command" key
- 2. A metatask dictionary containing no task dictionaries"""
+ 2. A metatask dictionary containing no task dictionaries
+
+ Args:
+ rocotodict (dict): A dictionary containing Rocoto workflow settings
+ """
# Loop 1: search for tasks with no command key, iterating over metatasks
for key in list(rocotodict.keys()):
diff --git a/ush/update_input_nml.py b/ush/update_input_nml.py
index b85bbacd4a..be35d495d4 100644
--- a/ush/update_input_nml.py
+++ b/ush/update_input_nml.py
@@ -1,7 +1,7 @@
#!/usr/bin/env python3
"""
-Update the model namelist for a variety of different settings.
+Updates the model namelist for a variety of different settings.
"""
import argparse
@@ -20,15 +20,16 @@
VERBOSE = os.environ.get("VERBOSE", "true")
def update_input_nml(namelist, restart, aqm_na_13km):
- """Update the FV3 input.nml file in the specified run directory
+ """
+ Updates the FV3 ``input.nml`` file in the specified run directory
Args:
- namelist: path to the namelist
- restart: should forecast start from restart?
- aqm_na_13km: should the 13km AQM config be used?
+ namelist (str) : Path to the namelist
+        restart (bool): Whether the forecast should start from a restart file
+        aqm_na_13km (bool): Whether the 13km AQM configuration should be used
Returns:
- Boolean
+ None: Updates ``input.nml`` with the settings provided
"""
print_input_args(locals())
@@ -80,7 +81,7 @@ def update_input_nml(namelist, restart, aqm_na_13km):
update_config=get_nml_config(settings),
)
-def parse_args(argv):
+def _parse_args(argv):
"""Parse command line arguments"""
parser = argparse.ArgumentParser(description="Update FV3 input.nml file for restart.")
@@ -107,7 +108,7 @@ def parse_args(argv):
if __name__ == "__main__":
- args = parse_args(sys.argv[1:])
+ args = _parse_args(sys.argv[1:])
update_input_nml(
namelist=args.namelist,
restart=args.restart,
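
The App applies its namelist settings through its own helpers (``get_nml_config``/``update_config``, seen in the hunk above). As an outside illustration only, the standalone ``f90nml`` package can apply a comparable patch; the group and variable names below are hypothetical:

.. code-block:: python

   import f90nml

   settings = {"fv_core_nml": {"external_ic": False, "nggps_ic": False}}

   # Read input.nml, overlay `settings`, and write the merged namelist.
   f90nml.patch("input.nml", settings, "input.nml.new")
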
From 3e9c40a527da89bbd53a6ab09dd71fbc24393bdd Mon Sep 17 00:00:00 2001
From: jdkublnick <47824899+jdkublnick@users.noreply.github.com>
Date: Thu, 10 Oct 2024 15:56:36 -0400
Subject: [PATCH 34/39] [develop]: Adding in the tutorial for the Halloween
Storm (#1124)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
This update adds a tutorial to the Tutorials chapter of the SRW App User's Guide. It is based off the Halloween Storm UFS case study.
---------
Co-authored-by: “gspetro-NOAA”
Co-authored-by: Gillian Petro <96886803+gspetro-NOAA@users.noreply.github.com>
Co-authored-by: Joshua Kublnick
Co-authored-by: Michael Lueken <63728921+MichaelLueken@users.noreply.github.com>
---
doc/ContribGuide/contributing.rst | 2 +-
doc/UsersGuide/BackgroundInfo/Components.rst | 12 +-
.../BackgroundInfo/Introduction.rst | 4 +-
.../BuildingRunningTesting/BuildSRW.rst | 2 +-
.../BuildingRunningTesting/Quickstart.rst | 10 +-
.../BuildingRunningTesting/RunSRW.rst | 13 +-
.../BuildingRunningTesting/Tutorial.rst | 389 +++++++++++++++---
.../BuildingRunningTesting/VXCases.rst | 11 +-
doc/UsersGuide/Reference/FAQ.rst | 7 +-
doc/UsersGuide/Reference/Glossary.rst | 22 +-
doc/UsersGuide/SSHIntro.rst | 2 +-
doc/conf.py | 6 +-
doc/doc-snippets/cron-note.rst | 3 +
doc/doc-snippets/expt-conf-intro.rst | 14 +
doc/doc-snippets/file-edit-hint.rst | 9 +
doc/doc-snippets/load-env.rst | 7 +
doc/doc-snippets/scp-files.rst | 11 +
17 files changed, 412 insertions(+), 112 deletions(-)
create mode 100644 doc/doc-snippets/cron-note.rst
create mode 100644 doc/doc-snippets/expt-conf-intro.rst
create mode 100644 doc/doc-snippets/file-edit-hint.rst
create mode 100644 doc/doc-snippets/load-env.rst
create mode 100644 doc/doc-snippets/scp-files.rst
diff --git a/doc/ContribGuide/contributing.rst b/doc/ContribGuide/contributing.rst
index eb995efb41..0f7231e265 100644
--- a/doc/ContribGuide/contributing.rst
+++ b/doc/ContribGuide/contributing.rst
@@ -11,7 +11,7 @@ Fork and PR Overview
Contributions to the ``ufs-srweather-app`` project are made via a :github-docs:`Fork` and :github-docs:`Pull Request (PR)` model. GitHub provides a thorough description of this contribution model in their `Contributing to a project` :github-docs:`Quickstart`, but the steps, with respect to ``ufs-srweather-app`` contributions, can be summarized as:
-#. :github-docs:`Create an issue ` to document proposed changes.
+#. :github-docs:`Create an issue ` to document proposed changes.
#. :github-docs:`Fork` the :srw-repo:`ufs-srweather-app repository<>` into your personal GitHub account.
#. :github-docs:`Clone` your fork onto your development system.
#. :github-docs:`Create a branch` in your clone for your changes. All development should take place on a branch, *not* on ``develop``.
diff --git a/doc/UsersGuide/BackgroundInfo/Components.rst b/doc/UsersGuide/BackgroundInfo/Components.rst
index 559576725d..6df12dbd23 100644
--- a/doc/UsersGuide/BackgroundInfo/Components.rst
+++ b/doc/UsersGuide/BackgroundInfo/Components.rst
@@ -38,12 +38,12 @@ Supported model resolutions in this release include 3-, 13-, and 25-km predefine
Model Physics
---------------
-The Common Community Physics Package (CCPP), described `here `__, supports interoperable atmospheric physics and land surface model options. Atmospheric physics are a set of numerical methods describing small-scale processes such as clouds, turbulence, radiation, and their interactions. The most recent SRW App release (|latestr|) included five supported physics suites: FV3_RRFS_v1beta, FV3_GFS_v16, FV3_WoFS_v0, FV3_HRRR, and FV3_RAP. The FV3_RRFS_v1beta physics suite is being tested for use in the future operational implementation of the Rapid Refresh Forecast System (:term:`RRFS`) planned for 2023-2024, and the FV3_GFS_v16 is an updated version of the physics suite used in the operational Global Forecast System (GFS) v16. A detailed list of CCPP updates since the SRW App v2.1.0 release is available :ref:`here `. A full scientific description of CCPP parameterizations and suites can be found in the `CCPP Scientific Documentation `__, and CCPP technical aspects are described in the :doc:`CCPP Technical Documentation `. The model namelist has many settings beyond the physics options that can optimize various aspects of the model for use with each of the supported suites. Additional information on Stochastic Physics options is available :doc:`here `.
+The Common Community Physics Package (CCPP), described `here `_, supports interoperable atmospheric physics and land surface model options. Atmospheric physics are a set of numerical methods describing small-scale processes such as clouds, turbulence, radiation, and their interactions. The most recent SRW App release (|latestr|) included five supported physics suites: FV3_RRFS_v1beta, FV3_GFS_v16, FV3_WoFS_v0, FV3_HRRR, and FV3_RAP. The FV3_RRFS_v1beta physics suite is being tested for use in the future operational implementation of the Rapid Refresh Forecast System (:term:`RRFS`) planned for 2023-2024, and the FV3_GFS_v16 is an updated version of the physics suite used in the operational Global Forecast System (GFS) v16. A detailed list of CCPP updates since the SRW App v2.1.0 release is available :ref:`here `. A full scientific description of CCPP parameterizations and suites can be found in the `CCPP Scientific Documentation `_, and CCPP technical aspects are described in the :doc:`CCPP Technical Documentation `. The model namelist has many settings beyond the physics options that can optimize various aspects of the model for use with each of the supported suites. Additional information on Stochastic Physics options is available :doc:`here `.
.. note::
SPP is currently only available for specific physics schemes used in the RAP/HRRR physics suite. Users need to be aware of which physics suite definition file (:term:`SDF`) is chosen when turning this option on. Among the supported physics suites, the full set of parameterizations can only be used with the ``FV3_HRRR`` option for ``CCPP_PHYS_SUITE``.
-Additionally, a CCPP single-column model (`CCPP-SCM `__) option has also been developed as a child repository. Users can refer to the `CCPP Single Column Model User and Technical Guide `__ for more details. This CCPP-SCM user guide contains a Quick Start Guide with instructions for obtaining the code, compiling, and running test cases, which include five standard test cases and two additional FV3 replay cases (refer to section 5.2 in the CCPP-SCM user guide for more details). Moreover, the CCPP-SCM supports a precompiled version in a docker container, allowing it to be easily executed on NOAA's cloud computing platforms without any issues (see section 2.5 in the CCPP-SCM user guide for more details).
+Additionally, a CCPP single-column model (`CCPP-SCM `_) option has also been developed as a child repository. Users can refer to the `CCPP Single Column Model User and Technical Guide `_ for more details. This CCPP-SCM user guide contains a Quick Start Guide with instructions for obtaining the code, compiling, and running test cases, which include five standard test cases and two additional FV3 replay cases (refer to section 5.2 in the CCPP-SCM user guide for more details). Moreover, the CCPP-SCM supports a precompiled version in a docker container, allowing it to be easily executed on NOAA's cloud computing platforms without any issues (see section 2.5 in the CCPP-SCM user guide for more details).
The SRW App supports the use of both :term:`GRIB2` and :term:`NEMSIO` input data. The UFS Weather Model ingests initial and lateral boundary condition files produced by :term:`chgres_cube` and outputs files in netCDF format on a specific projection (e.g., Lambert Conformal) in the horizontal direction and model levels in the vertical direction.
@@ -57,17 +57,17 @@ The Unified Post Processor (:term:`UPP`) processes raw output from a variety of
METplus Verification Suite
=============================
-The Model Evaluation Tools (MET) package is a set of statistical verification tools developed by the `Developmental Testbed Center `__ (DTC) for use by the :term:`NWP` community to help them assess and evaluate the performance of numerical weather predictions. MET is the core component of the enhanced `METplus `__ verification framework; the suite also includes the associated database and display systems called METviewer and METexpress.
+The Model Evaluation Tools (MET) package is a set of statistical verification tools developed by the `Developmental Testbed Center `_ (DTC) for use by the :term:`NWP` community to help them assess and evaluate the performance of numerical weather predictions. MET is the core component of the enhanced `METplus `_ verification framework; the suite also includes the associated database and display systems called METviewer and METexpress.
-The METplus verification framework has been integrated into the SRW App to facilitate forecast evaluation. METplus is a verification framework that spans a wide range of temporal scales (warn-on-forecast to climate) and spatial scales (storm to global). It is supported by the `Developmental Testbed Center (DTC) `__.
+The METplus verification framework has been integrated into the SRW App to facilitate forecast evaluation. METplus is a verification framework that spans a wide range of temporal scales (warn-on-forecast to climate) and spatial scales (storm to global). It is supported by the `Developmental Testbed Center (DTC) `_.
METplus comes preinstalled with :term:`spack-stack` but can also be installed on other systems individually or as part of :term:`HPC-Stack` installation. Users on systems without a previous installation of METplus can follow the :ref:`MET Installation Guide ` and :ref:`METplus Installation Guide ` for individual installation. Currently, METplus *installation* is only supported as part of spack-stack installation; users attempting to install METplus individually or as part of HPC-Stack will need to direct assistance requests to the METplus team. However, METplus *use* is supported on any system with a functioning METplus installation.
-The core components of the METplus framework include the statistical driver (MET), the associated database and display systems known as METviewer and METexpress, and a suite of Python wrappers to provide low-level automation and examples, also called use cases. MET is a set of verification tools developed for use by the :term:`NWP` community. It matches up gridded forecast fields with either gridded analyses or point observations and applies configurable methods to compute statistics and diagnostics. Extensive documentation is available in the :doc:`METplus User's Guide ` and :doc:`MET User's Guide `. Documentation for all other components of the framework can be found at the *Documentation* link for each component on the METplus `downloads `__ page.
+The core components of the METplus framework include the statistical driver (MET), the associated database and display systems known as METviewer and METexpress, and a suite of Python wrappers to provide low-level automation and examples, also called use cases. MET is a set of verification tools developed for use by the :term:`NWP` community. It matches up gridded forecast fields with either gridded analyses or point observations and applies configurable methods to compute statistics and diagnostics. Extensive documentation is available in the :doc:`METplus User's Guide ` and :doc:`MET User's Guide `. Documentation for all other components of the framework can be found at the *Documentation* link for each component on the METplus `downloads `_ page.
Among other techniques, MET provides the capability to compute standard verification scores for comparing deterministic gridded model data to point-based and gridded observations. It also provides ensemble and probabilistic verification methods for comparing gridded model data to point-based or gridded observations. Verification tasks to accomplish these comparisons are defined in the SRW App in :numref:`Table %s `. Currently, the SRW App supports the use of :term:`NDAS` observation files (which include conventional point-based surface and upper-air data) `in prepBUFR format `__ for point-based verification. It also supports gridded Climatology-Calibrated Precipitation Analysis (:term:`CCPA`) data for accumulated precipitation evaluation and Multi-Radar/Multi-Sensor (:term:`MRMS`) gridded analysis data for composite reflectivity and :term:`echo top` verification.
-METplus is being actively developed by :term:`NCAR`/Research Applications Laboratory (RAL), NOAA/Earth Systems Research Laboratories (`ESRL `__), and NOAA/Environmental Modeling Center (:term:`EMC`), and it is open to community contributions. More details about METplus can be found on the `METplus website `__.
+METplus is being actively developed by :term:`NCAR`/Research Applications Laboratory (RAL), NOAA/Earth Systems Research Laboratories (`ESRL `__), and NOAA/Environmental Modeling Center (:term:`EMC`), and it is open to community contributions. More details about METplus can be found on the `METplus website `_.
Air Quality Modeling (AQM) Utilities
=======================================
diff --git a/doc/UsersGuide/BackgroundInfo/Introduction.rst b/doc/UsersGuide/BackgroundInfo/Introduction.rst
index f1a384e025..4b0978ef23 100644
--- a/doc/UsersGuide/BackgroundInfo/Introduction.rst
+++ b/doc/UsersGuide/BackgroundInfo/Introduction.rst
@@ -4,9 +4,9 @@
Introduction
==============
-The Unified Forecast System (:term:`UFS`) is a community-based, coupled, comprehensive Earth modeling system. NOAA's operational model suite for numerical weather prediction (:term:`NWP`) is quickly transitioning to the UFS from a number of different modeling systems. The UFS enables research, development, and contribution opportunities within the broader :term:`Weather Enterprise` (including government, industry, and academia). For more information about the UFS, visit the `UFS Portal `__.
+The Unified Forecast System (:term:`UFS`) is a community-based, coupled, comprehensive Earth modeling system. NOAA's operational model suite for numerical weather prediction (:term:`NWP`) is quickly transitioning to the UFS from a number of different modeling systems. The UFS enables research, development, and contribution opportunities within the broader :term:`Weather Enterprise` (including government, industry, and academia). For more information about the UFS, visit the `UFS Portal `_.
-The UFS includes `multiple applications `__ that support different forecast durations and spatial domains. This documentation describes the UFS Short-Range Weather (SRW) Application, which targets predictions of atmospheric behavior on a limited spatial domain and on time scales from minutes to several days. The most recent SRW Application includes a prognostic atmospheric model, pre- and post-processing, and a community workflow for running the system end-to-end. These components are documented within this User's Guide and supported through the `GitHub Discussions `__ forum. The SRW App also includes support for a verification package (METplus) for both deterministic and ensemble simulations and support for four stochastically perturbed physics schemes.
+The UFS includes `multiple applications `_ that support different forecast durations and spatial domains. This documentation describes the UFS Short-Range Weather (SRW) Application, which targets predictions of atmospheric behavior on a limited spatial domain and on time scales from minutes to several days. The most recent SRW Application includes a prognostic atmospheric model, pre- and post-processing, and a community workflow for running the system end-to-end. These components are documented within this User's Guide and supported through the `GitHub Discussions `_ forum. The SRW App also includes support for a verification package (METplus) for both deterministic and ensemble simulations and support for four stochastically perturbed physics schemes.
Since the last release, developers have added a variety of features:
diff --git a/doc/UsersGuide/BuildingRunningTesting/BuildSRW.rst b/doc/UsersGuide/BuildingRunningTesting/BuildSRW.rst
index 3076a5f6eb..73334828da 100644
--- a/doc/UsersGuide/BuildingRunningTesting/BuildSRW.rst
+++ b/doc/UsersGuide/BuildingRunningTesting/BuildSRW.rst
@@ -35,7 +35,7 @@ Install the Prerequisite Software Stack
Users on any sufficiently up-to-date machine with a UNIX-based operating system should be able to install the prerequisite software stack and run the SRW Application. However, a list of prerequisites is available in :numref:`Section %s ` for reference. Users should install or update their system as required before attempting to install the software stack.
-Currently, installation of the prerequisite software stack is supported via spack-stack on most systems. :term:`Spack-stack` is a :term:`repository` that provides a Spack-based system to build the software stack required for `UFS `__ applications such as the SRW App. Spack-stack is the software stack validated by the UFS Weather Model (:term:`WM`), and the SRW App has likewise shifted to spack-stack for most Level 1 systems.
+Currently, installation of the prerequisite software stack is supported via spack-stack on most systems. :term:`Spack-stack` is a :term:`repository` that provides a Spack-based system to build the software stack required for `UFS `_ applications such as the SRW App. Spack-stack is the software stack validated by the UFS Weather Model (:term:`WM`), and the SRW App has likewise shifted to spack-stack for most Level 1 systems.
.. hint::
Skip the spack-stack installation if working on a :srw-wiki:`Level 1 system ` (e.g., Hera, Jet, Derecho, NOAA Cloud), and :ref:`continue to the next section `.
diff --git a/doc/UsersGuide/BuildingRunningTesting/Quickstart.rst b/doc/UsersGuide/BuildingRunningTesting/Quickstart.rst
index 3e58d6117f..8f31dd9b1a 100644
--- a/doc/UsersGuide/BuildingRunningTesting/Quickstart.rst
+++ b/doc/UsersGuide/BuildingRunningTesting/Quickstart.rst
@@ -50,13 +50,9 @@ For a detailed explanation of how to build and run the SRW App on any supported
#. Load the Python environment for the workflow. Users on Level 2-4 systems will need to use one of the existing ``wflow_<platform>`` modulefiles (e.g., ``wflow_macos``) and adapt it to their system. Then, run:
- .. code-block:: console
-
- source /path/to/ufs-srweather-app/etc/lmod-setup.sh
- module use /path/to/ufs-srweather-app/modulefiles
- module load wflow_
-
- where ```` refers to a valid machine name (see :numref:`Section %s `). After loading the workflow, users should follow the instructions printed to the console. For example, if the output says:
+ .. include:: ../../doc-snippets/load-env.rst
+
+ After loading the workflow, users should follow the instructions printed to the console. For example, if the output says:
.. code-block:: console
diff --git a/doc/UsersGuide/BuildingRunningTesting/RunSRW.rst b/doc/UsersGuide/BuildingRunningTesting/RunSRW.rst
index 0eb10e1519..bea4ab59aa 100644
--- a/doc/UsersGuide/BuildingRunningTesting/RunSRW.rst
+++ b/doc/UsersGuide/BuildingRunningTesting/RunSRW.rst
@@ -134,13 +134,8 @@ Loading the Workflow Environment
The |wflow_env| conda/Python environment can be activated in the following way:
-.. code-block:: console
-
- source /path/to/ufs-srweather-app/etc/lmod-setup.sh
- module use /path/to/ufs-srweather-app/modulefiles
- module load wflow_
-
-where ```` refers to a valid machine name (see :numref:`Section %s ` for ``MACHINE`` options). In a csh shell environment, users should replace ``lmod-setup.sh`` with ``lmod-setup.csh``.
+.. include:: ../../doc-snippets/load-env.rst
+
+In a csh shell environment, users should replace ``lmod-setup.sh`` with ``lmod-setup.csh``.
.. note::
If users source the lmod-setup file on a system that doesn't need it, it will not cause any problems (it will simply do a ``module purge``).
@@ -155,7 +150,7 @@ The ``wflow_`` modulefile will then output instructions to activate th
then the user should run |activate|. This activates the |wflow_env| conda environment, and the user typically sees |prompt| in front of the Terminal prompt at this point.
.. note::
- If users do not use the wflow module to load conda, ``conda`` will need to be initialized before running ``conda activate srw_app`` command. Depending on the user's system and login setup, this may be accomplished in a variety of ways. Conda initialization usually involves the following command: ``source <conda_basedir>/etc/profile.d/conda.sh``, where ``<conda_basedir>`` is the base conda installation directory and by default will be the full path to ``ufs-srweather-app/conda``.
+ If users do not use the ``wflow_<platform>`` module to load conda, ``conda`` will need to be initialized before running the ``conda activate srw_app`` command. Depending on the user's system and login setup, this may be accomplished in a variety of ways. Conda initialization usually involves the following command: ``source <conda_basedir>/etc/profile.d/conda.sh``, where ``<conda_basedir>`` is the base conda installation directory and by default will be the full path to ``ufs-srweather-app/conda``.
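+
+ A minimal sketch of this initialization, assuming the default base directory (paths are placeholders; adjust them for your system):
+
+ .. code-block:: console
+
+    source /path/to/ufs-srweather-app/conda/etc/profile.d/conda.sh
+    conda activate srw_app
+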
After loading the workflow environment, users may continue to :numref:`Section %s ` for instructions on setting the experiment configuration parameters.
@@ -690,7 +685,7 @@ More information about configuring the ``rocoto:`` section can be found in :numr
If users have access to NOAA :term:`HPSS` but have not pre-staged the data, the default ``verify_pre.yaml`` taskgroup will activate the tasks, and the workflow will attempt to download the appropriate data from NOAA HPSS. In this case, the ``*_OBS_DIR`` paths must be set to the location where users want the downloaded data to reside.
-Users who do not have access to NOAA HPSS and do not have the data on their system will need to download :term:`CCPA`, :term:`MRMS`, and :term:`NDAS` data manually from collections of publicly available data, such as the ones listed `here `__.
+Users who do not have access to NOAA HPSS and do not have the data on their system will need to download :term:`CCPA`, :term:`MRMS`, and :term:`NDAS` data manually from collections of publicly available data.
Users who have already staged the observation data needed for METplus (i.e., the :term:`CCPA`, :term:`MRMS`, and :term:`NDAS` data) on their system should set the path to this data in ``config.yaml``.
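+
+A sketch of what these settings might look like in ``config.yaml`` (the paths below are placeholders; this assumes the observation path variables ``CCPA_OBS_DIR``, ``MRMS_OBS_DIR``, and ``NDAS_OBS_DIR`` under the ``platform:`` section):
+
+.. code-block:: console
+
+   platform:
+     CCPA_OBS_DIR: /path/to/obs/ccpa
+     MRMS_OBS_DIR: /path/to/obs/mrms
+     NDAS_OBS_DIR: /path/to/obs/ndas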
diff --git a/doc/UsersGuide/BuildingRunningTesting/Tutorial.rst b/doc/UsersGuide/BuildingRunningTesting/Tutorial.rst
index a21b7aa9bd..2b7f169711 100644
--- a/doc/UsersGuide/BuildingRunningTesting/Tutorial.rst
+++ b/doc/UsersGuide/BuildingRunningTesting/Tutorial.rst
@@ -11,7 +11,7 @@ Users can run through the entire set of tutorials or jump to the one that intere
#. :ref:`Severe Weather Over Indianapolis `: Change physics suites and compare graphics plots.
#. :ref:`Cold Air Damming `: Coming soon!
#. :ref:`Southern Plains Winter Weather Event `: Coming soon!
- #. :ref:`Halloween Storm `: Coming soon!
+ #. :ref:`Halloween Storm `: Change :term:`IC/LBC ` sources and compare results.
#. :ref:`Hurricane Barry `: Coming soon!
Each section provides a summary of the weather event and instructions for configuring an experiment.
@@ -41,7 +41,7 @@ A surface boundary associated with a vorticity maximum over the northern Great P
Data
-------
-On :srw-wiki:`Level 1 ` systems, users can find data for the Indianapolis Severe Weather Forecast in the usual input model data locations (see :numref:`Section %s ` for a list). The data can also be downloaded from the `UFS SRW Application Data Bucket `__.
+On :srw-wiki:`Level 1 ` systems, users can find data for the Indianapolis Severe Weather Forecast in the usual input model data locations (see :numref:`Section %s ` for a list). The data can also be downloaded from the `UFS SRW Application Data Bucket `_.
* FV3GFS data for the first forecast (``control``) is located at:
@@ -55,15 +55,9 @@ On :srw-wiki:`Level 1 ` systems, users can fi
Load the Workflow
--------------------
-To load the workflow environment, source the lmod-setup file. Then load the workflow conda environment. From the ``ufs-srweather-app`` directory, run:
+To load the workflow environment, source the lmod-setup file and load the workflow conda environment by running:
-.. code-block:: console
-
- source etc/lmod-setup.sh # OR: source etc/lmod-setup.csh when running in a csh/tcsh shell
- module use modulefiles
- module load wflow_
-
-where ```` is a valid, lowercased machine name (see ``MACHINE`` in :numref:`Section %s ` for valid values).
+.. include:: ../../doc-snippets/load-env.rst
After loading the workflow, users should follow the instructions printed to the console. Usually, the instructions will tell the user to run |activate|. For example, a user on Hera with permissions on the ``nems`` project may issue the following commands to load the workflow (replacing ``User.Name`` with their actual username):
@@ -77,35 +71,14 @@ After loading the workflow, users should follow the instructions printed to the
Configuration
-------------------------
-Navigate to the ``ufs-srweather-app/ush`` directory. The default (or "control") configuration for this experiment is based on the ``config.community.yaml`` file in that directory. Users can copy this file into ``config.yaml`` if they have not already done so:
-
-.. code-block:: console
-
- cd /path/to/ufs-srweather-app/ush
- cp config.community.yaml config.yaml
-
-Users can save the location of the ``ush`` directory in an environment variable (``$USH``). This makes it easier to navigate between directories later. For example:
-
-.. code-block:: console
-
- export USH=/path/to/ufs-srweather-app/ush
-
-Users should substitute ``/path/to/ufs-srweather-app/ush`` with the actual path on their system. As long as a user remains logged into their system, they can run ``cd $USH``, and it will take them to the ``ush`` directory. The variable will need to be reset for each login session.
+.. include:: ../../doc-snippets/expt-conf-intro.rst
Experiment 1: Control
^^^^^^^^^^^^^^^^^^^^^^^^
Edit the configuration file (``config.yaml``) to include the variables and values in the sample configuration excerpts below.
-.. Hint::
-
- To open the configuration file in the command line, users may run the command:
-
- .. code-block:: console
-
- vi config.yaml
-
- To modify the file, hit the ``i`` key and then make any changes required. To close and save, hit the ``esc`` key and type ``:wq`` to write the changes to the file and exit/quit the file. Users may opt to use their preferred code editor instead.
+.. include:: ../../doc-snippets/file-edit-hint.rst
Start in the ``user:`` section and change the ``MACHINE`` and ``ACCOUNT`` variables. For example, when running on a personal MacOS device, users might set:
@@ -138,11 +111,9 @@ In the ``workflow:`` section of ``config.yaml``, update ``EXPT_SUBDIR`` and ``PR
.. _CronNote:
-.. note::
-
- Users may also want to set ``USE_CRON_TO_RELAUNCH: true`` and add ``CRON_RELAUNCH_INTVL_MNTS: 3``. This will automate submission of workflow tasks when running the experiment. However, not all systems have :term:`cron`.
+.. include:: ../../doc-snippets/cron-note.rst
-``EXPT_SUBDIR:`` This variable can be changed to any name the user wants from "gfsv16_physics_fcst" to "forecast1" to "a;skdfj". However, the best names will indicate useful information about the experiment. This tutorial uses ``control`` to establish a baseline, or "control", forecast. Since this tutorial helps users to compare the output from two different forecasts --- one that uses the FV3_GFS_v16 physics suite and one that uses the FV3_RRFS_v1beta physics suite --- "gfsv16_physics_fcst" could be a good alternative directory name.
+``EXPT_SUBDIR:`` This variable can be changed to any name the user wants from "gfsv16_physics_fcst" to "forecast1" to "askdfj" (but note that whitespace and some punctuation characters are not allowed). However, the best names will indicate useful information about the experiment. This tutorial uses ``control`` to establish a baseline, or "control", forecast. Since this tutorial helps users to compare the output from two different forecasts --- one that uses the FV3_GFS_v16 physics suite and one that uses the FV3_RRFS_v1beta physics suite --- "gfsv16_physics_fcst" could be a good alternative directory name.
``PREDEF_GRID_NAME:`` This experiment uses the SUBCONUS_Ind_3km grid, rather than the default RRFS_CONUS_25km grid. The SUBCONUS_Ind_3km grid is a high-resolution grid (with grid cell size of approximately 3km) that covers a small area of the U.S. centered over Indianapolis, IN. For more information on this grid, see :numref:`Section %s `.
@@ -251,8 +222,6 @@ Once the control case is running, users can return to the ``config.yaml`` file (
Later, users may want to conduct additional experiments using the FV3_HRRR and FV3_WoFS_v0 physics suites. Like FV3_RRFS_v1beta, these physics suites were designed for use with high-resolution grids for storm-scale predictions.
-.. COMMENT: Maybe also FV3_RAP?
-
Next, users will need to modify the data parameters in ``task_get_extrn_ics:`` and ``task_get_extrn_lbcs:`` to use HRRR and RAP data rather than FV3GFS data. Users will need to change the following lines in each section:
.. code-block:: console
@@ -331,17 +300,7 @@ Navigate to ``test_expt/2019061518/postprd``. This directory contains the post-p
Copy ``.png`` Files onto Local System
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-Users who are working on the cloud or on an HPC cluster may want to copy the ``.png`` files onto their local system to view in their preferred image viewer. Detailed instructions are available in the :ref:`Introduction to SSH & Data Transfer `.
-
-In summary, users can run the ``scp`` command in a new terminal/command prompt window to securely copy files from a remote system to their local system if an SSH tunnel is already established between the local system and the remote system. Users can adjust one of the following commands for their system:
-
-.. code-block:: console
-
- scp username@your-IP-address:/path/to/source_file_or_directory /path/to/destination_file_or_directory
- # OR
- scp -P 12345 username@localhost:/path/to/source_file_or_directory /path/to/destination_file_or_directory
-
-Users would need to modify ``username``, ``your-IP-address``, ``-P 12345``, and the file paths to reflect their systems' information. See the :ref:`Introduction to SSH & Data Transfer ` for example commands.
+.. include:: ../../doc-snippets/scp-files.rst
.. _ComparePlots:
@@ -555,18 +514,20 @@ A polar vortex brought arctic air to much of the U.S. and Mexico. A series of co
*Southern Plains Winter Weather Event Over Oklahoma City*
-.. COMMENT: Upload a png to the SRW wiki and change the hyperlink to point to that.
-
Tutorial Content
-------------------
-Coming Soon!
+Coming Soon!
.. _fcst4:
Sample Forecast #4: Halloween Storm
=======================================
+**Objective:**
+ * Compare forecast outputs for similar experiments that use different :term:`IC/LBC ` sources.
+ * Coming soon: Option to use verification tools to assess forecast quality.
+
Weather Summary
--------------------
@@ -574,17 +535,329 @@ A line of severe storms brought strong winds, flash flooding, and tornadoes to t
**Weather Phenomena:** Flooding and high winds
- * `Storm Prediction Center (SPC) Storm Report for 20191031 `__
+ * `Storm Prediction Center (SPC) Storm Report for 20191031 `_
-.. figure:: https://github.com/ufs-community/ufs-srweather-app/wiki/Tutorial/HalloweenStorm.jpg
+.. figure:: https://github.com/ufs-community/ufs-srweather-app/wiki/Tutorial/HalloweenStorm.gif
:alt: Radar animation of the Halloween Storm that swept across the Eastern United States in 2019.
*Halloween Storm 2019*
-Tutorial Content
--------------------
+Data
+-------
-Coming Soon!
+Data for the Halloween Storm is publicly available in S3 data buckets. The Rapid Refresh (`RAP `_) data can be downloaded from the `SRW App data bucket `_ using ``wget``. Make sure to issue the command from the folder where you want to place the data.
+
+.. code-block:: console
+
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/halloween_rap.tgz
+ tar -xzf halloween_rap.tgz
+
+This will untar the ``halloween_rap.tgz`` data into a directory named ``RAP``.
+
+The SRW App can pull HRRR data directly from the `HRRR data bucket `_. Users do not need to download the data separately.
+
+Load the Workflow
+---------------------
+
+To load the workflow environment, source the lmod-setup file and load the workflow conda environment by running:
+
+.. include:: ../../doc-snippets/load-env.rst
+
+After loading the workflow, users should follow the instructions printed to the console. Usually, the instructions will tell the user to run |activate|. For example, a user on Hera with permissions on the ``nems`` project may issue the following commands to load the workflow (replacing ``User.Name`` with their actual username):
+
+.. code-block:: console
+
+ source /scratch1/NCEPDEV/nems/User.Name/ufs-srweather-app/etc/lmod-setup.sh hera
+ module use /scratch1/NCEPDEV/nems/User.Name/ufs-srweather-app/modulefiles
+ module load wflow_hera
+ conda activate srw_app
+
+Configuration
+-------------------------
+
+.. include:: ../../doc-snippets/expt-conf-intro.rst
+
+Experiment 1: RAP Data
+^^^^^^^^^^^^^^^^^^^^^^^^
+
+Edit the configuration file (``config.yaml``) to include the variables and values in the sample configuration excerpts below.
+
+.. include:: ../../doc-snippets/file-edit-hint.rst
+
+Start in the ``user:`` section and change the ``MACHINE`` and ``ACCOUNT`` variables. For example, when running on a personal MacOS device, users might set:
+
+.. code-block:: console
+
+ user:
+ RUN_ENVIR: community
+ MACHINE: macos
+ ACCOUNT: none
+
+For a detailed description of these variables, see :numref:`Section %s `.
+
+Users do not need to change the ``platform:`` section of the configuration file for this tutorial.
+
+In the ``workflow:`` section of ``config.yaml``, update ``EXPT_SUBDIR``, ``CCPP_PHYS_SUITE``, ``PREDEF_GRID_NAME``, ``DATE_FIRST_CYCL``, ``DATE_LAST_CYCL``, and ``FCST_LEN_HRS``.
+
+.. code-block:: console
+
+ workflow:
+ USE_CRON_TO_RELAUNCH: false
+ EXPT_SUBDIR: halloweenRAP
+ CCPP_PHYS_SUITE: FV3_RAP
+ PREDEF_GRID_NAME: RRFS_CONUS_13km
+ DATE_FIRST_CYCL: '2019103012'
+ DATE_LAST_CYCL: '2019103012'
+ FCST_LEN_HRS: 36
+ PREEXISTING_DIR_METHOD: rename
+ VERBOSE: true
+ COMPILER: intel
+
+.. include:: ../../doc-snippets/cron-note.rst
+
+``EXPT_SUBDIR:`` This variable can be changed to any name the user wants from "halloweenRAP" to "HalloweenStorm1" to "askdfj" (but note that whitespace and some punctuation characters are not allowed). However, the best names will indicate useful information about the experiment. Since this tutorial helps users to compare the output from RAP and HRRR forecast input data, this tutorial will use ``halloweenRAP`` for the Halloween Storm experiment that uses RAP forecast data.
+
+``PREDEF_GRID_NAME:`` This experiment uses the RRFS_CONUS_13km grid, rather than the default RRFS_CONUS_25km grid. This 13-km resolution is used in the NOAA operational Rapid Refresh (`RAP `_) model and is the resolution envisioned for the initial operational implementation of the Rapid Refresh Forecast System (:term:`RRFS`). For more information on this grid, see :numref:`Section %s `.
+
+``CCPP_PHYS_SUITE:`` The FV3_RAP physics suite contains the evolving :term:`parameterizations` used operationally in the NOAA Rapid Refresh (`RAP `_) model; the suite is also a prime candidate under consideration for initial RRFS implementation and has been well-tested at the 13-km resolution. It is therefore an appropriate physics choice when using the RRFS_CONUS_13km grid.
+
+``DATE_FIRST_CYCL``, ``DATE_LAST_CYCL``, and ``FCST_LEN_HRS`` set parameters related to the date and duration of the forecast. Because this is a one-cycle experiment that does not use cycling or :term:`data assimilation`, the dates of the first :term:`cycle` and the last cycle are the same.
+
+For a detailed description of other ``workflow:`` variables, see :numref:`Section %s `.
+
+In the ``task_get_extrn_ics:`` section, add ``USE_USER_STAGED_EXTRN_FILES`` and ``EXTRN_MDL_SOURCE_BASEDIR_ICS``. Users will need to adjust the file path to point to the location of the data on their system.
+
+.. code-block:: console
+
+ task_get_extrn_ics:
+ EXTRN_MDL_NAME_ICS: RAP
+ USE_USER_STAGED_EXTRN_FILES: true
+ EXTRN_MDL_SOURCE_BASEDIR_ICS: /path/to/RAP/for_ICS
+
+For a detailed description of the ``task_get_extrn_ics:`` variables, see :numref:`Section %s `.
+
+Similarly, in the ``task_get_extrn_lbcs:`` section, add ``USE_USER_STAGED_EXTRN_FILES`` and ``EXTRN_MDL_SOURCE_BASEDIR_LBCS``. Users will need to adjust the file path to point to the location of the data on their system.
+
+.. code-block:: console
+
+ task_get_extrn_lbcs:
+ EXTRN_MDL_NAME_LBCS: RAP
+ LBC_SPEC_INTVL_HRS: 3
+ USE_USER_STAGED_EXTRN_FILES: true
+ EXTRN_MDL_SOURCE_BASEDIR_LBCS: /path/to/RAP/for_LBCS
+
+For a detailed description of the ``task_get_extrn_lbcs:`` variables, see :numref:`Section %s `.
+
+Users do not need to modify the ``task_run_fcst:`` section for this tutorial.
+
+.. COMMENT: Do we need to set QUILTING to true?
+
+In the ``rocoto:tasks:`` section, increase the walltime for the data-related tasks and metatasks. Then include the YAML configuration file containing the plotting task in the ``rocoto:tasks:taskgroups:`` section, like this:
+
+.. code-block:: console
+
+ rocoto:
+ tasks:
+ task_get_extrn_ics:
+ walltime: 06:00:00
+ task_get_extrn_lbcs:
+ walltime: 06:00:00
+ metatask_run_ensemble:
+ task_make_lbcs_mem#mem#:
+ walltime: 06:00:00
+ task_run_fcst_mem#mem#:
+ walltime: 06:00:00
+ taskgroups: '{{ ["parm/wflow/prep.yaml", "parm/wflow/coldstart.yaml", "parm/wflow/post.yaml", "parm/wflow/plot.yaml"]|include }}'
+
+.. note::
+
+ Rocoto tasks are run once each. A :ref:`Rocoto ` metatask expands into one or more similar tasks by replacing the values between ``#`` symbols with the values under the ``var:`` key. See the `Rocoto documentation `_ for more information.
+
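+ For example, here is a sketch of how a metatask expands (illustrative only; not taken from this workflow's generated XML):
+
+ .. code-block:: console
+
+    <metatask name="run_ensemble">
+      <var name="mem">001 002</var>
+      <task name="run_fcst_mem#mem#"> ... </task>
+    </metatask>
+    <!-- expands to two tasks: run_fcst_mem001 and run_fcst_mem002 -->
+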
+For more information on how to turn on/off tasks in the workflow, please see :numref:`Section %s `.
+
+In the ``task_plot_allvars:`` section, add ``PLOT_FCST_INC: 6``. Users may also want to add ``PLOT_FCST_START: 0`` and ``PLOT_FCST_END: 36`` explicitly, but these can be omitted since the default values are the same as the forecast start and end times, respectively.
+
+.. code-block:: console
+
+ task_plot_allvars:
+ COMOUT_REF: ""
+ PLOT_FCST_INC: 6
+
+``PLOT_FCST_INC:`` This variable indicates the forecast hour increment for the plotting task. By setting the value to ``6``, the task will generate a ``.png`` file for every 6th forecast hour starting from 12z on October 30, 2019 (the 0th forecast hour) through the 36th forecast hour (November 1, 2019 at 0z).
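+
+For reference, a fully explicit version of this excerpt, using the default start and end values stated above, would be:
+
+.. code-block:: console
+
+   task_plot_allvars:
+     COMOUT_REF: ""
+     PLOT_FCST_START: 0
+     PLOT_FCST_INC: 6
+     PLOT_FCST_END: 36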
+
+After configuring the forecast, users can generate the forecast by running:
+
+.. code-block:: console
+
+ ./generate_FV3LAM_wflow.py
+
+To see experiment progress, users should navigate to their experiment directory. Then, use the ``rocotorun`` command to launch new workflow tasks and ``rocotostat`` to check on experiment progress.
+
+.. code-block:: console
+
+ cd /path/to/expt_dirs/halloweenRAP
+ rocotorun -w FV3LAM_wflow.xml -d FV3LAM_wflow.db -v 10
+ rocotostat -w FV3LAM_wflow.xml -d FV3LAM_wflow.db -v 10
+
+Users will need to rerun the ``rocotorun`` and ``rocotostat`` commands above regularly and repeatedly to continue submitting workflow tasks and receiving progress updates.
+
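+Users who are not using cron and prefer not to retype these commands can wrap them in a simple shell loop. This is only a convenience sketch; the 180-second interval mirrors the 3-minute cron interval suggested above:
+
+.. code-block:: console
+
+   # Relaunch tasks and print status every 3 minutes until interrupted with Ctrl-C
+   while true; do
+     rocotorun -w FV3LAM_wflow.xml -d FV3LAM_wflow.db -v 10
+     rocotostat -w FV3LAM_wflow.xml -d FV3LAM_wflow.db -v 10
+     sleep 180
+   done
+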
+.. note::
+
+ When using cron to automate the workflow submission (as described :ref:`above `), users can omit the ``rocotorun`` command and simply use ``rocotostat`` to check on progress periodically.
+
+Users can save the location of the ``halloweenRAP`` directory in an environment variable (e.g., ``$HRAP``). This makes it easier to navigate between directories later. For example:
+
+.. code-block:: console
+
+ export HRAP=/path/to/expt_dirs/halloweenRAP
+
+Users should substitute ``/path/to/expt_dirs/halloweenRAP`` with the actual path to the experiment directory on their system. As long as a user remains logged into their system, they can run ``cd $HRAP``, and it will take them to the ``halloweenRAP`` experiment directory. The variable will need to be reset for each login session.
+
+Experiment 2: Changing the Forecast Input
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Once the ``halloweenRAP`` case is running, users can return to the ``config.yaml`` file (in ``$USH``) and adjust the parameters for a new forecast. In this forecast, users will change the forecast input to use ``HRRR`` data and alter a few associated parameters.
+
+In the ``workflow:`` section of ``config.yaml``, update ``EXPT_SUBDIR`` and ``PREDEF_GRID_NAME``. Other parameters should remain the same.
+
+.. code-block:: console
+
+ workflow:
+ EXPT_SUBDIR: halloweenHRRR
+ PREDEF_GRID_NAME: RRFS_CONUScompact_13km
+
+.. note::
+
+ Relative to the original CONUS domain, the "compact" CONUS domains are slightly smaller. The original CONUS domains were a bit too large to run with :term:`LBCs` from HRRR, so the "compact" domains were created to be just small enough to work with HRRR data.
+
+In the ``task_get_extrn_ics:`` section, update the values for ``EXTRN_MDL_NAME_ICS`` and ``USE_USER_STAGED_EXTRN_FILES`` and add ``EXTRN_MDL_FILES_ICS``. Users may choose to comment out or remove ``EXTRN_MDL_SOURCE_BASEDIR_ICS``, but this is not necessary.
+
+.. code-block:: console
+
+ task_get_extrn_ics:
+ EXTRN_MDL_NAME_ICS: HRRR
+ USE_USER_STAGED_EXTRN_FILES: false
+ EXTRN_MDL_FILES_ICS:
+ - '{yy}{jjj}{hh}00{fcst_hr:02d}00'
+
+For a detailed description of the ``task_get_extrn_ics:`` variables, see :numref:`Section %s `.
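+
+To illustrate how the file name template expands (a worked example for this experiment's 2019-10-30 12z cycle): ``{yy}`` is the two-digit year (``19``), ``{jjj}`` is the three-digit day of year (``303``), ``{hh}`` is the cycle hour (``12``), and ``{fcst_hr:02d}`` is the zero-padded forecast hour, so forecast hour 0 expands to:
+
+.. code-block:: console
+
+   # {yy}{jjj}{hh}00{fcst_hr:02d}00 -> 19 + 303 + 12 + 00 + 00 + 00
+   1930312000000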
+
+Update the same values in the ``task_get_extrn_lbcs:`` section:
+
+.. code-block:: console
+
+ task_get_extrn_lbcs:
+ EXTRN_MDL_NAME_LBCS: HRRR
+ LBC_SPEC_INTVL_HRS: 3
+ USE_USER_STAGED_EXTRN_FILES: false
+ EXTRN_MDL_FILES_LBCS:
+ - '{yy}{jjj}{hh}00{fcst_hr:02d}00'
+
+For a detailed description of the ``task_get_extrn_lbcs:`` variables, see :numref:`Section %s `.
+
+After configuring the forecast, users can generate the second forecast by running:
+
+.. code-block:: console
+
+ ./generate_FV3LAM_wflow.py
+
+To see experiment progress, users should navigate to their experiment directory. As in the first forecast, the following commands allow users to launch new workflow tasks and check on experiment progress.
+
+.. code-block:: console
+
+ cd /path/to/expt_dirs/halloweenHRRR
+ rocotorun -w FV3LAM_wflow.xml -d FV3LAM_wflow.db -v 10
+ rocotostat -w FV3LAM_wflow.xml -d FV3LAM_wflow.db -v 10
+
+.. note::
+
+ When using cron to automate the workflow submission (as described :ref:`above `), users can omit the ``rocotorun`` command and simply use ``rocotostat`` to check on progress periodically.
+
+.. note::
+
+ If users have not automated their workflow using cron, they will need to ensure that they continue issuing ``rocotorun`` commands to launch all of the tasks in each experiment. While switching between experiment directories to run ``rocotorun`` and ``rocotostat`` commands in both directories is possible, it may be easier to finish the ``halloweenRAP`` experiment's tasks before starting on ``halloweenHRRR``.
+
+As with the ``halloweenRAP`` experiment, users can save the location of the ``halloweenHRRR`` directory in an environment variable (e.g., ``$HHRRR``). This makes it easier to navigate between directories later. For example:
+
+.. code-block:: console
+
+ export HHRRR=/path/to/expt_dirs/halloweenHRRR
+
+Users should substitute ``/path/to/expt_dirs/halloweenHRRR`` with the actual path on their system.
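+
+With both variables set, users can check on either experiment from anywhere on the system, for example (a sketch using the commands shown above):
+
+.. code-block:: console
+
+   cd $HRAP  && rocotostat -w FV3LAM_wflow.xml -d FV3LAM_wflow.db -v 10
+   cd $HHRRR && rocotostat -w FV3LAM_wflow.xml -d FV3LAM_wflow.db -v 10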
+
+
+How to Analyze Results
+-----------------------
+Navigate to ``halloweenHRRR/2019103012/postprd`` and/or ``halloweenRAP/2019103012/postprd``. These directories contain the post-processed data generated by the :term:`UPP` from the Halloween Storm forecasts. After the ``plot_allvars`` task completes, each directory will contain ``.png`` images for several forecast variables.
+
+Copy ``.png`` Files onto Local System
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+.. include:: ../../doc-snippets/scp-files.rst
+
+Examining Forecast Plots at Peak Intensity
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+This section examines plots from the HRRR- and RAP-initialized forecasts while the Halloween Storm is at or approaching peak intensity.
+
+.. _fcst4_250wind:
+
+250mb Wind
+``````````
+An effective weather forecast begins with analyzing a 250mb wind chart. By using this wind plot, forecasters can identify key features such as jet stream placement, jet maxima, troughs, ridges, and more. This analysis also helps pinpoint areas with the potential for the strongest severe weather.
+
+In the 250mb wind plots below, the ``halloweenHRRR`` and ``halloweenRAP`` plots are nearly identical at forecast hour f036, showing strong agreement between the models. Analyzing this chart, we can see multiple ingredients signaling a significant severe weather event over the eastern CONUS. The first thing to notice is the placement of the jet streak along with the troughing approaching the eastern US. Also notice the extreme 150KT jet max over southern Ohio, further fueling severe weather. The last thing to notice is the divergence aloft over the eastern CONUS; seeing divergence present all the way up to 250mb indicates a strong system.
+
+.. figure:: https://github.com/ufs-community/ufs-srweather-app/wiki/fcst4_plots/250wind_rap_conus_f036.png
+ :align: center
+ :width: 75%
+
+ *RAP Plot for 250mb Wind*
+
+.. figure:: https://github.com/ufs-community/ufs-srweather-app/wiki/fcst4_plots/250wind_hrrr_conus_f036.png
+ :align: center
+ :width: 75%
+
+ *HRRR Plot for 250mb Wind*
+
+.. _fcst4_10mwind:
+
+10m Wind
+``````````
+The 10m wind plots allow forecasters to pick up on patterns closer to the surface. They show features such as convergence and pressure areas.
+
+In the 10m wind plots below, the ``halloweenHRRR`` and ``halloweenRAP`` plots are once again very similar, which is consistent with the similarity of the 250mb wind plots. We can see a few key features on this chart. The most important is the area of convergence over the East Coast, which is driving the line of severe storms.
+
+.. figure:: https://github.com/ufs-community/ufs-srweather-app/wiki/fcst4_plots/10mwind_rap_conus_f036.png
+ :align: center
+ :width: 75%
+
+ *RAP Plot for 10m Winds*
+
+.. figure:: https://github.com/ufs-community/ufs-srweather-app/wiki/fcst4_plots/10mwind_hrrr_conus_f036.png
+ :align: center
+ :width: 75%
+
+ *HRRR Plot for 10m Winds*
+
+.. _fcst4_refc:
+
+Composite Reflectivity
+````````````````````````
+Reflectivity images visually represent the weather based on the energy (measured in decibels [dBZ]) reflected back from radar. Composite reflectivity generates an image based on reflectivity scans at multiple elevation angles, or "tilts", of the antenna. See https://www.noaa.gov/jetstream/reflectivity for a more detailed explanation of composite reflectivity.
+
+In the composite reflectivity plots below, the ``halloweenHRRR`` and ``halloweenRAP`` models remain quite similar, as expected. Utilizing the reflectivity plots provides the final piece of the puzzle. From the previous analyses, we already had a good understanding of where the storms were likely to occur. Composite reflectivity serves as an additional tool, allowing us to visualize where the models predict storm placement. In this case, the strongest storms are indicated by higher dBZ values and appear to be concentrated in the NC/VA region.
+
+.. figure:: https://github.com/ufs-community/ufs-srweather-app/wiki/fcst4_plots/refc_rap_conus_f036.png
+ :align: center
+ :width: 75%
+
+ *RAP Plot for Composite Reflectivity*
+
+.. figure:: https://github.com/ufs-community/ufs-srweather-app/wiki/fcst4_plots/refc_hrrr_conus_f036.png
+ :align: center
+ :width: 75%
+
+ *HRRR Plot for Composite Reflectivity*
.. _fcst5:
diff --git a/doc/UsersGuide/BuildingRunningTesting/VXCases.rst b/doc/UsersGuide/BuildingRunningTesting/VXCases.rst
index 2bf6f775d0..b36afcefd4 100644
--- a/doc/UsersGuide/BuildingRunningTesting/VXCases.rst
+++ b/doc/UsersGuide/BuildingRunningTesting/VXCases.rst
@@ -81,14 +81,9 @@ Save the path to this file in an ``INDYDATA`` environment variable:
Load the Workflow
^^^^^^^^^^^^^^^^^^^^
-First, navigate to the ``ufs-srweather-app/ush`` directory. Then, load the workflow environment:
-
-.. code-block:: console
-
- source /path/to/etc/lmod-setup.sh
- module use /path/to/ufs-srweather-app/modulefiles
- module load wflow_
+To load the workflow environment, run:
+
+.. include:: ../../doc-snippets/load-env.rst
+
Users running a csh/tcsh shell would run ``source /path/to/etc/lmod-setup.csh <platform>`` in place of the first command above.
After loading the workflow, users should follow the instructions printed to the console. Usually, the instructions will tell the user to run |activate|.
@@ -267,7 +262,7 @@ Point STAT Files
The Point-Stat files contain continuous variables like temperature, pressure, and wind speed. A description of the Point-Stat file can be found :ref:`here ` in the MET documentation.
-The Point-Stat files contain a potentially overwhelming amount of information. Therefore, it is recommended that users focus on the CNT MET test, which contains the `RMSE `__ and `MBIAS `__ statistics. The MET tests are defined in column 24 'LINE_TYPE' of the ``.stat`` file. Look for 'CNT' in this column. Then find column 66-68 for MBIAS and 78-80 for RMSE statistics. A full description of this file can be found :ref:`here `.
+The Point-Stat files contain a potentially overwhelming amount of information. Therefore, it is recommended that users focus on the CNT MET test, which contains the `RMSE `_ and `MBIAS `_ statistics. The MET tests are defined in column 24 'LINE_TYPE' of the ``.stat`` file. Look for 'CNT' in this column. Then find columns 66-68 for the MBIAS statistics and columns 78-80 for the RMSE statistics. A full description of this file can be found :ref:`here `.
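+
+As a quick sketch (the ``.stat`` file name below is a placeholder), the CNT lines and these columns can be pulled out with ``awk``:
+
+.. code-block:: console
+
+   # Print MBIAS (column 66) and RMSE (column 78) from CNT lines
+   awk '$24 == "CNT" { print $66, $78 }' point_stat_example.stat
+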
To narrow down the variable field even further, users can focus on these weather variables:
diff --git a/doc/UsersGuide/Reference/FAQ.rst b/doc/UsersGuide/Reference/FAQ.rst
index e8c3df0dec..b1c7bcce1f 100644
--- a/doc/UsersGuide/Reference/FAQ.rst
+++ b/doc/UsersGuide/Reference/FAQ.rst
@@ -281,12 +281,7 @@ How can I run a new experiment?
To run a new experiment at a later time, users need to rerun the commands in :numref:`Section %s ` that reactivate the |wflow_env| environment:
-.. code-block:: console
-
- source /path/to/etc/lmod-setup.sh/or/lmod-setup.csh
- module use /path/to/modulefiles
- module load wflow_
-
+.. include:: ../../doc-snippets/load-env.rst
+
Follow any instructions output by the console (e.g., |activate|).
Then, users can configure a new experiment by updating the experiment parameters in ``config.yaml`` to reflect the desired experiment configuration. Detailed instructions can be viewed in :numref:`Section %s `. Parameters and valid values are listed in :numref:`Section %s `. After adjusting the configuration file, generate the new experiment by running ``./generate_FV3LAM_wflow.py``. Check progress by navigating to the ``$EXPTDIR`` and running ``rocotostat -w FV3LAM_wflow.xml -d FV3LAM_wflow.db -v 10``.
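+
+In summary, a typical sequence for a new experiment might look like this (a sketch; paths are placeholders):
+
+.. code-block:: console
+
+   cd /path/to/ufs-srweather-app/ush
+   # edit config.yaml for the new experiment, then:
+   ./generate_FV3LAM_wflow.py
+   cd $EXPTDIR
+   rocotostat -w FV3LAM_wflow.xml -d FV3LAM_wflow.db -v 10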
diff --git a/doc/UsersGuide/Reference/Glossary.rst b/doc/UsersGuide/Reference/Glossary.rst
index 748d0f33b6..7ffc569b21 100644
--- a/doc/UsersGuide/Reference/Glossary.rst
+++ b/doc/UsersGuide/Reference/Glossary.rst
@@ -10,7 +10,7 @@ Glossary
To transport substances in the atmosphere by :term:`advection`.
advection
- According to the American Meteorological Society (AMS) `definition `__, advection is "The process of transport of an atmospheric property solely by the mass motion (velocity field) of the atmosphere." In common parlance, advection is movement of atmospheric substances that are carried around by the wind.
+ According to the American Meteorological Society (AMS) definition, `advection `_ is "The process of transport of an atmospheric property solely by the mass motion (velocity field) of the atmosphere." In common parlance, advection is movement of atmospheric substances that are carried around by the wind.
AQM
The `Air Quality Model `__ (AQM) is a UFS Application that dynamically couples the Community Multiscale Air Quality (:term:`CMAQ`) model with the UFS Weather Model through the :term:`NUOPC` Layer to simulate temporal and spatial variations of atmospheric compositions (e.g., ozone and aerosol compositions). The CMAQ, treated as a column chemistry model, updates concentrations of chemical species (e.g., ozone and aerosol compositions) at each integration time step. The transport terms (e.g., :term:`advection` and diffusion) of all chemical species are handled by the UFS Weather Model as :term:`tracers`.
@@ -22,7 +22,7 @@ Glossary
Climatology-Calibrated Precipitation Analysis (CCPA) data. This data is required for METplus precipitation verification tasks within the SRW App. The most recent 8 days' worth of data are publicly available and can be accessed `here `__.
CCPP
- The `Common Community Physics Package `__ is a forecast-model agnostic, vetted collection of code containing atmospheric physical parameterizations and suites of parameterizations for use in Numerical Weather Prediction (NWP) along with a framework that connects the physics to the host forecast model.
+ The `Common Community Physics Package `_ is a forecast-model agnostic, vetted collection of code containing atmospheric physical parameterizations and suites of parameterizations for use in Numerical Weather Prediction (NWP) along with a framework that connects the physics to the host forecast model.
chgres_cube
The preprocessing software used to create initial and boundary condition files to
@@ -78,10 +78,10 @@ Glossary
The radar-indicated top of an area of precipitation. Specifically, it contains the height of the 18 dBZ reflectivity value.
EMC
- The `Environmental Modeling Center `__.
-
+ The `Environmental Modeling Center `_.
+
EPIC
- The `Earth Prediction Innovation Center `__ seeks to accelerate scientific research and modeling contributions through continuous and sustained community engagement in order to produce the most accurate and reliable operational modeling system in the world.
+ The `Earth Prediction Innovation Center `_ seeks to accelerate scientific research and modeling contributions through continuous and sustained community engagement in order to produce the most accurate and reliable operational modeling system in the world.
ESG
Extended Schmidt Gnomonic (ESG) grid. The ESG grid uses the map projection developed by Jim Purser of NOAA :term:`EMC` (:cite:t:`Purser_2020`).
@@ -118,7 +118,7 @@ Glossary
High-Performance Computing.
HPC-Stack
- The `HPC-Stack `__ is a repository that provides a unified, shell script-based build system for building the software stack required for numerical weather prediction (NWP) tools such as the `Unified Forecast System (UFS) `__ and the `Joint Effort for Data assimilation Integration (JEDI) `__ framework. View the HPC-Stack documentation :doc:`here `.
+ The `HPC-Stack `__ is a repository that provides a unified, shell script-based build system for building the software stack required for numerical weather prediction (NWP) tools such as the `Unified Forecast System (UFS) `_ and the `Joint Effort for Data assimilation Integration (JEDI) `_ framework. View the HPC-Stack documentation :doc:`here `.
HPSS
High Performance Storage System (HPSS).
@@ -227,25 +227,25 @@ Glossary
A central location in which files (e.g., data, code, documentation) are stored and managed.
RRFS
- The `Rapid Refresh Forecast System `__ (RRFS) is NOAA's next-generation convection-allowing, rapidly-updated, ensemble-based data assimilation and forecasting system currently scheduled for operational implementation in 2024. It is designed to run forecasts on a 3-km :term:`CONUS` domain, see also `NOAA Rapid Refresh Forecast System (RRFS) `__. Experimental data is currently available from the `AWS S3 NOAA-RRFS `__ bucket for deterministic forecasts out to 60 hours at 00, 06, 12, and 18 UTC. Additionally, hourly forecasts out to 18 hours may be available for more recent RRFS model runs; the user needs to verify that data exists for needed dates.
+ The `Rapid Refresh Forecast System `_ (RRFS) is NOAA's next-generation convection-allowing, rapidly-updated, ensemble-based data assimilation and forecasting system currently scheduled for operational implementation in 2024. It is designed to run forecasts on a 3-km :term:`CONUS` domain, see also `NOAA Rapid Refresh Forecast System (RRFS) `__. Experimental data is currently available from the `AWS S3 NOAA-RRFS `__ bucket for deterministic forecasts out to 60 hours at 00, 06, 12, and 18 UTC. Additionally, hourly forecasts out to 18 hours may be available for more recent RRFS model runs; the user needs to verify that data exists for needed dates.
SDF
Suite Definition File. An external file containing information about the construction of a physics suite. It describes the schemes that are called, in which order they are called, whether they are subcycled, and whether they are assembled into groups to be called together.
Spack
- `Spack `__ is a package management tool designed to support multiple versions and configurations of software on a wide variety of platforms and environments. It was designed for large supercomputing centers, where many users and application teams share common installations of software on clusters with exotic architectures.
+ `Spack `_ is a package management tool designed to support multiple versions and configurations of software on a wide variety of platforms and environments. It was designed for large supercomputing centers, where many users and application teams share common installations of software on clusters with exotic architectures.
spack-stack
- The `spack-stack `__ is a collaborative effort between the NOAA Environmental Modeling Center (EMC), the UCAR Joint Center for Satellite Data Assimilation (JCSDA), and the Earth Prediction Innovation Center (EPIC). *spack-stack* is a repository that provides a :term:`Spack`-based method for building the software stack required for numerical weather prediction (NWP) tools such as the `Unified Forecast System (UFS) `__ and the `Joint Effort for Data assimilation Integration (JEDI) `__ framework. *spack-stack* uses the Spack package manager along with custom Spack configuration files and Python scripts to simplify installation of the libraries required to run various applications. The *spack-stack* can be installed on a range of platforms and comes pre-configured for many systems. Users can install the necessary packages for a particular application and later add the missing packages for another application without having to rebuild the entire stack. To get started, check out the documentation :doc:`here `.
+ The `spack-stack `_ is a collaborative effort between the NOAA Environmental Modeling Center (EMC), the UCAR Joint Center for Satellite Data Assimilation (JCSDA), and the Earth Prediction Innovation Center (EPIC). *spack-stack* is a repository that provides a :term:`Spack`-based method for building the software stack required for numerical weather prediction (NWP) tools such as the `Unified Forecast System (UFS) `_ and the `Joint Effort for Data assimilation Integration (JEDI) `_ framework. *spack-stack* uses the Spack package manager along with custom Spack configuration files and Python scripts to simplify installation of the libraries required to run various applications. The *spack-stack* can be installed on a range of platforms and comes pre-configured for many systems. Users can install the necessary packages for a particular application and later add the missing packages for another application without having to rebuild the entire stack. To get started, check out the documentation :doc:`here `.
tracer
- According to the American Meteorological Society (AMS) `definition `__, a tracer is "Any substance in the atmosphere that can be used to track the history [i.e., movement] of an air mass." Tracers are carried around by the motion of the atmosphere (i.e., by :term:`advection`). These substances are usually gases (e.g., water vapor, CO2), but they can also be non-gaseous (e.g., rain drops in microphysics parameterizations). In weather models, temperature (or potential temperature), absolute humidity, and radioactivity are also usually treated as tracers. According to AMS, "The main requirement for a tracer is that its lifetime be substantially longer than the transport process under study."
+ According to the American Meteorological Society (AMS) definition, a `tracer `_ is "Any substance in the atmosphere that can be used to track the history [i.e., movement] of an air mass." Tracers are carried around by the motion of the atmosphere (i.e., by :term:`advection`). These substances are usually gases (e.g., water vapor, CO2), but they can also be non-gaseous (e.g., rain drops in microphysics parameterizations). In weather models, temperature (or potential temperature), absolute humidity, and radioactivity are also usually treated as tracers. According to AMS, "The main requirement for a tracer is that its lifetime be substantially longer than the transport process under study."
UFS
The Unified Forecast System is a community-based, coupled, comprehensive Earth modeling
system consisting of several applications (apps). These apps span regional to global
- domains and sub-hourly to seasonal time scales. The UFS is designed to support the :term:`Weather Enterprise` and to be the source system for NOAA's operational numerical weather prediction applications. For more information, visit https://ufscommunity.org/.
+ domains and sub-hourly to seasonal time scales. The UFS is designed to support the :term:`Weather Enterprise` and to be the source system for NOAA's operational numerical weather prediction applications. For more information, visit https://ufs.epic.noaa.gov/.
UFS_UTILS
A collection of code used by multiple :term:`UFS` apps (e.g., the UFS Short-Range Weather App,
diff --git a/doc/UsersGuide/SSHIntro.rst b/doc/UsersGuide/SSHIntro.rst
index 7292a37e97..25837b9cc0 100644
--- a/doc/UsersGuide/SSHIntro.rst
+++ b/doc/UsersGuide/SSHIntro.rst
@@ -139,7 +139,7 @@ Download the Data from a Remote System to a Local System
.. note::
- Users should transfer data to or from non-:srw-wiki:`Level 1 ` platforms using the recommended approach for that platform. This section outlines some basic guidance, but users may need to supplement with research of their own. On Level 1 systems, users may find it helpful to refer to the `RDHPCS CommonDocs Wiki `__.
+ Users should transfer data to or from non-:srw-wiki:`Level 1 ` platforms using the recommended approach for that platform. This section outlines some basic guidance, but users may need to supplement with research of their own. On Level 1 systems, users may find it helpful to refer to the `RDHPCS Data Transfer Documentation `_.
To download data using ``scp``, users can typically adjust one of the following commands for use on their system:
diff --git a/doc/conf.py b/doc/conf.py
index 0d440a733b..f1f094d545 100644
--- a/doc/conf.py
+++ b/doc/conf.py
@@ -103,14 +103,16 @@
# Ignore working links that cause a linkcheck 403 error.
linkcheck_ignore = [r'https://www\.intel\.com/content/www/us/en/docs/cpp\-compiler/developer\-guide\-reference/2021\-10/thread\-affinity\-interface\.html',
r'https://www\.intel\.com/content/www/us/en/developer/tools/oneapi/hpc\-toolkit\-download\.html',
- #r'https://glossary.ametsoc.org/.*',
+ r'https://glossary.ametsoc.org/.*',
]
# Ignore anchor tags for SRW App data bucket. Shows Not Found even when they exist.
linkcheck_anchors_ignore = [r"current_srw_release_data/",
r"input_model_data/.*",
r"fix.*",
- r"sample_cases/.*",
+ r"experiment-user-cases/.*",
+ r"rrfs_a/*",
+ r"develop-20240618/*",
]
linkcheck_allowed_redirects = {r"https://github\.com/ufs-community/ufs-srweather-app/wiki/.*":
diff --git a/doc/doc-snippets/cron-note.rst b/doc/doc-snippets/cron-note.rst
new file mode 100644
index 0000000000..99192d0a1e
--- /dev/null
+++ b/doc/doc-snippets/cron-note.rst
@@ -0,0 +1,3 @@
+.. note::
+
+ Users may also want to set ``USE_CRON_TO_RELAUNCH: true`` and add ``CRON_RELAUNCH_INTVL_MNTS: 3``. This will automate submission of workflow tasks when running the experiment. However, not all systems have :term:`cron`.
\ No newline at end of file
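As an illustration of where these settings live, here is a minimal sketch. It assumes, as in ``ush/config_defaults.yaml``, that both keys belong under the ``workflow:`` section of ``config.yaml``; adjust if your layout differs:

.. code-block:: python

   # Hedged sketch: enable cron-based relaunch in config.yaml with PyYAML.
   # Assumes both keys belong under the "workflow:" section.
   import yaml

   with open("config.yaml") as f:
       cfg = yaml.safe_load(f) or {}

   cfg.setdefault("workflow", {})
   cfg["workflow"]["USE_CRON_TO_RELAUNCH"] = True
   cfg["workflow"]["CRON_RELAUNCH_INTVL_MNTS"] = 3

   with open("config.yaml", "w") as f:
       yaml.safe_dump(cfg, f, sort_keys=False)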
diff --git a/doc/doc-snippets/expt-conf-intro.rst b/doc/doc-snippets/expt-conf-intro.rst
new file mode 100644
index 0000000000..d23fc546cb
--- /dev/null
+++ b/doc/doc-snippets/expt-conf-intro.rst
@@ -0,0 +1,14 @@
+Navigate to the ``ufs-srweather-app/ush`` directory. The default (or "control") configuration for this experiment is based on the ``config.community.yaml`` file in that directory. Users can copy this file into ``config.yaml`` if they have not already done so:
+
+.. code-block:: console
+
+ cd /path/to/ufs-srweather-app/ush
+ cp config.community.yaml config.yaml
+
+Users can save the location of the ``ush`` directory in an environment variable (``$USH``). This makes it easier to navigate between directories later. For example:
+
+.. code-block:: console
+
+ export USH=/path/to/ufs-srweather-app/ush
+
+Users should substitute ``/path/to/ufs-srweather-app/ush`` with the actual path on their system. As long as a user remains logged into their system, they can run ``cd $USH``, and it will take them to the ``ush`` directory. The variable will need to be reset for each login session.
\ No newline at end of file
diff --git a/doc/doc-snippets/file-edit-hint.rst b/doc/doc-snippets/file-edit-hint.rst
new file mode 100644
index 0000000000..b7a9b99b6a
--- /dev/null
+++ b/doc/doc-snippets/file-edit-hint.rst
@@ -0,0 +1,9 @@
+.. Hint::
+
+ To open the configuration file in the command line, users may run the command:
+
+ .. code-block:: console
+
+ vi config.yaml
+
+ To modify the file, hit the ``i`` key and then make any changes required. To close and save, hit the ``esc`` key and type ``:wq`` to write the changes to the file and exit/quit the file. Users may opt to use their preferred code editor instead.
\ No newline at end of file
diff --git a/doc/doc-snippets/load-env.rst b/doc/doc-snippets/load-env.rst
new file mode 100644
index 0000000000..85afec6199
--- /dev/null
+++ b/doc/doc-snippets/load-env.rst
@@ -0,0 +1,7 @@
+.. code-block:: console
+
+ source /path/to/ufs-srweather-app/etc/lmod-setup.sh
+ module use /path/to/ufs-srweather-app/modulefiles
+ module load wflow_<machine>
+
+where ``<machine>`` is a valid, lowercased machine name (see ``MACHINE`` in :numref:`Section %s ` for valid values), and ``/path/to/`` is replaced by the actual path to the ``ufs-srweather-app``.
\ No newline at end of file
diff --git a/doc/doc-snippets/scp-files.rst b/doc/doc-snippets/scp-files.rst
new file mode 100644
index 0000000000..bc29780e9c
--- /dev/null
+++ b/doc/doc-snippets/scp-files.rst
@@ -0,0 +1,11 @@
+Users who are working on the cloud or on an HPC cluster may want to copy the ``.png`` files onto their local system to view in their preferred image viewer. Detailed instructions are available in the :ref:`Introduction to SSH & Data Transfer `.
+
+In summary, users can run the ``scp`` command in a new terminal/command prompt window to securely copy files from a remote system to their local system if an SSH tunnel is already established between the local system and the remote system. Users can adjust one of the following commands for their system:
+
+.. code-block:: console
+
+ scp username@your-IP-address:/path/to/source_file_or_directory /path/to/destination_file_or_directory
+ # OR
+ scp -P 12345 username@localhost:/path/to/source_file_or_directory /path/to/destination_file_or_directory
+
+Users would need to modify ``username``, ``your-IP-address``, ``-P 12345``, and the file paths to reflect their systems' information. See the :ref:`Introduction to SSH & Data Transfer ` for example commands.
\ No newline at end of file
From 563dd4050f510bb3ebb74ede745ce20311d06f2b Mon Sep 17 00:00:00 2001
From: Michael Kavulich
Date: Mon, 21 Oct 2024 07:13:38 -0600
Subject: [PATCH 35/39] [develop] Fix get_crontab_contents.py (#1142)
Fixes bug described in issue #1141: get_crontab_contents.py fails when run as a script
---
ush/get_crontab_contents.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/ush/get_crontab_contents.py b/ush/get_crontab_contents.py
index 7f08346b5d..f619fa4587 100644
--- a/ush/get_crontab_contents.py
+++ b/ush/get_crontab_contents.py
@@ -224,7 +224,7 @@ def _parse_args(argv):
)
# Check that inputs are correct and consistent
- args = parser._parse_args(argv)
+ args = parser.parse_args(argv)
if args.remove:
if args.line is None:
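For context, ``argparse.ArgumentParser`` exposes the public ``parse_args()`` method; there is no ``_parse_args()`` method on the parser object, so the old call raised ``AttributeError`` whenever the script was run directly. A minimal sketch of the corrected call:

.. code-block:: python

   # Minimal reproduction of the fix: parse_args() is the public argparse API.
   import argparse

   parser = argparse.ArgumentParser(description="demo parser")
   parser.add_argument("-r", "--remove", action="store_true")
   parser.add_argument("-l", "--line")

   args = parser.parse_args(["-r", "-l", "5"])  # correct public method
   print(args.remove, args.line)                # True 5
   # parser._parse_args(["-r"])                 # AttributeError: no such attribute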
From 87b26cc53b0a0248a0c7221aabd0c1771f26dfb1 Mon Sep 17 00:00:00 2001
From: Michael Lueken <63728921+MichaelLueken@users.noreply.github.com>
Date: Fri, 1 Nov 2024 10:10:03 -0400
Subject: [PATCH 36/39] [develop] Update ufs-weather-model hash and UPP hash
and use upp-addon-env spack-stack environment (#1136)
* Update ufs-weather-model hash to 38a29a6 (September 19)
* Update UPP hash to 81b38a8 (August 13)
* All Tier-1 modulefiles/build_*.lua files have been updated to use the upp-addon-env spack-stack environment
* srw_common.lua was updated to use g2/3.5.1 and g2tmpl/1.13.0 (these are required for UPP)
* .cicd/Jenkinsfile was updated to replace cheyenne entries with derecho.
* The doc/tables/Tests.csv table had nco-mode WE2E tests removed
* The doc/UsersGuide/CustomizingTheWorkflow/ConfigWorkflow.rst documentation was updated to match the updated ush/config_defaults.yaml file.
* The .github/CODEOWNERS file was updated to add Bruce Kropp to the list of reviewers
* The exregional_plot_allvars.py and exregional_plot_allvars_diff.py scripts were updated to address changes made to the postxconfig-NT-fv3lam.txt file.
* Updated ush/config_defaults.yaml to revise the PE_MEMBER01 calculation and the OMP_NUM_THREADS_RUN_FCST documentation so that the run_fcst task runs properly on Tier-1 platforms now that threading functions correctly.
* The ush/machine/*.yaml files were likewise updated so that the run_fcst task runs properly on Tier-1 platforms with threading enabled.
* There are not enough resources on Jet to run the high-resolution WE2E tests (136 (ReqNodeNotAvail)). Commented out these tests in the comprehensive.jet test suite and removed one test from the coverage.jet test suite.
* The ufs-case-studies WE2E tests are currently failing on Derecho. The tests fail in the get_extrn_ics/lbcs tasks, which report that the required files are not present even though the files are named correctly and are available. Commented out these tests in comprehensive.derecho and removed the affected WE2E tests from coverage.derecho. Issue #1144 (ufs-case-studies WE2E tests fail on Derecho in get_extrn_ics/lbcs) was opened to track this issue on Derecho.
---
.cicd/Jenkinsfile | 4 +-
.github/CODEOWNERS | 2 +-
Externals.cfg | 4 +-
.../CustomizingTheWorkflow/ConfigWorkflow.rst | 6 +-
doc/tables/Tests.csv | 8 +-
modulefiles/build_derecho_intel.lua | 2 +-
modulefiles/build_gaea_intel.lua | 6 +-
modulefiles/build_hera_gnu.lua | 2 +-
modulefiles/build_hera_intel.lua | 2 +-
modulefiles/build_hercules_intel.lua | 2 +-
modulefiles/build_jet_intel.lua | 2 +-
modulefiles/build_noaacloud_intel.lua | 2 +-
modulefiles/build_orion_intel.lua | 2 +-
modulefiles/srw_common.lua | 4 +-
modulefiles/tasks/gaea/python_srw.lua | 3 +-
modulefiles/wflow_gaea.lua | 1 -
parm/model_configure | 3 -
parm/ufs.configure | 25 ++++--
scripts/exregional_plot_allvars.py | 10 ++-
scripts/exregional_plot_allvars_diff.py | 9 ++-
.../WE2E/machine_suites/comprehensive.derecho | 18 ++---
tests/WE2E/machine_suites/comprehensive.jet | 77 +++++++++++++++++++
tests/WE2E/machine_suites/coverage.derecho | 6 +-
.../machine_suites/coverage.hera.intel.nco | 2 +-
tests/WE2E/machine_suites/coverage.hercules | 2 +-
tests/WE2E/machine_suites/coverage.jet | 2 +-
...fig.aqm_grid_AQM_NA13km_suite_GFS_v16.yaml | 1 +
..._FV3GFS_lbcs_FV3GFS_suite_RRFS_v1beta.yaml | 2 -
...3km_ics_FV3GFS_lbcs_FV3GFS_suite_HRRR.yaml | 2 -
...pact_3km_ics_HRRR_lbcs_RAP_suite_HRRR.yaml | 2 -
...m_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta.yaml | 2 -
.../test_create_model_configure_file.py | 2 -
ush/config.aqm.yaml | 1 +
ush/config_defaults.yaml | 4 +-
ush/create_model_configure_file.py | 2 -
ush/create_ufs_configure_file.py | 7 +-
ush/machine/gaea.yaml | 6 +-
ush/machine/hera.yaml | 4 +-
ush/machine/hercules.yaml | 13 +++-
ush/machine/jet.yaml | 6 +-
ush/machine/orion.yaml | 13 +++-
41 files changed, 191 insertions(+), 82 deletions(-)
create mode 100644 tests/WE2E/machine_suites/comprehensive.jet
diff --git a/.cicd/Jenkinsfile b/.cicd/Jenkinsfile
index 5b90ab1173..18d015b6e9 100644
--- a/.cicd/Jenkinsfile
+++ b/.cicd/Jenkinsfile
@@ -12,9 +12,9 @@ pipeline {
parameters {
// Allow job runner to filter based on platform
// Use the line below to enable all PW clusters
- // choice(name: 'SRW_PLATFORM_FILTER', choices: ['all', 'cheyenne', 'gaea', 'hera', 'jet', 'orion', 'hercules', 'pclusternoaav2use1', 'azclusternoaav2eus1', 'gclusternoaav2usc1'], description: 'Specify the platform(s) to use')
+ // choice(name: 'SRW_PLATFORM_FILTER', choices: ['all', 'derecho', 'gaea', 'hera', 'jet', 'orion', 'hercules', 'pclusternoaav2use1', 'azclusternoaav2eus1', 'gclusternoaav2usc1'], description: 'Specify the platform(s) to use')
// Use the line below to enable the PW AWS cluster
- // choice(name: 'SRW_PLATFORM_FILTER', choices: ['all', 'cheyenne', 'gaea', 'hera', 'jet', 'orion', 'hercules', 'pclusternoaav2use1'], description: 'Specify the platform(s) to use')
+ // choice(name: 'SRW_PLATFORM_FILTER', choices: ['all', 'derecho', 'gaea', 'hera', 'jet', 'orion', 'hercules', 'pclusternoaav2use1'], description: 'Specify the platform(s) to use')
choice(name: 'SRW_PLATFORM_FILTER', choices: ['all', 'derecho', 'gaea', 'hera', 'jet', 'orion', 'hercules'], description: 'Specify the platform(s) to use')
// Allow job runner to filter based on compiler
choice(name: 'SRW_COMPILER_FILTER', choices: ['all', 'gnu', 'intel'], description: 'Specify the compiler(s) to use to build')
diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
index 700cea255c..e1878a6447 100644
--- a/.github/CODEOWNERS
+++ b/.github/CODEOWNERS
@@ -3,7 +3,7 @@
# These owners will be the default owners for everything in the repo.
#* @defunkt
-* @mkavulich @gsketefian @JeffBeck-NOAA @RatkoVasic-NOAA @BenjaminBlake-NOAA @ywangwof @chan-hoo @panll @christinaholtNOAA @christopherwharrop-noaa @danielabdi-noaa @mark-a-potts @jkbk2004 @willmayfield @dmwright526 @gspetro-NOAA @natalie-perlin @EdwardSnyder-NOAA @MichaelLueken @rickgrubin-noaa
+* @mkavulich @gsketefian @JeffBeck-NOAA @RatkoVasic-NOAA @BenjaminBlake-NOAA @ywangwof @chan-hoo @panll @christinaholtNOAA @christopherwharrop-noaa @danielabdi-noaa @mark-a-potts @jkbk2004 @willmayfield @dmwright526 @gspetro-NOAA @natalie-perlin @EdwardSnyder-NOAA @MichaelLueken @rickgrubin-noaa @BruceKropp-Raytheon
# Order is important. The last matching pattern has the most precedence.
# So if a pull request only touches javascript files, only these owners
diff --git a/Externals.cfg b/Externals.cfg
index b57d63957e..9acd326b65 100644
--- a/Externals.cfg
+++ b/Externals.cfg
@@ -12,7 +12,7 @@ protocol = git
repo_url = https://github.com/ufs-community/ufs-weather-model
# Specify either a branch name or a hash but not both.
#branch = develop
-hash = a1143cc
+hash = 38a29a6
local_path = sorc/ufs-weather-model
required = True
@@ -21,7 +21,7 @@ protocol = git
repo_url = https://github.com/NOAA-EMC/UPP
# Specify either a branch name or a hash but not both.
#branch = develop
-hash = be0410e
+hash = 81b38a8
local_path = sorc/UPP
required = True
diff --git a/doc/UsersGuide/CustomizingTheWorkflow/ConfigWorkflow.rst b/doc/UsersGuide/CustomizingTheWorkflow/ConfigWorkflow.rst
index 4d88173028..d9704b1ab6 100644
--- a/doc/UsersGuide/CustomizingTheWorkflow/ConfigWorkflow.rst
+++ b/doc/UsersGuide/CustomizingTheWorkflow/ConfigWorkflow.rst
@@ -1097,7 +1097,7 @@ For each workflow task, certain parameter values must be passed to the job sched
For more information, see the `Intel Development Reference Guide `__.
``OMP_NUM_THREADS_RUN_FCST``: (Default: 2)
- The number of OpenMP threads to use for parallel regions. Corresponds to the ``atmos_nthreads`` value in ``model_configure``.
+ The number of OpenMP threads to use for parallel regions. Corresponds to the ``ATM_omp_num_threads`` value in ``ufs.configure``.
``OMP_STACKSIZE_RUN_FCST``: (Default: "1024m")
Controls the size of the stack for threads created by the OpenMP implementation.
@@ -1163,12 +1163,12 @@ Write-Component (Quilting) Parameters
``PRINT_ESMF``: (Default: false)
Flag that determines whether to output extra (debugging) information from :term:`ESMF` routines. Note that the write component uses ESMF library routines to interpolate from the native forecast model grid to the user-specified output grid (which is defined in the model configuration file ``model_configure`` in the forecast run directory). Valid values: ``True`` | ``False``
-``PE_MEMBER01``: (Default: ``'{{ LAYOUT_Y * LAYOUT_X + WRTCMP_write_groups * WRTCMP_write_tasks_per_group if QUILTING else LAYOUT_Y * LAYOUT_X}}'``)
+``PE_MEMBER01``: (Default: ``'{{ OMP_NUM_THREADS_RUN_FCST * (LAYOUT_Y * LAYOUT_X + WRTCMP_write_groups * WRTCMP_write_tasks_per_group) if QUILTING else OMP_NUM_THREADS_RUN_FCST * (LAYOUT_Y * LAYOUT_X)}}'``)
The number of MPI processes required by the forecast. When QUILTING is true, it is calculated as:
.. math::
- LAYOUT\_X * LAYOUT\_Y + WRTCMP\_write\_groups * WRTCMP\_write\_tasks\_per\_group
+ OMP\_NUM\_THREADS\_RUN\_FCST * (LAYOUT\_X * LAYOUT\_Y + WRTCMP\_write\_groups * WRTCMP\_write\_tasks\_per\_group)
``WRTCMP_write_groups``: (Default: "")
The number of write groups (i.e., groups of :term:`MPI` tasks) to use in the write component. Each write group will write to one set of output files (a ``dynf${fhr}.nc`` and a ``phyf${fhr}.nc`` file, where ``${fhr}`` is the forecast hour). Each write group contains ``WRTCMP_write_tasks_per_group`` tasks. Usually, one write group is sufficient. This may need to be increased if the forecast is proceeding so quickly that a single write group cannot complete writing to its set of files before there is a need/request to start writing the next set of files at the next output time.
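To make the revised default concrete, the Jinja expression can be evaluated directly. A hedged sketch with illustrative values (not taken from any shipped configuration):

.. code-block:: python

   # Evaluate the new PE_MEMBER01 default with jinja2 using sample values.
   from jinja2 import Template

   expr = ("{{ OMP_NUM_THREADS_RUN_FCST * (LAYOUT_Y * LAYOUT_X "
           "+ WRTCMP_write_groups * WRTCMP_write_tasks_per_group) "
           "if QUILTING else OMP_NUM_THREADS_RUN_FCST * (LAYOUT_Y * LAYOUT_X) }}")

   values = dict(OMP_NUM_THREADS_RUN_FCST=2, LAYOUT_X=5, LAYOUT_Y=2,
                 WRTCMP_write_groups=1, WRTCMP_write_tasks_per_group=10,
                 QUILTING=True)

   print(Template(expr).render(**values))  # 2 * (2*5 + 1*10) = 40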
diff --git a/doc/tables/Tests.csv b/doc/tables/Tests.csv
index d063e5c3fa..325b4dad69 100644
--- a/doc/tables/Tests.csv
+++ b/doc/tables/Tests.csv
@@ -1,6 +1,5 @@
Fundamental,Comprehensive,Test Name,PREDEF_GRID_NAME,CCPP_PHYS_SUITE,EXTRN_MDL_NAME_ICS,EXTRN_MDL_NAME_LBCS,DATES (UTC),FCST_LEN_HRS (hrs),est. core hours, walltime (min),notes
yes,yes,grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta,RRFS_CONUScompact_25km,FV3_RRFS_v1beta,HRRR,RAP,2020081000,3,8,22,
-yes,yes,nco_grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_timeoffset_suite_GFS_v16,RRFS_CONUS_25km,FV3_GFS_v16,FV3GFS,FV3GFS,2022081012,6,10,15,
yes,yes,grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v15p2,RRFS_CONUS_25km,FV3_GFS_v15p2,FV3GFS,FV3GFS,2019070100,6,7,10,
yes,yes,grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v17_p8_plot,RRFS_CONUS_25km,FV3_GFS_v17_p8,FV3GFS,FV3GFS,2019070100,6,11,15,
yes,yes,grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_HRRR_suite_HRRR,RRFS_CONUScompact_25km,FV3_HRRR,HRRR,HRRR,2020081000,24,26,20
@@ -50,11 +49,6 @@ yes,yes,grid_RRFS_CONUS_25km_ics_NAM_lbcs_NAM_suite_GFS_v16,RRFS_CONUS_25km,FV3_
,yes,grid_SUBCONUS_Ind_3km_ics_RAP_lbcs_RAP_suite_RRFS_v1beta,SUBCONUS_Ind_3km,FV3_RRFS_v1beta,RAP,RAP,2020080103,3,20,22,
,yes,MET_ensemble_verification_only_vx,RRFS_CONUS_25km,*none*,*none*,*none*,2019061500,6,1,15,Runs ensemble verification tasks on staged data without running the rest of the workflow
,yes,MET_verification_only_vx,RRFS_CONUS_25km,*none*,*none*,*none*,2019061500,6,1,8,Runs verification tasks on staged data without running the rest of the workflow
-,yes,nco,RRFS_CONUS_25km,FV3_GFS_v16,FV3GFS,FV3GFS,2022040700,6,7,20,
-,yes,nco_ensemble,RRFS_CONUS_25km,FV3_GFS_v15p2,FV3GFS,FV3GFS,2019070100 2019070112 2019070200 2019070212,6,55,20,
-,yes,nco_grid_RRFS_CONUS_13km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16,RRFS_CONUS_13km,FV3_GFS_v16,FV3GFS,FV3GFS,2019061500,6,26,20,
-,yes,nco_grid_RRFS_CONUS_3km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v15_thompson_mynn_lam3km,RRFS_CONUS_3km,FV3_GFS_v15_thompson_mynn_lam3km,FV3GFS,FV3GFS,2019061500,6,320,25,
-,yes,nco_grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_RAP_suite_HRRR,RRFS_CONUScompact_25km,FV3_HRRR,HRRR,RAP,2020081000,6,12,17,
,yes,pregen_grid_orog_sfc_climo,RRFS_CONUS_25km,FV3_GFS_v15p2,FV3GFS,FV3GFS,2019070100,6,6,17,
,yes,specify_EXTRN_MDL_SYSBASEDIR_ICS_LBCS,RRFS_CONUS_25km,FV3_GFS_v15p2,FV3GFS,FV3GFS,2021061500,6,6,19,
,yes,specify_template_filenames,RRFS_CONUS_25km,FV3_GFS_v15p2,FV3GFS,FV3GFS,2019070100,6,6,28,
@@ -74,4 +68,4 @@ yes,yes,grid_RRFS_CONUS_25km_ics_NAM_lbcs_NAM_suite_GFS_v16,RRFS_CONUS_25km,FV3_
,,aqm_grid_AQM_NA13km_suite_GFS_v16,AQM_NA_13km,FV3_GFS_v16,FV3GFS,FV3GFS,2023021700 2023021706,6,,,This is an air-quality model test that requires special compilation to run; not supported in this release
,,grid_RRFS_NA_3km_ics_FV3GFS_lbcs_FV3GFS_suite_RRFS_v1beta,RRFS_NA_3km,FV3_RRFS_v1beta,FV3GFS,FV3GFS,2019070100,3,,,The RRFS_NA_3km domain currently has segfault problems--this test is not run
,,subhourly_post,RRFS_CONUScompact_25km,FV3_RRFS_v1beta,HRRR,RAP,2020081000,3,,,Subhourly post tasks are currently broken--these tests are not run
-,,subhourly_post_ensemble_2mems,RRFS_CONUScompact_25km,FV3_RRFS_v1beta,HRRR,RAP,2020081000,3,,,Subhourly post tasks are currently broken--these tests are not run
\ No newline at end of file
+,,subhourly_post_ensemble_2mems,RRFS_CONUScompact_25km,FV3_RRFS_v1beta,HRRR,RAP,2020081000,3,,,Subhourly post tasks are currently broken--these tests are not run
diff --git a/modulefiles/build_derecho_intel.lua b/modulefiles/build_derecho_intel.lua
index 491a94f912..1356fdb3e0 100644
--- a/modulefiles/build_derecho_intel.lua
+++ b/modulefiles/build_derecho_intel.lua
@@ -6,7 +6,7 @@ the CISL machine Derecho (Cray) using Intel@2021.10.0
whatis([===[Loads libraries needed for building the UFS SRW App on Derecho ]===])
prepend_path("MODULEPATH","/lustre/desc1/scratch/epicufsrt/contrib/modulefiles_extra")
-prepend_path("MODULEPATH", "/glade/work/epicufsrt/contrib/spack-stack/derecho/spack-stack-1.6.0/envs/unified-env/install/modulefiles/Core")
+prepend_path("MODULEPATH", "/glade/work/epicufsrt/contrib/spack-stack/derecho/spack-stack-1.6.0/envs/upp-addon-env/install/modulefiles/Core")
load(pathJoin("stack-intel", os.getenv("stack_intel_ver") or "2021.10.0"))
load(pathJoin("stack-cray-mpich", os.getenv("stack_cray_mpich_ver") or "8.1.25"))
diff --git a/modulefiles/build_gaea_intel.lua b/modulefiles/build_gaea_intel.lua
index b47209194c..2a53acf15b 100644
--- a/modulefiles/build_gaea_intel.lua
+++ b/modulefiles/build_gaea_intel.lua
@@ -5,11 +5,11 @@ the NOAA RDHPC machine Gaea C5 using Intel-2023.1.0
whatis([===[Loads libraries needed for building the UFS SRW App on Gaea C5 ]===])
-prepend_path("MODULEPATH","/ncrc/proj/epic/spack-stack/spack-stack-1.6.0/envs/unified-env/install/modulefiles/Core")
-stack_intel_ver=os.getenv("stack_intel_ver") or "2023.1.0"
+prepend_path("MODULEPATH","/ncrc/proj/epic/spack-stack/spack-stack-1.6.0/envs/upp-addon-env/install/modulefiles/Core")
+stack_intel_ver=os.getenv("stack_intel_ver") or "2023.2.0"
load(pathJoin("stack-intel", stack_intel_ver))
-stack_mpich_ver=os.getenv("stack_mpich_ver") or "8.1.25"
+stack_mpich_ver=os.getenv("stack_mpich_ver") or "8.1.28"
load(pathJoin("stack-cray-mpich", stack_mpich_ver))
stack_python_ver=os.getenv("stack_python_ver") or "3.10.13"
diff --git a/modulefiles/build_hera_gnu.lua b/modulefiles/build_hera_gnu.lua
index 8854108966..621c7581a6 100644
--- a/modulefiles/build_hera_gnu.lua
+++ b/modulefiles/build_hera_gnu.lua
@@ -7,7 +7,7 @@ whatis([===[Loads libraries needed for building the UFS SRW App on Hera using GN
prepend_path("MODULEPATH", "/scratch2/NCEPDEV/stmp1/role.epic/installs/gnu/modulefiles")
prepend_path("MODULEPATH", "/scratch2/NCEPDEV/stmp1/role.epic/installs/openmpi/modulefiles")
-prepend_path("MODULEPATH", "/scratch2/NCEPDEV/stmp1/role.epic/spack-stack/spack-stack-1.6.0_gnu13/envs/ufs-wm-srw-rocky8/install/modulefiles/Core")
+prepend_path("MODULEPATH", "/scratch2/NCEPDEV/stmp1/role.epic/spack-stack/spack-stack-1.6.0_gnu13/envs/upp-addon-env/install/modulefiles/Core")
load("stack-gcc/13.3.0")
load("stack-openmpi/4.1.6")
diff --git a/modulefiles/build_hera_intel.lua b/modulefiles/build_hera_intel.lua
index d8e793044c..d3d20a5bb5 100644
--- a/modulefiles/build_hera_intel.lua
+++ b/modulefiles/build_hera_intel.lua
@@ -8,7 +8,7 @@ whatis([===[Loads libraries needed for building the UFS SRW App on Hera ]===])
prepend_path("MODULEPATH","/contrib/sutils/modulefiles")
load("sutils")
-prepend_path("MODULEPATH", "/scratch1/NCEPDEV/nems/role.epic/spack-stack/spack-stack-1.6.0/envs/unified-env-rocky8/install/modulefiles/Core")
+prepend_path("MODULEPATH", "/scratch1/NCEPDEV/nems/role.epic/spack-stack/spack-stack-1.6.0/envs/upp-addon-env/install/modulefiles/Core")
stack_intel_ver=os.getenv("stack_intel_ver") or "2021.5.0"
load(pathJoin("stack-intel", stack_intel_ver))
diff --git a/modulefiles/build_hercules_intel.lua b/modulefiles/build_hercules_intel.lua
index b65890f1c4..e01b68905e 100644
--- a/modulefiles/build_hercules_intel.lua
+++ b/modulefiles/build_hercules_intel.lua
@@ -5,7 +5,7 @@ the MSU machine Hercules using intel-oneapi-compilers/2022.2.1
whatis([===[Loads libraries needed for building the UFS SRW App on Hercules ]===])
-prepend_path("MODULEPATH", "/work/noaa/epic/role-epic/spack-stack/hercules/spack-stack-1.6.0/envs/unified-env/install/modulefiles/Core")
+prepend_path("MODULEPATH", "/work/noaa/epic/role-epic/spack-stack/hercules/spack-stack-1.6.0/envs/upp-addon-env/install/modulefiles/Core")
load("stack-intel/2021.9.0")
load("stack-intel-oneapi-mpi/2021.9.0")
diff --git a/modulefiles/build_jet_intel.lua b/modulefiles/build_jet_intel.lua
index a0169a684a..78e70e0960 100644
--- a/modulefiles/build_jet_intel.lua
+++ b/modulefiles/build_jet_intel.lua
@@ -5,7 +5,7 @@ the NOAA RDHPC machine Jet using Intel-2021.5.0
whatis([===[Loads libraries needed for building the UFS SRW App on Jet ]===])
-prepend_path("MODULEPATH","/contrib/spack-stack/spack-stack-1.6.0/envs/unified-env-rocky8/install/modulefiles/Core")
+prepend_path("MODULEPATH","/contrib/spack-stack/spack-stack-1.6.0/envs/upp-addon-env/install/modulefiles/Core")
load("stack-intel/2021.5.0")
load("stack-intel-oneapi-mpi/2021.5.1")
diff --git a/modulefiles/build_noaacloud_intel.lua b/modulefiles/build_noaacloud_intel.lua
index b1a6adbbf0..1f5855343d 100644
--- a/modulefiles/build_noaacloud_intel.lua
+++ b/modulefiles/build_noaacloud_intel.lua
@@ -5,7 +5,7 @@ the NOAA cloud using Intel-oneapi
whatis([===[Loads libraries needed for building the UFS SRW App on NOAA cloud ]===])
-prepend_path("MODULEPATH", "/contrib/spack-stack-rocky8/spack-stack-1.6.0/envs/ue-intel/install/modulefiles/Core")
+prepend_path("MODULEPATH", "/contrib/spack-stack-rocky8/spack-stack-1.6.0/envs/upp-addon-env/install/modulefiles/Core")
prepend_path("MODULEPATH", "/apps/modules/modulefiles")
load("gnu")
load("stack-intel")
diff --git a/modulefiles/build_orion_intel.lua b/modulefiles/build_orion_intel.lua
index b2f3d85c00..92d268a4d7 100644
--- a/modulefiles/build_orion_intel.lua
+++ b/modulefiles/build_orion_intel.lua
@@ -5,7 +5,7 @@ the MSU machine Orion using intel-oneapi-compilers/2021.9.0
whatis([===[Loads libraries needed for building the UFS SRW App on Orion ]===])
-prepend_path("MODULEPATH", "/work/noaa/epic/role-epic/spack-stack/orion/spack-stack-1.6.0/envs/unified-env-rocky9/install/modulefiles/Core")
+prepend_path("MODULEPATH", "/work/noaa/epic/role-epic/spack-stack/orion/spack-stack-1.6.0/envs/upp-addon-env/install/modulefiles/Core")
load("stack-intel/2021.9.0")
load("stack-intel-oneapi-mpi/2021.9.0")
diff --git a/modulefiles/srw_common.lua b/modulefiles/srw_common.lua
index cb2047cbe1..d2bdbe6790 100644
--- a/modulefiles/srw_common.lua
+++ b/modulefiles/srw_common.lua
@@ -10,8 +10,8 @@ load("fms/2023.04")
load("bacio/2.4.1")
load("crtm/2.4.0.1")
-load("g2/3.4.5")
-load("g2tmpl/1.10.2")
+load("g2/3.5.1")
+load("g2tmpl/1.13.0")
load("ip/4.3.0")
load("sp/2.5.0")
load("w3emc/2.10.0")
diff --git a/modulefiles/tasks/gaea/python_srw.lua b/modulefiles/tasks/gaea/python_srw.lua
index 5058b3f615..84df70682d 100644
--- a/modulefiles/tasks/gaea/python_srw.lua
+++ b/modulefiles/tasks/gaea/python_srw.lua
@@ -3,5 +3,6 @@ unload("python")
load("conda")
setenv("SRW_ENV", "srw_app")
-setenv("LD_PRELOAD", "/opt/cray/pe/gcc/12.2.0/snos/lib64/libstdc++.so.6")
+setenv("LD_PRELOAD", "/usr/lib64/libstdc++.so.6")
+setenv("FI_VERBS_PREFER_XRC", "0")
diff --git a/modulefiles/wflow_gaea.lua b/modulefiles/wflow_gaea.lua
index 6c24672c30..c2207ff57b 100644
--- a/modulefiles/wflow_gaea.lua
+++ b/modulefiles/wflow_gaea.lua
@@ -11,7 +11,6 @@ load("rocoto")
load("conda")
pushenv("MKLROOT", "/opt/intel/oneapi/mkl/2023.1.0/")
-setenv("LD_PRELOAD", "/opt/cray/pe/gcc/12.2.0/snos/lib64/libstdc++.so.6")
if mode() == "load" then
LmodMsgRaw([===[Please do the following to activate conda:
diff --git a/parm/model_configure b/parm/model_configure
index aeb45f4719..d22adf3f3a 100644
--- a/parm/model_configure
+++ b/parm/model_configure
@@ -1,5 +1,3 @@
-total_member: 1
-PE_MEMBER01: {{ PE_MEMBER01 }}
start_year: {{ start_year }}
start_month: {{ start_month }}
start_day: {{ start_day }}
@@ -13,7 +11,6 @@ ENS_SPS: .false.
dt_atmos: {{ dt_atmos }}
calendar: 'julian'
memuse_verbose: .false.
-atmos_nthreads: {{ atmos_nthreads }}
restart_interval: {{ restart_interval }}
output_1st_tstep_rst: .false.
write_dopost: {{ write_dopost }}
diff --git a/parm/ufs.configure b/parm/ufs.configure
index d90b7447f4..48d2a66e8a 100644
--- a/parm/ufs.configure
+++ b/parm/ufs.configure
@@ -19,14 +19,14 @@ EARTH_attributes::
# ATM #
ATM_model: fv3
-ATM_petlist_bounds: -1 -1
+ATM_petlist_bounds: 0 {{ pe_member01_m1 }}
ATM_attributes::
Verbosity = 0
::
# AQM #
AQM_model: aqm
-AQM_petlist_bounds: -1 -1
+AQM_petlist_bounds: 0 {{ aqm_pe_member01_m1 }}
AQM_attributes::
Verbosity = 0
Diagnostic = 0
@@ -45,8 +45,21 @@ runSeq::
{% else %}
# EARTH #
EARTH_component_list: ATM
- ATM_model: fv3
- runSeq::
- ATM
- ::
+EARTH_attributes::
+ Verbosity = 0
+::
+
+# ATM #
+ATM_model: fv3
+ATM_petlist_bounds: 0 {{ pe_member01_m1 }}
+ATM_omp_num_threads: {{ atm_omp_num_threads }}
+ATM_attributes::
+ Verbosity = 0
+ Diagnostic = 0
+::
+
+# Run Sequence #
+runSeq::
+ ATM
+::
{% endif %}
diff --git a/scripts/exregional_plot_allvars.py b/scripts/exregional_plot_allvars.py
index 040e17b012..14d15c07f0 100755
--- a/scripts/exregional_plot_allvars.py
+++ b/scripts/exregional_plot_allvars.py
@@ -429,7 +429,7 @@ def setup_logging(debug=False):
t1a = time.perf_counter()
# Sea level pressure
- slp = data1.select(name="Pressure reduced to MSL")[0].values * 0.01
+ slp = data1.select(name="MSLP (Eta model reduction)")[0].values * 0.01
slpsmooth = ndimage.gaussian_filter(slp, 13.78)
# 2-m temperature
@@ -484,7 +484,13 @@ def setup_logging(debug=False):
)
# Composite reflectivity
- refc = data1.select(name="Maximum/Composite radar reflectivity")[0].values
+ # refc is the 37th entry in the GRIB2 post output file
+ # First rewind to the start of the GRIB2 file
+ data1.rewind()
+ # Advance 36 entries in the GRIB2 file
+ data1.seek(36)
+ # Read values from the 37th entry in the GRIB2 file
+ refc = data1.readline().values
if fhr > 0:
# Max/Min Hourly 2-5 km Updraft Helicity
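The rewind/seek/readline pattern above is position-based access in pygrib: ``select()`` matches messages by name, while seeking addresses a message by its ordinal position when the name lookup is ambiguous. A hedged sketch (the file path is illustrative only):

.. code-block:: python

   # Read the 37th GRIB2 message by position rather than by name.
   import pygrib

   grbs = pygrib.open("postprd/natlev.f006.grib2")  # hypothetical file
   grbs.rewind()           # return to the first message
   grbs.seek(36)           # skip the first 36 messages
   refc = grbs.readline()  # read message 37
   print(refc.name, refc.values.shape)
   grbs.close()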
diff --git a/scripts/exregional_plot_allvars_diff.py b/scripts/exregional_plot_allvars_diff.py
index 61efcdb82b..c493f68f65 100755
--- a/scripts/exregional_plot_allvars_diff.py
+++ b/scripts/exregional_plot_allvars_diff.py
@@ -446,9 +446,9 @@ def setup_logging(debug=False):
t1a = time.perf_counter()
# Sea level pressure
- slp_1 = data1.select(name="Pressure reduced to MSL")[0].values * 0.01
+ slp_1 = data1.select(name="MSLP (Eta model reduction)")[0].values * 0.01
slpsmooth_1 = ndimage.gaussian_filter(slp_1, 13.78)
- slp_2 = data2.select(name="Pressure reduced to MSL")[0].values * 0.01
+ slp_2 = data2.select(name="MSLP (Eta model reduction)")[0].values * 0.01
slpsmooth_2 = ndimage.gaussian_filter(slp_2, 13.78)
slp_diff = slp_2 - slp_1
@@ -544,7 +544,12 @@ def setup_logging(debug=False):
qpf_diff = qpf_2 - qpf_1
# Composite reflectivity
+ # refc is the 37th entry in the GRIB2 post output file
+ data1.rewind()
+ data1.seek(36)
refc_1 = data1.select(name="Maximum/Composite radar reflectivity")[0].values
+ data2.rewind()
+ data2.seek(36)
refc_2 = data2.select(name="Maximum/Composite radar reflectivity")[0].values
if fhr > 0:
diff --git a/tests/WE2E/machine_suites/comprehensive.derecho b/tests/WE2E/machine_suites/comprehensive.derecho
index 5930843582..b88ec31bba 100644
--- a/tests/WE2E/machine_suites/comprehensive.derecho
+++ b/tests/WE2E/machine_suites/comprehensive.derecho
@@ -1,12 +1,12 @@
-2020_CAD
-2020_CAPE
-2019_hurricane_barry
-2019_halloween_storm
-2019_hurricane_lorenzo
-2019_memorial_day_heat_wave
-2020_denver_radiation_inversion
-2020_easter_storm
-2020_jan_cold_blast
+#2020_CAD
+#2020_CAPE
+#2019_hurricane_barry
+#2019_halloween_storm
+#2019_hurricane_lorenzo
+#2019_memorial_day_heat_wave
+#2020_denver_radiation_inversion
+#2020_easter_storm
+#2020_jan_cold_blast
community
custom_ESGgrid
custom_ESGgrid_Central_Asia_3km
diff --git a/tests/WE2E/machine_suites/comprehensive.jet b/tests/WE2E/machine_suites/comprehensive.jet
new file mode 100644
index 0000000000..0e15479feb
--- /dev/null
+++ b/tests/WE2E/machine_suites/comprehensive.jet
@@ -0,0 +1,77 @@
+2020_CAD
+2020_CAPE
+2019_hurricane_barry
+2019_halloween_storm
+2019_hurricane_lorenzo
+2019_memorial_day_heat_wave
+2020_denver_radiation_inversion
+2020_easter_storm
+2020_jan_cold_blast
+community
+custom_ESGgrid
+custom_ESGgrid_Central_Asia_3km
+custom_ESGgrid_Great_Lakes_snow_8km
+custom_ESGgrid_IndianOcean_6km
+custom_ESGgrid_NewZealand_3km
+custom_ESGgrid_Peru_12km
+custom_ESGgrid_SF_1p1km
+custom_GFDLgrid__GFDLgrid_USE_NUM_CELLS_IN_FILENAMES_eq_FALSE
+custom_GFDLgrid
+deactivate_tasks
+#get_from_AWS_ics_GEFS_lbcs_GEFS_fmt_grib2_2022040400_ensemble_2mems
+get_from_HPSS_ics_FV3GFS_lbcs_FV3GFS_fmt_grib2_2019061200
+get_from_HPSS_ics_FV3GFS_lbcs_FV3GFS_fmt_nemsio_2019061200
+get_from_HPSS_ics_FV3GFS_lbcs_FV3GFS_fmt_nemsio_2021032018
+get_from_HPSS_ics_FV3GFS_lbcs_FV3GFS_fmt_netcdf_2022060112_48h
+#get_from_HPSS_ics_GDAS_lbcs_GDAS_fmt_netcdf_2022040400_ensemble_2mems
+get_from_HPSS_ics_GSMGFS_lbcs_GSMGFS
+get_from_HPSS_ics_HRRR_lbcs_RAP
+get_from_HPSS_ics_RAP_lbcs_RAP
+get_from_NOMADS_ics_FV3GFS_lbcs_FV3GFS
+grid_CONUS_25km_GFDLgrid_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16
+grid_CONUS_3km_GFDLgrid_ics_FV3GFS_lbcs_FV3GFS_suite_RRFS_v1beta
+grid_RRFS_AK_13km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16_plot
+grid_RRFS_AK_3km_ics_FV3GFS_lbcs_FV3GFS_suite_HRRR
+grid_RRFS_CONUS_13km_ics_FV3GFS_lbcs_FV3GFS_suite_RAP
+grid_RRFS_CONUS_13km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16_plot
+grid_RRFS_CONUS_13km_ics_FV3GFS_lbcs_FV3GFS_suite_HRRR
+grid_RRFS_CONUS_13km_ics_FV3GFS_lbcs_FV3GFS_suite_RRFS_v1beta
+grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v15p2
+grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16_plot
+grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v17_p8_plot
+grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_suite_HRRR
+grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_suite_RAP
+grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_suite_RRFS_v1beta
+grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_RAP_suite_RAP
+grid_RRFS_CONUS_25km_ics_GSMGFS_lbcs_GSMGFS_suite_GFS_v15p2
+grid_RRFS_CONUS_25km_ics_NAM_lbcs_NAM_suite_GFS_v16
+grid_RRFS_CONUS_25km_ics_NAM_lbcs_NAM_suite_RRFS_v1beta
+#grid_RRFS_CONUS_3km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v15p2
+#grid_RRFS_CONUS_3km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v15_thompson_mynn_lam3km
+#grid_RRFS_CONUS_3km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16
+#grid_RRFS_CONUS_3km_ics_FV3GFS_lbcs_FV3GFS_suite_HRRR
+#grid_RRFS_CONUS_3km_ics_FV3GFS_lbcs_FV3GFS_suite_RRFS_v1beta
+grid_RRFS_CONUScompact_13km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16
+grid_RRFS_CONUScompact_13km_ics_HRRR_lbcs_RAP_suite_HRRR
+grid_RRFS_CONUScompact_13km_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta
+grid_RRFS_CONUScompact_25km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16
+grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_HRRR_suite_HRRR
+grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta
+grid_RRFS_CONUScompact_25km_ics_RRFS_lbcs_RRFS_suite_RRFS_v1beta
+grid_RRFS_CONUScompact_3km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16
+grid_RRFS_CONUScompact_3km_ics_HRRR_lbcs_RAP_suite_HRRR
+grid_RRFS_CONUScompact_3km_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta
+grid_RRFS_NA_13km_ics_FV3GFS_lbcs_FV3GFS_suite_RAP
+grid_SUBCONUS_Ind_3km_ics_FV3GFS_lbcs_FV3GFS_suite_WoFS_v0
+grid_SUBCONUS_Ind_3km_ics_HRRR_lbcs_HRRR_suite_HRRR
+grid_SUBCONUS_Ind_3km_ics_HRRR_lbcs_RAP_suite_WoFS_v0
+grid_SUBCONUS_Ind_3km_ics_NAM_lbcs_NAM_suite_GFS_v16
+grid_SUBCONUS_Ind_3km_ics_RAP_lbcs_RAP_suite_RRFS_v1beta_plot
+long_fcst
+MET_ensemble_verification_only_vx
+MET_ensemble_verification_only_vx_time_lag
+MET_ensemble_verification_winter_wx
+MET_verification_only_vx
+pregen_grid_orog_sfc_climo
+specify_EXTRN_MDL_SYSBASEDIR_ICS_LBCS
+specify_template_filenames
diff --git a/tests/WE2E/machine_suites/coverage.derecho b/tests/WE2E/machine_suites/coverage.derecho
index a948c76033..a10c865447 100644
--- a/tests/WE2E/machine_suites/coverage.derecho
+++ b/tests/WE2E/machine_suites/coverage.derecho
@@ -1,11 +1,11 @@
custom_ESGgrid_IndianOcean_6km
grid_RRFS_CONUS_13km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16_plot
+grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v15p2
grid_RRFS_CONUS_25km_ics_NAM_lbcs_NAM_suite_GFS_v16
+grid_RRFS_CONUS_3km_ics_FV3GFS_lbcs_FV3GFS_suite_RRFS_v1beta
grid_RRFS_CONUScompact_13km_ics_HRRR_lbcs_RAP_suite_HRRR
+grid_RRFS_CONUScompact_25km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16
grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta
grid_SUBCONUS_Ind_3km_ics_HRRR_lbcs_HRRR_suite_HRRR
pregen_grid_orog_sfc_climo
specify_template_filenames
-2019_hurricane_barry
-2019_memorial_day_heat_wave
-2020_denver_radiation_inversion
diff --git a/tests/WE2E/machine_suites/coverage.hera.intel.nco b/tests/WE2E/machine_suites/coverage.hera.intel.nco
index d5ab0d6fe8..e4b02a90ba 100644
--- a/tests/WE2E/machine_suites/coverage.hera.intel.nco
+++ b/tests/WE2E/machine_suites/coverage.hera.intel.nco
@@ -1,8 +1,8 @@
+2019_memorial_day_heat_wave
custom_ESGgrid_Peru_12km
get_from_HPSS_ics_FV3GFS_lbcs_FV3GFS_fmt_grib2_2019061200
get_from_HPSS_ics_GDAS_lbcs_GDAS_fmt_netcdf_2022040400_ensemble_2mems
get_from_HPSS_ics_HRRR_lbcs_RAP
-grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v15p2
grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16_plot
grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_RAP_suite_RAP
grid_RRFS_CONUS_25km_ics_GSMGFS_lbcs_GSMGFS_suite_GFS_v15p2
diff --git a/tests/WE2E/machine_suites/coverage.hercules b/tests/WE2E/machine_suites/coverage.hercules
index ec37d81a56..668a26e685 100644
--- a/tests/WE2E/machine_suites/coverage.hercules
+++ b/tests/WE2E/machine_suites/coverage.hercules
@@ -4,9 +4,9 @@ grid_RRFS_CONUS_13km_ics_FV3GFS_lbcs_FV3GFS_suite_RRFS_v1beta
grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v17_p8_plot
grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_suite_HRRR
grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_suite_RAP
-grid_RRFS_CONUScompact_25km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16
grid_RRFS_NA_13km_ics_FV3GFS_lbcs_FV3GFS_suite_RAP
grid_SUBCONUS_Ind_3km_ics_NAM_lbcs_NAM_suite_GFS_v16
MET_verification_only_vx
specify_EXTRN_MDL_SYSBASEDIR_ICS_LBCS
2019_hurricane_lorenzo
+2020_denver_radiation_inversion
diff --git a/tests/WE2E/machine_suites/coverage.jet b/tests/WE2E/machine_suites/coverage.jet
index 53308090b1..8c79a0b700 100644
--- a/tests/WE2E/machine_suites/coverage.jet
+++ b/tests/WE2E/machine_suites/coverage.jet
@@ -1,3 +1,4 @@
+2019_hurricane_barry
community
custom_ESGgrid
custom_ESGgrid_Great_Lakes_snow_8km
@@ -8,4 +9,3 @@ get_from_HPSS_ics_RAP_lbcs_RAP
grid_RRFS_AK_3km_ics_FV3GFS_lbcs_FV3GFS_suite_HRRR
grid_RRFS_CONUS_13km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16_plot
grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v15p2
-grid_RRFS_CONUS_3km_ics_FV3GFS_lbcs_FV3GFS_suite_RRFS_v1beta
diff --git a/tests/WE2E/test_configs/aqm/config.aqm_grid_AQM_NA13km_suite_GFS_v16.yaml b/tests/WE2E/test_configs/aqm/config.aqm_grid_AQM_NA13km_suite_GFS_v16.yaml
index 2901d1ebf1..a9548f8b5f 100644
--- a/tests/WE2E/test_configs/aqm/config.aqm_grid_AQM_NA13km_suite_GFS_v16.yaml
+++ b/tests/WE2E/test_configs/aqm/config.aqm_grid_AQM_NA13km_suite_GFS_v16.yaml
@@ -34,6 +34,7 @@ task_get_extrn_lbcs:
EXTRN_MDL_LBCS_OFFSET_HRS: 0
USE_USER_STAGED_EXTRN_FILES: true
task_run_fcst:
+ OMP_NUM_THREADS_RUN_FCST: 1
DT_ATMOS: 180
LAYOUT_X: 50
LAYOUT_Y: 34
diff --git a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_CONUS_3km_GFDLgrid_ics_FV3GFS_lbcs_FV3GFS_suite_RRFS_v1beta.yaml b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_CONUS_3km_GFDLgrid_ics_FV3GFS_lbcs_FV3GFS_suite_RRFS_v1beta.yaml
index de456cea73..f4c40bf722 100644
--- a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_CONUS_3km_GFDLgrid_ics_FV3GFS_lbcs_FV3GFS_suite_RRFS_v1beta.yaml
+++ b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_CONUS_3km_GFDLgrid_ics_FV3GFS_lbcs_FV3GFS_suite_RRFS_v1beta.yaml
@@ -20,5 +20,3 @@ task_get_extrn_lbcs:
EXTRN_MDL_NAME_LBCS: FV3GFS
LBC_SPEC_INTVL_HRS: 3
USE_USER_STAGED_EXTRN_FILES: true
-task_run_fcst:
- OMP_NUM_THREADS_RUN_FCST: 3
diff --git a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_AK_3km_ics_FV3GFS_lbcs_FV3GFS_suite_HRRR.yaml b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_AK_3km_ics_FV3GFS_lbcs_FV3GFS_suite_HRRR.yaml
index 4a340185f3..6d4dbc3b33 100644
--- a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_AK_3km_ics_FV3GFS_lbcs_FV3GFS_suite_HRRR.yaml
+++ b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_AK_3km_ics_FV3GFS_lbcs_FV3GFS_suite_HRRR.yaml
@@ -19,5 +19,3 @@ task_get_extrn_lbcs:
EXTRN_MDL_NAME_LBCS: FV3GFS
LBC_SPEC_INTVL_HRS: 3
USE_USER_STAGED_EXTRN_FILES: true
-task_run_fcst:
- OMP_NUM_THREADS_RUN_FCST: 3
diff --git a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUScompact_3km_ics_HRRR_lbcs_RAP_suite_HRRR.yaml b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUScompact_3km_ics_HRRR_lbcs_RAP_suite_HRRR.yaml
index 35be12a1ee..b00a24ae84 100644
--- a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUScompact_3km_ics_HRRR_lbcs_RAP_suite_HRRR.yaml
+++ b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUScompact_3km_ics_HRRR_lbcs_RAP_suite_HRRR.yaml
@@ -23,5 +23,3 @@ task_get_extrn_lbcs:
USE_USER_STAGED_EXTRN_FILES: true
EXTRN_MDL_FILES_LBCS:
- '{yy}{jjj}{hh}00{fcst_hr:02d}00'
-task_run_fcst:
- OMP_NUM_THREADS_RUN_FCST: 3
diff --git a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUScompact_3km_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta.yaml b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUScompact_3km_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta.yaml
index 1265fa8e0c..44dfec5e75 100644
--- a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUScompact_3km_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta.yaml
+++ b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUScompact_3km_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta.yaml
@@ -24,5 +24,3 @@ task_get_extrn_lbcs:
USE_USER_STAGED_EXTRN_FILES: true
EXTRN_MDL_FILES_LBCS:
- '{yy}{jjj}{hh}00{fcst_hr:02d}00'
-task_run_fcst:
- OMP_NUM_THREADS_RUN_FCST: 3
diff --git a/tests/test_python/test_create_model_configure_file.py b/tests/test_python/test_create_model_configure_file.py
index d5aea79ed8..9475028505 100644
--- a/tests/test_python/test_create_model_configure_file.py
+++ b/tests/test_python/test_create_model_configure_file.py
@@ -43,11 +43,9 @@ def setUp(self):
set_env_var("USHdir", USHdir)
set_env_var("MODEL_CONFIG_FN", MODEL_CONFIG_FN)
set_env_var("MODEL_CONFIG_TMPL_FP", MODEL_CONFIG_TMPL_FP)
- set_env_var("PE_MEMBER01", 24)
set_env_var("FCST_LEN_HRS", 72)
set_env_var("FHROT", 0)
set_env_var("DT_ATMOS", 1)
- set_env_var("OMP_NUM_THREADS_RUN_FCST", 1)
set_env_var("RESTART_INTERVAL", 4)
set_env_var("ITASKS", 1)
diff --git a/ush/config.aqm.yaml b/ush/config.aqm.yaml
index 21a73591ee..77512ec7a2 100644
--- a/ush/config.aqm.yaml
+++ b/ush/config.aqm.yaml
@@ -46,6 +46,7 @@ task_get_extrn_lbcs:
FV3GFS_FILE_FMT_LBCS: netcdf
EXTRN_MDL_LBCS_OFFSET_HRS: 0
task_run_fcst:
+ OMP_NUM_THREADS_RUN_FCST: 1
DT_ATMOS: 180
LAYOUT_X: 50
LAYOUT_Y: 34
diff --git a/ush/config_defaults.yaml b/ush/config_defaults.yaml
index 488c2d5d3d..6af52578b8 100644
--- a/ush/config_defaults.yaml
+++ b/ush/config_defaults.yaml
@@ -1722,7 +1722,7 @@ task_run_fcst:
#-----------------------------------------------------------------------
#
KMP_AFFINITY_RUN_FCST: "scatter"
- OMP_NUM_THREADS_RUN_FCST: 2 # atmos_nthreads in model_configure
+ OMP_NUM_THREADS_RUN_FCST: 2 # ATM_omp_num_threads in ufs.configure
OMP_STACKSIZE_RUN_FCST: "1024m"
#
#-----------------------------------------------------------------------
@@ -1918,7 +1918,7 @@ task_run_fcst:
QUILTING: true
PRINT_ESMF: false
- PE_MEMBER01: '{{ LAYOUT_Y * LAYOUT_X + WRTCMP_write_groups * WRTCMP_write_tasks_per_group if QUILTING else LAYOUT_Y * LAYOUT_X}}'
+ PE_MEMBER01: '{{ OMP_NUM_THREADS_RUN_FCST * (LAYOUT_Y * LAYOUT_X + WRTCMP_write_groups * WRTCMP_write_tasks_per_group) if QUILTING else OMP_NUM_THREADS_RUN_FCST * (LAYOUT_Y * LAYOUT_X)}}'
WRTCMP_write_groups: ""
WRTCMP_write_tasks_per_group: ""
diff --git a/ush/create_model_configure_file.py b/ush/create_model_configure_file.py
index 9b430b83ae..e0e420c71b 100644
--- a/ush/create_model_configure_file.py
+++ b/ush/create_model_configure_file.py
@@ -70,7 +70,6 @@ def create_model_configure_file(
# -----------------------------------------------------------------------
#
settings = {
- "PE_MEMBER01": PE_MEMBER01,
"start_year": cdate.year,
"start_month": cdate.month,
"start_day": cdate.day,
@@ -78,7 +77,6 @@ def create_model_configure_file(
"nhours_fcst": fcst_len_hrs,
"fhrot": fhrot,
"dt_atmos": DT_ATMOS,
- "atmos_nthreads": OMP_NUM_THREADS_RUN_FCST,
"restart_interval": RESTART_INTERVAL,
"itasks": ITASKS,
"write_dopost": f".{lowercase(str(WRITE_DOPOST))}.",
diff --git a/ush/create_ufs_configure_file.py b/ush/create_ufs_configure_file.py
index dc6a43420d..e97fa25c6a 100644
--- a/ush/create_ufs_configure_file.py
+++ b/ush/create_ufs_configure_file.py
@@ -50,6 +50,8 @@ def create_ufs_configure_file(run_dir):
# Set output file path
#
ufs_config_fp = os.path.join(run_dir, UFS_CONFIG_FN)
+ pe_member01_m1 = str(int(PE_MEMBER01)-1)
+ aqm_pe_member01_m1 = str(int(LAYOUT_X*LAYOUT_Y)-1)
#
#-----------------------------------------------------------------------
#
@@ -62,7 +64,10 @@ def create_ufs_configure_file(run_dir):
settings = {
"dt_atmos": DT_ATMOS,
"print_esmf": PRINT_ESMF,
- "cpl_aqm": CPL_AQM
+ "cpl_aqm": CPL_AQM,
+ "pe_member01_m1": pe_member01_m1,
+ "aqm_pe_member01_m1": aqm_pe_member01_m1,
+ "atm_omp_num_threads": OMP_NUM_THREADS_RUN_FCST,
}
settings_str = cfg_to_yaml_str(settings)
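The minus-one arithmetic above reflects that the PET-list bounds are zero-based and inclusive, so the upper bound is the task count minus one. A hedged sketch with illustrative values:

.. code-block:: python

   # PET-list bounds: upper bound = task count - 1 (values are illustrative).
   PE_MEMBER01 = 40           # total MPI tasks for the forecast member
   LAYOUT_X, LAYOUT_Y = 5, 2  # AQM runs on the compute (non-write) tasks

   pe_member01_m1 = PE_MEMBER01 - 1               # ATM_petlist_bounds: 0 39
   aqm_pe_member01_m1 = LAYOUT_X * LAYOUT_Y - 1   # AQM_petlist_bounds: 0 9
   print(pe_member01_m1, aqm_pe_member01_m1)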
diff --git a/ush/machine/gaea.yaml b/ush/machine/gaea.yaml
index 1ec2ded2ef..92d33d7ad2 100644
--- a/ush/machine/gaea.yaml
+++ b/ush/machine/gaea.yaml
@@ -12,7 +12,7 @@ platform:
QUEUE_HPSS: normal
REMOVE_MEMORY: True
PARTITION_HPSS: eslogin_c5
- RUN_CMD_FCST: srun --export=ALL -n ${PE_MEMBER01}
+ RUN_CMD_FCST: srun --export=ALL
RUN_CMD_POST: srun --export=ALL -n $nprocs
RUN_CMD_PRDGEN: srun --export=ALL -n $nprocs
RUN_CMD_SERIAL: time
@@ -47,9 +47,9 @@ rocoto:
tasks:
metatask_run_ensemble:
task_run_fcst_mem#mem#:
- cores: '{{ task_run_fcst.PE_MEMBER01 // 1 }}'
+ cores:
native: '--cpus-per-task {{ task_run_fcst.OMP_NUM_THREADS_RUN_FCST|int }} --exclusive {{ platform.SCHED_NATIVE_CMD }}'
- nodes:
+ nodes: '{{ task_run_fcst.NNODES_RUN_FCST // 1 }}:ppn={{ task_run_fcst.PPN_RUN_FCST // 1 }}'
nnodes:
nodesize:
ppn:
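For reference, the new ``nodes`` entry is a Jinja template; ``// 1`` is Jinja's integer floor division, used here to coerce the values to integers. A hedged sketch of how it renders, with sample values:

.. code-block:: python

   # Render the Rocoto "nodes" entry with illustrative values.
   from jinja2 import Template

   tmpl = Template("{{ task_run_fcst.NNODES_RUN_FCST // 1 }}"
                   ":ppn={{ task_run_fcst.PPN_RUN_FCST // 1 }}")
   print(tmpl.render(task_run_fcst={"NNODES_RUN_FCST": 2, "PPN_RUN_FCST": 40}))
   # -> 2:ppn=40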
diff --git a/ush/machine/hera.yaml b/ush/machine/hera.yaml
index 80fbb8fc98..189689f30d 100644
--- a/ush/machine/hera.yaml
+++ b/ush/machine/hera.yaml
@@ -48,9 +48,9 @@ rocoto:
tasks:
metatask_run_ensemble:
task_run_fcst_mem#mem#:
- cores: '{{ task_run_fcst.PE_MEMBER01 // 1 }}'
+ cores:
native: '--cpus-per-task {{ task_run_fcst.OMP_NUM_THREADS_RUN_FCST|int }} --exclusive {{ platform.SCHED_NATIVE_CMD }}'
- nodes:
+ nodes: '{{ task_run_fcst.NNODES_RUN_FCST // 1 }}:ppn={{ task_run_fcst.PPN_RUN_FCST // 1 }}'
nnodes:
nodesize:
ppn:
diff --git a/ush/machine/hercules.yaml b/ush/machine/hercules.yaml
index e29801dd49..6a325094da 100644
--- a/ush/machine/hercules.yaml
+++ b/ush/machine/hercules.yaml
@@ -13,7 +13,7 @@ platform:
QUEUE_FCST: batch
PARTITION_HPSS: service
QUEUE_HPSS: batch
- RUN_CMD_FCST: srun --export=ALL -n ${PE_MEMBER01}
+ RUN_CMD_FCST: srun --export=ALL
RUN_CMD_POST: srun --export=ALL
RUN_CMD_PRDGEN: srun --export=ALL
RUN_CMD_SERIAL: time
@@ -51,3 +51,14 @@ cpl_aqm_parm:
COMINfire_default: /work/noaa/epic/SRW-AQM_DATA/aqm_data/RAVE_fire
COMINgefs_default: /work/noaa/epic/SRW-AQM_DATA/aqm_data/GEFS_DATA
NEXUS_GFS_SFC_DIR: /work/noaa/epic/SRW-AQM_DATA/aqm_data/GFS_SFC_DATA
+
+rocoto:
+ tasks:
+ metatask_run_ensemble:
+ task_run_fcst_mem#mem#:
+ cores:
+ native: '--cpus-per-task {{ task_run_fcst.OMP_NUM_THREADS_RUN_FCST|int }} --exclusive {{ platform.SCHED_NATIVE_CMD }}'
+ nodes: '{{ task_run_fcst.NNODES_RUN_FCST // 1 }}:ppn={{ task_run_fcst.PPN_RUN_FCST // 1 }}'
+ nnodes:
+ nodesize:
+ ppn:
diff --git a/ush/machine/jet.yaml b/ush/machine/jet.yaml
index 375711c61a..847530e4eb 100644
--- a/ush/machine/jet.yaml
+++ b/ush/machine/jet.yaml
@@ -7,7 +7,7 @@ platform:
TEST_NDAS_OBS_DIR: /mnt/lfs5/HFIP/hfv3gfs/role.epic/UFS_SRW_data/develop/obs_data/ndas/proc
TEST_NOHRSC_OBS_DIR: /mnt/lfs5/HFIP/hfv3gfs/role.epic/UFS_SRW_data/develop/obs_data/nohrsc/proc
DOMAIN_PREGEN_BASEDIR: /mnt/lfs5/HFIP/hfv3gfs/role.epic/UFS_SRW_data/develop/FV3LAM_pregen
- PARTITION_DEFAULT: sjet,vjet,kjet,xjet
+ PARTITION_DEFAULT: vjet,kjet,xjet
QUEUE_DEFAULT: batch
PARTITION_FCST: xjet
QUEUE_FCST: batch
@@ -49,9 +49,9 @@ rocoto:
tasks:
metatask_run_ensemble:
task_run_fcst_mem#mem#:
- cores: '{{ task_run_fcst.PE_MEMBER01 // 1 }}'
+ cores:
native: '--cpus-per-task {{ task_run_fcst.OMP_NUM_THREADS_RUN_FCST|int }} --exclusive {{ platform.SCHED_NATIVE_CMD }}'
- nodes:
+ nodes: '{{ task_run_fcst.NNODES_RUN_FCST // 1 }}:ppn={{ task_run_fcst.PPN_RUN_FCST // 1 }}'
nnodes:
nodesize:
ppn:
diff --git a/ush/machine/orion.yaml b/ush/machine/orion.yaml
index 3f756e2836..5467160167 100644
--- a/ush/machine/orion.yaml
+++ b/ush/machine/orion.yaml
@@ -13,7 +13,7 @@ platform:
QUEUE_FCST: batch
PARTITION_HPSS: service
QUEUE_HPSS: batch
- RUN_CMD_FCST: srun --export=ALL -n ${PE_MEMBER01}
+ RUN_CMD_FCST: srun --export=ALL
RUN_CMD_POST: srun --export=ALL
RUN_CMD_PRDGEN: srun --export=ALL
RUN_CMD_SERIAL: time
@@ -50,3 +50,14 @@ cpl_aqm_parm:
COMINfire_default: /work/noaa/epic/SRW-AQM_DATA/aqm_data/RAVE_fire
COMINgefs_default: /work/noaa/epic/SRW-AQM_DATA/aqm_data/GEFS_DATA
NEXUS_GFS_SFC_DIR: /work/noaa/epic/SRW-AQM_DATA/aqm_data/GFS_SFC_DATA
+
+rocoto:
+ tasks:
+ metatask_run_ensemble:
+ task_run_fcst_mem#mem#:
+ cores:
+ native: '--cpus-per-task {{ task_run_fcst.OMP_NUM_THREADS_RUN_FCST|int }} --exclusive {{ platform.SCHED_NATIVE_CMD }}'
+ nodes: '{{ task_run_fcst.NNODES_RUN_FCST // 1 }}:ppn={{ task_run_fcst.PPN_RUN_FCST // 1 }}'
+ nnodes:
+ nodesize:
+ ppn:
From be863bc9aad861d024ed528dd3c8509ee6a4c2b8 Mon Sep 17 00:00:00 2001
From: Gillian Petro <96886803+gspetro-NOAA@users.noreply.github.com>
Date: Tue, 12 Nov 2024 12:53:44 -0500
Subject: [PATCH 37/39] [develop]: Add GitHub Actions to check that Technical
Docs are up-to-date (#1152)
* Adds a GitHub Actions workflow & script to check whether the Technical Documentation is up-to-date
* Turns on the -W flag so that documentation build warnings register as errors
* Updates the requirements file so that --remove-old flag can be used to check Tech Docs
* Updates the Contributor's Guide w/information on Technical Documentation and troubleshooting guidelines
* Fixes a few broken links.
---
.github/scripts/check_tech_doc.sh | 28 ++++++++++
.github/workflows/doc_tests.yaml | 25 +++++++++
doc/ContribGuide/documentation.rst | 52 ++++++++++++++++++-
doc/Makefile | 2 +-
...eds.rst => set_fv3nml_ens_stoch_seeds.rst} | 0
...rst => set_fv3nml_sfc_climo_filenames.rst} | 0
.../BuildingRunningTesting/VXCases.rst | 3 +-
.../CustomizingTheWorkflow/DefineWorkflow.rst | 2 +-
doc/conf.py | 10 +---
doc/requirements.in | 3 +-
doc/requirements.txt | 9 +---
11 files changed, 112 insertions(+), 22 deletions(-)
create mode 100755 .github/scripts/check_tech_doc.sh
create mode 100644 .github/workflows/doc_tests.yaml
rename doc/TechDocs/ush/{set_FV3nml_ens_stoch_seeds.rst => set_fv3nml_ens_stoch_seeds.rst} (100%)
rename doc/TechDocs/ush/{set_FV3nml_sfc_climo_filenames.rst => set_fv3nml_sfc_climo_filenames.rst} (100%)
diff --git a/.github/scripts/check_tech_doc.sh b/.github/scripts/check_tech_doc.sh
new file mode 100755
index 0000000000..d988e50cd6
--- /dev/null
+++ b/.github/scripts/check_tech_doc.sh
@@ -0,0 +1,28 @@
+#!/bin/bash
+# This script recreates technical documentation for the ush and tests/WE2E Python scripts
+# If the tech docs produced here do not match the branch's contents, the script will fail
+
+set -eo pipefail
+
+# Install prerequisites
+pip install sphinx
+pip install sphinx-rtd-theme
+pip install sphinxcontrib-bibtex
+
+# Regenerate tech docs in ush and tests/WE2E based on current state of scripts in those directories.
+cd doc/TechDocs
+sphinx-apidoc -fM --remove-old -o ./ush ../../ush
+sphinx-apidoc -fM --remove-old -o ./tests/WE2E ../../tests/WE2E
+
+# Check for mismatch between what comes out of this action and what is in the PR.
+status=`git status -s`
+
+if [ -n "${status}" ]; then
+ echo "${status}"
+ echo ""
+ echo "Please update your Technical Documentation RST files."
+ exit 1
+else
+ echo "Technical documentation is up-to-date."
+ exit 0
+fi
diff --git a/.github/workflows/doc_tests.yaml b/.github/workflows/doc_tests.yaml
new file mode 100644
index 0000000000..34fb01a9ac
--- /dev/null
+++ b/.github/workflows/doc_tests.yaml
@@ -0,0 +1,25 @@
+name: Doc Tests
+on:
+ push:
+ pull_request:
+ branches:
+ - develop
+ - 'release/*'
+ workflow_dispatch:
+
+defaults:
+ run:
+ shell: bash -leo pipefail {0}
+
+jobs:
+ doc_tests:
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@v4
+ - name: Check tech docs
+ run: .github/scripts/check_tech_doc.sh
+ - name: Build documentation
+ run: |
+ cd doc
+ make doc
diff --git a/doc/ContribGuide/documentation.rst b/doc/ContribGuide/documentation.rst
index 9e0bad6bda..babcd24368 100644
--- a/doc/ContribGuide/documentation.rst
+++ b/doc/ContribGuide/documentation.rst
@@ -69,4 +69,54 @@ Please follow these guidelines when contributing to the documentation:
.. code-block:: python
- n = 88
\ No newline at end of file
+ n = 88
+
+Troubleshooting Documentation
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+In the SRW App documentation Makefile (``ufs-srweather-app/doc/Makefile``), the ``-W`` option causes documentation builds to fail when there are errors or warnings in the build.
+This ensures that the documentation remains up-to-date and notifies developers of any new issues (like failing links). However, the build will fail when it hits the first error without showing subsequent errors.
+When troubleshooting, it can be useful to see all warnings and errors at once. To do so, comment out the ``-W`` flag in ``SPHINXOPTS`` in the Makefile and build the documentation by running ``make doc`` from the ``doc`` directory.
+
+Technical Documentation Guidelines
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Technical (API-like) documentation is generated for any Python scripts in the ``ush`` or ``tests/WE2E`` directories.
+When developers change Python files in these directories, they need to update the Python docstrings (i.e., comments in ``"""triple quotes"""``) to reflect the changes they made.
+Each Python script should include:
+
+ * A summary of the script's purpose/functionality
+
+ * Should start with a verb, e.g., "checks" or "loads" or "initializes"
+
+ * Docstrings for each method (except methods like ``_parse_args`` that start with an underscore). These docstrings should include:
+
+ * A description of what the method does (starting with a verb, e.g., "checks" or "parses")
+ * A list of method parameters, or ``Args:``, with a definition and expected data type for each
+ * A return statement (``Returns:``) -- if applicable
+ * List of exceptions (``Raises:``) -- if applicable
+
+.. note::
+
+ Python does not have truly private methods, but methods that start with an underscore are the closest equivalent. In the SRW App, the underscore signals that this method is only accessed by the script within which it is called.
+
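A short hedged illustration of these conventions (``load_config`` is a hypothetical helper, not an actual SRW App function):

.. code-block:: python

   # Hypothetical example of the docstring style described above.
   import yaml

   def load_config(config_path):
       """Loads a YAML configuration file and returns its contents.

       Args:
           config_path (str): Path to the YAML file to read.

       Returns:
           dict: The parsed configuration.

       Raises:
           FileNotFoundError: If ``config_path`` does not exist.
       """
       with open(config_path) as f:
           return yaml.safe_load(f)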
+After updating the docstrings, developers need to update the corresponding RST files.
+To do this successfully, developers must have *sphinx>=7.4.0* installed on their system. To update the RST files, run:
+
+.. code-block:: console
+
+ cd ufs-srweather-app/doc/TechDocs
+ sphinx-apidoc -fM --remove-old -o ./ush ../../ush
+ sphinx-apidoc -fM --remove-old -o ./tests/WE2E ../../tests/WE2E
+
+.. note::
+
+ Developers who do not have *sphinx>=7.4.0* installed may issue the following commands from ``ufs-srweather-app/doc/TechDocs`` before running the sphinx-apidoc commands above:
+
+ .. code-block:: console
+
+ rm -rf ush
+ rm -rf tests/WE2E
+
+ This will delete current RST files before recreating them with the ``sphinx-apidoc`` command based on the current contents of ``ush`` and ``tests/WE2E``. This step is necessary because the ``--remove-old`` flag does not work with earlier versions of sphinx.
diff --git a/doc/Makefile b/doc/Makefile
index c91f2f147b..9663ba3996 100644
--- a/doc/Makefile
+++ b/doc/Makefile
@@ -1,6 +1,6 @@
# Makefile for Sphinx documentation
-SPHINXOPTS = -a -n #-W
+SPHINXOPTS = -a -n -W
SPHINXBUILD = sphinx-build
SOURCEDIR = .
BUILDDIR = build
diff --git a/doc/TechDocs/ush/set_FV3nml_ens_stoch_seeds.rst b/doc/TechDocs/ush/set_fv3nml_ens_stoch_seeds.rst
similarity index 100%
rename from doc/TechDocs/ush/set_FV3nml_ens_stoch_seeds.rst
rename to doc/TechDocs/ush/set_fv3nml_ens_stoch_seeds.rst
diff --git a/doc/TechDocs/ush/set_FV3nml_sfc_climo_filenames.rst b/doc/TechDocs/ush/set_fv3nml_sfc_climo_filenames.rst
similarity index 100%
rename from doc/TechDocs/ush/set_FV3nml_sfc_climo_filenames.rst
rename to doc/TechDocs/ush/set_fv3nml_sfc_climo_filenames.rst
diff --git a/doc/UsersGuide/BuildingRunningTesting/VXCases.rst b/doc/UsersGuide/BuildingRunningTesting/VXCases.rst
index b36afcefd4..f8b01f993c 100644
--- a/doc/UsersGuide/BuildingRunningTesting/VXCases.rst
+++ b/doc/UsersGuide/BuildingRunningTesting/VXCases.rst
@@ -262,7 +262,8 @@ Point STAT Files
The Point-Stat files contain continuous variables like temperature, pressure, and wind speed. A description of the Point-Stat file can be found :ref:`here ` in the MET documentation.
-The Point-Stat files contain a potentially overwhelming amount of information. Therefore, it is recommended that users focus on the CNT MET test, which contains the `RMSE `_ and `MBIAS `_ statistics. The MET tests are defined in column 24 'LINE_TYPE' of the ``.stat`` file. Look for 'CNT' in this column. Then find column 66-68 for MBIAS and 78-80 for RMSE statistics. A full description of this file can be found :ref:`here `.
+The Point-Stat files contain a potentially overwhelming amount of information. Therefore, it is recommended that users focus on the CNT MET test, which contains the Root Mean Squared Error (`RMSE `_) and Magnitude &
+Multiplicative bias (`MBIAS `_) statistics. The MET tests are defined in column 24 'LINE_TYPE' of the ``.stat`` file. Look for 'CNT' in this column. Then find column 66-68 for MBIAS and 78-80 for RMSE statistics. A full description of this file can be found :ref:`here `.
To narrow down the variable field even further, users can focus on these weather variables:
diff --git a/doc/UsersGuide/CustomizingTheWorkflow/DefineWorkflow.rst b/doc/UsersGuide/CustomizingTheWorkflow/DefineWorkflow.rst
index 4ea8f7052d..b5d587969e 100644
--- a/doc/UsersGuide/CustomizingTheWorkflow/DefineWorkflow.rst
+++ b/doc/UsersGuide/CustomizingTheWorkflow/DefineWorkflow.rst
@@ -6,7 +6,7 @@ Defining an SRW App Workflow
Many predefined workflows with optional variants exist within the Short-Range Weather Application, but the Application also includes the ability to define a new workflow from scratch. This functionality allows users to add tasks to the workflow to meet their scientific exploration needs.
-Rocoto is the primary workflow manager software used by the UFS SRW App. Rocoto workflows are defined in an XML file (``FV3LAM_wflow.xml``) based on parameters set during experiment generation. This section explains how the Rocoto XML is built using a Jinja2 template (`Jinja docs here `__) and structured YAML files. The YAML follows the requirements in the `Rocoto documentation `__ with a few exceptions or additions outlined in this documentation.
+Rocoto is the primary workflow manager software used by the UFS SRW App. Rocoto workflows are defined in an XML file (``FV3LAM_wflow.xml``) based on parameters set during experiment generation. This section explains how the Rocoto XML is built using a Jinja2 template (`Jinja docs here `_) and structured YAML files. The YAML follows the requirements in the `Rocoto documentation `__ with a few exceptions or additions outlined in this documentation.
The Jinja2 Template
===================
diff --git a/doc/conf.py b/doc/conf.py
index f1f094d545..5989ae0d74 100644
--- a/doc/conf.py
+++ b/doc/conf.py
@@ -38,7 +38,7 @@
nitpick_ignore = [('py:class', 'obj'),('py:class',
'yaml.dumper.Dumper'),('py:class',
- 'xml.etree.ElementTree'),]
+ 'xml.etree.ElementTree'),('py:class', 'Basemap'),]
# -- General configuration ---------------------------------------------------
@@ -46,7 +46,6 @@
extensions = [
'sphinx_rtd_theme',
'sphinx.ext.autodoc',
- 'sphinxcontrib.autoyaml',
'sphinx.ext.doctest',
'sphinx.ext.intersphinx',
'sphinx.ext.extlinks',
@@ -310,10 +309,3 @@ def warn_undocumented_members(app, what, name, obj, options, lines):
'uw': ('https://uwtools.readthedocs.io/en/main/%s', '%s'),
}
-# -- Options for autoyaml extension ---------------------------------------
-
-autoyaml_root = "../ush"
-autoyaml_doc_delimiter = "###" # Character(s) which start a documentation comment.
-autoyaml_comment = "#" #Comment start character(s).
-autoyaml_level = 6
-#autoyaml_safe_loader = False
\ No newline at end of file
diff --git a/doc/requirements.in b/doc/requirements.in
index 831cdc431d..75a70ab416 100644
--- a/doc/requirements.in
+++ b/doc/requirements.in
@@ -1,4 +1,3 @@
-sphinx>=6.0.0
+sphinx>=7.4.0
sphinx_rtd_theme
sphinxcontrib-bibtex
-sphinxcontrib-autoyaml
diff --git a/doc/requirements.txt b/doc/requirements.txt
index 38fdd2ef01..faa8455abe 100644
--- a/doc/requirements.txt
+++ b/doc/requirements.txt
@@ -2,7 +2,7 @@
# This file is autogenerated by pip-compile with Python 3.11
# by the following command:
#
-# pip-compile --strip-extras requirements.in
+# pip-compile requirements.in
#
alabaster==0.7.16
# via sphinx
@@ -42,27 +42,22 @@ pyyaml==6.0.1
# via pybtex
requests==2.32.2
# via sphinx
-ruamel-yaml==0.16.13
- # via sphinxcontrib-autoyaml
six==1.16.0
# via
# latexcodec
# pybtex
snowballstemmer==2.2.0
# via sphinx
-sphinx==7.2.6
+sphinx==7.4.7
# via
# -r requirements.in
# sphinx-rtd-theme
- # sphinxcontrib-autoyaml
# sphinxcontrib-bibtex
# sphinxcontrib-jquery
sphinx-rtd-theme==2.0.0
# via -r requirements.in
sphinxcontrib-applehelp==1.0.8
# via sphinx
-sphinxcontrib-autoyaml==1.1.1
- # via -r requirements.in
sphinxcontrib-bibtex==2.6.2
# via -r requirements.in
sphinxcontrib-devhelp==1.0.6
From fae3a7cf0beaf41e11f5cec0f97c07f04de9a773 Mon Sep 17 00:00:00 2001
From: Michael Kavulich
Date: Wed, 13 Nov 2024 12:55:28 -0700
Subject: [PATCH 38/39] [develop] Add Community Fire Behavior Model (#1139)
This PR introduces the Community Fire Behavior Module (ufs-community/ufs-weather-model#2220) to the SRW App.
In addition, there are a number of general improvements to the UFS SRW code and workflow:
* The addition of the build_settings.yaml file that is placed in the exec directory
* Improve capability to use a different set of vertical levels
* Flexible configuration file name
* Additional options for retrieve_data.py
* Speedup of symbolic linking
* Various random improvements
---------
Co-authored-by: Dan Rosen
Co-authored-by: Christina Holt <56881914+christinaholtNOAA@users.noreply.github.com>
---
.github/workflows/python_tests.yaml | 13 +-
CMakeLists.txt | 30 +-
devbuild.sh | 7 +-
.../DefaultVarsTable.rst | 2 +-
.../BuildingRunningTesting/FIRE.rst | 257 ++++++++++++++++++
.../BuildingRunningTesting/index.rst | 1 +
.../CustomizingTheWorkflow/ConfigWorkflow.rst | 131 ++++++++-
.../CustomizingTheWorkflow/LAMGrids.rst | 29 +-
doc/UsersGuide/Reference/Glossary.rst | 3 +
doc/conf.py | 1 +
modulefiles/build_hera_intel.lua | 2 +
modulefiles/build_hercules_intel.lua | 2 +
modulefiles/build_jet_intel.lua | 2 +
modulefiles/build_odin_intel.lua | 2 +
modulefiles/build_orion_intel.lua | 2 +
modulefiles/build_singularity_gnu.lua | 2 +
parm/diag_table.FV3_HRRR | 3 +
parm/namelist.fire | 28 ++
parm/ufs.configure | 56 ++--
scripts/exregional_run_fcst.sh | 46 +++-
sorc/build_settings_template.yaml | 23 ++
.../ufs_srweather_app_meta.h.in | 0
tests/WE2E/WE2E_summary.py | 22 +-
tests/WE2E/run_WE2E_tests.py | 2 +-
...ig.UFS_FIRE_multifire_one-way-coupled.yaml | 76 ++++++
.../fire/config.UFS_FIRE_one-way-coupled.yaml | 60 ++++
tests/WE2E/utils.py | 12 +-
.../test_create_diag_table_file.py | 1 +
.../test_python/test_generate_FV3LAM_wflow.py | 11 +-
ufs_srweather_app.settings.in | 21 --
ush/bash_utils/boolify.sh | 18 --
ush/bash_utils/create_symlink_to_file.sh | 15 +-
ush/config.fire.yaml | 67 +++++
ush/config_defaults.yaml | 152 ++++++++++-
ush/create_diag_table_file.py | 5 +-
ush/create_ufs_configure_file.py | 109 +++++---
ush/generate_FV3LAM_wflow.py | 96 ++++++-
ush/machine/derecho.yaml | 1 +
ush/machine/gaea.yaml | 1 +
ush/machine/hera.yaml | 1 +
ush/machine/hercules.yaml | 1 +
ush/machine/jet.yaml | 1 +
ush/machine/noaacloud.yaml | 1 +
ush/machine/odin.yaml | 1 +
ush/machine/orion.yaml | 1 +
ush/machine/stampede.yaml | 1 +
ush/machine/wcoss2.yaml | 1 +
ush/predef_grid_params.yaml | 76 +++++-
ush/retrieve_data.py | 11 +
ush/setup.py | 91 ++++++-
ush/valid_param_vals.yaml | 4 +-
51 files changed, 1288 insertions(+), 213 deletions(-)
create mode 100644 doc/UsersGuide/BuildingRunningTesting/FIRE.rst
create mode 100644 parm/namelist.fire
create mode 100644 sorc/build_settings_template.yaml
rename ufs_srweather_app_meta.h.in => sorc/ufs_srweather_app_meta.h.in (100%)
create mode 100644 tests/WE2E/test_configs/fire/config.UFS_FIRE_multifire_one-way-coupled.yaml
create mode 100644 tests/WE2E/test_configs/fire/config.UFS_FIRE_one-way-coupled.yaml
delete mode 100644 ufs_srweather_app.settings.in
create mode 100644 ush/config.fire.yaml
diff --git a/.github/workflows/python_tests.yaml b/.github/workflows/python_tests.yaml
index fb0de16910..113ec3f59c 100644
--- a/.github/workflows/python_tests.yaml
+++ b/.github/workflows/python_tests.yaml
@@ -30,10 +30,6 @@ jobs:
cache-downloads: true
cache-environment: true
- - name: Checkout externals
- run: |
- ./manage_externals/checkout_externals ufs-weather-model
-
- name: Lint the python code
run: |
micromamba activate srw_app
@@ -44,17 +40,22 @@ jobs:
pylint ush/set_fv3nml*.py
pylint ush/update_input_nml.py
+ - name: Checkout externals
+ run: |
+ ./manage_externals/checkout_externals ufs-weather-model
+
- name: Run python unittests
run: |
# exclude test_retrieve_data that is tested in functional test
micromamba activate srw_app
export UNIT_TEST=True
export PYTHONPATH=$(pwd)/ush
- python -m unittest -b tests/test_python/*.py
+ python -m unittest tests/test_python/*.py
- name: Run python functional tests
run: |
micromamba activate srw_app
export CI=true
export PYTHONPATH=${PWD}/ush
- python3 -m unittest -b tests/test_python/test_retrieve_data.py
+ python3 -m unittest tests/test_python/test_retrieve_data.py
+
diff --git a/CMakeLists.txt b/CMakeLists.txt
index 30e241daba..cf8180097d 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -8,12 +8,20 @@ project(ufs-srweather-app VERSION 1.0 LANGUAGES C CXX Fortran)
find_package(MPI REQUIRED COMPONENTS C CXX Fortran)
# Set extended version info.
-SET(SRWA_VERSION_MAJOR 1)
-SET(SRWA_VERSION_MINOR 0)
+SET(SRWA_VERSION_MAJOR 2)
+SET(SRWA_VERSION_MINOR 2)
SET(SRWA_VERSION_PATCH 0)
SET(SRWA_VERSION_NOTE "-development")
SET(SRWA_VERSION ${SRWA_VERSION_MAJOR}.${SRWA_VERSION_MINOR}.${SRWA_VERSION_PATCH}${SRWA_VERSION_NOTE})
+# Get the latest abbreviated commit hash of the working branch
+execute_process(
+ COMMAND git log -1 --format=%h
+ WORKING_DIRECTORY ${CMAKE_CURRENT_LIST_DIR}
+ OUTPUT_VARIABLE GIT_HASH
+ OUTPUT_STRIP_TRAILING_WHITESPACE
+ )
+
# A function used to create autotools-style 'yes/no' definitions.
# If a variable is set, it 'yes' is returned. Otherwise, 'no' is
# returned.
@@ -55,7 +63,7 @@ if(NOT DEFINED CMAKE_INSTALL_BINDIR)
endif()
#####
-# Configure and print the ufs-srweather-app.settings file.
+# Configure and print the build settings file.
#####
# Determine the configure date.
@@ -92,10 +100,12 @@ SET(host_vendor "${osname}")
SET(host_os "${osrel}")
SET(abs_top_builddir "${CMAKE_CURRENT_BINARY_DIR}")
SET(abs_top_srcdir "${CMAKE_CURRENT_SOURCE_DIR}")
+SET(application "${APP}")
+SET(machine "${BUILD_MACHINE}")
SET(CC_VERSION "${CMAKE_C_COMPILER}")
-# Set values for .settings file.
+# Set values for build settings file.
SET(CFLAGS "${CMAKE_C_FLAGS} ${CMAKE_C_FLAGS_${CMAKE_BUILD_TYPE}}")
SET(CPPFLAGS "${CMAKE_CPP_FLAGS} ${CMAKE_CPP_FLAGS_${CMAKE_BUILD_TYPE}}")
SET(LDFLAGS "${CMAKE_SHARED_LINKER_FLAGS} ${CMAKE_SHARED_LINKER_FLAGS_${CMAKE_BUILD_TYPE}}")
@@ -107,27 +117,27 @@ is_enabled(BUILD_SHARED_LIBS enable_shared)
is_enabled(STATUS_PARALLEL HAS_PARALLEL)
# Generate file from template.
-CONFIGURE_FILE("${CMAKE_CURRENT_SOURCE_DIR}/ufs_srweather_app.settings.in"
- "${CMAKE_CURRENT_BINARY_DIR}/ufs_srweather_app.settings"
+CONFIGURE_FILE("${CMAKE_CURRENT_SOURCE_DIR}/sorc/build_settings_template.yaml"
+ "${CMAKE_CURRENT_BINARY_DIR}/build_settings.yaml"
@ONLY)
# Read in settings file, print out.
# Avoid using system-specific calls so that this
# might also work on Windows.
-FILE(READ "${CMAKE_CURRENT_BINARY_DIR}/ufs_srweather_app.settings"
+FILE(READ "${CMAKE_CURRENT_BINARY_DIR}/build_settings.yaml"
UFS-SRWEATHER-APP_SETTINGS)
MESSAGE(${UFS-SRWEATHER-APP_SETTINGS})
-# Install ufs_srweather_app.settings file into same location
+# Install build settings file into same location
# as the app.
-INSTALL(FILES "${CMAKE_BINARY_DIR}/ufs_srweather_app.settings"
+INSTALL(FILES "${CMAKE_BINARY_DIR}/build_settings.yaml"
DESTINATION ${CMAKE_INSTALL_BINDIR})
#####
# Create 'ufs_srweather_app_meta.h' include file.
#####
configure_file(
- ufs_srweather_app_meta.h.in
+ sorc/ufs_srweather_app_meta.h.in
ufs_srweather_app_meta.h @ONLY)
FILE(COPY "${CMAKE_CURRENT_BINARY_DIR}/ufs_srweather_app_meta.h" DESTINATION include)
diff --git a/devbuild.sh b/devbuild.sh
index 014fbdb3b7..332abb49b5 100755
--- a/devbuild.sh
+++ b/devbuild.sh
@@ -15,8 +15,10 @@ OPTIONS
compiler to use; default depends on platform
(e.g. intel | gnu | cray | gccgfortran)
-a, --app=APPLICATION
- weather model application to build; for example, ATMAQ for Online-CMAQ
- (e.g. ATM | ATMAQ | ATMW | S2S | S2SW)
+ weather model application to build; supported SRW options are
+ ATM (default) Atmosphere only
+ ATMAQ Online-CMAQ (air quality)
+ ATMF UFS_FIRE (coupled Community Fire Behavior Model)
--ccpp="CCPP_SUITE1,CCPP_SUITE2..."
CCPP suites (CCPP_SUITES) to include in build; delimited with ','
--enable-options="OPTION1,OPTION2,..."
@@ -363,6 +365,7 @@ fi
# cmake settings
CMAKE_SETTINGS="\
+ -DBUILD_MACHINE=${MACHINE}\
-DCMAKE_BUILD_TYPE=${BUILD_TYPE}\
-DCMAKE_INSTALL_PREFIX=${INSTALL_DIR}\
-DCMAKE_INSTALL_BINDIR=${BIN_DIR}\
diff --git a/doc/UsersGuide/BuildingRunningTesting/DefaultVarsTable.rst b/doc/UsersGuide/BuildingRunningTesting/DefaultVarsTable.rst
index faaf9129c2..c22751656c 100644
--- a/doc/UsersGuide/BuildingRunningTesting/DefaultVarsTable.rst
+++ b/doc/UsersGuide/BuildingRunningTesting/DefaultVarsTable.rst
@@ -20,7 +20,7 @@ Table of Variables in ``config_defaults.yaml``
TEST_VX_FCST_INPUT_BASEDIR, FIXgsm, FIXaer, FIXlut, FIXorg, FIXsfc, FIXshp, FIXcrtm, FIXcrtmupp, EXTRN_MDL_DATA_STORES
* - Workflow
- WORKFLOW_ID, RELATIVE_LINK_FLAG, USE_CRON_TO_RELAUNCH, CRON_RELAUNCH_INTVL_MNTS, CRONTAB_LINE, LOAD_MODULES_RUN_TASK_FP, EXPT_BASEDIR, EXPT_SUBDIR, EXEC_SUBDIR,
- EXPTDIR, DOT_OR_USCORE, EXPT_CONFIG_FN, CONSTANTS_FN, RGNL_GRID_NML_FN, FV3_NML_FN, FV3_NML_BASE_SUITE_FN, FV3_NML_YAML_CONFIG_FN, FV3_NML_BASE_ENS_FN,
+ EXPTDIR, DOT_OR_USCORE, CONSTANTS_FN, RGNL_GRID_NML_FN, FV3_NML_FN, FV3_NML_BASE_SUITE_FN, FV3_NML_YAML_CONFIG_FN, FV3_NML_BASE_ENS_FN,
FV3_EXEC_FN, DATA_TABLE_FN, DIAG_TABLE_FN, FIELD_TABLE_FN, DIAG_TABLE_TMPL_FN, FIELD_TABLE_TMPL_FN, MODEL_CONFIG_FN, NEMS_CONFIG_FN, AQM_RC_FN, AQM_RC_TMPL_FN,
FV3_NML_BASE_SUITE_FP, FV3_NML_YAML_CONFIG_FP, FV3_NML_BASE_ENS_FP, DATA_TABLE_TMPL_FP, DIAG_TABLE_TMPL_FP, FIELD_TABLE_TMPL_FP,
MODEL_CONFIG_TMPL_FP, NEMS_CONFIG_TMPL_FP, AQM_RC_TMPL_FP, DATA_TABLE_FP, FIELD_TABLE_FP, NEMS_CONFIG_FP, FV3_NML_FP,
diff --git a/doc/UsersGuide/BuildingRunningTesting/FIRE.rst b/doc/UsersGuide/BuildingRunningTesting/FIRE.rst
new file mode 100644
index 0000000000..09a22975a6
--- /dev/null
+++ b/doc/UsersGuide/BuildingRunningTesting/FIRE.rst
@@ -0,0 +1,257 @@
+.. _UFS_FIRE:
+
+=========================================
+Community Fire Behavior Module (UFS FIRE)
+=========================================
+
+The `Community Fire Behavior Model (CFBM) `_ is a wildland fire model coupled to the UFS Atmospheric Model. The capability to run this code is now available in the UFS Short-Range Weather App for easy use by the community. The `fire_behavior repository `_ is a :term:`submodule` of the UFS Weather Model (WM), coupled through the :term:`NUOPC` Layer to provide direct feedback between the simulated atmosphere and the simulated fire. More information about the CFBM can be found in the :fire-ug:`CFBM Users Guide <>`.
+
+The biggest difference between the UFS FIRE capability and other modes of the UFS SRW App is that a special build flag is required to build the coupled fire behavior code, as described in the instructions below. Aside from that flag, a few additional input files, and some fire-specific configuration settings, configuring and running an experiment is the same as for any other use of the SRW App.
+
+
+.. note::
+
+ Although this chapter is the primary documentation resource for running the UFS FIRE configuration, users may need to refer to :numref:`Chapter %s ` and :numref:`Chapter %s ` for additional information on building and running the SRW App, respectively.
+
+Quick Start Guide (UFS FIRE)
+=====================================
+
+Download the Code
+-------------------
+
+Clone the |branch| branch of the authoritative SRW App repository:
+
+.. code-block:: console
+
+ git clone -b develop https://github.com/ufs-community/ufs-srweather-app
+ cd ufs-srweather-app
+
+Checkout Externals
+---------------------
+
+Users must run the ``checkout_externals`` script to collect (or "check out") the individual components of the SRW App from their respective GitHub repositories.
+
+.. code-block:: console
+
+ ./manage_externals/checkout_externals
+
+Build the SRW App with Fire Behavior Enabled
+--------------------------------------------
+
+To build the SRW with fire behavior code, use the following command:
+
+.. code-block:: console
+
+   ./devbuild.sh -p=<machine> -a=ATMF
+
+where ``<machine>`` is ``hera``, ``derecho``, or any other Tier 1 platform. The ``-a`` argument indicates the configuration/version of the application to build; in this case, the atmosphere-fire coupling (ATMF).
+
+If UFS FIRE builds correctly, users should see the standard executables listed in :numref:`Table %s `. There are no additional files expected, since the CFBM is coupled to the UFS weather model via the same ``ufs_model`` executable.
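+
+As a quick sanity check (assuming the default ``exec`` installation directory), users can verify that the coupled executable was built:
+
+.. code-block:: console
+
+   ls ufs-srweather-app/exec/ufs_model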
+
+Load the |wflow_env| Environment
+--------------------------------------------
+
+Load the appropriate modules for the workflow:
+
+.. code-block:: console
+
+ module use /path/to/ufs-srweather-app/modulefiles
+   module load wflow_<machine>
+
+where ``<machine>`` is ``hera``, ``derecho``, or any other Tier 1 platform.
+
+If the console outputs a message, the user should run the commands specified in the message. For example, if the output says:
+
+.. code-block:: console
+
+ Please do the following to activate conda:
+ > conda activate srw_app
+
+then the user should run |activate|. Otherwise, the user can continue with configuring the workflow.
+
+.. _FIREConfig:
+
+Configure Experiment
+---------------------------
+
+Users will need to configure their experiment by setting parameters in the ``config.yaml`` file. To start, users can copy an example experiment configuration into ``config.yaml``:
+
+.. code-block:: console
+
+ cd ush
+ cp config.fire.yaml config.yaml
+
+Users will need to change the ``MACHINE`` and ``ACCOUNT`` variables in ``config.yaml`` to match their system. They may also wish to adjust other experiment settings, especially under the ``fire:`` section, described in further detail below. For more information on other configuration settings, see :numref:`Section %s