diff --git a/.cicd/Jenkinsfile b/.cicd/Jenkinsfile
index 8cc95c6b00..18d015b6e9 100644
--- a/.cicd/Jenkinsfile
+++ b/.cicd/Jenkinsfile
@@ -5,17 +5,23 @@ pipeline {
        disableConcurrentBuilds()
        overrideIndexTriggers(false)
        skipDefaultCheckout(true)
+       timestamps()
+       timeout(time: 12, unit: 'HOURS')
    }
    parameters {
        // Allow job runner to filter based on platform
        // Use the line below to enable all PW clusters
-       // choice(name: 'SRW_PLATFORM_FILTER', choices: ['all', 'cheyenne', 'gaea', 'hera', 'jet', 'orion', 'hercules', 'pclusternoaav2use1', 'azclusternoaav2eus1', 'gclusternoaav2usc1'], description: 'Specify the platform(s) to use')
+       // choice(name: 'SRW_PLATFORM_FILTER', choices: ['all', 'derecho', 'gaea', 'hera', 'jet', 'orion', 'hercules', 'pclusternoaav2use1', 'azclusternoaav2eus1', 'gclusternoaav2usc1'], description: 'Specify the platform(s) to use')
        // Use the line below to enable the PW AWS cluster
-       // choice(name: 'SRW_PLATFORM_FILTER', choices: ['all', 'cheyenne', 'gaea', 'hera', 'jet', 'orion', 'hercules', 'pclusternoaav2use1'], description: 'Specify the platform(s) to use')
+       // choice(name: 'SRW_PLATFORM_FILTER', choices: ['all', 'derecho', 'gaea', 'hera', 'jet', 'orion', 'hercules', 'pclusternoaav2use1'], description: 'Specify the platform(s) to use')
        choice(name: 'SRW_PLATFORM_FILTER', choices: ['all', 'derecho', 'gaea', 'hera', 'jet', 'orion', 'hercules'], description: 'Specify the platform(s) to use')
        // Allow job runner to filter based on compiler
        choice(name: 'SRW_COMPILER_FILTER', choices: ['all', 'gnu', 'intel'], description: 'Specify the compiler(s) to use to build')
+       // Workflow Wrapper test depth {0..9}, 0=none, 1=simple, 9=all [default]
+       choice(name: 'SRW_WRAPPER_TASK_DEPTH', choices: ['9', '1', '0'], description: '0=none, 1=simple, 9=all [default]')
+       // WE2E test selection: the coverage set, none, the skill-score test, or a single named test
+ choice(name: 'SRW_WE2E_SINGLE_TEST', choices: ['coverage', 'none', 'skill-score', 'grid_SUBCONUS_Ind_3km_ics_FV3GFS_lbcs_FV3GFS_suite_WoFS_v0'], description: 'Specify the WE2E test to use') booleanParam name: 'SRW_WE2E_COMPREHENSIVE_TESTS', defaultValue: false, description: 'Whether to execute the comprehensive end-to-end tests' } @@ -23,9 +29,15 @@ pipeline { stage('Launch SonarQube') { steps { script { + echo "BRANCH_NAME=${env.CHANGE_BRANCH}" + echo "FORK_NAME=${env.CHANGE_FORK}" + echo "CHANGE_URL=${env.CHANGE_URL}" + echo "CHANGE_ID=${env.CHANGE_ID}" build job: '/ufs-srweather-app/ufs-srw-sonarqube', parameters: [ string(name: 'BRANCH_NAME', value: env.CHANGE_BRANCH ?: 'develop'), - string(name: 'FORK_NAME', value: env.CHANGE_FORK ?: '') + string(name: 'FORK_NAME', value: env.CHANGE_FORK ?: ''), + string(name: 'CHANGE_URL', value: env.CHANGE_URL ?: ''), + string(name: 'CHANGE_ID', value: env.CHANGE_ID ?: '') ], wait: false } } @@ -70,6 +82,11 @@ pipeline { // Run on all platform/compiler combinations by default or build and test only on the platform(s) and // compiler(s) specified by SRW_PLATFORM_FILTER and SRW_COMPILER_FILTER when { + beforeAgent true + expression { + return nodesByLabel(env.SRW_PLATFORM).size() > 0 + } + allOf { anyOf { expression { params.SRW_PLATFORM_FILTER == 'all' } @@ -126,20 +143,33 @@ pipeline { stage('Initialize') { steps { dir ("${env.SRW_PLATFORM}") { - echo "Initializing SRW (${env.SRW_COMPILER}) build environment on ${env.SRW_PLATFORM} (using ${env.WORKSPACE}/${env.SRW_PLATFORM})" + echo "${env.STAGE_NAME} SRW (${env.SRW_COMPILER}) build environment on ${env.SRW_PLATFORM} (using ${env.WORKSPACE}/${env.SRW_PLATFORM})" cleanWs() checkout scm - sh '"${WORKSPACE}/${SRW_PLATFORM}/manage_externals/checkout_externals"' + sh '"${WORKSPACE}/${SRW_PLATFORM}/.cicd/scripts/srw_init.sh"' + sh "STAGE_NAME=${env.STAGE_NAME} " + 'bash --login "${WORKSPACE}/${SRW_PLATFORM}/.cicd/scripts/disk_usage.sh"' + } + } + + post { + always { + s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.SRW_PLATFORM}-*-time-srw_init.json", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] + s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.SRW_PLATFORM}-*-disk-usage${env.STAGE_NAME}.csv", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] } } } // Run the unified build script; if successful create a tarball of the build and upload to S3 stage('Build') { + options { + timeout(time: 4, unit: 'HOURS') + } + steps { dir ("${env.SRW_PLATFORM}") { - echo "Building SRW (${env.SRW_COMPILER}) on ${env.SRW_PLATFORM} (using ${env.WORKSPACE}/${env.SRW_PLATFORM})" + echo "${env.STAGE_NAME} SRW (${env.SRW_COMPILER}) on ${env.SRW_PLATFORM} (using 
${env.WORKSPACE}/${env.SRW_PLATFORM})" sh 'bash --login "${WORKSPACE}/${SRW_PLATFORM}/.cicd/scripts/srw_build.sh"' + sh "STAGE_NAME=${env.STAGE_NAME} " + 'bash --login "${WORKSPACE}/${SRW_PLATFORM}/.cicd/scripts/disk_usage.sh"' } } @@ -148,6 +178,11 @@ pipeline { sh 'cd "${WORKSPACE}/${SRW_PLATFORM}/${INSTALL_NAME}" && tar --create --gzip --verbose --file "${WORKSPACE}/${SRW_PLATFORM}/${BUILD_NAME}.tgz" *' s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: true, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.SRW_PLATFORM}/${env.BUILD_NAME}.tgz", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false], [bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: true, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.SRW_PLATFORM}/build_${env.SRW_COMPILER}/srw_build-${env.SRW_PLATFORM}-${env.SRW_COMPILER}.txt", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] } + always { + s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.SRW_PLATFORM}-*-env.txt", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] + s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.SRW_PLATFORM}-*-time-srw_build.json", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] + s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.SRW_PLATFORM}-*-disk-usage${env.STAGE_NAME}.csv", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] + } } } @@ -163,9 +198,13 @@ pipeline { // Try a few Workflow Task scripts to make sure E2E tests can be launched in a follow-on 'Test' stage stage('Functional WorkflowTaskTests') { + environment { + TASK_DEPTH = "${params.SRW_WRAPPER_TASK_DEPTH}" + } + steps { dir ("${env.SRW_PLATFORM}") { - echo "Running simple workflow script task tests on ${env.SRW_PLATFORM} (using ${env.WORKSPACE}/${env.SRW_PLATFORM})" + 
echo "Running ${TASK_DEPTH} simple workflow script task tests on ${env.SRW_PLATFORM} (using ${env.WORKSPACE}/${env.SRW_PLATFORM})" sh 'bash --login "${WORKSPACE}/${SRW_PLATFORM}/.cicd/scripts/wrapper_srw_ftest.sh"' } } @@ -173,17 +212,22 @@ pipeline { // Run the unified test script stage('Test') { + options { + timeout(time: 8, unit: 'HOURS') + } + environment { SRW_WE2E_EXPERIMENT_BASE_DIR = "${env.WORKSPACE}/${env.SRW_PLATFORM}/expt_dirs" } steps { dir ("${env.SRW_PLATFORM}") { - echo "Testing SRW (${env.SRW_COMPILER}) on ${env.SRW_PLATFORM} (using ${env.WORKSPACE}/${env.SRW_PLATFORM})" + echo "${env.STAGE_NAME} SRW (${env.SRW_COMPILER}) on ${env.SRW_PLATFORM} (using ${env.WORKSPACE}/${env.SRW_PLATFORM})" // If executing for a Pull Request, check for the run_we2e_comprehensive_tests. If set, // override the value of the SRW_WE2E_COMPREHENSIVE_TESTS parameter script { + def single_test = params.SRW_WE2E_SINGLE_TEST def run_we2e_comprehensive_tests = params.SRW_WE2E_COMPREHENSIVE_TESTS def run_we2e_comprehensive_tests_label = 'run_we2e_comprehensive_tests' @@ -195,18 +239,25 @@ pipeline { } } - sh "SRW_WE2E_COMPREHENSIVE_TESTS=${run_we2e_comprehensive_tests}" + ' bash --login "${WORKSPACE}/${SRW_PLATFORM}/.cicd/scripts/srw_test.sh"' - } + sh "SRW_WE2E_COMPREHENSIVE_TESTS=${run_we2e_comprehensive_tests} SRW_WE2E_SINGLE_TEST=${single_test}" + ' bash --login "${WORKSPACE}/${SRW_PLATFORM}/.cicd/scripts/srw_test.sh"' + + } + sh "STAGE_NAME=${env.STAGE_NAME} " + 'bash --login "${WORKSPACE}/${SRW_PLATFORM}/.cicd/scripts/disk_usage.sh"' } } post { + success { + s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.SRW_PLATFORM}/*-skill-score.txt", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] + } always { // Archive the test log files - sh 'cd "${SRW_WE2E_EXPERIMENT_BASE_DIR}" && tar --create --gzip --verbose --dereference --file "${WORKSPACE}/${SRW_PLATFORM}/we2e_test_logs-${SRW_PLATFORM}-${SRW_COMPILER}.tgz" */log.generate_FV3LAM_wflow */log/* ${WORKSPACE}/${SRW_PLATFORM}/tests/WE2E/WE2E_tests_*yaml WE2E_summary*txt ${WORKSPACE}/${SRW_PLATFORM}/tests/WE2E/log.*' + sh "[[ -d ${SRW_WE2E_EXPERIMENT_BASE_DIR} ]] && cd ${SRW_WE2E_EXPERIMENT_BASE_DIR} && tar --create --gzip --verbose --dereference --file ${env.WORKSPACE}/${env.SRW_PLATFORM}/we2e_test_logs-${env.SRW_PLATFORM}-${env.SRW_COMPILER}.tgz */log.generate_FV3LAM_wflow */log/* ${env.WORKSPACE}/${env.SRW_PLATFORM}/tests/WE2E/WE2E_tests_*yaml WE2E_summary*txt ${env.WORKSPACE}/${env.SRW_PLATFORM}/tests/WE2E/log.* || cat /dev/null > ${env.WORKSPACE}/${env.SRW_PLATFORM}/we2e_test_logs-${env.SRW_PLATFORM}-${env.SRW_COMPILER}.tgz" + s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.SRW_PLATFORM}/*_test_results-*-*.txt", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false], [bucket: 
'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.SRW_PLATFORM}/we2e_test_logs-${env.SRW_PLATFORM}-${env.SRW_COMPILER}.tgz", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] + s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.SRW_PLATFORM}-*-time-srw_test.json", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] + s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.SRW_PLATFORM}-*-disk-usage${env.STAGE_NAME}.csv", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] // Remove the data sets from the experiments directory to conserve disk space sh 'find "${SRW_WE2E_EXPERIMENT_BASE_DIR}" -regextype posix-extended -regex "^.*(orog|[0-9]{10})$" -type d | xargs rm -rf' - s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.SRW_PLATFORM}/*_test_results-*-*.txt", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false], [bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.SRW_PLATFORM}/we2e_test_logs-${env.SRW_PLATFORM}-${env.SRW_COMPILER}.tgz", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] } } } @@ -214,13 +265,23 @@ pipeline { } } } + // end of stages{} - // Uncomment the following block to re-enable PW clusters - /* post { always { - // Stop any Parallel Works clusters that were started during the pipeline execution script { + // Trigger another job to collect all build statistics + CI_JOB_NAME=env.JOB_NAME.replace("/${env.JOB_BASE_NAME}","") + CI_BRANCH_NAME=env.JOB_BASE_NAME.replace("%2F","%252F") + echo "post: Triggering ufs-srweather-app/ufs-srw-metrics job for ${CI_JOB_NAME} on branch build ${CI_BRANCH_NAME}/${env.BUILD_NUMBER} ..." 
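+                    // Hypothetical worked example of the values computed above: for a
+                    // multibranch build with JOB_NAME="ufs-srweather-app/PR-1234",
+                    // JOB_BASE_NAME="PR-1234", and BUILD_NUMBER="7", the trigger below
+                    // receives CI_JOB_NAME="ufs-srweather-app" and CI_BUILD_NUMBER="PR-1234/7".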
+                    build job: '/ufs-srweather-app/ufs-srw-metrics', parameters: [
+                        string(name: 'CI_JOB_NAME', value: "${CI_JOB_NAME}"),
+                        string(name: 'CI_BUILD_NUMBER', value: "${CI_BRANCH_NAME}/${env.BUILD_NUMBER}")
+                    ], wait: false
+
+                    // Uncomment the following block to re-enable PW clusters
+                    /*
+                    // Stop any Parallel Works clusters that were started during the pipeline execution
                    // def pw_clusters = ['pclusternoaav2use1', 'azclusternoaav2eus1', 'gclusternoaav2usc1']
                    def pw_clusters = ['pclusternoaav2use1']
                    def clusters = []
@@ -239,8 +300,8 @@
                        // PW_CLUSTER_NAME parameter
                        build job: 'parallel-works-jenkins-client/stop-cluster', parameters: [string(name: 'PW_CLUSTER_NAME', value: clusters[i])]
                    }
+                    */
                }
            }
        }
-    */
}
diff --git a/.cicd/scripts/disk_usage.sh b/.cicd/scripts/disk_usage.sh
new file mode 100755
index 0000000000..08a482d70f
--- /dev/null
+++ b/.cicd/scripts/disk_usage.sh
@@ -0,0 +1,48 @@
+#!/usr/bin/env bash
+
+# Output a CSV report of disk usage on subdirs of some path
+# Usage:
+#    [JOB_NAME=] [BUILD_NUMBER=] [SRW_COMPILER=] [SRW_PLATFORM=] disk_usage.sh path depth size outfile.csv
+#
+# args:
+#    directory=$1
+#    depth=$2
+#    size=$3
+#    outfile=$4
+
+[[ -n ${WORKSPACE} ]] || WORKSPACE=$(pwd)
+[[ -n ${SRW_PLATFORM} ]] || SRW_PLATFORM=$(hostname -s 2>/dev/null) || SRW_PLATFORM=$(hostname 2>/dev/null)
+[[ -n ${SRW_COMPILER} ]] || SRW_COMPILER=compiler
+
+script_dir="$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")" > /dev/null 2>&1 && pwd)"
+
+# Get repository root from Jenkins WORKSPACE variable if set, otherwise, set
+# relative to script directory.
+declare workspace
+if [[ -d "${WORKSPACE}/${SRW_PLATFORM}" ]]; then
+    workspace="${WORKSPACE}/${SRW_PLATFORM}"
+else
+    workspace="$(cd -- "${script_dir}/../.." && pwd)"
+fi
+
+echo "STAGE_NAME=${STAGE_NAME}" # from pipeline
+outfile="${4:-${workspace}-${SRW_COMPILER}-disk-usage${STAGE_NAME}.csv}"
+
+function disk_usage() {
+    local directory=${1:-${PWD}}
+    local depth=${2:-1}
+    local size=${3:-k}
+    echo "Disk usage: ${JOB_NAME:-ci}/${SRW_PLATFORM}/$(basename $directory)"
+    (
+        cd $directory || exit 1
+        echo "Platform,Build,Owner,Group,Inodes,${size:-k}bytes,Access Time,Filename"
+        du -Px -d ${depth:-1} --inode --exclude='./workspace' | \
+        while read line ; do
+            arr=($line); inode=${arr[0]}; filename=${arr[1]};
+            echo "${SRW_PLATFORM}-${SRW_COMPILER:-compiler},${JOB_NAME:-ci}/${BUILD_NUMBER:-0},$(stat -c '%U,%G' $filename),${inode:-0},$(du -Px -s -${size:-k} --time $filename)" | tr '\t' ',' ;
+        done | sort -t, -k5 -n #-r
+    )
+    echo ""
+}
+
+disk_usage $1 $2 $3 | tee ${outfile}
diff --git a/.cicd/scripts/qsub_srw_ftest.sh b/.cicd/scripts/qsub_srw_ftest.sh
index e9f0170a05..8b2569ca69 100644
--- a/.cicd/scripts/qsub_srw_ftest.sh
+++ b/.cicd/scripts/qsub_srw_ftest.sh
@@ -9,7 +9,5 @@
 #PBS -l select=1:ncpus=24:mpiprocs=24:ompthreads=1
 #PBS -l walltime=00:30:00
 #PBS -V
-#PBS -o log_wrap.%j.log
-#PBS -e err_wrap.%j.err

 bash ${WORKSPACE}/${SRW_PLATFORM}/.cicd/scripts/srw_ftest.sh
diff --git a/.cicd/scripts/srw_build.sh b/.cicd/scripts/srw_build.sh
index 196d984a05..25546561eb 100755
--- a/.cicd/scripts/srw_build.sh
+++ b/.cicd/scripts/srw_build.sh
@@ -27,7 +27,8 @@ fi
 # Build and install
 cd ${workspace}/tests
 set +e
-./build.sh ${platform} ${SRW_COMPILER}
+/usr/bin/time -p -f '{\n "cpu": "%P"\n, "memMax": "%M"\n, "mem": {"text": "%X", "data": "%D", "swaps": "%W", "context": "%c", "waits": "%w"}\n, "pagefaults": {"major": "%F", "minor": "%R"}\n, "filesystem": {"inputs": "%I", "outputs": "%O"}\n, "time": {"real": "%e", "user": "%U", "sys": "%S"}\n}' -o ${WORKSPACE}/${SRW_PLATFORM}-${SRW_COMPILER}-time-srw_build.json \
+    ./build.sh ${platform} ${SRW_COMPILER}
 build_exit=$?
 set -e
 cd -
@@ -35,6 +36,6 @@ cd -
 # Create combined log file for upload to s3
 build_dir="${workspace}/build_${SRW_COMPILER}"
 cat ${build_dir}/log.cmake ${build_dir}/log.make \
-    >${build_dir}/srw_build-${platform}-${SRW_COMPILER}.txt
+    >${build_dir}/srw_build-${SRW_PLATFORM}-${SRW_COMPILER}.txt

 exit $build_exit
diff --git a/.cicd/scripts/srw_ftest.sh b/.cicd/scripts/srw_ftest.sh
index b77ee767f3..95530a89aa 100755
--- a/.cicd/scripts/srw_ftest.sh
+++ b/.cicd/scripts/srw_ftest.sh
@@ -66,6 +66,9 @@ sed "s|^workflow:|workflow:\n EXEC_SUBDIR: ${workspace}/install_${SRW_COMPILER}
 # Decrease forecast length since we are running all the steps
 sed "s|^ FCST_LEN_HRS: 12| FCST_LEN_HRS: 6|g" -i ush/config.yaml

+# Update compiler
+sed "s|^ COMPILER: intel| COMPILER: ${SRW_COMPILER}|g" -i ush/config.yaml
+
 # DATA_LOCATION differs on each platform ... find it.
 export DATA_LOCATION=$(grep TEST_EXTRN_MDL_SOURCE_BASEDIR ${workspace}/ush/machine/${platform,,}.yaml | awk '{printf "%s", $2}')
 echo "DATA_LOCATION=${DATA_LOCATION}"
@@ -85,6 +88,8 @@ source etc/lmod-setup.sh ${platform,,}
 module use modulefiles
 module load build_${platform,,}_${SRW_COMPILER}
 module load wflow_${platform,,}
+# Deactivate conflicting conda env on GCP
+[[ "${SRW_PLATFORM}" =~ "gclusternoaa" ]] && conda deactivate

 [[ ${FORGIVE_CONDA} == true ]] && set +e +u  # Some platforms have incomplete python3 or conda support, but wouldn't necessarily block workflow tests
 conda activate srw_app
diff --git a/.cicd/scripts/srw_init.sh b/.cicd/scripts/srw_init.sh
new file mode 100755
index 0000000000..688255ac98
--- /dev/null
+++ b/.cicd/scripts/srw_init.sh
@@ -0,0 +1,38 @@
+#!/usr/bin/env bash
+#
+# A unified init script for the SRW application. This script is expected to
+# fetch initial source for the SRW application for all supported platforms.
+#
+set -e -u -x
+
+script_dir="$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")" > /dev/null 2>&1 && pwd)"
+
+# Get repository root from Jenkins WORKSPACE variable if set, otherwise, set
+# relative to script directory.
+declare workspace
+if [[ -d "${WORKSPACE}/${SRW_PLATFORM}" ]]; then
+    workspace="${WORKSPACE}/${SRW_PLATFORM}"
+else
+    workspace="$(cd -- "${script_dir}/../.." && pwd)"
+fi
+
+# Normalize Parallel Works cluster platform value.
+declare platform
+if [[ "${SRW_PLATFORM}" =~ ^(az|g|p)clusternoaa ]]; then
+    platform='noaacloud'
+else
+    platform="${SRW_PLATFORM}"
+fi
+
+# Fetch the external components
+cd ${workspace}
+set +e
+/usr/bin/time -p -f '{\n "cpu": "%P"\n, "memMax": "%M"\n, "mem": {"text": "%X", "data": "%D", "swaps": "%W", "context": "%c", "waits": "%w"}\n, "pagefaults": {"major": "%F", "minor": "%R"}\n, "filesystem": {"inputs": "%I", "outputs": "%O"}\n, "time": {"real": "%e", "user": "%U", "sys": "%S"}\n}' -o ${WORKSPACE}/${SRW_PLATFORM}-${SRW_COMPILER}-time-srw_init.json \
+    ./manage_externals/checkout_externals
+init_exit=$?
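+# Illustrative only (assumes GNU time wrote the JSON above and that jq is
+# available on the node): the wall-clock seconds recorded for the externals
+# checkout can be read back with
+#   jq '.time.real' "${WORKSPACE}/${SRW_PLATFORM}-${SRW_COMPILER}-time-srw_init.json"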
+echo "STAGE_NAME=${STAGE_NAME}" +env | grep = | sort > ${WORKSPACE}/${SRW_PLATFORM}-${SRW_COMPILER}-env.txt +set -e +cd - + +exit $init_exit diff --git a/.cicd/scripts/srw_metric_example.sh b/.cicd/scripts/srw_metric.sh similarity index 84% rename from .cicd/scripts/srw_metric_example.sh rename to .cicd/scripts/srw_metric.sh index 45dd30c299..8f6eed85b0 100755 --- a/.cicd/scripts/srw_metric_example.sh +++ b/.cicd/scripts/srw_metric.sh @@ -56,8 +56,8 @@ else fi # Test directories -we2e_experiment_base_dir="${workspace}/../expt_dirs/metric_test" -we2e_test_dir="${workspace}/tests/WE2E" +we2e_experiment_base_dir="${we2e_experiment_base_dir:=${workspace}/../expt_dirs/metric_test}" +we2e_test_dir="${we2e_test_dir:=${workspace}/tests/WE2E}" we2e_test_name="grid_SUBCONUS_Ind_3km_ics_FV3GFS_lbcs_FV3GFS_suite_WoFS_v0" pwd @@ -65,8 +65,8 @@ pwd # Setup the build environment declare srw_compiler srw_compiler=${SRW_COMPILER} -source etc/lmod-setup.sh ${platform,,} -module use modulefiles +source ${workspace}/etc/lmod-setup.sh ${platform,,} +module use ${workspace}/modulefiles module load build_${platform,,}_${srw_compiler} # Build srw @@ -78,6 +78,8 @@ cd ${workspace} # Activate workflow environment module load wflow_${platform,,} +# Deactivate conflicting conda env on GCP +[[ "${SRW_PLATFORM}" =~ "gclusternoaa" ]] && conda deactivate [[ ${FORGIVE_CONDA} == true ]] && set +e +u # Some platforms have incomplete python3 or conda support, but would not necessarily block workflow tests conda activate srw_app @@ -98,17 +100,17 @@ if [[ ${RUN_STAT_ANLY_OPT} == true ]]; then # Clear out data rm -rf ${workspace}/Indy-Severe-Weather/ # Check if metprd data exists locally otherwise get it from S3 - TEST_EXTRN_MDL_SOURCE_BASEDIR=$(grep TEST_EXTRN_MDL_SOURCE_BASEDIR ${workspace}/ush/machine/${SRW_PLATFORM}.yaml | awk '{print $NF}') - if [[ ! -d $(dirname ${TEST_EXTRN_MDL_SOURCE_BASEDIR})/metprd/point_stat ]] ; then + TEST_EXTRN_MDL_SOURCE_BASEDIR=$(grep TEST_EXTRN_MDL_SOURCE_BASEDIR ${workspace}/ush/machine/${platform}.yaml | awk '{print $NF}') + if [[ -d $(dirname ${TEST_EXTRN_MDL_SOURCE_BASEDIR})/metprd/point_stat ]] ; then mkdir -p Indy-Severe-Weather/metprd/point_stat cp -rp $(dirname ${TEST_EXTRN_MDL_SOURCE_BASEDIR})/metprd/point_stat Indy-Severe-Weather/metprd elif [[ -f Indy-Severe-Weather.tgz ]]; then tar xvfz Indy-Severe-Weather.tgz else - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/sample_cases/release-public-v2.1.0/Indy-Severe-Weather.tgz + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/experiment-user-cases/release-public-v2.1.0/METplus-vx-sample/Indy-Severe-Weather.tgz tar xvfz Indy-Severe-Weather.tgz fi - [[ -f skill-score.txt ]] && rm skill-score.txt + [[ -f ${SRW_PLATFORM,,}-${srw_compiler}-skill-score.txt ]] && rm ${SRW_PLATFORM,,}-${srw_compiler}-skill-score.txt # Skill score index is computed over several terms that are defined in parm/metplus/STATAnalysisConfig_skill_score. # It is computed by aggregating the output from earlier runs of the Point-Stat and/or Grid-Stat tools over one or more cases. 
# In this example, skill score index is a weighted average of 4 skill scores of RMSE statistics for wind speed, dew point temperature, @@ -126,15 +128,15 @@ if [[ ${RUN_STAT_ANLY_OPT} == true ]]; then sed -i 's|--load("conda")|load("conda")|g' ${workspace}/modulefiles/tasks/${platform,,}/run_vx.local.lua fi # Run stat_analysis - stat_analysis -config parm/metplus/STATAnalysisConfig_skill_score -lookin ${workspace}/Indy-Severe-Weather/metprd/point_stat -v 2 -out skill-score.txt + stat_analysis -config parm/metplus/STATAnalysisConfig_skill_score -lookin ${workspace}/Indy-Severe-Weather/metprd/point_stat -v 2 -out ${SRW_PLATFORM,,}-${srw_compiler}-skill-score.txt # check skill-score.txt - cat skill-score.txt + cat ${SRW_PLATFORM,,}-${srw_compiler}-skill-score.txt # get skill-score (SS_INDEX) and check if it is significantly smaller than 1.0 # A value greater than 1.0 indicates that the forecast model outperforms the reference, # while a value less than 1.0 indicates that the reference outperforms the forecast. - tmp_string=$( tail -2 skill-score.txt | head -1 ) + tmp_string=$( tail -2 ${SRW_PLATFORM,,}-${srw_compiler}-skill-score.txt | head -1 ) SS_INDEX=$(echo $tmp_string | awk -F " " '{print $NF}') echo "Skill Score: ${SS_INDEX}" if [[ ${SS_INDEX} < "0.700" ]]; then diff --git a/.cicd/scripts/srw_test.sh b/.cicd/scripts/srw_test.sh index 76ddf020df..90273f2730 100755 --- a/.cicd/scripts/srw_test.sh +++ b/.cicd/scripts/srw_test.sh @@ -11,7 +11,7 @@ script_dir="$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")" > /dev/null 2>&1 && pwd) # Get repository root from Jenkins WORKSPACE variable if set, otherwise, set # relative to script directory. declare workspace -if [[ -n "${WORKSPACE}/${SRW_PLATFORM}" ]]; then +if [[ -d "${WORKSPACE}/${SRW_PLATFORM}" ]]; then workspace="${WORKSPACE}/${SRW_PLATFORM}" else workspace="$(cd -- "${script_dir}/../.." && pwd)" @@ -26,21 +26,30 @@ else fi # Test directories -we2e_experiment_base_dir="${workspace}/expt_dirs" -we2e_test_dir="${workspace}/tests/WE2E" +export we2e_experiment_base_dir="${workspace}/expt_dirs" +export we2e_test_dir="${workspace}/tests/WE2E" + +# Clean any stale test logs +rm -f ${workspace}/tests/WE2E/log.* +rm -f ${we2e_experiment_base_dir}/*/log.generate_FV3LAM_wflow ${we2e_experiment_base_dir}/*/log/* WE2E_summary*txt # Run the end-to-end tests. 
if "${SRW_WE2E_COMPREHENSIVE_TESTS}"; then - test_type="comprehensive" + export test_type="comprehensive" else - test_type="coverage" + export test_type=${SRW_WE2E_SINGLE_TEST:-"coverage"} + if [[ "${test_type}" = skill-score ]]; then + export test_type="grid_SUBCONUS_Ind_3km_ics_FV3GFS_lbcs_FV3GFS_suite_WoFS_v0" + fi fi cd ${we2e_test_dir} # Progress file -progress_file="${workspace}/we2e_test_results-${platform}-${SRW_COMPILER}.txt" -./setup_WE2E_tests.sh ${platform} ${SRW_PROJECT} ${SRW_COMPILER} ${test_type} \ - --expt_basedir=${we2e_experiment_base_dir} | tee ${progress_file} +progress_file="${workspace}/we2e_test_results-${SRW_PLATFORM}-${SRW_COMPILER}.txt" +/usr/bin/time -p -f '{\n "cpu": "%P"\n, "memMax": "%M"\n, "mem": {"text": "%X", "data": "%D", "swaps": "%W", "context": "%c", "waits": "%w"}\n, "pagefaults": {"major": "%F", "minor": "%R"}\n, "filesystem": {"inputs": "%I", "outputs": "%O"}\n, "time": {"real": "%e", "user": "%U", "sys": "%S"}\n}' -o ${WORKSPACE}/${SRW_PLATFORM}-${SRW_COMPILER}-time-srw_test.json \ + ./setup_WE2E_tests.sh ${platform} ${SRW_PROJECT} ${SRW_COMPILER} ${test_type} \ + --expt_basedir=${we2e_experiment_base_dir} | tee ${progress_file}; \ + [[ -f ${we2e_experiment_base_dir}/grid_SUBCONUS_Ind_3km_ics_FV3GFS_lbcs_FV3GFS_suite_WoFS_v0/log.generate_FV3LAM_wflow ]] && ${workspace}/.cicd/scripts/srw_metric.sh run_stat_anly # Set exit code to number of failures set +e diff --git a/.cicd/scripts/wrapper_srw_ftest.sh b/.cicd/scripts/wrapper_srw_ftest.sh index c6a4d19568..33fd966efa 100755 --- a/.cicd/scripts/wrapper_srw_ftest.sh +++ b/.cicd/scripts/wrapper_srw_ftest.sh @@ -15,17 +15,17 @@ declare arg_1 if [[ "${SRW_PLATFORM}" == cheyenne ]] || [[ "${SRW_PLATFORM}" == derecho ]]; then workflow_cmd=qsub arg_1="" - check_job="qstat -u ${USER} -r ${job_id}" else workflow_cmd=sbatch arg_1="--parsable" - check_job="squeue -u ${USER} -j ${job_id} --noheader" fi # Customize wrapper scripts if [[ "${SRW_PLATFORM}" == gaea ]]; then sed -i '15i #SBATCH --clusters=c5' ${WORKSPACE}/${SRW_PLATFORM}/.cicd/scripts/${workflow_cmd}_srw_ftest.sh sed -i 's|qos=batch|qos=normal|g' ${WORKSPACE}/${SRW_PLATFORM}/.cicd/scripts/${workflow_cmd}_srw_ftest.sh + sed -i 's|00:30:00|00:45:00|g' ${WORKSPACE}/${SRW_PLATFORM}/.cicd/scripts/${workflow_cmd}_srw_ftest.sh + sed -i 's|${JOBSdir}/JREGIONAL_RUN_POST|$USHdir/load_modules_run_task.sh "gaea" "run_post" ${JOBSdir}/JREGIONAL_RUN_POST|g' ${WORKSPACE}/${SRW_PLATFORM}/ush/wrappers/run_post.sh fi if [[ "${SRW_PLATFORM}" == hera ]]; then @@ -38,6 +38,10 @@ if [[ "${SRW_PLATFORM}" == jet ]]; then sed -i '15i #SBATCH --partition=xjet' ${WORKSPACE}/${SRW_PLATFORM}/.cicd/scripts/${workflow_cmd}_srw_ftest.sh fi +if [[ "${TASK_DEPTH}" == 0 ]] ; then + exit 0 +fi + # Call job card and return job_id echo "Running: ${workflow_cmd} -A ${SRW_PROJECT} ${arg_1} ${WORKSPACE}/${SRW_PLATFORM}/.cicd/scripts/${workflow_cmd}_srw_ftest.sh" job_id=$(${workflow_cmd} -A ${SRW_PROJECT} ${arg_1} ${WORKSPACE}/${SRW_PLATFORM}/.cicd/scripts/${workflow_cmd}_srw_ftest.sh) @@ -48,6 +52,11 @@ sleep 10 # Check for job and exit when done while true do + if [[ "${SRW_PLATFORM}" == derecho ]]; then + check_job="qstat -u ${USER} -r ${job_id}" + else + check_job="squeue -u ${USER} -j ${job_id} --noheader" + fi job_id_info=$($check_job) if [ ! -z "$job_id_info" ]; then echo "Job is still running. 
Check again in two minutes" @@ -58,7 +67,7 @@ do # Return exit code and check for results file first results_file="${WORKSPACE}/${SRW_PLATFORM}/functional_test_results_${SRW_PLATFORM}_${SRW_COMPILER}.txt" if [ ! -f "$results_file" ]; then - echo "Missing results file! \nexit 1" + echo -e "Missing results file! \nexit 1" exit 1 fi diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index bdc428c736..e1878a6447 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -3,7 +3,7 @@ # These owners will be the default owners for everything in the repo. #* @defunkt -* @mkavulich @gsketefian @JeffBeck-NOAA @RatkoVasic-NOAA @BenjaminBlake-NOAA @ywangwof @chan-hoo @panll @christinaholtNOAA @christopherwharrop-noaa @danielabdi-noaa @mark-a-potts @jkbk2004 @willmayfield @dmwright526 @gspetro-NOAA @natalie-perlin @EdwardSnyder-NOAA @MichaelLueken +* @mkavulich @gsketefian @JeffBeck-NOAA @RatkoVasic-NOAA @BenjaminBlake-NOAA @ywangwof @chan-hoo @panll @christinaholtNOAA @christopherwharrop-noaa @danielabdi-noaa @mark-a-potts @jkbk2004 @willmayfield @dmwright526 @gspetro-NOAA @natalie-perlin @EdwardSnyder-NOAA @MichaelLueken @rickgrubin-noaa @BruceKropp-Raytheon # Order is important. The last matching pattern has the most precedence. # So if a pull request only touches javascript files, only these owners diff --git a/.github/PULL_REQUEST_TEMPLATE b/.github/PULL_REQUEST_TEMPLATE index 1c363c651f..29a878d4a4 100644 --- a/.github/PULL_REQUEST_TEMPLATE +++ b/.github/PULL_REQUEST_TEMPLATE @@ -30,15 +30,13 @@ -- [ ] hera.intel -- [ ] orion.intel -- [ ] hercules.intel -- [ ] cheyenne.intel -- [ ] cheyenne.gnu - [ ] derecho.intel - [ ] gaea.intel -- [ ] gaeac5.intel +- [ ] hera.gnu +- [ ] hera.intel +- [ ] hercules.intel - [ ] jet.intel +- [ ] orion.intel - [ ] wcoss2.intel - [ ] NOAA Cloud (indicate which platform) - [ ] Jenkins diff --git a/.github/scripts/check_tech_doc.sh b/.github/scripts/check_tech_doc.sh new file mode 100755 index 0000000000..d988e50cd6 --- /dev/null +++ b/.github/scripts/check_tech_doc.sh @@ -0,0 +1,28 @@ +#!/bin/bash +# This script recreates technical documentation for the ush and tests/WE2E Python scripts +# If the tech docs produced here do not match the branch's contents, the script will fail + +set -eo pipefail + +# Install prerequisites +pip install sphinx +pip install sphinx-rtd-theme +pip install sphinxcontrib-bibtex + +# Regenerate tech docs in ush and tests/WE2E based on current state of scripts in those directories. +cd doc/TechDocs +sphinx-apidoc -fM --remove-old -o ./ush ../../ush +sphinx-apidoc -fM --remove-old -o ./tests/WE2E ../../tests/WE2E + +# Check for mismatch between what comes out of this action and what is in the PR. +status=`git status -s` + +if [ -n "${status}" ]; then + echo ${status} + echo "" + echo "Please update your Technical Documentation RST files." + exit 1 +else + echo "Technical documentation is up-to-date." 
+ exit 0 +fi diff --git a/.github/workflows/doc_tests.yaml b/.github/workflows/doc_tests.yaml new file mode 100644 index 0000000000..34fb01a9ac --- /dev/null +++ b/.github/workflows/doc_tests.yaml @@ -0,0 +1,25 @@ +name: Doc Tests +on: + push: + pull_request: + branches: + - develop + - 'release/*' + workflow_dispatch: + +defaults: + run: + shell: bash -leo pipefail {0} + +jobs: + doc_tests: + runs-on: ubuntu-latest + steps: + - name: Checkout repository + uses: actions/checkout@v4 + - name: Check tech docs + run: .github/scripts/check_tech_doc.sh + - name: Build documentation + run: | + cd doc + make doc diff --git a/.github/workflows/python_tests.yaml b/.github/workflows/python_tests.yaml index fb0de16910..113ec3f59c 100644 --- a/.github/workflows/python_tests.yaml +++ b/.github/workflows/python_tests.yaml @@ -30,10 +30,6 @@ jobs: cache-downloads: true cache-environment: true - - name: Checkout externals - run: | - ./manage_externals/checkout_externals ufs-weather-model - - name: Lint the python code run: | micromamba activate srw_app @@ -44,17 +40,22 @@ jobs: pylint ush/set_fv3nml*.py pylint ush/update_input_nml.py + - name: Checkout externals + run: | + ./manage_externals/checkout_externals ufs-weather-model + - name: Run python unittests run: | # exclude test_retrieve_data that is tested in functional test micromamba activate srw_app export UNIT_TEST=True export PYTHONPATH=$(pwd)/ush - python -m unittest -b tests/test_python/*.py + python -m unittest tests/test_python/*.py - name: Run python functional tests run: | micromamba activate srw_app export CI=true export PYTHONPATH=${PWD}/ush - python3 -m unittest -b tests/test_python/test_retrieve_data.py + python3 -m unittest tests/test_python/test_retrieve_data.py + diff --git a/.gitignore b/.gitignore index ed78ca4182..99f71c9590 100644 --- a/.gitignore +++ b/.gitignore @@ -23,6 +23,7 @@ ush/python_utils/__pycache__/ ush/python_utils/workflow-tools/ ush/*.swp conda_loc +*/.DS_Store *.swp __pycache__ diff --git a/.readthedocs.yaml b/.readthedocs.yaml index c8ce6064b2..8aabbaae76 100644 --- a/.readthedocs.yaml +++ b/.readthedocs.yaml @@ -28,8 +28,4 @@ python: install: - requirements: doc/requirements.txt -submodules: - include: - - hpc-stack-mod - recursive: true diff --git a/CMakeLists.txt b/CMakeLists.txt index 30e241daba..cf8180097d 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -8,12 +8,20 @@ project(ufs-srweather-app VERSION 1.0 LANGUAGES C CXX Fortran) find_package(MPI REQUIRED COMPONENTS C CXX Fortran) # Set extended version info. -SET(SRWA_VERSION_MAJOR 1) -SET(SRWA_VERSION_MINOR 0) +SET(SRWA_VERSION_MAJOR 2) +SET(SRWA_VERSION_MINOR 2) SET(SRWA_VERSION_PATCH 0) SET(SRWA_VERSION_NOTE "-development") SET(SRWA_VERSION ${SRWA_VERSION_MAJOR}.${SRWA_VERSION_MINOR}.${SRWA_VERSION_PATCH}${SRWA_VERSION_NOTE}) +# Get the latest abbreviated commit hash of the working branch +execute_process( + COMMAND git log -1 --format=%h + WORKING_DIRECTORY ${CMAKE_CURRENT_LIST_DIR} + OUTPUT_VARIABLE GIT_HASH + OUTPUT_STRIP_TRAILING_WHITESPACE + ) + # A function used to create autotools-style 'yes/no' definitions. # If a variable is set, it 'yes' is returned. Otherwise, 'no' is # returned. @@ -55,7 +63,7 @@ if(NOT DEFINED CMAKE_INSTALL_BINDIR) endif() ##### -# Configure and print the ufs-srweather-app.settings file. +# Configure and print the build settings file. ##### # Determine the configure date. 
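# Illustrative note: the execute_process() call added above stores an abbreviated
# commit id (e.g. the hypothetical "1a2b3c4") in GIT_HASH; because the build settings
# file is generated from sorc/build_settings_template.yaml via CONFIGURE_FILE(... @ONLY),
# the template can pick the value up as @GIT_HASH@ (assuming it declares that placeholder).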
@@ -92,10 +100,12 @@ SET(host_vendor "${osname}") SET(host_os "${osrel}") SET(abs_top_builddir "${CMAKE_CURRENT_BINARY_DIR}") SET(abs_top_srcdir "${CMAKE_CURRENT_SOURCE_DIR}") +SET(application "${APP}") +SET(machine "${BUILD_MACHINE}") SET(CC_VERSION "${CMAKE_C_COMPILER}") -# Set values for .settings file. +# Set values for build settings file. SET(CFLAGS "${CMAKE_C_FLAGS} ${CMAKE_C_FLAGS_${CMAKE_BUILD_TYPE}}") SET(CPPFLAGS "${CMAKE_CPP_FLAGS} ${CMAKE_CPP_FLAGS_${CMAKE_BUILD_TYPE}}") SET(LDFLAGS "${CMAKE_SHARED_LINKER_FLAGS} ${CMAKE_SHARED_LINKER_FLAGS_${CMAKE_BUILD_TYPE}}") @@ -107,27 +117,27 @@ is_enabled(BUILD_SHARED_LIBS enable_shared) is_enabled(STATUS_PARALLEL HAS_PARALLEL) # Generate file from template. -CONFIGURE_FILE("${CMAKE_CURRENT_SOURCE_DIR}/ufs_srweather_app.settings.in" - "${CMAKE_CURRENT_BINARY_DIR}/ufs_srweather_app.settings" +CONFIGURE_FILE("${CMAKE_CURRENT_SOURCE_DIR}/sorc/build_settings_template.yaml" + "${CMAKE_CURRENT_BINARY_DIR}/build_settings.yaml" @ONLY) # Read in settings file, print out. # Avoid using system-specific calls so that this # might also work on Windows. -FILE(READ "${CMAKE_CURRENT_BINARY_DIR}/ufs_srweather_app.settings" +FILE(READ "${CMAKE_CURRENT_BINARY_DIR}/build_settings.yaml" UFS-SRWEATHER-APP_SETTINGS) MESSAGE(${UFS-SRWEATHER-APP_SETTINGS}) -# Install ufs_srweather_app.settings file into same location +# Install build settings file into same location # as the app. -INSTALL(FILES "${CMAKE_BINARY_DIR}/ufs_srweather_app.settings" +INSTALL(FILES "${CMAKE_BINARY_DIR}/build_settings.yaml" DESTINATION ${CMAKE_INSTALL_BINDIR}) ##### # Create 'ufs_srweather_app_meta.h' include file. ##### configure_file( - ufs_srweather_app_meta.h.in + sorc/ufs_srweather_app_meta.h.in ufs_srweather_app_meta.h @ONLY) FILE(COPY "${CMAKE_CURRENT_BINARY_DIR}/ufs_srweather_app_meta.h" DESTINATION include) diff --git a/Externals.cfg b/Externals.cfg index 49ea5ffc38..9acd326b65 100644 --- a/Externals.cfg +++ b/Externals.cfg @@ -12,7 +12,7 @@ protocol = git repo_url = https://github.com/ufs-community/ufs-weather-model # Specify either a branch name or a hash but not both. #branch = develop -hash = 8518c2c +hash = 38a29a6 local_path = sorc/ufs-weather-model required = True @@ -21,7 +21,7 @@ protocol = git repo_url = https://github.com/NOAA-EMC/UPP # Specify either a branch name or a hash but not both. #branch = develop -hash = 945cb2c +hash = 81b38a8 local_path = sorc/UPP required = True @@ -30,7 +30,7 @@ protocol = git repo_url = https://github.com/noaa-oar-arl/NEXUS # Specify either a branch name or a hash but not both. #branch = develop -hash = 40346b6 +hash = e153072 local_path = sorc/arl_nexus required = True @@ -39,7 +39,7 @@ protocol = git repo_url = https://github.com/NOAA-EMC/AQM-utils # Specify either a branch name or a hash but not both. #branch = develop -hash = d953bd1 +hash = e236acd local_path = sorc/AQM-utils required = True diff --git a/README.md b/README.md index 3bf56f4c21..bdda52279d 100644 --- a/README.md +++ b/README.md @@ -1,13 +1,13 @@ # UFS Short-Range Weather Application -The Unified Forecast System (UFS) is a community-based, coupled, comprehensive Earth modeling system. NOAA's operational model suite for numerical weather prediction (NWP) is quickly transitioning to the UFS from a number of legacy modeling systems. The UFS enables research, development, and contribution opportunities within the broader Weather Enterprise (including government, industry, and academia). For more information about the UFS, visit the UFS Portal at https://ufscommunity.org/. 
+The Unified Forecast System (UFS) is a community-based, coupled, comprehensive Earth modeling system. NOAA's operational model suite for numerical weather prediction (NWP) is quickly transitioning to the UFS from a number of legacy modeling systems. The UFS enables research, development, and contribution opportunities within the broader Weather Enterprise (including government, industry, and academia). For more information about the UFS, visit the UFS Portal at https://ufs.epic.noaa.gov/. -The UFS includes multiple applications (see a complete list at https://ufscommunity.org/science/aboutapps/) that support different forecast durations and spatial domains. This documentation describes the development branch of the UFS Short-Range Weather (SRW) Application, which targets predictions of atmospheric behavior on a limited spatial domain and on time scales from minutes to several days. The development branch of the application is continually evolving as the system undergoes open development. The latest SRW App release (v2.2.0) represents a snapshot of this continuously evolving system. +The UFS includes multiple applications (see a complete list at https://ufs.epic.noaa.gov/applications/) that support different forecast durations and spatial domains. This documentation describes the development branch of the UFS Short-Range Weather (SRW) Application, which targets predictions of atmospheric behavior on a limited spatial domain and on time scales from minutes to several days. The development branch of the application is continually evolving as the system undergoes open development. The latest SRW App release (v2.2.0) represents a snapshot of this continuously evolving system. The UFS SRW App User's Guide associated with the development branch is at: https://ufs-srweather-app.readthedocs.io/en/develop/, while the guide specific to the SRW App v2.2.0 release can be found at: https://ufs-srweather-app.readthedocs.io/en/release-public-v2.2.0/. The repository is at: https://github.com/ufs-community/ufs-srweather-app. For instructions on how to clone the repository, build the code, and run the workflow, see: -- https://ufs-srweather-app.readthedocs.io/en/develop/BuildingRunningTesting/Quickstart.html +- https://ufs-srweather-app.readthedocs.io/en/develop/UsersGuide/BuildingRunningTesting/Quickstart.html For a debugging guide for users and developers in the field of Earth System Modeling, please see: https://epic.noaa.gov/wp-content/uploads/2022/12/Debugging-Guide.pdf diff --git a/aqm_environment.yml b/aqm_environment.yml index afd8a7b634..11bf9e57e3 100644 --- a/aqm_environment.yml +++ b/aqm_environment.yml @@ -9,5 +9,5 @@ dependencies: - pylint=2.17* - pytest=7.2* - scipy=1.10.* - - uwtools=2.1* + - uwtools=2.3* - xarray=2022.11.* diff --git a/devbuild.sh b/devbuild.sh index 014fbdb3b7..332abb49b5 100755 --- a/devbuild.sh +++ b/devbuild.sh @@ -15,8 +15,10 @@ OPTIONS compiler to use; default depends on platform (e.g. intel | gnu | cray | gccgfortran) -a, --app=APPLICATION - weather model application to build; for example, ATMAQ for Online-CMAQ - (e.g. ATM | ATMAQ | ATMW | S2S | S2SW) + weather model application to build; supported SRW options are + ATM (default) Atmosphere only + ATMAQ Online-CMAQ (air quality) + ATMF UFS_FIRE (coupled Community Fire Behavior Model) --ccpp="CCPP_SUITE1,CCPP_SUITE2..." CCPP suites (CCPP_SUITES) to include in build; delimited with ',' --enable-options="OPTION1,OPTION2,..." 
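# Illustrative invocations of the --app option documented above (the platform and
# compiler values are examples only, not a recommendation):
#   ./devbuild.sh --platform=hera --compiler=intel --app=ATM
#   ./devbuild.sh --platform=hera --compiler=gnu --app=ATMF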
@@ -363,6 +365,7 @@ fi # cmake settings CMAKE_SETTINGS="\ + -DBUILD_MACHINE=${MACHINE}\ -DCMAKE_BUILD_TYPE=${BUILD_TYPE}\ -DCMAKE_INSTALL_PREFIX=${INSTALL_DIR}\ -DCMAKE_INSTALL_BINDIR=${BIN_DIR}\ diff --git a/devclean.sh b/devclean.sh index 01ace7a7d9..b26988dd93 100755 --- a/devclean.sh +++ b/devclean.sh @@ -4,33 +4,31 @@ usage () { cat << EOF_USAGE -Clean the UFS-SRW Application build +Clean the UFS-SRW Application build. + +NOTE: If user included custom directories at build time, those directories must be deleted manually + Usage: $0 [OPTIONS] ... OPTIONS -h, --help - show this help guide + Show this help guide -a, --all - removes "bin", "build" directories, and other build artifacts - --remove - removes the "build" directory, keeps the "bin", "lib" and other build artifacts intact - --clean - removes "bin", "build" directories, and other build artifacts (same as "-a", "--all") - --conda - removes "conda" directory and conda_loc file in SRW - --install-dir=INSTALL_DIR - installation directory name (\${SRW_DIR} by default) - --build-dir=BUILD_DIR - main build directory, absolute path (\${SRW_DIR}/build/ by default) - --bin-dir=BIN_DIR - binary directory name ("exec" by default); full path is \${INSTALL_DIR}/\${BIN_DIR}) - --conda-dir=CONDA_DIR - directory where conda is installed. caution: if outside the SRW clone, it may have broader use - --sub-modules - remove sub-module directories. They will need to be checked out again by sourcing "\${SRW_DIR}/manage_externals/checkout_externals" before attempting subsequent builds + Remove all build artifacts, conda and submodules (equivalent to \`-b -c -s\`) + -b, --build + Remove build directories and artifacts: build/ exec/ share/ include/ lib/ lib64/ + -c, --conda + Remove "conda" directory and conda_loc file in SRW main directory + --container + For cleaning builds within the SRW containers, will remove the "container-bin" + directory rather than "exec". Has no effect if \`-b\` is not specified. + -f, --force + Remove directories as requested, without asking for user confirmation of their deletion. + -s, --sub-modules + Remove sub-module directories. 
They need to be checked out again by sourcing "\${SRW_DIR}/manage_externals/checkout_externals" before attempting subsequent builds -v, --verbose - provide more verbose output - + Provide more verbose output + EOF_USAGE } @@ -39,17 +37,10 @@ settings () { cat << EOF_SETTINGS Settings: - INSTALL_DIR=${INSTALL_DIR} - BUILD_DIR=${BUILD_DIR} - BIN_DIR=${BIN_DIR} - CONDA_DIR=${CONDA_DIR} - REMOVE=${REMOVE} + FORCE=${REMOVE} VERBOSE=${VERBOSE} - -Default cleaning options: (if no arguments provided, then nothing is cleaned) - REMOVE=${REMOVE} - CLEAN=${CLEAN} - INCLUDE_SUB_MODULES=${INCLUDE_SUB_MODULES} + REMOVE_SUB_MODULES=${REMOVE_SUB_MODULES} + REMOVE_CONDA=${REMOVE_CONDA} EOF_SETTINGS } @@ -63,46 +54,28 @@ usage_error () { # default settings SRW_DIR=$(cd "$(dirname "$(readlink -f -n "${BASH_SOURCE[0]}" )" )" && pwd -P) -INSTALL_DIR=${INSTALL_DIR:-${SRW_DIR}} -BUILD_DIR=${BUILD_DIR:-"${SRW_DIR}/build"} -BIN_DIR="exec" -CONDA_DIR=${CONDA_DIR:-"${SRW_DIR}/conda"} -REMOVE=false VERBOSE=false # default clean options REMOVE=false -CLEAN=false -INCLUDE_SUB_MODULES=false #changes to true if '--sub-modules' option is provided +REMOVE_BUILD=false +REMOVE_CONDA=false +REMOVE_SUB_MODULES=false +CONTAINER=false -# process requires arguments -if [[ ("$1" == "--help") || ("$1" == "-h") ]]; then - usage - exit 0 -fi - -# process optional arguments +# process arguments while :; do case $1 in --help|-h) usage; exit 0 ;; - --all|-a) ALL_CLEAN=true ;; - --remove) REMOVE=true ;; - --remove=?*|--remove=) usage_error "$1 argument ignored." ;; - --clean) CLEAN=true ;; - --conda) REMOVE_CONDA=true ;; - --install-dir=?*) INSTALL_DIR=${1#*=} ;; - --install-dir|--install-dir=) usage_error "$1 requires argument." ;; - --build-dir=?*) BUILD_DIR=${1#*=} ;; - --build-dir|--build-dir=) usage_error "$1 requires argument." ;; - --bin-dir=?*) BIN_DIR=${1#*=} ;; - --bin-dir|--bin-dir=) usage_error "$1 requires argument." ;; - --conda-dir=?*) CONDA_DIR=${1#*=} ;; - --conda-dir|--conda-dir=) usage_error "$1 requires argument." ;; - --sub-modules) INCLUDE_SUB_MODULES=true ;; + --all|-a) REMOVE_BUILD=true; REMOVE_CONDA=true; REMOVE_SUB_MODULES=true ;; + --build|-b) REMOVE_BUILD=true ;; + --conda|-c) REMOVE_CONDA=true ;; + --container) CONTAINER=true ;; + --force) REMOVE=true ;; + --force=?*|--force=) usage_error "$1 argument ignored." ;; + --sub-modules|-s) REMOVE_SUB_MODULES=true ;; + --sub-modules=?*|--sub-modules=) usage_error "$1 argument ignored." ;; --verbose|-v) VERBOSE=true ;; - --verbose=?*|--verbose=) usage_error "$1 argument ignored." 
;;
-    # targets
-    default) ALL_CLEAN=false ;;
     # unknown
     -?*|?*) usage_error "Unknown option $1" ;;
     *) break ;;
@@ -110,66 +83,94 @@ while :; do
   shift
 done

-# choose defaults to clean
-if [ "${ALL_CLEAN}" = true ]; then
-  CLEAN=true
-fi

 # print settings
 if [ "${VERBOSE}" = true ] ; then
   settings
 fi

-# clean if build directory already exists
-if [ "${REMOVE}" = true ] && [ "${CLEAN}" = false ] ; then
-  printf '%s\n' "Remove the \"build\" directory only, BUILD_DIR = $BUILD_DIR "
-  [[ -d ${BUILD_DIR} ]] && rm -rf ${BUILD_DIR} && printf '%s\n' "rm -rf ${BUILD_DIR}"
-elif [ "${CLEAN}" = true ]; then
-  printf '%s\n' "Remove build directory, bin directory, and other build artifacts "
-  printf '%s\n' " from the installation directory = ${INSTALL_DIR} "
-
-  directories=( \
-    "${BUILD_DIR}" \
-    "${INSTALL_DIR}/${BIN_DIR}" \
-    "${INSTALL_DIR}/share" \
-    "${INSTALL_DIR}/include" \
-    "${INSTALL_DIR}/lib" \
-    "${INSTALL_DIR}/lib64" \
+# Populate "removal_list" as an array of files/directories to remove, based on user selections
+declare -a removal_list='()'
+
+# Clean standard build artifacts
+if [ ${REMOVE_BUILD} == true ]; then
+  removal_list=( \
+    "${SRW_DIR}/build" \
+    "${SRW_DIR}/share" \
+    "${SRW_DIR}/include" \
+    "${SRW_DIR}/lib" \
+    "${SRW_DIR}/lib64" \
   )
-  if [ ${#directories[@]} -ge 1 ]; then
-    for dir in ${directories[@]}; do
-      [[ -d "${dir}" ]] && rm -rfv ${dir}
-    done
-    echo " "
+  if [ ${CONTAINER} == true ]; then
+    removal_list+=("${SRW_DIR}/container-bin")
+  else
+    removal_list+=("${SRW_DIR}/exec")
   fi
 fi

-# Clean all the submodules if requested. Note: Need to check out them again before attempting subsequent builds, by sourcing ${SRW_DIR}/manage_externals/checkout_externals
-if [ ${INCLUDE_SUB_MODULES} == true ]; then
-  printf '%s\n' "Removing submodules ..."
+
+# Clean all the submodules if requested.
+if [ ${REMOVE_SUB_MODULES} == true ]; then
   declare -a submodules='()'
-  submodules=(${SRW_DIR}/sorc/*)
-#  echo " submodules are: ${submodules[@]} (total of ${#submodules[@]}) "
-  if [ ${#submodules[@]} -ge 1 ]; then
-    for sub in ${submodules[@]}; do [[ -d "${sub}" ]] && ( rm -rf ${sub} && printf '%s\n' "rm -rf ${sub}" ); done
+  submodules=(./sorc/*)
+  # Only add directories to make sure we don't delete CMakeLists.txt
+  for sub in ${submodules[@]}; do [[ -d "${sub}" ]] && removal_list+=( "${sub}" ); done
+  if [ "${VERBOSE}" = true ] ; then
+    printf '%s\n' "Note: Need to check out submodules again for any subsequent builds, " \
+      " by running ${SRW_DIR}/manage_externals/checkout_externals "
   fi
-  printf '%s\n' "Note: Need to check out submodules again for any subsequent builds, " \
-    " by sourcing ${SRW_DIR}/manage_externals/checkout_externals "
 fi

-#
 # Clean conda if requested
 if [ "${REMOVE_CONDA}" = true ] ; then
-  printf '%s\n' "Removing conda location file"
-  rm -rf ${SRW_DIR}/conda_loc
-  printf '%s\n' "Removing conda installation"
-  rm -rf ${CONDA_DIR}
+  # Read "conda_loc" only to verify that conda is in the default location; if the user has
+  # pointed it at a different location, they likely do not want that installation removed!
+  conda_location=$(<${SRW_DIR}/conda_loc)
+  if [ "${VERBOSE}" = true ] ; then
+    echo "conda_location=$conda_location"
+  fi
+  if [ "${conda_location}" == "${SRW_DIR}/conda" ]; then
+    removal_list+=("${SRW_DIR}/conda_loc")
+    removal_list+=("${SRW_DIR}/conda")
+  else
+    echo "WARNING: the conda location recorded in ${SRW_DIR}/conda_loc is not the default location!"
+    echo "Will not attempt to remove conda!"
+ fi fi +# If array is empty, that means user has not selected any removal options +if [ ${#removal_list[@]} -eq 0 ]; then + usage_error "No removal options specified" +fi +while [ ${REMOVE} == false ]; do + # Make user confirm deletion of directories unless '--force' option was provided + printf "The following files/directories will be deleted:\n\n" + for i in "${removal_list[@]}"; do + echo "$i" + done + echo "" + read -p "Confirm that you want to delete these files/directories! (Yes/No): " choice + case ${choice} in + [Yy]* ) REMOVE=true ;; + [Nn]* ) echo "User chose not to delete, exiting..."; exit ;; + * ) printf "Invalid option selected.\n" ;; + esac +done + +if [ ${REMOVE} == true ]; then + for dir in ${removal_list[@]}; do + echo "Removing ${dir}" + if [ "${VERBOSE}" = true ] ; then + rm -rfv ${dir} + else + rm -rf ${dir} + fi + done + echo " " + echo "All the requested cleaning tasks have been completed" + echo " " +fi -echo " " -echo "All the requested cleaning tasks have been completed" -echo " " exit 0 diff --git a/doc/ContribGuide/contributing.rst b/doc/ContribGuide/contributing.rst index ed1671363e..0f7231e265 100644 --- a/doc/ContribGuide/contributing.rst +++ b/doc/ContribGuide/contributing.rst @@ -11,7 +11,7 @@ Fork and PR Overview Contributions to the ``ufs-srweather-app`` project are made via a :github-docs:`Fork` and :github-docs:`Pull Request (PR)` model. GitHub provides a thorough description of this contribution model in their `Contributing to a project` :github-docs:`Quickstart`, but the steps, with respect to ``ufs-srweather-app`` contributions, can be summarized as: -#. :github-docs:`Create an issue ` to document proposed changes. +#. :github-docs:`Create an issue ` to document proposed changes. #. :github-docs:`Fork` the :srw-repo:`ufs-srweather-app repository<>` into your personal GitHub account. #. :github-docs:`Clone` your fork onto your development system. #. :github-docs:`Create a branch` in your clone for your changes. All development should take place on a branch, *not* on ``develop``. @@ -227,15 +227,13 @@ Here is the template that is provided when developers click "Create pull request - - [ ] hera.intel - - [ ] orion.intel - - [ ] hercules.intel - - [ ] cheyenne.intel - - [ ] cheyenne.gnu - [ ] derecho.intel - [ ] gaea.intel - - [ ] gaeac5.intel + - [ ] hera.gnu + - [ ] hera.intel + - [ ] hercules.intel - [ ] jet.intel + - [ ] orion.intel - [ ] wcoss2.intel - [ ] NOAA Cloud (indicate which platform) - [ ] Jenkins diff --git a/doc/ContribGuide/documentation.rst b/doc/ContribGuide/documentation.rst index 9e0bad6bda..babcd24368 100644 --- a/doc/ContribGuide/documentation.rst +++ b/doc/ContribGuide/documentation.rst @@ -69,4 +69,54 @@ Please follow these guidelines when contributing to the documentation: .. code-block:: python - n = 88 \ No newline at end of file + n = 88 + +Troubleshooting Documentation +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +In the SRW App documentation Makefile (``ufs-srweather-app/doc/Makefile``), the ``-W`` option causes documentation builds to fail when there are errors or warnings in the build. +This ensures that the documentation remains up-to-date and notifies developers of any new issues (like failing links). However, the build will fail when it hits the first error without showing subsequent errors. +When troubleshooting, it can be useful to see all warnings and errors. 
+To see all warnings and errors, comment out the ``-W`` flag in ``SPHINXOPTS`` in the Makefile and build the documentation by running ``make doc`` from the ``doc`` directory. + +Technical Documentation Guidelines +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Technical (API-like) documentation is generated for any Python scripts in the ``ush`` or ``tests/WE2E`` directories. +When developers change Python files in these directories, they need to update the Python docstrings (i.e., comments in ``"""triple quotes"""``) to reflect the changes they made. +Each Python script should include: + + * A summary of the script's purpose/functionality + + * Should start with a verb, e.g., "checks" or "loads" or "initializes" + + * Docstrings for each method (except methods like ``_parse_args`` that start with an underscore). These docstrings should include: + + * A description of what the method does (starting with a verb, e.g., "checks" or "parses") + * A list of method parameters, or ``Args:``, with a definition and expected data type for each + * A return statement (``Returns:``) -- if applicable + * List of exceptions (``Raises:``) -- if applicable + +.. note:: + + Python does not have truly private methods, but methods that start with an underscore are the closest equivalent. In the SRW App, the underscore signals that this method is only accessed by the script within which it is called. + +After updating the docstrings, developers need to update the corresponding RST files. +To do this successfully, developers must have *sphinx>=7.4.0* installed on their system. To update the RST files, run: + +.. code-block:: console + + cd ufs-srweather-app/doc/TechDoc + sphinx-apidoc -fM --remove-old -o ./ush ../../ush + sphinx-apidoc -fM --remove-old -o ./tests/WE2E ../../tests/WE2E + +.. note:: + + Developers who do not have *sphinx>=7.4.0* installed may issue the following commands from ``ufs-srweather-app/doc/TechDoc`` before running the sphinx-apidoc commands above: + + .. code-block:: console + + rm -rf ush + rm -rf tests/WE2E + + This will delete current RST files before recreating them with the ``sphinx-apidoc`` command based on the current contents of ``ush`` and ``tests/WE2E``. This step is necessary because the ``--remove-old`` flag does not work with earlier versions of sphinx. diff --git a/doc/INSTALL b/doc/INSTALL deleted file mode 100644 index 53dc159bbd..0000000000 --- a/doc/INSTALL +++ /dev/null @@ -1,61 +0,0 @@ -# Simple setup instructions for the UFS SRW App -# For more details, see the "Getting Started" guide: -# https://github.com/ufs-community/ufs-srweather-app/wiki/Getting-Started - -# Getting the UFS SRW App code -# -# The SRW App can be downloaded directly from github, either by using `git clone` or by downloading -# from the web. - -git clone https://github.com/ufs-community/ufs-srweather-app.git - -cd ufs-srweather-app/ -./manage_externals/checkout_externals - -# We can build ufs-srweather-app binaries in two ways. - -# Method 1 -# ======== - -# This is the simplest way to build the binaries - -./devbuild.sh --platform=PLATFORM - -# If compiler auto-detection fails, specify it using - -./devbuild.sh --platform=PLATFORM --compiler=COMPILER - -# Method 2 -# ======== - -# The above instructions will work atleast on Tier-1 systems, if not on all supported machines. -# However, if it fails for some reason, we can build directly with cmake. - -# First, we need to make sure that there is a modulefile "build_[PLATFORM]_[COMPILER]" in the -# "modulefiles" directory. Also, on some systems (e.g. 
Gaea/Odin) that come with cray module app, -# we may need to swap that for Lmod instead. Assuming your login shell is bash, run - -source etc/lmod-setup.sh PLATFORM - -# and if your login schell is csh/tcsh, source etc/lmod-setup.csh instead. - -# From here on, we can assume Lmod is loaded and ready to go. Then we load the specific -# module for a given PLATFORM and COMPILER as follows - -module use $PWD/modulefiles #full path to modulefiles directory -module load build_[PLATFORM]_[COMPILER] - -# Supported CMake flags: -# -DCMAKE_INSTALL_PREFIX Location where the bin/ include/ lib/ and share/ directories containing -# the various components of the SRW App will be created. Recommended value -# is "..", one directory up from the build directory -# -DCCPP_SUITES A comma-separated list of CCPP suites to build with the UFS weather -# model. See the User's Guide for a full list of available suites. The -# default is to build with the released supported suites: FV3_GFS_v15p2 and -# FV3_RRFS_v1beta - -mkdir build && cd build -cmake .. -DCMAKE_INSTALL_PREFIX=.. -make -j 8 - - diff --git a/doc/Makefile b/doc/Makefile index c91f2f147b..a4fac61e1a 100644 --- a/doc/Makefile +++ b/doc/Makefile @@ -1,6 +1,6 @@ # Makefile for Sphinx documentation -SPHINXOPTS = -a -n #-W +SPHINXOPTS = -a -n -W SPHINXBUILD = sphinx-build SOURCEDIR = . BUILDDIR = build @@ -24,4 +24,4 @@ linkcheck: # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). %: Makefile - @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) -w $(BUILDDIR)/warnings.log \ No newline at end of file + @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) -w $(BUILDDIR)/warnings.log diff --git a/doc/README b/doc/README index 0ad8948eda..ce2e37f76e 100644 --- a/doc/README +++ b/doc/README @@ -9,6 +9,7 @@ Steps to build and use the Sphinx documentation tool: pip install sphinx pip install sphinxcontrib-bibtex pip install sphinx-rtd-theme + pip install sphinxcontrib.autoyaml One approach that has worked to resolve "Module Not Found" errors for users with MacPorts package manager: $ sudo port install py-six # may not be necessary @@ -20,10 +21,11 @@ Steps to build and use the Sphinx documentation tool: To build html: -$ cd ufs-srweather-app/docs/UsersGuide -$ make clean && sphinx-build -b html source build +$ cd ufs-srweather-app/doc +$ make clean && sphinx-build -b html . build The "make html" command can often be used in place of the previous command. +"make doc" will both build the html and run the linkchecker. Sphinx uses Latex to export the documentation as a PDF file. To build pdf: diff --git a/doc/RUNTIME b/doc/RUNTIME deleted file mode 100644 index e2ca78894d..0000000000 --- a/doc/RUNTIME +++ /dev/null @@ -1,22 +0,0 @@ -# Users should load the appropriate python environment for the workflow. -# The workflow requires Python 3, with the packages 'PyYAML', 'Jinja2', and 'f90nml' available. - -# For users' convenience, the python environment for the workflow can be activated by loading wflow_[PLATFORM] modulefile - -# For example, on Hera: - -module load wflow_hera - -# Due to older version of Lmod, inconsistency with TCL modulefiles etc, you may have to activate -# conda manually using instructions that the previous module command prints. 
-# Hera is one of those systems, so execute: - -conda activate regional_workflow - -# After that we can setup an experiment in the directory - -cd regional_workflow/ush - -# Once we prepare experiment file config.sh, we can generate workflow using - -./generate_FV3LAM_wflow.sh diff --git a/doc/TechDocs/index.rst b/doc/TechDocs/index.rst new file mode 100644 index 0000000000..1ca8ec7309 --- /dev/null +++ b/doc/TechDocs/index.rst @@ -0,0 +1,8 @@ +Technical Documentation +=========================== + +.. toctree:: + :maxdepth: 3 + + tests/index + ush/modules \ No newline at end of file diff --git a/doc/TechDocs/tests/WE2E/WE2E_summary.rst b/doc/TechDocs/tests/WE2E/WE2E_summary.rst new file mode 100644 index 0000000000..d15eb54012 --- /dev/null +++ b/doc/TechDocs/tests/WE2E/WE2E_summary.rst @@ -0,0 +1,7 @@ +WE2E\_summary module +==================== + +.. automodule:: WE2E_summary + :members: + :undoc-members: + :show-inheritance: diff --git a/doc/TechDocs/tests/WE2E/modules.rst b/doc/TechDocs/tests/WE2E/modules.rst new file mode 100644 index 0000000000..5012d5229f --- /dev/null +++ b/doc/TechDocs/tests/WE2E/modules.rst @@ -0,0 +1,11 @@ +WE2E +==== + +.. toctree:: + :maxdepth: 4 + + WE2E_summary + monitor_jobs + print_test_info + run_WE2E_tests + utils diff --git a/doc/TechDocs/tests/WE2E/monitor_jobs.rst b/doc/TechDocs/tests/WE2E/monitor_jobs.rst new file mode 100644 index 0000000000..9630b5310c --- /dev/null +++ b/doc/TechDocs/tests/WE2E/monitor_jobs.rst @@ -0,0 +1,7 @@ +monitor\_jobs module +==================== + +.. automodule:: monitor_jobs + :members: + :undoc-members: + :show-inheritance: diff --git a/doc/TechDocs/tests/WE2E/print_test_info.rst b/doc/TechDocs/tests/WE2E/print_test_info.rst new file mode 100644 index 0000000000..83e66e9bf8 --- /dev/null +++ b/doc/TechDocs/tests/WE2E/print_test_info.rst @@ -0,0 +1,7 @@ +print\_test\_info module +======================== + +.. automodule:: print_test_info + :members: + :undoc-members: + :show-inheritance: diff --git a/doc/TechDocs/tests/WE2E/run_WE2E_tests.rst b/doc/TechDocs/tests/WE2E/run_WE2E_tests.rst new file mode 100644 index 0000000000..648ae51bd2 --- /dev/null +++ b/doc/TechDocs/tests/WE2E/run_WE2E_tests.rst @@ -0,0 +1,7 @@ +run\_WE2E\_tests module +======================= + +.. automodule:: run_WE2E_tests + :members: + :undoc-members: + :show-inheritance: diff --git a/doc/TechDocs/tests/WE2E/utils.rst b/doc/TechDocs/tests/WE2E/utils.rst new file mode 100644 index 0000000000..44cef9edab --- /dev/null +++ b/doc/TechDocs/tests/WE2E/utils.rst @@ -0,0 +1,7 @@ +utils module +============ + +.. automodule:: utils + :members: + :undoc-members: + :show-inheritance: diff --git a/doc/TechDocs/tests/index.rst b/doc/TechDocs/tests/index.rst new file mode 100644 index 0000000000..d3c05fdf84 --- /dev/null +++ b/doc/TechDocs/tests/index.rst @@ -0,0 +1,7 @@ +tests +====== + +.. toctree:: + :maxdepth: 3 + + WE2E/modules \ No newline at end of file diff --git a/doc/TechDocs/ush/UFS_plot_domains.rst b/doc/TechDocs/ush/UFS_plot_domains.rst new file mode 100644 index 0000000000..bd1b42314a --- /dev/null +++ b/doc/TechDocs/ush/UFS_plot_domains.rst @@ -0,0 +1,7 @@ +UFS\_plot\_domains module +========================= + +.. 
automodule:: UFS_plot_domains + :members: + :undoc-members: + :show-inheritance: diff --git a/doc/TechDocs/ush/calculate_cost.rst b/doc/TechDocs/ush/calculate_cost.rst new file mode 100644 index 0000000000..aa85031292 --- /dev/null +++ b/doc/TechDocs/ush/calculate_cost.rst @@ -0,0 +1,7 @@ +calculate\_cost module +====================== + +.. automodule:: calculate_cost + :members: + :undoc-members: + :show-inheritance: diff --git a/doc/TechDocs/ush/check_python_version.rst b/doc/TechDocs/ush/check_python_version.rst new file mode 100644 index 0000000000..15a22a03c9 --- /dev/null +++ b/doc/TechDocs/ush/check_python_version.rst @@ -0,0 +1,7 @@ +check\_python\_version module +============================= + +.. automodule:: check_python_version + :members: + :undoc-members: + :show-inheritance: diff --git a/doc/TechDocs/ush/config_utils.rst b/doc/TechDocs/ush/config_utils.rst new file mode 100644 index 0000000000..449200270d --- /dev/null +++ b/doc/TechDocs/ush/config_utils.rst @@ -0,0 +1,7 @@ +config\_utils module +==================== + +.. automodule:: config_utils + :members: + :undoc-members: + :show-inheritance: diff --git a/doc/TechDocs/ush/create_aqm_rc_file.rst b/doc/TechDocs/ush/create_aqm_rc_file.rst new file mode 100644 index 0000000000..033b996dbc --- /dev/null +++ b/doc/TechDocs/ush/create_aqm_rc_file.rst @@ -0,0 +1,7 @@ +create\_aqm\_rc\_file module +============================ + +.. automodule:: create_aqm_rc_file + :members: + :undoc-members: + :show-inheritance: diff --git a/doc/TechDocs/ush/create_diag_table_file.rst b/doc/TechDocs/ush/create_diag_table_file.rst new file mode 100644 index 0000000000..5a317526c1 --- /dev/null +++ b/doc/TechDocs/ush/create_diag_table_file.rst @@ -0,0 +1,7 @@ +create\_diag\_table\_file module +================================ + +.. automodule:: create_diag_table_file + :members: + :undoc-members: + :show-inheritance: diff --git a/doc/TechDocs/ush/create_model_configure_file.rst b/doc/TechDocs/ush/create_model_configure_file.rst new file mode 100644 index 0000000000..7a8dd0cd54 --- /dev/null +++ b/doc/TechDocs/ush/create_model_configure_file.rst @@ -0,0 +1,7 @@ +create\_model\_configure\_file module +===================================== + +.. automodule:: create_model_configure_file + :members: + :undoc-members: + :show-inheritance: diff --git a/doc/TechDocs/ush/create_ufs_configure_file.rst b/doc/TechDocs/ush/create_ufs_configure_file.rst new file mode 100644 index 0000000000..56c17689b7 --- /dev/null +++ b/doc/TechDocs/ush/create_ufs_configure_file.rst @@ -0,0 +1,7 @@ +create\_ufs\_configure\_file module +=================================== + +.. automodule:: create_ufs_configure_file + :members: + :undoc-members: + :show-inheritance: diff --git a/doc/TechDocs/ush/eval_metplus_timestr_tmpl.rst b/doc/TechDocs/ush/eval_metplus_timestr_tmpl.rst new file mode 100644 index 0000000000..7ccc52bb64 --- /dev/null +++ b/doc/TechDocs/ush/eval_metplus_timestr_tmpl.rst @@ -0,0 +1,7 @@ +eval\_metplus\_timestr\_tmpl module +=================================== + +.. automodule:: eval_metplus_timestr_tmpl + :members: + :undoc-members: + :show-inheritance: diff --git a/doc/TechDocs/ush/generate_FV3LAM_wflow.rst b/doc/TechDocs/ush/generate_FV3LAM_wflow.rst new file mode 100644 index 0000000000..f8e3dbc91e --- /dev/null +++ b/doc/TechDocs/ush/generate_FV3LAM_wflow.rst @@ -0,0 +1,7 @@ +generate\_FV3LAM\_wflow module +============================== + +.. 
automodule:: generate_FV3LAM_wflow + :members: + :undoc-members: + :show-inheritance: diff --git a/doc/TechDocs/ush/get_crontab_contents.rst b/doc/TechDocs/ush/get_crontab_contents.rst new file mode 100644 index 0000000000..8e62d5a258 --- /dev/null +++ b/doc/TechDocs/ush/get_crontab_contents.rst @@ -0,0 +1,7 @@ +get\_crontab\_contents module +============================= + +.. automodule:: get_crontab_contents + :members: + :undoc-members: + :show-inheritance: diff --git a/doc/TechDocs/ush/get_obs.rst b/doc/TechDocs/ush/get_obs.rst new file mode 100644 index 0000000000..6b4e2ac936 --- /dev/null +++ b/doc/TechDocs/ush/get_obs.rst @@ -0,0 +1,7 @@ +get\_obs module +=============== + +.. automodule:: get_obs + :members: + :undoc-members: + :show-inheritance: diff --git a/doc/TechDocs/ush/link_fix.rst b/doc/TechDocs/ush/link_fix.rst new file mode 100644 index 0000000000..79a7611115 --- /dev/null +++ b/doc/TechDocs/ush/link_fix.rst @@ -0,0 +1,7 @@ +link\_fix module +================ + +.. automodule:: link_fix + :members: + :undoc-members: + :show-inheritance: diff --git a/doc/TechDocs/ush/modules.rst b/doc/TechDocs/ush/modules.rst new file mode 100644 index 0000000000..6ac0346624 --- /dev/null +++ b/doc/TechDocs/ush/modules.rst @@ -0,0 +1,32 @@ +ush +=== + +.. toctree:: + :maxdepth: 4 + + UFS_plot_domains + calculate_cost + check_python_version + config_utils + create_aqm_rc_file + create_diag_table_file + create_model_configure_file + create_ufs_configure_file + eval_metplus_timestr_tmpl + generate_FV3LAM_wflow + get_crontab_contents + get_obs + link_fix + mrms_pull_topofhour + python_utils + retrieve_data + run_srw_tests + set_cycle_and_obs_timeinfo + set_fv3nml_ens_stoch_seeds + set_fv3nml_sfc_climo_filenames + set_gridparams_ESGgrid + set_gridparams_GFDLgrid + set_leadhrs + set_predef_grid_params + setup + update_input_nml diff --git a/doc/TechDocs/ush/mrms_pull_topofhour.rst b/doc/TechDocs/ush/mrms_pull_topofhour.rst new file mode 100644 index 0000000000..c688cd70ca --- /dev/null +++ b/doc/TechDocs/ush/mrms_pull_topofhour.rst @@ -0,0 +1,7 @@ +mrms\_pull\_topofhour module +============================ + +.. automodule:: mrms_pull_topofhour + :members: + :undoc-members: + :show-inheritance: diff --git a/doc/TechDocs/ush/python_utils.rst b/doc/TechDocs/ush/python_utils.rst new file mode 100644 index 0000000000..51dadd8b35 --- /dev/null +++ b/doc/TechDocs/ush/python_utils.rst @@ -0,0 +1,114 @@ +python\_utils package +===================== + +.. automodule:: python_utils + :members: + :undoc-members: + :show-inheritance: + +Submodules +---------- + +python\_utils.check\_for\_preexist\_dir\_file module +---------------------------------------------------- + +.. automodule:: python_utils.check_for_preexist_dir_file + :members: + :undoc-members: + :show-inheritance: + +python\_utils.check\_var\_valid\_value module +--------------------------------------------- + +.. automodule:: python_utils.check_var_valid_value + :members: + :undoc-members: + :show-inheritance: + +python\_utils.config\_parser module +----------------------------------- + +.. automodule:: python_utils.config_parser + :members: + :undoc-members: + :show-inheritance: + +python\_utils.create\_symlink\_to\_file module +---------------------------------------------- + +.. automodule:: python_utils.create_symlink_to_file + :members: + :undoc-members: + :show-inheritance: + +python\_utils.define\_macos\_utilities module +--------------------------------------------- + +.. 
automodule:: python_utils.define_macos_utilities + :members: + :undoc-members: + :show-inheritance: + +python\_utils.environment module +-------------------------------- + +.. automodule:: python_utils.environment + :members: + :undoc-members: + :show-inheritance: + +python\_utils.filesys\_cmds\_vrfy module +---------------------------------------- + +.. automodule:: python_utils.filesys_cmds_vrfy + :members: + :undoc-members: + :show-inheritance: + +python\_utils.fv3write\_parms\_lambert module +--------------------------------------------- + +.. automodule:: python_utils.fv3write_parms_lambert + :members: + :undoc-members: + :show-inheritance: + +python\_utils.misc module +------------------------- + +.. automodule:: python_utils.misc + :members: + :undoc-members: + :show-inheritance: + +python\_utils.print\_input\_args module +--------------------------------------- + +.. automodule:: python_utils.print_input_args + :members: + :undoc-members: + :show-inheritance: + +python\_utils.print\_msg module +------------------------------- + +.. automodule:: python_utils.print_msg + :members: + :undoc-members: + :show-inheritance: + +python\_utils.run\_command module +--------------------------------- + +.. automodule:: python_utils.run_command + :members: + :undoc-members: + :show-inheritance: + +python\_utils.xml\_parser module +-------------------------------- + +.. automodule:: python_utils.xml_parser + :members: + :undoc-members: + :show-inheritance: diff --git a/doc/TechDocs/ush/retrieve_data.rst b/doc/TechDocs/ush/retrieve_data.rst new file mode 100644 index 0000000000..ff53326f08 --- /dev/null +++ b/doc/TechDocs/ush/retrieve_data.rst @@ -0,0 +1,7 @@ +retrieve\_data module +===================== + +.. automodule:: retrieve_data + :members: + :undoc-members: + :show-inheritance: diff --git a/doc/TechDocs/ush/run_srw_tests.rst b/doc/TechDocs/ush/run_srw_tests.rst new file mode 100644 index 0000000000..d75e1a765f --- /dev/null +++ b/doc/TechDocs/ush/run_srw_tests.rst @@ -0,0 +1,7 @@ +run\_srw\_tests module +====================== + +.. automodule:: run_srw_tests + :members: + :undoc-members: + :show-inheritance: diff --git a/doc/TechDocs/ush/set_cycle_and_obs_timeinfo.rst b/doc/TechDocs/ush/set_cycle_and_obs_timeinfo.rst new file mode 100644 index 0000000000..13ec7b9b73 --- /dev/null +++ b/doc/TechDocs/ush/set_cycle_and_obs_timeinfo.rst @@ -0,0 +1,7 @@ +set\_cycle\_and\_obs\_timeinfo module +===================================== + +.. automodule:: set_cycle_and_obs_timeinfo + :members: + :undoc-members: + :show-inheritance: diff --git a/doc/TechDocs/ush/set_fv3nml_ens_stoch_seeds.rst b/doc/TechDocs/ush/set_fv3nml_ens_stoch_seeds.rst new file mode 100644 index 0000000000..049c01d059 --- /dev/null +++ b/doc/TechDocs/ush/set_fv3nml_ens_stoch_seeds.rst @@ -0,0 +1,7 @@ +set\_fv3nml\_ens\_stoch\_seeds module +===================================== + +.. automodule:: set_fv3nml_ens_stoch_seeds + :members: + :undoc-members: + :show-inheritance: diff --git a/doc/TechDocs/ush/set_fv3nml_sfc_climo_filenames.rst b/doc/TechDocs/ush/set_fv3nml_sfc_climo_filenames.rst new file mode 100644 index 0000000000..bc0a942e82 --- /dev/null +++ b/doc/TechDocs/ush/set_fv3nml_sfc_climo_filenames.rst @@ -0,0 +1,7 @@ +set\_fv3nml\_sfc\_climo\_filenames module +========================================= + +.. 
automodule:: set_fv3nml_sfc_climo_filenames + :members: + :undoc-members: + :show-inheritance: diff --git a/doc/TechDocs/ush/set_gridparams_ESGgrid.rst b/doc/TechDocs/ush/set_gridparams_ESGgrid.rst new file mode 100644 index 0000000000..0728326fa0 --- /dev/null +++ b/doc/TechDocs/ush/set_gridparams_ESGgrid.rst @@ -0,0 +1,7 @@ +set\_gridparams\_ESGgrid module +=============================== + +.. automodule:: set_gridparams_ESGgrid + :members: + :undoc-members: + :show-inheritance: diff --git a/doc/TechDocs/ush/set_gridparams_GFDLgrid.rst b/doc/TechDocs/ush/set_gridparams_GFDLgrid.rst new file mode 100644 index 0000000000..b207ffa986 --- /dev/null +++ b/doc/TechDocs/ush/set_gridparams_GFDLgrid.rst @@ -0,0 +1,7 @@ +set\_gridparams\_GFDLgrid module +================================ + +.. automodule:: set_gridparams_GFDLgrid + :members: + :undoc-members: + :show-inheritance: diff --git a/doc/TechDocs/ush/set_leadhrs.rst b/doc/TechDocs/ush/set_leadhrs.rst new file mode 100644 index 0000000000..b0172264d3 --- /dev/null +++ b/doc/TechDocs/ush/set_leadhrs.rst @@ -0,0 +1,7 @@ +set\_leadhrs module +=================== + +.. automodule:: set_leadhrs + :members: + :undoc-members: + :show-inheritance: diff --git a/doc/TechDocs/ush/set_predef_grid_params.rst b/doc/TechDocs/ush/set_predef_grid_params.rst new file mode 100644 index 0000000000..23c5d30dfe --- /dev/null +++ b/doc/TechDocs/ush/set_predef_grid_params.rst @@ -0,0 +1,7 @@ +set\_predef\_grid\_params module +================================ + +.. automodule:: set_predef_grid_params + :members: + :undoc-members: + :show-inheritance: diff --git a/doc/TechDocs/ush/setup.rst b/doc/TechDocs/ush/setup.rst new file mode 100644 index 0000000000..552eb49d6d --- /dev/null +++ b/doc/TechDocs/ush/setup.rst @@ -0,0 +1,7 @@ +setup module +============ + +.. automodule:: setup + :members: + :undoc-members: + :show-inheritance: diff --git a/doc/TechDocs/ush/update_input_nml.rst b/doc/TechDocs/ush/update_input_nml.rst new file mode 100644 index 0000000000..8f3a51f729 --- /dev/null +++ b/doc/TechDocs/ush/update_input_nml.rst @@ -0,0 +1,7 @@ +update\_input\_nml module +========================= + +.. automodule:: update_input_nml + :members: + :undoc-members: + :show-inheritance: diff --git a/doc/UsersGuide/BackgroundInfo/Components.rst b/doc/UsersGuide/BackgroundInfo/Components.rst index 1ba9349d8d..6df12dbd23 100644 --- a/doc/UsersGuide/BackgroundInfo/Components.rst +++ b/doc/UsersGuide/BackgroundInfo/Components.rst @@ -22,7 +22,7 @@ UFS Preprocessing Utilities (UFS_UTILS) The SRW Application includes a number of pre-processing utilities (UFS_UTILS) that initialize and prepare the model. Since the SRW App provides forecast predictions over a limited area (rather than globally), these utilities generate a regional grid (``regional_esg_grid/make_hgrid``) along with :term:`orography` (``orog``) and surface climatology (``sfc_climo_gen``) files on that grid. Grids include a strip, or "halo," of six cells that surround the regional grid and feed in lateral boundary condition data. Since different grid and orography files require different numbers of :term:`halo` cells, additional utilities handle topography filtering and shave the number of halo points (based on downstream workflow component requirements). The pre-processing software :term:`chgres_cube` is used to convert the raw external model data into initial and lateral boundary condition files in :term:`netCDF` format. These are needed as input to the :term:`FV3` limited area model (:term:`LAM`). 
Additional information about the UFS pre-processing utilities can be found in the :doc:`UFS_UTILS Technical Documentation ` and in the `UFS_UTILS Scientific Documentation `__. -The SRW Application can be initialized from a range of operational initial condition files. It is possible to initialize the model from the Global Forecast System (:term:`GFS`), North American Mesoscale (:term:`NAM`) Forecast System, Rapid Refresh (:term:`RAP`), and High-Resolution Rapid Refresh (:term:`HRRR`) files in Gridded Binary v2 (:term:`GRIB2`) format. GFS files also come in :term:`NEMSIO` format for past dates. +The SRW Application can be initialized from a range of operational initial condition files. It is possible to initialize the model from the Global Forecast System (:term:`GFS`), North American Mesoscale (:term:`NAM`) Forecast System, Rapid Refresh (:term:`RAP`), High-Resolution Rapid Refresh (:term:`HRRR`), and Rapid Refresh Forecast System (:term:`RRFS`) files in Gridded Binary v2 (:term:`GRIB2`) format. GFS files also come in :term:`NEMSIO` format for past dates. .. WARNING:: For GFS data, dates prior to 1 January 2018 may work but are not guaranteed. Public archives of model data can be accessed through the `NOAA Operational Model Archive and Distribution System `__ (NOMADS). Raw external model data may be pre-staged on disk by the user. @@ -38,12 +38,12 @@ Supported model resolutions in this release include 3-, 13-, and 25-km predefine Model Physics --------------- -The Common Community Physics Package (CCPP), described `here `__, supports interoperable atmospheric physics and land surface model options. Atmospheric physics are a set of numerical methods describing small-scale processes such as clouds, turbulence, radiation, and their interactions. The most recent SRW App release (|latestr|) included five supported physics suites: FV3_RRFS_v1beta, FV3_GFS_v16, FV3_WoFS_v0, FV3_HRRR, and FV3_RAP. The FV3_RRFS_v1beta physics suite is being tested for use in the future operational implementation of the Rapid Refresh Forecast System (:term:`RRFS`) planned for 2023-2024, and the FV3_GFS_v16 is an updated version of the physics suite used in the operational Global Forecast System (GFS) v16. A detailed list of CCPP updates since the SRW App v2.1.0 release is available :ref:`here `. A full scientific description of CCPP parameterizations and suites can be found in the `CCPP Scientific Documentation `__, and CCPP technical aspects are described in the :doc:`CCPP Technical Documentation `. The model namelist has many settings beyond the physics options that can optimize various aspects of the model for use with each of the supported suites. Additional information on Stochastic Physics options is available :doc:`here `. +The Common Community Physics Package (CCPP), described `here `_, supports interoperable atmospheric physics and land surface model options. Atmospheric physics are a set of numerical methods describing small-scale processes such as clouds, turbulence, radiation, and their interactions. The most recent SRW App release (|latestr|) included five supported physics suites: FV3_RRFS_v1beta, FV3_GFS_v16, FV3_WoFS_v0, FV3_HRRR, and FV3_RAP. The FV3_RRFS_v1beta physics suite is being tested for use in the future operational implementation of the Rapid Refresh Forecast System (:term:`RRFS`) planned for 2023-2024, and the FV3_GFS_v16 is an updated version of the physics suite used in the operational Global Forecast System (GFS) v16. 
A detailed list of CCPP updates since the SRW App v2.1.0 release is available :ref:`here `. A full scientific description of CCPP parameterizations and suites can be found in the `CCPP Scientific Documentation `_, and CCPP technical aspects are described in the :doc:`CCPP Technical Documentation `. The model namelist has many settings beyond the physics options that can optimize various aspects of the model for use with each of the supported suites. Additional information on Stochastic Physics options is available :doc:`here `. .. note:: SPP is currently only available for specific physics schemes used in the RAP/HRRR physics suite. Users need to be aware of which physics suite definition file (:term:`SDF`) is chosen when turning this option on. Among the supported physics suites, the full set of parameterizations can only be used with the ``FV3_HRRR`` option for ``CCPP_PHYS_SUITE``. -Additionally, a CCPP single-column model (`CCPP-SCM `__) option has also been developed as a child repository. Users can refer to the `CCPP Single Column Model User and Technical Guide `__ for more details. This CCPP-SCM user guide contains a Quick Start Guide with instructions for obtaining the code, compiling, and running test cases, which include five standard test cases and two additional FV3 replay cases (refer to section 5.2 in the CCPP-SCM user guide for more details). Moreover, the CCPP-SCM supports a precompiled version in a docker container, allowing it to be easily executed on NOAA's cloud computing platforms without any issues (see section 2.5 in the CCPP-SCM user guide for more details). +Additionally, a CCPP single-column model (`CCPP-SCM `_) option has also been developed as a child repository. Users can refer to the `CCPP Single Column Model User and Technical Guide `_ for more details. This CCPP-SCM user guide contains a Quick Start Guide with instructions for obtaining the code, compiling, and running test cases, which include five standard test cases and two additional FV3 replay cases (refer to section 5.2 in the CCPP-SCM user guide for more details). Moreover, the CCPP-SCM supports a precompiled version in a docker container, allowing it to be easily executed on NOAA's cloud computing platforms without any issues (see section 2.5 in the CCPP-SCM user guide for more details). The SRW App supports the use of both :term:`GRIB2` and :term:`NEMSIO` input data. The UFS Weather Model ingests initial and lateral boundary condition files produced by :term:`chgres_cube` and outputs files in netCDF format on a specific projection (e.g., Lambert Conformal) in the horizontal direction and model levels in the vertical direction. @@ -57,17 +57,17 @@ The Unified Post Processor (:term:`UPP`) processes raw output from a variety of METplus Verification Suite ============================= -The Model Evaluation Tools (MET) package is a set of statistical verification tools developed by the `Developmental Testbed Center `__ (DTC) for use by the :term:`NWP` community to help them assess and evaluate the performance of numerical weather predictions. MET is the core component of the enhanced `METplus `__ verification framework; the suite also includes the associated database and display systems called METviewer and METexpress. +The Model Evaluation Tools (MET) package is a set of statistical verification tools developed by the `Developmental Testbed Center `_ (DTC) for use by the :term:`NWP` community to help them assess and evaluate the performance of numerical weather predictions. 
MET is the core component of the enhanced `METplus `_ verification framework; the suite also includes the associated database and display systems called METviewer and METexpress. -The METplus verification framework has been integrated into the SRW App to facilitate forecast evaluation. METplus is a verification framework that spans a wide range of temporal scales (warn-on-forecast to climate) and spatial scales (storm to global). It is supported by the `Developmental Testbed Center (DTC) `__. +The METplus verification framework has been integrated into the SRW App to facilitate forecast evaluation. METplus is a verification framework that spans a wide range of temporal scales (warn-on-forecast to climate) and spatial scales (storm to global). It is supported by the `Developmental Testbed Center (DTC) `_. METplus comes preinstalled with :term:`spack-stack` but can also be installed on other systems individually or as part of :term:`HPC-Stack` installation. Users on systems without a previous installation of METplus can follow the :ref:`MET Installation Guide ` and :ref:`METplus Installation Guide ` for individual installation. Currently, METplus *installation* is only supported as part of spack-stack installation; users attempting to install METplus individually or as part of HPC-Stack will need to direct assistance requests to the METplus team. However, METplus *use* is supported on any system with a functioning METplus installation. -The core components of the METplus framework include the statistical driver (MET), the associated database and display systems known as METviewer and METexpress, and a suite of Python wrappers to provide low-level automation and examples, also called use cases. MET is a set of verification tools developed for use by the :term:`NWP` community. It matches up gridded forecast fields with either gridded analyses or point observations and applies configurable methods to compute statistics and diagnostics. Extensive documentation is available in the :doc:`METplus User's Guide ` and :doc:`MET User's Guide `. Documentation for all other components of the framework can be found at the *Documentation* link for each component on the METplus `downloads `__ page. +The core components of the METplus framework include the statistical driver (MET), the associated database and display systems known as METviewer and METexpress, and a suite of Python wrappers to provide low-level automation and examples, also called use cases. MET is a set of verification tools developed for use by the :term:`NWP` community. It matches up gridded forecast fields with either gridded analyses or point observations and applies configurable methods to compute statistics and diagnostics. Extensive documentation is available in the :doc:`METplus User's Guide ` and :doc:`MET User's Guide `. Documentation for all other components of the framework can be found at the *Documentation* link for each component on the METplus `downloads `_ page. Among other techniques, MET provides the capability to compute standard verification scores for comparing deterministic gridded model data to point-based and gridded observations. It also provides ensemble and probabilistic verification methods for comparing gridded model data to point-based or gridded observations. Verification tasks to accomplish these comparisons are defined in the SRW App in :numref:`Table %s `. 
Currently, the SRW App supports the use of :term:`NDAS` observation files (which include conventional point-based surface and upper-air data) `in prepBUFR format `__ for point-based verification. It also supports gridded Climatology-Calibrated Precipitation Analysis (:term:`CCPA`) data for accumulated precipitation evaluation and Multi-Radar/Multi-Sensor (:term:`MRMS`) gridded analysis data for composite reflectivity and :term:`echo top` verification. -METplus is being actively developed by :term:`NCAR`/Research Applications Laboratory (RAL), NOAA/Earth Systems Research Laboratories (`ESRL `__), and NOAA/Environmental Modeling Center (:term:`EMC`), and it is open to community contributions. More details about METplus can be found on the `METplus website `__. +METplus is being actively developed by :term:`NCAR`/Research Applications Laboratory (RAL), NOAA/Earth Systems Research Laboratories (`ESRL `__), and NOAA/Environmental Modeling Center (:term:`EMC`), and it is open to community contributions. More details about METplus can be found on the `METplus website `_. Air Quality Modeling (AQM) Utilities ======================================= diff --git a/doc/UsersGuide/BackgroundInfo/Introduction.rst b/doc/UsersGuide/BackgroundInfo/Introduction.rst index f1a384e025..4b0978ef23 100644 --- a/doc/UsersGuide/BackgroundInfo/Introduction.rst +++ b/doc/UsersGuide/BackgroundInfo/Introduction.rst @@ -4,9 +4,9 @@ Introduction ============== -The Unified Forecast System (:term:`UFS`) is a community-based, coupled, comprehensive Earth modeling system. NOAA's operational model suite for numerical weather prediction (:term:`NWP`) is quickly transitioning to the UFS from a number of different modeling systems. The UFS enables research, development, and contribution opportunities within the broader :term:`Weather Enterprise` (including government, industry, and academia). For more information about the UFS, visit the `UFS Portal `__. +The Unified Forecast System (:term:`UFS`) is a community-based, coupled, comprehensive Earth modeling system. NOAA's operational model suite for numerical weather prediction (:term:`NWP`) is quickly transitioning to the UFS from a number of different modeling systems. The UFS enables research, development, and contribution opportunities within the broader :term:`Weather Enterprise` (including government, industry, and academia). For more information about the UFS, visit the `UFS Portal `_. -The UFS includes `multiple applications `__ that support different forecast durations and spatial domains. This documentation describes the UFS Short-Range Weather (SRW) Application, which targets predictions of atmospheric behavior on a limited spatial domain and on time scales from minutes to several days. The most recent SRW Application includes a prognostic atmospheric model, pre- and post-processing, and a community workflow for running the system end-to-end. These components are documented within this User's Guide and supported through the `GitHub Discussions `__ forum. The SRW App also includes support for a verification package (METplus) for both deterministic and ensemble simulations and support for four stochastically perturbed physics schemes. +The UFS includes `multiple applications `_ that support different forecast durations and spatial domains. This documentation describes the UFS Short-Range Weather (SRW) Application, which targets predictions of atmospheric behavior on a limited spatial domain and on time scales from minutes to several days. 
The most recent SRW Application includes a prognostic atmospheric model, pre- and post-processing, and a community workflow for running the system end-to-end. These components are documented within this User's Guide and supported through the `GitHub Discussions `_ forum. The SRW App also includes support for a verification package (METplus) for both deterministic and ensemble simulations and support for four stochastically perturbed physics schemes. Since the last release, developers have added a variety of features: diff --git a/doc/UsersGuide/BuildingRunningTesting/AQM.rst b/doc/UsersGuide/BuildingRunningTesting/AQM.rst index 6d2ae0f193..7186de6618 100644 --- a/doc/UsersGuide/BuildingRunningTesting/AQM.rst +++ b/doc/UsersGuide/BuildingRunningTesting/AQM.rst @@ -123,7 +123,7 @@ The community AQM configuration assumes that users have :term:`HPSS` access and USE_USER_STAGED_EXTRN_FILES: true EXTRN_MDL_SOURCE_BASEDIR_LBCS: /path/to/data -On Level 1 systems, users can find :term:`ICs/LBCs` in the usual :ref:`input data locations ` under ``FV3GFS/netcdf/2023021700`` and ``FV3GFS/netcdf/2023021706``. Users can also download the data required for the community experiment from the `UFS SRW App Data Bucket `__. +On Level 1 systems, users can find :term:`ICs/LBCs` in the usual :ref:`input data locations ` under ``FV3GFS/netcdf/2023021700`` and ``FV3GFS/netcdf/2023021706``. Users can also download the data required for the community experiment from the `UFS SRW App Data Bucket `__. Users may also wish to change :term:`cron`-related parameters in ``config.yaml``. In the ``config.aqm.community.yaml`` file, which was copied into ``config.yaml``, cron is used for automatic submission and resubmission of the workflow: diff --git a/doc/UsersGuide/BuildingRunningTesting/BuildSRW.rst b/doc/UsersGuide/BuildingRunningTesting/BuildSRW.rst index 3076a5f6eb..73334828da 100644 --- a/doc/UsersGuide/BuildingRunningTesting/BuildSRW.rst +++ b/doc/UsersGuide/BuildingRunningTesting/BuildSRW.rst @@ -35,7 +35,7 @@ Install the Prerequisite Software Stack Users on any sufficiently up-to-date machine with a UNIX-based operating system should be able to install the prerequisite software stack and run the SRW Application. However, a list of prerequisites is available in :numref:`Section %s ` for reference. Users should install or update their system as required before attempting to install the software stack. -Currently, installation of the prerequisite software stack is supported via spack-stack on most systems. :term:`Spack-stack` is a :term:`repository` that provides a Spack-based system to build the software stack required for `UFS `__ applications such as the SRW App. Spack-stack is the software stack validated by the UFS Weather Model (:term:`WM`), and the SRW App has likewise shifted to spack-stack for most Level 1 systems. +Currently, installation of the prerequisite software stack is supported via spack-stack on most systems. :term:`Spack-stack` is a :term:`repository` that provides a Spack-based system to build the software stack required for `UFS `_ applications such as the SRW App. Spack-stack is the software stack validated by the UFS Weather Model (:term:`WM`), and the SRW App has likewise shifted to spack-stack for most Level 1 systems. .. hint:: Skip the spack-stack installation if working on a :srw-wiki:`Level 1 system ` (e.g., Hera, Jet, Derecho, NOAA Cloud), and :ref:`continue to the next section `. 
diff --git a/doc/UsersGuide/BuildingRunningTesting/ContainerQuickstart.rst b/doc/UsersGuide/BuildingRunningTesting/ContainerQuickstart.rst index 9e4f58f0bd..0607a232b5 100644 --- a/doc/UsersGuide/BuildingRunningTesting/ContainerQuickstart.rst +++ b/doc/UsersGuide/BuildingRunningTesting/ContainerQuickstart.rst @@ -81,7 +81,7 @@ On most Level 1 systems, a container named ``ubuntu20.04-intel-ue-1.4.1-srw-dev. * - Hera - /scratch1/NCEPDEV/nems/role.epic/containers * - Jet - - /mnt/lfs4/HFIP/hfv3gfs/role.epic/containers + - /mnt/lfs5/HFIP/hfv3gfs/role.epic/containers * - NOAA Cloud - /contrib/EPIC/containers * - Orion/Hercules [#fn]_ @@ -188,8 +188,8 @@ The SRW App requires input files to run. These include static datasets, initial .. code-block:: console - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/current_srw_release_data/fix_data.tgz - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/current_srw_release_data/gst_data.tgz + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/experiment-user-cases/release-public-v2.2.0/out-of-the-box/fix_data.tgz + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/experiment-user-cases/release-public-v2.2.0/out-of-the-box/gst_data.tgz tar -xzf fix_data.tgz tar -xzf gst_data.tgz @@ -439,4 +439,4 @@ If users have the PBS resource manager installed on their system, the allocation For more information on the ``qsub`` command options, see the `PBS Manual §2.59.3 `__, (p. 1416). -These commands should output a hostname. Users can then run ``ssh ``. After "ssh-ing" to the compute node, they can run the container from that node. To run larger experiments, it may be necessary to allocate multiple compute nodes. \ No newline at end of file +These commands should output a hostname. Users can then run ``ssh ``. After "ssh-ing" to the compute node, they can run the container from that node. To run larger experiments, it may be necessary to allocate multiple compute nodes. diff --git a/doc/UsersGuide/BuildingRunningTesting/DefaultVarsTable.rst b/doc/UsersGuide/BuildingRunningTesting/DefaultVarsTable.rst index faaf9129c2..c22751656c 100644 --- a/doc/UsersGuide/BuildingRunningTesting/DefaultVarsTable.rst +++ b/doc/UsersGuide/BuildingRunningTesting/DefaultVarsTable.rst @@ -20,7 +20,7 @@ Table of Variables in ``config_defaults.yaml`` TEST_VX_FCST_INPUT_BASEDIR, FIXgsm, FIXaer, FIXlut, FIXorg, FIXsfc, FIXshp, FIXcrtm, FIXcrtmupp, EXTRN_MDL_DATA_STORES * - Workflow - WORKFLOW_ID, RELATIVE_LINK_FLAG, USE_CRON_TO_RELAUNCH, CRON_RELAUNCH_INTVL_MNTS, CRONTAB_LINE, LOAD_MODULES_RUN_TASK_FP, EXPT_BASEDIR, EXPT_SUBDIR, EXEC_SUBDIR, - EXPTDIR, DOT_OR_USCORE, EXPT_CONFIG_FN, CONSTANTS_FN, RGNL_GRID_NML_FN, FV3_NML_FN, FV3_NML_BASE_SUITE_FN, FV3_NML_YAML_CONFIG_FN, FV3_NML_BASE_ENS_FN, + EXPTDIR, DOT_OR_USCORE, CONSTANTS_FN, RGNL_GRID_NML_FN, FV3_NML_FN, FV3_NML_BASE_SUITE_FN, FV3_NML_YAML_CONFIG_FN, FV3_NML_BASE_ENS_FN, FV3_EXEC_FN, DATA_TABLE_FN, DIAG_TABLE_FN, FIELD_TABLE_FN, DIAG_TABLE_TMPL_FN, FIELD_TABLE_TMPL_FN, MODEL_CONFIG_FN, NEMS_CONFIG_FN, AQM_RC_FN, AQM_RC_TMPL_FN, FV3_NML_BASE_SUITE_FP, FV3_NML_YAML_CONFIG_FP, FV3_NML_BASE_ENS_FP, DATA_TABLE_TMPL_FP, DIAG_TABLE_TMPL_FP, FIELD_TABLE_TMPL_FP, MODEL_CONFIG_TMPL_FP, NEMS_CONFIG_TMPL_FP, AQM_RC_TMPL_FP, DATA_TABLE_FP, FIELD_TABLE_FP, NEMS_CONFIG_FP, FV3_NML_FP, diff --git a/doc/UsersGuide/BuildingRunningTesting/FIRE.rst b/doc/UsersGuide/BuildingRunningTesting/FIRE.rst new file mode 100644 index 0000000000..09a22975a6 --- /dev/null +++ b/doc/UsersGuide/BuildingRunningTesting/FIRE.rst @@ -0,0 +1,257 @@ +.. 
_UFS_FIRE:
+
+=========================================
+Community Fire Behavior Module (UFS FIRE)
+=========================================
+
+The `Community Fire Behavior Model (CFBM) `_ is a wildland fire model coupled to the UFS Atmospheric Model. The capability to run this code is now available in the UFS Short-Range Weather App for easy use by the community. The `fire_behavior repository `_ is a :term:`submodule` of the UFS Weather Model (WM), coupled through the :term:`NUOPC` Layer to provide direct feedback between the simulated atmosphere and the simulated fire. More information about the CFBM can be found in the :fire-ug:`CFBM Users Guide <>`.
+
+The biggest difference between the UFS FIRE capability and other modes of the UFS SRW is that a special build flag is required to build the coupled fire behavior code, as described in the instructions below. Aside from that build flag, the need for additional input files, and some fire-specific configuration settings, configuring and running an experiment is the same as for any other use of the SRW App.
+
+.. note::
+
+   Although this chapter is the primary documentation resource for running the UFS FIRE configuration, users may need to refer to :numref:`Chapter %s ` and :numref:`Chapter %s ` for additional information on building and running the SRW App, respectively.
+
+Quick Start Guide (UFS FIRE)
+=====================================
+
+Download the Code
+-------------------
+
+Clone the |branch| branch of the authoritative SRW App repository:
+
+.. code-block:: console
+
+   git clone -b develop https://github.com/ufs-community/ufs-srweather-app
+   cd ufs-srweather-app
+
+Checkout Externals
+---------------------
+
+Users must run the ``checkout_externals`` script to collect (or "check out") the individual components of the SRW App from their respective GitHub repositories.
+
+.. code-block:: console
+
+   ./manage_externals/checkout_externals
+
+Build the SRW App with Fire Behavior Enabled
+--------------------------------------------
+
+To build the SRW App with the fire behavior code, use the following command:
+
+.. code-block:: console
+
+   ./devbuild.sh -p=<platform> -a=ATMF
+
+where ``<platform>`` is ``hera``, ``derecho``, or any other Tier 1 platform. The ``-a`` argument indicates the configuration/version of the application to build; in this case, the atmosphere-fire coupling (ATMF).
+
+If UFS FIRE builds correctly, users should see the standard executables listed in :numref:`Table %s `. There are no additional files expected, since the CFBM is coupled to the UFS weather model via the same ``ufs_model`` executable.
+
+Load the |wflow_env| Environment
+--------------------------------------------
+
+Load the appropriate modules for the workflow:
+
+.. code-block:: console
+
+   module use /path/to/ufs-srweather-app/modulefiles
+   module load wflow_<platform>
+
+where ``<platform>`` is ``hera``, ``derecho``, or any other Tier 1 platform.
+
+If the console outputs a message, the user should run the commands specified in the message. For example, if the output says:
+
+.. code-block:: console
+
+   Please do the following to activate conda:
+       > conda activate srw_app
+
+then the user should run |activate|. Otherwise, the user can continue with configuring the workflow.
+
+.. _FIREConfig:
+
+Configure Experiment
+---------------------------
+
+Users will need to configure their experiment by setting parameters in the ``config.yaml`` file. To start, users can copy an example experiment setting into ``config.yaml``:
+
+.. code-block:: console
+
+   cd ush
+   cp config.fire.yaml config.yaml
+
+Users will need to change the ``MACHINE`` and ``ACCOUNT`` variables in ``config.yaml`` to match their system. They may also wish to adjust other experiment settings, especially under the ``fire:`` section, described in further detail below. For more information on other configuration settings, see :numref:`Section %s `.
+
+Activating the fire behavior module is done by setting ``UFS_FIRE: True`` in the ``fire:`` section of your ``config.yaml`` file. If this variable is not specified or is set to false, a normal atmospheric simulation will be run, without fire settings.
+
+.. code-block:: console
+
+   fire:
+     UFS_FIRE: True
+
+The fire module can print additional messages to the log file for debugging; to enable additional log output (which may slow down the integration considerably, especially at higher message levels), set ``FIRE_PRINT_MSG`` > 0:
+
+.. code-block:: console
+
+   fire:
+     FIRE_PRINT_MSG: 1
+
+Additional boundary conditions file
+-----------------------------------
+The CFBM, as an independent, coupled component, runs separately from the atmospheric component of the weather model and requires an additional input file (``geo_em.d01.nc``) that contains fire-specific boundary conditions such as fuel properties. On Level 1 systems, users can find an example file in the usual :ref:`input data locations ` under ``LOCATION``. Users can also download the data required for the community experiment from the `UFS SRW App Data Bucket `__.
+
+Instructions on how to create this file for your own experiment can be found in the :fire-ug:`CFBM Users Guide `.
+
+Once the file is acquired/created, you will need to specify its location in your ``config.yaml`` file with the setting ``FIRE_INPUT_DIR``:
+
+.. code-block:: console
+
+   fire:
+     FIRE_INPUT_DIR: /directory/containing/geo_em/file
+
+Specifying a fire ignition
+---------------------------
+
+The CFBM simulates fires by specifying an "ignition" that will then propagate based on the atmospheric conditions and the specified settings. An ignition can either be a "point ignition" (i.e., a disk of fire of some specified radius around a single location) or a straight-line ignition specified by a start location, an end location, and a specified "radius" (width). The ignition can start at the beginning of the simulation or at a specified later time. The CFBM can support up to 5 different fire ignitions at different places and times in a given simulation.
+
+The CFBM settings are controlled by the :term:`namelist` file ``namelist.fire``. The available settings in this file are described in the :fire-ug:`CFBM Users Guide `, and an example file can be found under ``parm/namelist.fire``. However, there is no need to manually provide or edit this file, as the SRW workflow will create the fire namelist using the user settings in ``config.yaml``. The fire-specific options in SRW are documented in :numref:`Section %s `.
+
+Example fire configuration
+---------------------------
+
+Here is one example of settings that can be specified for a UFS FIRE simulation:
+
+.. code-block:: console
+
+   fire:
+     UFS_FIRE: True
+     FIRE_INPUT_DIR: /home/fire_input
+     DT_FIRE: 0.5
+     OUTPUT_DT_FIRE: 1800
+     FIRE_NUM_IGNITIONS: 1
+     FIRE_IGNITION_ROS: 0.05
+     FIRE_IGNITION_START_LAT: 40.609
+     FIRE_IGNITION_START_LON: -105.879
+     FIRE_IGNITION_END_LAT: 40.609
+     FIRE_IGNITION_END_LON: -105.879
+     FIRE_IGNITION_RADIUS: 250
+     FIRE_IGNITION_START_TIME: 6480
+     FIRE_IGNITION_END_TIME: 7000
+
+In this case, a single fire (``FIRE_NUM_IGNITIONS: 1``) of radius 250 meters (``FIRE_IGNITION_RADIUS: 250``) is ignited at latitude 40.609˚N (``FIRE_IGNITION_START_LAT: 40.609``), longitude 105.879˚W (``FIRE_IGNITION_START_LON: -105.879``), 6480 seconds after the start of the simulation (``FIRE_IGNITION_START_TIME: 6480``), with a rate of spread specified as 0.05 m/s (``FIRE_IGNITION_ROS: 0.05``). This "ignition" ends 7000 seconds after the start of the simulation (``FIRE_IGNITION_END_TIME: 7000``), after which the fire behavior is completely governed by the physics of the fire behavior model (integrated every 0.5 seconds, as specified by ``DT_FIRE``), the input fuel conditions, and the simulated atmospheric conditions.
+
+The CFBM creates output files in :term:`netCDF` format, with the naming scheme ``fire_output_YYYY-MM-DD_hh:mm:ss.nc``. In this case, the output files are written every 30 minutes (``OUTPUT_DT_FIRE: 1800``).
+
+.. note::
+
+   Any of the settings under :fire-ug:`the &fire section of the namelist ` can be specified in the SRW App ``config.yaml`` file under the ``fire:`` section, not just the settings described above. However, any additional settings from ``namelist.fire`` will need to be added to ``config_defaults.yaml`` first; otherwise the check for valid SRW options will fail.
+
+To specify multiple fire ignitions (``FIRE_NUM_IGNITIONS > 1``), the above settings will need to be specified as a list, with one entry per ignition, as sketched below. See :numref:`Section %s ` for more details.
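+
+A minimal sketch of a two-ignition configuration (the values here are purely illustrative, and the exact list syntax for each variable should be checked against the fire-specific options documentation referenced above):
+
+.. code-block:: console
+
+   fire:
+     UFS_FIRE: True
+     FIRE_NUM_IGNITIONS: 2
+     FIRE_IGNITION_ROS: [ 0.05, 0.04 ]
+     FIRE_IGNITION_START_LAT: [ 40.609, 40.666 ]
+     FIRE_IGNITION_START_LON: [ -105.879, -105.940 ]
+     FIRE_IGNITION_END_LAT: [ 40.609, 40.666 ]
+     FIRE_IGNITION_END_LON: [ -105.879, -105.940 ]
+     FIRE_IGNITION_RADIUS: [ 250, 250 ]
+     FIRE_IGNITION_START_TIME: [ 6480, 9000 ]
+     FIRE_IGNITION_END_TIME: [ 7000, 9500 ]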
+
+Generate the Workflow
+------------------------
+
+Generate the workflow:
+
+.. code-block:: console
+
+   ./generate_FV3LAM_wflow.py
+
+Run the Workflow
+------------------
+
+If ``USE_CRON_TO_RELAUNCH`` is set to true in ``config.yaml``, the workflow will run automatically. If it was set to false, users must submit the workflow manually from the experiment directory:
+
+.. code-block:: console
+
+   cd ${EXPT_BASEDIR}/${EXPT_SUBDIR}
+   ./launch_FV3LAM_wflow.sh
+
+Repeat the launch command regularly until a SUCCESS or FAILURE message appears on the terminal window. See :numref:`Section %s ` for more on the ``${EXPT_BASEDIR}`` and ``${EXPT_SUBDIR}`` variables.
+
+Users may check experiment status from the experiment directory with either of the following commands:
+
+.. code-block:: console
+
+   # Check the experiment status (for cron jobs)
+   rocotostat -w FV3LAM_wflow.xml -d FV3LAM_wflow.db -v 10
+
+   # Check the experiment status and relaunch the workflow (for manual jobs)
+   ./launch_FV3LAM_wflow.sh; tail -n 40 log.launch_FV3LAM_wflow
+
+.. _FIRESuccess:
+
+Experiment Output
+--------------------
+
+The workflow run is complete when all tasks display a "SUCCEEDED" message. If everything goes smoothly, users will eventually see a workflow status table similar to the following:
+
+.. code-block:: console
+
+   CYCLE         TASK                     JOBID     STATE       EXIT STATUS   TRIES   DURATION
+   ================================================================================================================================
+   202008131800  make_grid                6498125   SUCCEEDED        0          1        70.0
+   202008131800  make_orog                6498145   SUCCEEDED        0          1        87.0
+   202008131800  make_sfc_climo           6498172   SUCCEEDED        0          1        90.0
+   202008131800  get_extrn_ics            6498126   SUCCEEDED        0          1        46.0
+   202008131800  get_extrn_lbcs           6498127   SUCCEEDED        0          1        46.0
+   202008131800  make_ics_mem000          6498202   SUCCEEDED        0          1        91.0
+   202008131800  make_lbcs_mem000         6498203   SUCCEEDED        0          1       106.0
+   202008131800  run_fcst_mem000          6498309   SUCCEEDED        0          1      1032.0
+   202008131800  run_post_mem000_f000     6498336   SUCCEEDED        0          1        75.0
+   202008131800  run_post_mem000_f001     6498387   SUCCEEDED        0          1        76.0
+   202008131800  run_post_mem000_f002     6498408   SUCCEEDED        0          1        75.0
+   202008131800  run_post_mem000_f003     6498409   SUCCEEDED        0          1        75.0
+   202008131800  run_post_mem000_f004     6498432   SUCCEEDED        0          1        64.0
+   202008131800  run_post_mem000_f005     6498433   SUCCEEDED        0          1        77.0
+   202008131800  run_post_mem000_f006     6498435   SUCCEEDED        0          1        74.0
+   202008131800  integration_test_mem000  6498434   SUCCEEDED        0          1        27.0
+
+In addition to the standard UFS and UPP output described elsewhere in this User's Guide, UFS_FIRE runs produce additional output files :ref:`described above `:
+
+.. code-block:: console
+
+   $ cd /path/to/expt_dir/experiment
+   $ ls 2020081318/fire_output*
+   fire_output_2020-08-13_18:00:00.nc  fire_output_2020-08-13_19:30:00.nc  fire_output_2020-08-13_21:00:00.nc  fire_output_2020-08-13_22:30:00.nc
+   fire_output_2020-08-13_18:30:00.nc  fire_output_2020-08-13_20:00:00.nc  fire_output_2020-08-13_21:30:00.nc  fire_output_2020-08-13_23:00:00.nc
+   fire_output_2020-08-13_19:00:00.nc  fire_output_2020-08-13_20:30:00.nc  fire_output_2020-08-13_22:00:00.nc  fire_output_2020-08-13_23:30:00.nc
+
+These files contain output directly from the fire model (hence their greater output frequency), including variables such as the fire perimeter and area, smoke emitted, and fuel percentage burnt.
+
+.. image:: https://github.com/ufs-community/ufs-srweather-app/wiki/FIRE/ncview.emis_smoke_trim.png
+   :alt: Image of the simulated fire area from an example run
+   :align: center
+
+.. _FIRE-WE2E:
+
+WE2E Tests for FIRE
+=======================
+
+Build the app for FIRE:
+
+.. code-block:: console
+
+   ./devbuild.sh -p=hera -a=ATMF
+
+Run the WE2E tests:
+
+.. code-block:: console
+
+   $ cd /path/to/ufs-srweather-app/tests/WE2E
+   $ ./run_WE2E_tests.py -m hera -a gsd-fv3 -q -t fire
+
+You can also run each test individually if needed:
+
+.. code-block:: console
+
+   $ ./run_WE2E_tests.py -m hera -a gsd-fv3 -q -t UFS_FIRE_one-way-coupled
+   $ ./run_WE2E_tests.py -m hera -a gsd-fv3 -q -t UFS_FIRE_multifire_one-way-coupled
+
diff --git a/doc/UsersGuide/BuildingRunningTesting/Quickstart.rst b/doc/UsersGuide/BuildingRunningTesting/Quickstart.rst
index 36ea8eb7fb..8f31dd9b1a 100644
--- a/doc/UsersGuide/BuildingRunningTesting/Quickstart.rst
+++ b/doc/UsersGuide/BuildingRunningTesting/Quickstart.rst
@@ -50,13 +50,9 @@ For a detailed explanation of how to build and run the SRW App on any supported
 
 #. Load the python environment for the workflow. Users on Level 2-4 systems will need to use one of the existing ``wflow_<platform>`` modulefiles (e.g., ``wflow_macos``) and adapt it to their system. Then, run:
 
-   ..
code-block:: console - - source /path/to/ufs-srweather-app/etc/lmod-setup.sh - module use /path/to/ufs-srweather-app/modulefiles - module load wflow_ - - where ```` refers to a valid machine name (see :numref:`Section %s `). After loading the workflow, users should follow the instructions printed to the console. For example, if the output says: + .. include:: ../../doc-snippets/load-env.rst + + After loading the workflow, users should follow the instructions printed to the console. For example, if the output says: .. code-block:: console @@ -89,10 +85,10 @@ For a detailed explanation of how to build and run the SRW App on any supported CRON_RELAUNCH_INTVL_MNTS: 3 task_get_extrn_ics: USE_USER_STAGED_EXTRN_FILES: true - EXTRN_MDL_SOURCE_BASEDIR_ICS: /lustre/f2/dev/role.epic/contrib/UFS_SRW_data/v2p2/input_model_data/FV3GFS/grib2/${yyyymmddhh} + EXTRN_MDL_SOURCE_BASEDIR_ICS: /gpfs/f5/epic/world-shared/UFS_SRW_data/develop/input_model_data/FV3GFS/grib2/${yyyymmddhh} task_get_extrn_lbcs: USE_USER_STAGED_EXTRN_FILES: true - EXTRN_MDL_SOURCE_BASEDIR_LBCS: /lustre/f2/dev/role.epic/contrib/UFS_SRW_data/v2p2/input_model_data/FV3GFS/grib2/${yyyymmddhh} + EXTRN_MDL_SOURCE_BASEDIR_LBCS: /gpfs/f5/epic/world-shared/UFS_SRW_data/develop/input_model_data/FV3GFS/grib2/${yyyymmddhh} Users on a different system would update the machine, account, and data paths accordingly. Additional changes may be required based on the system and experiment. More detailed guidance is available in :numref:`Section %s `. Parameters and valid values are listed in :numref:`Section %s `. diff --git a/doc/UsersGuide/BuildingRunningTesting/RunSRW.rst b/doc/UsersGuide/BuildingRunningTesting/RunSRW.rst index 9d42aaf0dc..b66b399652 100644 --- a/doc/UsersGuide/BuildingRunningTesting/RunSRW.rst +++ b/doc/UsersGuide/BuildingRunningTesting/RunSRW.rst @@ -55,14 +55,14 @@ The SRW App requires input files to run. These include static datasets, initial - File location * - Derecho - /glade/work/epicufsrt/contrib/UFS_SRW_data/|data|/input_model_data - * - Gaea (C3/C4/C5) - - /lustre/f2/dev/role.epic/contrib/UFS_SRW_data/|data|/input_model_data/ + * - Gaea + - /gpfs/f5/epic/world-shared/UFS_SRW_data/|data|/input_model_data/ * - Hera - /scratch1/NCEPDEV/nems/role.epic/UFS_SRW_data/|data|/input_model_data/ * - Hercules - /work/noaa/epic/role-epic/contrib/UFS_SRW_data/|data|/input_model_data/ * - Jet - - /mnt/lfs4/HFIP/hfv3gfs/role.epic/UFS_SRW_data/|data|/input_model_data/ + - /mnt/lfs5/HFIP/hfv3gfs/role.epic/UFS_SRW_data/|data|/input_model_data/ * - NOAA Cloud - /contrib/EPIC/UFS_SRW_data/|data|/input_model_data/ * - Orion @@ -134,13 +134,8 @@ Loading the Workflow Environment The |wflow_env| conda/Python environment can be activated in the following way: -.. code-block:: console - - source /path/to/ufs-srweather-app/etc/lmod-setup.sh - module use /path/to/ufs-srweather-app/modulefiles - module load wflow_ - -where ```` refers to a valid machine name (see :numref:`Section %s ` for ``MACHINE`` options). In a csh shell environment, users should replace ``lmod-setup.sh`` with ``lmod-setup.csh``. +.. include:: ../../doc-snippets/load-env.rst +In a csh shell environment, users should replace ``lmod-setup.sh`` with ``lmod-setup.csh``. .. note:: If users source the lmod-setup file on a system that doesn't need it, it will not cause any problems (it will simply do a ``module purge``). @@ -155,7 +150,7 @@ The ``wflow_`` modulefile will then output instructions to activate th then the user should run |activate|. 
This activates the |wflow_env| conda environment, and the user typically sees |prompt| in front of the Terminal prompt at this point. .. note:: - If users do not use the wflow module to load conda, ``conda`` will need to be initialized before running ``conda activate srw_app`` command. Depending on the user's system and login setup, this may be accomplished in a variety of ways. Conda initialization usually involves the following command: ``source /etc/profile.d/conda.sh``, where ```` is the base conda installation directory and by default will be the full path to ``ufs-srweather-app/conda``. + If users do not use the ``wflow_`` module to load conda, ``conda`` will need to be initialized before running the ``conda activate srw_app`` command. Depending on the user's system and login setup, this may be accomplished in a variety of ways. Conda initialization usually involves the following command: ``source /etc/profile.d/conda.sh``, where ```` is the base conda installation directory and by default will be the full path to ``ufs-srweather-app/conda``. After loading the workflow environment, users may continue to :numref:`Section %s ` for instructions on setting the experiment configuration parameters. @@ -203,15 +198,6 @@ The user must set the specifics of their experiment configuration in a ``config. * - ACCOUNT - "" - "an_account" - * - CCPA_OBS_DIR - - "{{ workflow.EXPTDIR }}/obs_data/ccpa/proc" - - "" - * - MRMS_OBS_DIR - - "{{ workflow.EXPTDIR }}/obs_data/mrms/proc" - - "" - * - NDAS_OBS_DIR - - "{{ workflow.EXPTDIR }}/obs_data/ndas/proc" - - "" * - USE_CRON_TO_RELAUNCH - false - false @@ -269,9 +255,6 @@ The user must set the specifics of their experiment configuration in a ``config. * - NUM_ENS_MEMBERS - 1 - 2 - * - VX_FCST_MODEL_NAME - - '{{ nco.NET_default }}.{{ task_run_post.POST_OUTPUT_DOMAIN_NAME }}' - - FV3_GFS_v16_CONUS_25km .. _GeneralConfig: @@ -549,6 +532,7 @@ The ``data:`` section of the machine file can point to various data sources that netcdf: /Users/username/DATA/UFS/FV3GFS/netcdf RAP: /Users/username/DATA/UFS/RAP/grib2 HRRR: /Users/username/DATA/UFS/HRRR/grib2 + RRFS: /Users/username/DATA/UFS/RRFS/grib2 This can be helpful when conducting multiple experiments with different types of data. @@ -584,7 +568,7 @@ the same cycle starting date/time and forecast hours. Other parameters may diffe Cartopy Shapefiles ````````````````````` -The Python plotting tasks require a path to the directory where the Cartopy Natural Earth shapefiles are located. The medium scale (1:50m) cultural and physical shapefiles are used to create coastlines and other geopolitical borders on the map. On :srw-wiki:`Level 1 ` systems, this path is already set in the system's machine file using the variable ``FIXshp``. Users on other systems will need to download the shapefiles and update the path of ``$FIXshp`` in the machine file they are using (e.g., ``$SRW/ush/machine/macos.yaml`` for a generic MacOS system, where ``$SRW`` is the path to the ``ufs-srweather-app`` directory). The subset of shapefiles required for the plotting task can be obtained from the `SRW Data Bucket `__. The full set of medium-scale (1:50m) Cartopy shapefiles can be downloaded `here `__. +The Python plotting tasks require a path to the directory where the Cartopy Natural Earth shapefiles are located. The medium scale (1:50m) cultural and physical shapefiles are used to create coastlines and other geopolitical borders on the map.
On :srw-wiki:`Level 1 ` systems, this path is already set in the system's machine file using the variable ``FIXshp``. Users on other systems will need to download the shapefiles and update the path of ``$FIXshp`` in the machine file they are using (e.g., ``$SRW/ush/machine/macos.yaml`` for a generic MacOS system, where ``$SRW`` is the path to the ``ufs-srweather-app`` directory). The subset of shapefiles required for the plotting task can be obtained from the `SRW Data Bucket `__. The full set of medium-scale (1:50m) Cartopy shapefiles can be downloaded `here `__. Task Configuration ````````````````````` @@ -633,15 +617,21 @@ The output files (in ``.png`` format) will be located in the ``postprd`` directo * To configure an experiment to run METplus verification tasks, see the :ref:`next section `. * Otherwise, skip to :numref:`Section %s ` to generate the workflow. + .. _VXConfig: Configure METplus Verification Suite (Optional) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -Users who want to use the METplus verification suite to evaluate their forecasts need to add additional information to their machine file (``ush/machine/.yaml``) or their ``config.yaml`` file. Other users may skip to the next step (:numref:`Section %s: Generate the SRW App Workflow `). +Users who want to use the METplus verification (VX) suite to evaluate their forecasts or to evaluate +a staged forecast (e.g. from another forecasting system) need to add additional information to their +machine file (``ush/machine/.yaml``) or their ``config.yaml`` file. Other users may skip +to the next step (:numref:`Section %s: Generate the SRW App Workflow `). + +To use METplus verification, MET and METplus modules need to be installed on the system. .. note:: - If METplus users update their METplus installation, they must update the module load statements in ``ufs-srweather-app/modulefiles/tasks//run_vx.local`` to correspond to their system's updated installation: + If users update their METplus installation, they must also update the module load statements in ``ufs-srweather-app/modulefiles/tasks//run_vx.local`` to correspond to their system's updated installation: .. code-block:: console @@ -649,15 +639,54 @@ Users who want to use the METplus verification suite to evaluate their forecasts module load met/ module load metplus/ -To use METplus verification, MET and METplus modules need to be installed. To turn on verification tasks in the workflow, include the ``parm/wflow/verify_*.yaml`` file(s) in the ``rocoto: tasks: taskgroups:`` section of ``config.yaml``. For example: - -.. code-block:: console - rocoto: - tasks: - taskgroups: '{{ ["parm/wflow/prep.yaml", "parm/wflow/coldstart.yaml", "parm/wflow/post.yaml", "parm/wflow/verify_pre.yaml", "parm/wflow/verify_det.yaml"]|include }}' +Background +`````````````` +Whether generated by the SRW App or another forecasting system, a forecasting experiment consists +of one or more forecast periods known as cycles. If there is one forecast per cycle, the experiment +is referred to briefly as a deterministic forecast, and if there are multiple, it is referred to as +an ensemble forecast. Verification of a deterministic forecast is known (unsurprisingly) as +deterministic VX, while verification of an ensemble forecast as a whole is known as ensemble VX. +It is also possible to consider each member of an ensemble separately and verify each such member +deterministically. 
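+
+For concreteness, this distinction is controlled in the SRW App by a pair of settings in the ``global:``
+section of ``config.yaml`` (both are discussed further below); a minimal sketch, with an illustrative
+member count, is:
+
+.. code-block:: console
+
+   global:
+     DO_ENSEMBLE: True    # False (the default) for a deterministic forecast
+     NUM_ENS_MEMBERS: 2   # required when DO_ENSEMBLE is True; illustrative value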
+ +The SRW App allows users to include various tasks that perform deterministic and/or ensemble VX in the +Rocoto XML that defines the workflow. The forecast files to be verified may be generated as part +of the SRW experiment that is performing the verification, or they may be pre-generated files that +are staged somewhere on disk. In the latter case, the forecast files may have been generated from a +previous SRW experiment, or they may have been generated from another forecasting system altogether +(see :numref:`Section %s ` for the procedure to stage forecast files). In the SRW +App, the flag ``DO_ENSEMBLE`` in the ``global:`` section of ``config.yaml`` specifies whether the +(generated or staged) forecast files to be verified constitute a deterministic or an ensemble forecast. +Setting ``DO_ENSEMBLE: False`` (the default) causes the SRW App workflow to assume that the forecast +is deterministic, while setting ``DO_ENSEMBLE: True`` causes it to assume that the forecast is an ensemble. +In the latter case, the number of ensemble members must be specified via the variable ``NUM_ENS_MEMBERS``, +also found in the ``global:`` section of ``config.yaml``. + +Both deterministic and ensemble VX require observation and forecast files as well as possible preprocessing +of those files. Thus, whenever deterministic or ensemble VX tasks are included in the workflow, preprocessing +(meta)tasks must also be included that check for the presence of the required obs and forecast files on disk, +retrieve obs files if necessary from a data store such as NOAA HPSS (see note below regarding forecast files), +and preprocess both types of files as needed. We refer to these collectively as the VX preprocessing tasks. -:numref:`Table %s ` indicates which functions each ``verify_*.yaml`` file configures. Users must add ``verify_pre.yaml`` anytime they want to run verification (VX); it runs preprocessing tasks that are necessary for both deterministic and ensemble VX. Then users can add ``verify_det.yaml`` for deterministic VX or ``verify_ens.yaml`` for ensemble VX (or both). Note that ensemble VX requires the user to be running an ensemble forecast or to stage ensemble forecast files in an appropriate location. +.. note:: + Currently, the SRW App workflow does not support the ability to retrieve forecast files from data stores; + these must either be generated by the forecast model in the SRW App or be manually staged by the user. + See :numref:`Section %s ` for details. + + +Adding VX Tasks to the Workflow +`````````````````````````````````` +To add verification tasks to the workflow, users must include the VX taskgroup files ``verify_pre.yaml``, +``verify_det.yaml``, and/or ``verify_ens.yaml`` (all located in the ``parm/wflow`` directory) in the ``rocoto: +tasks: taskgroups:`` section of ``config.yaml``. :numref:`Table %s ` specifies the set of workflow +VX (meta)tasks that each ``verify_*.yaml`` file defines. As implied above, users must add ``verify_pre.yaml`` +to ``rocoto: tasks: taskgroups:`` anytime they want to run deterministic and/or ensemble VX because this +file contains the VX preprocessing tasks that are required by both VX types.
Then users can add ``verify_det.yaml`` +to run deterministic VX either on a deterministic forecast or on each member of an ensemble forecast, they +can add ``verify_ens.yaml`` to run ensemble VX on an ensemble forecast, or they can add both if they want to +run both deterministic and ensemble VX on an ensemble forecast (where the deterministic VX is performed on +each member of the ensemble). .. _VX-yamls: @@ -665,43 +694,267 @@ To use METplus verification, MET and METplus modules need to be installed. To t :widths: 20 50 :header-rows: 1 - * - File - - Description - * - verify_pre.yaml - - Contains (meta)tasks that are prerequisites for both deterministic and ensemble verification (vx) - * - verify_det.yaml - - Perform deterministic vx - * - verify_ens.yaml - - Perform ensemble vx (must set ``DO_ENSEMBLE: true`` in ``config.yaml``) - -The ``verify_*.yaml`` files include the definitions of several common verification tasks by default. Individual verification tasks appear in :numref:`Table %s `. The tasks in the ``verify_*.yaml`` files are independent of each other, so users may want to turn some off depending on the needs of their experiment. To turn off a task, simply include its entry from ``verify_*.yaml`` as an empty YAML entry in ``config.yaml``. For example, to turn off PointStat tasks: + * - Taskgroup File + - Taskgroup Description + * - ``verify_pre.yaml`` + - Defines (meta)tasks that run the VX preprocessing tasks that are prerequisites for both deterministic + and ensemble VX. + * - ``verify_det.yaml`` + - Defines (meta)tasks that perform deterministic VX on a single forecast or on each member of an ensemble + forecast (the latter requires ``DO_ENSEMBLE`` and ``NUM_ENS_MEMBERS`` in ``config.yaml`` to be set to + ``True`` and the number of ensemble members, respectively). + * - ``verify_ens.yaml`` + - Defines (meta)tasks that perform ensemble VX on an ensemble of forecasts as a whole (requires ``DO_ENSEMBLE`` + and ``NUM_ENS_MEMBERS`` in ``config.yaml`` to be set to ``True`` and the number of ensemble members, + respectively). + +For example, to enable deterministic VX, ``rocoto: tasks: taskgroups:`` may be set as follows: .. code-block:: console rocoto: tasks: - taskgroups: '{{ ["parm/wflow/prep.yaml", "parm/wflow/coldstart.yaml", "parm/wflow/post.yaml", "parm/wflow/verify_pre.yaml", "parm/wflow/verify_det.yaml"]|include }}' - metatask_vx_ens_member: - metatask_PointStat_mem#mem#: + taskgroups: '{{ ["parm/wflow/prep.yaml", "parm/wflow/coldstart.yaml", "parm/wflow/post.yaml", + "parm/wflow/verify_pre.yaml", "parm/wflow/verify_det.yaml"]|include }}' + +This setting can apply to either a deterministic or an ensemble forecast. In the latter case, it will +cause deterministic VX to be performed on each member of the ensemble (but not on the ensemble as a +whole). Note that with this setting, the UFS Weather Model will be run as part of the workflow to generate +forecast output because ``prep.yaml``, ``coldstart.yaml``, and ``post.yaml`` are also included in +``rocoto: tasks: taskgroups:``. Whether these forecasts are deterministic or ensemble depends on +whether ``DO_ENSEMBLE`` in ``config.yaml`` is set to ``False`` or ``True``, respectively (and, if +``True``, ``NUM_ENS_MEMBERS`` must be set to the number of ensemble members). Similarly, to enable +ensemble VX for an ensemble forecast as well as deterministic VX for each member of that ensemble, +``rocoto: tasks: taskgroups:`` may be set as follows: +..
code-block:: console + + rocoto: + tasks: + taskgroups: '{{ ["parm/wflow/prep.yaml", "parm/wflow/coldstart.yaml", "parm/wflow/post.yaml", + "parm/wflow/verify_pre.yaml", "parm/wflow/verify_det.yaml", "parm/wflow/verify_ens.yaml"]|include }}' + +If deterministic VX of each ensemble member is not desired, ``verify_det.yaml`` must be left out of the +above. Note that, as in the previous example, this setting of ``rocoto: tasks: taskgroups:`` will cause +the workflow to run the UFS Weather Model to generate forecast output because ``prep.yaml``, ``coldstart.yaml``, +and ``post.yaml`` are again included, but in this case, ``DO_ENSEMBLE`` **must be** set to ``True`` (and +``NUM_ENS_MEMBERS`` set appropriately) in ``config.yaml`` because inclusion of ``verify_ens.yaml`` requires +that the forecast be an ensemble one. + +If users want to manually stage the forecast files instead of generating them with the SRW App's native weather +model (see :numref:`Section %s ` for the procedure), they must exclude ``prep.yaml``, +``coldstart.yaml``, and ``post.yaml`` from the examples above. Also, regardless of whether the forecast +files are generated by the SRW App or staged manually by the user, if the forecast to be verified is an +ensemble one, in the ``global:`` section of ``config.yaml`` users must set ``DO_ENSEMBLE`` to ``True`` +and ``NUM_ENS_MEMBERS`` to the number of ensemble members. This tells the workflow to look for multiple +forecasts for each cycle instead of just one (as well as the number of such forecasts). More information about configuring the ``rocoto:`` section can be found in :numref:`Section %s `. -If users have access to NOAA :term:`HPSS` but have not pre-staged the data, the default ``verify_pre.yaml`` taskgroup will activate the tasks, and the workflow will attempt to download the appropriate data from NOAA HPSS. In this case, the ``*_OBS_DIR`` paths must be set to the location where users want the downloaded data to reside. -Users who do not have access to NOAA HPSS and do not have the data on their system will need to download :term:`CCPA`, :term:`MRMS`, and :term:`NDAS` data manually from collections of publicly available data, such as the ones listed `here `__. +VX Taskgroup Organization and VX Field Groups +````````````````````````````````````````````````` +The VX (meta)tasks in the ``verify_*.yaml`` taskgroup files are described in detail in :numref:`Table %s +`. They are organized as follows. + +The (meta)tasks in ``verify_pre.yaml`` each +operate on a single observation (obs) type (except for ``metatask_check_post_output_all_mems``, which operates on the +forecast(s) and checks for the presence of all necessary forecast files), while the ones in ``verify_det.yaml`` +and ``verify_ens.yaml`` operate on one or more verification field groups. A verification field group +represents one or more meteorological fields that are operated on (e.g. verified) together in a single +call to one of the METplus tools (such as GridStat, PointStat, GenEnsProd, and EnsembleStat), and each +field group has associated with it an obs type against which those forecast fields are verified. The +set of valid VX field groups, the obs types they are associated with, and a brief description of the +fields they include are given in :numref:`Table %s `. -Users who have already staged the observation data needed for METplus (i.e., the :term:`CCPA`, :term:`MRMS`, and :term:`NDAS` data) on their system should set the path to this data in ``config.yaml``. +.. _VXFieldGroupDescsTable: + +..
list-table:: Valid Verification Field Groups and Descriptions + :widths: 20 20 60 + :header-rows: 1 + + * - VX Field Group + - Associated Obs Type + - Fields Included in Group + * - APCP + - CCPA + - Accumulated precipitation for the accumulation intervals specified in ``VX_APCP_ACCUMS_HRS`` + * - ASNOW + - NOHRSC + - Accumulated snowfall for the accumulation intervals specified in ``VX_ASNOW_ACCUMS_HRS`` + * - REFC + - MRMS + - Composite reflectivity + * - RETOP + - MRMS + - Echo top + * - SFC + - NDAS + - Various surface and near-surface fields (e.g., at the surface, 2 m, 10 m, etc.) + * - UPA + - NDAS + - Various upper-air fields (e.g., at 800 mb, 500 mb, etc.) + +The ``VX_FIELD_GROUPS`` list in the ``verification:`` section of ``config.yaml`` specifies the VX field +groups for which to run verification. In order to avoid unwanted computation, the Rocoto XML will include +only those (meta)tasks that operate on field groups or obs types associated with field groups in ``VX_FIELD_GROUPS``. +Thus, inclusion of a ``verify_*.yaml`` taskgroup file under the +``rocoto: tasks: taskgroups:`` section of ``config.yaml`` does not mean that all the (meta)tasks in that +file will be included in the workflow. For example, setting: .. code-block:: console + VX_FIELD_GROUPS: [ 'APCP', 'REFC', 'RETOP', 'SFC', 'UPA' ] + +in ``config.yaml`` and including all three taskgroups ``verify_*.yaml`` in ``rocoto: tasks: taskgroups:`` +will add to the Rocoto XML the VX (meta)tasks for all valid field groups except those for accumulated +snowfall (``'ASNOW'``) and its associated obs type (:term:`NOHRSC`). In other words, all the (meta)tasks +in :numref:`Table %s ` will be included in the Rocoto XML except for those +associated with the :term:`NOHRSC` obs type and the ``'ASNOW'`` field group. Users might want to set +``VX_FIELD_GROUPS`` in this way, for example, because the forecast experiment they are verifying is for a +summer period for which ``ASNOW`` is not relevant. + + +Staging Observation Files +`````````````````````````````````` +The taskgroup in ``verify_pre.yaml`` defines a set of workflow tasks named ``get_obs_*``, where the ``*`` +represents any one of the supported obs types: :term:`CCPA`, :term:`NOHRSC`, :term:`MRMS`, and :term:`NDAS`. These ``get_obs_*`` tasks +will first check on disk for the existence of the obs files required for VX using the locations specified +by the variables ``*_OBS_DIR`` and ``OBS_*_FN_TEMPLATES[1,3,...]`` in the ``verification:`` section of +``config.yaml``. The ``*_OBS_DIR`` variables are the base directories in which the obs files are or should be +staged, and the ``OBS_*_FN_TEMPLATES[1,3,...]`` are the file name templates (with METplus time strings +used for templating; see example below). Thus, the templates for the full paths to the obs files are +given by: + +.. code-block:: console + + {*_OBS_DIR}/{OBS_*_FN_TEMPLATES[1,3,...]} + +The contents of the ``OBS_*_FN_TEMPLATES`` list come in pairs, where the first element +of each pair (with even-numbered indices ``[0,2,...]``) refers to the field group, +while the second element (with odd-numbered indices ``[1,3,...]``) is the file name +template for the corresponding set of files in that obs type.
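+
+For instance, for an obs type whose files contain two field groups (MRMS is one such type; its full
+default templates appear in the example further below), the pair structure can be annotated as follows,
+with the file name templates shortened here to placeholders for readability:
+
+.. code-block:: console
+
+   OBS_MRMS_FN_TEMPLATES: [ 'REFC', '<REFC file name template>',      # first pair: indices [0] and [1]
+                            'RETOP', '<RETOP file name template>' ]   # second pair: indices [2] and [3]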
Note that ``OBS_*_FN_TEMPLATES[1,3,...]`` may include leading subdirectories and are +relative to the obs type's ``*_OBS_DIR``. + +If the obs files exist at the locations specified by ``{*_OBS_DIR}/{OBS_*_FN_TEMPLATES[1,3,...]}``, then the ``get_obs_*`` tasks will +succeed, and the workflow will move on to subsequent tasks. If one or more obs files do not exist, the +``get_obs_*`` tasks will attempt to retrieve the required files from a data store such as NOAA HPSS and +place them in the locations specified by ``{*_OBS_DIR}/{OBS_*_FN_TEMPLATES[1,3,...]}``. Assuming +that attempt is successful, the workflow will move on to subsequent tasks. Thus: + + * Users who have the obs files already available (staged) on their system only need to set ``*_OBS_DIR`` + and ``OBS_*_FN_TEMPLATES[1,3,...]`` in ``config.yaml`` to match those staging locations and file names. + + * Users who do not have the obs files available on their systems and do not have access to NOAA HPSS + need to download :term:`CCPA`, :term:`NOHRSC`, :term:`MRMS`, and/or :term:`NDAS` files manually + from collections of publicly available data. + Then, as above, they must set ``*_OBS_DIR`` and ``OBS_*_FN_TEMPLATES[1,3,...]`` to match those + staging locations and file names. + + * Users who have access to a data store that hosts the necessary files (e.g. NOAA HPSS) do not need to + manually stage the obs data because the ``get_obs_*`` tasks will retrieve the necessary obs and place + them in the locations specified by ``*_OBS_DIR`` and ``OBS_*_FN_TEMPLATES[1,3,...]``. By default, + the files will be placed under the experiment directory, but + users may change the values of these variables if they want the retrieved files to be placed elsewhere. + + +As an example, consider a case in which all four types of obs are needed for verification. Then ``*_OBS_DIR`` +and ``OBS_*_FN_TEMPLATES`` might be set as follows: + +.. code-block:: console + + verification: + + CCPA_OBS_DIR: /path/to/UFS_SRW_data/develop/obs_data/ccpa + NOHRSC_OBS_DIR: /path/to/UFS_SRW_data/develop/obs_data/nohrsc + MRMS_OBS_DIR: /path/to/UFS_SRW_data/develop/obs_data/mrms + NDAS_OBS_DIR: /path/to/UFS_SRW_data/develop/obs_data/ndas + + OBS_CCPA_FN_TEMPLATES: [ 'APCP', '{valid?fmt=%Y%m%d}/ccpa.t{valid?fmt=%H}z.01h.hrap.conus.gb2' ] + OBS_NOHRSC_FN_TEMPLATES: [ 'ASNOW', 'sfav2_CONUS_6h_{valid?fmt=%Y%m%d%H}_grid184.grb2' ] + OBS_MRMS_FN_TEMPLATES: [ 'REFC', '{valid?fmt=%Y%m%d}/MergedReflectivityQCComposite_00.50_{valid?fmt=%Y%m%d}-{valid?fmt=%H%M%S}.grib2', + 'RETOP', '{valid?fmt=%Y%m%d}/EchoTop_18_00.50_{valid?fmt=%Y%m%d}-{valid?fmt=%H%M%S}.grib2' ] + OBS_NDAS_FN_TEMPLATES: [ 'SFC_UPA', 'prepbufr.ndas.{valid?fmt=%Y%m%d%H}' ] + +Now further consider the CCPA obs type. If one of the days encompassed by the forecast(s) is 20240429, +then the ``get_obs_ccpa`` task associated with this day will check for the existence of the set of obs +files given by + +``/path/to/UFS_SRW_data/develop/obs_data/ccpa/20240429/ccpa.t{HH}z.01h.hrap.conus.gb2`` + +where ``{HH}`` takes on all hours of this day at which the verification requires CCPA obs. For example, +if performing (deterministic or ensemble) VX on 1-hour APCP for a 3-hour forecast that starts at 06z, +``{HH}`` will take on the values 07, 08, and 09. Then the files that ``get_obs_ccpa`` will look for +are: + +..
code-block:: console + + /path/to/UFS_SRW_data/develop/obs_data/ccpa/20240429/ccpa.t07z.01h.hrap.conus.gb2 + /path/to/UFS_SRW_data/develop/obs_data/ccpa/20240429/ccpa.t08z.01h.hrap.conus.gb2 + /path/to/UFS_SRW_data/develop/obs_data/ccpa/20240429/ccpa.t09z.01h.hrap.conus.gb2 + +If all these exist, ``get_obs_ccpa`` will simply confirm their existence and will not need to retrieve +any files. If not, it will try to retrieve the files from a data store such as NOAA HPSS and place them +at the above locations. + + +.. _VXStageFcstFiles: + +Staging Forecast Files +`````````````````````````````````` +As noted above, the SRW App currently does not support the ability to retrieve forecast files from +data stores. Thus, the forecast files must either be generated by the forecast model in the SRW App, +or they must be manually staged by the user. Note that manually staged forecast files do not have +to be ones generated by the SRW App; they can be outputs from another forecasting system. + +The locations of the forecast files are defined by the variables ``VX_FCST_INPUT_BASEDIR``, +``FCST_SUBDIR_TEMPLATE``, and ``FCST_FN_TEMPLATE`` in the ``verification:`` section of ``config.yaml``. +``VX_FCST_INPUT_BASEDIR`` is the base directory in which the files are located, ``FCST_SUBDIR_TEMPLATE`` +is a template specifying a set of subdirectories under ``VX_FCST_INPUT_BASEDIR``, and ``FCST_FN_TEMPLATE`` +is the file name template. As with the obs, the templating in ``FCST_SUBDIR_TEMPLATE`` and +``FCST_FN_TEMPLATE`` uses METplus time strings. Thus, the full path template for the forecast files +is given by + +.. code-block:: console + + {VX_FCST_INPUT_BASEDIR}/{FCST_SUBDIR_TEMPLATE}/{FCST_FN_TEMPLATE} + +If the forecast files are manually staged, then these three variables must be set such that they +together point to the locations of the staged files. If they are generated by the SRW App, then +the user does not need to set these variables; they will by default be set to point to the forecast +files. + + +Summary +`````````````` +In summary, users must take the following steps to enable VX tasks in the SRW App workflow: + + #. Add the necessary VX taskgroup files ``verify_*.yaml`` to the ``rocoto: tasks: taskgroups:`` + section of ``config.yaml``. ``verify_pre.yaml`` must always be added; ``verify_det.yaml`` + must be added to enable deterministic VX (either of a deterministic forecast or of each + member of an ensemble forecast); and ``verify_ens.yaml`` must be added to enable ensemble + VX (of an ensemble forecast as a whole). + + #. If performing ensemble verification and/or deterministic verification of ensemble members + (i.e. if the forecast to be verified is an ensemble), in the ``global:`` section of ``config.yaml`` + set ``DO_ENSEMBLE`` to ``True`` and ``NUM_ENS_MEMBERS`` to the number of ensemble members. + + #. If manually staging the obs files (e.g., because users do not have access to NOAA HPSS), set + the variables ``*_OBS_DIR`` and ``OBS_*_FN_TEMPLATES[1,3,...]`` in the ``verification:`` section + of ``config.yaml`` to the locations of these files on disk (where the ``*`` in these variable + names can be any of the supported obs types). + + #. If manually staging the forecast files (as opposed to generating them by running the weather + model in the SRW App), set the forecast file paths to the locations of these files on disk + using the variables ``VX_FCST_INPUT_BASEDIR``, ``FCST_SUBDIR_TEMPLATE``, and ``FCST_FN_TEMPLATE`` + in the ``verification:`` section of ``config.yaml``. + + #.
Specify the field groups to verify in the list ``VX_FIELD_GROUPS`` in the ``verification:`` + section of ``config.yaml``. Valid values for field groups are given in :numref:`Table %s `. + +After completing these steps, users can proceed to generate the experiment (see :numref:`Section %s `). -After adding the VX tasks to the ``rocoto:`` section and the data paths to the ``platform:`` section, users can proceed to generate the experiment, which will perform VX tasks in addition to the default workflow tasks. .. _GenerateWorkflow: @@ -806,87 +1059,223 @@ In addition to the baseline tasks described in :numref:`Table %s ` below. The column "taskgroup" indicates the taskgroup file that must be included in the user's ``config.yaml`` file under ``rocoto: tasks: taskgroups:`` (see :numref:`Section %s ` for more details). For each task, ``mem###`` refers to either ``mem000`` (if running a deterministic forecast) or a specific forecast member number (if running an ensemble forecast). "Metatasks" indicate task definitions that will become more than one workflow task based on different variables, number of hours, etc., as described in the Task Description column. See :numref:`Section %s ` for more details about metatasks. + +The METplus verification tasks and metatasks that are included by default in ``verify_*.yaml`` are described +in :numref:`Table %s `. The ``taskgroup`` entry after the name of each (meta)task indicates +the taskgroup file that must be included in the user's ``config.yaml`` file under ``rocoto: tasks: taskgroups:`` +in order for that (meta)task to be considered for inclusion in the workflow (see :numref:`Section %s ` +for details). As described in :numref:`Section %s `, metatasks define a set of tasks in the +workflow based on multiple values of one or more parameters such as the ensemble member index, the accumulation +interval (for cumulative fields such as accumulated precipitation), and the name of the verification field group +(see description of ``VX_FIELD_GROUPS`` in :numref:`Section %s `). .. _VXWorkflowTasksTable: -.. list-table:: Verification (VX) Workflow Tasks in the SRW App - :widths: 20 20 50 +.. list-table:: Default Verification (VX) Workflow Tasks and Metatasks in the SRW App + :widths: 5 95 :header-rows: 1 - * - Workflow Task - - ``taskgroup`` - Task Description - * - :bolditalic:`task_get_obs_ccpa` - - ``verify_pre.yaml`` - - If user has staged :term:`CCPA` data for verification, checks to ensure that data exists in the specified location (``CCPA_OBS_DIR``). If data does not exist, attempts to retrieve that data from NOAA :term:`HPSS`. - * - :bolditalic:`task_get_obs_ndas` - - ``verify_pre.yaml`` - - If user has staged :term:`NDAS` data for verification, checks to ensure that data exists in the specified location (``NDAS_OBS_DIR``). If data does not exist, attempts to retrieve that data from NOAA HPSS. - * - :bolditalic:`task_get_obs_nohrsc` - - ``verify_pre.yaml`` - - Retrieves and organizes hourly :term:`NOHRSC` data from NOAA HPSS. Can only be run if ``verify_pre.yaml`` is included in a ``tasksgroups`` list *and* user has access to NOAA :term:`HPSS` data. ``ASNOW`` should also be added to the ``VX_FIELDS`` list. - * - :bolditalic:`task_get_obs_mrms` - - ``verify_pre.yaml`` - - If user has staged :term:`MRMS` data for verification, checks to ensure that data exists in the specified location (``MRMS_OBS_DIR``). If data does not exist, attempts to retrieve that data from NOAA HPSS.
- * - :bolditalic:`task_run_MET_Pb2nc_obs` - - ``verify_pre.yaml`` - - Converts files from prepbufr to NetCDF format. - * - :bolditalic:`metatask_PcpCombine_obs` - - ``verify_pre.yaml`` - - Derives 3-hr, 6-hr, and 24-hr accumulated precipitation observations from the 1-hr observation files. In log files, tasks will be named like ``MET_PcpCombine_obs_APCP##h``, where ``##h`` is 03h, 06h, or 24h. - * - :bolditalic:`metatask_check_post_output_all_mems` - - ``verify_pre.yaml`` - - Ensures that required post-processing tasks have completed and that the output exists in the correct form and location for each forecast member. In log files, tasks will be named like ``check_post_output_mem###``. - * - :bolditalic:`metatask_PcpCombine_fcst_APCP_all_accums_all_mems` - - ``verify_pre.yaml`` - - Derives accumulated precipitation forecast for 3-hr, 6-hr, and 24-hr windows for all forecast members based on 1-hr precipitation forecast values. In log files, tasks will be named like ``MET_PcpCombine_fcst_APCP##h_mem###``, where ``##h`` is 03h, 06h, or 24h. - * - :bolditalic:`metatask_PcpCombine_fcst_ASNOW_all_accums_all_mems` - - ``verify_pre.yaml`` - - Derives accumulated snow forecast for 6-hr and 24-hr windows for all forecast members based on 1-hr precipitation forecast values. In log files, tasks will be named like ``MET_PcpCombine_fcst_ASNOW##h_mem###``, where ``##h`` is 06h or 24h. - * - :bolditalic:`metatask_GridStat_CCPA_all_accums_all_mems` - - ``verify_det.yaml`` - - Runs METplus grid-to-grid verification for 1-h, 3-h, 6-h, and 24-h (i.e., daily) accumulated precipitation. In log files, tasks will be named like ``run_MET_GridStat_vx_APCP##h_mem###``. - * - :bolditalic:`metatask_GridStat_NOHRSC_all_accums_all_mems` - - ``verify_det.yaml`` - - Runs METplus grid-to-grid verification for 6-h and 24-h (i.e., daily) accumulated snow. In log files, tasks will be named like ``run_MET_GridStat_vx_ASNOW##h_mem###``. - * - :bolditalic:`metatask_GridStat_MRMS_all_mems` - - ``verify_det.yaml`` - - Runs METplus grid-to-grid verification for composite reflectivity and :term:`echo top`. In log files, tasks will be named like ``run_MET_GridStat_vx_REFC_mem###`` or ``run_MET_GridStat_vx_RETOP_mem###``. - * - :bolditalic:`metatask_PointStat_NDAS_all_mems` - - ``verify_det.yaml`` - - Runs METplus grid-to-point verification for surface and upper-air variables. In log files, tasks will be named like ``run_MET_PointStat_vx_SFC_mem###`` or ``run_MET_PointStat_vx_UPA_mem###``. - * - :bolditalic:`metatask_GenEnsProd_EnsembleStat_CCPA` :raw-html:`

` - (formerly *VX_ENSGRID_##h*) - - ``verify_ens.yaml`` - - Runs METplus grid-to-grid ensemble verification for 1-h, 3-h, 6-h, and 24-h (i.e., daily) accumulated precipitation. In log files, tasks will be named like ``run_MET_EnsembleStat_vx_APCP##h`` or ``run_MET_GenEnsProd_vx_APCP##h``. Can only be run if ``DO_ENSEMBLE: true`` in ``config.yaml``. - * - :bolditalic:`metatask_GenEnsProd_EnsembleStat_NOHRSC` - - ``verify_ens.yaml`` - - Runs METplus grid-to-grid ensemble verification for 6-h and 24-h (i.e., daily) accumulated snow. In log files, tasks will be named like ``run_MET_EnsembleStat_vx_ASNOW##h`` or ``run_MET_GenEnsProd_vx_ASNOW##h``. Can only be run if ``DO_ENSEMBLE: true`` in ``config.yaml``. - * - :bolditalic:`metatask_GenEnsProd_EnsembleStat_MRMS` :raw-html:`

` - (formerly *VX_ENSGRID_[REFC|RETOP]*) - - ``verify_ens.yaml`` - - Runs METplus grid-to-grid ensemble verification for composite reflectivity and :term:`echo top`. In log files, tasks will be named like ``run_MET_GenEnsProd_vx_[REFC|RETOP]`` or ``run_MET_EnsembleStat_vx_[REFC|RETOP]``. Can only be run if ``DO_ENSEMBLE: true`` in ``config.yaml``. - * - :bolditalic:`metatask_GridStat_CCPA_ensmeanprob_all_accums` :raw-html:`

` - (formerly *VX_ENSGRID_MEAN_##h* and *VX_ENSGRID_PROB_##h*) - - ``verify_ens.yaml`` - - Runs METplus grid-to-grid verification for (1) ensemble mean 1-h, 3-h, 6-h, and 24h (i.e., daily) accumulated precipitation and (2) 1-h, 3-h, 6-h, and 24h (i.e., daily) accumulated precipitation probabilistic output. In log files, the ensemble mean subtask will be named like ``run_MET_GridStat_vx_ensmean_APCP##h`` and the ensemble probabilistic output subtask will be named like ``run_MET_GridStat_vx_ensprob_APCP##h``, where ``##h`` is 01h, 03h, 06h, or 24h. Can only be run if ``DO_ENSEMBLE: true`` in ``config.yaml``. - * - :bolditalic:`metatask_GridStat_NOHRSC_ensmeanprob_all_accums` - - ``verify_ens.yaml`` - - Runs METplus grid-to-grid verification for (1) ensemble mean 6-h and 24h (i.e., daily) accumulated snow and (2) 6-h and 24h (i.e., daily) accumulated snow probabilistic output. In log files, the ensemble mean subtask will be named like ``run_MET_GridStat_vx_ensmean_ASNOW##h`` and the ensemble probabilistic output subtask will be named like ``run_MET_GridStat_vx_ensprob_ASNOW##h``, where ``##h`` is 06h or 24h. Can only be run if ``DO_ENSEMBLE: true`` in ``config.yaml``. - * - :bolditalic:`metatask_GridStat_MRMS_ensprob` :raw-html:`

` - (formerly *VX_ENSGRID_PROB_[REFC|RETOP]*) - - ``verify_ens.yaml`` - - Runs METplus grid-to-grid verification for ensemble probabilities for composite reflectivity and :term:`echo top`. In log files, tasks will be named like ``run_MET_GridStat_vx_ensprob_[REFC|RETOP]``. Can only be run if ``DO_ENSEMBLE: true`` in ``config.yaml``. - * - :bolditalic:`metatask_GenEnsProd_EnsembleStat_NDAS` :raw-html:`

` - (formerly *VX_ENSPOINT*) - - ``verify_ens.yaml`` - - Runs METplus grid-to-point ensemble verification for surface and upper-air variables. In log files, tasks will be named like ``run_MET_GenEnsProd_vx_[SFC|UPA]`` or ``run_MET_EnsembleStat_vx_[SFC|UPA]``. Can only be run if ``DO_ENSEMBLE: true`` in ``config.yaml``. - * - :bolditalic:`metatask_PointStat_NDAS_ensmeanprob` :raw-html:`

` - (formerly *VX_ENSPOINT_[MEAN|PROB]*) - - ``verify_ens.yaml`` - - Runs METplus grid-to-point verification for (1) ensemble mean surface and upper-air variables and (2) ensemble probabilities for surface and upper-air variables. In log files, tasks will be named like ``run_MET_PointStat_vx_ensmean_[SFC|UPA]`` or ``run_MET_PointStat_vx_ensprob_[SFC|UPA]``. Can only be run if ``DO_ENSEMBLE: true`` in ``config.yaml``. + + * - :bolditalic:`task_get_obs_ccpa` (``verify_pre.yaml``) + - Checks for existence of staged :term:`CCPA` obs files at locations specified by ``CCPA_OBS_DIR`` + and ``OBS_CCPA_FN_TEMPLATES``. If any files do not exist, it attempts to retrieve all the files + from a data store (e.g. NOAA :term:`HPSS`) and place them in those locations. This task is included + in the workflow only if ``'APCP'`` is included in ``VX_FIELD_GROUPS``. + + * - :bolditalic:`task_get_obs_nohrsc` (``verify_pre.yaml``) + - Checks for existence of staged :term:`NOHRSC` obs files at locations specified by ``NOHRSC_OBS_DIR`` + and ``OBS_NOHRSC_FN_TEMPLATES``. If any files do not exist, it attempts to retrieve all the files + from a data store (e.g. NOAA :term:`HPSS`) and place them in those locations. This task is included + in the workflow only if ``'ASNOW'`` is included in ``VX_FIELD_GROUPS``. + + * - :bolditalic:`task_get_obs_mrms` (``verify_pre.yaml``) + - Checks for existence of staged :term:`MRMS` obs files at locations specified by ``MRMS_OBS_DIR`` + and ``OBS_MRMS_FN_TEMPLATES``. If any files do not exist, it attempts to retrieve all the files + from a data store (e.g. NOAA :term:`HPSS`) and place them in those locations. This task is included + in the workflow only if ``'REFC'`` and/or ``'RETOP'`` are included in ``VX_FIELD_GROUPS``. + + * - :bolditalic:`task_get_obs_ndas` (``verify_pre.yaml``) + - Checks for existence of staged :term:`NDAS` obs files at locations specified by ``NDAS_OBS_DIR`` + and ``OBS_NDAS_FN_TEMPLATES``. If any files do not exist, it attempts to retrieve all the files + from a data store (e.g. NOAA :term:`HPSS`) and place them in those locations. This task is included + in the workflow only if ``'SFC'`` and/or ``'UPA'`` are included in ``VX_FIELD_GROUPS``. + + * - :bolditalic:`task_run_MET_Pb2nc_obs_NDAS` (``verify_pre.yaml``) + - Converts NDAS obs prepbufr files to NetCDF format. + + * - :bolditalic:`metatask_PcpCombine_APCP_all_accums_obs_CCPA` (``verify_pre.yaml``) + - Set of tasks that generate NetCDF files containing observed APCP for the accumulation intervals + specified in ``VX_APCP_ACCUMS_HRS``. Files for accumulation intervals larger than the one + provided in the obs are obtained by adding APCP values over multiple obs accumulation intervals. + For example, if the obs contain 1-hour accumulations and 3-hr accumulation is specified in ``VX_APCP_ACCUMS_HRS``, + then groups of 3 successive 1-hour APCP values in the obs are added to obtain the 3-hour values. + In Rocoto, the tasks under this metatask are named ``run_MET_PcpCombine_APCP{accum_intvl}h_obs_CCPA``, + where ``{accum_intvl}`` is the accumulation interval in hours (e.g., ``01``, ``03``, ``06``, etc.) + for which the task is being run. This metatask is included in the workflow only if ``'APCP'`` is + included in ``VX_FIELD_GROUPS``. + + * - :bolditalic:`metatask_PcpCombine_ASNOW_all_accums_obs_NOHRSC` (``verify_pre.yaml``) + - Set of tasks that generate NetCDF files containing observed ASNOW for the accumulation intervals + specified in ``VX_ASNOW_ACCUMS_HRS``. 
Files for accumulation intervals larger than the one + provided in the obs are obtained by adding ASNOW values over multiple obs accumulation intervals. + For example, if the obs contain 6-hour accumulations and 24-hr accumulation is specified in ``VX_ASNOW_ACCUMS_HRS``, + then groups of 4 successive 6-hour ASNOW values in the obs are added to obtain the 24-hour values. + In Rocoto, the tasks under this metatask are named ``run_MET_PcpCombine_ASNOW{accum_intvl}h_obs_NOHRSC``, + where ``{accum_intvl}`` is the accumulation interval in hours (e.g., ``06``, ``24``, etc.) for which + the task is being run. This metatask is included in the workflow only if ``'ASNOW'`` is included in + ``VX_FIELD_GROUPS``. + + * - :bolditalic:`metatask_check_post_output_all_mems` (``verify_pre.yaml``) + - Set of tasks that ensure that the post-processed forecast files required for verification exist in + the locations specified by ``VX_FCST_INPUT_BASEDIR``, ``FCST_SUBDIR_TEMPLATE``, and ``FCST_FN_TEMPLATE``. + In Rocoto, the tasks under this metatask are named ``check_post_output_mem{mem_indx}``, where ``{mem_indx}`` + is the index of the ensemble forecast member. This takes on the values ``001``, ``002``, ... for an + ensemble of forecasts or just ``000`` for a single deterministic forecast. This metatask is included + in the workflow if at least one other verification task or metatask is included. + + * - :bolditalic:`metatask_PcpCombine_APCP_all_accums_all_mems` (``verify_pre.yaml``) + - Set of tasks that generate NetCDF files containing forecast APCP for the accumulation intervals + specified in ``VX_APCP_ACCUMS_HRS``. Files for accumulation intervals larger than the one + provided in the forecasts are obtained by adding APCP values over multiple forecast accumulation + intervals. For example, if the forecasts contain 1-hour accumulations, but 3-hr accumulation is specified + in ``VX_APCP_ACCUMS_HRS``, then groups of 3 successive 1-hour APCP values in the forecasts are + added to obtain the 3-hour values. In Rocoto, the tasks under this metatask are named + ``run_MET_PcpCombine_APCP{accum_intvl}h_fcst_mem{mem_indx}``, where ``{accum_intvl}`` and + ``{mem_indx}`` are the accumulation interval (in hours, e.g., ``01``, ``03``, ``06``, etc.) and + the ensemble forecast member index (or just ``000`` for a single deterministic forecast) for + which the task is being run. This metatask is included in the workflow only if ``'APCP'`` is + included in ``VX_FIELD_GROUPS``. + + * - :bolditalic:`metatask_PcpCombine_ASNOW_all_accums_all_mems` (``verify_pre.yaml``) + - Set of tasks that generate NetCDF files containing forecast ASNOW for the accumulation intervals + specified in ``VX_ASNOW_ACCUMS_HRS``. Files for accumulation intervals larger than the one + provided in the forecasts are obtained by adding ASNOW values over multiple forecast accumulation + intervals. For example, if the forecasts contain 1-hour accumulations, but 6-hr accumulation is specified + in ``VX_ASNOW_ACCUMS_HRS``, then groups of 6 successive 1-hour ASNOW values in the forecasts are + added to obtain 6-hour values. In Rocoto, the tasks under this metatask are named + ``run_MET_PcpCombine_ASNOW{accum_intvl}h_fcst_mem{mem_indx}``, where ``{accum_intvl}`` and + ``{mem_indx}`` are the accumulation interval (in hours, e.g., ``06``, ``24``, etc.) and the ensemble + forecast member index (or just ``000`` for a single deterministic forecast) for which the task is + being run. 
This metatask is included in the workflow only if ``'ASNOW'`` is included in + ``VX_FIELD_GROUPS``. + + * - :bolditalic:`metatask_GridStat_APCP_all_accums_all_mems` (``verify_det.yaml``) + - Set of tasks that run grid-to-grid verification of accumulated precipitation (represented by the + verification field group ``APCP``) for the intervals specified in ``VX_APCP_ACCUMS_HRS``. In Rocoto, + the tasks under this metatask are named ``run_MET_GridStat_vx_APCP{accum_intvl}h_mem{mem_indx}``, + where ``{accum_intvl}`` and ``{mem_indx}`` are the accumulation interval in hours (e.g., ``01``, + ``03``, ``06``, etc.) and the ensemble forecast member index (or just ``000`` for a single deterministic + forecast) for which the task is being run. This metatask is included in the workflow only if ``'APCP'`` + is included in ``VX_FIELD_GROUPS``. + + * - :bolditalic:`metatask_GridStat_ASNOW_all_accums_all_mems` (``verify_det.yaml``) + - Set of tasks that run grid-to-grid verification of accumulated snowfall (represented by the verification + field group ``ASNOW``) for the intervals specified in ``VX_ASNOW_ACCUMS_HRS``. In Rocoto, the tasks under + this metatask are named ``run_MET_GridStat_vx_ASNOW{accum_intvl}h_mem{mem_indx}``, where ``{accum_intvl}`` + and ``{mem_indx}`` are the accumulation interval in hours (e.g., ``06``, ``24``, etc.) and the ensemble + forecast member index (or just ``000`` for a single deterministic forecast) for which the task is being + run. This metatask is included in the workflow only if ``'ASNOW'`` is included in ``VX_FIELD_GROUPS``. + + * - :bolditalic:`metatask_GridStat_REFC_RETOP_all_mems` (``verify_det.yaml``) + - Set of tasks that run grid-to-grid verification of :term:`composite reflectivity` (represented by + the verification field group ``REFC``) and :term:`echo top` (represented by the verification field + group ``RETOP``). In Rocoto, the tasks under this metatask are named ``run_MET_GridStat_vx_{field_group}_mem{mem_indx}``, + where ``{field_group}`` and ``{mem_indx}`` are the field group (in this case either ``REFC`` or ``RETOP``) + and the ensemble forecast member index (or just ``000`` for a single deterministic forecast) for which + the task is being run. The tasks for ``REFC`` are included in the workflow only if ``'REFC'`` is + included in ``VX_FIELD_GROUPS``, and the ones for ``RETOP`` are included only if ``'RETOP'`` is included + in ``VX_FIELD_GROUPS``. + + * - :bolditalic:`metatask_PointStat_SFC_UPA_all_mems` (``verify_det.yaml``) + - Set of tasks that run grid-to-point verification of surface fields (represented by the verification field + group ``SFC``) and upper-air fields (represented by the verification field group ``UPA``). In Rocoto, + the tasks under this metatask are named ``run_MET_PointStat_vx_{field_group}_mem{mem_indx}``, where + ``{field_group}`` and ``{mem_indx}`` are the field group (in this case either ``SFC`` or ``UPA``) and the + ensemble forecast member index (or just ``000`` for a single deterministic forecast) for which the task + is being run. The tasks for the surface fields are included in the workflow only if ``'SFC'`` is included + in ``VX_FIELD_GROUPS``, and the ones for the upper-air fields are included only if ``'UPA'`` is included + in ``VX_FIELD_GROUPS``. + + * - :bolditalic:`metatask_GenEnsProd_EnsembleStat_APCP_all_accums` (``verify_ens.yaml``) + - Set of tasks that run :term:`MET`'s ``GenEnsProd`` and ``EnsembleStat`` tools on APCP for the intervals + specified in ``VX_APCP_ACCUMS_HRS``.
In Rocoto, the tasks under this metatask that run ``GenEnsProd`` + are named ``run_MET_GenEnsProd_vx_APCP{accum_intvl}h``, and the ones that run ``EnsembleStat`` are + named ``run_MET_EnsembleStat_vx_APCP{accum_intvl}h``, where ``{accum_intvl}`` is the accumulation + interval in hours (e.g., ``01``, ``03``, ``06``, etc.) for which the tasks are being run. This metatask + is included in the workflow only if ``DO_ENSEMBLE`` is set to ``True`` in ``config.yaml`` and ``'APCP'`` + is included in ``VX_FIELD_GROUPS``. + + * - :bolditalic:`metatask_GenEnsProd_EnsembleStat_ASNOW_all_accums` (``verify_ens.yaml``) + - Set of tasks that run :term:`MET`'s ``GenEnsProd`` and ``EnsembleStat`` tools on ASNOW for the intervals + specified in ``VX_ASNOW_ACCUMS_HRS``. In Rocoto, the tasks under this metatask that run ``GenEnsProd`` + are named ``run_MET_GenEnsProd_vx_ASNOW{accum_intvl}h`` and the ones that run ``EnsembleStat`` are + named ``run_MET_EnsembleStat_vx_ASNOW{accum_intvl}h``, where ``{accum_intvl}`` is the accumulation + interval in hours (e.g., ``06``, ``24``, etc.) for which the tasks are being run. This metatask will be + included in the workflow only if ``DO_ENSEMBLE`` is set to ``True`` in ``config.yaml`` and ``'ASNOW'`` + is included in ``VX_FIELD_GROUPS``. + + * - :bolditalic:`metatask_GenEnsProd_EnsembleStat_REFC_RETOP` (``verify_ens.yaml``) + - Set of tasks that run :term:`MET`'s ``GenEnsProd`` and ``EnsembleStat`` tools on REFC (:term:`composite + reflectivity`) and RETOP (:term:`echo top`). In Rocoto, the tasks under this metatask that run + ``GenEnsProd`` are named ``run_MET_GenEnsProd_vx_{field_group}``, and the ones that run ``EnsembleStat`` + are named ``run_MET_EnsembleStat_vx_{field_group}``, where ``{field_group}`` is the field group (in + this case either ``REFC`` or ``RETOP``) for which the tasks are being run. The tasks for ``REFC`` are + included in the workflow only if ``DO_ENSEMBLE`` is set to ``True`` in ``config.yaml`` and ``'REFC'`` + is included in ``VX_FIELD_GROUPS``, and the ones for ``RETOP`` are included only if ``DO_ENSEMBLE`` is + set to ``True`` in ``config.yaml`` and ``'RETOP'`` is included in ``VX_FIELD_GROUPS``. + + * - :bolditalic:`metatask_GenEnsProd_EnsembleStat_SFC_UPA` (``verify_ens.yaml``) + - Set of tasks that run :term:`MET`'s ``GenEnsProd`` and ``EnsembleStat`` tools on surface fields (represented + by the verification field group ``SFC``) and upper-air fields (represented by the verification field group + ``UPA``). In Rocoto, the tasks under this metatask that run ``GenEnsProd`` are named ``run_MET_GenEnsProd_vx_{field_group}``, + and the ones that run ``EnsembleStat`` are named ``run_MET_EnsembleStat_vx_{field_group}``, where ``{field_group}`` + is the field group (in this case either ``SFC`` or ``UPA``) for which the tasks are being run. The tasks for + ``SFC`` are included in the workflow only if ``DO_ENSEMBLE`` is set to ``True`` in ``config.yaml`` and ``'SFC'`` + is included in ``VX_FIELD_GROUPS``, and the ones for ``UPA`` are included only if ``DO_ENSEMBLE`` is set to + ``True`` in ``config.yaml`` and ``'UPA'`` is included in ``VX_FIELD_GROUPS``. + + * - :bolditalic:`metatask_GridStat_APCP_all_accums_ensmeanprob` (``verify_ens.yaml``) + - Set of tasks that run grid-to-grid verification of the ensemble mean of APCP and grid-to-grid probabilistic + verification of the ensemble of APCP forecasts as a whole. 
In Rocoto, the tasks under this metatask for + ensemble mean verification are named ``run_MET_GridStat_vx_APCP{accum_intvl}h_ensmean``, and the ones for + ensemble probabilistic verification are named ``run_MET_GridStat_vx_APCP{accum_intvl}h_ensprob``, where + ``{accum_intvl}`` is the accumulation interval in hours (e.g., ``01``, ``03``, ``06``, etc.) for which the + tasks are being run. This metatask is included in the workflow only if ``DO_ENSEMBLE`` is set to ``True`` + in ``config.yaml`` and ``'APCP'`` is included in ``VX_FIELD_GROUPS``. + + * - :bolditalic:`metatask_GridStat_ASNOW_all_accums_ensmeanprob` (``verify_ens.yaml``) + - Set of tasks that run grid-to-grid verification of the ensemble mean of ASNOW and grid-to-grid probabilistic + verification of the ensemble of ASNOW forecasts as a whole. In Rocoto, the tasks under this metatask for + ensemble mean verification are named ``run_MET_GridStat_vx_ASNOW{accum_intvl}h_ensmean``, and the ones for + ensemble probabilistic verification are named ``run_MET_GridStat_vx_ASNOW{accum_intvl}h_ensprob``, where + ``{accum_intvl}`` is the accumulation interval in hours (e.g., ``06``, ``24``, etc.) for which the + tasks are being run. These tasks will be included in the workflow only if ``DO_ENSEMBLE`` is set to ``True`` + in ``config.yaml`` and ``'ASNOW'`` is included in ``VX_FIELD_GROUPS``. + + * - :bolditalic:`metatask_GridStat_REFC_RETOP_ensprob` (``verify_ens.yaml``) + - Set of tasks that run grid-to-grid probabilistic verification of the ensemble of :term:`composite reflectivity` + (represented by the verification field group ``REFC``) and :term:`echo top` (represented by the field group + ``RETOP``). (Note that there is no grid-to-grid verification of the ensemble mean of these quantities.) + In Rocoto, the tasks under this metatask are named ``run_MET_GridStat_vx_{field_group}_ensprob``, where + ``{field_group}`` is the field group (in this case either ``REFC`` or ``RETOP``) for which the task is + being run. The task for ``REFC`` is included in the workflow only if ``DO_ENSEMBLE`` is set to ``True`` + in ``config.yaml`` and ``'REFC'`` is included in ``VX_FIELD_GROUPS``, and the one for ``RETOP`` is included + only if ``DO_ENSEMBLE`` is set to ``True`` in ``config.yaml`` and ``'RETOP'`` is included in ``VX_FIELD_GROUPS``. + + * - :bolditalic:`metatask_PointStat_SFC_UPA_ensmeanprob` (``verify_ens.yaml``) + - Set of tasks that run grid-to-point verification of the ensemble mean of surface fields (represented by the + verification field group ``SFC``) and upper-air fields (represented by the verification field group ``UPA``) + as well as grid-to-point probabilistic verification of the ensemble of the surface and upper-air field + forecasts as a whole. In Rocoto, the tasks under this metatask for ensemble mean verification are named + ``run_MET_PointStat_vx_{field_group}_ensmean``, and the ones for ensemble probabilistic verification are + named ``run_MET_PointStat_vx_{field_group}_ensprob``, where ``{field_group}`` is the field group (in this + case either ``SFC`` or ``UPA``) for which the task is being run. The tasks for ``SFC`` are included in the + workflow only if ``DO_ENSEMBLE`` is set to ``True`` in ``config.yaml`` and ``'SFC'`` is included in + ``VX_FIELD_GROUPS``, and the ones for ``UPA`` are included only if ``DO_ENSEMBLE`` is set to ``True`` in + ``config.yaml`` and ``'UPA'`` is included in ``VX_FIELD_GROUPS``. + .. _Run: @@ -1180,7 +1569,7 @@ Each task should finish with error code 0.
For example: End exregional_get_extrn_mdl_files.sh at Wed Nov 16 18:08:19 UTC 2022 with error code 0 (time elapsed: 00:00:01) -Check the batch script output file in your experiment directory for a “SUCCESS” message near the end of the file. +Check the batch script output file in your experiment directory for a "SUCCESS" message near the end of the file. .. _RegionalWflowTasks: diff --git a/doc/UsersGuide/BuildingRunningTesting/Tutorial.rst b/doc/UsersGuide/BuildingRunningTesting/Tutorial.rst index 445dee1b8f..2b7f169711 100644 --- a/doc/UsersGuide/BuildingRunningTesting/Tutorial.rst +++ b/doc/UsersGuide/BuildingRunningTesting/Tutorial.rst @@ -11,7 +11,7 @@ Users can run through the entire set of tutorials or jump to the one that intere #. :ref:`Severe Weather Over Indianapolis `: Change physics suites and compare graphics plots. #. :ref:`Cold Air Damming `: Coming soon! #. :ref:`Southern Plains Winter Weather Event `: Coming soon! - #. :ref:`Halloween Storm `: Coming soon! + #. :ref:`Halloween Storm `: Change :term:`IC/LBC ` sources and compare results. #. :ref:`Hurricane Barry `: Coming soon! Each section provides a summary of the weather event and instructions for configuring an experiment. @@ -41,29 +41,23 @@ A surface boundary associated with a vorticity maximum over the northern Great P Data ------- -On :srw-wiki:`Level 1 ` systems, users can find data for the Indianapolis Severe Weather Forecast in the usual input model data locations (see :numref:`Section %s ` for a list). The data can also be downloaded from the `UFS SRW Application Data Bucket `__. +On :srw-wiki:`Level 1 ` systems, users can find data for the Indianapolis Severe Weather Forecast in the usual input model data locations (see :numref:`Section %s ` for a list). The data can also be downloaded from the `UFS SRW Application Data Bucket `_. * FV3GFS data for the first forecast (``control``) is located at: - * https://noaa-ufs-srw-pds.s3.amazonaws.com/index.html#input_model_data/FV3GFS/grib2/2019061518/ + * https://noaa-ufs-srw-pds.s3.amazonaws.com/index.html#develop-20240618/input_model_data/FV3GFS/grib2/2019061518/ * HRRR and RAP data for the second forecast (``test_expt``) is located at: - * https://noaa-ufs-srw-pds.s3.amazonaws.com/index.html#input_model_data/HRRR/2019061518/ - * https://noaa-ufs-srw-pds.s3.amazonaws.com/index.html#input_model_data/RAP/2019061518/ + * https://noaa-ufs-srw-pds.s3.amazonaws.com/index.html#develop-20240618/input_model_data/HRRR/2019061518/ + * https://noaa-ufs-srw-pds.s3.amazonaws.com/index.html#develop-20240618/input_model_data/RAP/2019061518/ Load the Workflow -------------------- -To load the workflow environment, source the lmod-setup file. Then load the workflow conda environment. From the ``ufs-srweather-app`` directory, run: +To load the workflow environment, source the lmod-setup file and load the workflow conda environment by running: -.. code-block:: console - - source etc/lmod-setup.sh # OR: source etc/lmod-setup.csh when running in a csh/tcsh shell - module use modulefiles - module load wflow_ - -where ```` is a valid, lowercased machine name (see ``MACHINE`` in :numref:`Section %s ` for valid values). +.. include:: ../../doc-snippets/load-env.rst After loading the workflow, users should follow the instructions printed to the console. Usually, the instructions will tell the user to run |activate|. 
For example, a user on Hera with permissions on the ``nems`` project may issue the following commands to load the workflow (replacing ``User.Name`` with their actual username): @@ -77,35 +71,14 @@ After loading the workflow, users should follow the instructions printed to the Configuration ------------------------- -Navigate to the ``ufs-srweather-app/ush`` directory. The default (or "control") configuration for this experiment is based on the ``config.community.yaml`` file in that directory. Users can copy this file into ``config.yaml`` if they have not already done so: - -.. code-block:: console - - cd /path/to/ufs-srweather-app/ush - cp config.community.yaml config.yaml - -Users can save the location of the ``ush`` directory in an environment variable (``$USH``). This makes it easier to navigate between directories later. For example: - -.. code-block:: console - - export USH=/path/to/ufs-srweather-app/ush - -Users should substitute ``/path/to/ufs-srweather-app/ush`` with the actual path on their system. As long as a user remains logged into their system, they can run ``cd $USH``, and it will take them to the ``ush`` directory. The variable will need to be reset for each login session. +.. include:: ../../doc-snippets/expt-conf-intro.rst Experiment 1: Control ^^^^^^^^^^^^^^^^^^^^^^^^ Edit the configuration file (``config.yaml``) to include the variables and values in the sample configuration excerpts below. -.. Hint:: - - To open the configuration file in the command line, users may run the command: - - .. code-block:: console - - vi config.yaml - - To modify the file, hit the ``i`` key and then make any changes required. To close and save, hit the ``esc`` key and type ``:wq`` to write the changes to the file and exit/quit the file. Users may opt to use their preferred code editor instead. +.. include:: ../../doc-snippets/file-edit-hint.rst Start in the ``user:`` section and change the ``MACHINE`` and ``ACCOUNT`` variables. For example, when running on a personal MacOS device, users might set: @@ -138,11 +111,9 @@ In the ``workflow:`` section of ``config.yaml``, update ``EXPT_SUBDIR`` and ``PR .. _CronNote: -.. note:: - - Users may also want to set ``USE_CRON_TO_RELAUNCH: true`` and add ``CRON_RELAUNCH_INTVL_MNTS: 3``. This will automate submission of workflow tasks when running the experiment. However, not all systems have :term:`cron`. +.. include:: ../../doc-snippets/cron-note.rst -``EXPT_SUBDIR:`` This variable can be changed to any name the user wants from "gfsv16_physics_fcst" to "forecast1" to "a;skdfj". However, the best names will indicate useful information about the experiment. This tutorial uses ``control`` to establish a baseline, or "control", forecast. Since this tutorial helps users to compare the output from two different forecasts --- one that uses the FV3_GFS_v16 physics suite and one that uses the FV3_RRFS_v1beta physics suite --- "gfsv16_physics_fcst" could be a good alternative directory name. +``EXPT_SUBDIR:`` This variable can be changed to any name the user wants from "gfsv16_physics_fcst" to "forecast1" to "askdfj" (but note that whitespace and some punctuation characters are not allowed). However, the best names will indicate useful information about the experiment. This tutorial uses ``control`` to establish a baseline, or "control", forecast. 
Since this tutorial helps users to compare the output from two different forecasts --- one that uses the FV3_GFS_v16 physics suite and one that uses the FV3_RRFS_v1beta physics suite --- "gfsv16_physics_fcst" could be a good alternative directory name. ``PREDEF_GRID_NAME:`` This experiment uses the SUBCONUS_Ind_3km grid, rather than the default RRFS_CONUS_25km grid. The SUBCONUS_Ind_3km grid is a high-resolution grid (with grid cell size of approximately 3km) that covers a small area of the U.S. centered over Indianapolis, IN. For more information on this grid, see :numref:`Section %s `. @@ -251,8 +222,6 @@ Once the control case is running, users can return to the ``config.yaml`` file ( Later, users may want to conduct additional experiments using the FV3_HRRR and FV3_WoFS_v0 physics suites. Like FV3_RRFS_v1beta, these physics suites were designed for use with high-resolution grids for storm-scale predictions. -.. COMMENT: Maybe also FV3_RAP? - Next, users will need to modify the data parameters in ``task_get_extrn_ics:`` and ``task_get_extrn_lbcs:`` to use HRRR and RAP data rather than FV3GFS data. Users will need to change the following lines in each section: .. code-block:: console @@ -331,17 +300,7 @@ Navigate to ``test_expt/2019061518/postprd``. This directory contains the post-p Copy ``.png`` Files onto Local System ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -Users who are working on the cloud or on an HPC cluster may want to copy the ``.png`` files onto their local system to view in their preferred image viewer. Detailed instructions are available in the :ref:`Introduction to SSH & Data Transfer `. - -In summary, users can run the ``scp`` command in a new terminal/command prompt window to securely copy files from a remote system to their local system if an SSH tunnel is already established between the local system and the remote system. Users can adjust one of the following commands for their system: - -.. code-block:: console - - scp username@your-IP-address:/path/to/source_file_or_directory /path/to/destination_file_or_directory - # OR - scp -P 12345 username@localhost:/path/to/source_file_or_directory /path/to/destination_file_or_directory - -Users would need to modify ``username``, ``your-IP-address``, ``-P 12345``, and the file paths to reflect their systems' information. See the :ref:`Introduction to SSH & Data Transfer ` for example commands. +.. include:: ../../doc-snippets/scp-files.rst .. _ComparePlots: @@ -555,18 +514,20 @@ A polar vortex brought arctic air to much of the U.S. and Mexico. A series of co *Southern Plains Winter Weather Event Over Oklahoma City* -.. COMMENT: Upload a png to the SRW wiki and change the hyperlink to point to that. - Tutorial Content ------------------- -Coming Soon! +Coming Soon! .. _fcst4: Sample Forecast #4: Halloween Storm ======================================= +**Objective:** + * Compare forecast outputs for similar experiments that use different :term:`IC/LBC ` sources. + * Coming soon: Option to use verification tools to assess forecast quality. + Weather Summary -------------------- @@ -574,17 +535,329 @@ A line of severe storms brought strong winds, flash flooding, and tornadoes to t **Weather Phenomena:** Flooding and high winds - * `Storm Prediction Center (SPC) Storm Report for 20191031 `__ + * `Storm Prediction Center (SPC) Storm Report for 20191031 `_ -.. figure:: https://github.com/ufs-community/ufs-srweather-app/wiki/Tutorial/HalloweenStorm.jpg +.. 
figure:: https://github.com/ufs-community/ufs-srweather-app/wiki/Tutorial/HalloweenStorm.gif :alt: Radar animation of the Halloween Storm that swept across the Eastern United States in 2019. *Halloween Storm 2019* -Tutorial Content -------------------- +Data +------- -Coming Soon! +Data for the Halloween Storm is publicly available in S3 data buckets. The Rapid Refresh (`RAP `_) data can be downloaded from the `SRW App data bucket `_ using ``wget``. Make sure to issue the command from the folder where you want to place the data. + +.. code-block:: console + + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/halloween_rap.tgz + tar -xzf halloween_rap.tgz + +This will untar the ``halloween_rap.tgz`` data into a directory named ``RAP``. + +The SRW App can pull HRRR data directly from the `HRRR data bucket `_. Users do not need to download the data separately. + +Load the workflow +--------------------- + +To load the workflow environment, source the lmod-setup file and load the workflow conda environment by running: + +.. include:: ../../doc-snippets/load-env.rst + +After loading the workflow, users should follow the instructions printed to the console. Usually, the instructions will tell the user to run |activate|. For example, a user on Hera with permissions on the ``nems`` project may issue the following commands to load the workflow (replacing ``User.Name`` with their actual username): + +.. code-block:: console + + source /scratch1/NCEPDEV/nems/User.Name/ufs-srweather-app/etc/lmod-setup.sh hera + module use /scratch1/NCEPDEV/nems/User.Name/ufs-srweather-app/modulefiles + module load wflow_hera + conda activate srw_app + +Configuration +------------------------- + +.. include:: ../../doc-snippets/expt-conf-intro.rst + +Experiment 1: RAP Data +^^^^^^^^^^^^^^^^^^^^^^^^ + +Edit the configuration file (``config.yaml``) to include the variables and values in the sample configuration excerpts below. + +.. include:: ../../doc-snippets/file-edit-hint.rst + +Start in the ``user:`` section and change the ``MACHINE`` and ``ACCOUNT`` variables. For example, when running on a personal MacOS device, users might set: + +.. code-block:: console + + user: + RUN_ENVIR: community + MACHINE: macos + ACCOUNT: none + +For a detailed description of these variables, see :numref:`Section %s `. + +Users do not need to change the ``platform:`` section of the configuration file for this tutorial. + +In the ``workflow:`` section of ``config.yaml``, update ``EXPT_SUBDIR``, ``CCPP_PHYS_SUITE``, ``PREDEF_GRID_NAME``, ``DATE_FIRST_CYCL``, ``DATE_LAST_CYCL``, and ``FCST_LEN_HRS``. + +.. code-block:: console + + workflow: + USE_CRON_TO_RELAUNCH: false + EXPT_SUBDIR: halloweenRAP + CCPP_PHYS_SUITE: FV3_RAP + PREDEF_GRID_NAME: RRFS_CONUS_13km + DATE_FIRST_CYCL: '2019103012' + DATE_LAST_CYCL: '2019103012' + FCST_LEN_HRS: 36 + PREEXISTING_DIR_METHOD: rename + VERBOSE: true + COMPILER: intel + +.. include:: ../../doc-snippets/cron-note.rst + +``EXPT_SUBDIR:`` This variable can be changed to any name the user wants from "halloweenRAP" to "HalloweenStorm1" to "askdfj" (but note that whitespace and some punctuation characters are not allowed). However, the best names will indicate useful information about the experiment. Since this tutorial helps users to compare the output from RAP and HRRR forecast input data, this tutorial will use ``halloweenRAP`` for the Halloween Storm experiment that uses RAP forecast data. 
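+
+Since ``EXPTDIR`` is formed by joining ``EXPT_BASEDIR`` and ``EXPT_SUBDIR`` (see the directory parameters described later in this guide), setting ``EXPT_SUBDIR: halloweenRAP`` means the experiment will be generated in a directory like the following (the base path shown is illustrative only):
+
+.. code-block:: console
+
+   # EXPTDIR = ${EXPT_BASEDIR}/${EXPT_SUBDIR}
+   /path/to/expt_dirs/halloweenRAP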
+ +``PREDEF_GRID_NAME:`` This experiment uses the RRFS_CONUS_13km grid, rather than the default RRFS_CONUS_25km grid. This 13-km resolution is used in the NOAA operational Rapid Refresh (`RAP `_) model and is the resolution envisioned for the initial operational implementation of the Rapid Refresh Forecast System (:term:`RRFS`). For more information on this grid, see :numref:`Section %s `. + +``CCPP_PHYS_SUITE:`` The FV3_RAP physics suite contains the evolving :term:`parameterizations` used operationally in the NOAA Rapid Refresh (`RAP `_) model; the suite is also a prime candidate under consideration for initial RRFS implementation and has been well-tested at the 13-km resolution. It is therefore an appropriate physics choice when using the RRFS_CONUS_13km grid. + +``DATE_FIRST_CYCL``, ``DATE_LAST_CYCL``, and ``FCST_LEN_HRS`` set parameters related to the date and duration of the forecast. Because this is a one-cycle experiment that does not use cycling or :term:`data assimilation`, the date of the first :term:`cycle` and last cycle are the same. + +For a detailed description of other ``workflow:`` variables, see :numref:`Section %s `. + +In the ``task_get_extrn_ics:`` section, add ``USE_USER_STAGED_EXTRN_FILES`` and ``EXTRN_MDL_SOURCE_BASEDIR_ICS``. Users will need to adjust the file path to point to the location of the data on their system. + +.. code-block:: console + + task_get_extrn_ics: + EXTRN_MDL_NAME_ICS: RAP + USE_USER_STAGED_EXTRN_FILES: true + EXTRN_MDL_SOURCE_BASEDIR_ICS: /path/to/RAP/for_ICS + +For a detailed description of the ``task_get_extrn_ics:`` variables, see :numref:`Section %s `. + +Similarly, in the ``task_get_extrn_lbcs:`` section, add ``USE_USER_STAGED_EXTRN_FILES`` and ``EXTRN_MDL_SOURCE_BASEDIR_LBCS``. Users will need to adjust the file path to point to the location of the data on their system. + +.. code-block:: console + + task_get_extrn_lbcs: + EXTRN_MDL_NAME_LBCS: RAP + LBC_SPEC_INTVL_HRS: 3 + USE_USER_STAGED_EXTRN_FILES: true + EXTRN_MDL_SOURCE_BASEDIR_LBCS: /path/to/RAP/for_LBCS + +For a detailed description of the ``task_get_extrn_lbcs:`` variables, see :numref:`Section %s `. + +Users do not need to modify the ``task_run_fcst:`` section for this tutorial. + +.. COMMENT: Do we need to set QUILTING to true? + +In the ``rocoto:tasks:`` section, increase the walltime for the data-related tasks and metatasks. Then include the YAML configuration file containing the plotting task in the ``rocoto:tasks:taskgroups:`` section, like this: + +.. code-block:: console + + rocoto: + tasks: + task_get_extrn_ics: + walltime: 06:00:00 + task_get_extrn_lbcs: + walltime: 06:00:00 + metatask_run_ensemble: + task_make_lbcs_mem#mem#: + walltime: 06:00:00 + task_run_fcst_mem#mem#: + walltime: 06:00:00 + taskgroups: '{{ ["parm/wflow/prep.yaml", "parm/wflow/coldstart.yaml", "parm/wflow/post.yaml", "parm/wflow/plot.yaml"]|include }}' + +.. note:: + + Rocoto tasks are run once each. A :ref:`Rocoto ` metatask expands into one or more similar tasks by replacing the values between ``#`` symbols with the values under the ``var:`` key. See the `Rocoto documentation `_ for more information. + +For more information on how to turn on/off tasks in the workflow, please see :numref:`Section %s `.
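+
+To make the note above concrete, the following is a minimal, hypothetical sketch of how a Rocoto metatask expands; the task name mirrors the one used in this workflow, but the actual ``FV3LAM_wflow.xml`` generated for an experiment contains additional attributes and dependencies:
+
+.. code-block:: console
+
+   <!-- Illustrative sketch only; not the complete generated XML -->
+   <metatask name="run_ensemble">
+     <var name="mem">000</var>
+     <task name="run_fcst_mem#mem#">
+       <!-- Rocoto replaces #mem# with each value listed in <var name="mem">,
+            so this expands to a single task named run_fcst_mem000 -->
+     </task>
+   </metatask>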
+ +In the ``task_plot_allvars:`` section, add ``PLOT_FCST_INC: 6``. Users may also want to add ``PLOT_FCST_START: 0`` and ``PLOT_FCST_END: 36`` explicitly, but these can be omitted since the default values are the same as the forecast start and end times, respectively. + +.. code-block:: console + + task_plot_allvars: + COMOUT_REF: "" + PLOT_FCST_INC: 6 + +``PLOT_FCST_INC:`` This variable indicates the forecast hour increment for the plotting task. By setting the value to ``6``, the task will generate a ``.png`` file for every 6th forecast hour starting from 12z on October 30, 2019 (the 0th forecast hour) through the 36th forecast hour (November 1, 2019 at 0z). + +After configuring the forecast, users can generate the experiment by running: + +.. code-block:: console + + ./generate_FV3LAM_wflow.py + +To see experiment progress, users should navigate to their experiment directory. Then, use the ``rocotorun`` command to launch new workflow tasks and ``rocotostat`` to check on experiment progress. + +.. code-block:: console + + cd /path/to/expt_dirs/halloweenRAP + rocotorun -w FV3LAM_wflow.xml -d FV3LAM_wflow.db -v 10 + rocotostat -w FV3LAM_wflow.xml -d FV3LAM_wflow.db -v 10 + +Users will need to rerun the ``rocotorun`` and ``rocotostat`` commands above regularly to continue submitting workflow tasks and receiving progress updates. + +.. note:: + + When using cron to automate the workflow submission (as described :ref:`above `), users can omit the ``rocotorun`` command and simply use ``rocotostat`` to check on progress periodically. + +Users can save the location of the ``halloweenRAP`` directory in an environment variable (e.g., ``$HRAP``). This makes it easier to navigate between directories later. For example: + +.. code-block:: console + + export HRAP=/path/to/expt_dirs/halloweenRAP + +Users should substitute ``/path/to/expt_dirs/halloweenRAP`` with the actual path to the experiment directory on their system. As long as a user remains logged into their system, they can run ``cd $HRAP``, and it will take them to the ``halloweenRAP`` experiment directory. The variable will need to be reset for each login session. + +Experiment 2: Changing the Forecast Input +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Once the ``halloweenRAP`` case is running, users can return to the ``config.yaml`` file (in ``$USH``) and adjust the parameters for a new forecast. In this forecast, users will change the forecast input to use ``HRRR`` data and alter a few associated parameters. + +In the ``workflow:`` section of ``config.yaml``, update ``EXPT_SUBDIR`` and ``PREDEF_GRID_NAME``. Other parameters should remain the same. + +.. code-block:: console + + workflow: + EXPT_SUBDIR: halloweenHRRR + PREDEF_GRID_NAME: RRFS_CONUScompact_13km + +.. note:: + + Relative to the original CONUS domain, the "compact" CONUS domains are slightly smaller. The original CONUS domains were a bit too large to run with :term:`LBCs` from HRRR, so the "compact" domains were created to be just small enough to work with HRRR data. + +In the ``task_get_extrn_ics:`` section, update the values for ``EXTRN_MDL_NAME_ICS`` and ``USE_USER_STAGED_EXTRN_FILES`` and add ``EXTRN_MDL_FILES_ICS``. Users may choose to comment out or remove ``EXTRN_MDL_SOURCE_BASEDIR_ICS``, but this is not necessary. + +.. code-block:: console + + task_get_extrn_ics: + EXTRN_MDL_NAME_ICS: HRRR + USE_USER_STAGED_EXTRN_FILES: false + EXTRN_MDL_FILES_ICS: + - '{yy}{jjj}{hh}00{fcst_hr:02d}00' + +For a detailed description of the ``task_get_extrn_ics:`` variables, see :numref:`Section %s `.
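+
+To see how this file name template expands, consider the cycle used in this tutorial (2019103012): ``{yy}`` is the two-digit year (19), ``{jjj}`` is the three-digit day of the year (303 for October 30, 2019), ``{hh}`` is the cycle hour (12), and ``{fcst_hr:02d}`` is the zero-padded forecast hour (00 for ICs). The expansion below is illustrative:
+
+.. code-block:: console
+
+   # '{yy}{jjj}{hh}00{fcst_hr:02d}00' with yy=19, jjj=303, hh=12, fcst_hr=0
+   1930312000000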
+Update the same values in the ``task_get_extrn_lbcs:`` section: + +.. code-block:: console + + task_get_extrn_lbcs: + EXTRN_MDL_NAME_LBCS: HRRR + LBC_SPEC_INTVL_HRS: 3 + USE_USER_STAGED_EXTRN_FILES: false + EXTRN_MDL_FILES_LBCS: + - '{yy}{jjj}{hh}00{fcst_hr:02d}00' + + +For a detailed description of the ``task_get_extrn_lbcs:`` variables, see :numref:`Section %s `. + +After configuring the forecast, users can generate the second experiment by running: + +.. code-block:: console + + ./generate_FV3LAM_wflow.py + +To see experiment progress, users should navigate to their experiment directory. As in the first forecast, the following commands allow users to launch new workflow tasks and check on experiment progress. + +.. code-block:: console + + cd /path/to/expt_dirs/halloweenHRRR + rocotorun -w FV3LAM_wflow.xml -d FV3LAM_wflow.db -v 10 + rocotostat -w FV3LAM_wflow.xml -d FV3LAM_wflow.db -v 10 + +.. note:: + + When using cron to automate the workflow submission (as described :ref:`above `), users can omit the ``rocotorun`` command and simply use ``rocotostat`` to check on progress periodically. + +.. note:: + + If users have not automated their workflow using cron, they will need to ensure that they continue issuing ``rocotorun`` commands to launch all of the tasks in each experiment. While switching between experiment directories to run ``rocotorun`` and ``rocotostat`` commands in both directories is possible, it may be easier to finish the ``halloweenRAP`` experiment's tasks before starting on ``halloweenHRRR``. + +As with the ``halloweenRAP`` experiment, users can save the location of the ``halloweenHRRR`` directory in an environment variable (e.g., ``$HHRRR``). This makes it easier to navigate between directories later. For example: + +.. code-block:: console + + export HHRRR=/path/to/expt_dirs/halloweenHRRR + +Users should substitute ``/path/to/expt_dirs/halloweenHRRR`` with the actual path on their system. + + +How to Analyze Results +----------------------- +Navigate to ``halloweenHRRR/2019103012/postprd`` and/or ``halloweenRAP/2019103012/postprd``. These directories contain the post-processed data generated by the :term:`UPP` from the Halloween Storm forecasts. After the ``plot_allvars`` task completes, these directories will contain ``.png`` images for several forecast variables. + +Copy ``.png`` Files onto Local System +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +.. include:: ../../doc-snippets/scp-files.rst + +Examining Forecast Plots at Peak Intensity +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +This section examines plots from the HRRR- and RAP-based experiments while the Halloween Storm is at or approaching peak intensity. + +.. _fcst4_250wind: + +250mb Wind +`````````` +An effective weather forecast begins with analyzing a 250mb wind chart. By using this wind plot, forecasters can identify key features such as jet stream placement, jet maxima, troughs, ridges, and more. This analysis also helps pinpoint areas with the potential for the strongest severe weather. + +In the 250mb wind plots below, the ``halloweenHRRR`` and ``halloweenRAP`` plots are nearly identical at forecast hour f036, which shows strong model agreement. Analyzing this chart, we can see multiple ingredients signaling a significant severe weather event over the eastern CONUS. The first thing to notice is the placement of the jet streak along with troughing approaching the eastern US. Also notice an extreme 150KT jet max over southern Ohio, further fueling severe weather. 
The last thing to notice is the divergence aloft present over the eastern CONUS; seeing divergence present all the way up to 250mb indicates a strong system. + +.. figure:: https://github.com/ufs-community/ufs-srweather-app/wiki/fcst4_plots/250wind_rap_conus_f036.png + :align: center + :width: 75% + + *RAP Plot for 250mb Wind* + +.. figure:: https://github.com/ufs-community/ufs-srweather-app/wiki/fcst4_plots/250wind_hrrr_conus_f036.png + :align: center + :width: 75% + + *HRRR Plot for 250mb Wind* + +.. _fcst4_10mwind: + +10m Wind +`````````` +The 10m wind plots allow forecasters to pick up on patterns closer to the surface. They show features such as convergence and pressure areas. + +In the 10m wind plots below, the ``halloweenHRRR`` and ``halloweenRAP`` plots are once again very similar, which makes sense given that the 250mb wind plots are also so similar. We can see a few key features on this chart. The most important is the area of convergence taking place over the East Coast, which is driving the line of severe storms. + +.. figure:: https://github.com/ufs-community/ufs-srweather-app/wiki/fcst4_plots/10mwind_rap_conus_f036.png + :align: center + :width: 75% + + *RAP Plot for 10m Winds* + +.. figure:: https://github.com/ufs-community/ufs-srweather-app/wiki/fcst4_plots/10mwind_hrrr_conus_f036.png + :align: center + :width: 75% + + *HRRR Plot for 10m Winds* + +.. _fcst4_refc: + +Composite Reflectivity +```````````````````````` +Reflectivity images visually represent the weather based on the energy (measured in decibels [dBZ]) reflected back from radar. Composite reflectivity generates an image based on reflectivity scans at multiple elevation angles, or "tilts", of the antenna. See https://www.noaa.gov/jetstream/reflectivity for a more detailed explanation of composite reflectivity. + +In the composite reflectivity plots below, the ``halloweenHRRR`` and ``halloweenRAP`` models remain quite similar, as expected. Utilizing the reflectivity plots provides the final piece of the puzzle. From the previous analyses, we already had a good understanding of where the storms were likely to occur. Composite reflectivity serves as an additional tool, allowing us to visualize where the models predict storm placement. In this case, the strongest storms are indicated by higher dBZ values and appear to be concentrated in the NC/VA region. + +.. figure:: https://github.com/ufs-community/ufs-srweather-app/wiki/fcst4_plots/refc_rap_conus_f036.png + :align: center + :width: 75% + + *RAP Plot for Composite Reflectivity* + +.. figure:: https://github.com/ufs-community/ufs-srweather-app/wiki/fcst4_plots/refc_hrrr_conus_f036.png + :align: center + :width: 75% + + *HRRR Plot for Composite Reflectivity* .. _fcst5: diff --git a/doc/UsersGuide/BuildingRunningTesting/VXCases.rst b/doc/UsersGuide/BuildingRunningTesting/VXCases.rst index 080e180b14..f8b01f993c 100644 --- a/doc/UsersGuide/BuildingRunningTesting/VXCases.rst +++ b/doc/UsersGuide/BuildingRunningTesting/VXCases.rst @@ -45,21 +45,21 @@ On :srw-wiki:`Level 1 ` systems, users can fi On other systems, users need to download the ``Indy-Severe-Weather.tgz`` file using any of the following methods: - #. Download directly from the S3 bucket using a browser. The data is available at https://noaa-ufs-srw-pds.s3.amazonaws.com/index.html#sample_cases/release-public-v2.2.0/. + #. Download directly from the S3 bucket using a browser. The data is available at https://noaa-ufs-srw-pds.s3.amazonaws.com/index.html#experiment-user-cases/release-public-v2.2.0/METplus-vx-sample/. #. 
Download from a terminal using the AWS command line interface (CLI), if installed: .. code-block:: console - aws s3 cp https://noaa-ufs-srw-pds.s3.amazonaws.com/index.html#sample_cases/release-public-v2.2.0/Indy-Severe-Weather.tgz Indy-Severe-Weather.tgz + aws s3 cp s3://noaa-ufs-srw-pds/experiment-user-cases/release-public-v2.2.0/METplus-vx-sample/Indy-Severe-Weather.tgz Indy-Severe-Weather.tgz #. Download from a terminal using ``wget``: .. code-block:: console - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/sample_cases/release-public-v2.2.0/Indy-Severe-Weather.tgz + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/experiment-user-cases/release-public-v2.2.0/METplus-vx-sample/Indy-Severe-Weather.tgz -This tar file contains :term:`IC/LBC ` files, observation data, model/forecast output, and MET verification output for the sample forecast. Users who have never run the SRW App on their system before will also need to download (1) the fix files required for SRW App forecasts and (2) the NaturalEarth shapefiles required for plotting. Users can download the fix file data from a browser at https://noaa-ufs-srw-pds.s3.amazonaws.com/current_srw_release_data/fix_data.tgz or visit :numref:`Section %s ` for instructions on how to download the data with ``wget``. NaturalEarth files are available at https://noaa-ufs-srw-pds.s3.amazonaws.com/NaturalEarth/NaturalEarth.tgz. See the :numref:`Section %s ` for more information on plotting. +This tar file contains :term:`IC/LBC ` files, observation data, model/forecast output, and MET verification output for the sample forecast. Users who have never run the SRW App on their system before will also need to download (1) the fix files required for SRW App forecasts and (2) the NaturalEarth shapefiles required for plotting. Users can download the fix file data from a browser at https://noaa-ufs-srw-pds.s3.amazonaws.com/experiment-user-cases/release-public-v2.2.0/out-of-the-box/fix_data.tgz or visit :numref:`Section %s ` for instructions on how to download the data with ``wget``. NaturalEarth files are available at https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/NaturalEarth/NaturalEarth.tgz. See the :numref:`Section %s ` for more information on plotting. After downloading ``Indy-Severe-Weather.tgz`` using one of the three methods above, untar the downloaded compressed archive file: @@ -81,14 +81,9 @@ Save the path to this file in an ``INDYDATA`` environment variable: Load the Workflow ^^^^^^^^^^^^^^^^^^^^ -First, navigate to the ``ufs-srweather-app/ush`` directory. Then, load the workflow environment: - -.. code-block:: console - - source /path/to/etc/lmod-setup.sh - module use /path/to/ufs-srweather-app/modulefiles - module load wflow_ +To load the workflow environment, run: +.. include:: ../../doc-snippets/load-env.rst Users running a csh/tcsh shell would run ``source /path/to/etc/lmod-setup.csh <machine>`` in place of the first command above. After loading the workflow, users should follow the instructions printed to the console. Usually, the instructions will tell the user to run |activate|. @@ -267,7 +262,8 @@ Point STAT Files The Point-Stat files contain continuous variables like temperature, pressure, and wind speed. A description of the Point-Stat file can be found :ref:`here ` in the MET documentation. -The Point-Stat files contain a potentially overwhelming amount of information. Therefore, it is recommended that users focus on the CNT MET test, which contains the `RMSE `__ and `MBIAS `__ statistics. 
The MET tests are defined in column 24 'LINE_TYPE' of the ``.stat`` file. Look for 'CNT' in this column. Then find column 66-68 for MBIAS and 78-80 for RMSE statistics. A full description of this file can be found :ref:`here `. +The Point-Stat files contain a potentially overwhelming amount of information. Therefore, it is recommended that users focus on the CNT MET test, which contains the Root Mean Squared Error (`RMSE `_) and +multiplicative bias (`MBIAS `_) statistics. The MET tests are defined in column 24 'LINE_TYPE' of the ``.stat`` file. Look for 'CNT' in this column. Then find columns 66-68 for MBIAS and 78-80 for RMSE statistics. A full description of this file can be found :ref:`here `.
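+
+As a quick, illustrative way to pull these values out of a ``.stat`` file (using the column positions cited above; the file path is a placeholder):
+
+.. code-block:: console
+
+   # Print the MBIAS (column 66) and RMSE (column 78) values from CNT lines
+   awk '$24 == "CNT" { print $66, $78 }' /path/to/metprd/point_stat/output.stat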
To narrow down the variable field even further, users can focus on these weather variables: diff --git a/doc/UsersGuide/BuildingRunningTesting/WE2Etests.rst b/doc/UsersGuide/BuildingRunningTesting/WE2Etests.rst index 4fca53b575..b3a7bf847b 100644 --- a/doc/UsersGuide/BuildingRunningTesting/WE2Etests.rst +++ b/doc/UsersGuide/BuildingRunningTesting/WE2Etests.rst @@ -78,6 +78,7 @@ For convenience, the WE2E tests are currently grouped into the following categor FV3GFS: RAP: HRRR: + RRFS: Some tests are duplicated among the above categories via symbolic links, both for legacy reasons (when tests for different capabilities were consolidated) and for convenience when a user would like to run all tests for a specific category (e.g., verification tests). diff --git a/doc/UsersGuide/BuildingRunningTesting/index.rst b/doc/UsersGuide/BuildingRunningTesting/index.rst index 5c0efc3c64..a0aa69c85b 100644 --- a/doc/UsersGuide/BuildingRunningTesting/index.rst +++ b/doc/UsersGuide/BuildingRunningTesting/index.rst @@ -13,3 +13,4 @@ Building, Running, and Testing the SRW App Tutorial VXCases AQM + FIRE diff --git a/doc/UsersGuide/CustomizingTheWorkflow/ConfigWorkflow.rst b/doc/UsersGuide/CustomizingTheWorkflow/ConfigWorkflow.rst index 3bfa5bdf7d..50d343f57e 100644 --- a/doc/UsersGuide/CustomizingTheWorkflow/ConfigWorkflow.rst +++ b/doc/UsersGuide/CustomizingTheWorkflow/ConfigWorkflow.rst @@ -112,6 +112,7 @@ If non-default parameters are selected for the variables in this section, they s ``SCHED``: (Default: "") The job scheduler to use (e.g., Slurm) on the specified ``MACHINE``. Leaving this an empty string allows the experiment generation script to set it automatically depending on the machine the workflow is running on. Valid values: ``"slurm"`` | ``"pbspro"`` | ``"lsf"`` | ``"lsfcray"`` | ``"none"`` + Machine-Dependent Parameters ------------------------------- These parameters vary depending on machine. On :srw-wiki:`Level 1 and 2 ` systems, the appropriate values for each machine can be viewed in the ``ush/machine/.sh`` scripts. To specify a value other than the default, add these variables and the desired value in the ``config.yaml`` file so that they override the ``config_defaults.yaml`` and machine default values. @@ -156,8 +157,8 @@ These settings define platform-specific run commands. Users should set run comma ``RUN_CMD_SERIAL``: (Default: "") The run command for some serial jobs. -``RUN_CMD_AQM``: (Default: "") - The run command for some AQM tasks. +``RUN_CMD_NEXUS``: (Default: "") + The run command for the AQM NEXUS tasks. ``RUN_CMD_AQMLBC``: (Default: "") The run command for the ``aqm_lbcs`` task. @@ -168,48 +169,6 @@ These settings define platform-specific run commands. Users should set run comma ``PRE_TASK_CMDS``: (Default: "") Pre-task commands such as ``ulimit`` needed by tasks. For example: ``'{ ulimit -s unlimited; ulimit -a; }'`` -METplus Parameters ---------------------- - -:ref:`METplus ` is a scientific verification framework that spans a wide range of temporal and spatial scales. Many of the METplus parameters are described below, but additional documentation for the METplus components is available on the `METplus website `__. - -.. _METParamNote: - -.. note:: - Where a date field is required: - * ``YYYY`` refers to the 4-digit valid year - * ``MM`` refers to the 2-digit valid month - * ``DD`` refers to the 2-digit valid day of the month - * ``HH`` refers to the 2-digit valid hour of the day - * ``mm`` refers to the 2-digit valid minutes of the hour - * ``SS`` refers to the two-digit valid seconds of the hour - -``CCPA_OBS_DIR``: (Default: ``"{{ workflow.EXPTDIR }}/obs_data/ccpa/proc"``) - User-specified location of the directory where :term:`CCPA` hourly precipitation files used by METplus are located (or, if retrieved by the workflow, where they will be placed). See comments in file ``scripts/exregional_get_verif_obs.sh`` for more details about files and directory structure, as well as important caveats about errors in the metadata and file names. - - .. attention:: - Do not set this to the same path as other ``*_OBS_DIR`` variables; otherwise unexpected results and data loss may occur. - -``NOHRSC_OBS_DIR``: (Default: ``"{{ workflow.EXPTDIR }}/obs_data/nohrsc/proc"``) - User-specified location of top-level directory where NOHRSC 6- and 24-hour snowfall accumulation files used by METplus are located (or, if retrieved by the workflow, where they will be placed). See comments in file scripts/exregional_get_verif_obs.sh for more details about files and directory structure - - .. attention:: - Do not set this to the same path as other ``*_OBS_DIR`` variables; otherwise unexpected results and data loss may occur. - - .. note:: - Due to limited availability of NOHRSC observation data on NOAA :term:`HPSS` and the likelihood that snowfall accumulation verification will not be desired outside of winter cases, this verification option is currently not present in the workflow by default. In order to use it, the verification environment variable ``VX_FIELDS`` should be updated to include ``ASNOW``. This will allow the related workflow tasks to be run. - -``MRMS_OBS_DIR``: (Default: ``"{{ workflow.EXPTDIR }}/obs_data/mrms/proc"``) - User-specified location of the directory where :term:`MRMS` composite reflectivity and echo top files used by METplus are located (or, if retrieved by the workflow, where they will be placed). See comments in the ``scripts/exregional_get_verif_obs.sh`` for more details about files and directory structure. - - .. attention:: - Do not set this to the same path as other ``*_OBS_DIR`` variables; otherwise unexpected results and data loss may occur. - -``NDAS_OBS_DIR``: (Default: ``"{{ workflow.EXPTDIR }}/obs_data/ndas/proc"``) - User-specified location of top-level directory where :term:`NDAS` prepbufr files used by METplus are located (or, if retrieved by the workflow, where they will be placed). See comments in file ``scripts/exregional_get_verif_obs.sh`` for more details about files and directory structure. - - .. attention:: - Do not set this to the same path as other ``*_OBS_DIR`` variables; otherwise unexpected results and data loss may occur. Other Platform-Specific Directories -------------------------------------- @@ -271,6 +230,12 @@ These parameters are associated with the fixed (i.e., static) files. 
On :srw-wik ``FIXshp``: (Default: "") System directory containing the graphics shapefiles. On Level 1 systems, these are set within the machine files. Users on other systems will need to provide the path to the directory that contains the *Natural Earth* shapefiles. +``FIXaqm``: (Default: "") + Path to system directory containing AQM fixed files. + +``FIXemis``: (Default: "") + Path to system directory containing AQM emission data files. + ``FIXcrtm``: (Default: "") Path to system directory containing CRTM fixed files. @@ -287,7 +252,7 @@ WORKFLOW Configuration Parameters If non-default parameters are selected for the variables in this section, they should be added to the ``workflow:`` section of the ``config.yaml`` file. -``WORKFLOW_ID``: (Default: ``!nowtimestamp ''``) +``WORKFLOW_ID``: (Default: ``''``) Unique ID for the workflow run that will be set in ``setup.py``. ``RELATIVE_LINK_FLAG``: (Default: "--relative") @@ -329,6 +294,9 @@ Directory Parameters ``EXPTDIR``: (Default: ``'{{ [workflow.EXPT_BASEDIR, workflow.EXPT_SUBDIR]|path_join }}'``) The full path to the experiment directory. By default, this value will point to ``"${EXPT_BASEDIR}/${EXPT_SUBDIR}"``, but the user can define it differently in the configuration file if desired. +``WFLOW_FLAG_FILES_DIR``: (Default: ``'{{ [workflow.EXPTDIR, "wflow_flag_files"]|path_join }}'``) + Directory in which flag files marking completion of various workflow tasks can be placed. + Pre-Processing File Separator Parameters -------------------------------------------- @@ -338,9 +306,6 @@ Pre-Processing File Separator Parameters Set File Name Parameters ---------------------------- -``EXPT_CONFIG_FN``: (Default: "config.yaml") - Name of the user-specified configuration file for the forecast experiment. - ``CONSTANTS_FN``: (Default: "constants.yaml") Name of the file containing definitions of various mathematical, physical, and SRW App constants. @@ -387,7 +352,7 @@ Set File Name Parameters Name of a file that contains settings and configurations for the :term:`NUOPC`/:term:`ESMF` main component. In general, users should not set this variable in their configuration file (see :ref:`note `). ``UFS_CONFIG_FN``: (Default: "ufs.configure") - Name of a file that contains information about the various :term:`UFS` components and their run sequence. In general, users should not set this variable in their configuration file (see :ref:`note `). + Name of a template file that contains information about the various :term:`UFS` components and their run sequence. In general, users should not set this variable in their configuration file (see :ref:`note `). ``AQM_RC_FN``: (Default: "aqm.rc") Name of resource file for NOAA Air Quality Model (AQM). @@ -452,8 +417,8 @@ This section contains files and paths to files that are staged in the experiment ``WFLOW_XML_FN``: (Default: "FV3LAM_wflow.xml") Name of the Rocoto workflow XML file that the experiment generation script creates. This file defines the workflow for the experiment. -``GLOBAL_VAR_DEFNS_FN``: (Default: "var_defns.sh") - Name of the file (a shell script) containing definitions of the primary and secondary experiment variables (parameters). This file is sourced by many scripts (e.g., the J-job scripts corresponding to each workflow task) in order to make all the experiment variables available in those scripts. The primary variables are defined in the default configuration file (``config_defaults.yaml``) and in the user configuration file (``config.yaml``). 
The secondary experiment variables are generated by the experiment generation script. +``GLOBAL_VAR_DEFNS_FN``: (Default: "var_defns.yaml") + Name of the auto-generated experiment configuration file. It contains the primary experiment variables, defined in the default configuration file (``config_defaults.yaml``) and in the user-specified configuration file, as well as secondary experiment variables generated by the experiment generation script from machine files and other settings. This file is the primary source of information used in the scripts at run time. ``ROCOTO_YAML_FN``: (Default: "rocoto_defns.yaml") Name of the YAML file containing the YAML workflow definition from which the Rocoto XML file is created. @@ -532,7 +497,7 @@ CCPP Parameter ``CCPP_PHYS_SUITE_FP``: (Default: ``'{{ [workflow.EXPTDIR, CCPP_PHYS_SUITE_FN]|path_join }}'``) The full path to the suite definition file (SDF) in the experiment directory. -``CCPP_PHYS_DIR``: (Default: ``'{{ [user.UFS_WTHR_MDL_DIR, "FV3", "ccpp", "physics", "physics"] |path_join }}'``) +``CCPP_PHYS_DIR``: (Default: ``'{{ [user.UFS_WTHR_MDL_DIR, "FV3", "ccpp", "physics", "physics", "SFC_Models", "Land", "Noahmp"] |path_join }}'``) The directory containing the CCPP physics source code. This is needed to link table(s) contained in that repository. Field Dictionary Parameters @@ -701,7 +666,7 @@ A standard set of environment variables has been established for *nco* mode to s ``envir_default, NET_default, model_ver_default, RUN_default``: Standard environment variables defined in the NCEP Central Operations WCOSS Implementation Standards document. These variables are used in forming the path to various directories containing input, output, and workflow files. The variables are defined in the :nco:`WCOSS Implementation Standards ` document (pp. 4-5) as follows: - ``envir_default``: (Default: "para") + ``envir_default``: (Default: "test") Set to "test" during the initial testing phase, "para" when running in parallel (on a schedule), and "prod" in production. ``NET_default``: (Default: "srw") @@ -713,46 +678,28 @@ A standard set of environment variables has been established for *nco* mode to s ``RUN_default``: (Default: "srw") Name of model run (third level of ``com`` directory structure). In general, same as ``${NET_default}``. -``OPSROOT_default``: (Default: ``'{{ workflow.EXPT_BASEDIR }}/../nco_dirs'``) - The operations root directory in *nco* mode. - -``COMROOT_default``: (Default: ``'{{ OPSROOT_default }}/com'``) - The ``com`` root directory for input/output data that is located on the current system (typically ``$OPSROOT_default/com``). - -``DATAROOT_default``: (Default: ``'{{OPSROOT_default }}/tmp'``) - Directory containing the (temporary) working directory for running jobs; typically named ``$OPSROOT_default/tmp`` in production. - -``DCOMROOT_default``: (Default: ``'{{OPSROOT_default }}/dcom'``) - ``dcom`` root directory, typically ``$OPSROOT_default/dcom``. This directory contains input/incoming data that is retrieved from outside WCOSS. - -``LOGBASEDIR_default``: (Default: ``'{% if user.RUN_ENVIR == "nco" %}{{ [OPSROOT_default, "output"]|path_join }}{% else %}{{ [workflow.EXPTDIR, "log"]|path_join }}{% endif %}'``) - Directory in which the log files from the workflow tasks will be placed. - -``COMIN_BASEDIR``: (Default: ``'{{ COMROOT_default }}/{{ NET_default }}/{{ model_ver_default }}'``) - ``com`` directory for current model's input data, typically ``$COMROOT/$NET/$model_ver/$RUN.$PDY``. 
- -``COMOUT_BASEDIR``: (Default: ``'{{ COMROOT_default }}/{{ NET_default }}/{{ model_ver_default }}'``) - ``com`` directory for current model's output data, typically ``$COMROOT/$NET/$model_ver/$RUN.$PDY``. +``PTMP``: (Default: ``'{{ workflow.EXPT_BASEDIR }}/../nco_dirs'``) + User-defined path to the com type directories (``OPSROOT=$PTMP/$envir``). ``DBNROOT_default``: (Default: "") Root directory for the data-alerting utilities. -``SENDECF_default``: (Default: false) +``SENDECF_default``: (Default: "NO") Boolean variable used to control ``ecflow_client`` child commands. -``SENDDBN_default``: (Default: false) +``SENDDBN_default``: (Default: "NO") Boolean variable used to control sending products off WCOSS2. -``SENDDBN_NTC_default``: (Default: false) +``SENDDBN_NTC_default``: (Default: "NO") Boolean variable used to control sending products with WMO headers off WCOSS2. -``SENDCOM_default``: (Default: false) +``SENDCOM_default``: (Default: "YES") Boolean variable to control data copies to ``$COMOUT``. -``SENDWEB_default``: (Default: false) +``SENDWEB_default``: (Default: "NO") Boolean variable used to control sending products to a web server, often ``ncorzdm``. -``KEEPDATA_default``: (Default: true) +``KEEPDATA_default``: (Default: "YES") Boolean variable used to specify whether or not the working directory should be kept upon successful job completion. ``MAILTO_default``: (Default: "") @@ -924,7 +871,7 @@ Basic Task Parameters For each workflow task, certain parameter values must be passed to the job scheduler (e.g., Slurm), which submits a job for the task. ``EXTRN_MDL_NAME_ICS``: (Default: "FV3GFS") - The name of the external model that will provide fields from which initial condition (IC) files, surface files, and 0-th hour boundary condition files will be generated for input into the forecast model. Valid values: ``"GSMGFS"`` | ``"FV3GFS"`` | ``"GEFS"`` | ``"GDAS"`` | ``"RAP"`` | ``"HRRR"`` | ``"NAM"`` | ``"UFS-CASE-STUDY"`` + The name of the external model that will provide fields from which initial condition (IC) files, surface files, and 0-th hour boundary condition files will be generated for input into the forecast model. Valid values: ``"GSMGFS"`` | ``"FV3GFS"`` | ``"GEFS"`` | ``"GDAS"`` | ``"RAP"`` | ``"HRRR"`` | ``"RRFS"`` | ``"NAM"`` | ``"UFS-CASE-STUDY"`` ``EXTRN_MDL_ICS_OFFSET_HRS``: (Default: 0) Users may wish to start a forecast using forecast data from a previous cycle of an external model. This variable indicates how many hours earlier the external model started than the FV3 forecast configured here. For example, if the forecast should start from a 6-hour forecast of the GFS, then ``EXTRN_MDL_ICS_OFFSET_HRS: "6"``. @@ -978,7 +925,7 @@ Basic Task Parameters For each workflow task, certain parameter values must be passed to the job scheduler (e.g., Slurm), which submits a job for the task. ``EXTRN_MDL_NAME_LBCS``: (Default: "FV3GFS") - The name of the external model that will provide fields from which lateral boundary condition (LBC) files (except for the 0-th hour LBC file) will be generated for input into the forecast model. 
Valid values: ``"GSMGFS"`` | ``"FV3GFS"`` | ``"GEFS"`` | ``"GDAS"`` | ``"RAP"`` | ``"HRRR"`` | ``"NAM"`` | ``"UFS-CASE-STUDY"`` + The name of the external model that will provide fields from which lateral boundary condition (LBC) files (except for the 0-th hour LBC file) will be generated for input into the forecast model. Valid values: ``"GSMGFS"`` | ``"FV3GFS"`` | ``"GEFS"`` | ``"GDAS"`` | ``"RAP"`` | ``"HRRR"`` | ``"RRFS"`` | ``"NAM"`` | ``"UFS-CASE-STUDY"`` ``LBC_SPEC_INTVL_HRS``: (Default: 6) The interval (in integer hours) at which LBC files will be generated. This is also referred to as the *boundary update interval*. Note that the model selected in ``EXTRN_MDL_NAME_LBCS`` must have data available at a frequency greater than or equal to that implied by ``LBC_SPEC_INTVL_HRS``. For example, if ``LBC_SPEC_INTVL_HRS`` is set to "6", then the model must have data available at least every 6 hours. It is up to the user to ensure that this is the case. @@ -1041,9 +988,15 @@ FVCOM Parameters ``FVCOM_FILE``: (Default: "fvcom.nc") Name of the file located in ``FVCOM_DIR`` that has :term:`FVCOM` data interpolated to the FV3-LAM grid. This file will be copied later to a new location, and the name will be changed to ``fvcom.nc`` if a name other than ``fvcom.nc`` is selected. -Vertical Coordinate File Parameter +Vertical Coordinate Parameters ------------------------------------ +``LEVP``: (Default: 65) + Number of vertical levels in the atmosphere. In order to change this + number, the user will additionally need to create a vertical coordinate + distribution file; this process is described in :numref:`Section %s `. + This value should be the same in both ``make_ics`` and ``make_lbcs``. + ``VCOORD_FILE``: (Default: ``"{{ workflow.FIXam }}/global_hyblev.l65.txt"``) Full path to the file used to set the vertical coordinates in FV3. This file should be the same in both ``make_ics`` and ``make_lbcs``. @@ -1061,9 +1014,15 @@ Non-default parameters for the ``make_lbcs`` task are set in the ``task_make_lbc ``OMP_STACKSIZE_MAKE_LBCS``: (Default: "1024m") Controls the size of the stack for threads created by the OpenMP implementation. -Vertical Coordinate File Parameter +Vertical Coordinate Parameters ------------------------------------ +``LEVP``: (Default: 65) + Number of vertical levels in the atmosphere. In order to change this + number, the user will additionally need to create a vertical coordinate + distribution file; this process is described in :numref:`Section %s `. + This value should be the same in both ``make_ics`` and ``make_lbcs``. + ``VCOORD_FILE``: (Default: ``"{{ workflow.FIXam }}/global_hyblev.l65.txt"``) Full path to the file used to set the vertical coordinates in FV3. This file should be the same in both ``make_ics`` and ``make_lbcs``. @@ -1108,10 +1067,10 @@ For each workflow task, certain parameter values must be passed to the job sched For more information, see the `Intel Development Reference Guide `__. -``OMP_NUM_THREADS_RUN_FCST``: (Default: 1) - The number of OpenMP threads to use for parallel regions. Corresponds to the ``atmos_nthreads`` value in ``model_configure``. +``OMP_NUM_THREADS_RUN_FCST``: (Default: 2) + The number of OpenMP threads to use for parallel regions. Corresponds to the ``ATM_omp_num_threads`` value in ``ufs.configure``. -``OMP_STACKSIZE_RUN_FCST``: (Default: "512m") +``OMP_STACKSIZE_RUN_FCST``: (Default: "1024m") Controls the size of the stack for threads created by the OpenMP implementation. .. _ModelConfigParams: @@ -1175,12 +1134,12 @@ Write-Component (Quilting) Parameters ``PRINT_ESMF``: (Default: false) Flag that determines whether to output extra (debugging) information from :term:`ESMF` routines. 
Note that the write component uses ESMF library routines to interpolate from the native forecast model grid to the user-specified output grid (which is defined in the model configuration file ``model_configure`` in the forecast run directory). Valid values: ``True`` | ``False`` -``PE_MEMBER01``: (Default: ``'{{ LAYOUT_Y * LAYOUT_X + WRTCMP_write_groups * WRTCMP_write_tasks_per_group if QUILTING else LAYOUT_Y * LAYOUT_X}}'``) +``PE_MEMBER01``: (Default: ``'{{ OMP_NUM_THREADS_RUN_FCST * (LAYOUT_Y * LAYOUT_X + WRTCMP_write_groups * WRTCMP_write_tasks_per_group) if QUILTING else OMP_NUM_THREADS_RUN_FCST * (LAYOUT_Y * LAYOUT_X)}}'``) The number of MPI processes required by the forecast. When QUILTING is true, it is calculated as: .. math:: - LAYOUT\_X * LAYOUT\_Y + WRTCMP\_write\_groups * WRTCMP\_write\_tasks\_per\_group + OMP\_NUM\_THREADS\_RUN\_FCST * (LAYOUT\_X * LAYOUT\_Y + WRTCMP\_write\_groups * WRTCMP\_write\_tasks\_per\_group) ``WRTCMP_write_groups``: (Default: "") The number of write groups (i.e., groups of :term:`MPI` tasks) to use in the write component. Each write group will write to one set of output files (a ``dynf${fhr}.nc`` and a ``phyf${fhr}.nc`` file, where ``${fhr}`` is the forecast hour). Each write group contains ``WRTCMP_write_tasks_per_group`` tasks. Usually, one write group is sufficient. This may need to be increased if the forecast is proceeding so quickly that a single write group cannot complete writing to its set of files before there is a need/request to start writing the next set of files at the next output time. @@ -1376,6 +1335,9 @@ Non-default parameters for the ``nexus_emission_*`` tasks are set in the ``task_ ``PPN_NEXUS_EMISSION``: (Default: ``'{{ platform.NCORES_PER_NODE // OMP_NUM_THREADS_NEXUS_EMISSION }}'``) Processes per node for the ``nexus_emission_*`` tasks. +``NNODES_NEXUS_EMISSION``: (Default: 4) + The number of nodes to request from the job scheduler for the NEXUS emission task. + ``KMP_AFFINITY_NEXUS_EMISSION``: (Default: "scatter") Intel Thread Affinity Interface for the ``nexus_emission_*`` tasks. See :ref:`this note ` for more information on thread affinity. @@ -1385,12 +1347,20 @@ Non-default parameters for the ``nexus_emission_*`` tasks are set in the ``task_ ``OMP_STACKSIZE_NEXUS_EMISSION``: (Default: "1024m") Controls the size of the stack for threads created by the OpenMP implementation. +POINT_SOURCE Configuration Parameters +------------------------------------------------ +Non-default parameters for the ``task_point_source`` tasks are set in the ``task_point_source:`` section of the ``config.yaml`` file. + +``PT_SRC_SUBDIR``: (Default: ``"NEI2016v1/v2023-01-PT"``) + Subdirectory structure of point source data under ``FIXemis``. + Full path: ``FIXemis/PT_SRC_SUBDIR`` + BIAS_CORRECTION_O3 Configuration Parameters ------------------------------------------------- Non-default parameters for the ``bias_correction_o3`` tasks are set in the ``task_bias_correction_o3:`` section of the ``config.yaml`` file. -``KMP_AFFINITY_BIAS_CORRECTION_O3``: "scatter" +``KMP_AFFINITY_BIAS_CORRECTION_O3``: (Default: "scatter") Intel Thread Affinity Interface for the ``bias_correction_o3`` task. See :ref:`this note ` for more information on thread affinity. ``OMP_NUM_THREADS_BIAS_CORRECTION_O3``: (Default: 32) @@ -1625,95 +1595,372 @@ Pressure Tendency Diagnostic ``PRINT_DIFF_PGR``: (Default: false) Option to turn on/off the pressure tendency diagnostic. 
-Verification Parameters -========================== +Verification (VX) Parameters +================================= Non-default parameters for verification tasks are set in the ``verification:`` section of the ``config.yaml`` file. -General Verification Parameters ---------------------------------- +.. note:: + The verification tasks in the SRW App are based on the :ref:`METplus ` + verification software developed at the Developmental Testbed Center (:term:`DTC`). + :ref:`METplus ` is a scientific verification framework that spans a wide range of temporal and spatial scales. + Full documentation for METplus is available on the `METplus website `__. -``METPLUS_VERBOSITY_LEVEL``: (Default: ``2``) - Logging verbosity level used by METplus verification tools. Valid values: 0 to 5, with 0 quiet and 5 loud. +.. _METParamNote: -Templates for Observation Files +.. note:: + Where a date field is required: + * ``YYYY`` refers to the 4-digit valid year + * ``MM`` refers to the 2-digit valid month + * ``DD`` refers to the 2-digit valid day of the month + * ``HH`` refers to the 2-digit valid hour of the day + * ``mm`` refers to the 2-digit valid minutes of the hour + * ``SS`` refers to the two-digit valid seconds of the hour + +.. _GeneralVXParams: + +General VX Parameters --------------------------------- -This section includes template variables for :term:`CCPA`, :term:`MRMS`, :term:`NOHRSC`, and :term:`NDAS` observation files. +``VX_FIELD_GROUPS``: (Default: [ "APCP", "REFC", "RETOP", "SFC", "UPA" ]) + The groups of fields (some of which may consist of only a single field) on which + to run verification. -``OBS_CCPA_APCP_FN_TEMPLATE``: (Default: ``'{valid?fmt=%Y%m%d}/ccpa.t{valid?fmt=%H}z.01h.hrap.conus.gb2'``) - File name template for CCPA accumulated precipitation (APCP) observations. This template is used by the workflow tasks that call the METplus *PcpCombine* tool on CCPA obs to find the input observation files containing 1-hour APCP and then generate NetCDF files containing either 1-hour or greater than 1-hour APCP. + Since accumulated snowfall (``ASNOW``) is often not of interest in non-winter + cases and because observation files for ``ASNOW`` are not available on NOAA + HPSS for retrospective cases before March 2020, by default ``ASNOW`` is not + included in ``VX_FIELD_GROUPS``, but it may be added to this list in order to + include the verification tasks for ``ASNOW`` in the workflow. Valid values: + ``"APCP"`` | ``"ASNOW"`` | ``"REFC"`` | ``"RETOP"`` | ``"SFC"`` | ``"UPA"`` + +``VX_APCP_ACCUMS_HRS``: (Default: [ 1, 3, 6, 24 ]) + The accumulation intervals (in hours) to include in the verification of + accumulated precipitation (APCP). If ``VX_FIELD_GROUPS`` contains ``"APCP"``, + then ``VX_APCP_ACCUMS_HRS`` must contain at least one element. Otherwise, + ``VX_APCP_ACCUMS_HRS`` will be ignored. Valid values: ``1`` | ``3`` | ``6`` | ``24`` + +``VX_ASNOW_ACCUMS_HRS``: (Default: [ 6, 24 ]) + The accumulation intervals (in hours) to include in the verification of + accumulated snowfall (ASNOW). If ``VX_FIELD_GROUPS`` contains ``"ASNOW"``, + then ``VX_ASNOW_ACCUMS_HRS`` must contain at least one element. Otherwise, + ``VX_ASNOW_ACCUMS_HRS`` will be ignored. Valid values: ``6`` | ``12`` | ``18`` | ``24`` + +``VX_CONFIG_[DET|ENS]_FN``: (Default: ``vx_configs/vx_config_[det|ens].yaml``) + Names of configuration files for deterministic and ensemble verification + that specify the field groups, field names, levels, and (if applicable) + thresholds for which to run verification. 
These are relative to the + directory ``METPLUS_CONF`` in which the METplus config templates are + located. They may include leading relative paths before the file + names, e.g. ``some_dir/another_dir/vx_config_det.yaml``. -``OBS_NOHRSC_ASNOW_FN_TEMPLATE``: (Default: ``'{valid?fmt=%Y%m%d}/sfav2_CONUS_${ACCUM_HH}h_{valid?fmt=%Y%m%d%H}_grid184.grb2'``) - File name template for NOHRSC snow observations. +``VX_OUTPUT_BASEDIR``: (Default: ``'{% if user.RUN_ENVIR == "nco" %}$COMOUT/metout{% else %}{{ workflow.EXPTDIR }}{% endif %}'``) + Template for base (i.e. top-level) directory in which METplus will place + its output. -``OBS_MRMS_REFC_FN_TEMPLATE``: (Default: ``'{valid?fmt=%Y%m%d}/MergedReflectivityQCComposite_00.50_{valid?fmt=%Y%m%d}-{valid?fmt=%H%M%S}.grib2'``) - File name template for :term:`MRMS` reflectivity observations. -``OBS_MRMS_RETOP_FN_TEMPLATE``: (Default: ``'{valid?fmt=%Y%m%d}/EchoTop_18_00.50_{valid?fmt=%Y%m%d}-{valid?fmt=%H%M%S}.grib2'``) - File name template for MRMS echo top observations. +METplus-Specific Parameters +----------------------------------- -``OBS_NDAS_ADPSFCorADPUPA_FN_TEMPLATE``: (Default: ``'prepbufr.ndas.{valid?fmt=%Y%m%d%H}'``) - File name template for :term:`NDAS` surface and upper air observations. This template is used by the workflow tasks that call the METplus *Pb2nc* tool on NDAS obs to find the input observation files containing ADP surface (ADPSFC) or ADP upper air (ADPUPA) fields and then generate NetCDF versions of these files. +``METPLUS_VERBOSITY_LEVEL``: (Default: ``2``) + Logging verbosity level used by METplus verification tools. Valid values: 0 to 5, with 0 quiet and 5 loudest. -``OBS_NDAS_SFCorUPA_FN_METPROC_TEMPLATE``: (Default: ``'${OBS_NDAS_SFCorUPA_FN_TEMPLATE}.nc'``) - File name template for NDAS surface and upper air observations after processing by MET's *pb2nc* tool (to change format to NetCDF). -``OBS_CCPA_APCP_FN_TEMPLATE_PCPCOMBINE_OUTPUT``: (Default: ``'${OBS_CCPA_APCP_FN_TEMPLATE}_a${ACCUM_HH}h.nc'``) - Template used to specify the names of the output NetCDF observation files generated by the workflow verification tasks that call the METplus *PcpCombine* tool on CCPA observations. (These files will contain observations of accumulated precipitation [APCP], both for 1 hour and for > 1 hour accumulation periods, in NetCDF format.) +VX Parameters for Observations +------------------------------------- -``OBS_NDAS_ADPSFCorADPUPA_FN_TEMPLATE_PB2NC_OUTPUT``: (Default: ``'${OBS_NDAS_ADPSFCorADPUPA_FN_TEMPLATE}.nc'``) - Template used to specify the names of the output NetCDF observation files generated by the workflow verification tasks that call the METplus Pb2nc tool on NDAS observations. (These files will contain obs ADPSFC or ADPUPA fields in NetCDF format.) +.. note:: + The observation types that the SRW App can currently retrieve (if necessary) + and use in verification are: + * CCPA (Climatology-Calibrated Precipitation Analysis) + * NOHRSC (National Operational Hydrologic Remote Sensing Center) + * MRMS (Multi-Radar Multi-Sensor) + * NDAS (NAM Data Assimilation System) -VX Forecast Model Name ------------------------- + The script ``ush/get_obs.py`` contains further details on the files and + directory structure of each obs type. -``VX_FCST_MODEL_NAME``: (Default: ``'{{ nco.NET_default }}.{{ task_run_post.POST_OUTPUT_DOMAIN_NAME }}'``) - String that specifies a descriptive name for the model being verified. This is used in forming the names of the verification output files as well as in the contents of those files. 
+``[CCPA|NOHRSC|MRMS|NDAS]_OBS_AVAIL_INTVL_HRS``: (Defaults: [1|6|1|1]) + Time interval (in hours) at which the various types of obs are available + on NOAA's HPSS. -``VX_FIELDS``: (Default: [ "APCP", "REFC", "RETOP", "SFC", "UPA" ]) - The fields or groups of fields for which verification tasks will run. Because ``ASNOW`` is often not of interest in cases outside of winter, and because observation files are not located for retrospective cases on NOAA HPSS before March 2020, ``ASNOW`` is not included by default. ``"ASNOW"`` may be added to this list in order to include the related verification tasks in the workflow. Valid values: ``"APCP"`` | ``"REFC"`` | ``"RETOP"`` | ``"SFC"`` | ``"UPA"`` | ``"ASNOW"`` - -``VX_APCP_ACCUMS_HRS``: (Default: [ 1, 3, 6, 24 ]) - The accumulation periods (in hours) to consider for accumulated precipitation (APCP). If ``VX_FIELDS`` contains ``"APCP"``, then ``VX_APCP_ACCUMS_HRS`` must contain at least one element. If ``VX_FIELDS`` does not contain ``"APCP"``, ``VX_APCP_ACCUMS_HRS`` will be ignored. Valid values: ``1`` | ``3`` | ``6`` | ``24`` + Note that MRMS files are in fact available every few minutes, but here + we set the obs availability interval to 1 hour because currently that + is the shortest output interval for forecasts, i.e. the forecasts cannot + (yet) support sub-hourly output. -``VX_ASNOW_ACCUMS_HRS``: (Default: [ 6, 24 ]) - The accumulation periods (in hours) to consider for ``ASNOW`` (accumulated snowfall). If ``VX_FIELDS`` contains ``"ASNOW"``, then ``VX_ASNOW_ACCUMS_HRS`` must contain at least one element. If ``VX_FIELDS`` does not contain ``"ASNOW"``, ``VX_ASNOW_ACCUMS_HRS`` will be ignored. Valid values: ``6`` | ``24`` +``[CCPA|NOHRSC|MRMS|NDAS]_OBS_DIR``: (Default: ``"{{ workflow.EXPTDIR }}/obs_data/[ccpa|nohrsc|mrms|ndas]"``) + Base directory in which CCPA, NOHRSC, MRMS, or NDAS obs files needed by + the verification tasks are located. If the files do not exist, they + will be retrieved and placed under this directory. Note that: -Verification (VX) Directories ------------------------------- + * If the obs files need to be retrieved (e.g. from NOAA's HPSS), because + they are not already staged on disk, then the user must have write + permission to this directory. Otherwise, the ``get_obs`` workflow + tasks that attempt to create these files will fail. + + * CCPA obs contain errors in the metadata for a certain range of dates + that need to be corrected during obs retrieval. This is described + in more detail in the script ``ush/get_obs.py``. + +``OBS_[CCPA|NOHRSC|MRMS|NDAS]_FN_TEMPLATES``: + **Defaults:** + + ``OBS_CCPA_FN_TEMPLATES``: + .. code-block:: console + + [ 'APCP', + '{%- set obs_avail_intvl_hrs = "%02d" % CCPA_OBS_AVAIL_INTVL_HRS %} + {{- "{valid?fmt=%Y%m%d}/ccpa.t{valid?fmt=%H}z." ~ obs_avail_intvl_hrs ~ "h.hrap.conus.gb2" }}' ] + + ``OBS_NOHRSC_FN_TEMPLATES``: + .. code-block:: console + + [ 'ASNOW', + '{%- set obs_avail_intvl_hrs = "%d" % NOHRSC_OBS_AVAIL_INTVL_HRS %} + {{- "sfav2_CONUS_" ~ obs_avail_intvl_hrs ~ "h_{valid?fmt=%Y%m%d%H}_grid184.grb2" }}' ] + + ``OBS_MRMS_FN_TEMPLATES``: + .. code-block:: console + + [ 'REFC', '{valid?fmt=%Y%m%d}/MergedReflectivityQCComposite_00.50_{valid?fmt=%Y%m%d}-{valid?fmt=%H%M%S}.grib2', + 'RETOP', '{valid?fmt=%Y%m%d}/EchoTop_18_00.50_{valid?fmt=%Y%m%d}-{valid?fmt=%H%M%S}.grib2' ] + + ``OBS_NDAS_FN_TEMPLATES``: + .. code-block:: console + + [ 'SFCandUPA', 'prepbufr.ndas.{valid?fmt=%Y%m%d%H}' ] + + File name templates for various obs types. 
These are meant to be used + in METplus configuration files and thus contain METplus time formatting + strings. Each of these variables is a python list containing pairs of + values. The first element of each pair specifies the verification field + group(s) for which the file name template will be needed, and the second + element is the file name template itself, which may include a leading + relative directory. (Here, by "verification field group", we mean a + group of fields that is verified together in the workflow; see the + description of the variable ``VX_FIELD_GROUPS``.) For example, for CCPA + obs, the variable name is ``OBS_CCPA_FN_TEMPLATES``. From the default value + of this variable given above, we see that if ``CCPA_OBS_AVAIL_INTVL_HRS`` + is set to 1 (i.e. the CCPA obs are assumed to be available every hour) + and the valid time is 2024042903, then the obs file (including a relative + path) to look for and, if necessary, create is + + ``20240429/ccpa.t03z.01h.hrap.conus.gb2`` + + This file will be used in the verification of fields under the APCP + field group (which consist of accumulated precipitation for the + accumulation intervals specified in ``VX_APCP_ACCUMS_HRS``). + + Note that: + + * The file name templates are relative to the obs base directories given in + the variables + + ``[CCPA|NOHRSC|MRMS|NDAS]_OBS_DIR`` + + defined above. Thus, the template for the full path to the obs files + is given, e.g. for CCPA obs, by + + .. code-block:: console + + CCPA_OBS_DIR/OBS_CCPA_FN_TEMPLATES[1] + + where the ``[1]`` indicates the second element of the list ``OBS_CCPA_FN_TEMPLATES``. + + * The file name templates may represent file names only, or they may + include leading relative directories. + + * The default values of these variables for the CCPA, NOHRSC, and NDAS + obs types contain only one pair of values (because these obs types + contain only one set of files that we use in the verification) while + the default value for the MRMS obs type contains two pairs of values, + one for the set of files that contains composite reflectivity data + and another for the set that contains echo top data. This is simply + because the MRMS obs type does not group all its fields together into + one set of files as does, for example, the NDAS obs type. + + * Each file name template must contain full information about the year, + month, day, and hour by including METplus time formatting strings for + this information. Some of this information (e.g. the year, month, + and day) may be in the relative directory portion of the template and + the rest (e.g. the hour) in the file name, or there may be no relative + directory portion and all of this information may be in the file name, + but all four pieces of timing information must be present somewhere in + each template as METplus time formatting strings. If not, obs files + created by the ``get_obs`` tasks for different days might overwrite each + other. + + * The workflow generation scripts create a ``get_obs`` task for each obs + type that is needed in the verification and for each day on which that + obs type is needed for at least some hours. That ``get_obs`` task first + checks whether all the necessary obs files for that day already exist + at the locations specified by the full path template(s) (which are + obtained by combining the base directories ``[CCPA|NOHRSC|MRMS|NDAS]_OBS_DIR`` + with the file name template(s)).
If for a given day one or more of + these obs files do not exist on disk, the ``get_obs`` task will retrieve + "raw" versions of these files from a data store (e.g. NOAA's HPSS) + and will place them in a temporary "raw" directory. It will then + move or copy these raw files to the locations specified by the full + path template(s). + + * The raw obs files, i.e. the obs files as they are named and arranged + in the data stores and retrieved and placed in the raw directories, + may be arranged differently and/or have names that are different from + the ones specified in the file name templates. If so, they are renamed + while being moved or copied from the raw directories to the locations + specified by the full path template(s). (The lists of templates for + searching for and retrieving files from the data stores are different + from the METplus templates described here; the former are given in + the data retrieval configuration file at ``parm/data_locations.yml``.) + + * When the ex-scripts for the various vx tasks are converted from bash + to python scripts, these variables should be converted from python + lists to python dictionaries, where the first element of each pair + becomes the key and the second becomes the value. This currently + cannot be done due to limitations in the workflow on converting + python dictionaries to bash variables. + +``REMOVE_RAW_OBS_DIRS_[CCPA|NOHRSC|MRMS|NDAS]``: (Defaults: [True|True|True|True]) + Flag specifying whether to remove the "raw" observation directories + after retrieving the specified type of obs (CCPA, NOHRSC, MRMS, or + NDAS) from a data store (e.g. NOAA's HPSS). The raw directories + are the ones in which the observation files are placed immediately + after pulling them from the data store but before performing any + processing on them such as renaming the files and/or reorganizing + their directory structure. + +``OBS_CCPA_APCP_FN_TEMPLATE_PCPCOMBINE_OUTPUT``: + **Default:** -``VX_FCST_INPUT_BASEDIR``: (Default: ``'{% if user.RUN_ENVIR == "nco" %}$COMOUT/../..{% else %}{{ workflow.EXPTDIR }}{% endif %}'``) - Template for top-level directory containing forecast (but not obs) files that will be used as input into METplus for verification. + .. code-block:: console -``VX_OUTPUT_BASEDIR``: (Default: ``'{% if user.RUN_ENVIR == "nco" %}$COMOUT/metout{% else %}{{ workflow.EXPTDIR }}{% endif %}'``) - Template for top-level directory in which METplus will place its output. + {%- set obs_avail_intvl_hrs = "%02d" % CCPA_OBS_AVAIL_INTVL_HRS %} + {{- "ccpa.t{valid?fmt=%H}z." ~ obs_avail_intvl_hrs ~ "h.hrap.conus.gb2_a${ACCUM_HH}h.nc" }} -``VX_NDIGITS_ENSMEM_NAMES``: 3 - Number of digits in the ensemble member names. This is a configurable variable to allow users to change its value (e.g., to go from "mem004" to "mem04") when using staged forecast files that do not use the same number of digits as the SRW App. + METplus template for the names of the NetCDF files generated by the + workflow verification tasks that call METplus's PcpCombine tool on + CCPA observations. These files will contain observed accumulated + precipitation in NetCDF format for various accumulation intervals. -Verification (VX) File Name and Path Templates ------------------------------------------------- +``OBS_NOHRSC_ASNOW_FN_TEMPLATE_PCPCOMBINE_OUTPUT``: + **Default:** -This section contains file name and path templates used in the verification (VX) tasks. + ..
code-block:: console -``FCST_SUBDIR_TEMPLATE``: (Default: ``'{% if user.RUN_ENVIR == "nco" %}${NET_default}.{init?fmt=%Y%m%d?shift=-${time_lag}}/{init?fmt=%H?shift=-${time_lag}}{% else %}{init?fmt=%Y%m%d%H?shift=-${time_lag}}{% if global.DO_ENSEMBLE %}/${ensmem_name}{% endif %}/postprd{% endif %}'``) - A template for the subdirectory containing input forecast files for VX tasks. + {%- set obs_avail_intvl_hrs = "%d" % NOHRSC_OBS_AVAIL_INTVL_HRS %} + {{- "sfav2_CONUS_" ~ obs_avail_intvl_hrs ~ "h_{valid?fmt=%Y%m%d%H}_grid184.grb2_a${ACCUM_HH}h.nc" }} -``FCST_FN_TEMPLATE``: (Default: ``'${NET_default}.t{init?fmt=%H?shift=-${time_lag}}z{% if user.RUN_ENVIR == "nco" and global.DO_ENSEMBLE %}.${ensmem_name}{% endif %}.prslev.f{lead?fmt=%HHH?shift=${time_lag}}.${POST_OUTPUT_DOMAIN_NAME}.grib2'``) - A template for the forecast file names used as input to verification tasks. + METplus template for the names of the NetCDF files generated by the + workflow verification tasks that call METplus's PcpCombine tool on + NOHRSC observations. These files will contain observed accumulated + snowfall for various accumulation intervals. -``FCST_FN_METPROC_TEMPLATE``: (Default: ``'${NET_default}.t{init?fmt=%H}z{% if user.RUN_ENVIR == "nco" and global.DO_ENSEMBLE %}.${ensmem_name}{% endif %}.prslev.f{lead?fmt=%HHH}.${POST_OUTPUT_DOMAIN_NAME}_${VAR}_a${ACCUM_HH}h.nc'``) - A template for how to name the forecast files for accumulated precipitation (APCP) with greater than 1-hour accumulation (i.e., 3-, 6-, and 24-hour accumulations) after processing by ``PcpCombine``. +``OBS_NDAS_SFCandUPA_FN_TEMPLATE_PB2NC_OUTPUT``: (Default: ``'${OBS_NDAS_FN_TEMPLATES[1]}.nc'``) + METplus template for the names of the NetCDF files generated by the + workflow verification tasks that call METplus's Pb2nc tool on the + prepbufr files in NDAS observations. These files will contain the + observed surface (SFC) and upper-air (UPA) fields in NetCDF format + (instead of NDAS's native prepbufr format). ``NUM_MISSING_OBS_FILES_MAX``: (Default: 2) - For verification tasks that need observational data, this specifies the maximum number of observation files that may be missing. If more than this number are missing, the verification task will error out. - Note that this is a crude way of checking that there are enough observations to conduct verification since this number should probably depend on the field being verified, the time interval between observations, the length of the forecast, etc. An alternative may be to specify the maximum allowed fraction of observation files that can be missing (i.e., the number missing divided by the number that are expected to exist). + For verification tasks that need observational data, this specifies + the maximum number of observation files that may be missing. If more + than this number are missing, the verification task will error out. + This is a crude way of checking that there are enough obs to conduct + verification (crude because this number should probably depend on the + field being verified, the time interval between observations, the + length of the forecast, etc.; an alternative may be to specify the + maximum allowed fraction of obs files that can be missing). + + +VX Parameters for Forecasts +---------------------------------- + +``VX_FCST_MODEL_NAME``: (Default: ``'{{ nco.NET_default }}.{{ task_run_post.POST_OUTPUT_DOMAIN_NAME }}'``) + String that specifies a descriptive name for the model being verified.
+ This is used in forming the names of the verification output files and + is also included in the contents of those files. + +``VX_FCST_OUTPUT_INTVL_HRS``: (Default: 1) + The forecast output interval (in hours) to assume for verification + purposes. + + .. note:: + If/when a variable is created in this configuration file that specifies + the forecast output interval for native SRW forecasts, it should be + used as the default value of this variable. + +``VX_FCST_INPUT_BASEDIR``: (Default: ``'{% if user.RUN_ENVIR == "nco" %}$COMOUT/../..{% else %}{{ workflow.EXPTDIR }}{% endif %}'``) + METplus template for the name of the base (i.e. top-level) directory + containing the forecast files to use as inputs to the verification + tasks. + +``FCST_SUBDIR_TEMPLATE``: + **Default:** + + .. code-block:: console + + {%- if user.RUN_ENVIR == "nco" %} + {{- "${NET_default}.{init?fmt=%Y%m%d?shift=-${time_lag}}/{init?fmt=%H?shift=-${time_lag}}" }} + {%- else %} + {{- "{init?fmt=%Y%m%d%H?shift=-${time_lag}}" }} + {%- if global.DO_ENSEMBLE %} + {{- "/${ensmem_name}" }} + {%- endif %} + {{- "/postprd" }} + {%- endif %} + + METplus template for the name of the subdirectory containing forecast + files to use as inputs to the verification tasks. + +``FCST_FN_TEMPLATE``: + **Default:** + + .. code-block:: console + + {{- "${NET_default}.t{init?fmt=%H?shift=-${time_lag}}z" }} + {%- if user.RUN_ENVIR == "nco" and global.DO_ENSEMBLE %} + {{- ".${ensmem_name}" }} + {%- endif %} + {{- ".prslev.f{lead?fmt=%HHH?shift=${time_lag}}.${POST_OUTPUT_DOMAIN_NAME}.grib2" }} + + METplus template for the names of the forecast files to use as inputs + to the verification tasks. + +``FCST_FN_TEMPLATE_PCPCOMBINE_OUTPUT``: + **Default:** + + .. code-block:: console + + {{- "${NET_default}.t{init?fmt=%H}z" }} + {%- if user.RUN_ENVIR == "nco" and global.DO_ENSEMBLE %} + {{- ".${ensmem_name}" }} + {%- endif %} + {{- ".prslev.f{lead?fmt=%HHH}.${POST_OUTPUT_DOMAIN_NAME}_${VAR}_a${ACCUM_HH}h.nc" }} + + METplus template for the names of the NetCDF files generated by the + workflow verification tasks that call METplus's PcpCombine tool on + forecast output. These files will contain forecast accumulated + precipitation in NetCDF format for various accumulation intervals. + +``VX_NDIGITS_ENSMEM_NAMES``: (Default: 3) + Number of digits to assume/use in the forecast ensemble member identifier + string used in directory and file names and other instances in which the + ensemble member needs to be identified. For example, if this is set to + 3, the identifier for ensemble member 4 will be "mem004", while if it's + set to 2, the identifier will be "mem04". This is useful when verifying + staged forecast files from a forecasting model/system other than the + SRW App that uses a different number of digits in the ensemble member + identifier string. ``NUM_MISSING_FCST_FILES_MAX``: (Default: 0) - For verification tasks that need forecast data, this specifies the maximum number of post-processed forecast files that may be missing. If more than this number are missing, the verification task will not be run. + For verification tasks that need forecast data, this specifies the + maximum number of post-processed forecast files that may be missing. + If more than this number are missing, the verification task will exit + with an error.
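+
+As a minimal sketch, a ``verification:`` block that adds snowfall verification to the default field groups might look like the following (the accumulation intervals shown are simply the defaults, and the model name is purely illustrative):
+
+.. code-block:: console
+
+   verification:
+     VX_FIELD_GROUPS: [ "APCP", "ASNOW", "REFC", "RETOP", "SFC", "UPA" ]
+     VX_APCP_ACCUMS_HRS: [ 1, 3, 6, 24 ]
+     VX_ASNOW_ACCUMS_HRS: [ 6, 24 ]
+     VX_FCST_MODEL_NAME: 'my_expt.conus_3km'   # illustrative name, not a default
+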
+ Coupled AQM Configuration Parameters ===================================== @@ -1744,38 +1991,14 @@ Non-default parameters for coupled Air Quality Modeling (AQM) tasks are set in t ``DO_AQM_SAVE_FIRE``: (Default: false) Archive fire emission file to HPSS. -``DCOMINbio_default``: (Default: "") - Path to the directory containing AQM bio files. - -``DCOMINdust_default``: (Default: "/path/to/dust/dir") - Path to the directory containing AQM dust file. - -``DCOMINcanopy_default``: (Default: "/path/to/canopy/dir") - Path to the directory containing AQM canopy files. - -``DCOMINfire_default``: (Default: "") - Path to the directory containing AQM fire files. - -``DCOMINchem_lbcs_default``: (Default: "") - Path to the directory containing chemical LBC files. - -``DCOMINgefs_default``: (Default: "") - Path to the directory containing GEFS aerosol LBC files. - -``DCOMINpt_src_default``: (Default: "/path/to/point/source/base/directory") - Parent directory containing point source files. - -``DCOMINairnow_default``: (Default: "/path/to/airnow/obaservation/data") +``COMINairnow_default``: (Default: "/path/to/airnow/observation/data") Path to the directory containing AIRNOW observation data. -``COMINbicor``: (Default: "/path/to/historical/airnow/data/dir") - Path of reading in historical training data for bias correction. - -``COMOUTbicor``: (Default: "/path/to/historical/airnow/data/dir") - Path to save the current cycle's model output and AirNow observations as training data for future use. ``$COMINbicor`` and ``$COMOUTbicor`` can be distinguished by the ``${yyyy}${mm}${dd}`` under the same location. +``COMINfire_default``: (Default: "") + Path to the directory containing AQM fire files. -``AQM_CONFIG_DIR``: (Default: "") - Configuration directory for AQM. +``COMINgefs_default``: (Default: "") + Path to the directory containing GEFS aerosol LBC files. ``AQM_BIO_FILE``: (Default: "BEIS_SARC401.ncf") File name of AQM BIO file. @@ -1801,9 +2024,6 @@ Non-default parameters for coupled Air Quality Modeling (AQM) tasks are set in t ``AQM_FIRE_FILE_OFFSET_HRS``: (Default: 0) Time offset when retrieving fire emission data files. In a real-time run, the data files for :term:`ICs/LBCs` are not ready for use until the case starts. To resolve this issue, a real-time run uses the input data files in the previous cycle. For example, if the experiment run cycle starts at 12z, and ``AQM_FIRE_FILE_OFFSET_HRS: 6``, the fire emission data file from the previous cycle (06z) is used. -``AQM_FIRE_ARCHV_DIR``: (Default: "/path/to/archive/dir/for/RAVE/on/HPSS") - Path to the archive directory for RAVE emission files on :term:`HPSS`. - ``AQM_RC_FIRE_FREQUENCY``: (Default: "static") Fire frequency in ``aqm.rc``. @@ -1822,12 +2042,6 @@ Non-default parameters for coupled Air Quality Modeling (AQM) tasks are set in t ``AQM_GEFS_FILE_CYC``: (Default: "") Cycle of the GEFS aerosol LBC files only if it is fixed. -``NEXUS_INPUT_DIR``: (Default: "") - Same as ``GRID_DIR`` but for the the air quality emission generation task. Should be blank for the default value specified in ``setup.sh``. - -``NEXUS_FIX_DIR``: (Default: "") - Directory containing ``grid_spec`` files as the input file of NEXUS. - ``NEXUS_GRID_FN``: (Default: "grid_spec_GSD_HRRR_25km.nc") File name of the input ``grid_spec`` file of NEXUS. @@ -1843,6 +2057,117 @@ Non-default parameters for coupled Air Quality Modeling (AQM) tasks are set in t ``NEXUS_GFS_SFC_ARCHV_DIR``: (Default: "/NCEPPROD/hpssprod/runhistory") Path to archive directory for gfs surface files on HPSS.
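+
+As an illustration, pointing the workflow at locally staged AIRNOW and GEFS aerosol data might look like the following sketch (the paths are placeholders, not real locations):
+
+.. code-block:: console
+
+   # Placeholder paths; substitute the actual staged-data locations on your system
+   COMINairnow_default: /path/to/airnow/observation/data
+   COMINgefs_default: /path/to/gefs/aerosol/lbc/data
+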
+.. _fire-parameters: + +Community Fire Behavior Model Parameters +======================================== + +Non-default parameters for the Community Fire Behavior Model (CFBM) in SRW are set in the ``fire:`` section of the ``config.yaml`` file. + +``UFS_FIRE``: (Default: false) + Flag for turning on the CFBM fire simulation. + +``FIRE_INPUT_DIR``: (Default: "") + Directory where the fire input file (``geo_em.d01.nc``) can be found. + +``DT_FIRE``: (Default: 0.5) + The fire behavior component's integration timestep in seconds. + +``OUTPUT_DT_FIRE``: (Default: 300) + The fire behavior component's output timestep in seconds. + +``FIRE_NUM_TASKS``: (Default: 0) + Number of MPI tasks assigned to the FIRE_BEHAVIOR component. Currently only 1 task is supported. + +.. note:: + The following options control namelist values in the ``&fire`` section of the Community Fire + Behavior Model. See the :fire-ug:`CFBM Users Guide ` for more information. + +``FIRE_PRINT_MSG``: (Default: 0) + Debug print level for the weather model fire core. Levels greater than 1 will print extra + messages to the log file at run time. + + 0: No extra prints + + 1: Extra prints + + 2: More extra prints + + 3: Even more extra prints + +``FIRE_WIND_HEIGHT``: (Default: 5.0) + Height to interpolate winds to for calculating the fire spread rate. + +``FIRE_ATM_FEEDBACK``: (Default: 0.0) + Multiplier for heat fluxes. Use 1.0 for normal two-way coupling. Use 0.0 for one-way coupling. + Intermediate values will vary the amount of forcing provided from the fire to the dynamical core. + +``FIRE_VISCOSITY``: (Default: 0.4) + Artificial viscosity in level set method. Maximum value of 1. Required for ``FIRE_UPWINDING=0``. + +``FIRE_UPWINDING``: (Default: 9) + Upwinding scheme used for calculating the normal spread of the fire front. More detailed descriptions + of these options can be found in the :fire-ug:`CFBM Users Guide `. + + 0 = Central Difference + + 1 = Standard + + 2 = Godunov + + 3 = ENO1 + + 4 = Sethian + + 5 = 2nd-order Sethian + + 6 = WENO3 + + 7 = WENO5 + + 8 = Hybrid WENO3/ENO1 + + 9 = Hybrid WENO5/ENO1 + +``FIRE_LSM_ZCOUPLING``: (Default: false) + When true, uses ``FIRE_LSM_ZCOUPLING_REF`` instead of ``FIRE_WIND_HEIGHT`` as a reference height + to calculate the logarithmic surface layer wind profile. + +``FIRE_LSM_ZCOUPLING_REF``: (Default: 60.0) + Reference height from which the velocity at ``FIRE_WIND_HEIGHT`` is calculated using a logarithmic profile. + + +``FIRE_NUM_IGNITIONS``: (Default: 1) + Number of fire ignitions. + +..
note:: + If ``FIRE_NUM_IGNITIONS > 1``, the following variables should be lists with one entry for each ignition. + +``FIRE_IGNITION_ROS``: (Default: 0.0) + Ignition rate of spread (Rothermel parameterization). + +``FIRE_IGNITION_START_LAT``: (Default: 40.609) + Latitude for start of ignition(s). + +``FIRE_IGNITION_START_LON``: (Default: -105.879) + Longitude for start of ignition(s). + +``FIRE_IGNITION_END_LAT``: (Default: 40.609) + Latitude for end of ignition(s). + +``FIRE_IGNITION_END_LON``: (Default: -105.879) + Longitude for end of ignition(s). + +``FIRE_IGNITION_RADIUS``: (Default: 250) + Radius of ignition area in meters. + +``FIRE_IGNITION_START_TIME``: (Default: 6480) + Start time of ignition(s) in seconds (counting from the beginning of the simulation). + +``FIRE_IGNITION_END_TIME``: (Default: 7000) + End time of ignition(s) in seconds (counting from the beginning of the simulation). + + Rocoto Parameters =================== diff --git a/doc/UsersGuide/CustomizingTheWorkflow/DefineWorkflow.rst b/doc/UsersGuide/CustomizingTheWorkflow/DefineWorkflow.rst index 4ea8f7052d..b5d587969e 100644 --- a/doc/UsersGuide/CustomizingTheWorkflow/DefineWorkflow.rst +++ b/doc/UsersGuide/CustomizingTheWorkflow/DefineWorkflow.rst @@ -6,7 +6,7 @@ Defining an SRW App Workflow Many predefined workflows with optional variants exist within the Short-Range Weather Application, but the Application also includes the ability to define a new workflow from scratch. This functionality allows users to add tasks to the workflow to meet their scientific exploration needs. -Rocoto is the primary workflow manager software used by the UFS SRW App. Rocoto workflows are defined in an XML file (``FV3LAM_wflow.xml``) based on parameters set during experiment generation. This section explains how the Rocoto XML is built using a Jinja2 template (`Jinja docs here `__) and structured YAML files. The YAML follows the requirements in the `Rocoto documentation `__ with a few exceptions or additions outlined in this documentation. +Rocoto is the primary workflow manager software used by the UFS SRW App. Rocoto workflows are defined in an XML file (``FV3LAM_wflow.xml``) based on parameters set during experiment generation. This section explains how the Rocoto XML is built using a Jinja2 template (`Jinja docs here `_) and structured YAML files. The YAML follows the requirements in the `Rocoto documentation `__ with a few exceptions or additions outlined in this documentation. The Jinja2 Template =================== diff --git a/doc/UsersGuide/CustomizingTheWorkflow/InputOutputFiles.rst b/doc/UsersGuide/CustomizingTheWorkflow/InputOutputFiles.rst index 128b080655..bf24055de4 100644 --- a/doc/UsersGuide/CustomizingTheWorkflow/InputOutputFiles.rst +++ b/doc/UsersGuide/CustomizingTheWorkflow/InputOutputFiles.rst @@ -20,8 +20,9 @@ The external model files needed for initializing an experiment can be obtained i ways, including: * Pulled from the `SRW App Data Bucket `__, - * Pulled from the NOAA High Performance Storage System (:term:`HPSS`) during the workflow execution (requires user access), or - * Obtained and staged by the user from a different source. + * Pulled from the NOAA High Performance Storage System (:term:`HPSS`) during the workflow execution (requires user access), + * Obtained and staged by the user from a different source, or + * Pulled from the `RRFS data bucket (rrfs_a data) `_. The data format for these files can be :term:`GRIB2` or :term:`NEMSIO`.
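+
+As an illustration, a single RRFS file could be pulled directly from the bucket with ``wget``; the path below is hypothetical, and users should browse the bucket index to confirm which dates, cycles, and forecast hours actually exist:
+
+.. code-block:: console
+
+   # Hypothetical path; check the rrfs_a bucket index for actual dates and cycles
+   wget https://noaa-rrfs-pds.s3.amazonaws.com/rrfs_a/rrfs_a.20240201/00/rrfs.t00z.prslev.f003.conus.grib2
+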
More information on downloading and setting up the external model data can be found in :numref:`Section %s `. Once the data is set up, the end-to-end application will run the system and write output files to disk. @@ -225,14 +226,14 @@ A set of input files, including static (fix) data and raw initial and lateral bo Static Files -------------- -Static files are available in the `"fix" directory `__ of the SRW App Data Bucket. Users can download the full set of fix files as a tar file: +Static files are available in the `"fix" directory `__ of the SRW App Data Bucket. Users can download the full set of fix files as a tar file: .. code-block:: console - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/current_srw_release_data/fix_data.tgz + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/experiment-user-cases/release-public-v2.2.0/out-of-the-box/fix_data.tgz tar -xzf fix_data.tgz -Alternatively, users can download the static files individually from the `"fix" directory `__ of the SRW Data Bucket using the ``wget`` command for each required file. Users will need to create an appropriate directory structure for the files when downloading them individually. The best solution is to download the files into directories that mirror the structure of the `Data Bucket `__. +Alternatively, users can download the static files individually from the `"fix" directory `__ of the SRW Data Bucket using the ``wget`` command for each required file. Users will need to create an appropriate directory structure for the files when downloading them individually. The best solution is to download the files into directories that mirror the structure of the `Data Bucket `__. The environment variables ``FIXgsm``, ``FIXorg``, and ``FIXsfc`` indicate the path to the directories where the static files are located. After downloading the experiment data, users must set the paths to the files in ``config.yaml``. Add the following code to the ``task_run_fcst:`` section of the ``config.yaml`` file, and alter the variable paths accordingly: @@ -246,13 +247,13 @@ The environment variables ``FIXgsm``, ``FIXorg``, and ``FIXsfc`` indicate the pa Initial Condition/Lateral Boundary Condition File Formats and Source ----------------------------------------------------------------------- -The SRW Application currently supports raw initial and lateral boundary conditions from numerous models (i.e., FV3GFS, GEFS, GDAS, NAM, RAP, HRRR). The data can be provided in three formats: :term:`NEMSIO`, :term:`netCDF`, or :term:`GRIB2`. +The SRW Application currently supports raw initial and lateral boundary conditions from numerous models (i.e., FV3GFS, GEFS, GDAS, NAM, RAP, HRRR, RRFS). The data can be provided in three formats: :term:`NEMSIO`, :term:`netCDF`, or :term:`GRIB2`. To download the model input data for the 12-hour "out-of-the-box" experiment configuration in ``config.community.yaml`` file, run: .. code-block:: console - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/current_srw_release_data/gst_data.tgz + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/experiment-user-cases/release-public-v2.2.0/out-of-the-box/gst_data.tgz tar -xzf gst_data.tgz To download data for different dates, model types, and formats, users can explore the ``input_model_data`` section of the data bucket and replace the links above with ones that fetch their desired data. 
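+
+For instance, a single FV3GFS GRIB2 file for the default June 15, 2019, 18 UTC case could hypothetically be fetched as follows (verify in the bucket that this exact path exists before relying on it):
+
+.. code-block:: console
+
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/input_model_data/FV3GFS/grib2/2019061518/gfs.t18z.pgrb2.0p25.f000
+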
@@ -273,7 +274,7 @@ The paths to ``EXTRN_MDL_SOURCE_BASEDIR_ICS`` and ``EXTRN_MDL_SOURCE_BASEDIR_LBC USE_USER_STAGED_EXTRN_FILES: true EXTRN_MDL_SOURCE_BASEDIR_LBCS: /path/to/ufs-srweather-app/input_model_data/FV3GFS/grib2/YYYYMMDDHH -The two ``EXTRN_MDL_SOURCE_BASEDIR_*CS`` variables describe where the :term:`IC ` and :term:`LBC ` file directories are located, respectively. For ease of reusing ``config.yaml`` across experiments, it is recommended that users set up the raw :term:`IC/LBC ` file paths to include the model name (e.g., FV3GFS, GEFS, GDAS, NAM, RAP, HRRR), data format (e.g., grib2, nemsio), and date (in ``YYYYMMDDHH`` format). For example: ``/path/to/input_model_data/FV3GFS/grib2/2019061518/``. While there is flexibility to modify these settings, this structure will provide the most reusability for multiple dates when using the SRW Application workflow. +The two ``EXTRN_MDL_SOURCE_BASEDIR_*CS`` variables describe where the :term:`IC ` and :term:`LBC ` file directories are located, respectively. For ease of reusing ``config.yaml`` across experiments, it is recommended that users set up the raw :term:`IC/LBC ` file paths to include the model name (e.g., FV3GFS, GEFS, GDAS, NAM, RAP, HRRR, RRFS), data format (e.g., grib2, nemsio, netcdf), and date (in ``YYYYMMDDHH`` format). For example: ``/path/to/input_model_data/FV3GFS/grib2/2019061518/``. While there is flexibility to modify these settings, this structure will provide the most reusability for multiple dates when using the SRW Application workflow. When files are pulled from NOAA :term:`HPSS` (rather than downloaded from the data bucket), the naming convention looks something like this: @@ -290,11 +291,12 @@ When files are pulled from NOAA :term:`HPSS` (rather than downloaded from the da * RAP (GRIB2): ``rap.t{cycle}z.wrfprsf{fhr}.grib2`` * HRRR (GRIB2): ``hrrr.t{cycle}z.wrfprsf{fhr}.grib2`` +* RRFS (GRIB2): ``rrfs.t{cycle}z.prslev.f{fhr}.conus.grib2`` where: * ``{cycle}`` corresponds to the 2-digit hour of the day when the forecast cycle starts, and - * ``{fhr}`` corresponds to the 2- or 3-digit nth hour of the forecast (3-digits for FV3GFS/GDAS data and 2 digits for RAP/HRRR data). + * ``{fhr}`` corresponds to the 2- or 3-digit nth hour of the forecast (3 digits for FV3GFS/GDAS/RRFS data and 2 digits for RAP/HRRR data). For example, a forecast using FV3GFS GRIB2 data that starts at 18h00 UTC would have a ``{cycle}`` value of 18, which is the 000th forecast hour. The LBCS file for 21h00 UTC would be named ``gfs.t18z.pgrb2.0p25.f003``. @@ -318,7 +320,7 @@ Default Initial and Lateral Boundary Conditions ----------------------------------------------- The default initial and lateral boundary condition files are set to be a severe weather case from June 15, 2019 (20190615) at 18 UTC. FV3GFS GRIB2 files are the default model and file format. A tar file -(``gst_data.tgz``) containing the model data for this case is available in the `UFS SRW App Data Bucket `__. +(``gst_data.tgz``) containing the model data for this case is available in the `UFS SRW App Data Bucket `__.
Running the App for Different Dates ----------------------------------- @@ -353,6 +355,8 @@ AWS S3 Data Buckets: * GDAS: https://registry.opendata.aws/noaa-gfs-bdp-pds/ * HRRR: https://registry.opendata.aws/noaa-hrrr-pds/ (necessary fields for initializing available for dates 2015 and newer) * A list of the NOAA Open Data Dissemination (NODD) datasets can be found here: https://www.noaa.gov/nodd/datasets +* RRFS: Experimental data is available starting 02/01/2024 for deterministic forecasts initialized hourly. Forecast data are available out to 60 hours for 00, 06, 12, and 18 UTC starting times (cycles), and out to 18 hours for other cycles. Earlier dates, from 05/01/2023 to 01/31/2024, may contain only forecasts at 00, 06, 12, and 18 UTC; users need to verify that data exist for the needed dates. + https://noaa-rrfs-pds.s3.amazonaws.com/index.html#rrfs_a/ NCEI Archive: diff --git a/doc/UsersGuide/CustomizingTheWorkflow/LAMGrids.rst b/doc/UsersGuide/CustomizingTheWorkflow/LAMGrids.rst index 1fd163e8c6..7bc13c0794 100644 --- a/doc/UsersGuide/CustomizingTheWorkflow/LAMGrids.rst +++ b/doc/UsersGuide/CustomizingTheWorkflow/LAMGrids.rst @@ -75,7 +75,7 @@ The 3-km CONUS domain is ideal for running the ``FV3_RRFS_v1beta`` physics suite The boundary of the ``RRFS_CONUS_3km`` domain is shown in :numref:`Figure %s ` (in red), and the boundary of the :ref:`write component grid ` sits just inside the computational domain (in blue). This extra grid is required because the post-processing utility (:term:`UPP`) is unable to process data on the native FV3 gnomonic grid (in red). Therefore, model data are interpolated to a Lambert conformal grid (the write component grid) in order for the :term:`UPP` to read in and correctly process the data. .. note:: - While it is possible to initialize the FV3-LAM with coarser external model data when using the ``RRFS_CONUS_3km`` domain, it is generally advised to use external model data (such as HRRR or RAP data) that has a resolution similar to that of the native FV3-LAM (predefined) grid. + While it is possible to initialize the FV3-LAM with coarser external model data when using the ``RRFS_CONUS_3km`` domain, it is generally advised to use external model data (such as HRRR, RRFS, or RAP data) that has a resolution similar to that of the native FV3-LAM (predefined) grid. Predefined SUBCONUS Grid Over Indianapolis @@ -429,37 +429,24 @@ After hitting ``Enter``, the program will print a ``pmin`` value (e.g., ``pmin= Configure the SRW App ----------------------- -To use the new ``ak``/``bk`` file to define vertical levels in an experiment, users will need to modify the input namelist file (``input.nml.FV3``) and their configuration file (``config.yaml``). - -Modify ``input.nml.FV3`` -^^^^^^^^^^^^^^^^^^^^^^^^^^ - -The FV3 namelist file, ``input.nml.FV3``, is located in ``ufs-srweather-app/parm``. Users will need to update the ``levp`` and ``npz`` variables in this file. For ``n`` vertical levels, users should set ``levp=n`` and ``npz=n-1``. For example, a user who wants 128 vertical levels would set ``levp`` and ``npz`` as follows: - -.. code-block:: console - - &external_ic_nml - levp = 128 - - &fv_core_nml - npz = 127 - -Additionally, check that ``external_eta = .true.``. - -.. note:: - - Keep in mind that levels and layers are not the same. In UFS code, ``levp`` is the number of vertical *levels*, and ``npz`` is the number of vertical levels without TOA. Thus, ``npz`` is equivalent to the number of vertical *layers*. For ``v`` vertical *layers*, set ``npz=v`` and ``levp=v+1``.
Use the value of ``levp`` as the number of vertical levels when generating ``ak``/``bk``. +To use the new ``ak``/``bk`` file to define vertical levels in an experiment, users will need to modify their configuration file (``config.yaml``). Modify ``config.yaml`` ^^^^^^^^^^^^^^^^^^^^^^^^ -To use the text file produced by ``vcoord_gen`` in the SRW App, users need to set the ``VCOORD_FILE`` variable in their ``config.yaml`` file. Normally, this file is named ``global_hyblev.l65.txt`` and is located in the ``fix_am`` directory on Level 1 systems, but users should adjust the path and name of the file to suit their system. For example, in ``config.yaml``, a user (Jane Smith) might set: +To use the new vertical levels produced by ``vcoord_gen`` in the SRW App, users need to set the ``VCOORD_FILE`` and ``LEVP`` variables in their ``config.yaml`` file. Normally, the vertical coordinate file is named ``global_hyblev.l65.txt`` and is located in the ``fix_am`` directory on Level 1 systems, but users should adjust the path and name of the file to suit their system. Additionally, update the ``LEVP`` variable for the new number of vertical levels. For example, in ``config.yaml``, a user (Jane Smith) might set: .. code-block:: console task_make_ics: + LEVP: 128 VCOORD_FILE: /Users/Jane.Smith/ufs-srweather-app/parm/global_hyblev.L128.txt task_make_lbcs: + LEVP: 128 VCOORD_FILE: /Users/Jane.Smith/ufs-srweather-app/parm/global_hyblev.L128.txt +.. note:: + + Keep in mind that levels and layers are not the same. In UFS code, ``levp`` is the number of vertical *levels*, and ``npz`` is the number of vertical levels without TOA. Thus, ``npz`` in the UFS namelist file is equivalent to the number of vertical *layers*. For ``v`` vertical *layers*, ``npz=v`` and ``levp=v+1``. Use the value of ``levp`` as the number of vertical levels when generating ``ak``/``bk``. + Configure other variables as desired and generate the experiment as described in :numref:`Section %s `. diff --git a/doc/UsersGuide/Reference/FAQ.rst b/doc/UsersGuide/Reference/FAQ.rst index 21bef328a3..b1c7bcce1f 100644 --- a/doc/UsersGuide/Reference/FAQ.rst +++ b/doc/UsersGuide/Reference/FAQ.rst @@ -20,34 +20,48 @@ Building the SRW App How can I clean up the SRW App code if something went wrong during the build? =============================================================================== -The ``ufs-srweather-app`` repository contains a ``devclean.sh`` convenience script. This script can be used to clean up code if something goes wrong when checking out externals or building the application. To view usage instructions and to get help, run with the ``-h`` flag: +The ``ufs-srweather-app`` repository contains a ``devclean.sh`` convenience script. This script can be used to clean up code if something goes wrong when checking out externals or building the application. To view usage instructions and to get help, run with the ``-h`` or ``--help`` flag: .. code-block:: console ./devclean.sh -h -To remove the ``build`` directory, run: +To remove all the build artifacts and directories except the conda installation, use the ``-b`` or ``--build`` flag: .. code-block:: console - ./devclean.sh --remove + ./devclean.sh --build -To remove all build artifacts (including ``build``, ``exec``, ``lib``, and ``share``), run: +When running the SRW App in a container, use the ``--container`` option, which removes the ``container-bin`` directory in lieu of ``exec``, i.e.: ..
code-block:: console - ./devclean.sh --clean + ./devclean.sh -b --container + +To remove only the conda directory and the ``conda_loc`` file in the main SRW App directory, run with the ``-c`` or ``--conda`` flag: + +.. code-block:: console + + ./devclean.sh --conda OR - ./devclean.sh -a + ./devclean.sh -c -To remove external submodules, run: +To remove external submodules, run with the ``-s`` or ``--sub-modules`` flag: .. code-block:: console ./devclean.sh --sub-modules +To remove all build artifacts, conda, and submodules (equivalent to ``-b -c -s``), run with the ``-a`` or ``--all`` flag: + +.. code-block:: console + + ./devclean.sh --all + + Users will need to check out the external submodules again before building the application. + In addition to the options above, many standard terminal commands can be run to remove unwanted files and directories (e.g., ``rm -rf expt_dirs``). A complete explanation of these options is beyond the scope of this User's Guide. =========================== @@ -267,12 +281,7 @@ How can I run a new experiment? To run a new experiment at a later time, users need to rerun the commands in :numref:`Section %s ` that reactivate the |wflow_env| environment: -.. code-block:: console - - source /path/to/etc/lmod-setup.sh/or/lmod-setup.csh - module use /path/to/modulefiles - module load wflow_ - +.. include:: ../../doc-snippets/load-env.rst Follow any instructions output by the console (e.g., |activate|). Then, users can configure a new experiment by updating the experiment parameters in ``config.yaml`` to reflect the desired experiment configuration. Detailed instructions can be viewed in :numref:`Section %s `. Parameters and valid values are listed in :numref:`Section %s `. After adjusting the configuration file, generate the new experiment by running ``./generate_FV3LAM_wflow.py``. Check progress by navigating to the ``$EXPTDIR`` and running ``rocotostat -w FV3LAM_wflow.xml -d FV3LAM_wflow.db -v 10``. diff --git a/doc/UsersGuide/Reference/Glossary.rst b/doc/UsersGuide/Reference/Glossary.rst index 90f9c8ab89..5b60c5b40d 100644 --- a/doc/UsersGuide/Reference/Glossary.rst +++ b/doc/UsersGuide/Reference/Glossary.rst @@ -10,7 +10,7 @@ Glossary To transport substances in the atmostphere by :term:`advection`. advection - According to the American Meteorological Society (AMS) `definition `__, advection is "The process of transport of an atmospheric property solely by the mass motion (velocity field) of the atmosphere." In common parlance, advection is movement of atmospheric substances that are carried around by the wind. + According to the American Meteorological Society (AMS) definition, `advection `_ is "The process of transport of an atmospheric property solely by the mass motion (velocity field) of the atmosphere." In common parlance, advection is movement of atmospheric substances that are carried around by the wind. AQM The `Air Quality Model `__ (AQM) is a UFS Application that dynamically couples the Community Multiscale Air Quality (:term:`CMAQ`) model with the UFS Weather Model through the :term:`NUOPC` Layer to simulate temporal and spatial variations of atmospheric compositions (e.g., ozone and aerosol compositions). The CMAQ, treated as a column chemistry model, updates concentrations of chemical species (e.g., ozone and aerosol compositions) at each integration time step. The transport terms (e.g., :term:`advection` and diffusion) of all chemical species are handled by the UFS Weather Model as :term:`tracers`.
@@ -22,11 +22,11 @@ Glossary Climatology-Calibrated Precipitation Analysis (CCPA) data. This data is required for METplus precipitation verification tasks within the SRW App. The most recent 8 days worth of data are publicly available and can be accessed `here `__. CCPP - The `Common Community Physics Package `__ is a forecast-model agnostic, vetted collection of code containing atmospheric physical parameterizations and suites of parameterizations for use in Numerical Weather Prediction (NWP) along with a framework that connects the physics to the host forecast model. + The `Common Community Physics Package `_ is a forecast-model agnostic, vetted collection of code containing atmospheric physical parameterizations and suites of parameterizations for use in Numerical Weather Prediction (NWP) along with a framework that connects the physics to the host forecast model. chgres_cube The preprocessing software used to create initial and boundary condition files to - “cold start” the forecast model. It is part of :term:`UFS_UTILS`. + "cold start" the forecast model. It is part of :term:`UFS_UTILS`. CIN Convective Inhibition. @@ -48,6 +48,9 @@ Glossary Component Repository A :term:`repository` that contains, at a minimum, source code for a single component. + composite reflectivity + `Composite reflectivity `__ is a display or mapping of the maximum radar reflectivity factor at any altitude as a function of position on the ground. + Container `Docker `__ describes a container as "a standard unit of software that packages up code and all its dependencies so the application runs quickly and reliably from one computing environment to another." @@ -70,6 +73,9 @@ Glossary data assimilation Data assimilation is the process of combining observations, model data, and error statistics to achieve the best estimate of the state of a system. One of the major sources of error in weather and climate forecasts is uncertainty related to the initial conditions that are used to generate future predictions. Even the most precise instruments have a small range of unavoidable measurement error, which means that tiny measurement errors (e.g., related to atmospheric conditions and instrument location) can compound over time. These small differences result in very similar forecasts in the short term (i.e., minutes, hours), but they cause widely divergent forecasts in the long term. Errors in weather and climate forecasts can also arise because models are imperfect representations of reality. Data assimilation systems seek to mitigate these problems by combining the most timely observational data with a "first guess" of the atmospheric state (usually a previous forecast) and other sources of data to provide a "best guess" analysis of the atmospheric state to start a weather or climate simulation. When combined with an "ensemble" of model runs (many forecasts with slightly different conditions), data assimilation helps predict a range of possible atmospheric states, giving an overall measure of uncertainty in a given forecast. + DTC + The `Developmental Testbed Center `__ is a distributed facility where the NWP community can test and evaluate new models and techniques for use in research and operations. + dycore dynamical core Global atmospheric model based on fluid dynamics principles, including Euler's equations of motion. @@ -78,16 +84,16 @@ Glossary The radar-indicated top of an area of precipitation. Specifically, it contains the height of the 18 dBZ reflectivity value. EMC - The `Environmental Modeling Center `__. 
- + The `Environmental Modeling Center `_. + EPIC - The `Earth Prediction Innovation Center `__ seeks to accelerate scientific research and modeling contributions through continuous and sustained community engagement in order to produce the most accurate and reliable operational modeling system in the world. + The `Earth Prediction Innovation Center `_ seeks to accelerate scientific research and modeling contributions through continuous and sustained community engagement in order to produce the most accurate and reliable operational modeling system in the world. ESG Extended Schmidt Gnomonic (ESG) grid. The ESG grid uses the map projection developed by Jim Purser of NOAA :term:`EMC` (:cite:t:`Purser_2020`). ESMF - `Earth System Modeling Framework `__. The ESMF defines itself as “a suite of software tools for developing high-performance, multi-component Earth science modeling applications.” + `Earth System Modeling Framework `__. The ESMF defines itself as "a suite of software tools for developing high-performance, multi-component Earth science modeling applications." ex-scripts Scripting layer (contained in ``ufs-srweather-app/scripts/``) that should be called by a :term:`J-job ` for each workflow componentto run a specific task or sub-task in the workflow. The different scripting layers are described in detail in the :nco:`NCO Implementation Standards document ` @@ -118,7 +124,7 @@ Glossary High-Performance Computing. HPC-Stack - The `HPC-Stack `__ is a repository that provides a unified, shell script-based build system for building the software stack required for numerical weather prediction (NWP) tools such as the `Unified Forecast System (UFS) `__ and the `Joint Effort for Data assimilation Integration (JEDI) `__ framework. View the HPC-Stack documentation :doc:`here `. + The `HPC-Stack `__ is a repository that provides a unified, shell script-based build system for building the software stack required for numerical weather prediction (NWP) tools such as the `Unified Forecast System (UFS) `_ and the `Joint Effort for Data assimilation Integration (JEDI) `_ framework. View the HPC-Stack documentation :doc:`here `. HPSS High Performance Storage System (HPSS). @@ -152,6 +158,9 @@ Glossary MERRA2 The `Modern-Era Retrospective analysis for Research and Applications, Version 2 `__ provides satellite observation data back to 1980. According to NASA, "It was introduced to replace the original MERRA dataset because of the advances made in the assimilation system that enable assimilation of modern hyperspectral radiance and microwave observations, along with GPS-Radio Occultation datasets. It also uses NASA's ozone profile observations that began in late 2004. Additional advances in both the GEOS model and the GSI assimilation system are included in MERRA-2. Spatial resolution remains about the same (about 50 km in the latitudinal direction) as in MERRA." + MET + The `Model Evaluation Tools `__ is a highly-configurable, state-of-the-art suite of verification tools developed at the :term:`DTC`. + MPI MPI stands for Message Passing Interface. An MPI is a standardized communication system used in parallel programming. It establishes portable and efficient syntax for the exchange of messages and data between multiple processors that are used by a single computer program. An MPI is required for high-performance computing (HPC) systems. @@ -215,7 +224,7 @@ Glossary The branch of physical geography dealing with mountains. 
Parameterizations - Simplified functions that approximate the effects of small-scale processes (e.g., microphysics, gravity wave drag) that cannot be explicitly resolved by a model grid’s representation of the earth. + Simplified functions that approximate the effects of small-scale processes (e.g., microphysics, gravity wave drag) that cannot be explicitly resolved by a model grid's representation of the earth. RAP `Rapid Refresh `__. The continental-scale NOAA hourly-updated assimilation/modeling system operational at :term:`NCEP`. RAP covers North America and is comprised primarily of a numerical forecast model and an analysis/assimilation system to initialize that model. RAP is complemented by the higher-resolution 3km High-Resolution Rapid Refresh (:term:`HRRR`) model. @@ -227,24 +236,28 @@ A central location in which files (e.g., data, code, documentation) are stored and managed. RRFS - The `Rapid Refresh Forecast System `__ (RRFS) is NOAA's next-generation convection-allowing, rapidly-updated, ensemble-based data assimilation and forecasting system currently scheduled for operational implementation in 2024. It is designed to run forecasts on a 3-km :term:`CONUS` domain. + The `Rapid Refresh Forecast System `_ (RRFS) is NOAA's next-generation convection-allowing, rapidly-updated, ensemble-based data assimilation and forecasting system currently scheduled for operational implementation in 2024. It is designed to run forecasts on a 3-km :term:`CONUS` domain; see also `NOAA Rapid Refresh Forecast System (RRFS) `__. Experimental data is currently available from the `AWS S3 NOAA-RRFS `__ bucket for deterministic forecasts out to 60 hours at 00, 06, 12, and 18 UTC. Additionally, hourly forecasts out to 18 hours may be available for more recent RRFS model runs; the user needs to verify that data exist for the needed dates. + SDF Suite Definition File. An external file containing information about the construction of a physics suite. It describes the schemes that are called, in which order they are called, whether they are subcycled, and whether they are assembled into groups to be called together. Spack - `Spack `__ is a package management tool designed to support multiple versions and configurations of software on a wide variety of platforms and environments. It was designed for large supercomputing centers, where many users and application teams share common installations of software on clusters with exotic architectures. + `Spack `_ is a package management tool designed to support multiple versions and configurations of software on a wide variety of platforms and environments. It was designed for large supercomputing centers, where many users and application teams share common installations of software on clusters with exotic architectures. spack-stack - The `spack-stack `__ is a collaborative effort between the NOAA Environmental Modeling Center (EMC), the UCAR Joint Center for Satellite Data Assimilation (JCSDA), and the Earth Prediction Innovation Center (EPIC). *spack-stack* is a repository that provides a :term:`Spack`-based method for building the software stack required for numerical weather prediction (NWP) tools such as the `Unified Forecast System (UFS) `__ and the `Joint Effort for Data assimilation Integration (JEDI) `__ framework. *spack-stack* uses the Spack package manager along with custom Spack configuration files and Python scripts to simplify installation of the libraries required to run various applications.
The *spack-stack* can be installed on a range of platforms and comes pre-configured for many systems. Users can install the necessary packages for a particular application and later add the missing packages for another application without having to rebuild the entire stack. To get started, check out the documentation :doc:`here `. + The `spack-stack `_ is a collaborative effort between the NOAA Environmental Modeling Center (EMC), the UCAR Joint Center for Satellite Data Assimilation (JCSDA), and the Earth Prediction Innovation Center (EPIC). *spack-stack* is a repository that provides a :term:`Spack`-based method for building the software stack required for numerical weather prediction (NWP) tools such as the `Unified Forecast System (UFS) `_ and the `Joint Effort for Data assimilation Integration (JEDI) `_ framework. *spack-stack* uses the Spack package manager along with custom Spack configuration files and Python scripts to simplify installation of the libraries required to run various applications. The *spack-stack* can be installed on a range of platforms and comes pre-configured for many systems. Users can install the necessary packages for a particular application and later add the missing packages for another application without having to rebuild the entire stack. To get started, check out the documentation :doc:`here `. + + submodule + A `submodule `_ is a git repository linked to another repository as a subdirectory. Many UFS components are linked in this way; for example, the :term:`UPP` repository is a submodule of the :term:`FV3` repository. tracer - According to the American Meteorological Society (AMS) `definition `__, a tracer is "Any substance in the atmosphere that can be used to track the history [i.e., movement] of an air mass." Tracers are carried around by the motion of the atmosphere (i.e., by :term:`advection`). These substances are usually gases (e.g., water vapor, CO2), but they can also be non-gaseous (e.g., rain drops in microphysics parameterizations). In weather models, temperature (or potential temperature), absolute humidity, and radioactivity are also usually treated as tracers. According to AMS, "The main requirement for a tracer is that its lifetime be substantially longer than the transport process under study." + According to the American Meteorological Society (AMS) definition, a `tracer `_ is "Any substance in the atmosphere that can be used to track the history [i.e., movement] of an air mass." Tracers are carried around by the motion of the atmosphere (i.e., by :term:`advection`). These substances are usually gases (e.g., water vapor, CO2), but they can also be non-gaseous (e.g., rain drops in microphysics parameterizations). In weather models, temperature (or potential temperature), absolute humidity, and radioactivity are also usually treated as tracers. According to AMS, "The main requirement for a tracer is that its lifetime be substantially longer than the transport process under study." UFS The Unified Forecast System is a community-based, coupled, comprehensive Earth modeling system consisting of several applications (apps). These apps span regional to global - domains and sub-hourly to seasonal time scales. The UFS is designed to support the :term:`Weather Enterprise` and to be the source system for NOAA's operational numerical weather prediction applications. For more information, visit https://ufscommunity.org/. + domains and sub-hourly to seasonal time scales. 
The UFS is designed to support the :term:`Weather Enterprise` and to be the source system for NOAA's operational numerical weather prediction applications. For more information, visit https://ufs.epic.noaa.gov/. UFS_UTILS A collection of code used by multiple :term:`UFS` apps (e.g., the UFS Short-Range Weather App, diff --git a/doc/UsersGuide/SSHIntro.rst b/doc/UsersGuide/SSHIntro.rst index 7292a37e97..25837b9cc0 100644 --- a/doc/UsersGuide/SSHIntro.rst +++ b/doc/UsersGuide/SSHIntro.rst @@ -139,7 +139,7 @@ Download the Data from a Remote System to a Local System .. note:: - Users should transfer data to or from non-:srw-wiki:`Level 1 ` platforms using the recommended approach for that platform. This section outlines some basic guidance, but users may need to supplement with research of their own. On Level 1 systems, users may find it helpful to refer to the `RDHPCS CommonDocs Wiki `__. + Users should transfer data to or from non-:srw-wiki:`Level 1 ` platforms using the recommended approach for that platform. This section outlines some basic guidance, but users may need to supplement with research of their own. On Level 1 systems, users may find it helpful to refer to the `RDHPCS Data Transfer Documentation `_. To download data using ``scp``, users can typically adjust one of the following commands for use on their system: diff --git a/doc/UsersGuide/index.rst b/doc/UsersGuide/index.rst index 58c6fe6089..3deb9351e1 100644 --- a/doc/UsersGuide/index.rst +++ b/doc/UsersGuide/index.rst @@ -2,6 +2,7 @@ User's Guide ============== .. toctree:: + :numbered: :maxdepth: 3 BackgroundInfo/index diff --git a/doc/_static/theme_overrides.css b/doc/_static/theme_overrides.css index a4e5cab82f..fe54c2e2e9 100644 --- a/doc/_static/theme_overrides.css +++ b/doc/_static/theme_overrides.css @@ -1,9 +1,10 @@ +/* !important prevents the common CSS stylesheets from overriding +this as on RTD they are loaded after this stylesheet */ + .wy-table-responsive table td { - /* !important prevents the common CSS stylesheets from overriding - this as on RTD they are loaded after this stylesheet */ white-space: normal !important; } - + .wy-nav-content { max-width: 100% !important; } @@ -12,11 +13,6 @@ overflow: visible !important; } -/* Darken background for contrast with logo */ -.wy-side-nav-search, .wy-nav-top { - background: #2779B0; -} - /* Darken navbar blue background for contrast with logo */ .wy-side-nav-search, .wy-nav-top { background: #2779B0; diff --git a/doc/conf.py b/doc/conf.py index 6b0f461ba8..e8f15567d8 100644 --- a/doc/conf.py +++ b/doc/conf.py @@ -16,6 +16,9 @@ import sys import sphinx from sphinx.util import logging +sys.path.insert(0, os.path.abspath('../ush')) +sys.path.insert(0, os.path.abspath('../tests')) +sys.path.insert(0, os.path.abspath('../tests/WE2E')) @@ -33,6 +36,9 @@ numfig = True +nitpick_ignore = [('py:class', 'obj'),('py:class', + 'yaml.dumper.Dumper'),('py:class', + 'xml.etree.ElementTree'),('py:class', 'Basemap'),] # -- General configuration --------------------------------------------------- @@ -43,10 +49,7 @@ 'sphinx.ext.doctest', 'sphinx.ext.intersphinx', 'sphinx.ext.extlinks', - 'sphinx.ext.coverage', 'sphinx.ext.mathjax', - 'sphinx.ext.ifconfig', - 'sphinx.ext.viewcode', 'sphinx.ext.napoleon', 'sphinxcontrib.bibtex', ] @@ -99,14 +102,16 @@ # Ignore working links that cause a linkcheck 403 error. 
 linkcheck_ignore = [r'https://www\.intel\.com/content/www/us/en/docs/cpp\-compiler/developer\-guide\-reference/2021\-10/thread\-affinity\-interface\.html',
                     r'https://www\.intel\.com/content/www/us/en/developer/tools/oneapi/hpc\-toolkit\-download\.html',
-                    #r'https://glossary.ametsoc.org/.*',
+                    r'https://glossary.ametsoc.org/.*',
                     ]
 
 # Ignore anchor tags for SRW App data bucket. Shows Not Found even when they exist.
 linkcheck_anchors_ignore = [r"current_srw_release_data/",
                             r"input_model_data/.*",
                             r"fix.*",
-                            r"sample_cases/.*",
+                            r"experiment-user-cases/.*",
+                            r"rrfs_a/*",
+                            r"develop-20240618/*",
                             ]
 linkcheck_allowed_redirects = {r"https://github\.com/ufs-community/ufs-srweather-app/wiki/.*":
@@ -151,6 +156,7 @@ def setup(app):
     app.add_css_file('custom.css')  # may also be an URL
     app.add_css_file('theme_overrides.css')  # may also be a URL
+    app.connect('autodoc-process-docstring', warn_undocumented_members)
 
 # Custom sidebar templates, must be a dictionary that maps document names
 # to template names.
@@ -244,6 +250,36 @@
 
 # -- Extension configuration -------------------------------------------------
 
+# -- Options for autodoc extension ---------------------------------------
+
+autodoc_mock_imports = ["f90nml","cartopy","mpl_toolkits.basemap","fill_jinja_template",
+                        "matplotlib","numpy","uwtools","mpl_toolkits","metplus",
+                        ]
+
+logger = logging.getLogger(__name__)
+
+members_to_watch = ['function', 'attribute', 'method']
+def warn_undocumented_members(app, what, name, obj, options, lines):
+    if(what in members_to_watch and len(lines)==0):
+        message = what + " is undocumented: " + name + "(%d)"% len(lines)
+        logger.warning(message)
+
+autodoc_default_options = {
+    "members": True,
+    "undoc-members": True,
+    "show-inheritance": True,
+    #"private-members": True
+}
+
+add_module_names = False
+
+# -- Options for napoleon extension ---------------------------------------
+
+napoleon_numpy_docstring = False
+napoleon_google_docstring = True
+napoleon_custom_sections = [('Returns', 'params_style')] # Allows return of multiple values
+
+
 # -- Options for intersphinx extension ---------------------------------------
 
 # Example configuration for intersphinx: refer to the Python standard library.
@@ -271,4 +307,14 @@
     'srw-repo': ('https://github.com/ufs-community/ufs-srweather-app/%s', '%s'),
     'srw-wiki': ('https://github.com/ufs-community/ufs-srweather-app/wiki/%s','%s'),
     'uw': ('https://uwtools.readthedocs.io/en/main/%s', '%s'),
-    }
\ No newline at end of file
+    'fire-ug': ('https://fire-behavior.readthedocs.io/en/latest/%s', '%s'),
+    }
+
+# Define environment variables that need to exist when running the top-level code in python
+# files (outside of functions, classes, etc.).
+#
+# METPLUS_ROOT just needs to exist in the environment; its value is not important since for
+# the purpose of building the documentation, METplus is loaded by including "metplus" in
+# the autodoc_mock_imports list above, not via use of the METPLUS_ROOT environment variable.
+os.environ["METPLUS_ROOT"] = ""
+
diff --git a/doc/doc-snippets/cron-note.rst b/doc/doc-snippets/cron-note.rst
new file mode 100644
index 0000000000..99192d0a1e
--- /dev/null
+++ b/doc/doc-snippets/cron-note.rst
@@ -0,0 +1,3 @@
+.. note::
+
+   Users may also want to set ``USE_CRON_TO_RELAUNCH: true`` and add ``CRON_RELAUNCH_INTVL_MNTS: 3``. This will automate submission of workflow tasks when running the experiment. However, not all systems have :term:`cron`.
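+
+   As a sketch (assuming, as in the community configuration file, that these settings live under the ``workflow:`` section of ``config.yaml``), the relevant lines might look like:
+
+   .. code-block:: yaml
+
+      workflow:
+        USE_CRON_TO_RELAUNCH: true    # let cron resubmit the experiment's launch script automatically
+        CRON_RELAUNCH_INTVL_MNTS: 3   # minutes to wait between relaunch attempts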
\ No newline at end of file
diff --git a/doc/doc-snippets/expt-conf-intro.rst b/doc/doc-snippets/expt-conf-intro.rst
new file mode 100644
index 0000000000..d23fc546cb
--- /dev/null
+++ b/doc/doc-snippets/expt-conf-intro.rst
@@ -0,0 +1,14 @@
+Navigate to the ``ufs-srweather-app/ush`` directory. The default (or "control") configuration for this experiment is based on the ``config.community.yaml`` file in that directory. Users can copy this file into ``config.yaml`` if they have not already done so:
+
+.. code-block:: console
+
+   cd /path/to/ufs-srweather-app/ush
+   cp config.community.yaml config.yaml
+
+Users can save the location of the ``ush`` directory in an environment variable (``$USH``). This makes it easier to navigate between directories later. For example:
+
+.. code-block:: console
+
+   export USH=/path/to/ufs-srweather-app/ush
+
+Users should substitute ``/path/to/ufs-srweather-app/ush`` with the actual path on their system. As long as a user remains logged into their system, they can run ``cd $USH``, and it will take them to the ``ush`` directory. The variable will need to be reset for each login session.
\ No newline at end of file
diff --git a/doc/doc-snippets/file-edit-hint.rst b/doc/doc-snippets/file-edit-hint.rst
new file mode 100644
index 0000000000..b7a9b99b6a
--- /dev/null
+++ b/doc/doc-snippets/file-edit-hint.rst
@@ -0,0 +1,9 @@
+.. Hint::
+
+   To open the configuration file in the command line, users may run the command:
+
+   .. code-block:: console
+
+      vi config.yaml
+
+   To modify the file, hit the ``i`` key and then make any changes required. To close and save, hit the ``esc`` key and type ``:wq`` to write the changes to the file and exit/quit the file. Users may opt to use their preferred code editor instead.
\ No newline at end of file
diff --git a/doc/doc-snippets/load-env.rst b/doc/doc-snippets/load-env.rst
new file mode 100644
index 0000000000..85afec6199
--- /dev/null
+++ b/doc/doc-snippets/load-env.rst
@@ -0,0 +1,7 @@
+.. code-block:: console
+
+   source /path/to/ufs-srweather-app/etc/lmod-setup.sh <platform>
+   module use /path/to/ufs-srweather-app/modulefiles
+   module load wflow_<platform>
+
+where ``<platform>`` is a valid, lowercased machine name (see ``MACHINE`` in :numref:`Section %s ` for valid values), and ``/path/to/`` is replaced by the actual path to the ``ufs-srweather-app``.
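+
+For example, on a machine named ``hera`` (used here purely as an illustration; substitute any valid machine name), the sequence might look like:
+
+.. code-block:: console
+
+   source /path/to/ufs-srweather-app/etc/lmod-setup.sh hera
+   module use /path/to/ufs-srweather-app/modulefiles
+   module load wflow_hera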
\ No newline at end of file
diff --git a/doc/doc-snippets/scp-files.rst b/doc/doc-snippets/scp-files.rst
new file mode 100644
index 0000000000..bc29780e9c
--- /dev/null
+++ b/doc/doc-snippets/scp-files.rst
@@ -0,0 +1,11 @@
+Users who are working on the cloud or on an HPC cluster may want to copy the ``.png`` files onto their local system to view in their preferred image viewer. Detailed instructions are available in the :ref:`Introduction to SSH & Data Transfer `.
+
+In summary, users can run the ``scp`` command in a new terminal/command prompt window to securely copy files from a remote system to their local system if an SSH tunnel is already established between the local system and the remote system. Users can adjust one of the following commands for their system:
+
+.. code-block:: console
+
+   scp username@your-IP-address:/path/to/source_file_or_directory /path/to/destination_file_or_directory
+   # OR
+   scp -P 12345 username@localhost:/path/to/source_file_or_directory /path/to/destination_file_or_directory
+
+Users would need to modify ``username``, ``your-IP-address``, ``-P 12345``, and the file paths to reflect their systems' information. See the :ref:`Introduction to SSH & Data Transfer ` for example commands.
\ No newline at end of file
diff --git a/doc/index.rst b/doc/index.rst
index c8cf2b32fc..d6a93695df 100644
--- a/doc/index.rst
+++ b/doc/index.rst
@@ -1,9 +1,9 @@
-UFS Short-Range Weather App Documentation (|version|)
-=====================================================
-
+UFS Short-Range Weather (SRW) App Documentation (|version|)
+============================================================
+
 .. toctree::
-   :numbered:
    :maxdepth: 3
 
    UsersGuide/index
+   TechDocs/index
    ContribGuide/index
diff --git a/doc/make.bat b/doc/make.bat
index 4d9eb83d9f..295c7e3942 100644
--- a/doc/make.bat
+++ b/doc/make.bat
@@ -7,7 +7,7 @@ REM Command file for Sphinx documentation
 if "%SPHINXBUILD%" == "" (
 	set SPHINXBUILD=sphinx-build
 )
-set SOURCEDIR=source
+set SOURCEDIR=doc
 set BUILDDIR=build
 
 if "%1" == "" goto help
diff --git a/doc/requirements.in b/doc/requirements.in
index 26c778f4aa..75a70ab416 100644
--- a/doc/requirements.in
+++ b/doc/requirements.in
@@ -1,3 +1,3 @@
-sphinx>=6.0.0
+sphinx>=7.4.0
 sphinx_rtd_theme
 sphinxcontrib-bibtex
diff --git a/doc/requirements.txt b/doc/requirements.txt
index 0671225d72..faa8455abe 100644
--- a/doc/requirements.txt
+++ b/doc/requirements.txt
@@ -8,7 +8,7 @@ alabaster==0.7.16
 #   via sphinx
 babel==2.14.0
 #   via sphinx
-certifi==2024.2.2
+certifi==2024.7.4
 #   via requests
 charset-normalizer==3.3.2
 #   via requests
@@ -18,11 +18,11 @@ docutils==0.20.1
 #   via
 #     sphinx
 #     sphinx-rtd-theme
 #     sphinxcontrib-bibtex
-idna==3.6
+idna==3.7
 #   via requests
 imagesize==1.4.1
 #   via sphinx
-jinja2==3.1.3
+jinja2==3.1.4
 #   via sphinx
 latexcodec==2.0.1
 #   via pybtex
@@ -40,7 +40,7 @@ pygments==2.17.2
 #   via sphinx
 pyyaml==6.0.1
 #   via pybtex
-requests==2.31.0
+requests==2.32.2
 #   via sphinx
 six==1.16.0
 #   via
@@ -48,7 +48,7 @@ six==1.16.0
 #     pybtex
 snowballstemmer==2.2.0
 #   via sphinx
-sphinx==7.2.6
+sphinx==7.4.7
 #   via
 #     -r requirements.in
 #     sphinx-rtd-theme
@@ -72,5 +72,5 @@ sphinxcontrib-qthelp==1.0.7
 #   via sphinx
 sphinxcontrib-serializinghtml==1.1.10
 #   via sphinx
-urllib3==2.2.0
+urllib3==2.2.2
 #   via requests
diff --git a/doc/tables/Tests.csv b/doc/tables/Tests.csv
index d063e5c3fa..325b4dad69 100644
--- a/doc/tables/Tests.csv
+++ b/doc/tables/Tests.csv
@@ -1,6 +1,5 @@
 Fundamental,Comprehensive,Test Name,PREDEF_GRID_NAME,CCPP_PHYS_SUITE,EXTRN_MDL_NAME_ICS,EXTRN_MDL_NAME_LBCS,DATES (UTC),FCST_LEN_HRS (hrs),est. core hours, walltime (min),notes
 yes,yes,grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta,RRFS_CONUScompact_25km,FV3_RRFS_v1beta,HRRR,RAP,2020081000,3,8,22,
-yes,yes,nco_grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_timeoffset_suite_GFS_v16,RRFS_CONUS_25km,FV3_GFS_v16,FV3GFS,FV3GFS,2022081012,6,10,15,
 yes,yes,grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v15p2,RRFS_CONUS_25km,FV3_GFS_v15p2,FV3GFS,FV3GFS,2019070100,6,7,10,
 yes,yes,grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v17_p8_plot,RRFS_CONUS_25km,FV3_GFS_v17_p8,FV3GFS,FV3GFS,2019070100,6,11,15,
 yes,yes,grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_HRRR_suite_HRRR,RRFS_CONUScompact_25km,FV3_HRRR,HRRR,HRRR,2020081000,24,26,20
@@ -50,11 +49,6 @@ yes,yes,grid_RRFS_CONUS_25km_ics_NAM_lbcs_NAM_suite_GFS_v16,RRFS_CONUS_25km,FV3_
 ,yes,grid_SUBCONUS_Ind_3km_ics_RAP_lbcs_RAP_suite_RRFS_v1beta,SUBCONUS_Ind_3km,FV3_RRFS_v1beta,RAP,RAP,2020080103,3,20,22,
 ,yes,MET_ensemble_verification_only_vx,RRFS_CONUS_25km,*none*,*none*,*none*,2019061500,6,1,15,Runs ensemble verification tasks on staged data without running the rest of the workflow
 ,yes,MET_verification_only_vx,RRFS_CONUS_25km,*none*,*none*,*none*,2019061500,6,1,8,Runs verification tasks on staged data without running the rest of the workflow
-,yes,nco,RRFS_CONUS_25km,FV3_GFS_v16,FV3GFS,FV3GFS,2022040700,6,7,20,
-,yes,nco_ensemble,RRFS_CONUS_25km,FV3_GFS_v15p2,FV3GFS,FV3GFS,2019070100 2019070112 2019070200 2019070212,6,55,20,
-,yes,nco_grid_RRFS_CONUS_13km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16,RRFS_CONUS_13km,FV3_GFS_v16,FV3GFS,FV3GFS,2019061500,6,26,20,
-,yes,nco_grid_RRFS_CONUS_3km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v15_thompson_mynn_lam3km,RRFS_CONUS_3km,FV3_GFS_v15_thompson_mynn_lam3km,FV3GFS,FV3GFS,2019061500,6,320,25,
-,yes,nco_grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_RAP_suite_HRRR,RRFS_CONUScompact_25km,FV3_HRRR,HRRR,RAP,2020081000,6,12,17,
 ,yes,pregen_grid_orog_sfc_climo,RRFS_CONUS_25km,FV3_GFS_v15p2,FV3GFS,FV3GFS,2019070100,6,6,17,
 ,yes,specify_EXTRN_MDL_SYSBASEDIR_ICS_LBCS,RRFS_CONUS_25km,FV3_GFS_v15p2,FV3GFS,FV3GFS,2021061500,6,6,19,
 ,yes,specify_template_filenames,RRFS_CONUS_25km,FV3_GFS_v15p2,FV3GFS,FV3GFS,2019070100,6,6,28,
@@ -74,4 +68,4 @@ yes,yes,grid_RRFS_CONUS_25km_ics_NAM_lbcs_NAM_suite_GFS_v16,RRFS_CONUS_25km,FV3_
 ,,aqm_grid_AQM_NA13km_suite_GFS_v16,AQM_NA_13km,FV3_GFS_v16,FV3GFS,FV3GFS,2023021700 2023021706,6,,,This is an air-quality model test that requires special compilation to run; not supported in this release
 ,,grid_RRFS_NA_3km_ics_FV3GFS_lbcs_FV3GFS_suite_RRFS_v1beta,RRFS_NA_3km,FV3_RRFS_v1beta,FV3GFS,FV3GFS,2019070100,3,,,The RRFS_NA_3km domain currently has segfault problems--this test is not run
 ,,subhourly_post,RRFS_CONUScompact_25km,FV3_RRFS_v1beta,HRRR,RAP,2020081000,3,,,Subhourly post tasks are currently broken--these tests are not run
-,,subhourly_post_ensemble_2mems,RRFS_CONUScompact_25km,FV3_RRFS_v1beta,HRRR,RAP,2020081000,3,,,Subhourly post tasks are currently broken--these tests are not run
\ No newline at end of file
+,,subhourly_post_ensemble_2mems,RRFS_CONUScompact_25km,FV3_RRFS_v1beta,HRRR,RAP,2020081000,3,,,Subhourly post tasks are currently broken--these tests are not run
diff --git a/doc/tables/fix_file_list.rst b/doc/tables/fix_file_list.rst
index a20bd39245..628c124bc3 100644
--- a/doc/tables/fix_file_list.rst
+++ b/doc/tables/fix_file_list.rst
@@ -11,599 +11,599 @@ Static Files for SRW App Release v2.1.0
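+.. note::
+
+   Rather than fetching each file individually with ``wget``, users who have the AWS CLI installed can mirror an entire prefix in a single command. A sketch (untested; it assumes the AWS CLI is available and uses the same bucket and prefix as the ``wget`` commands below):
+
+   .. code-block:: console
+
+      # anonymous (unsigned) download of the whole fix_aer directory
+      aws s3 sync --no-sign-request s3://noaa-ufs-srw-pds/develop-20240618/fix/fix_aer/ ./fix_aer/
+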
 .. code-block:: console
 
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_aer/merra2.aerclim.2003-2014.m01.nc
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_aer/merra2.aerclim.2003-2014.m02.nc
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_aer/merra2.aerclim.2003-2014.m03.nc
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_aer/merra2.aerclim.2003-2014.m04.nc
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_aer/merra2.aerclim.2003-2014.m05.nc
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_aer/merra2.aerclim.2003-2014.m06.nc
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_aer/merra2.aerclim.2003-2014.m07.nc
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_aer/merra2.aerclim.2003-2014.m08.nc
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_aer/merra2.aerclim.2003-2014.m09.nc
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_aer/merra2.aerclim.2003-2014.m10.nc
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_aer/merra2.aerclim.2003-2014.m11.nc
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_aer/merra2.aerclim.2003-2014.m12.nc
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_aer/merra2C.aerclim.2003-2014.m01.nc
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_aer/merra2C.aerclim.2003-2014.m02.nc
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_aer/merra2C.aerclim.2003-2014.m03.nc
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_aer/merra2C.aerclim.2003-2014.m04.nc
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_aer/merra2C.aerclim.2003-2014.m05.nc
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_aer/merra2C.aerclim.2003-2014.m06.nc
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_aer/merra2C.aerclim.2003-2014.m07.nc
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_aer/merra2C.aerclim.2003-2014.m08.nc
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_aer/merra2C.aerclim.2003-2014.m09.nc
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_aer/merra2C.aerclim.2003-2014.m10.nc
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_aer/merra2C.aerclim.2003-2014.m11.nc
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_aer/merra2C.aerclim.2003-2014.m12.nc
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_aer/merra2.aerclim.2003-2014.m01.nc
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_aer/merra2.aerclim.2003-2014.m02.nc
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_aer/merra2.aerclim.2003-2014.m03.nc
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_aer/merra2.aerclim.2003-2014.m04.nc
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_aer/merra2.aerclim.2003-2014.m05.nc
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_aer/merra2.aerclim.2003-2014.m06.nc
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_aer/merra2.aerclim.2003-2014.m07.nc
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_aer/merra2.aerclim.2003-2014.m08.nc
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_aer/merra2.aerclim.2003-2014.m09.nc
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_aer/merra2.aerclim.2003-2014.m10.nc
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_aer/merra2.aerclim.2003-2014.m11.nc
+   wget
https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_aer/merra2.aerclim.2003-2014.m12.nc + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_aer/merra2C.aerclim.2003-2014.m01.nc + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_aer/merra2C.aerclim.2003-2014.m02.nc + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_aer/merra2C.aerclim.2003-2014.m03.nc + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_aer/merra2C.aerclim.2003-2014.m04.nc + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_aer/merra2C.aerclim.2003-2014.m05.nc + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_aer/merra2C.aerclim.2003-2014.m06.nc + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_aer/merra2C.aerclim.2003-2014.m07.nc + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_aer/merra2C.aerclim.2003-2014.m08.nc + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_aer/merra2C.aerclim.2003-2014.m09.nc + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_aer/merra2C.aerclim.2003-2014.m10.nc + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_aer/merra2C.aerclim.2003-2014.m11.nc + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_aer/merra2C.aerclim.2003-2014.m12.nc ``fix_am`` Files --------------------- .. code-block:: console - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/CCN_ACTIVATE.BIN - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/cfs_ice1x1monclim19822001.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/cfs_oi2sst1x1monclim19822001.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/cfs_v2_soilmcpc.1x1.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/CFSR.OISST.1982.2010.monthly.clim - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/CFSR.OISST.1999.2012.monthly.clim.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/CFSR.SEAICE.1982.2010.monthly.clim - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/CFSR.SEAICE.1982.2012.monthly.clim.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2monthlycyc.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/emcsfc_gland5min.grib2 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/emcsfc_snow_cover.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/emcsfc_snow_cover_climo.grib2 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/freezeH2O.dat - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/geo_em.d01.lat-lon.2.5m.HGT_M.nc - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/geo_em.d01.nc_HRRR_AK - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/geo_em.d01.nc_HRRRX - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/geo_em.d01.nc_RAPX - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_1x1_paramlist - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_1x1_paramlist.anl - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_1x1_paramlist.f00 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_aeroinfo.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_aeropac3a.m01.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_aeropac3a.m02.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_aeropac3a.m03.txt 
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_aeropac3a.m04.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_aeropac3a.m05.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_aeropac3a.m06.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_aeropac3a.m07.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_aeropac3a.m08.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_aeropac3a.m09.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_aeropac3a.m10.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_aeropac3a.m11.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_aeropac3a.m12.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_albedo4.1x1.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_cldtune.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_climaeropac_global.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2con.l28.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2con.l42.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2con.l64.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1956.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1957.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1958.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1959.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1960.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1961.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1962.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1963.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1964.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1965.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1966.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1967.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1968.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1969.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1970.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1971.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1972.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1973.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1974.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1975.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1976.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1977.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1978.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1979.txt - wget 
https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1980.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1981.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1982.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1983.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1984.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1985.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1986.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1987.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1988.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1989.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1990.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1991.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1992.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1993.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1994.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1995.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1996.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1997.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1998.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1999.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_2000.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_2001.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_2002.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_2003.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_2004.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_2005.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_2006.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_2007.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_2008.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_2009.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_2010.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_2011.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_2012.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_2013.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_glob.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2monthlycyc1976_2006.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2monthlycyc1976_2007.txt - wget 
https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2monthlycyc1976_2009.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_divten.l28.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_divten.l42.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_divten.l64.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_emissivity_coefs.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_gaussian_latitudes.t1148.2304.1152.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_gaussian_latitudes.t1534.3072.1536.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_gaussian_latitudes.t574.1152.576.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_gaussian_latitudes.t670.1344.672.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_gaussian_latitudes.t766.1536.768.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_gaussian_latitudes.t94.192.96.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_glacier.2x2.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_h2o_pltc.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_hd_paramlist - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_hd_paramlist.f00 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_hyblev.l128.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_hyblev.l128C.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_hyblev.l150.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_hyblev.l28.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_hyblev.l42.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_hyblev.l60.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_hyblev.l64.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_hyblev.l64sl.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_hyblev.l65.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_hyblev.l65.txt_0.1hPa - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_hyblev.l91.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_hyblev.l98.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_hyblev3.l28.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_hyblev3.l42.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_hyblev3.l60.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_hyblev3.l64.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_iceclim.2x2.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_idea_coeff_hflux.dat - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_idea_coeff_lflux.dat - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_idea_coeff_lte.150 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_idea_coeff_lte.360 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_idea_coeff_lte.540 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_idea_coeff_lte.720 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_idea_ggww_in1.par - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_idea_ggww_in4.par - wget 
https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_idea_h2ort_kg7t.par - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_idea_h2ovb_kg7t.par - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_idea_wei96.cofcnts - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_kplist.1d.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_kplist.hd.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_kplist.master.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_latitudes.t1148.2304.1152.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_latitudes.t126.384.190.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_latitudes.t1534.3072.1536.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_latitudes.t170.512.256.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_latitudes.t190.384.192.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_latitudes.t190.576.288.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_latitudes.t254.512.256.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_latitudes.t254.768.384.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_latitudes.t382.1152.576.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_latitudes.t382.768.384.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_latitudes.t574.1152.576.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_latitudes.t574.1760.880.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_latitudes.t62.192.94.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_latitudes.t670.1344.672.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_latitudes.t878.1760.880.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_latitudes.t878.2640.1320.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_latitudes.t92.192.94.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_longitudes.t1148.2304.1152.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_longitudes.t126.384.190.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_longitudes.t1534.3072.1536.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_longitudes.t170.512.256.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_longitudes.t190.384.192.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_longitudes.t190.576.288.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_longitudes.t254.512.256.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_longitudes.t254.768.384.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_longitudes.t382.1152.576.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_longitudes.t382.768.384.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_longitudes.t574.1152.576.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_longitudes.t574.1760.880.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_longitudes.t62.192.94.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_longitudes.t670.1344.672.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_longitudes.t878.1760.880.grb - wget 
https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_longitudes.t878.2640.1320.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_longitudes.t92.192.94.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_lonsperlat.t1148.2304.1152.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_lonsperlat.t126.384.190.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_lonsperlat.t1534.3072.1536.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_lonsperlat.t170.512.256.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_lonsperlat.t190.384.192.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_lonsperlat.t190.576.288.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_lonsperlat.t254.512.256.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_lonsperlat.t254.768.384.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_lonsperlat.t3070.6144.3072.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_lonsperlat.t382.1152.576.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_lonsperlat.t382.768.384.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_lonsperlat.t574.1152.576.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_lonsperlat.t574.1760.880.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_lonsperlat.t62.192.94.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_lonsperlat.t670.1344.672.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_lonsperlat.t766.1536.768.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_lonsperlat.t878.1760.880.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_lonsperlat.t878.2640.1320.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_lonsperlat.t92.192.94.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_lonsperlat.t94.192.96.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_maskh.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_master-catchup_parmlist - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_maxice.2x2.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mtnvar.t1148.2304.1152.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mtnvar.t126.384.190.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mtnvar.t126.384.190.rg.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mtnvar.t1534.3072.1536.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mtnvar.t1534.3072.1536.rg.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mtnvar.t170.512.256.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mtnvar.t190.384.192.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mtnvar.t190.384.192.rg.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mtnvar.t190.576.288.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mtnvar.t254.512.256.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mtnvar.t254.512.256.rg.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mtnvar.t254.768.384.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mtnvar.t382.1152.576.f77 - 
wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mtnvar.t382.768.384.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mtnvar.t382.768.384.rg.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mtnvar.t574.1152.576.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mtnvar.t574.1152.576.rg.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mtnvar.t574.1760.880.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mtnvar.t62.192.94.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mtnvar.t670.1344.672.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mtnvar.t670.1344.672.rg.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mtnvar.t766.1536.768.rg.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mtnvar.t878.1760.880.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mtnvar.t878.2640.1320.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mtnvar.t92.192.94.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mtnvar.t92.192.94.rg.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t1148.2304.1152.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t126.384.190.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t126.384.190.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t1534.3072.1536.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t1534.3072.1536.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t170.512.256.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t190.384.192.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t190.384.192.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t190.576.288.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t190.576.288.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t254.512.256.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t254.512.256.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t254.768.384.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t382.1152.576.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t382.768.384.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t382.768.384.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t574.1152.576.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t574.1152.576.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t574.1760.880.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t62.192.94.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t62.192.94.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t670.1344.672.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t670.1344.672.rg.grb - wget 
https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t766.1536.768.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t766.1536.768.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t878.1760.880.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t878.2640.1320.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t92.192.94.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t92.192.94.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t94.192.96.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_npoess_paramlist - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_o3clim.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_o3prdlos.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t1148.2304.1152.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t126.384.190.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t126.384.190.rg.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t126.384.190.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t1534.3072.1536.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t1534.3072.1536.rg.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t1534.3072.1536.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t170.512.256.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t190.384.192.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t190.384.192.rg.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t190.384.192.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t190.576.288.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t254.512.256.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t254.512.256.rg.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t254.512.256.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t254.768.384.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t382.1152.576.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t382.768.384.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t382.768.384.rg.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t382.768.384.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t574.1152.576.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t574.1152.576.rg.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t574.1152.576.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t574.1760.880.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t62.192.94.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t62.192.94.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t670.1344.672.grb - wget 
https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t670.1344.672.rg.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t670.1344.672.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t766.1536.768.rg.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t766.1536.768.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t878.1760.880.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t878.2640.1320.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t92.192.94.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t92.192.94.rg.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t92.192.94.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t1148.2304.1152.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t126.384.190.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t126.384.190.rg.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t126.384.190.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t1534.3072.1536.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t1534.3072.1536.rg.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t1534.3072.1536.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t170.512.256.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t190.384.192.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t190.384.192.rg.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t190.384.192.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t190.576.288.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t254.512.256.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t254.512.256.rg.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t254.512.256.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t254.768.384.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t382.1152.576.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t382.768.384.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t382.768.384.rg.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t382.768.384.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t574.1152.576.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t574.1152.576.rg.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t574.1152.576.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t574.1760.880.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t62.192.94.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t62.192.94.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t670.1344.672.grb - wget 
https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t670.1344.672.rg.f77
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t670.1344.672.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t766.1536.768.rg.f77
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t766.1536.768.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t878.1760.880.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t878.2640.1320.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t92.192.94.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t92.192.94.rg.f77
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t92.192.94.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_0.5x0.5.dat
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_salclm.t1534.3072.1536.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_sfc_emissivity_idx.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_shdmax.0.144x0.144.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_shdmax.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_shdmin.0.144x0.144.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_shdmin.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_siglevel.l28.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_siglevel.l42.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_siglevel.l64.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t1148.2304.1152.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t126.384.190.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t126.384.190.rg.f77
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t126.384.190.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t1534.3072.1536.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t1534.3072.1536.rg.f77
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t1534.3072.1536.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t170.512.256.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t190.384.192.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t190.384.192.rg.f77
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t190.384.192.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t190.576.288.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t254.512.256.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t254.512.256.rg.f77
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t254.512.256.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t254.768.384.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t382.1152.576.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t382.768.384.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t382.768.384.rg.f77
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t382.768.384.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t574.1152.576.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t574.1152.576.rg.f77
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t574.1152.576.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t574.1760.880.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t62.192.94.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t62.192.94.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t670.1344.672.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t670.1344.672.rg.f77
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t670.1344.672.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t766.1536.768.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t766.1536.768.rg.f77
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t766.1536.768.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t878.1760.880.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t878.2640.1320.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t92.192.94.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t92.192.94.rg.f77
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t92.192.94.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slope.1x1.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slptyp.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snoalb.1x1.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snoalb.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snoclim.1.875.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t1148.2304.1152.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t126.384.190.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t126.384.190.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t1534.3072.1536.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t1534.3072.1536.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t170.512.256.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t190.384.192.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t190.384.192.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t190.576.288.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t190.576.288.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t254.512.256.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t254.512.256.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t254.768.384.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t382.1152.576.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t382.768.384.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t382.768.384.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t574.1152.576.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t574.1152.576.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t574.1760.880.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t62.192.94.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t62.192.94.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t670.1344.672.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t670.1344.672.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t766.1536.768.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t766.1536.768.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t878.1760.880.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t878.2640.1320.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t92.192.94.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t92.192.94.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t94.192.96.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmcpc.1x1.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmgldas.statsgo.t1534.3072.1536.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmgldas.statsgo.t254.512.256.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmgldas.statsgo.t382.1152.576.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmgldas.statsgo.t382.768.384.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmgldas.statsgo.t574.1152.576.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmgldas.statsgo.t766.1536.768.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmgldas.statsgo.t92.192.94.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmgldas.statsgo.t94.192.96.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmgldas.t1148.2304.1152.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmgldas.t126.384.190.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmgldas.t1534.3072.1536.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmgldas.t170.512.256.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmgldas.t190.384.192.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmgldas.t190.576.288.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmgldas.t190.576.288.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmgldas.t254.512.256.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmgldas.t254.768.384.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmgldas.t382.1152.576.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmgldas.t382.768.384.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmgldas.t574.1152.576.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmgldas.t574.1760.880.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmgldas.t62.192.94.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmgldas.t670.1344.672.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmgldas.t766.1536.768.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmgldas.t878.1760.880.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmgldas.t878.2640.1320.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmgldas.t92.192.94.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.1x1.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t1148.2304.1152.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t126.384.190.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t126.384.190.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t1534.3072.1536.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t1534.3072.1536.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t170.512.256.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t190.384.192.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t190.384.192.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t190.576.288.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t190.576.288.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t254.512.256.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t254.512.256.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t254.768.384.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t382.1152.576.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t382.768.384.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t382.768.384.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t574.1152.576.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t574.1152.576.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t574.1760.880.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t62.192.94.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t62.192.94.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t670.1344.672.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t670.1344.672.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t766.1536.768.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t766.1536.768.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t878.1760.880.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t878.2640.1320.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t92.192.94.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t92.192.94.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t94.192.96.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_solarconstant_cmip_an.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_solarconstant_cmip_mn.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_solarconstant_noaa_a0.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_solarconstant_noaa_an.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_solarconstant_noaa_an.txt_v2011
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_solarconstant_noaa_an.txt_v2019
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_solarconstantdata.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_spectral_coefs.f77
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_sstclim.2x2.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_tbthe.f77
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_tg3clim.2.6x1.5.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_transmittance_coefs.f77
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vars.l28.f77
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vars.l42.f77
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vars.l64.f77
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegfrac.0.144.decpercent.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegfrac.1x1.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.1x1.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t1148.2304.1152.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t126.384.190.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t126.384.190.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t1534.3072.1536.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t1534.3072.1536.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t170.512.256.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t190.384.192.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t190.384.192.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t190.576.288.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t190.576.288.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t254.512.256.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t254.512.256.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t254.768.384.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t382.1152.576.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t382.768.384.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t382.768.384.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t574.1152.576.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t574.1152.576.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t574.1760.880.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t62.192.94.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t62.192.94.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t670.1344.672.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t670.1344.672.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t766.1536.768.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t766.1536.768.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t878.1760.880.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t878.2640.1320.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t92.192.94.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t92.192.94.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t94.192.96.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_volcanic_aerosols_1850-1859.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_volcanic_aerosols_1860-1869.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_volcanic_aerosols_1870-1879.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_volcanic_aerosols_1880-1889.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_volcanic_aerosols_1890-1899.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_volcanic_aerosols_1900-1909.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_volcanic_aerosols_1910-1919.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_volcanic_aerosols_1920-1929.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_volcanic_aerosols_1930-1939.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_volcanic_aerosols_1940-1949.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_volcanic_aerosols_1950-1959.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_volcanic_aerosols_1960-1969.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_volcanic_aerosols_1970-1979.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_volcanic_aerosols_1980-1989.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_volcanic_aerosols_1990-1999.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_zorclim.1x1.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/HGT.Beljaars_filtered.lat-lon.30s_res.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/latlon_grid3.32769.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/ozone.clim
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/ozprdlos_2015_new_sbuvO3_tclm15_nuchem.f77
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/qr_acr_qg.dat
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/qr_acr_qgV2.dat
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/qr_acr_qs.dat
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/qr_acr_qsV2.dat
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/rrtmgp-cloud-optics-coeffs-lw.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/rrtmgp-cloud-optics-coeffs-sw.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/rrtmgp-data-lw-g256-2018-12-04.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/rrtmgp-data-sw-g224-2018-12-04.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/rrtmgp-lw-prototype-g128-210413.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/rrtmgp-sw-prototype-g131-210413.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/RTGSST.1982.2012.monthly.clim.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/seaice_newland.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/syndat_fildef.vit
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/syndat_slmask.t126.gaussian
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/syndat_stmnames
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/syndat_stmnames_old
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/syndat_stmnames_old1
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/syndat_stmnames_old2
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/Thompson_MP_MONTHLY_CLIMO.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/ugwp_limb_tau.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/CCN_ACTIVATE.BIN
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/cfs_ice1x1monclim19822001.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/cfs_oi2sst1x1monclim19822001.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/cfs_v2_soilmcpc.1x1.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/CFSR.OISST.1982.2010.monthly.clim
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/CFSR.OISST.1999.2012.monthly.clim.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/CFSR.SEAICE.1982.2010.monthly.clim
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/CFSR.SEAICE.1982.2012.monthly.clim.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2monthlycyc.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/emcsfc_gland5min.grib2
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/emcsfc_snow_cover.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/emcsfc_snow_cover_climo.grib2
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/freezeH2O.dat
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/geo_em.d01.lat-lon.2.5m.HGT_M.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/geo_em.d01.nc_HRRR_AK
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/geo_em.d01.nc_HRRRX
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/geo_em.d01.nc_RAPX
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_1x1_paramlist
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_1x1_paramlist.anl
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_1x1_paramlist.f00
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_aeroinfo.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_aeropac3a.m01.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_aeropac3a.m02.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_aeropac3a.m03.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_aeropac3a.m04.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_aeropac3a.m05.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_aeropac3a.m06.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_aeropac3a.m07.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_aeropac3a.m08.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_aeropac3a.m09.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_aeropac3a.m10.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_aeropac3a.m11.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_aeropac3a.m12.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_albedo4.1x1.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_cldtune.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_climaeropac_global.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2con.l28.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2con.l42.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2con.l64.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1956.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1957.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1958.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1959.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1960.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1961.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1962.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1963.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1964.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1965.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1966.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1967.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1968.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1969.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1970.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1971.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1972.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1973.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1974.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1975.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1976.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1977.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1978.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1979.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1980.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1981.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1982.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1983.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1984.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1985.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1986.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1987.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1988.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1989.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1990.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1991.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1992.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1993.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1994.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1995.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1996.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1997.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1998.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1999.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_2000.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_2001.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_2002.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_2003.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_2004.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_2005.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_2006.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_2007.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_2008.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_2009.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_2010.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_2011.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_2012.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_2013.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_glob.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2monthlycyc1976_2006.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2monthlycyc1976_2007.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2monthlycyc1976_2009.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_divten.l28.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_divten.l42.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_divten.l64.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_emissivity_coefs.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_gaussian_latitudes.t1148.2304.1152.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_gaussian_latitudes.t1534.3072.1536.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_gaussian_latitudes.t574.1152.576.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_gaussian_latitudes.t670.1344.672.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_gaussian_latitudes.t766.1536.768.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_gaussian_latitudes.t94.192.96.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_glacier.2x2.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_h2o_pltc.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_hd_paramlist
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_hd_paramlist.f00
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_hyblev.l128.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_hyblev.l128C.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_hyblev.l150.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_hyblev.l28.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_hyblev.l42.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_hyblev.l60.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_hyblev.l64.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_hyblev.l64sl.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_hyblev.l65.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_hyblev.l65.txt_0.1hPa
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_hyblev.l91.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_hyblev.l98.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_hyblev3.l28.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_hyblev3.l42.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_hyblev3.l60.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_hyblev3.l64.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_iceclim.2x2.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_idea_coeff_hflux.dat
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_idea_coeff_lflux.dat
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_idea_coeff_lte.150
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_idea_coeff_lte.360
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_idea_coeff_lte.540
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_idea_coeff_lte.720
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_idea_ggww_in1.par
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_idea_ggww_in4.par
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_idea_h2ort_kg7t.par
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_idea_h2ovb_kg7t.par
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_idea_wei96.cofcnts
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_kplist.1d.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_kplist.hd.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_kplist.master.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_latitudes.t1148.2304.1152.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_latitudes.t126.384.190.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_latitudes.t1534.3072.1536.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_latitudes.t170.512.256.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_latitudes.t190.384.192.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_latitudes.t190.576.288.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_latitudes.t254.512.256.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_latitudes.t254.768.384.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_latitudes.t382.1152.576.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_latitudes.t382.768.384.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_latitudes.t574.1152.576.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_latitudes.t574.1760.880.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_latitudes.t62.192.94.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_latitudes.t670.1344.672.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_latitudes.t878.1760.880.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_latitudes.t878.2640.1320.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_latitudes.t92.192.94.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_longitudes.t1148.2304.1152.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_longitudes.t126.384.190.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_longitudes.t1534.3072.1536.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_longitudes.t170.512.256.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_longitudes.t190.384.192.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_longitudes.t190.576.288.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_longitudes.t254.512.256.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_longitudes.t254.768.384.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_longitudes.t382.1152.576.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_longitudes.t382.768.384.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_longitudes.t574.1152.576.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_longitudes.t574.1760.880.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_longitudes.t62.192.94.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_longitudes.t670.1344.672.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_longitudes.t878.1760.880.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_longitudes.t878.2640.1320.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_longitudes.t92.192.94.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_lonsperlat.t1148.2304.1152.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_lonsperlat.t126.384.190.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_lonsperlat.t1534.3072.1536.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_lonsperlat.t170.512.256.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_lonsperlat.t190.384.192.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_lonsperlat.t190.576.288.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_lonsperlat.t254.512.256.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_lonsperlat.t254.768.384.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_lonsperlat.t3070.6144.3072.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_lonsperlat.t382.1152.576.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_lonsperlat.t382.768.384.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_lonsperlat.t574.1152.576.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_lonsperlat.t574.1760.880.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_lonsperlat.t62.192.94.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_lonsperlat.t670.1344.672.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_lonsperlat.t766.1536.768.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_lonsperlat.t878.1760.880.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_lonsperlat.t878.2640.1320.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_lonsperlat.t92.192.94.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_lonsperlat.t94.192.96.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_maskh.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_master-catchup_parmlist
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_maxice.2x2.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mtnvar.t1148.2304.1152.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mtnvar.t126.384.190.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mtnvar.t126.384.190.rg.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mtnvar.t1534.3072.1536.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mtnvar.t1534.3072.1536.rg.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mtnvar.t170.512.256.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mtnvar.t190.384.192.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mtnvar.t190.384.192.rg.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mtnvar.t190.576.288.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mtnvar.t254.512.256.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mtnvar.t254.512.256.rg.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mtnvar.t254.768.384.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mtnvar.t382.1152.576.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mtnvar.t382.768.384.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mtnvar.t382.768.384.rg.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mtnvar.t574.1152.576.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mtnvar.t574.1152.576.rg.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mtnvar.t574.1760.880.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mtnvar.t62.192.94.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mtnvar.t670.1344.672.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mtnvar.t670.1344.672.rg.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mtnvar.t766.1536.768.rg.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mtnvar.t878.1760.880.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mtnvar.t878.2640.1320.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mtnvar.t92.192.94.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mtnvar.t92.192.94.rg.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t1148.2304.1152.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t126.384.190.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t126.384.190.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t1534.3072.1536.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t1534.3072.1536.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t170.512.256.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t190.384.192.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t190.384.192.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t190.576.288.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t190.576.288.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t254.512.256.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t254.512.256.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t254.768.384.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t382.1152.576.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t382.768.384.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t382.768.384.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t574.1152.576.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t574.1152.576.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t574.1760.880.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t62.192.94.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t62.192.94.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t670.1344.672.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t670.1344.672.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t766.1536.768.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t766.1536.768.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t878.1760.880.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t878.2640.1320.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t92.192.94.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t92.192.94.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t94.192.96.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_npoess_paramlist
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_o3clim.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_o3prdlos.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t1148.2304.1152.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t126.384.190.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t126.384.190.rg.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t126.384.190.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t1534.3072.1536.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t1534.3072.1536.rg.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t1534.3072.1536.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t170.512.256.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t190.384.192.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t190.384.192.rg.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t190.384.192.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t190.576.288.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t254.512.256.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t254.512.256.rg.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t254.512.256.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t254.768.384.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t382.1152.576.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t382.768.384.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t382.768.384.rg.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t382.768.384.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t574.1152.576.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t574.1152.576.rg.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t574.1152.576.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t574.1760.880.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t62.192.94.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t62.192.94.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t670.1344.672.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t670.1344.672.rg.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t670.1344.672.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t766.1536.768.rg.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t766.1536.768.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t878.1760.880.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t878.2640.1320.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t92.192.94.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t92.192.94.rg.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t92.192.94.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t1148.2304.1152.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t126.384.190.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t126.384.190.rg.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t126.384.190.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t1534.3072.1536.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t1534.3072.1536.rg.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t1534.3072.1536.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t170.512.256.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t190.384.192.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t190.384.192.rg.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t190.384.192.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t190.576.288.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t254.512.256.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t254.512.256.rg.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t254.512.256.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t254.768.384.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t382.1152.576.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t382.768.384.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t382.768.384.rg.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t382.768.384.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t574.1152.576.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t574.1152.576.rg.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t574.1152.576.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t574.1760.880.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t62.192.94.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t62.192.94.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t670.1344.672.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t670.1344.672.rg.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t670.1344.672.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t766.1536.768.rg.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t766.1536.768.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t878.1760.880.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t878.2640.1320.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t92.192.94.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t92.192.94.rg.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t92.192.94.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_0.5x0.5.dat
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_salclm.t1534.3072.1536.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_sfc_emissivity_idx.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_shdmax.0.144x0.144.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_shdmax.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_shdmin.0.144x0.144.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_shdmin.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_siglevel.l28.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_siglevel.l42.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_siglevel.l64.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t1148.2304.1152.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t126.384.190.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t126.384.190.rg.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t126.384.190.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t1534.3072.1536.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t1534.3072.1536.rg.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t1534.3072.1536.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t170.512.256.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t190.384.192.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t190.384.192.rg.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t190.384.192.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t190.576.288.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t254.512.256.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t254.512.256.rg.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t254.512.256.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t254.768.384.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t382.1152.576.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t382.768.384.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t382.768.384.rg.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t382.768.384.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t574.1152.576.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t574.1152.576.rg.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t574.1152.576.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t574.1760.880.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t62.192.94.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t62.192.94.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t670.1344.672.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t670.1344.672.rg.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t670.1344.672.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t766.1536.768.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t766.1536.768.rg.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t766.1536.768.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t878.1760.880.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t878.2640.1320.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t92.192.94.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t92.192.94.rg.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t92.192.94.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slope.1x1.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slptyp.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snoalb.1x1.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snoalb.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snoclim.1.875.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t1148.2304.1152.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t126.384.190.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t126.384.190.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t1534.3072.1536.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t1534.3072.1536.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t170.512.256.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t190.384.192.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t190.384.192.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t190.576.288.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t190.576.288.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t254.512.256.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t254.512.256.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t254.768.384.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t382.1152.576.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t382.768.384.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t382.768.384.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t574.1152.576.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t574.1152.576.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t574.1760.880.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t62.192.94.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t62.192.94.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t670.1344.672.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t670.1344.672.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t766.1536.768.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t766.1536.768.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t878.1760.880.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t878.2640.1320.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t92.192.94.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t92.192.94.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t94.192.96.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmcpc.1x1.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmgldas.statsgo.t1534.3072.1536.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmgldas.statsgo.t254.512.256.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmgldas.statsgo.t382.1152.576.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmgldas.statsgo.t382.768.384.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmgldas.statsgo.t574.1152.576.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmgldas.statsgo.t766.1536.768.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmgldas.statsgo.t92.192.94.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmgldas.statsgo.t94.192.96.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmgldas.t1148.2304.1152.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmgldas.t126.384.190.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmgldas.t1534.3072.1536.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmgldas.t170.512.256.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmgldas.t190.384.192.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmgldas.t190.576.288.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmgldas.t190.576.288.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmgldas.t254.512.256.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmgldas.t254.768.384.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmgldas.t382.1152.576.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmgldas.t382.768.384.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmgldas.t574.1152.576.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmgldas.t574.1760.880.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmgldas.t62.192.94.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmgldas.t670.1344.672.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmgldas.t766.1536.768.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmgldas.t878.1760.880.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmgldas.t878.2640.1320.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmgldas.t92.192.94.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.1x1.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t1148.2304.1152.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t126.384.190.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t126.384.190.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t1534.3072.1536.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t1534.3072.1536.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t170.512.256.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t190.384.192.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t190.384.192.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t190.576.288.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t190.576.288.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t254.512.256.grb
+ wget
https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t254.512.256.rg.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t254.768.384.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t382.1152.576.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t382.768.384.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t382.768.384.rg.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t574.1152.576.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t574.1152.576.rg.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t574.1760.880.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t62.192.94.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t62.192.94.rg.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t670.1344.672.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t670.1344.672.rg.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t766.1536.768.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t766.1536.768.rg.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t878.1760.880.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t878.2640.1320.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t92.192.94.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t92.192.94.rg.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t94.192.96.rg.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_solarconstant_cmip_an.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_solarconstant_cmip_mn.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_solarconstant_noaa_a0.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_solarconstant_noaa_an.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_solarconstant_noaa_an.txt_v2011 + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_solarconstant_noaa_an.txt_v2019 + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_solarconstantdata.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_spectral_coefs.f77 + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_sstclim.2x2.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_tbthe.f77 + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_tg3clim.2.6x1.5.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_transmittance_coefs.f77 + wget 
https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vars.l28.f77 + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vars.l42.f77 + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vars.l64.f77 + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegfrac.0.144.decpercent.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegfrac.1x1.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.1x1.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t1148.2304.1152.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t126.384.190.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t126.384.190.rg.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t1534.3072.1536.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t1534.3072.1536.rg.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t170.512.256.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t190.384.192.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t190.384.192.rg.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t190.576.288.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t190.576.288.rg.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t254.512.256.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t254.512.256.rg.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t254.768.384.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t382.1152.576.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t382.768.384.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t382.768.384.rg.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t574.1152.576.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t574.1152.576.rg.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t574.1760.880.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t62.192.94.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t62.192.94.rg.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t670.1344.672.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t670.1344.672.rg.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t766.1536.768.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t766.1536.768.rg.grb + wget 
https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t878.1760.880.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t878.2640.1320.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t92.192.94.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t92.192.94.rg.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t94.192.96.rg.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_volcanic_aerosols_1850-1859.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_volcanic_aerosols_1860-1869.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_volcanic_aerosols_1870-1879.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_volcanic_aerosols_1880-1889.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_volcanic_aerosols_1890-1899.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_volcanic_aerosols_1900-1909.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_volcanic_aerosols_1910-1919.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_volcanic_aerosols_1920-1929.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_volcanic_aerosols_1930-1939.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_volcanic_aerosols_1940-1949.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_volcanic_aerosols_1950-1959.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_volcanic_aerosols_1960-1969.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_volcanic_aerosols_1970-1979.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_volcanic_aerosols_1980-1989.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_volcanic_aerosols_1990-1999.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_zorclim.1x1.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/HGT.Beljaars_filtered.lat-lon.30s_res.nc + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/latlon_grid3.32769.nc + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/ozone.clim + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/ozprdlos_2015_new_sbuvO3_tclm15_nuchem.f77 + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/qr_acr_qg.dat + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/qr_acr_qgV2.dat + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/qr_acr_qs.dat + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/qr_acr_qsV2.dat + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/rrtmgp-cloud-optics-coeffs-lw.nc + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/rrtmgp-cloud-optics-coeffs-sw.nc + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/rrtmgp-data-lw-g256-2018-12-04.nc + wget 
https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/rrtmgp-data-sw-g224-2018-12-04.nc + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/rrtmgp-lw-prototype-g128-210413.nc + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/rrtmgp-sw-prototype-g131-210413.nc + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/RTGSST.1982.2012.monthly.clim.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/seaice_newland.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/syndat_fildef.vit + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/syndat_slmask.t126.gaussian + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/syndat_stmnames + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/syndat_stmnames_old + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/syndat_stmnames_old1 + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/syndat_stmnames_old2 + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/Thompson_MP_MONTHLY_CLIMO.nc + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/ugwp_limb_tau.nc ``fix_am/co2dat_4a/`` Files: @@ -611,102 +611,102 @@ Static Files for SRW App Release v2.1.0 .. code-block:: console - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1956.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1957.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1958.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1959.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1960.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1961.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1962.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1963.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1964.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1965.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1966.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1967.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1968.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1969.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1970.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1971.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1972.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1973.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1974.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1975.txt - wget 
https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1976.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1977.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1978.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1979.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1980.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1981.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1982.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1983.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1984.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1985.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1986.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1987.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1988.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1989.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1990.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1991.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1992.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1993.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1994.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1995.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1996.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1997.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1998.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1999.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2000.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2001.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2002.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2003.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2004.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2005.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2006.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2007.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2008.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2009.txt - wget 
https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2009.txt_proj - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2009.txt_proj_u - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2010.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2010.txt_proj - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2010.txt_proj_u - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2011.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2011.txt_proj - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2011.txt_proj_u - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2012.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2012.txt_proj - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2012.txt_proj_u - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2013.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2013.txt_proj - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2013.txt_proj_u - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2014.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2014.txt_proj - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2014.txt_proj_u - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2015.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2015.txt_proj - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2015.txt_proj_u - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2016.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2016.txt_proj - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2016.txt_proj_u - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2017.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2017.txt_proj - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2017.txt_proj_u - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2018.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2018.txt_proj - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2018.txt_proj_u - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2019.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2019.txt_proj - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2019.txt_proj_u - wget 
https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2020.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2020.txt_proj - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2020.txt_proj_u - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2021.txt_proj - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2021.txt_proj_u - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2022.txt_proj - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_glob.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2monthlycyc1976_2006.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2monthlycyc1976_2009.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/MEMO + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1956.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1957.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1958.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1959.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1960.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1961.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1962.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1963.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1964.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1965.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1966.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1967.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1968.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1969.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1970.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1971.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1972.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1973.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1974.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1975.txt + wget 
https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1976.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1977.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1978.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1979.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1980.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1981.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1982.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1983.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1984.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1985.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1986.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1987.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1988.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1989.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1990.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1991.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1992.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1993.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1994.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1995.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1996.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1997.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1998.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1999.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2000.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2001.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2002.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2003.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2004.txt + wget 
https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2005.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2006.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2007.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2008.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2009.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2009.txt_proj + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2009.txt_proj_u + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2010.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2010.txt_proj + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2010.txt_proj_u + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2011.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2011.txt_proj + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2011.txt_proj_u + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2012.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2012.txt_proj + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2012.txt_proj_u + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2013.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2013.txt_proj + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2013.txt_proj_u + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2014.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2014.txt_proj + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2014.txt_proj_u + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2015.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2015.txt_proj + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2015.txt_proj_u + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2016.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2016.txt_proj + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2016.txt_proj_u + wget 
https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2017.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2017.txt_proj + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2017.txt_proj_u + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2018.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2018.txt_proj + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2018.txt_proj_u + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2019.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2019.txt_proj + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2019.txt_proj_u + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2020.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2020.txt_proj + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2020.txt_proj_u + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2021.txt_proj + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2021.txt_proj_u + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2022.txt_proj + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_glob.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2monthlycyc1976_2006.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2monthlycyc1976_2009.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/MEMO ``fix_am/fix_co2_proj`` Files: @@ -714,20 +714,20 @@ Static Files for SRW App Release v2.1.0 .. 
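The year-stamped CO2 records above differ only in the year, so the bulk of the list can be fetched with a loop rather than one ``wget`` per file. A minimal sketch, assuming bash and the same ``develop-20240618`` prefix as above (the ``*_proj`` variants, ``glob``, monthly-cycle, and ``MEMO`` files still need individual commands):

.. code-block:: console

   base=https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a
   # Plain annual records exist for 1956 through 2020 in the listing above.
   for year in $(seq 1956 2020); do
     wget "${base}/global_co2historicaldata_${year}.txt"
   done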
``fix_am/fix_co2_proj`` Files:

@@ -714,20 +714,20 @@ Static Files for SRW App Release v2.1.0

.. code-block:: console

- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/fix_co2_proj/global_co2historicaldata_2009.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/fix_co2_proj/global_co2historicaldata_2010.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/fix_co2_proj/global_co2historicaldata_2011.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/fix_co2_proj/global_co2historicaldata_2012.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/fix_co2_proj/global_co2historicaldata_2013.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/fix_co2_proj/global_co2historicaldata_2014.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/fix_co2_proj/global_co2historicaldata_2015.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/fix_co2_proj/global_co2historicaldata_2016.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/fix_co2_proj/global_co2historicaldata_2017.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/fix_co2_proj/global_co2historicaldata_2018.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/fix_co2_proj/global_co2historicaldata_2019.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/fix_co2_proj/global_co2historicaldata_2020.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/fix_co2_proj/global_co2historicaldata_2021.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/fix_co2_proj/global_co2historicaldata_2022.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/fix_co2_proj/global_co2historicaldata_2009.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/fix_co2_proj/global_co2historicaldata_2010.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/fix_co2_proj/global_co2historicaldata_2011.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/fix_co2_proj/global_co2historicaldata_2012.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/fix_co2_proj/global_co2historicaldata_2013.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/fix_co2_proj/global_co2historicaldata_2014.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/fix_co2_proj/global_co2historicaldata_2015.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/fix_co2_proj/global_co2historicaldata_2016.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/fix_co2_proj/global_co2historicaldata_2017.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/fix_co2_proj/global_co2historicaldata_2018.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/fix_co2_proj/global_co2historicaldata_2019.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/fix_co2_proj/global_co2historicaldata_2020.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/fix_co2_proj/global_co2historicaldata_2021.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/fix_co2_proj/global_co2historicaldata_2022.txt

``fix_am/fix_co2_update`` Files:

@@ -735,19 +735,19 @@ Static Files for SRW App Release v2.1.0

.. code-block:: console

- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/fix_co2_update/global_co2historicaldata_2009.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/fix_co2_update/global_co2historicaldata_2010.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/fix_co2_update/global_co2historicaldata_2011.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/fix_co2_update/global_co2historicaldata_2012.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/fix_co2_update/global_co2historicaldata_2013.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/fix_co2_update/global_co2historicaldata_2014.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/fix_co2_update/global_co2historicaldata_2015.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/fix_co2_update/global_co2historicaldata_2016.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/fix_co2_update/global_co2historicaldata_2017.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/fix_co2_update/global_co2historicaldata_2018.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/fix_co2_update/global_co2historicaldata_2019.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/fix_co2_update/global_co2historicaldata_2020.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/fix_co2_update/global_co2historicaldata_2021.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/fix_co2_update/global_co2historicaldata_2009.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/fix_co2_update/global_co2historicaldata_2010.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/fix_co2_update/global_co2historicaldata_2011.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/fix_co2_update/global_co2historicaldata_2012.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/fix_co2_update/global_co2historicaldata_2013.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/fix_co2_update/global_co2historicaldata_2014.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/fix_co2_update/global_co2historicaldata_2015.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/fix_co2_update/global_co2historicaldata_2016.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/fix_co2_update/global_co2historicaldata_2017.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/fix_co2_update/global_co2historicaldata_2018.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/fix_co2_update/global_co2historicaldata_2019.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/fix_co2_update/global_co2historicaldata_2020.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/fix_co2_update/global_co2historicaldata_2021.txt

``fix_lut`` Files

@@ -755,12 +755,12 @@ Static Files for SRW App Release v2.1.0

.. code-block:: console

- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_lut/optics_BC.v1_3.dat
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_lut/optics_DU.v15_3.dat
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_lut/optics_DU.v15_3.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_lut/optics_OC.v1_3.dat
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_lut/optics_SS.v3_3.dat
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_lut/optics_SU.v1_3.dat
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_lut/optics_BC.v1_3.dat
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_lut/optics_DU.v15_3.dat
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_lut/optics_DU.v15_3.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_lut/optics_OC.v1_3.dat
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_lut/optics_SS.v3_3.dat
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_lut/optics_SU.v1_3.dat

``fix_orog`` Files

@@ -768,26 +768,26 @@ Static Files for SRW App Release v2.1.0

.. code-block:: console

- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_orog/clmgrb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_orog/clmgrb.index
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_orog/convert.f90
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_orog/gmted2010.30sec.flt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_orog/gmted2010.30sec.int
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_orog/gmted2010.30sec.flt.ctl
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_orog/gmted2010.30sec.int.ctl
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_orog/thirty.second.antarctic.new.bin
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_orog/GlobalLakeDepth.dat
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_orog/GlobalLakeDepth.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_orog/GlobalLakeStatus.dat
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_orog/GlobalLakeStatus.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_orog/gtopo30_gg.fine
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_orog/gtopo30_gg.fine.nh
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_orog/landcover30.fixed
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_orog/makefile
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_orog/run.lsf
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_orog/TOP8M_avg.20I4.asc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_orog/TOP8M_max.20I4.asc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_orog/TOP8M_slm.80I1.asc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_orog/clmgrb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_orog/clmgrb.index
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_orog/convert.f90
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_orog/gmted2010.30sec.flt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_orog/gmted2010.30sec.int
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_orog/gmted2010.30sec.flt.ctl
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_orog/gmted2010.30sec.int.ctl
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_orog/thirty.second.antarctic.new.bin
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_orog/GlobalLakeDepth.dat
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_orog/GlobalLakeDepth.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_orog/GlobalLakeStatus.dat
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_orog/GlobalLakeStatus.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_orog/gtopo30_gg.fine
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_orog/gtopo30_gg.fine.nh
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_orog/landcover30.fixed
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_orog/makefile
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_orog/run.lsf
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_orog/TOP8M_avg.20I4.asc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_orog/TOP8M_max.20I4.asc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_orog/TOP8M_slm.80I1.asc

@@ -796,26 +796,26 @@ Static Files for SRW App Release v2.1.0

.. code-block:: console

- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_sfc_climo/facsf.1.0.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_sfc_climo/vegetation_type.igbp.0.03.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_sfc_climo/leaf_area_index.0.05.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_sfc_climo/vegetation_type.igbp.0.05.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_sfc_climo/maximum_snow_albedo.0.05.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_sfc_climo/vegetation_type.igbp.conus.0.01.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_sfc_climo/slope_type.1.0.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_sfc_climo/vegetation_type.modis.igbp.0.03.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_sfc_climo/snowfree_albedo.4comp.0.05.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_sfc_climo/vegetation_type.modis.igbp.0.05.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_sfc_climo/soil_type.statsgo.0.03.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_sfc_climo/vegetation_type.modis.igbp.conus.0.01.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_sfc_climo/soil_type.statsgo.0.05.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_sfc_climo/vegetation_type.viirs.igbp.0.03.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_sfc_climo/soil_type.statsgo.conus.0.01.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_sfc_climo/vegetation_type.viirs.igbp.0.05.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_sfc_climo/substrate_temperature.1.0.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_sfc_climo/vegetation_type.viirs.igbp.0.1.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_sfc_climo/substrate_temperature.2.6x1.5.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_sfc_climo/vegetation_type.viirs.igbp.conus.0.01.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_sfc_climo/vegetation_greenness.0.144.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_sfc_climo/facsf.1.0.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_sfc_climo/vegetation_type.igbp.0.03.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_sfc_climo/leaf_area_index.0.05.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_sfc_climo/vegetation_type.igbp.0.05.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_sfc_climo/maximum_snow_albedo.0.05.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_sfc_climo/vegetation_type.igbp.conus.0.01.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_sfc_climo/slope_type.1.0.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_sfc_climo/vegetation_type.modis.igbp.0.03.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_sfc_climo/snowfree_albedo.4comp.0.05.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_sfc_climo/vegetation_type.modis.igbp.0.05.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_sfc_climo/soil_type.statsgo.0.03.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_sfc_climo/vegetation_type.modis.igbp.conus.0.01.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_sfc_climo/soil_type.statsgo.0.05.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_sfc_climo/vegetation_type.viirs.igbp.0.03.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_sfc_climo/soil_type.statsgo.conus.0.01.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_sfc_climo/vegetation_type.viirs.igbp.0.05.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_sfc_climo/substrate_temperature.1.0.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_sfc_climo/vegetation_type.viirs.igbp.0.1.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_sfc_climo/substrate_temperature.2.6x1.5.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_sfc_climo/vegetation_type.viirs.igbp.conus.0.01.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_sfc_climo/vegetation_greenness.0.144.nc
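Every ``fix`` subdirectory listed above sits under the same S3 prefix, so the whole tree can also be mirrored in one step instead of file by file. A minimal sketch, assuming the AWS CLI is available and that the bucket permits anonymous reads:

.. code-block:: console

   aws s3 cp --no-sign-request --recursive \
       s3://noaa-ufs-srw-pds/develop-20240618/fix/ fix/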
diff --git a/jobs/JREGIONAL_CHECK_POST_OUTPUT b/jobs/JREGIONAL_CHECK_POST_OUTPUT
index 2b1fe69bbb..358b1fad72 100755
--- a/jobs/JREGIONAL_CHECK_POST_OUTPUT
+++ b/jobs/JREGIONAL_CHECK_POST_OUTPUT
@@ -3,7 +3,22 @@
 #
 #-----------------------------------------------------------------------
 #
+# The J-Job script for checking the post output.
 #
+# Run-time environment variables:
+#
+# CDATE
+# ENSMEM_INDX
+# GLOBAL_VAR_DEFNS_FP
+#
+# Experiment variables
+#
+# user:
+# SCRIPTSdir
+# USHdir
+#
+# workflow:
+# EXPTDIR
 #
 #-----------------------------------------------------------------------
 #
@@ -16,7 +31,9 @@
 #-----------------------------------------------------------------------
 #
 . $USHdir/source_util_funcs.sh
-source_config_for_task "" ${GLOBAL_VAR_DEFNS_FP}
+for sect in user nco workflow ; do
+  source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
 . $USHdir/job_preamble.sh
 #
 #-----------------------------------------------------------------------
@@ -51,10 +68,11 @@ print_info_msg "
 Entering script:  \"${scrfunc_fn}\"
 In directory:     \"${scrfunc_dir}\"

-This is the J-job script for the task that checks that all the post-
-processed output files in fact exist and are at least a certain age.
-These files may have been generated by UPP as part of the current SRW
-App workflow, or they may be user-staged.
+This is the J-job for the task that checks that no more than
+NUM_MISSING_FCST_FILES_MAX of each forecast's (ensemble member's) post-
+processed output files are missing.  Note that such files may have been
+generated by UPP as part of the current SRW App workflow, or they may be
+user-staged.
 ========================================================================"
 #
 #-----------------------------------------------------------------------
@@ -70,15 +88,15 @@ Call to ex-script corresponding to J-job \"${scrfunc_fn}\" failed."
 #
 #-----------------------------------------------------------------------
 #
-# Create a flag file to make rocoto aware that the make_grid task has
-# successfully completed (so that other tasks that depend on it can be
-# launched).
+# Create a flag file to make rocoto aware that the check_post_output task
+# has successfully completed (so that other tasks that depend on it can
+# be launched).
 #
 #-----------------------------------------------------------------------
 #
 ensmem_name="mem${ENSMEM_INDX}"
 cycle_dir="$EXPTDIR/$CDATE"
-mkdir_vrfy -p "${cycle_dir}"
+mkdir -p "${cycle_dir}"
 touch "${cycle_dir}/post_files_exist_${ensmem_name}.txt"
 #
 #-----------------------------------------------------------------------
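The retired source_config_for_task call took a pipe-delimited list of task sections; the replacement loop sources one named YAML section of ${GLOBAL_VAR_DEFNS_FP} at a time. source_yaml itself is defined in the app's shell utilities; the stand-in below is an assumption rather than the app's implementation (it also presumes PyYAML is installed), but it shows the effect of exporting one top-level section into the environment:

    # Simplified stand-in for source_yaml: export every key of one top-level
    # section of a YAML file into the current shell.
    source_yaml() {
      local yaml_file="$1" section="$2"
      source <(python3 -c 'import sys, yaml; cfg = yaml.safe_load(open(sys.argv[1])) or {}; [print(f"export {k}={v!r}") for k, v in (cfg.get(sys.argv[2]) or {}).items()]' "${yaml_file}" "${section}")
    }

    # usage, mirroring the J-job above:
    for sect in user nco workflow ; do
      source_yaml "${GLOBAL_VAR_DEFNS_FP}" "${sect}"
    done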
diff --git a/jobs/JREGIONAL_GET_EXTRN_MDL_FILES b/jobs/JREGIONAL_GET_EXTRN_MDL_FILES
index 8efd332dd9..fbd582201a 100755
--- a/jobs/JREGIONAL_GET_EXTRN_MDL_FILES
+++ b/jobs/JREGIONAL_GET_EXTRN_MDL_FILES
@@ -3,20 +3,48 @@
 #
 #-----------------------------------------------------------------------
 #
-# This script gets either from the system directory or from mass store
-# (HPSS) the files generated by the external model (specified by the
-# variable EXTRN_MDL_NAME) for either the initial conditions (ICs) or the
-# lateral boundary conditions (LBCs).  Which of these we are considering
-# depends on the value of the variable ICS_OR_LBCS, which should be defined
-# in the environment (when calling this script from a rocoto workflow,
-# the workflow should define this variable, e.g. using rocoto's
-# tag).
-#
-# Note that when we refer to ICs, we are referring to not only the atmospheric
-# fields at the initial time but also various surface fields (which are
-# for now time-independent) as well as the 0-th forecast hour LBCs.  Also,
-# when we refer to LBCs, we are referring to the LBCs excluding the one
-# at the 0-th hour.
+# The J-Job script for getting the model files that will be used for
+# either initial conditions or lateral boundary conditions for the
+# experiment.
+#
+# Run-time environment variables:
+#
+# CDATE
+# COMIN
+# cyc
+# DATA
+# EXTRN_MDL_STAGING_DIR
+# GLOBAL_VAR_DEFNS_FP
+# ICS_OR_LBCS
+# PDY
+#
+# Experiment variables
+#
+# user:
+# SCRIPTSdir
+# USHdir
+#
+# workflow:
+# EXPTDIR
+#
+# task_get_extrn_lbcs:
+# EXTRN_MDL_FILES_LBCS
+# EXTRN_MDL_LBCS_OFFSET_HRS
+# EXTRN_MDL_NAME_LBCS
+# EXTRN_MDL_SOURCE_BASEDIR_LBCS
+# EXTRN_MDL_SYSBASEDIR_LBCS
+# FV3GFS_FILE_FMT_LBCS
+# LBC_SPEC_INTVL_HRS
+# USE_USER_STAGED_EXTRN_FILES
+#
+# task_get_extrn_ics:
+# EXTRN_MDL_FILES_ICS
+# EXTRN_MDL_ICS_OFFSET_HRS
+# EXTRN_MDL_NAME_ICS
+# EXTRN_MDL_SOURCE_BASEDIR_ICS
+# EXTRN_MDL_SYSBASEDIR_ICS
+# FV3GFS_FILE_FMT_ICS
+# USE_USER_STAGED_EXTRN_FILES
 #
 #-----------------------------------------------------------------------
 #
@@ -29,8 +57,12 @@
 #-----------------------------------------------------------------------
 #
 . $USHdir/source_util_funcs.sh
-source_config_for_task "task_get_extrn_ics|task_get_extrn_lbcs" ${GLOBAL_VAR_DEFNS_FP}
-. $USHdir/job_preamble.sh "TRUE"
+for sect in user nco workflow task_get_extrn_lbcs task_get_extrn_ics ; do
+  source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
+. $USHdir/job_preamble.sh
+
+
 #
 #-----------------------------------------------------------------------
 #
@@ -222,8 +254,8 @@ if [ $RUN_ENVIR = "nco" ]; then
   export EXTRN_MDL_STAGING_DIR="${EXTRN_MDL_STAGING_DIR:-${DATA}}"
 else
   export EXTRN_MDL_STAGING_DIR="${COMIN}/${EXTRN_MDL_NAME}/for_${ICS_OR_LBCS}"
-  mkdir_vrfy -p "${EXTRN_MDL_STAGING_DIR}"
-  cd_vrfy "${EXTRN_MDL_STAGING_DIR}"
+  mkdir -p "${EXTRN_MDL_STAGING_DIR}"
+  cd "${EXTRN_MDL_STAGING_DIR}"
 fi
 #
 #-----------------------------------------------------------------------
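mkdir_vrfy and cd_vrfy were thin wrappers that checked the exit status of mkdir/cd and aborted with a message on failure, roughly like the sketch below (a reconstruction, not the retired code; print_err_msg_exit stands for the app's fatal-error helper). Since the J-jobs already abort on error, the plain builtins now do the same job:

    # Rough reconstruction of the retired wrapper:
    mkdir_vrfy() {
      mkdir "$@" || print_err_msg_exit "Call to mkdir failed: mkdir $*"
    }

    # With fail-fast shell options the wrapper adds nothing:
    set -e
    mkdir -p "${EXTRN_MDL_STAGING_DIR}"
    cd "${EXTRN_MDL_STAGING_DIR}"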
diff --git a/jobs/JREGIONAL_GET_VERIF_OBS b/jobs/JREGIONAL_GET_VERIF_OBS
index 3820a739db..8190314fc5 100755
--- a/jobs/JREGIONAL_GET_VERIF_OBS
+++ b/jobs/JREGIONAL_GET_VERIF_OBS
@@ -3,7 +3,19 @@
 #
 #-----------------------------------------------------------------------
 #
-# This script checks, pulls, and stages observation data for model verification.
+# The J-Job script that checks, pulls, and stages observation data for
+# model verification.
+#
+# Run-time environment variables:
+#
+# CDATE
+# GLOBAL_VAR_DEFNS_FP
+#
+# Experiment variables
+#
+# user:
+# SCRIPTSdir
+# USHdir
 #
 #-----------------------------------------------------------------------
 #
@@ -16,7 +28,9 @@
 #-----------------------------------------------------------------------
 #
 . $USHdir/source_util_funcs.sh
-source_config_for_task " " ${GLOBAL_VAR_DEFNS_FP}
+for sect in user nco workflow ; do
+  source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
 . $USHdir/job_preamble.sh
 #
 #-----------------------------------------------------------------------
@@ -55,7 +69,6 @@
 This is the J-job script for the task that checks, pulls, and stages
 observation data for verification purposes.
 ========================================================================"
-#
 #
 #-----------------------------------------------------------------------
 #
diff --git a/jobs/JREGIONAL_INTEGRATION_TEST b/jobs/JREGIONAL_INTEGRATION_TEST
index cbb93e86cf..983981ecf3 100755
--- a/jobs/JREGIONAL_INTEGRATION_TEST
+++ b/jobs/JREGIONAL_INTEGRATION_TEST
@@ -1,5 +1,31 @@
 #!/bin/bash
+
+#
+#-----------------------------------------------------------------------
+#
+# This J-Job script runs a set of integration tests at the end of a
+# WE2E test.
+#
+# Run-time environment variables:
+#
+# GLOBAL_VAR_DEFNS_FP
+# CDATE
+# FCST_DIR
+# SLASH_ENSMEM_SUBDIR
+#
+# Experiment variables
+#
+# user:
+# RUN_ENVIR
+# SCRIPTSdir
+# USHdir
+#
+# workflow:
+# FCST_LEN_HRS
+#
+#-----------------------------------------------------------------------
+#
+
 #
 #-----------------------------------------------------------------------
 #
@@ -8,8 +34,11 @@
 #-----------------------------------------------------------------------
 #
 . $USHdir/source_util_funcs.sh
-source_config_for_task "task_integration_test|task_run_fcst" ${GLOBAL_VAR_DEFNS_FP}
+for sect in user nco workflow ; do
+  source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
 . $USHdir/job_preamble.sh
+
 #
 #-----------------------------------------------------------------------
 #
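The integration-test J-job defers the actual checks to its ex-script. Purely as an illustration of the kind of end-of-test assertion such a script can make (the dynf/phyf file names below are an assumption, not taken from this diff), one might verify that every forecast hour produced output:

    # Hypothetical end-of-test check: one dyn/phy output pair per forecast hour.
    for fhr in $(seq 0 "${FCST_LEN_HRS}"); do
      fhr3=$(printf "%03d" "${fhr}")
      for fn in "dynf${fhr3}.nc" "phyf${fhr3}.nc"; do
        [ -f "${FCST_DIR}/${fn}" ] || { echo "FAIL: missing ${fn}" >&2; exit 1; }
      done
    done
    echo "PASS: all forecast output present through hour ${FCST_LEN_HRS}"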
diff --git a/jobs/JREGIONAL_MAKE_GRID b/jobs/JREGIONAL_MAKE_GRID
index 844d782bc7..01484041e9 100755
--- a/jobs/JREGIONAL_MAKE_GRID
+++ b/jobs/JREGIONAL_MAKE_GRID
@@ -3,97 +3,25 @@
 #
 #-----------------------------------------------------------------------
 #
-# This script generates grid and orography files in NetCDF format that
-# are required as inputs for running the FV3-LAM model (i.e. the FV3 mo-
-# del on a regional domain).  It in turn calls three other scripts whose
-# file names are specified in the variables grid_gen_scr, orog_gen_scr,
-# and orog_fltr_scr and then calls the executable defined in the varia-
-# ble shave_exec.  These scripts/executable perform the following tasks:
-#
-# 1) grid_gen_scr:
-#
-#    This script generates grid files that will be used by subsequent
-#    preprocessing steps.  It places its output in the directory defined
-#    by GRID_DIR.  Note that:
-#
-#    a) This script creates grid files for each of the 7 tiles of the
-#       cubed sphere grid (where tiles 1 through 6 cover the globe, and
-#       tile 7 is the regional grid located somewhere within tile 6)
-#       even though the forecast will be performed only on tile 7.
-#
-#    b) The tile 7 grid file that this script creates includes a halo,
-#       i.e. a layer of cells beyond the boundary of tile 7).  The width
-#       of this halo (i.e. the number of cells in the halo in the direc-
-#       tion perpendicular to the boundary of the tile) must be made
-#       large enough such that the "shave" steps later below (which take
-#       this file as input and generate grid files with thinner halos)
-#       have a wide enough starting halo to work with.  More specifical-
-#       ly, the FV3-LAM model needs as inputs two grid files: one with a
-#       halo that is 3 cells and another with a halo that is 4 cells
-#       wide.  Thus, the halo in the grid file that the grid_gen_scr
-#       script generates must be greater than 4 since otherwise, the
-#       shave steps would shave off cells from within the interior of
-#       tile 7.  We will let NHW denote the width of the halo in the
-#       grid file generated by grid_gen_scr.  The "n" in this variable
-#       name denotes number of cells, the "h" is used to indicate that
-#       it refers to a halo region, the "w" is used to indicate that it
-#       refers to a wide halo (i.e. wider than the 3-cell and 4-cell ha-
-#       los that the FV3-LAM model requires as inputs, and the "T7" is
-#       used to indicate that the cell count is on tile 7.
-#
-# 2) orog_gen_scr:
-#
-#    This script generates the orography file.  It places its output in
-#    the directory defined by OROG_DIR.  Note that:
-#
-#    a) This script generates an orography file only on tile 7.
-#
-#    b) This orography file contains a halo of the same width (NHW)
-#       as the grid file for tile 7 generated by the grid_gen_scr script
-#       in the previous step.
-#
-# 3) orog_fltr_scr:
-#
-#    This script generates a filtered version of the orography file ge-
-#    nerated by the script orog_gen_scr.  This script places its output
-#    in the temporary directory defined in WORKDIR_FLTR.  Note that:
-#
-#    a) The filtered orography file generated by this script contains a
-#       halo of the same width (NHW) as the (unfiltered) orography file
-#       generated by script orog_gen_scr (and the grid file generated by
-#       grid_gen_scr).
-#
-#    b) In analogy with the input grid files, the FV3-LAM model needs as
-#       input two (filtered) orography files -- one with no halo cells
-#       and another with 3.  These are obtained later below by "shaving"
-#       off layers of halo cells from the (filtered) orography file ge-
-#       nerated in this step.
-#
-# 4) shave_exec:
-#
-#    This "shave" executable is called 4 times to generate 4 files from
-#    the tile 7 grid file generated by grid_gen_scr and the tile 7 fil-
-#    tered orography file generated by orog_fltr_scr (both of which have
-#    a halo of width NHW cells).  The 4 output files are placed in the
-#    temporary directory defined in WORKDIR_SHVE.  More specifically:
-#
-#    a) shave_exec is called to shave the halo in the tile 7 grid file
-#       generated by grid_gen_scr down to a width of 3 cells and store
-#       the result in a new grid file in WORKDIR_SHVE.
-#
-#    b) shave_exec is called to shave the halo in the tile 7 grid file
-#       generated by grid_gen_scr down to a width of 4 cells and store
-#       the result in a new grid file in WORKDIR_SHVE.
-#
-#    c) shave_exec is called to shave the halo in the tile 7 filtered
-#       orography file generated by orog_fltr_scr down to a width of 0
-#       cells (i.e. no halo) and store the result in a new filtered oro-
-#       graphy file in WORKDIR_SHVE.
-#
-#    d) shave_exec is called to shave the halo in the tile 7 filtered
-#       orography file generated by orog_fltr_scr down to a width of 4
-#       cells and store the result in a new filtered orography file in
-#       WORKDIR_SHVE.
+# The J-Job that generates input NetCDF grid files for running the
+# regional configuration of FV3
+#
+# Run-time environment variables:
+#
+# GLOBAL_VAR_DEFNS_FP
+# DATA
+#
+# Experiment variables
+#
+# user:
+# USHdir
+# SCRIPTSdir
+#
+# workflow:
+# PREEXISTING_DIR_METHOD
+#
+# task_make_grid:
+# GRID_DIR
 #
 #-----------------------------------------------------------------------
 #
@@ -106,7 +34,9 @@
 #-----------------------------------------------------------------------
 #
 . $USHdir/source_util_funcs.sh
-source_config_for_task "task_make_grid" ${GLOBAL_VAR_DEFNS_FP}
+for sect in user nco workflow task_make_grid ; do
+  source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
 . $USHdir/job_preamble.sh
 #
 #-----------------------------------------------------------------------
@@ -153,7 +83,7 @@ This is the J-job script for the task that generates grid files.
 #-----------------------------------------------------------------------
 #
 check_for_preexist_dir_file "${GRID_DIR}" "${PREEXISTING_DIR_METHOD}"
-mkdir_vrfy -p "${GRID_DIR}"
+mkdir -p "${GRID_DIR}"
 #
 #-----------------------------------------------------------------------
 #
@@ -162,7 +92,7 @@ mkdir_vrfy -p "${GRID_DIR}"
 #-----------------------------------------------------------------------
 #
 DATA="${DATA:-${GRID_DIR}/tmp}"
-mkdir_vrfy -p "$DATA"
+mkdir -p "$DATA"
 #
 #-----------------------------------------------------------------------
 #
diff --git a/jobs/JREGIONAL_MAKE_ICS b/jobs/JREGIONAL_MAKE_ICS
index 70306c0a87..10a3b36fb7 100755
--- a/jobs/JREGIONAL_MAKE_ICS
+++ b/jobs/JREGIONAL_MAKE_ICS
@@ -1,5 +1,31 @@
 #!/usr/bin/env bash

+#
+#-----------------------------------------------------------------------
+#
+# The J-job to run chgres_cube for preparing initial conditions for the
+# FV3 forecast
+#
+# Run-time environment variables:
+#
+# COMIN
+# DATA
+# GLOBAL_VAR_DEFNS_FP
+# SLASH_ENSMEM_SUBDIR
+#
+# Experiment variables
+#
+# user:
+# RUN_ENVIR
+# SCRIPTSdir
+# USHdir
+#
+# workflow:
+# EXPTDIR
+#
+#-----------------------------------------------------------------------
+#
+
 #
 #-----------------------------------------------------------------------
 #
@@ -8,7 +34,9 @@
 #-----------------------------------------------------------------------
 #
 . $USHdir/source_util_funcs.sh
-source_config_for_task "task_make_ics" ${GLOBAL_VAR_DEFNS_FP}
+for sect in user nco workflow ; do
+  source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
 . $USHdir/job_preamble.sh
 #
 #-----------------------------------------------------------------------
@@ -60,7 +88,7 @@ if [ $RUN_ENVIR = "nco" ]; then
 else
   export INPUT_DATA="${COMIN}${SLASH_ENSMEM_SUBDIR}/INPUT"
 fi
-mkdir_vrfy -p "${INPUT_DATA}"
+mkdir -p "${INPUT_DATA}"
 #
 #
 #-----------------------------------------------------------------------
@@ -72,8 +100,8 @@ mkdir_vrfy -p "${INPUT_DATA}"
 if [ $RUN_ENVIR = "community" ]; then
   DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}/tmp_MAKE_ICS}"
   check_for_preexist_dir_file "$DATA" "delete"
-  mkdir_vrfy -p $DATA
-  cd_vrfy $DATA
+  mkdir -p $DATA
+  cd $DATA
 fi
 #
 #-----------------------------------------------------------------------
diff --git a/jobs/JREGIONAL_MAKE_LBCS b/jobs/JREGIONAL_MAKE_LBCS
index 16ac382fee..91d9d3edbe 100755
--- a/jobs/JREGIONAL_MAKE_LBCS
+++ b/jobs/JREGIONAL_MAKE_LBCS
@@ -1,5 +1,29 @@
 #!/usr/bin/env bash

+#
+#-----------------------------------------------------------------------
+#
+# The J-job to run chgres_cube for preparing lateral boundary conditions
+# for the FV3 forecast
+#
+# Run-time environment variables:
+#
+# CDATE
+# COMIN
+# DATA
+# GLOBAL_VAR_DEFNS_FP
+# SLASH_ENSMEM_SUBDIR
+#
+# Experiment variables
+#
+# user:
+# RUN_ENVIR
+# SCRIPTSdir
+# USHdir
+#
+#-----------------------------------------------------------------------
+#
+
 #
 #-----------------------------------------------------------------------
 #
@@ -8,7 +32,9 @@
 #-----------------------------------------------------------------------
 #
 . $USHdir/source_util_funcs.sh
-source_config_for_task "task_make_lbcs" ${GLOBAL_VAR_DEFNS_FP}
+for sect in user nco workflow ; do
+  source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
 . $USHdir/job_preamble.sh
 #
 #-----------------------------------------------------------------------
@@ -60,7 +86,7 @@ if [ $RUN_ENVIR = "nco" ]; then
 else
   export INPUT_DATA="${COMIN}${SLASH_ENSMEM_SUBDIR}/INPUT"
 fi
-mkdir_vrfy -p "${INPUT_DATA}"
+mkdir -p "${INPUT_DATA}"
 #
 #-----------------------------------------------------------------------
 #
@@ -71,8 +97,8 @@ mkdir_vrfy -p "${INPUT_DATA}"
 if [ "${RUN_ENVIR}" = "community" ]; then
   DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}/tmp_MAKE_LBCS}"
   check_for_preexist_dir_file "$DATA" "delete"
-  mkdir_vrfy -p $DATA
-  cd_vrfy $DATA
+  mkdir -p $DATA
+  cd $DATA
 fi
 #
 #-----------------------------------------------------------------------
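make_ics and make_lbcs share the same community-mode working-directory idiom: dispose of any pre-existing directory, recreate it, and cd into it. check_for_preexist_dir_file chooses the disposal behavior from its second argument; a simplified stand-in follows (an assumption, since the app's helper supports more behaviors, normally driven by PREEXISTING_DIR_METHOD):

    # Simplified stand-in for check_for_preexist_dir_file.
    check_for_preexist_dir_file() {
      local path="$1" method="$2"
      [ -e "$path" ] || return 0
      case "$method" in
        delete) rm -rf "$path" ;;
        rename) mv "$path" "${path}_old_$(date +%Y%m%d%H%M%S)" ;;
        *)      echo "ERROR: $path already exists" >&2; exit 1 ;;
      esac
    }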
diff --git a/jobs/JREGIONAL_MAKE_OROG b/jobs/JREGIONAL_MAKE_OROG
index b6f674e5ee..28e2f965a5 100755
--- a/jobs/JREGIONAL_MAKE_OROG
+++ b/jobs/JREGIONAL_MAKE_OROG
@@ -1,5 +1,27 @@
 #!/usr/bin/env bash

+#
+#-----------------------------------------------------------------------
+#
+# The J-Job that generates input NetCDF orography files for running the
+# regional configuration of FV3
+#
+# Run-time environment variables:
+#
+# GLOBAL_VAR_DEFNS_FP
+#
+# Experiment variables
+#
+# user:
+# SCRIPTSdir
+# USHdir
+#
+# task_make_orog:
+# OROG_DIR
+#
+#-----------------------------------------------------------------------
+#
+
 #
 #-----------------------------------------------------------------------
 #
@@ -8,7 +30,9 @@
 #-----------------------------------------------------------------------
 #
 . $USHdir/source_util_funcs.sh
-source_config_for_task "task_make_orog" ${GLOBAL_VAR_DEFNS_FP}
+for sect in user nco workflow task_make_orog ; do
+  source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
 . $USHdir/job_preamble.sh
 #
 #-----------------------------------------------------------------------
diff --git a/jobs/JREGIONAL_MAKE_SFC_CLIMO b/jobs/JREGIONAL_MAKE_SFC_CLIMO
index eee25b193a..30b2d2c346 100755
--- a/jobs/JREGIONAL_MAKE_SFC_CLIMO
+++ b/jobs/JREGIONAL_MAKE_SFC_CLIMO
@@ -1,5 +1,30 @@
 #!/usr/bin/env bash

+#
+#-----------------------------------------------------------------------
+#
+# The J-job that generates surface climatology files for the experiment
+# domain
+#
+# Run-time environment variables:
+#
+# GLOBAL_VAR_DEFNS_FP
+#
+# Experiment variables
+#
+# user:
+# SCRIPTSdir
+# USHdir
+#
+# workflow:
+# PREEXISTING_DIR_METHOD
+#
+# task_make_sfc_climo:
+# SFC_CLIMO_DIR
+#
+#-----------------------------------------------------------------------
+#
+
 #
 #-----------------------------------------------------------------------
 #
@@ -8,7 +33,9 @@
 #-----------------------------------------------------------------------
 #
 . $USHdir/source_util_funcs.sh
-source_config_for_task "task_make_sfc_climo" ${GLOBAL_VAR_DEFNS_FP}
+for sect in user nco workflow task_make_sfc_climo ; do
+  source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
 . $USHdir/job_preamble.sh
 #
 #-----------------------------------------------------------------------
@@ -55,7 +82,7 @@ climatology.
 #-----------------------------------------------------------------------
 #
 check_for_preexist_dir_file "${SFC_CLIMO_DIR}" "${PREEXISTING_DIR_METHOD}"
-mkdir_vrfy -p "${SFC_CLIMO_DIR}"
+mkdir -p "${SFC_CLIMO_DIR}"
 #
 #-----------------------------------------------------------------------
 #
@@ -66,7 +93,7 @@ mkdir_vrfy -p "${SFC_CLIMO_DIR}"
 DATA="${DATA:-${SFC_CLIMO_DIR}/tmp}"
 if [ $RUN_ENVIR != "nco" ]; then
   check_for_preexist_dir_file "$DATA" "delete"
-  mkdir_vrfy $DATA
+  mkdir $DATA
 fi
 #
 #-----------------------------------------------------------------------
diff --git a/jobs/JREGIONAL_PLOT_ALLVARS b/jobs/JREGIONAL_PLOT_ALLVARS
index 5e59abd93d..be5ee10f82 100755
--- a/jobs/JREGIONAL_PLOT_ALLVARS
+++ b/jobs/JREGIONAL_PLOT_ALLVARS
@@ -1,5 +1,45 @@
 #!/usr/bin/env bash

+#
+#-----------------------------------------------------------------------
+#
+# The J-job to plot the forecast output
+#
+# Run-time environment variables:
+#
+# CDATE
+# COMOUT
+# GLOBAL_VAR_DEFNS_FP
+# SLASH_ENSMEM_SUBDIR
+#
+# Experiment variables
+#
+# user:
+# RUN_ENVIR
+# SCRIPTSdir
+# USHdir
+#
+# platform:
+# FIXshp
+#
+# workflow:
+# EXPT_SUBDIR
+# PREEXISTING_DIR_METHOD
+# PREDEF_GRID_NAME
+#
+# task_plot_allvars:
+# COMOUT_REF
+# PLOT_DOMAINS
+# PLOT_FCST_END
+# PLOT_FCST_INC
+# PLOT_FCST_START
+#
+# task_run_fcst:
+# FCST_LEN_HRS
+#
+#-----------------------------------------------------------------------
+#
+
 #
 #-----------------------------------------------------------------------
 #
@@ -8,7 +48,9 @@
 #-----------------------------------------------------------------------
 #
 . $USHdir/source_util_funcs.sh
-source_config_for_task "task_plot_allvars|task_run_fcst" ${GLOBAL_VAR_DEFNS_FP}
+for sect in user nco platform workflow task_plot_allvars task_run_fcst ; do
+  source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
 . $USHdir/job_preamble.sh
 #
 #-----------------------------------------------------------------------
@@ -67,6 +109,11 @@ COMOUT_REF=$(eval echo ${COMOUT_REF})
 #-----------------------------------------------------------------------
 #
+if [ -n "${SRW_GRAPHICS_ENV:-}" ] ; then
+  set +u
+  conda activate ${SRW_GRAPHICS_ENV}
+  set -u
+fi
 # plot all variables
 $SCRIPTSdir/exregional_plot_allvars.py \
   --cycle ${CDATE} \
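The set +u/set -u bracket around conda activate is there because conda's activation hooks can reference unset variables, which would kill a script running under nounset. Shown standalone:

    # SRW_GRAPHICS_ENV names the conda environment holding the plotting stack.
    set -u
    if [ -n "${SRW_GRAPHICS_ENV:-}" ] ; then
      set +u                               # relax nounset for the hooks
      conda activate "${SRW_GRAPHICS_ENV}"
      set -u                               # restore strict mode
    fi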
diff --git a/jobs/JREGIONAL_RUN_FCST b/jobs/JREGIONAL_RUN_FCST
index a7f7c96031..2542ab32f8 100755
--- a/jobs/JREGIONAL_RUN_FCST
+++ b/jobs/JREGIONAL_RUN_FCST
@@ -3,9 +3,24 @@
 #
 #-----------------------------------------------------------------------
 #
-# This script copies files from various directories into the experiment
-# directory, creates links to some of them, and modifies others (e.g.
-# templates) to customize them for the current experiment setup.
+# The J-Job that runs the forecast
+#
+# Run-time environment variables:
+#
+# CDATE
+# COMIN
+# DATA
+# GLOBAL_VAR_DEFNS_FP
+# SLASH_ENSMEM_SUBDIR
+#
+# Experiment variables
+#
+# user:
+# SCRIPTSdir
+# USHdir
+#
+# workflow:
+# RUN_ENVIR
 #
 #-----------------------------------------------------------------------
 #
@@ -18,7 +33,9 @@
 #-----------------------------------------------------------------------
 #
 . $USHdir/source_util_funcs.sh
-source_config_for_task "task_run_fcst" ${GLOBAL_VAR_DEFNS_FP}
+for sect in user nco workflow ; do
+  source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
 . $USHdir/job_preamble.sh "TRUE"
 #
 #-----------------------------------------------------------------------
@@ -76,8 +93,8 @@ fi
 #
 #-----------------------------------------------------------------------
 #
-mkdir_vrfy -p ${DATA}/INPUT
-mkdir_vrfy -p ${DATA}/RESTART
+mkdir -p ${DATA}/INPUT
+mkdir -p ${DATA}/RESTART
 #
 #-----------------------------------------------------------------------
 #
diff --git a/jobs/JREGIONAL_RUN_MET_GENENSPROD_OR_ENSEMBLESTAT b/jobs/JREGIONAL_RUN_MET_GENENSPROD_OR_ENSEMBLESTAT
index 707697b5ab..c7aee12df1 100755
--- a/jobs/JREGIONAL_RUN_MET_GENENSPROD_OR_ENSEMBLESTAT
+++ b/jobs/JREGIONAL_RUN_MET_GENENSPROD_OR_ENSEMBLESTAT
@@ -3,7 +3,18 @@
 #
 #-----------------------------------------------------------------------
 #
+# The J-Job that runs either METplus's gen_ens_prod tool or its
+# ensemble_stat tool for ensemble verification.
 #
+# Run-time environment variables:
+#
+# GLOBAL_VAR_DEFNS_FP
+#
+# Experiment variables
+#
+# user:
+# SCRIPTSdir
+# USHdir
 #
 #-----------------------------------------------------------------------
 #
@@ -16,7 +27,9 @@
 #-----------------------------------------------------------------------
 #
 . $USHdir/source_util_funcs.sh
-source_config_for_task "task_run_vx_ensgrid|task_run_vx_enspoint" ${GLOBAL_VAR_DEFNS_FP}
+for sect in user nco workflow ; do
+  source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
 . $USHdir/job_preamble.sh
 #
 #-----------------------------------------------------------------------
diff --git a/jobs/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX b/jobs/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX
index 0301e9946a..e1207e0a81 100755
--- a/jobs/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX
+++ b/jobs/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX
@@ -3,8 +3,18 @@
 #
 #-----------------------------------------------------------------------
 #
-# This script runs the METplus GridStat or PointStat tool for deterministic
-# verification.
+# This script runs the METplus GridStat or PointStat tool for
+# deterministic verification.
+#
+# Run-time environment variables:
+#
+# GLOBAL_VAR_DEFNS_FP
+#
+# Experiment variables
+#
+# user:
+# SCRIPTSdir
+# USHdir
 #
 #-----------------------------------------------------------------------
 #
@@ -17,7 +27,9 @@
 #-----------------------------------------------------------------------
 #
 . $USHdir/source_util_funcs.sh
-source_config_for_task "task_run_vx_gridstat" ${GLOBAL_VAR_DEFNS_FP}
+for sect in user nco workflow ; do
+  source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
 . $USHdir/job_preamble.sh
 #
 #-----------------------------------------------------------------------
diff --git a/jobs/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX_ENSMEAN b/jobs/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX_ENSMEAN
index ab08320f33..29b22502a4 100755
--- a/jobs/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX_ENSMEAN
+++ b/jobs/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX_ENSMEAN
@@ -3,7 +3,19 @@
 #
 #-----------------------------------------------------------------------
 #
+# The J-Job that runs MET/METplus's GridStat or PointStat tool to
+# perform verification on the ensemble mean of a specified field (or
+# group of fields).
 #
+# Run-time environment variables:
+#
+# GLOBAL_VAR_DEFNS_FP
+#
+# Experiment variables
+#
+# user:
+# SCRIPTSdir
+# USHdir
 #
 #-----------------------------------------------------------------------
 #
@@ -16,7 +28,9 @@
 #-----------------------------------------------------------------------
 #
 . $USHdir/source_util_funcs.sh
-source_config_for_task "task_run_vx_ensgrid_mean" ${GLOBAL_VAR_DEFNS_FP}
+for sect in user nco workflow ; do
+  source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
 . $USHdir/job_preamble.sh
 #
 #-----------------------------------------------------------------------
diff --git a/jobs/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX_ENSPROB b/jobs/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX_ENSPROB
index 7da98212ac..731cf575a5 100755
--- a/jobs/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX_ENSPROB
+++ b/jobs/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX_ENSPROB
@@ -3,7 +3,19 @@
 #
 #-----------------------------------------------------------------------
 #
+# The J-Job that runs METplus's GridStat or PointStat tool to perform
+# verification on the ensemble frequencies/probabilities of a specified
+# field (or group of fields).
 #
+# Run-time environment variables:
+#
+# GLOBAL_VAR_DEFNS_FP
+#
+# Experiment variables
+#
+# user:
+# SCRIPTSdir
+# USHdir
 #
 #-----------------------------------------------------------------------
 #
@@ -16,7 +28,9 @@
 #-----------------------------------------------------------------------
 #
 . $USHdir/source_util_funcs.sh
-source_config_for_task "task_run_vx_ensgrid_prob" ${GLOBAL_VAR_DEFNS_FP}
+for sect in user nco workflow ; do
+  source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
 . $USHdir/job_preamble.sh
 #
 #-----------------------------------------------------------------------
diff --git a/jobs/JREGIONAL_RUN_MET_PB2NC_OBS b/jobs/JREGIONAL_RUN_MET_PB2NC_OBS
index e36e72418f..89c9bb73f4 100755
--- a/jobs/JREGIONAL_RUN_MET_PB2NC_OBS
+++ b/jobs/JREGIONAL_RUN_MET_PB2NC_OBS
@@ -4,6 +4,18 @@
 #-----------------------------------------------------------------------
 #
 #
+# The J-Job that runs METplus's pb2nc tool to convert NDAS prepBUFR
+# observations to NetCDF format.
+#
+# Run-time environment variables:
+#
+# GLOBAL_VAR_DEFNS_FP
+#
+# Experiment variables
+#
+# user:
+# SCRIPTSdir
+# USHdir
 #
 #-----------------------------------------------------------------------
 #
@@ -16,7 +28,9 @@
 #-----------------------------------------------------------------------
 #
 . $USHdir/source_util_funcs.sh
-source_config_for_task "task_run_met_pb2nc_obs" ${GLOBAL_VAR_DEFNS_FP}
+for sect in user nco workflow ; do
+  source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
 . $USHdir/job_preamble.sh
 #
 #-----------------------------------------------------------------------
@@ -51,8 +65,8 @@ print_info_msg "
 Entering script:  \"${scrfunc_fn}\"
 In directory:     \"${scrfunc_dir}\"

-This is the J-job script for the task that runs METplus for point-stat
-by initialization time for all forecast hours.
+This is the J-job script for the task that runs METplus for pb2nc on
+NDAS observations.
 ========================================================================"
 #
 #-----------------------------------------------------------------------
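pb2nc converts prepBUFR point observations (here, NDAS) into the NetCDF layout that MET's point verification tools read. The workflow drives it through METplus, but the underlying tool call has roughly this shape (the file names below are hypothetical):

    # usage: pb2nc <prepbufr_file> <output_netcdf> <config_file>
    pb2nc prepbufr.ndas.20190615.t12z.nr ndas.t12z.pb2nc.nc PB2NCConfig_ndas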
diff --git a/jobs/JREGIONAL_RUN_MET_PCPCOMBINE b/jobs/JREGIONAL_RUN_MET_PCPCOMBINE
index 7364ed96c9..8ac29887e8 100755
--- a/jobs/JREGIONAL_RUN_MET_PCPCOMBINE
+++ b/jobs/JREGIONAL_RUN_MET_PCPCOMBINE
@@ -3,7 +3,20 @@
 #
 #-----------------------------------------------------------------------
 #
+# The J-job that runs the MET/METplus PcpCombine tool on hourly
+# accumulated precipitation (APCP) data to obtain APCP for multi-hour
+# accumulation periods.  The data can be from CCPA observations or a
+# forecast.
 #
+# Run-time environment variables:
+#
+# GLOBAL_VAR_DEFNS_FP
+#
+# Experiment variables
+#
+# user:
+# SCRIPTSdir
+# USHdir
 #
 #-----------------------------------------------------------------------
 #
@@ -16,7 +29,9 @@
 #-----------------------------------------------------------------------
 #
 . $USHdir/source_util_funcs.sh
-source_config_for_task "task_run_met_pcpcombine" ${GLOBAL_VAR_DEFNS_FP}
+for sect in user nco workflow ; do
+  source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
 . $USHdir/job_preamble.sh
 #
 #-----------------------------------------------------------------------
diff --git a/jobs/JREGIONAL_RUN_POST b/jobs/JREGIONAL_RUN_POST
index 97b100967c..58c469fc6d 100755
--- a/jobs/JREGIONAL_RUN_POST
+++ b/jobs/JREGIONAL_RUN_POST
@@ -3,8 +3,38 @@
 #
 #-----------------------------------------------------------------------
 #
-# This script runs the post-processor (UPP) on the NetCDF output files
-# of the write component of the FV3-LAM model.
+# The J-Job that runs the Unified Post-processor (UPP) on the NetCDF
+# output from FV3.
+#
+# Run-time environment variables:
+#
+# COMIN
+# COMOUT
+# cyc
+# DATA
+# DATAROOT
+# GLOBAL_VAR_DEFNS_FP
+# PDY
+# SLASH_ENSMEM_SUBDIR
+#
+# Experiment variables
+#
+# user:
+# SCRIPTSdir
+# USHdir
+#
+# platform:
+# WORKFLOW_MANAGER
+#
+# workflow:
+# DATE_FIRST_CYCL
+# FCST_LEN_CYCL
+# FCST_LEN_HRS
+# INCR_CYCL_FREQ
+# RUN_ENVIR
+#
+# task_run_post:
+# SUB_HOURLY_POST
 #
 #-----------------------------------------------------------------------
 #
@@ -17,7 +47,9 @@
 #-----------------------------------------------------------------------
 #
 . $USHdir/source_util_funcs.sh
-source_config_for_task "task_run_post|task_run_fcst" ${GLOBAL_VAR_DEFNS_FP}
+for sect in user nco platform workflow task_run_post ; do
+  source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
 . $USHdir/job_preamble.sh
 #
 #-----------------------------------------------------------------------
@@ -60,7 +92,7 @@ on the output files corresponding to a specified forecast hour.
 # minutes (fmn) are set to "00".  This is necessary in order to pass
 # "fmn" into the post ex-script for the calculation of post_time.
 #
-if [ "${SUB_HOURLY_POST}" != "TRUE" ]; then
+if [ $(boolify "${SUB_HOURLY_POST}") != "TRUE" ]; then
   export fmn="00"
 fi
 #
@@ -85,18 +117,18 @@ fi
 if [ "${RUN_ENVIR}" = "community" ]; then
   DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}}"
   export COMOUT="${DATA}/postprd"
-  mkdir_vrfy -p "${COMOUT}"
+  mkdir -p "${COMOUT}"
 fi

-if [ "${SUB_HOURLY_POST}" = "TRUE" ]; then
+if [ $(boolify "${SUB_HOURLY_POST}") = "TRUE" ]; then
   export DATA_FHR="${DATA:-$COMOUT}/$fhr$fmn"
 else
   export DATA_FHR="${DATA:-$COMOUT}/$fhr"
 fi
 check_for_preexist_dir_file "${DATA_FHR}" "delete"
-mkdir_vrfy -p "${DATA_FHR}"
+mkdir -p "${DATA_FHR}"

-cd_vrfy "${DATA_FHR}"
+cd "${DATA_FHR}"
 #
 #-----------------------------------------------------------------------
 #
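Comparing ${SUB_HOURLY_POST} directly against "TRUE" breaks as soon as the YAML-derived value arrives as True, yes, and so on; boolify normalizes the spelling first. A minimal sketch of such a helper (the app defines its own version in its shell utilities):

    # Minimal boolify sketch: normalize truthy spellings to TRUE, else FALSE.
    boolify() {
      case "$(printf '%s' "$1" | tr '[:lower:]' '[:upper:]')" in
        TRUE|YES|Y|T|1) echo "TRUE"  ;;
        *)              echo "FALSE" ;;
      esac
    }

    # usage, as in the hunks above:
    if [ "$(boolify "${SUB_HOURLY_POST}")" = "TRUE" ]; then
      export DATA_FHR="${DATA:-$COMOUT}/$fhr$fmn"
    fi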
diff --git a/jobs/JREGIONAL_RUN_PRDGEN b/jobs/JREGIONAL_RUN_PRDGEN
index 2d30ced9db..1cf933b666 100755
--- a/jobs/JREGIONAL_RUN_PRDGEN
+++ b/jobs/JREGIONAL_RUN_PRDGEN
@@ -3,10 +3,33 @@
 #
 #-----------------------------------------------------------------------
 #
-# This script runs wgrib2 to create various subdomain GRIB2 files from
-# the raw UPP-generated GRIB2 output from the run_post task of the
+# The J-Job that runs wgrib2 to create various subdomain GRIB2 files
+# from the raw UPP-generated GRIB2 output from the run_post task of the
 # FV3-LAM model.
 #
+# Run-time environment variables:
+#
+# COMIN
+# COMOUT
+# DATA
+# GLOBAL_VAR_DEFNS_FP
+# SLASH_ENSMEM_SUBDIR
+#
+# Experiment variables
+#
+# user:
+# SCRIPTSdir
+# USHdir
+#
+# platform:
+# WORKFLOW_MANAGER
+#
+# workflow:
+# RUN_ENVIR
+#
+# task_run_post:
+# SUB_HOURLY_POST
+#
 #-----------------------------------------------------------------------
 #
@@ -18,7 +41,9 @@
 #-----------------------------------------------------------------------
 #
 . $USHdir/source_util_funcs.sh
-source_config_for_task "task_run_prdgen|task_run_post" ${GLOBAL_VAR_DEFNS_FP}
+for sect in user nco platform workflow task_run_post ; do
+  source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
 . $USHdir/job_preamble.sh
 #
 #-----------------------------------------------------------------------
@@ -84,18 +109,18 @@ DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}}"
 if [ "${RUN_ENVIR}" = "community" ]; then
   export COMOUT="${DATA}/postprd"
 fi
-mkdir_vrfy -p "${COMOUT}"
+mkdir -p "${COMOUT}"

 # subhourly post
-if [ "${SUB_HOURLY_POST}" = "TRUE" ]; then
+if [ $(boolify "${SUB_HOURLY_POST}") = "TRUE" ]; then
   export DATA_FHR="${DATA:-$COMOUT}/$fhr$fmn"
 else
   export DATA_FHR="${DATA:-$COMOUT}/$fhr"
 fi
 check_for_preexist_dir_file "${DATA_FHR}" "delete"
-mkdir_vrfy -p "${DATA_FHR}"
+mkdir -p "${DATA_FHR}"

-cd_vrfy "${DATA_FHR}"
+cd "${DATA_FHR}"
 #
 #-----------------------------------------------------------------------
 #
diff --git a/jobs/JSRW_AQM_ICS b/jobs/JSRW_AQM_ICS
index 0c4df8aa5b..5d5f6d970e 100755
--- a/jobs/JSRW_AQM_ICS
+++ b/jobs/JSRW_AQM_ICS
@@ -31,7 +31,9 @@ export SCRIPTSsrw="${HOMEdir}/scripts"
 # export USHdir="${USHsrw}" # should be removed later
 . ${USHsrw}/source_util_funcs.sh
-source_config_for_task "task_aqm_ics" ${GLOBAL_VAR_DEFNS_FP}
+for sect in user nco platform workflow global cpl_aqm_parm ; do
+  source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
 #
 #-----------------------------------------------------------------------
 #
@@ -130,7 +132,7 @@ setpdy.sh
 if [ ${subcyc} -ne 0 ]; then
   export cycle="t${cyc}${subcyc}z"
 fi
-if [ "${DO_ENSEMBLE}" = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then
+if [ $(boolify "${DO_ENSEMBLE}") = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then
   export dot_ensmem=".mem${ENSMEM_INDX}"
 else
   export dot_ensmem=
diff --git a/jobs/JSRW_AQM_LBCS b/jobs/JSRW_AQM_LBCS
index 11a1420d5e..9279dbe190 100755
--- a/jobs/JSRW_AQM_LBCS
+++ b/jobs/JSRW_AQM_LBCS
@@ -31,7 +31,10 @@ export SCRIPTSsrw="${HOMEdir}/scripts"
 # export USHdir="${USHsrw}" # should be removed later
 . ${USHsrw}/source_util_funcs.sh
-source_config_for_task "task_get_extrn_lbcs|task_make_orog|task_make_lbcs|cpl_aqm_parm|task_aqm_lbcs" ${GLOBAL_VAR_DEFNS_FP}
+for sect in user nco platform workflow global cpl_aqm_parm task_get_extrn_lbcs \
+  task_make_orog task_make_lbcs ; do
+  source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
 #
 #-----------------------------------------------------------------------
 #
@@ -131,7 +134,7 @@ setpdy.sh
 if [ ${subcyc} -ne 0 ]; then
   export cycle="t${cyc}${subcyc}z"
 fi
-if [ "${DO_ENSEMBLE}" = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then
+if [ $(boolify "${DO_ENSEMBLE}") = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then
   export dot_ensmem=".mem${ENSMEM_INDX}"
 else
   export dot_ensmem=
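Every JSRW_* job derives the same optional member suffix. Worked through with concrete values (the product file name below is hypothetical):

    # With DO_ENSEMBLE=TRUE and ENSMEM_INDX=003 ...
    if [ "$(boolify "${DO_ENSEMBLE}")" = "TRUE" ] && [ -n "${ENSMEM_INDX}" ]; then
      export dot_ensmem=".mem${ENSMEM_INDX}"
    else
      export dot_ensmem=
    fi
    # ... a product name like "aqm.t12z${dot_ensmem}.chem_sfc.f006.nc" expands
    # to aqm.t12z.mem003.chem_sfc.f006.nc; with ensembles off, the suffix is
    # empty and the name collapses to aqm.t12z.chem_sfc.f006.nc.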
diff --git a/jobs/JSRW_BIAS_CORRECTION_O3 b/jobs/JSRW_BIAS_CORRECTION_O3
index 3ab2f2d40f..0849614840 100755
--- a/jobs/JSRW_BIAS_CORRECTION_O3
+++ b/jobs/JSRW_BIAS_CORRECTION_O3
@@ -30,7 +30,10 @@ export SCRIPTSsrw="${HOMEdir}/scripts"
 # export USHdir="${USHsrw}" # should be removed later
 . ${USHsrw}/source_util_funcs.sh
-source_config_for_task "cpl_aqm_parm|task_run_post|task_bias_correction_o3" ${GLOBAL_VAR_DEFNS_FP}
+for sect in user nco platform workflow global cpl_aqm_parm task_run_post \
+  task_bias_correction_o3 ; do
+  source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
 #
 #-----------------------------------------------------------------------
 #
@@ -123,7 +126,7 @@ setpdy.sh
 if [ ${subcyc} -ne 0 ]; then
   export cycle="t${cyc}${subcyc}z"
 fi
-if [ "${DO_ENSEMBLE}" = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then
+if [ $(boolify "${DO_ENSEMBLE}") = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then
   export dot_ensmem=".mem${ENSMEM_INDX}"
 else
   export dot_ensmem=
diff --git a/jobs/JSRW_BIAS_CORRECTION_PM25 b/jobs/JSRW_BIAS_CORRECTION_PM25
index 42210e7f29..a0a7f76dad 100755
--- a/jobs/JSRW_BIAS_CORRECTION_PM25
+++ b/jobs/JSRW_BIAS_CORRECTION_PM25
@@ -30,7 +30,10 @@ export SCRIPTSsrw="${HOMEdir}/scripts"
 # export USHdir="${USHsrw}" # should be removed later
 . ${USHsrw}/source_util_funcs.sh
-source_config_for_task "cpl_aqm_parm|task_run_post|task_bias_correction_pm25" ${GLOBAL_VAR_DEFNS_FP}
+for sect in user nco platform workflow global cpl_aqm_parm task_run_post \
+  task_bias_correction_pm25 ; do
+  source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
 #
 #-----------------------------------------------------------------------
 #
@@ -123,7 +126,7 @@ setpdy.sh
 if [ ${subcyc} -ne 0 ]; then
   export cycle="t${cyc}${subcyc}z"
 fi
-if [ "${DO_ENSEMBLE}" = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then
+if [ $(boolify "${DO_ENSEMBLE}") = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then
   export dot_ensmem=".mem${ENSMEM_INDX}"
 else
   export dot_ensmem=
diff --git a/jobs/JSRW_FIRE_EMISSION b/jobs/JSRW_FIRE_EMISSION
index ae0343e60e..8a2b581274 100755
--- a/jobs/JSRW_FIRE_EMISSION
+++ b/jobs/JSRW_FIRE_EMISSION
@@ -30,7 +30,9 @@ export SCRIPTSsrw="${HOMEdir}/scripts"
 # export USHdir="${USHsrw}" # should be removed later
 . ${USHsrw}/source_util_funcs.sh
-source_config_for_task "cpl_aqm_parm|task_fire_emission" ${GLOBAL_VAR_DEFNS_FP}
+for sect in user nco platform workflow global cpl_aqm_parm ; do
+  source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
 #
 #-----------------------------------------------------------------------
 #
@@ -126,7 +128,7 @@ setpdy.sh
 if [ ${subcyc} -ne 0 ]; then
   export cycle="t${cyc}${subcyc}z"
 fi
-if [ "${DO_ENSEMBLE}" = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then
+if [ $(boolify "${DO_ENSEMBLE}") = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then
   export dot_ensmem=".mem${ENSMEM_INDX}"
 else
   export dot_ensmem=
diff --git a/jobs/JSRW_NEXUS_EMISSION b/jobs/JSRW_NEXUS_EMISSION
index 33f1aca757..aab5869cff 100755
--- a/jobs/JSRW_NEXUS_EMISSION
+++ b/jobs/JSRW_NEXUS_EMISSION
@@ -30,7 +30,9 @@ export SCRIPTSsrw="${HOMEdir}/scripts"
 # export USHdir="${USHsrw}" # should be removed later
 . ${USHsrw}/source_util_funcs.sh
-source_config_for_task "task_run_fcst|cpl_aqm_parm|task_nexus_emission" ${GLOBAL_VAR_DEFNS_FP}
+for sect in user nco platform workflow global cpl_aqm_parm task_nexus_emission ; do
+  source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
 #
 #-----------------------------------------------------------------------
 #
@@ -128,7 +130,7 @@ setpdy.sh
 if [ ${subcyc} -ne 0 ]; then
   export cycle="t${cyc}${subcyc}z"
 fi
-if [ "${DO_ENSEMBLE}" = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then
+if [ $(boolify "${DO_ENSEMBLE}") = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then
   export dot_ensmem=".mem${ENSMEM_INDX}"
 else
   export dot_ensmem=
diff --git a/jobs/JSRW_NEXUS_GFS_SFC b/jobs/JSRW_NEXUS_GFS_SFC
index 89d84c740d..ceed6be32a 100755
--- a/jobs/JSRW_NEXUS_GFS_SFC
+++ b/jobs/JSRW_NEXUS_GFS_SFC
@@ -30,7 +30,9 @@ export SCRIPTSsrw="${HOMEdir}/scripts"
 # export USHdir="${USHsrw}" # should be removed later
 . ${USHsrw}/source_util_funcs.sh
-source_config_for_task "cpl_aqm_parm|task_nexus_gfs_sfc" ${GLOBAL_VAR_DEFNS_FP}
+for sect in user nco platform workflow global cpl_aqm_parm ; do
+  source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
 #
 #-----------------------------------------------------------------------
 #
@@ -128,7 +130,7 @@ setpdy.sh
 if [ ${subcyc} -ne 0 ]; then
   export cycle="t${cyc}${subcyc}z"
 fi
-if [ "${DO_ENSEMBLE}" = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then
+if [ $(boolify "${DO_ENSEMBLE}") = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then
   export dot_ensmem=".mem${ENSMEM_INDX}"
 else
   export dot_ensmem=
@@ -143,7 +145,7 @@ fi
 if [ ${subcyc} -ne 0 ]; then
   export cycle="t${cyc}${subcyc}z"
 fi
-if [ "${DO_ENSEMBLE}" = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then
+if [ $(boolify "${DO_ENSEMBLE}") = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then
   export dot_ensmem=".mem${ENSMEM_INDX}"
 else
   export dot_ensmem=
diff --git a/jobs/JSRW_NEXUS_POST_SPLIT b/jobs/JSRW_NEXUS_POST_SPLIT
index 6e5a0a259a..10f4101d5c 100755
--- a/jobs/JSRW_NEXUS_POST_SPLIT
+++ b/jobs/JSRW_NEXUS_POST_SPLIT
@@ -30,7 +30,9 @@ export SCRIPTSsrw="${HOMEdir}/scripts"
 # export USHdir="${USHsrw}" # should be removed later
 . ${USHsrw}/source_util_funcs.sh
-source_config_for_task "cpl_aqm_parm|task_nexus_post_split" ${GLOBAL_VAR_DEFNS_FP}
+for sect in user nco platform workflow global cpl_aqm_parm ; do
+  source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
 #
 #-----------------------------------------------------------------------
 #
@@ -128,7 +130,7 @@ setpdy.sh
 if [ ${subcyc} -ne 0 ]; then
   export cycle="t${cyc}${subcyc}z"
 fi
-if [ "${DO_ENSEMBLE}" = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then
+if [ $(boolify "${DO_ENSEMBLE}") = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then
   export dot_ensmem=".mem${ENSMEM_INDX}"
 else
   export dot_ensmem=
diff --git a/jobs/JSRW_POINT_SOURCE b/jobs/JSRW_POINT_SOURCE
index a112a2d275..6218acaa99 100755
--- a/jobs/JSRW_POINT_SOURCE
+++ b/jobs/JSRW_POINT_SOURCE
@@ -30,7 +30,10 @@ export SCRIPTSsrw="${HOMEdir}/scripts"
 # export USHdir="${USHsrw}" # should be removed later
 . ${USHsrw}/source_util_funcs.sh
-source_config_for_task "task_run_fcst|cpl_aqm_parm|task_point_source" ${GLOBAL_VAR_DEFNS_FP}
+for sect in user nco platform workflow global cpl_aqm_parm task_point_source \
+  task_run_fcst ; do
+  source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
 #
 #-----------------------------------------------------------------------
 #
@@ -123,7 +126,7 @@ setpdy.sh
 if [ ${subcyc} -ne 0 ]; then
   export cycle="t${cyc}${subcyc}z"
 fi
-if [ "${DO_ENSEMBLE}" = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then
+if [ $(boolify "${DO_ENSEMBLE}") = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then
   export dot_ensmem=".mem${ENSMEM_INDX}"
 else
   export dot_ensmem=
diff --git a/jobs/JSRW_POST_STAT_O3 b/jobs/JSRW_POST_STAT_O3
index 8924cba9e5..5fadd70d30 100755
--- a/jobs/JSRW_POST_STAT_O3
+++ b/jobs/JSRW_POST_STAT_O3
@@ -30,7 +30,10 @@ export SCRIPTSsrw="${HOMEdir}/scripts"
 # export USHdir="${USHsrw}" # should be removed later
 . ${USHsrw}/source_util_funcs.sh
-source_config_for_task "cpl_aqm_parm|task_run_post|task_post_stat_o3" ${GLOBAL_VAR_DEFNS_FP}
+for sect in user nco platform workflow global cpl_aqm_parm \
+  task_run_post ; do
+  source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
 . $USHdir/job_preamble.sh
 #
 #-----------------------------------------------------------------------
@@ -124,7 +127,7 @@ setpdy.sh
 if [ ${subcyc} -ne 0 ]; then
   export cycle="t${cyc}${subcyc}z"
 fi
-if [ "${DO_ENSEMBLE}" = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then
+if [ $(boolify "${DO_ENSEMBLE}") = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then
   export dot_ensmem=".mem${ENSMEM_INDX}"
 else
   export dot_ensmem=
diff --git a/jobs/JSRW_POST_STAT_PM25 b/jobs/JSRW_POST_STAT_PM25
index 83434fa8c7..2d7d6e9e88 100755
--- a/jobs/JSRW_POST_STAT_PM25
+++ b/jobs/JSRW_POST_STAT_PM25
@@ -30,7 +30,10 @@ export SCRIPTSsrw="${HOMEdir}/scripts"
 # export USHdir="${USHsrw}" # should be removed later
 . ${USHsrw}/source_util_funcs.sh
-source_config_for_task "cpl_aqm_parm|task_run_post|task_post_stat_pm25" ${GLOBAL_VAR_DEFNS_FP}
+for sect in user nco platform workflow global cpl_aqm_parm \
+  task_run_post ; do
+  source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
 #
 #-----------------------------------------------------------------------
 #
@@ -123,7 +126,7 @@ setpdy.sh
 if [ ${subcyc} -ne 0 ]; then
   export cycle="t${cyc}${subcyc}z"
 fi
-if [ "${DO_ENSEMBLE}" = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then
+if [ $(boolify "${DO_ENSEMBLE}") = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then
   export dot_ensmem=".mem${ENSMEM_INDX}"
 else
   export dot_ensmem=
diff --git a/jobs/JSRW_PRE_POST_STAT b/jobs/JSRW_PRE_POST_STAT
index 12561085c2..8c51e18510 100755
--- a/jobs/JSRW_PRE_POST_STAT
+++ b/jobs/JSRW_PRE_POST_STAT
@@ -30,7 +30,9 @@ export SCRIPTSsrw="${HOMEdir}/scripts"
 # export USHdir="${USHsrw}" # should be removed later
 . ${USHsrw}/source_util_funcs.sh
-source_config_for_task "task_pre_post_stat" ${GLOBAL_VAR_DEFNS_FP}
+for sect in user nco platform workflow global cpl_aqm_parm ; do
+  source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
 #
 #-----------------------------------------------------------------------
 #
@@ -127,7 +129,7 @@ setpdy.sh
 if [ ${subcyc} -ne 0 ]; then
   export cycle="t${cyc}${subcyc}z"
 fi
-if [ "${DO_ENSEMBLE}" = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then
+if [ $(boolify "${DO_ENSEMBLE}") = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then
   export dot_ensmem=".mem${ENSMEM_INDX}"
 else
   export dot_ensmem=
diff --git a/manage_externals/checkout_externals b/manage_externals/checkout_externals
index a0698baef0..48bce24010 100755
--- a/manage_externals/checkout_externals
+++ b/manage_externals/checkout_externals
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 """Main driver wrapper around the manic/checkout utility.
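The shebang bump matters on hosts that install python3 without a bare python alias; invocation is otherwise unchanged:

    # Before, exec failed where only python3 exists on PATH; now both work:
    ./manage_externals/checkout_externals
    python3 manage_externals/checkout_externals   # equivalent explicit form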
diff --git a/modulefiles/build_cheyenne_gnu.lua b/modulefiles/build_cheyenne_gnu.lua
deleted file mode 100644
index 58398891b2..0000000000
--- a/modulefiles/build_cheyenne_gnu.lua
+++ /dev/null
@@ -1,33 +0,0 @@
-help([[
-This module loads libraries for building the UFS SRW App on
-the CISL machine Cheyenne using GNU
-]])
-
-whatis([===[Loads libraries needed for building the UFS SRW App on Cheyenne ]===])
-
-load(pathJoin("cmake", os.getenv("cmake_ver") or "3.22.0"))
-load(pathJoin("ncarenv", os.getenv("ncarenv_ver") or "1.3"))
-load(pathJoin("gnu", os.getenv("gnu_ver") or "11.2.0"))
-load(pathJoin("mpt", os.getenv("mpt_ver") or "2.25"))
-setenv("MKLROOT", "/glade/u/apps/opt/intel/2022.1/mkl/latest")
-load(pathJoin("ncarcompilers", os.getenv("ncarcompilers_ver") or "0.5.0"))
-unload("netcdf")
-
-prepend_path("MODULEPATH","/glade/work/epicufsrt/contrib/hpc-stack/gnu11.2.0_ncdf492/modulefiles/stack")
-load(pathJoin("hpc", os.getenv("hpc_ver") or "1.2.0"))
-load(pathJoin("hpc-gnu", os.getenv("hpc_gnu_ver") or "11.2.0"))
-load(pathJoin("hpc-mpt", os.getenv("hpc_mpt_ver") or "2.25"))
-
-load("srw_common")
-
-load(pathJoin("openblas", os.getenv("openblas_ver") or "0.3.23"))
-
-unsetenv("MKLROOT")
-setenv("CMAKE_C_COMPILER","mpicc")
-setenv("CMAKE_CXX_COMPILER","mpicxx")
-setenv("CMAKE_Fortran_COMPILER","mpif90")
-setenv("CMAKE_Platform","cheyenne.gnu")
-setenv("CC", "mpicc")
-setenv("CXX", "mpicxx")
-setenv("FC", "mpif90")
-
diff --git a/modulefiles/build_cheyenne_intel.lua b/modulefiles/build_cheyenne_intel.lua
deleted file mode 100644
index 298c9879ac..0000000000
--- a/modulefiles/build_cheyenne_intel.lua
+++ /dev/null
@@ -1,27 +0,0 @@
-help([[
-This module loads libraries for building the UFS SRW App on
-the CISL machine Cheyenne using Intel-2022.1
-]])
-
-whatis([===[Loads libraries needed for building the UFS SRW App on Cheyenne ]===])
-
-load(pathJoin("cmake", os.getenv("cmake_ver") or "3.22.0"))
-load(pathJoin("ncarenv", os.getenv("ncarenv_ver") or "1.3"))
-load(pathJoin("intel", os.getenv("intel_ver") or "2022.1"))
-load(pathJoin("mpt", os.getenv("mpt_ver") or "2.25"))
-load(pathJoin("mkl", os.getenv("mkl_ver") or "2022.1"))
-load(pathJoin("ncarcompilers", os.getenv("ncarcompilers_ver") or "0.5.0"))
-unload("netcdf")
-
-prepend_path("MODULEPATH","/glade/work/epicufsrt/contrib/hpc-stack/intel2022.1_ncdf492/modulefiles/stack")
-load(pathJoin("hpc", os.getenv("hpc_ver") or "1.2.0"))
-load(pathJoin("hpc-intel", os.getenv("hpc_intel_ver") or "2022.1"))
-load(pathJoin("hpc-mpt", os.getenv("hpc_mpt_ver") or "2.25"))
-
-load("srw_common")
-
-setenv("CMAKE_C_COMPILER","mpicc")
-setenv("CMAKE_CXX_COMPILER","mpicpc")
-setenv("CMAKE_Fortran_COMPILER","mpif90")
-setenv("CMAKE_Platform","cheyenne.intel")
-
diff --git a/modulefiles/build_derecho_intel.lua b/modulefiles/build_derecho_intel.lua
index ac98c39e53..1356fdb3e0 100644
--- a/modulefiles/build_derecho_intel.lua
+++ b/modulefiles/build_derecho_intel.lua
@@ -6,13 +6,15 @@ the CISL machine Derecho (Cray) using Intel@2021.10.0
 whatis([===[Loads libraries needed for building the UFS SRW App on Derecho ]===])

 prepend_path("MODULEPATH","/lustre/desc1/scratch/epicufsrt/contrib/modulefiles_extra")
-prepend_path("MODULEPATH", "/glade/work/epicufsrt/contrib/spack-stack/derecho/spack-stack-1.5.1/envs/unified-env/install/modulefiles/Core")
+prepend_path("MODULEPATH", "/glade/work/epicufsrt/contrib/spack-stack/derecho/spack-stack-1.6.0/envs/upp-addon-env/install/modulefiles/Core")

 load(pathJoin("stack-intel", os.getenv("stack_intel_ver") or "2021.10.0"))
 load(pathJoin("stack-cray-mpich", os.getenv("stack_cray_mpich_ver") or "8.1.25"))
-load(pathJoin("cmake", os.getenv("cmake_ver") or "3.26.3"))
+load(pathJoin("cmake", os.getenv("cmake_ver") or "3.23.1"))

 load("srw_common")

+load(pathJoin("prod_util", os.getenv("prod_util_ver") or "2.1.1"))
+
 setenv("CMAKE_Platform","derecho.intel")
diff --git a/modulefiles/build_gaea_intel.lua b/modulefiles/build_gaea_intel.lua
index 0eca20b5e1..2a53acf15b 100644
--- a/modulefiles/build_gaea_intel.lua
+++ b/modulefiles/build_gaea_intel.lua
@@ -5,14 +5,14 @@ the NOAA RDHPC machine Gaea C5 using Intel-2023.1.0
 whatis([===[Loads libraries needed for building the UFS SRW App on Gaea C5 ]===])

-prepend_path("MODULEPATH","/ncrc/proj/epic/spack-stack/spack-stack-1.5.1/envs/unified-env/install/modulefiles/Core")
-stack_intel_ver=os.getenv("stack_intel_ver") or "2023.1.0"
+prepend_path("MODULEPATH","/ncrc/proj/epic/spack-stack/spack-stack-1.6.0/envs/upp-addon-env/install/modulefiles/Core")
+stack_intel_ver=os.getenv("stack_intel_ver") or "2023.2.0"
 load(pathJoin("stack-intel", stack_intel_ver))

-stack_mpich_ver=os.getenv("stack_mpich_ver") or "8.1.25"
+stack_mpich_ver=os.getenv("stack_mpich_ver") or "8.1.28"
 load(pathJoin("stack-cray-mpich", stack_mpich_ver))

-stack_python_ver=os.getenv("stack_python_ver") or "3.10.8"
+stack_python_ver=os.getenv("stack_python_ver") or "3.10.13"
 load(pathJoin("stack-python", stack_python_ver))

 cmake_ver=os.getenv("cmake_ver") or "3.23.1"
+setenv("FC", "mpif90") +setenv("CMAKE_Platform", "hera.gnu") diff --git a/modulefiles/build_hera_intel.lua b/modulefiles/build_hera_intel.lua index ee11e4a386..d7ef51b1e7 100644 --- a/modulefiles/build_hera_intel.lua +++ b/modulefiles/build_hera_intel.lua @@ -8,8 +8,7 @@ whatis([===[Loads libraries needed for building the UFS SRW App on Hera ]===]) prepend_path("MODULEPATH","/contrib/sutils/modulefiles") load("sutils") -prepend_path("MODULEPATH", "/scratch1/NCEPDEV/nems/role.epic/spack-stack/spack-stack-1.5.1/envs/unified-env-rocky8/install/modulefiles/Core") -prepend_path("MODULEPATH", "/scratch1/NCEPDEV/jcsda/jedipara/spack-stack/modulefiles") +prepend_path("MODULEPATH", "/scratch1/NCEPDEV/nems/role.epic/spack-stack/spack-stack-1.6.0/envs/upp-addon-env/install/modulefiles/Core") stack_intel_ver=os.getenv("stack_intel_ver") or "2021.5.0" load(pathJoin("stack-intel", stack_intel_ver)) @@ -17,7 +16,7 @@ load(pathJoin("stack-intel", stack_intel_ver)) stack_impi_ver=os.getenv("stack_impi_ver") or "2021.5.1" load(pathJoin("stack-intel-oneapi-mpi", stack_impi_ver)) -stack_python_ver=os.getenv("stack_python_ver") or "3.10.8" +stack_python_ver=os.getenv("stack_python_ver") or "3.10.13" load(pathJoin("stack-python", stack_python_ver)) cmake_ver=os.getenv("cmake_ver") or "3.23.1" @@ -26,8 +25,10 @@ load(pathJoin("cmake", cmake_ver)) load("srw_common") load(pathJoin("nccmp", os.getenv("nccmp_ver") or "1.9.0.1")) -load(pathJoin("nco", os.getenv("nco_ver") or "4.9.3")) -load(pathJoin("prod_util", os.getenv("prod_util_ver") or "1.2.2")) +load(pathJoin("nco", os.getenv("nco_ver") or "5.0.6")) +load(pathJoin("prod_util", os.getenv("prod_util_ver") or "2.1.1")) + +setenv("FC", "mpiifort") setenv("CMAKE_C_COMPILER","mpiicc") setenv("CMAKE_CXX_COMPILER","mpiicpc") diff --git a/modulefiles/build_hercules_intel.lua b/modulefiles/build_hercules_intel.lua index cec2a3a30e..54c82569e9 100644 --- a/modulefiles/build_hercules_intel.lua +++ b/modulefiles/build_hercules_intel.lua @@ -5,22 +5,24 @@ the MSU machine Hercules using intel-oneapi-compilers/2022.2.1 whatis([===[Loads libraries needed for building the UFS SRW App on Hercules ]===]) -prepend_path("MODULEPATH", "/work/noaa/epic/role-epic/spack-stack/hercules/spack-stack-1.5.1/envs/unified-env/install/modulefiles/Core") -prepend_path("MODULEPATH", "/work/noaa/da/role-da/spack-stack/modulefiles") +prepend_path("MODULEPATH", "/work/noaa/epic/role-epic/spack-stack/hercules/spack-stack-1.6.0/envs/upp-addon-env/install/modulefiles/Core") load("stack-intel/2021.9.0") load("stack-intel-oneapi-mpi/2021.9.0") -load("stack-python/3.10.8") -load("cmake/3.26.3") +load("stack-python/3.10.13") +load("cmake/3.23.1") load("srw_common") load("nccmp/1.9.0.1") load("nco/5.0.6") +load(pathJoin("prod_util", os.getenv("prod_util_ver") or "2.1.1")) setenv("CFLAGS","-diag-disable=10441") setenv("FFLAGS","-diag-disable=10441") +setenv("FC", "mpiifort") + setenv("CMAKE_C_COMPILER","mpiicc") setenv("CMAKE_CXX_COMPILER","mpiicpc") setenv("CMAKE_Fortran_COMPILER","mpiifort") diff --git a/modulefiles/build_jet_intel.lua b/modulefiles/build_jet_intel.lua index 925fef3853..04124e4bf5 100644 --- a/modulefiles/build_jet_intel.lua +++ b/modulefiles/build_jet_intel.lua @@ -5,12 +5,11 @@ the NOAA RDHPC machine Jet using Intel-2021.5.0 whatis([===[Loads libraries needed for building the UFS SRW App on Jet ]===]) -prepend_path("MODULEPATH","/mnt/lfs4/HFIP/hfv3gfs/role.epic/spack-stack/spack-stack-1.5.1/envs/unified-env-rocky8/install/modulefiles/Core") -prepend_path("MODULEPATH", 
"/lfs4/HFIP/hfv3gfs/spack-stack/modulefiles") +prepend_path("MODULEPATH","/contrib/spack-stack/spack-stack-1.6.0/envs/upp-addon-env/install/modulefiles/Core") load("stack-intel/2021.5.0") load("stack-intel-oneapi-mpi/2021.5.1") -load("stack-python/3.10.8") +load("stack-python/3.10.13") load("cmake/3.23.1") load("srw_common") @@ -18,6 +17,8 @@ load("srw_common") load("nccmp/1.9.0.1") load("nco/5.0.6") +setenv("FC", "mpiifort") + setenv("CMAKE_C_COMPILER","mpiicc") setenv("CMAKE_CXX_COMPILER","mpiicpc") setenv("CMAKE_Fortran_COMPILER","mpiifort") diff --git a/modulefiles/build_noaacloud_intel.lua b/modulefiles/build_noaacloud_intel.lua index 0b6a9c1ca4..1f5855343d 100644 --- a/modulefiles/build_noaacloud_intel.lua +++ b/modulefiles/build_noaacloud_intel.lua @@ -5,11 +5,12 @@ the NOAA cloud using Intel-oneapi whatis([===[Loads libraries needed for building the UFS SRW App on NOAA cloud ]===]) -prepend_path("MODULEPATH", "/contrib/spack-stack/spack-stack-1.5.1/envs/unified-env/install/modulefiles/Core") +prepend_path("MODULEPATH", "/contrib/spack-stack-rocky8/spack-stack-1.6.0/envs/upp-addon-env/install/modulefiles/Core") prepend_path("MODULEPATH", "/apps/modules/modulefiles") -prepend_path("PATH", "/contrib/EPIC/bin") +load("gnu") load("stack-intel") load("stack-intel-oneapi-mpi") +unload("gnu") load("cmake/3.23.1") load("srw_common") diff --git a/modulefiles/build_odin_intel.lua b/modulefiles/build_odin_intel.lua index f4059a868a..40c3ada501 100644 --- a/modulefiles/build_odin_intel.lua +++ b/modulefiles/build_odin_intel.lua @@ -45,6 +45,8 @@ load("sigio") load("w3emc") load("wgrib2") +setenv("FC", "ftn") + setenv("CMAKE_C_COMPILER","cc") setenv("CMAKE_CXX_COMPILER","CC") setenv("CMAKE_Fortran_COMPILER","ftn") diff --git a/modulefiles/build_orion_intel.lua b/modulefiles/build_orion_intel.lua index d3e777d1dc..31efcb57c6 100644 --- a/modulefiles/build_orion_intel.lua +++ b/modulefiles/build_orion_intel.lua @@ -1,23 +1,27 @@ help([[ This module loads libraries for building the UFS SRW App on -the MSU machine Orion using Intel-2022.1.2 +the MSU machine Orion using intel-oneapi-compilers/2021.9.0 ]]) whatis([===[Loads libraries needed for building the UFS SRW App on Orion ]===]) -prepend_path("MODULEPATH", "/work/noaa/epic/role-epic/spack-stack/orion/spack-stack-1.5.1/envs/unified-env/install/modulefiles/Core") -prepend_path("MODULEPATH", "/work/noaa/da/role-da/spack-stack/modulefiles") +prepend_path("MODULEPATH", "/work/noaa/epic/role-epic/spack-stack/orion/spack-stack-1.6.0/envs/upp-addon-env/install/modulefiles/Core") -load("stack-intel/2022.0.2") -load("stack-intel-oneapi-mpi/2021.5.1") -load("stack-python/3.10.8") -load("cmake/3.22.1") +load("stack-intel/2021.9.0") +load("stack-intel-oneapi-mpi/2021.9.0") +load("stack-python/3.10.13") +load("cmake/3.23.1") load("srw_common") load("nccmp/1.9.0.1") load("nco/5.0.6") -load("wget") +load(pathJoin("prod_util", os.getenv("prod_util_ver") or "2.1.1")) + +setenv("CFLAGS","-diag-disable=10441") +setenv("FFLAGS","-diag-disable=10441") + +setenv("FC", "mpiifort") setenv("CMAKE_C_COMPILER","mpiicc") setenv("CMAKE_CXX_COMPILER","mpiicpc") diff --git a/modulefiles/build_singularity_gnu.lua b/modulefiles/build_singularity_gnu.lua index 434ac448d4..275842ffc2 100644 --- a/modulefiles/build_singularity_gnu.lua +++ b/modulefiles/build_singularity_gnu.lua @@ -33,6 +33,8 @@ load("gfsio") load("wgrib2") load("upp") +setenv("FC", "mpif90") + setenv("CMAKE_C_COMPILER","mpiicc") setenv("CMAKE_CXX_COMPILER","mpicxx") setenv("CMAKE_Fortran_COMPILER","mpif90") diff 
--git a/modulefiles/srw_common.lua b/modulefiles/srw_common.lua index 79c67283f9..d2bdbe6790 100644 --- a/modulefiles/srw_common.lua +++ b/modulefiles/srw_common.lua @@ -3,21 +3,21 @@ load("zlib/1.2.13") load("libpng/1.6.37") load("netcdf-c/4.9.2") -load("netcdf-fortran/4.6.0") +load("netcdf-fortran/4.6.1") load("parallelio/2.5.10") -load("esmf/8.5.0") -load("fms/2023.02.01") +load("esmf/8.6.0") +load("fms/2023.04") load("bacio/2.4.1") -load("crtm/2.4.0") -load("g2/3.4.5") -load("g2tmpl/1.10.2") +load("crtm/2.4.0.1") +load("g2/3.5.1") +load("g2tmpl/1.13.0") load("ip/4.3.0") -load("sp/2.3.3") +load("sp/2.5.0") load("w3emc/2.10.0") load("gftl-shared/1.6.1") -load("mapl/2.40.3-esmf-8.5.0") +load("mapl/2.40.3-esmf-8.6.0") load("nemsio/2.5.4") load("sfcio/1.4.1") diff --git a/modulefiles/tasks/cheyenne/aqm_ics.local.lua b/modulefiles/tasks/cheyenne/aqm_ics.local.lua deleted file mode 100644 index 9c9f0ca3d5..0000000000 --- a/modulefiles/tasks/cheyenne/aqm_ics.local.lua +++ /dev/null @@ -1,3 +0,0 @@ -load(pathJoin("cmake", os.getenv("cmake_ver") or "3.22.0")) -load("nco/4.9.5") -load("python_srw_aqm") diff --git a/modulefiles/tasks/cheyenne/aqm_lbcs.local.lua b/modulefiles/tasks/cheyenne/aqm_lbcs.local.lua deleted file mode 100644 index 9c9f0ca3d5..0000000000 --- a/modulefiles/tasks/cheyenne/aqm_lbcs.local.lua +++ /dev/null @@ -1,3 +0,0 @@ -load(pathJoin("cmake", os.getenv("cmake_ver") or "3.22.0")) -load("nco/4.9.5") -load("python_srw_aqm") diff --git a/modulefiles/tasks/cheyenne/fire_emission.local.lua b/modulefiles/tasks/cheyenne/fire_emission.local.lua deleted file mode 100644 index 86252a9a4f..0000000000 --- a/modulefiles/tasks/cheyenne/fire_emission.local.lua +++ /dev/null @@ -1,2 +0,0 @@ -load("ncarenv") -load("python_srw_aqm") diff --git a/modulefiles/tasks/cheyenne/nexus_emission.local.lua b/modulefiles/tasks/cheyenne/nexus_emission.local.lua deleted file mode 100644 index 3c690fa12a..0000000000 --- a/modulefiles/tasks/cheyenne/nexus_emission.local.lua +++ /dev/null @@ -1,5 +0,0 @@ -load("nco/4.9.5") -load("mpt/2.25") - -load("ncarenv") -load("python_srw_aqm") diff --git a/modulefiles/tasks/cheyenne/nexus_gfs_sfc.local.lua b/modulefiles/tasks/cheyenne/nexus_gfs_sfc.local.lua deleted file mode 100644 index 86252a9a4f..0000000000 --- a/modulefiles/tasks/cheyenne/nexus_gfs_sfc.local.lua +++ /dev/null @@ -1,2 +0,0 @@ -load("ncarenv") -load("python_srw_aqm") diff --git a/modulefiles/tasks/cheyenne/nexus_post_split.local.lua b/modulefiles/tasks/cheyenne/nexus_post_split.local.lua deleted file mode 100644 index e3f4bbe95d..0000000000 --- a/modulefiles/tasks/cheyenne/nexus_post_split.local.lua +++ /dev/null @@ -1,3 +0,0 @@ -load(pathJoin("nco", os.getenv("nco_ver") or "4.9.5")) -load("ncarenv") -load("python_srw_aqm") diff --git a/modulefiles/tasks/cheyenne/plot_allvars.local.lua b/modulefiles/tasks/cheyenne/plot_allvars.local.lua deleted file mode 100644 index b49b8bb863..0000000000 --- a/modulefiles/tasks/cheyenne/plot_allvars.local.lua +++ /dev/null @@ -1,3 +0,0 @@ -unload("python") -load("conda") -setenv("SRW_ENV", "srw_graphics") diff --git a/modulefiles/tasks/cheyenne/point_source.local.lua b/modulefiles/tasks/cheyenne/point_source.local.lua deleted file mode 100644 index 86252a9a4f..0000000000 --- a/modulefiles/tasks/cheyenne/point_source.local.lua +++ /dev/null @@ -1,2 +0,0 @@ -load("ncarenv") -load("python_srw_aqm") diff --git a/modulefiles/tasks/cheyenne/pre_post_stat.local.lua b/modulefiles/tasks/cheyenne/pre_post_stat.local.lua deleted file mode 100644 index 042eb2f732..0000000000 
--- a/modulefiles/tasks/cheyenne/pre_post_stat.local.lua +++ /dev/null @@ -1,2 +0,0 @@ -load("nco/4.9.5") -load("python_srw_aqm") diff --git a/modulefiles/tasks/cheyenne/python_srw.lua b/modulefiles/tasks/cheyenne/python_srw.lua deleted file mode 100644 index fe6c73a7d5..0000000000 --- a/modulefiles/tasks/cheyenne/python_srw.lua +++ /dev/null @@ -1,3 +0,0 @@ -unload("python") -load("conda") -setenv("SRW_ENV", "srw_app") diff --git a/modulefiles/tasks/cheyenne/run_vx.local.lua b/modulefiles/tasks/cheyenne/run_vx.local.lua deleted file mode 100644 index 54cc632c21..0000000000 --- a/modulefiles/tasks/cheyenne/run_vx.local.lua +++ /dev/null @@ -1,25 +0,0 @@ ---[[ -Compiler-specific modules are used for met and metplus libraries ---]] -local met_ver = (os.getenv("met_ver") or "10.1.2") -local metplus_ver = (os.getenv("metplus_ver") or "4.1.3") -if (mode() == "load") then - load(pathJoin("met", met_ver)) - load(pathJoin("metplus",metplus_ver)) -end -local base_met = os.getenv("met_ROOT") or os.getenv("MET_ROOT") -local base_metplus = os.getenv("metplus_ROOT") or os.getenv("METPLUS_ROOT") - -setenv("MET_INSTALL_DIR", base_met) -setenv("MET_BIN_EXEC", pathJoin(base_met,"bin")) -setenv("MET_BASE", pathJoin(base_met,"share/met")) -setenv("MET_VERSION", met_ver) -setenv("METPLUS_VERSION", metplus_ver) -setenv("METPLUS_ROOT", base_metplus) -setenv("METPLUS_PATH", base_metplus) - -if (mode() == "unload") then - unload(pathJoin("met", met_ver)) - unload(pathJoin("metplus",metplus_ver)) -end -load("python_srw") diff --git a/modulefiles/tasks/derecho/aqm_ics.local.lua b/modulefiles/tasks/derecho/aqm_ics.local.lua index 30f1157fbb..9b519c10f6 100644 --- a/modulefiles/tasks/derecho/aqm_ics.local.lua +++ b/modulefiles/tasks/derecho/aqm_ics.local.lua @@ -1,2 +1,2 @@ -load("nco/5.0.6") +load("nco/5.1.9") load("python_srw_aqm") diff --git a/modulefiles/tasks/derecho/aqm_lbcs.local.lua b/modulefiles/tasks/derecho/aqm_lbcs.local.lua index 30f1157fbb..9b519c10f6 100644 --- a/modulefiles/tasks/derecho/aqm_lbcs.local.lua +++ b/modulefiles/tasks/derecho/aqm_lbcs.local.lua @@ -1,2 +1,2 @@ -load("nco/5.0.6") +load("nco/5.1.9") load("python_srw_aqm") diff --git a/modulefiles/tasks/derecho/fire_emission.local.lua b/modulefiles/tasks/derecho/fire_emission.local.lua index 86252a9a4f..df0e35d5da 100644 --- a/modulefiles/tasks/derecho/fire_emission.local.lua +++ b/modulefiles/tasks/derecho/fire_emission.local.lua @@ -1,2 +1 @@ -load("ncarenv") load("python_srw_aqm") diff --git a/modulefiles/tasks/derecho/get_obs.local.lua b/modulefiles/tasks/derecho/get_obs.local.lua new file mode 100644 index 0000000000..c03abd8dfe --- /dev/null +++ b/modulefiles/tasks/derecho/get_obs.local.lua @@ -0,0 +1 @@ +load("run_vx.local") diff --git a/modulefiles/tasks/derecho/nexus_emission.local.lua b/modulefiles/tasks/derecho/nexus_emission.local.lua index e7f216375c..9b519c10f6 100644 --- a/modulefiles/tasks/derecho/nexus_emission.local.lua +++ b/modulefiles/tasks/derecho/nexus_emission.local.lua @@ -1,4 +1,2 @@ -load("nco/5.0.6") - -load("ncarenv") +load("nco/5.1.9") load("python_srw_aqm") diff --git a/modulefiles/tasks/derecho/nexus_gfs_sfc.local.lua b/modulefiles/tasks/derecho/nexus_gfs_sfc.local.lua index 86252a9a4f..df0e35d5da 100644 --- a/modulefiles/tasks/derecho/nexus_gfs_sfc.local.lua +++ b/modulefiles/tasks/derecho/nexus_gfs_sfc.local.lua @@ -1,2 +1 @@ -load("ncarenv") load("python_srw_aqm") diff --git a/modulefiles/tasks/derecho/nexus_post_split.local.lua b/modulefiles/tasks/derecho/nexus_post_split.local.lua index 
07d126ff0b..9b519c10f6 100644 --- a/modulefiles/tasks/derecho/nexus_post_split.local.lua +++ b/modulefiles/tasks/derecho/nexus_post_split.local.lua @@ -1,3 +1,2 @@ -load(pathJoin("nco", os.getenv("nco_ver") or "5.0.6")) -load("ncarenv") +load("nco/5.1.9") load("python_srw_aqm") diff --git a/modulefiles/tasks/derecho/plot_allvars.local.lua b/modulefiles/tasks/derecho/plot_allvars.local.lua index b49b8bb863..7cee04231e 100644 --- a/modulefiles/tasks/derecho/plot_allvars.local.lua +++ b/modulefiles/tasks/derecho/plot_allvars.local.lua @@ -1,3 +1,3 @@ unload("python") load("conda") -setenv("SRW_ENV", "srw_graphics") +setenv("SRW_GRAPHICS_ENV", "srw_graphics") diff --git a/modulefiles/tasks/derecho/point_source.local.lua b/modulefiles/tasks/derecho/point_source.local.lua index 86252a9a4f..df0e35d5da 100644 --- a/modulefiles/tasks/derecho/point_source.local.lua +++ b/modulefiles/tasks/derecho/point_source.local.lua @@ -1,2 +1 @@ -load("ncarenv") load("python_srw_aqm") diff --git a/modulefiles/tasks/derecho/pre_post_stat.local.lua b/modulefiles/tasks/derecho/pre_post_stat.local.lua index 30f1157fbb..9b519c10f6 100644 --- a/modulefiles/tasks/derecho/pre_post_stat.local.lua +++ b/modulefiles/tasks/derecho/pre_post_stat.local.lua @@ -1,2 +1,2 @@ -load("nco/5.0.6") +load("nco/5.1.9") load("python_srw_aqm") diff --git a/modulefiles/tasks/derecho/python_srw.lua b/modulefiles/tasks/derecho/python_srw.lua deleted file mode 100644 index fe6c73a7d5..0000000000 --- a/modulefiles/tasks/derecho/python_srw.lua +++ /dev/null @@ -1,3 +0,0 @@ -unload("python") -load("conda") -setenv("SRW_ENV", "srw_app") diff --git a/modulefiles/tasks/gaea/get_obs.local.lua b/modulefiles/tasks/gaea/get_obs.local.lua new file mode 100644 index 0000000000..c03abd8dfe --- /dev/null +++ b/modulefiles/tasks/gaea/get_obs.local.lua @@ -0,0 +1 @@ +load("run_vx.local") diff --git a/modulefiles/tasks/gaea/plot_allvars.local.lua b/modulefiles/tasks/gaea/plot_allvars.local.lua index 104da06f5c..41da34ecca 100644 --- a/modulefiles/tasks/gaea/plot_allvars.local.lua +++ b/modulefiles/tasks/gaea/plot_allvars.local.lua @@ -1,4 +1,4 @@ unload("python") load("conda") -setenv("SRW_ENV", "srw_graphics") +setenv("SRW_GRAPHICS_ENV", "srw_graphics") diff --git a/modulefiles/tasks/gaea/python_srw.lua b/modulefiles/tasks/gaea/python_srw.lua index 5058b3f615..84df70682d 100644 --- a/modulefiles/tasks/gaea/python_srw.lua +++ b/modulefiles/tasks/gaea/python_srw.lua @@ -3,5 +3,6 @@ unload("python") load("conda") setenv("SRW_ENV", "srw_app") -setenv("LD_PRELOAD", "/opt/cray/pe/gcc/12.2.0/snos/lib64/libstdc++.so.6") +setenv("LD_PRELOAD", "/usr/lib64/libstdc++.so.6") +setenv("FI_VERBS_PREFER_XRC", "0") diff --git a/modulefiles/tasks/hera/get_obs.local.lua b/modulefiles/tasks/hera/get_obs.local.lua index dcca3116d8..e8d902abab 100644 --- a/modulefiles/tasks/hera/get_obs.local.lua +++ b/modulefiles/tasks/hera/get_obs.local.lua @@ -1,3 +1,2 @@ load("hpss") -unload("python") -load("python_srw") +load("run_vx.local") diff --git a/modulefiles/tasks/hera/plot_allvars.local.lua b/modulefiles/tasks/hera/plot_allvars.local.lua index b7e9528710..85291013c7 100644 --- a/modulefiles/tasks/hera/plot_allvars.local.lua +++ b/modulefiles/tasks/hera/plot_allvars.local.lua @@ -1,2 +1,2 @@ load("conda") -setenv("SRW_ENV", "srw_graphics") +setenv("SRW_GRAPHICS_ENV", "srw_graphics") diff --git a/modulefiles/tasks/hercules/aqm_ics.local.lua b/modulefiles/tasks/hercules/aqm_ics.local.lua index 2aac950d8d..df0e35d5da 100644 --- a/modulefiles/tasks/hercules/aqm_ics.local.lua +++ 
b/modulefiles/tasks/hercules/aqm_ics.local.lua @@ -1,2 +1 @@ -load(pathJoin("nco", os.getenv("nco_ver") or "4.9.3")) load("python_srw_aqm") diff --git a/modulefiles/tasks/hercules/aqm_lbcs.local.lua b/modulefiles/tasks/hercules/aqm_lbcs.local.lua index 5a7b0cece6..df0e35d5da 100644 --- a/modulefiles/tasks/hercules/aqm_lbcs.local.lua +++ b/modulefiles/tasks/hercules/aqm_lbcs.local.lua @@ -1,2 +1 @@ -load(pathJoin("nco", os.getenv("nco_ver") or "4.9.3")) -load("miniconda_regional_workflow_cmaq") +load("python_srw_aqm") diff --git a/modulefiles/tasks/hercules/fire_emission.local.lua b/modulefiles/tasks/hercules/fire_emission.local.lua index 2aac950d8d..df0e35d5da 100644 --- a/modulefiles/tasks/hercules/fire_emission.local.lua +++ b/modulefiles/tasks/hercules/fire_emission.local.lua @@ -1,2 +1 @@ -load(pathJoin("nco", os.getenv("nco_ver") or "4.9.3")) load("python_srw_aqm") diff --git a/modulefiles/tasks/hercules/get_obs.local.lua b/modulefiles/tasks/hercules/get_obs.local.lua new file mode 100644 index 0000000000..c03abd8dfe --- /dev/null +++ b/modulefiles/tasks/hercules/get_obs.local.lua @@ -0,0 +1 @@ +load("run_vx.local") diff --git a/modulefiles/tasks/hercules/nexus_emission.local.lua b/modulefiles/tasks/hercules/nexus_emission.local.lua index 2aac950d8d..df0e35d5da 100644 --- a/modulefiles/tasks/hercules/nexus_emission.local.lua +++ b/modulefiles/tasks/hercules/nexus_emission.local.lua @@ -1,2 +1 @@ -load(pathJoin("nco", os.getenv("nco_ver") or "4.9.3")) load("python_srw_aqm") diff --git a/modulefiles/tasks/hercules/nexus_post_split.local.lua b/modulefiles/tasks/hercules/nexus_post_split.local.lua index 2aac950d8d..df0e35d5da 100644 --- a/modulefiles/tasks/hercules/nexus_post_split.local.lua +++ b/modulefiles/tasks/hercules/nexus_post_split.local.lua @@ -1,2 +1 @@ -load(pathJoin("nco", os.getenv("nco_ver") or "4.9.3")) load("python_srw_aqm") diff --git a/modulefiles/tasks/hercules/plot_allvars.local.lua b/modulefiles/tasks/hercules/plot_allvars.local.lua index b49b8bb863..7cee04231e 100644 --- a/modulefiles/tasks/hercules/plot_allvars.local.lua +++ b/modulefiles/tasks/hercules/plot_allvars.local.lua @@ -1,3 +1,3 @@ unload("python") load("conda") -setenv("SRW_ENV", "srw_graphics") +setenv("SRW_GRAPHICS_ENV", "srw_graphics") diff --git a/modulefiles/tasks/hercules/python_srw.lua b/modulefiles/tasks/hercules/python_srw.lua deleted file mode 100644 index fe6c73a7d5..0000000000 --- a/modulefiles/tasks/hercules/python_srw.lua +++ /dev/null @@ -1,3 +0,0 @@ -unload("python") -load("conda") -setenv("SRW_ENV", "srw_app") diff --git a/modulefiles/tasks/jet/get_obs.local.lua b/modulefiles/tasks/jet/get_obs.local.lua index dcca3116d8..e8d902abab 100644 --- a/modulefiles/tasks/jet/get_obs.local.lua +++ b/modulefiles/tasks/jet/get_obs.local.lua @@ -1,3 +1,2 @@ load("hpss") -unload("python") -load("python_srw") +load("run_vx.local") diff --git a/modulefiles/tasks/jet/plot_allvars.local.lua b/modulefiles/tasks/jet/plot_allvars.local.lua index b7e9528710..85291013c7 100644 --- a/modulefiles/tasks/jet/plot_allvars.local.lua +++ b/modulefiles/tasks/jet/plot_allvars.local.lua @@ -1,2 +1,2 @@ load("conda") -setenv("SRW_ENV", "srw_graphics") +setenv("SRW_GRAPHICS_ENV", "srw_graphics") diff --git a/modulefiles/tasks/noaacloud/get_obs.local.lua b/modulefiles/tasks/noaacloud/get_obs.local.lua new file mode 100644 index 0000000000..c03abd8dfe --- /dev/null +++ b/modulefiles/tasks/noaacloud/get_obs.local.lua @@ -0,0 +1 @@ +load("run_vx.local") diff --git a/modulefiles/tasks/noaacloud/plot_allvars.local.lua 
b/modulefiles/tasks/noaacloud/plot_allvars.local.lua index cc122f69b2..85291013c7 100644 --- a/modulefiles/tasks/noaacloud/plot_allvars.local.lua +++ b/modulefiles/tasks/noaacloud/plot_allvars.local.lua @@ -1,5 +1,2 @@ -unload("python") -append_path("MODULEPATH","/contrib/EPIC/miniconda3/modulefiles") -load(pathJoin("miniconda3", os.getenv("miniconda3_ver") or "4.12.0")) - -setenv("SRW_ENV", "regional_workflow") +load("conda") +setenv("SRW_GRAPHICS_ENV", "srw_graphics") diff --git a/modulefiles/tasks/noaacloud/python_srw.lua b/modulefiles/tasks/noaacloud/python_srw.lua index a2dd45084c..e6e4268c35 100644 --- a/modulefiles/tasks/noaacloud/python_srw.lua +++ b/modulefiles/tasks/noaacloud/python_srw.lua @@ -1,2 +1,7 @@ load("conda") setenv("SRW_ENV", "srw_app") + +-- Add missing libstdc++ library for Azure +if os.getenv("PW_CSP") == "azure" then + setenv("LD_PRELOAD","/opt/nvidia/nsight-systems/2023.1.2/host-linux-x64/libstdc++.so.6") +end diff --git a/modulefiles/tasks/noaacloud/run_vx.local.lua b/modulefiles/tasks/noaacloud/run_vx.local.lua index 737fc4f7cc..67b1b98ad6 100644 --- a/modulefiles/tasks/noaacloud/run_vx.local.lua +++ b/modulefiles/tasks/noaacloud/run_vx.local.lua @@ -25,3 +25,8 @@ end load("ufs-pyenv") load("conda") setenv("SRW_ENV", "srw_app") + +-- Add missing libstdc++ library for Azure +if os.getenv("PW_CSP") == "azure" then + setenv("LD_PRELOAD","/opt/nvidia/nsight-systems/2023.1.2/host-linux-x64/libstdc++.so.6") +end diff --git a/modulefiles/tasks/orion/aqm_ics.local.lua b/modulefiles/tasks/orion/aqm_ics.local.lua index 2aac950d8d..df0e35d5da 100644 --- a/modulefiles/tasks/orion/aqm_ics.local.lua +++ b/modulefiles/tasks/orion/aqm_ics.local.lua @@ -1,2 +1 @@ -load(pathJoin("nco", os.getenv("nco_ver") or "4.9.3")) load("python_srw_aqm") diff --git a/modulefiles/tasks/orion/aqm_lbcs.local.lua b/modulefiles/tasks/orion/aqm_lbcs.local.lua index 5a7b0cece6..df0e35d5da 100644 --- a/modulefiles/tasks/orion/aqm_lbcs.local.lua +++ b/modulefiles/tasks/orion/aqm_lbcs.local.lua @@ -1,2 +1 @@ -load(pathJoin("nco", os.getenv("nco_ver") or "4.9.3")) -load("miniconda_regional_workflow_cmaq") +load("python_srw_aqm") diff --git a/modulefiles/tasks/orion/fire_emission.local.lua b/modulefiles/tasks/orion/fire_emission.local.lua index 2aac950d8d..df0e35d5da 100644 --- a/modulefiles/tasks/orion/fire_emission.local.lua +++ b/modulefiles/tasks/orion/fire_emission.local.lua @@ -1,2 +1 @@ -load(pathJoin("nco", os.getenv("nco_ver") or "4.9.3")) load("python_srw_aqm") diff --git a/modulefiles/tasks/orion/get_obs.local.lua b/modulefiles/tasks/orion/get_obs.local.lua new file mode 100644 index 0000000000..c03abd8dfe --- /dev/null +++ b/modulefiles/tasks/orion/get_obs.local.lua @@ -0,0 +1 @@ +load("run_vx.local") diff --git a/modulefiles/tasks/orion/nexus_emission.local.lua b/modulefiles/tasks/orion/nexus_emission.local.lua index 2aac950d8d..df0e35d5da 100644 --- a/modulefiles/tasks/orion/nexus_emission.local.lua +++ b/modulefiles/tasks/orion/nexus_emission.local.lua @@ -1,2 +1 @@ -load(pathJoin("nco", os.getenv("nco_ver") or "4.9.3")) load("python_srw_aqm") diff --git a/modulefiles/tasks/orion/nexus_post_split.local.lua b/modulefiles/tasks/orion/nexus_post_split.local.lua index 2aac950d8d..df0e35d5da 100644 --- a/modulefiles/tasks/orion/nexus_post_split.local.lua +++ b/modulefiles/tasks/orion/nexus_post_split.local.lua @@ -1,2 +1 @@ -load(pathJoin("nco", os.getenv("nco_ver") or "4.9.3")) load("python_srw_aqm") diff --git a/modulefiles/tasks/orion/plot_allvars.local.lua
b/modulefiles/tasks/orion/plot_allvars.local.lua index b49b8bb863..7cee04231e 100644 --- a/modulefiles/tasks/orion/plot_allvars.local.lua +++ b/modulefiles/tasks/orion/plot_allvars.local.lua @@ -1,3 +1,3 @@ unload("python") load("conda") -setenv("SRW_ENV", "srw_graphics") +setenv("SRW_GRAPHICS_ENV", "srw_graphics") diff --git a/modulefiles/tasks/orion/python_srw.lua b/modulefiles/tasks/orion/python_srw.lua deleted file mode 100644 index fe6c73a7d5..0000000000 --- a/modulefiles/tasks/orion/python_srw.lua +++ /dev/null @@ -1,3 +0,0 @@ -unload("python") -load("conda") -setenv("SRW_ENV", "srw_app") diff --git a/modulefiles/tasks/orion/run_vx.local.lua b/modulefiles/tasks/orion/run_vx.local.lua index 5bafb4d46b..737fc4f7cc 100644 --- a/modulefiles/tasks/orion/run_vx.local.lua +++ b/modulefiles/tasks/orion/run_vx.local.lua @@ -1,8 +1,6 @@ --[[ Compiler-specific modules are used for met and metplus libraries --]] ---load("build_orion_intel") - local met_ver = (os.getenv("met_ver") or "11.1.0") local metplus_ver = (os.getenv("metplus_ver") or "5.1.0") if (mode() == "load") then @@ -20,12 +18,10 @@ setenv("METPLUS_VERSION", metplus_ver) setenv("METPLUS_ROOT", base_metplus) setenv("METPLUS_PATH", base_metplus) - if (mode() == "unload") then unload(pathJoin("met", met_ver)) unload(pathJoin("metplus",metplus_ver)) end ---load("ufs-pyenv") -load("stack-python/3.10.8") +load("ufs-pyenv") load("conda") setenv("SRW_ENV", "srw_app") diff --git a/modulefiles/wflow_cheyenne.lua b/modulefiles/wflow_cheyenne.lua deleted file mode 100644 index fd4bc3eae5..0000000000 --- a/modulefiles/wflow_cheyenne.lua +++ /dev/null @@ -1,23 +0,0 @@ -help([[ -This module loads python environement for running the UFS SRW App on -on the CISL machine Cheyenne -]]) - -whatis([===[Loads libraries needed for running the UFS SRW App on Cheyenne ]===]) - -load("ncarenv") - -append_path("MODULEPATH","/glade/p/ral/jntp/UFS_SRW_app/modules") -load("rocoto") - -unload("python") - -load("conda") - - -if mode() == "load" then - LmodMsgRaw([===[Please do the following to activate conda: - > conda activate srw_app -]===]) -end - diff --git a/modulefiles/wflow_derecho.lua b/modulefiles/wflow_derecho.lua index d9a3e24e2f..28bc7ec2f6 100644 --- a/modulefiles/wflow_derecho.lua +++ b/modulefiles/wflow_derecho.lua @@ -5,8 +5,6 @@ on the CISL machine Derecho (Cray) whatis([===[Loads libraries for running the UFS SRW Workflow on Derecho ]===]) -load("ncarenv") - append_path("MODULEPATH","/glade/work/epicufsrt/contrib/derecho/rocoto/modulefiles") load("rocoto") diff --git a/modulefiles/wflow_gaea.lua b/modulefiles/wflow_gaea.lua index 6c24672c30..c2207ff57b 100644 --- a/modulefiles/wflow_gaea.lua +++ b/modulefiles/wflow_gaea.lua @@ -11,7 +11,6 @@ load("rocoto") load("conda") pushenv("MKLROOT", "/opt/intel/oneapi/mkl/2023.1.0/") -setenv("LD_PRELOAD", "/opt/cray/pe/gcc/12.2.0/snos/lib64/libstdc++.so.6") if mode() == "load" then LmodMsgRaw([===[Please do the following to activate conda: diff --git a/modulefiles/wflow_noaacloud.lua b/modulefiles/wflow_noaacloud.lua index ebf907545b..27fb8296c1 100644 --- a/modulefiles/wflow_noaacloud.lua +++ b/modulefiles/wflow_noaacloud.lua @@ -8,15 +8,8 @@ whatis([===[Loads libraries needed for running the UFS SRW App on NOAA cloud ]== prepend_path("MODULEPATH","/apps/modules/modulefiles") load("rocoto") - - load("conda") -setenv("PROJ_LIB","/contrib/EPIC/miniconda3/4.12.0/envs/regional_workflow/share/proj") -setenv("OPT","/contrib/EPIC/hpc-modules") 
-append_path("PATH","/contrib/EPIC/miniconda3/4.12.0/envs/regional_workflow/bin") -prepend_path("PATH","/contrib/EPIC/bin") - if mode() == "load" then LmodMsgRaw([===[Please do the following to activate conda: > conda activate srw_app diff --git a/modulefiles/wflow_orion.lua b/modulefiles/wflow_orion.lua index 711991bb09..8bbc5663da 100644 --- a/modulefiles/wflow_orion.lua +++ b/modulefiles/wflow_orion.lua @@ -6,9 +6,8 @@ the MSU machine Orion whatis([===[Loads libraries needed for running SRW on Orion ]===]) load("contrib") -load("rocoto") -load("wget") - +load("ruby/3.2.3") +load("rocoto/1.3.7") unload("python") load("conda") diff --git a/parm/data_locations.yml b/parm/data_locations.yml index 7901f4c085..a706676f9b 100644 --- a/parm/data_locations.yml +++ b/parm/data_locations.yml @@ -236,6 +236,20 @@ RAP: file_names: <<: *rap_file_names +RRFS: + hpss: + protocol: htar + file_names: &rrfs_file_names + anl: + - rrfs.t{hh}z.prslev.f{fcst_hr:03d}.conus.grib2 + fcst: + - rrfs.t{hh}z.prslev.f{fcst_hr:03d}.conus.grib2 + aws: + protocol: download + url: https://noaa-rrfs-pds.s3.amazonaws.com/rrfs_a/rrfs_a.{yyyymmdd}/{hh}/control/ + file_names: + <<: *rrfs_file_names + HRRR: hpss: protocol: htar @@ -305,10 +319,7 @@ CCPA_obs: - /NCEPPROD/hpssprod/runhistory/rh{yyyy}/{yyyy}{mm}/{yyyy}{mm}{dd} - /NCEPPROD/hpssprod/runhistory/rh{yyyy}/{yyyy}{mm}/{yyyy}{mm}{dd} archive_internal_dir: - - "./00" - - "./06" - - "./12" - - "./18" + - "./{hh}" archive_file_names: - "com2_ccpa_prod_ccpa.{yyyy}{mm}{dd}.tar" - "gpfs_dell1_nco_ops_com_ccpa_prod_ccpa.{yyyy}{mm}{dd}.tar" @@ -316,7 +327,7 @@ CCPA_obs: - "com_ccpa_v4.2_ccpa.{yyyy}{mm}{dd}.tar" file_names: obs: - - "ccpa.t{hh}z.01h.hrap.conus.gb2" + - "ccpa.t*z.01h.hrap.conus.gb2" MRMS_obs: hpss: @@ -351,11 +362,13 @@ NDAS_obs: - /NCEPPROD/hpssprod/runhistory/rh{yyyy}/{yyyy}{mm}/{yyyy}{mm}{dd} - /NCEPPROD/hpssprod/runhistory/rh{yyyy}/{yyyy}{mm}/{yyyy}{mm}{dd} - /NCEPPROD/hpssprod/runhistory/rh{yyyy}/{yyyy}{mm}/{yyyy}{mm}{dd} + - /NCEPPROD/hpssprod/runhistory/rh{yyyy}/{yyyy}{mm}/{yyyy}{mm}{dd} archive_file_names: - "com2_nam_prod_nam.{yyyy}{mm}{dd}{hh}.bufr.tar" - "gpfs_dell1_nco_ops_com_nam_prod_nam.{yyyy}{mm}{dd}{hh}.bufr.tar" - "com_nam_prod_nam.{yyyy}{mm}{dd}{hh}.bufr.tar" - "com_obsproc_v1.1_nam.{yyyy}{mm}{dd}{hh}.bufr.tar" + - "com_obsproc_v1.2_nam.{yyyy}{mm}{dd}{hh}.bufr.tar" file_names: obs: - "./nam.t{hh}z.prepbufr.tm*.nr" @@ -372,6 +385,6 @@ NOHRSC_obs: - "dcom_{yyyy}{mm}{dd}.tar" file_names: obs: - - "sfav2_CONUS_*h_{yyyy}{mm}{dd}{hh}_grid184.grb2" + - "sfav2_CONUS_6h_{yyyy}{mm}{dd}*_grid184.grb2" archive_internal_dir: - ./wgrbbul/nohrsc_snowfall/ diff --git a/parm/diag_table.FV3_HRRR b/parm/diag_table.FV3_HRRR index 893ca25a2b..c7bbc59b9e 100755 --- a/parm/diag_table.FV3_HRRR +++ b/parm/diag_table.FV3_HRRR @@ -339,6 +339,9 @@ "gfs_phys", "spp_wts_rad", "spp_wts_rad", "fv3_history", "all", .false., "none", 2 "gfs_phys", "spp_wts_gwd", "spp_wts_gwd", "fv3_history", "all", .false., "none", 2 +# Additional entries from user-specified SRW settings (e.g. UFS-FIRE) +{{ additional_entries }} + #============================================================================================= # #====> This file can be used with diag_manager/v2.0a (or higher) <==== diff --git a/parm/metplus/EnsembleStat.conf b/parm/metplus/EnsembleStat.conf new file mode 100644 index 0000000000..15ba1d9321 --- /dev/null +++ b/parm/metplus/EnsembleStat.conf @@ -0,0 +1,735 @@ +# {{MetplusToolName}} METplus Configuration + +[config] + +# List of applications (tools) to run. 
+PROCESS_LIST = {{MetplusToolName}} + +# time looping - options are INIT, VALID, RETRO, and REALTIME +# If set to INIT or RETRO: +# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set +# If set to VALID or REALTIME: +# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set +LOOP_BY = INIT + +# Format of INIT_BEG and INIT_END using % items +# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. +# see www.strftime.org for more information +# %Y%m%d%H expands to YYYYMMDDHH +INIT_TIME_FMT = %Y%m%d%H + +# Start time for METplus run - must match INIT_TIME_FMT +INIT_BEG = {{cdate}} + +# End time for METplus run - must match INIT_TIME_FMT +INIT_END = {{cdate}} + +# Increment between METplus runs (in seconds if no units are specified). +# Must be >= 60 seconds. +INIT_INCREMENT = 3600 + +# List of forecast leads to process for each run time (init or valid) +# In hours if units are not specified +# If unset, defaults to 0 (don't loop through forecast leads) +LEAD_SEQ = {{vx_leadhr_list}} +# +# Order of loops to process data - Options are times, processes +# Not relevant if only one item is in the PROCESS_LIST +# times = run all wrappers in the PROCESS_LIST for a single run time, then +# increment the run time and run all wrappers again until all times have +# been evaluated. +# processes = run the first wrapper in the PROCESS_LIST for all times +# specified, then repeat for the next item in the PROCESS_LIST until all +# wrappers have been run +# +LOOP_ORDER = times +# +# Specify the name of the METplus log file. +# +LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} +# +# Specify the location and name of the final METplus conf file. +# +METPLUS_CONF = {{ '{' ~ METPLUS_TOOL_NAME ~ '_OUTPUT_DIR}' }}/metplus_final.{{metplus_config_fn}} +# +# Location of MET configuration file to pass to {{MetplusToolName}}. +# +# References PARM_BASE, which is the location of the parm directory +# corresponding to the ush directory of the run_metplus.py script that +# is called or the value of the environment variable METPLUS_PARM_BASE +# if set. +# +{{METPLUS_TOOL_NAME}}_CONFIG_FILE = {PARM_BASE}/met_config/{{MetplusToolName}}Config_wrapped +# +# Name to identify model (forecast) data in output. +# +MODEL = {{vx_fcst_model_name}} + +{{METPLUS_TOOL_NAME}}_DESC = NA +# +# Name to identify observation data in output. +# +OBTYPE = {{obtype}} +# +# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. +# +LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} +# +# Observation data time window(s). +# +{%- if input_field_group in ['APCP', 'ASNOW'] %} +OBS_FILE_WINDOW_BEGIN = 0 +OBS_FILE_WINDOW_END = 0 +OBS_{{METPLUS_TOOL_NAME}}_WINDOW_BEGIN = 0 +OBS_{{METPLUS_TOOL_NAME}}_WINDOW_END = 0 +{%- elif input_field_group in ['REFC', 'RETOP'] %} +OBS_FILE_WINDOW_BEGIN = -300 +OBS_FILE_WINDOW_END = 300 +OBS_{{METPLUS_TOOL_NAME}}_WINDOW_BEGIN = 0 +OBS_{{METPLUS_TOOL_NAME}}_WINDOW_END = 0 +{%- elif input_field_group in ['SFC', 'UPA'] %} +OBS_WINDOW_BEGIN = -1799 +OBS_WINDOW_END = 1800 +OBS_{{METPLUS_TOOL_NAME}}_WINDOW_BEGIN = {OBS_WINDOW_BEGIN} +OBS_{{METPLUS_TOOL_NAME}}_WINDOW_END = {OBS_WINDOW_END} +{%- endif %} + +# number of expected members for ensemble. 
Should correspond with the +# number of items in the list for FCST_{{METPLUS_TOOL_NAME}}_INPUT_TEMPLATE +{{METPLUS_TOOL_NAME}}_N_MEMBERS = {{num_ens_members}} + +# ens.ens_thresh value in the MET config file +# threshold for ratio of valid files to expected files to allow app to run +{{METPLUS_TOOL_NAME}}_ENS_THRESH = 0.05 + +# ens.vld_thresh value in the MET config file +{{METPLUS_TOOL_NAME}}_ENS_VLD_THRESH = 1.0 + +{%- if input_field_group in ['SFC', 'UPA'] %} + +{{METPLUS_TOOL_NAME}}_OBS_QUALITY_INC = 0, 1, 2, 3, 9, NA +#{{METPLUS_TOOL_NAME}}_OBS_QUALITY_EXC = +{%- endif %} + +# {{METPLUS_TOOL_NAME}}_MET_OBS_ERR_TABLE is not required. +# If the variable is not defined, or the value is not set, then the MET +# default is used. +{%- if input_field_group in ['APCP', 'ASNOW'] %} +{{METPLUS_TOOL_NAME}}_MET_OBS_ERR_TABLE = {MET_BASE}/table_files/obs_error_table.txt +{%- elif input_field_group in ['REFC', 'RETOP'] %} +{{METPLUS_TOOL_NAME}}_MET_OBS_ERR_TABLE = +{%- elif input_field_group in ['SFC', 'UPA'] %} +{{METPLUS_TOOL_NAME}}_MET_OBS_ERR_TABLE = {MET_BASE}/table_files/obs_error_table.txt +{%- endif %} + + +# Used in the MET config file for: regrid to_grid field +{%- set comment_or_null = '' %} +{%- set regrid_to_grid = '' %} +{%- set regrid_method = '' %} +{%- if input_field_group in ['APCP', 'ASNOW'] %} + {%- set comment_or_null = '' %} + {%- set regrid_to_grid = 'FCST' %} + {%- set regrid_method = 'BUDGET' %} +{%- elif input_field_group in ['REFC', 'RETOP'] %} + {%- set comment_or_null = '' %} + {%- set regrid_to_grid = 'FCST' %} + {%- set regrid_method = 'BUDGET' %} +{%- elif input_field_group in ['SFC', 'UPA'] %} + {%- set comment_or_null = '#' %} + {%- set regrid_to_grid = 'NONE' %} + {%- set regrid_method = 'BILIN' %} +{%- endif %} +{{METPLUS_TOOL_NAME}}_REGRID_TO_GRID = {{regrid_to_grid}} +{{comment_or_null}}{{METPLUS_TOOL_NAME}}_REGRID_METHOD = {{regrid_method}} +{{comment_or_null}}{{METPLUS_TOOL_NAME}}_REGRID_WIDTH = 2 +{{comment_or_null}}{{METPLUS_TOOL_NAME}}_REGRID_VLD_THRESH = 0.5 +{{comment_or_null}}{{METPLUS_TOOL_NAME}}_REGRID_SHAPE = SQUARE + +{{METPLUS_TOOL_NAME}}_CENSOR_THRESH = +{{METPLUS_TOOL_NAME}}_CENSOR_VAL = +{% if input_field_group in ['APCP', 'ASNOW'] %} +{{METPLUS_TOOL_NAME}}_MESSAGE_TYPE = +{{METPLUS_TOOL_NAME}}_DUPLICATE_FLAG = UNIQUE +{{METPLUS_TOOL_NAME}}_SKIP_CONST = TRUE +{{METPLUS_TOOL_NAME}}_OBS_ERROR_FLAG = TRUE +{%- elif input_field_group in ['REFC', 'RETOP'] %} +# Should this parameter be set to something other than ADPSFC (maybe +# just leave empty) since we are not verifying surface fields? 
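+# (ADPSFC is retained here for now; note that the SFC/UPA branch below instead sets the message type from fieldname_in_met_filedir_names.)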
+{{METPLUS_TOOL_NAME}}_MESSAGE_TYPE = ADPSFC +{{METPLUS_TOOL_NAME}}_DUPLICATE_FLAG = NONE +{{METPLUS_TOOL_NAME}}_SKIP_CONST = TRUE +{{METPLUS_TOOL_NAME}}_OBS_ERROR_FLAG = FALSE +{%- elif input_field_group in ['SFC', 'UPA'] %} +{{METPLUS_TOOL_NAME}}_MESSAGE_TYPE = {{fieldname_in_met_filedir_names}} +{{METPLUS_TOOL_NAME}}_DUPLICATE_FLAG = NONE +{{METPLUS_TOOL_NAME}}_SKIP_CONST = FALSE +{{METPLUS_TOOL_NAME}}_OBS_ERROR_FLAG = FALSE +{%- endif %} + +{{METPLUS_TOOL_NAME}}_ENS_SSVAR_BIN_SIZE = 1.0 +{{METPLUS_TOOL_NAME}}_ENS_PHIST_BIN_SIZE = 0.05 + +#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_FILE_NAME = +#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_FIELD = +#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_REGRID_METHOD = +#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_REGRID_WIDTH = +#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_REGRID_VLD_THRESH = +#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_REGRID_SHAPE = +#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_TIME_INTERP_METHOD = +#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_MATCH_MONTH = +#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_DAY_INTERVAL = 31 +#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_HOUR_INTERVAL = 6 + +#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_FILE_NAME = +#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_FIELD = +#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_REGRID_METHOD = +#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_REGRID_WIDTH = +#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_REGRID_VLD_THRESH = +#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_REGRID_SHAPE = +#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_TIME_INTERP_METHOD = +#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_MATCH_MONTH = +#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_DAY_INTERVAL = 31 +#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_HOUR_INTERVAL = 6 + +{% set comment_or_null = '' %} +{%- if input_field_group in ['APCP', 'ASNOW'] %} + {%- set comment_or_null = '' %} +{%- elif input_field_group in ['REFC', 'RETOP'] %} + {%- set comment_or_null = '' %} +{%- elif input_field_group in ['SFC', 'UPA'] %} + {%- set comment_or_null = '#' %} +{%- endif %} +{{comment_or_null}}{{METPLUS_TOOL_NAME}}_CLIMO_CDF_BINS = 1 +{{comment_or_null}}{{METPLUS_TOOL_NAME}}_CLIMO_CDF_CENTER_BINS = False +{{METPLUS_TOOL_NAME}}_CLIMO_CDF_WRITE_BINS = False +{% if input_field_group in ['APCP', 'ASNOW'] %} +{{METPLUS_TOOL_NAME}}_MASK_GRID = +{%- elif input_field_group in ['REFC', 'RETOP'] %} +{{METPLUS_TOOL_NAME}}_MASK_GRID = FULL +{%- elif input_field_group in ['SFC', 'UPA'] %} +{{METPLUS_TOOL_NAME}}_MASK_GRID = +{%- endif %} + +{{METPLUS_TOOL_NAME}}_CI_ALPHA = 0.05 + +{{METPLUS_TOOL_NAME}}_INTERP_FIELD = BOTH +{{METPLUS_TOOL_NAME}}_INTERP_VLD_THRESH = 1.0 +{{METPLUS_TOOL_NAME}}_INTERP_SHAPE = SQUARE +{{METPLUS_TOOL_NAME}}_INTERP_METHOD = NEAREST +{{METPLUS_TOOL_NAME}}_INTERP_WIDTH = 1 + +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_ECNT = STAT +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_RPS = NONE +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_RHIST = STAT +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_PHIST = STAT +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_ORANK = STAT +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_SSVAR = STAT +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_RELP = STAT + +{{METPLUS_TOOL_NAME}}_ENSEMBLE_FLAG_RANK = FALSE +{{METPLUS_TOOL_NAME}}_ENSEMBLE_FLAG_WEIGHT = FALSE +# +# Forecast and observation variables and levels as specified in the fcst +# field dictionary of the MET configuration file. Specify as FCST_VARn_NAME, +# FCST_VARn_LEVELS, (optional) FCST_VARn_OPTION and OBS_VARn_NAME, +# OBS_VARn_LEVELS, (optional) OBS_VARn_OPTION. +# +{#- +Import the file containing jinja macros. 
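+(These macros are referenced throughout the template below, e.g. metplus_macros.check_field_group(), metplus_macros.get_accumulation_no_zero_pad(), metplus_macros.set_delim_str(), and metplus_macros.print_err_and_quit().)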
#} +{%- import metplus_templates_dir ~ '/metplus_macros.jinja' as metplus_macros %} + +{#- +Jinja requires certain variables to be defined globally within the template +before they can be used in if-statements and other scopes (see Jinja scoping +rules). Define such variables. +#} +{%- set indx_level_fcst = '' %} +{%- set indx_input_thresh_fcst = '' %} +{%- set error_msg = '' %} +{%- set opts_indent = '' %} +{%- set opts_indent_len = '' %} + +{%- set field_fcst = '' %} +{%- set field_obs = '' %} +{%- set level_fcst = '' %} +{%- set level_obs = '' %} +{%- set thresh_fcst = '' %} +{%- set thresh_obs = '' %} + +{%- set threshes_fcst = [] %} +{%- set threshes_obs = [] %} + +{#- +Get the set of valid field groups and ensure that the specified input +field group appears in this list. +#} +{%- set valid_field_groups = vx_config_dict.keys()|list %} +{{- metplus_macros.check_field_group(valid_field_groups, input_field_group) }} + +{#- +Reset the input forecast level so that if it happens to be an accumulation +(e.g. 'A03'), the leading zeros in front of the hour are stripped out (e.g. +reset to 'A3'). +#} +{%- set input_level_fcst = metplus_macros.get_accumulation_no_zero_pad(input_level_fcst) %} + +{#- +Extract from the configuration dictionary the set (which itself is a +dictionary) of fields, levels, and thresholds corresponding to the input +field group. Then set the delimiter string that separates forecast and +observation values in the various items (i.e. dictionary keys and values +representing field names, levels, and thresholds) in this dictionary. +#} +{%- set fields_levels_threshes_cpld = vx_config_dict[input_field_group] %} +{%- set delim_str = metplus_macros.set_delim_str() %} + +{#- +Loop over the fields and set field names, levels, thresholds, and/or +options for each field, both for forecasts and for observations, in the +METplus configuration file. +#} +{%- set ns = namespace(var_count = 0) %} +{%- for field_cpld, levels_threshes_cpld in fields_levels_threshes_cpld.items() %} + + {%- if delim_str in field_cpld %} + {%- set field_fcst, field_obs = field_cpld.split(delim_str) %} + {%- else %} + {%- set field_fcst = field_cpld %} + {%- set field_obs = field_cpld %} + {%- endif %} + +{#- +For convenience, create lists of valid forecast and observation levels +for the current field. +#} + {%- set valid_levels_fcst = [] %} + {%- set valid_levels_obs = [] %} + {%- for level_cpld, threshes_cpld in levels_threshes_cpld.items() %} + {%- if delim_str in level_cpld %} + {%- set level_fcst, level_obs = level_cpld.split(delim_str) %} + {%- else %} + {%- set level_fcst = level_cpld %} + {%- set level_obs = level_cpld %} + {%- endif %} + {%- set tmp = valid_levels_fcst.append(level_fcst) %} + {%- set tmp = valid_levels_obs.append(level_obs) %} + {%- endfor %} + +{#- +Make sure that the input forecast level (input_level_fcst) is set to a +valid value. +#} + {%- if (input_level_fcst != 'all') and (input_level_fcst not in valid_levels_fcst) %} + {%- set error_msg = '\n' ~ +'The input forecast level (input_level_fcst) must be set either to \'all\'\n' ~ +'or to one of the elements in the list of valid levels (valid_levels_fcst)\n' ~ +'for the current forecast field (field_fcst). This is not the case:\n' ~ +' field_fcst = ' ~ field_fcst ~ '\n' ~ +' valid_levels_fcst = ' ~ valid_levels_fcst ~ '\n' ~ +' input_level_fcst = ' ~ input_level_fcst ~ '\n' %} + {{metplus_macros.print_err_and_quit(error_msg)}} + {%- endif %} + +{#- +Loop over the (coupled) levels and corresponding lists of thresholds.
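+('Coupled' means a key or list item may hold both the forecast and the observation value joined by the delimiter returned by metplus_macros.set_delim_str(); e.g., if the delimiter were '%%' (a hypothetical value), the coupled level 'L0%%Z0' would split into level_fcst = 'L0' and level_obs = 'Z0'.)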
+Extract from these the level values for forecasts and observations and +use them to set the forecast and observation field names, levels, +thresholds, and/or options in the METplus configuration file. +#} + {%- for level_cpld, threshes_cpld in levels_threshes_cpld.items() %} + + {%- if delim_str in level_cpld %} + {%- set level_fcst, level_obs = level_cpld.split(delim_str) %} + {%- else %} + {%- set level_fcst = level_cpld %} + {%- set level_obs = level_cpld %} + {%- endif %} + + {%- set valid_threshes_fcst = [] %} + {%- set valid_threshes_obs = [] %} + {%- for thresh_cpld in threshes_cpld %} + {%- if delim_str in thresh_cpld %} + {%- set thresh_fcst, thresh_obs = thresh_cpld.split(delim_str) %} + {%- else %} + {%- set thresh_fcst = thresh_cpld %} + {%- set thresh_obs = thresh_cpld %} + {%- endif %} + {%- set tmp = valid_threshes_fcst.append(thresh_fcst) %} + {%- set tmp = valid_threshes_obs.append(thresh_obs) %} + {%- endfor %} + + {%- if (input_level_fcst == 'all') or (input_level_fcst == level_fcst) %} +{#- +Increment the METplus variable counter. +#} + {%- set ns.var_count = ns.var_count+1 %} + +{#- +Set forecast field name. Note that this has to exactly match the name +of the field in the input forecast file. + +For accumulated fields, the input forecast file is generated by MET's +PcpCombine tool. In that file, the field name consists of the forecast +field name here (field_fcst) with the accumulation period appended to +it (separated by an underscore), so we must do the same here to get an +exact match. +#} + {%- if (input_field_group in ['APCP', 'ASNOW']) %} +FCST_VAR{{ns.var_count}}_NAME = {{field_fcst}}_{{accum_hh}} + {%- else %} +FCST_VAR{{ns.var_count}}_NAME = {{field_fcst}} + {%- endif %} + +{#- +Set forecast field level. +#} +FCST_VAR{{ns.var_count}}_LEVELS = {{level_fcst}} + +{#- +Set forecast field threshold(s). Note that no forecast thresholds are +included in the METplus configuration file if input_thresh_fcst is set +to 'none'. +#} + {%- if (input_thresh_fcst != 'none') %} +{#- +If input_thresh_fcst is set to 'all', set the list of forecast thresholds +to the full set of valid values. +#} + {%- if (input_thresh_fcst == 'all') %} + + {%- set threshes_fcst = valid_threshes_fcst %} +{#- +If input_thresh_fcst is set to a specific value: +* If that value is valid, i.e. it exists in the list of valid forecast + thresholds, get its index in that list and use it to set the forecast + threshold to a one-element list containing that value. Note that the + index will be needed later below when setting the observation threshold(s). +* If the input forecast threshold is not valid, print out a warning message + and exit. 
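+(For instance, with valid_threshes_fcst = ['ge268', 'ge273', 'ge278'] and input_thresh_fcst = 'ge273' (values borrowed from the deleted per-field conf files below), the index is 1, so threshes_fcst becomes ['ge273'], and the same index later selects the matching observation threshold.)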
+#} + {%- else %} + + {%- if input_thresh_fcst in valid_threshes_fcst %} + {%- set indx_input_thresh_fcst = valid_threshes_fcst.index(input_thresh_fcst) %} + {%- set threshes_fcst = [valid_threshes_fcst[indx_input_thresh_fcst]] %} + {%- else %} + {%- set error_msg = '\n' ~ +'For the current forecast field (field_fcst) and forecast level (level_fcst),\n' ~ +'the input forecast threshold (input_thresh_fcst) does not exist in the list\n' ~ +'of valid forecast thresholds (valid_threshes_fcst):\n' ~ +' field_fcst = ' ~ field_fcst ~ '\n' ~ +' level_fcst = ' ~ level_fcst ~ '\n' ~ +' valid_threshes_fcst = ' ~ valid_threshes_fcst ~ '\n' ~ +' input_thresh_fcst = ' ~ input_thresh_fcst ~ '\n' %} + {{metplus_macros.print_err_and_quit(error_msg)}} + {%- endif %} + + {%- endif %} +{#- +If threshes_fcst has been reset to something other than its default +value of an empty list, then set the forecast thresholds in the METplus +configuration file because that implies threshes_fcst was set above to +a non-empty value. Then reset threshes_fcst to its default value for +proper processing of thresholds for the next field. +#} + {%- if (threshes_fcst != []) %} +FCST_VAR{{ns.var_count}}_THRESH = {{threshes_fcst|join(', ')}} + {%- endif %} + {%- set threshes_fcst = [] %} + + {%- endif %} + +{#- +Set forecast field options. +#} + {%- set opts_indent_len = 20 %} + {%- if (ns.var_count > 9) and (ns.var_count <= 99) %} + {%- set opts_indent_len = opts_indent_len + 1 %} + {%- elif (ns.var_count > 99) and (ns.var_count <= 999) %} + {%- set opts_indent_len = opts_indent_len + 2 %} + {%- elif (ns.var_count > 999) %} + {%- set opts_indent_len = opts_indent_len + 3 %} + {%- endif %} + {%- set opts_indent = ' '*opts_indent_len %} + + {%- if input_field_group == 'APCP' %} + + {%- if field_fcst == 'APCP' %} +FCST_VAR{{ns.var_count}}_OPTIONS = ens_ssvar_bin_size = 50.0; +{{opts_indent}}ens_phist_bin_size = 0.05; + {%- endif %} + + {%- elif input_field_group == 'ASNOW' %} + + {%- if field_fcst == 'ASNOW' %} +FCST_VAR{{ns.var_count}}_OPTIONS = ens_ssvar_bin_size = 50.0; +{{opts_indent}}ens_phist_bin_size = 0.05; + {%- endif %} + + {%- elif input_field_group == 'REFC' %} + + {%- if field_fcst == 'REFC' %} +FCST_VAR{{ns.var_count}}_OPTIONS = ens_ssvar_bin_size = 50.0; +{{opts_indent}}ens_phist_bin_size = 0.05; + {%- endif %} + + {%- elif input_field_group == 'RETOP' %} + + {%- if field_fcst == 'RETOP' %} +FCST_VAR{{ns.var_count}}_OPTIONS = convert(x) = x * 3.28084 * 0.001; ;; Convert from meters to kilofeet. +{{opts_indent}}ens_ssvar_bin_size = 50.0; +{{opts_indent}}ens_phist_bin_size = 0.05; + {%- endif %} + + {%- elif input_field_group == 'SFC' %} + + {%- if field_fcst == 'HGT' %} +FCST_VAR{{ns.var_count}}_OPTIONS = GRIB_lvl_typ = 215; +{{opts_indent}}desc = "CEILING"; + {%- elif field_fcst == 'TCDC' %} +FCST_VAR{{ns.var_count}}_OPTIONS = GRIB_lvl_typ = 200; +{{opts_indent}}GRIB2_ipdtmpl_index=[27]; +{{opts_indent}}GRIB2_ipdtmpl_val=[255]; +{{opts_indent}}interp = { type = [ { method = NEAREST; width = 1; } ]; } + {%- elif field_fcst == 'VIS' %} +FCST_VAR{{ns.var_count}}_OPTIONS = interp = { type = [ { method = NEAREST; width = 1; } ]; } + {%- elif field_fcst == 'WIND' %} +FCST_VAR{{ns.var_count}}_OPTIONS = GRIB2_pdt = 0; ;; Derive instantaneous 10-m wind from U/V components, overriding max 10-m wind. + {%- endif %} + + {%- elif input_field_group == 'UPA' %} + + {%- if field_fcst == 'CAPE' %} +FCST_VAR{{ns.var_count}}_OPTIONS = cnt_thresh = [ >0 ]; + {%- endif %} + + {%- endif %} + +{#- +Set observation field name. 
Note that this has to exactly match the name +of the field in the input observation file. + +For accumulated fields, the input observation file is generated by MET's +PcpCombine tool. In that file, the field name consists of the observation +field name here (field_obs) with the accumulation period appended to it +(separated by an underscore), so we must do the same here to get an exact +match. +#} + {%- if (input_field_group in ['APCP', 'ASNOW']) %} +OBS_VAR{{ns.var_count}}_NAME = {{field_obs}}_{{accum_hh}} + {%- else %} +OBS_VAR{{ns.var_count}}_NAME = {{field_obs}} + {%- endif %} + +{#- +Set observation field level. +#} + {%- set indx_level_fcst = valid_levels_fcst.index(level_fcst) %} + {%- set level_obs = valid_levels_obs[indx_level_fcst] %} +OBS_VAR{{ns.var_count}}_LEVELS = {{level_obs}} + +{#- +Set observation field threshold(s). Note that no observation thresholds +are included in the METplus configuration file if input_thresh_fcst is +set to 'none'. +#} + {%- if (input_thresh_fcst != 'none') %} +{#- +If input_thresh_fcst is set to 'all', set the list of observation thresholds +to the full set of valid values. +#} + {%- if (input_thresh_fcst == 'all') %} + + {%- set threshes_obs = valid_threshes_obs %} +{#- +If input_thresh_fcst is set to a specific forecast threshold, then the +observation threshold is given by the element in the list of valid +observation thresholds that has the same index as that of input_thresh_fcst +in the list of valid forecast thresholds. +#} + {%- else %} + {%- set threshes_obs = [valid_threshes_obs[indx_input_thresh_fcst]] %} + {%- endif %} +{#- +If threshes_obs has been reset to something other than its default value +of an empty list, then set the observation thresholds in the METplus +configuration file because that implies threshes_obs was set above to +a non-empty value. Then reset threshes_obs to its default value for +proper processing of thresholds for the next field. +#} + {%- if (threshes_obs != []) %} +OBS_VAR{{ns.var_count}}_THRESH = {{threshes_obs|join(', ')}} + {%- endif %} + {%- set threshes_obs = [] %} + + {%- endif %} + +{#- +Set observation field options. +#} + {%- set opts_indent_len = opts_indent_len - 1 %} + {%- set opts_indent = ' '*opts_indent_len %} + + {%- if input_field_group == 'APCP' %} + + {%- if field_obs == 'APCP' %} +OBS_VAR{{ns.var_count}}_OPTIONS = {FCST_VAR{{ns.var_count}}_OPTIONS} + {%- endif %} + + {%- elif input_field_group == 'ASNOW' %} + + {%- if field_obs == 'ASNOW' %} +OBS_VAR{{ns.var_count}}_OPTIONS = {FCST_VAR{{ns.var_count}}_OPTIONS}; +{{opts_indent}}convert(x) = 100.0*x; + {%- endif %} + + {%- elif input_field_group == 'REFC' %} + + {%- if field_obs == 'MergedReflectivityQCComposite' %} +OBS_VAR{{ns.var_count}}_OPTIONS = censor_thresh = lt-20; +{{opts_indent}}censor_val = -20.0; +{{opts_indent}}cnt_thresh = [ >15 ]; +{{opts_indent}}cnt_logic = UNION; +{{opts_indent}}ens_ssvar_bin_size = 50.0; +{{opts_indent}}ens_phist_bin_size = 0.05; + {%- endif %} + + {%- elif input_field_group == 'RETOP' %} + + {%- if field_obs == 'EchoTop18' %} +OBS_VAR{{ns.var_count}}_OPTIONS = censor_thresh = lt-20; +{{opts_indent}}censor_val = -20.0; +{{opts_indent}}cnt_thresh = [ >15 ]; +{{opts_indent}}cnt_logic = UNION; +{{opts_indent}}convert(x) = x * 3280.84 * 0.001; ;; Convert from kilometers to kilofeet. 
+{{opts_indent}}ens_ssvar_bin_size = 50.0; +{{opts_indent}}ens_phist_bin_size = 0.05; + {%- endif %} + + {%- elif input_field_group == 'SFC' %} + + {%- if field_obs in ['DPT', 'TMP', 'WIND'] %} +OBS_VAR{{ns.var_count}}_OPTIONS = obs_error = { flag = TRUE; dist_type = NONE; dist_parm = []; inst_bias_scale = 1.0; inst_bias_offset = 0.0; min = NA; max = NA; } + {%- elif field_obs == 'CEILING' %} +OBS_VAR{{ns.var_count}}_OPTIONS = GRIB_lvl_typ = 215 + {%- endif %} + + {%- elif input_field_group == 'UPA' %} + + {%- if field_obs in ['DPT', 'HGT', 'TMP', 'WIND'] %} +OBS_VAR{{ns.var_count}}_OPTIONS = obs_error = { flag = TRUE; dist_type = NONE; dist_parm = []; inst_bias_scale = 1.0; inst_bias_offset = 0.0; min = NA; max = NA; } + {%- elif field_obs == 'CAPE' %} +OBS_VAR{{ns.var_count}}_OPTIONS = cnt_thresh = [ >0 ]; +{{opts_indent}}cnt_logic = UNION; + {%- elif field_obs == 'PBL' %} +OBS_VAR{{ns.var_count}}_OPTIONS = desc = "TKE"; + {%- endif %} + + {%- endif %} + +{#- +Print out a newline to separate the settings for the current field (both +forecast and observation settings) from those for the next field. +#} + {{- '\n' }} + + {%- endif %} + + {%- endfor %} +{%- endfor %} +[dir] +# +# OUTPUT_BASE apparently has to be set to something; it cannot be left +# to its default value. But it is not explicitly used elsewhere in this +# configuration file. +# +OUTPUT_BASE = {{output_base}} +# +# Point observation input directory for {{MetplusToolName}}. +# +{%- if input_field_group in ['SFC', 'UPA'] %} +OBS_{{METPLUS_TOOL_NAME}}_POINT_INPUT_DIR = {{obs_input_dir}} +{%- else %} +OBS_{{METPLUS_TOOL_NAME}}_POINT_INPUT_DIR = +{%- endif %} +# +# Grid observation input directory for {{MetplusToolName}}. +# +{%- if input_field_group in ['SFC', 'UPA'] %} +OBS_{{METPLUS_TOOL_NAME}}_GRID_INPUT_DIR = +{%- else %} +OBS_{{METPLUS_TOOL_NAME}}_GRID_INPUT_DIR = {{obs_input_dir}} +{%- endif %} +# +# Forecast model input directory for {{MetplusToolName}}. +# +FCST_{{METPLUS_TOOL_NAME}}_INPUT_DIR = {{fcst_input_dir}} +# +# Directory containing climatology mean input to {{MetplusToolName}}. Not used +# in this example. +# +{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_INPUT_DIR = +# +# Directory containing climatology standard deviation input to {{MetplusToolName}}. Not used +# in this example. +# +{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_INPUT_DIR = +# +# Output directory for {{MetplusToolName}}. +# +{{METPLUS_TOOL_NAME}}_OUTPUT_DIR = {{output_dir}} +# +# Directory for staging data. +# +STAGING_DIR = {{staging_dir}} + +# End of [dir] section and start of [filename_templates] section. +[filename_templates] +# +# Template for point observation input to {{MetplusToolName}} relative to +# OBS_{{METPLUS_TOOL_NAME}}_POINT_INPUT_DIR. +# +{%- if input_field_group in ['SFC', 'UPA'] %} +OBS_{{METPLUS_TOOL_NAME}}_POINT_INPUT_TEMPLATE = {{obs_input_fn_template}} +{%- else %} +OBS_{{METPLUS_TOOL_NAME}}_POINT_INPUT_TEMPLATE = +{%- endif %} +# +# Template for gridded observation input to {{MetplusToolName}} relative to +# OBS_{{METPLUS_TOOL_NAME}}_GRID_INPUT_DIR. +# +{%- if input_field_group in ['SFC', 'UPA'] %} +OBS_{{METPLUS_TOOL_NAME}}_GRID_INPUT_TEMPLATE = +{%- else %} +OBS_{{METPLUS_TOOL_NAME}}_GRID_INPUT_TEMPLATE = {{obs_input_fn_template}} +{%- endif %} +# +# Template for forecast input to {{MetplusToolName}} relative to +# FCST_{{METPLUS_TOOL_NAME}}_INPUT_DIR. +# +# Note that this can be a comma-separated list of ensemble members +# or a single entry; filename wildcard characters (? or *) may be used.
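+# (Hypothetical illustration: for a two-member ensemble the rendered value would hold two comma-separated paths, one per member, or a single path using ? or * wildcards to match all members.)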
+# +FCST_{{METPLUS_TOOL_NAME}}_INPUT_TEMPLATE = {{fcst_input_fn_template}} +# +# Template for output from {{MetplusToolName}} relative to {{METPLUS_TOOL_NAME}}_OUTPUT_DIR. +# +{{METPLUS_TOOL_NAME}}_OUTPUT_TEMPLATE = +{{METPLUS_TOOL_NAME}}_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE}_{lead?fmt=%H%M%S}L +# +# Template for climatology input to {{MetplusToolName}} relative to +# {{METPLUS_TOOL_NAME}}_CLIMO_MEAN_INPUT_DIR. Not used in this example. +# +{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_INPUT_TEMPLATE = +# +# Template for climatology input to {{MetplusToolName}} relative to +# {{METPLUS_TOOL_NAME}}_CLIMO_STDEV_INPUT_DIR. Not used in this example. +# +{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_INPUT_TEMPLATE = +# +# Variable used to specify one or more verification mask files for +# {{MetplusToolName}}. Not used for this example. +# +{{METPLUS_TOOL_NAME}}_VERIFICATION_MASK_TEMPLATE = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly diff --git a/parm/metplus/EnsembleStat_ADPSFC.conf b/parm/metplus/EnsembleStat_ADPSFC.conf deleted file mode 100644 index 07238030c1..0000000000 --- a/parm/metplus/EnsembleStat_ADPSFC.conf +++ /dev/null @@ -1,307 +0,0 @@ -# EnsembleStat METplus Configuration - -[config] - -# List of applications (tools) to run. -PROCESS_LIST = EnsembleStat - -# time looping - options are INIT, VALID, RETRO, and REALTIME -# If set to INIT or RETRO: -# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set -# If set to VALID or REALTIME: -# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set -LOOP_BY = INIT - -# Format of INIT_BEG and INIT_END using % items -# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. -# see www.strftime.org for more information -# %Y%m%d%H expands to YYYYMMDDHH -INIT_TIME_FMT = %Y%m%d%H - -# Start time for METplus run - must match INIT_TIME_FMT -INIT_BEG = {{cdate}} - -# End time for METplus run - must match INIT_TIME_FMT -INIT_END = {{cdate}} - -# Increment between METplus runs (in seconds if no units are specified). -# Must be >= 60 seconds. -INIT_INCREMENT = 3600 - -# List of forecast leads to process for each run time (init or valid) -# In hours if units are not specified -# If unset, defaults to 0 (don't loop through forecast leads) -LEAD_SEQ = {{fhr_list}} -# -# Order of loops to process data - Options are times, processes -# Not relevant if only one item is in the PROCESS_LIST -# times = run all wrappers in the PROCESS_LIST for a single run time, then -# increment the run time and run all wrappers again until all times have -# been evaluated. -# processes = run the first wrapper in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST until all -# wrappers have been run -# -LOOP_ORDER = times -# -# Specify the name of the METplus log file. -# -LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} -# -# Specify the location and name of the final METplus conf file. -# -METPLUS_CONF = {ENSEMBLE_STAT_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}} -# -# Location of MET configuration file to pass to EnsembleStat. -# -# References PARM_BASE, which is the location of the parm directory -# corresponding to the ush directory of the run_metplus.py script that -# is called or the value of the environment variable METPLUS_PARM_BASE -# if set. -# -ENSEMBLE_STAT_CONFIG_FILE = {PARM_BASE}/met_config/EnsembleStatConfig_wrapped -# -# Name to identify model (forecast) data in output. 
-# -MODEL = {{vx_fcst_model_name}} - -ENSEMBLE_STAT_DESC = NA -# -# Name to identify observation data in output. -# -OBTYPE = {{obtype}} -# -# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. -# -LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} -# -# Observation data time window(s). -# -OBS_WINDOW_BEGIN = -1799 -OBS_WINDOW_END = 1800 -OBS_ENSEMBLE_STAT_WINDOW_BEGIN = {OBS_WINDOW_BEGIN} -OBS_ENSEMBLE_STAT_WINDOW_END = {OBS_WINDOW_END} - -# number of expected members for ensemble. Should correspond with the -# number of items in the list for FCST_ENSEMBLE_STAT_INPUT_TEMPLATE -ENSEMBLE_STAT_N_MEMBERS = {{num_ens_members}} - -# ens.ens_thresh value in the MET config file -# threshold for ratio of valid files to expected files to allow app to run -ENSEMBLE_STAT_ENS_THRESH = 0.05 - -# ens.vld_thresh value in the MET config file -ENSEMBLE_STAT_ENS_VLD_THRESH = 1.0 - -ENSEMBLE_STAT_OBS_QUALITY_INC = 0, 1, 2, 3, 9, NA -#ENSEMBLE_STAT_OBS_QUALITY_EXC = - -# ENSEMBLE_STAT_MET_OBS_ERR_TABLE is not required. -# If the variable is not defined, or the value is not set, then the MET -# default is used. -ENSEMBLE_STAT_MET_OBS_ERR_TABLE = {MET_BASE}/table_files/obs_error_table.txt - - -# Used in the MET config file for: regrid to_grid field -ENSEMBLE_STAT_REGRID_TO_GRID = NONE -#ENSEMBLE_STAT_REGRID_METHOD = BILIN -#ENSEMBLE_STAT_REGRID_WIDTH = 2 -#ENSEMBLE_STAT_REGRID_VLD_THRESH = 0.5 -#ENSEMBLE_STAT_REGRID_SHAPE = SQUARE - -ENSEMBLE_STAT_CENSOR_THRESH = -ENSEMBLE_STAT_CENSOR_VAL = - -ENSEMBLE_STAT_MESSAGE_TYPE = {{fieldname_in_met_filedir_names}} -ENSEMBLE_STAT_DUPLICATE_FLAG = NONE -ENSEMBLE_STAT_SKIP_CONST = FALSE -ENSEMBLE_STAT_OBS_ERROR_FLAG = FALSE - -ENSEMBLE_STAT_ENS_SSVAR_BIN_SIZE = 1.0 -ENSEMBLE_STAT_ENS_PHIST_BIN_SIZE = 0.05 - -#ENSEMBLE_STAT_CLIMO_MEAN_FILE_NAME = -#ENSEMBLE_STAT_CLIMO_MEAN_FIELD = -#ENSEMBLE_STAT_CLIMO_MEAN_REGRID_METHOD = -#ENSEMBLE_STAT_CLIMO_MEAN_REGRID_WIDTH = -#ENSEMBLE_STAT_CLIMO_MEAN_REGRID_VLD_THRESH = -#ENSEMBLE_STAT_CLIMO_MEAN_REGRID_SHAPE = -#ENSEMBLE_STAT_CLIMO_MEAN_TIME_INTERP_METHOD = -#ENSEMBLE_STAT_CLIMO_MEAN_MATCH_MONTH = -#ENSEMBLE_STAT_CLIMO_MEAN_DAY_INTERVAL = 31 -#ENSEMBLE_STAT_CLIMO_MEAN_HOUR_INTERVAL = 6 - -#ENSEMBLE_STAT_CLIMO_STDEV_FILE_NAME = -#ENSEMBLE_STAT_CLIMO_STDEV_FIELD = -#ENSEMBLE_STAT_CLIMO_STDEV_REGRID_METHOD = -#ENSEMBLE_STAT_CLIMO_STDEV_REGRID_WIDTH = -#ENSEMBLE_STAT_CLIMO_STDEV_REGRID_VLD_THRESH = -#ENSEMBLE_STAT_CLIMO_STDEV_REGRID_SHAPE = -#ENSEMBLE_STAT_CLIMO_STDEV_TIME_INTERP_METHOD = -#ENSEMBLE_STAT_CLIMO_STDEV_MATCH_MONTH = -#ENSEMBLE_STAT_CLIMO_STDEV_DAY_INTERVAL = 31 -#ENSEMBLE_STAT_CLIMO_STDEV_HOUR_INTERVAL = 6 - - -#ENSEMBLE_STAT_CLIMO_CDF_BINS = 1 -#ENSEMBLE_STAT_CLIMO_CDF_CENTER_BINS = False -ENSEMBLE_STAT_CLIMO_CDF_WRITE_BINS = False - -ENSEMBLE_STAT_MASK_GRID = - -ENSEMBLE_STAT_CI_ALPHA = 0.05 - -ENSEMBLE_STAT_INTERP_FIELD = BOTH -ENSEMBLE_STAT_INTERP_VLD_THRESH = 1.0 -ENSEMBLE_STAT_INTERP_SHAPE = SQUARE -ENSEMBLE_STAT_INTERP_METHOD = NEAREST -ENSEMBLE_STAT_INTERP_WIDTH = 1 - -ENSEMBLE_STAT_OUTPUT_FLAG_ECNT = STAT -ENSEMBLE_STAT_OUTPUT_FLAG_RPS = NONE -ENSEMBLE_STAT_OUTPUT_FLAG_RHIST = STAT -ENSEMBLE_STAT_OUTPUT_FLAG_PHIST = STAT -ENSEMBLE_STAT_OUTPUT_FLAG_ORANK = STAT -ENSEMBLE_STAT_OUTPUT_FLAG_SSVAR = STAT -ENSEMBLE_STAT_OUTPUT_FLAG_RELP = STAT - -ENSEMBLE_STAT_ENSEMBLE_FLAG_RANK = FALSE -ENSEMBLE_STAT_ENSEMBLE_FLAG_WEIGHT = FALSE -# -# Forecast and observation variables and levels as specified in the fcst -# field dictionary of the MET configuration file. 
Specify as FCST_VARn_NAME, -# FCST_VARn_LEVELS, (optional) FCST_VARn_OPTION and OBS_VARn_NAME, -# OBS_VARn_LEVELS, (optional) OBS_VARn_OPTION. -# -FCST_VAR1_NAME = TMP -FCST_VAR1_LEVELS = Z2 -FCST_VAR1_THRESH = ge268, ge273, ge278, ge293, ge298, ge303 -OBS_VAR1_NAME = TMP -OBS_VAR1_LEVELS = Z2 -OBS_VAR1_THRESH = ge268, ge273, ge278, ge293, ge298, ge303 -OBS_VAR1_OPTIONS = obs_error = { flag = TRUE; dist_type = NONE; dist_parm = []; inst_bias_scale = 1.0; inst_bias_offset = 0.0; min = NA; max = NA; } - -FCST_VAR2_NAME = DPT -FCST_VAR2_LEVELS = Z2 -FCST_VAR2_THRESH = ge263, ge268, ge273, ge288, ge293, ge298 -OBS_VAR2_NAME = DPT -OBS_VAR2_LEVELS = Z2 -OBS_VAR2_THRESH = ge263, ge268, ge273, ge288, ge293, ge298 -OBS_VAR2_OPTIONS = obs_error = { flag = TRUE; dist_type = NONE; dist_parm = []; inst_bias_scale = 1.0; inst_bias_offset = 0.0; min = NA; max = NA; } - -FCST_VAR3_NAME = WIND -FCST_VAR3_LEVELS = Z10 -FCST_VAR3_THRESH = ge5, ge10, ge15 -FCST_VAR3_OPTIONS = GRIB2_pdt = 0; ;; Derive instantaneous 10-m wind from U/V components, overriding max 10-m wind. -OBS_VAR3_NAME = WIND -OBS_VAR3_LEVELS = Z10 -OBS_VAR3_THRESH = ge5, ge10, ge15 -OBS_VAR3_OPTIONS = obs_error = { flag = TRUE; dist_type = NONE; dist_parm = []; inst_bias_scale = 1.0; inst_bias_offset = 0.0; min = NA; max = NA; } - -FCST_VAR4_NAME = TCDC -FCST_VAR4_LEVELS = L0 -FCST_VAR4_THRESH = lt25, gt75 -FCST_VAR4_OPTIONS = GRIB_lvl_typ = 200; - GRIB2_ipdtmpl_index=[27]; - GRIB2_ipdtmpl_val=[255]; - interp = { type = [ { method = NEAREST; width = 1; } ]; } -OBS_VAR4_NAME = TCDC -OBS_VAR4_LEVELS = L0 -OBS_VAR4_THRESH = lt25, gt75 - -FCST_VAR5_NAME = VIS -FCST_VAR5_LEVELS = L0 -FCST_VAR5_THRESH = lt1609, lt8045, ge8045 -FCST_VAR5_OPTIONS = interp = { type = [ { method = NEAREST; width = 1; } ]; } -OBS_VAR5_NAME = VIS -OBS_VAR5_LEVELS = L0 -OBS_VAR5_THRESH = lt1609, lt8045, ge8045 - -FCST_VAR6_NAME = HGT -FCST_VAR6_LEVELS = L0 -FCST_VAR6_THRESH = lt152, lt1520, ge914 -FCST_VAR6_OPTIONS = GRIB_lvl_typ = 215; - desc = "CEILING"; -OBS_VAR6_NAME = CEILING -OBS_VAR6_LEVELS = L0 -OBS_VAR6_THRESH = lt152, lt305, ge914 -OBS_VAR6_OPTIONS = GRIB_lvl_typ = 215 - -[dir] -# -# OUTPUT_BASE apparently has to be set to something; it cannot be left -# to its default value. But it is not explicitly used elsewhere in this -# configuration file. -# -OUTPUT_BASE = {{output_base}} -# -# Point observation input directory for EnsembleStat. -# -OBS_ENSEMBLE_STAT_POINT_INPUT_DIR = {{obs_input_dir}} -# -# Grid observation input directory for EnsembleStat. -# -OBS_ENSEMBLE_STAT_GRID_INPUT_DIR = -# -# Forecast model input directory for EnsembleStat. -# -FCST_ENSEMBLE_STAT_INPUT_DIR = {{fcst_input_dir}} -# -# Directory containing climatology mean input to EnsembleStat. Not used -# in this example -# -ENSEMBLE_STAT_CLIMO_MEAN_INPUT_DIR = -# -# Directory containing climatology mean input to EnsembleStat. Not used -# in this example. -# -ENSEMBLE_STAT_CLIMO_STDEV_INPUT_DIR = -# -# Output directory for EnsembleStat. -# -ENSEMBLE_STAT_OUTPUT_DIR = {{output_dir}} -# -# Directory for staging data. -# -STAGING_DIR = {{staging_dir}} - -# End of [dir] section and start of [filename_templates] section. -[filename_templates] -# -# Template for point observation input to EnsembleStat relative to -# OBS_ENSEMBLE_STAT_POINT_INPUT_DIR. -# -OBS_ENSEMBLE_STAT_POINT_INPUT_TEMPLATE = {{obs_input_fn_template}} -# -# Template for gridded observation input to EnsembleStat relative to -# OBS_ENSEMBLE_STAT_GRID_INPUT_DIR. 
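
The {lead?fmt=%H%M%S}L piece of ENSEMBLE_STAT_OUTPUT_PREFIX above renders the forecast lead as hours, minutes, and seconds. A toy Python stand-in for that one placeholder (this is not the METplus template engine, and the model and field names are invented):

    from datetime import timedelta

    def fmt_lead(lead: timedelta) -> str:
        total = int(lead.total_seconds())
        h, rem = divmod(total, 3600)
        m, s = divmod(rem, 60)
        return f"{h:02d}{m:02d}{s:02d}"

    model, fieldname, obtype = "FV3_GFS_v16", "ADPSFC", "ADPSFC"  # invented examples
    print(f"{model}_{fieldname}_{obtype}_{fmt_lead(timedelta(hours=6))}L")
    # FV3_GFS_v16_ADPSFC_ADPSFC_060000L
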
-# -OBS_ENSEMBLE_STAT_GRID_INPUT_TEMPLATE = -# -# Template for forecast input to EnsembleStat relative to -# FCST_ENSEMBLE_STAT_INPUT_DIR. -# -# Note that this can be a comma separated list of ensemble members -# or a single line, - filename wildcard characters may be used, ? or *. -# -FCST_ENSEMBLE_STAT_INPUT_TEMPLATE = {{fcst_input_fn_template}} -# -# Template for output from EnsembleStat relative to ENSEMBLE_STAT_OUTPUT_DIR. -# -ENSEMBLE_STAT_OUTPUT_TEMPLATE = -ENSEMBLE_STAT_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE}_{lead?fmt=%H%M%S}L -# -# Template for climatology input to EnsembleStat relative to -# ENSEMBLE_STAT_CLIMO_MEAN_INPUT_DIR. Not used in this example. -# -ENSEMBLE_STAT_CLIMO_MEAN_INPUT_TEMPLATE = -# -# Template for climatology input to EnsembleStat relative to -# ENSEMBLE_STAT_CLIMO_STDEV_INPUT_DIR. Not used in this example. -# -ENSEMBLE_STAT_CLIMO_STDEV_INPUT_TEMPLATE = -# -# Variable used to specify one or more verification mask files for -# EnsembleStat. Not used for this example. -# -ENSEMBLE_STAT_VERIFICATION_MASK_TEMPLATE = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly diff --git a/parm/metplus/EnsembleStat_ADPUPA.conf b/parm/metplus/EnsembleStat_ADPUPA.conf deleted file mode 100644 index edfda41b89..0000000000 --- a/parm/metplus/EnsembleStat_ADPUPA.conf +++ /dev/null @@ -1,351 +0,0 @@ -# EnsembleStat METplus Configuration - -[config] - -# List of applications (tools) to run. -PROCESS_LIST = EnsembleStat - -# time looping - options are INIT, VALID, RETRO, and REALTIME -# If set to INIT or RETRO: -# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set -# If set to VALID or REALTIME: -# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set -LOOP_BY = INIT - -# Format of INIT_BEG and INIT_END using % items -# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. -# see www.strftime.org for more information -# %Y%m%d%H expands to YYYYMMDDHH -INIT_TIME_FMT = %Y%m%d%H - -# Start time for METplus run - must match INIT_TIME_FMT -INIT_BEG = {{cdate}} - -# End time for METplus run - must match INIT_TIME_FMT -INIT_END = {{cdate}} - -# Increment between METplus runs (in seconds if no units are specified). -# Must be >= 60 seconds. -INIT_INCREMENT = 3600 - -# List of forecast leads to process for each run time (init or valid) -# In hours if units are not specified -# If unset, defaults to 0 (don't loop through forecast leads) -LEAD_SEQ = {{fhr_list}} -# -# Order of loops to process data - Options are times, processes -# Not relevant if only one item is in the PROCESS_LIST -# times = run all wrappers in the PROCESS_LIST for a single run time, then -# increment the run time and run all wrappers again until all times have -# been evaluated. -# processes = run the first wrapper in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST until all -# wrappers have been run -# -LOOP_ORDER = times -# -# Specify the name of the METplus log file. -# -LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} -# -# Specify the location and name of the final METplus conf file. -# -METPLUS_CONF = {ENSEMBLE_STAT_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}} -# -# Location of MET configuration file to pass to EnsembleStat. -# -# References PARM_BASE, which is the location of the parm directory -# corresponding to the ush directory of the run_metplus.py script that -# is called or the value of the environment variable METPLUS_PARM_BASE -# if set. 
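
A minimal sketch of the PARM_BASE lookup described in the comment above, assuming the usual precedence (the METPLUS_PARM_BASE environment variable wins if set, otherwise the parm directory that sits alongside the caller's ush directory); all paths are illustrative:

    import os
    from pathlib import Path

    ush_dir = Path("/path/to/METplus/ush")   # where run_metplus.py lives (illustrative)
    default_parm = ush_dir.parent / "parm"
    parm_base = Path(os.environ.get("METPLUS_PARM_BASE", default_parm))
    print(parm_base / "met_config" / "EnsembleStatConfig_wrapped")
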
-# -ENSEMBLE_STAT_CONFIG_FILE = {PARM_BASE}/met_config/EnsembleStatConfig_wrapped -# -# Name to identify model (forecast) data in output. -# -MODEL = {{vx_fcst_model_name}} - -ENSEMBLE_STAT_DESC = NA -# -# Name to identify observation data in output. -# -OBTYPE = {{obtype}} -# -# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. -# -LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} -# -# Observation data time window(s). -# -OBS_WINDOW_BEGIN = -1799 -OBS_WINDOW_END = 1800 -OBS_ENSEMBLE_STAT_WINDOW_BEGIN = {OBS_WINDOW_BEGIN} -OBS_ENSEMBLE_STAT_WINDOW_END = {OBS_WINDOW_END} - -# number of expected members for ensemble. Should correspond with the -# number of items in the list for FCST_ENSEMBLE_STAT_INPUT_TEMPLATE -ENSEMBLE_STAT_N_MEMBERS = {{num_ens_members}} - -# ens.ens_thresh value in the MET config file -# threshold for ratio of valid files to expected files to allow app to run -ENSEMBLE_STAT_ENS_THRESH = 0.05 - -# ens.vld_thresh value in the MET config file -ENSEMBLE_STAT_ENS_VLD_THRESH = 1.0 - -ENSEMBLE_STAT_OBS_QUALITY_INC = 0, 1, 2, 3, 9, NA -#ENSEMBLE_STAT_OBS_QUALITY_EXC = - -# ENSEMBLE_STAT_MET_OBS_ERR_TABLE is not required. -# If the variable is not defined, or the value is not set, then the MET -# default is used. -ENSEMBLE_STAT_MET_OBS_ERR_TABLE = {MET_BASE}/table_files/obs_error_table.txt - - -# Used in the MET config file for: regrid to_grid field -ENSEMBLE_STAT_REGRID_TO_GRID = NONE -#ENSEMBLE_STAT_REGRID_METHOD = BILIN -#ENSEMBLE_STAT_REGRID_WIDTH = 2 -#ENSEMBLE_STAT_REGRID_VLD_THRESH = 0.5 -#ENSEMBLE_STAT_REGRID_SHAPE = SQUARE - -ENSEMBLE_STAT_CENSOR_THRESH = -ENSEMBLE_STAT_CENSOR_VAL = - -ENSEMBLE_STAT_MESSAGE_TYPE = {{fieldname_in_met_filedir_names}} -ENSEMBLE_STAT_DUPLICATE_FLAG = NONE -ENSEMBLE_STAT_SKIP_CONST = FALSE -ENSEMBLE_STAT_OBS_ERROR_FLAG = FALSE - -ENSEMBLE_STAT_ENS_SSVAR_BIN_SIZE = 1.0 -ENSEMBLE_STAT_ENS_PHIST_BIN_SIZE = 0.05 - -#ENSEMBLE_STAT_CLIMO_MEAN_FILE_NAME = -#ENSEMBLE_STAT_CLIMO_MEAN_FIELD = -#ENSEMBLE_STAT_CLIMO_MEAN_REGRID_METHOD = -#ENSEMBLE_STAT_CLIMO_MEAN_REGRID_WIDTH = -#ENSEMBLE_STAT_CLIMO_MEAN_REGRID_VLD_THRESH = -#ENSEMBLE_STAT_CLIMO_MEAN_REGRID_SHAPE = -#ENSEMBLE_STAT_CLIMO_MEAN_TIME_INTERP_METHOD = -#ENSEMBLE_STAT_CLIMO_MEAN_MATCH_MONTH = -#ENSEMBLE_STAT_CLIMO_MEAN_DAY_INTERVAL = 31 -#ENSEMBLE_STAT_CLIMO_MEAN_HOUR_INTERVAL = 6 - -#ENSEMBLE_STAT_CLIMO_STDEV_FILE_NAME = -#ENSEMBLE_STAT_CLIMO_STDEV_FIELD = -#ENSEMBLE_STAT_CLIMO_STDEV_REGRID_METHOD = -#ENSEMBLE_STAT_CLIMO_STDEV_REGRID_WIDTH = -#ENSEMBLE_STAT_CLIMO_STDEV_REGRID_VLD_THRESH = -#ENSEMBLE_STAT_CLIMO_STDEV_REGRID_SHAPE = -#ENSEMBLE_STAT_CLIMO_STDEV_TIME_INTERP_METHOD = -#ENSEMBLE_STAT_CLIMO_STDEV_MATCH_MONTH = -#ENSEMBLE_STAT_CLIMO_STDEV_DAY_INTERVAL = 31 -#ENSEMBLE_STAT_CLIMO_STDEV_HOUR_INTERVAL = 6 - - -#ENSEMBLE_STAT_CLIMO_CDF_BINS = 1 -#ENSEMBLE_STAT_CLIMO_CDF_CENTER_BINS = False -ENSEMBLE_STAT_CLIMO_CDF_WRITE_BINS = False - -ENSEMBLE_STAT_MASK_GRID = - -ENSEMBLE_STAT_CI_ALPHA = 0.05 - -ENSEMBLE_STAT_INTERP_FIELD = BOTH -ENSEMBLE_STAT_INTERP_VLD_THRESH = 1.0 -ENSEMBLE_STAT_INTERP_SHAPE = SQUARE -ENSEMBLE_STAT_INTERP_METHOD = NEAREST -ENSEMBLE_STAT_INTERP_WIDTH = 1 - -ENSEMBLE_STAT_OUTPUT_FLAG_ECNT = STAT -ENSEMBLE_STAT_OUTPUT_FLAG_RPS = NONE -ENSEMBLE_STAT_OUTPUT_FLAG_RHIST = STAT -ENSEMBLE_STAT_OUTPUT_FLAG_PHIST = STAT -ENSEMBLE_STAT_OUTPUT_FLAG_ORANK = STAT -ENSEMBLE_STAT_OUTPUT_FLAG_SSVAR = STAT -ENSEMBLE_STAT_OUTPUT_FLAG_RELP = STAT - -ENSEMBLE_STAT_ENSEMBLE_FLAG_RANK = FALSE -ENSEMBLE_STAT_ENSEMBLE_FLAG_WEIGHT = FALSE -# -# Forecast and 
observation variables and levels as specified in the fcst -# field dictionary of the MET configuration file. Specify as FCST_VARn_NAME, -# FCST_VARn_LEVELS, (optional) FCST_VARn_OPTION and OBS_VARn_NAME, -# OBS_VARn_LEVELS, (optional) OBS_VARn_OPTION. -# -FCST_VAR1_NAME = TMP -FCST_VAR1_LEVELS = P850 -FCST_VAR1_THRESH = ge288, ge293, ge298 -OBS_VAR1_NAME = TMP -OBS_VAR1_LEVELS = P850 -OBS_VAR1_THRESH = ge288, ge293, ge298 -OBS_VAR1_OPTIONS = obs_error = { flag = TRUE; dist_type = NONE; dist_parm = []; inst_bias_scale = 1.0; inst_bias_offset = 0.0; min = NA; max = NA; } - -FCST_VAR2_NAME = TMP -FCST_VAR2_LEVELS = P700 -FCST_VAR2_THRESH = ge273, ge278, ge283 -OBS_VAR2_NAME = TMP -OBS_VAR2_LEVELS = P700 -OBS_VAR2_THRESH = ge273, ge278, ge283 -OBS_VAR2_OPTIONS = obs_error = { flag = TRUE; dist_type = NONE; dist_parm = []; inst_bias_scale = 1.0; inst_bias_offset = 0.0; min = NA; max = NA; } - -FCST_VAR3_NAME = TMP -FCST_VAR3_LEVELS = P500 -FCST_VAR3_THRESH = ge258, ge263, ge268 -OBS_VAR3_NAME = TMP -OBS_VAR3_LEVELS = P500 -OBS_VAR3_THRESH = ge258, ge263, ge268 -OBS_VAR3_OPTIONS = obs_error = { flag = TRUE; dist_type = NONE; dist_parm = []; inst_bias_scale = 1.0; inst_bias_offset = 0.0; min = NA; max = NA; } - -FCST_VAR4_NAME = DPT -FCST_VAR4_LEVELS = P850 -FCST_VAR4_THRESH = ge273, ge278, ge283 -OBS_VAR4_NAME = DPT -OBS_VAR4_LEVELS = P850 -OBS_VAR4_THRESH = ge273, ge278, ge283 -OBS_VAR4_OPTIONS = obs_error = { flag = TRUE; dist_type = NONE; dist_parm = []; inst_bias_scale = 1.0; inst_bias_offset = 0.0; min = NA; max = NA; } - -FCST_VAR5_NAME = DPT -FCST_VAR5_LEVELS = P700 -FCST_VAR5_THRESH = ge263, ge286, ge273 -OBS_VAR5_NAME = DPT -OBS_VAR5_LEVELS = P700 -OBS_VAR5_THRESH = ge263, ge286, ge273 -OBS_VAR5_OPTIONS = obs_error = { flag = TRUE; dist_type = NONE; dist_parm = []; inst_bias_scale = 1.0; inst_bias_offset = 0.0; min = NA; max = NA; } - -FCST_VAR6_NAME = WIND -FCST_VAR6_LEVELS = P850 -FCST_VAR6_THRESH = ge5, ge10, ge15 -OBS_VAR6_NAME = WIND -OBS_VAR6_LEVELS = P850 -OBS_VAR6_THRESH = ge5, ge10, ge15 -OBS_VAR6_OPTIONS = obs_error = { flag = TRUE; dist_type = NONE; dist_parm = []; inst_bias_scale = 1.0; inst_bias_offset = 0.0; min = NA; max = NA; } - -FCST_VAR7_NAME = WIND -FCST_VAR7_LEVELS = P700 -FCST_VAR7_THRESH = ge10, ge15, ge20 -OBS_VAR7_NAME = WIND -OBS_VAR7_LEVELS = P700 -OBS_VAR7_THRESH = ge10, ge15, ge20 -OBS_VAR7_OPTIONS = obs_error = { flag = TRUE; dist_type = NONE; dist_parm = []; inst_bias_scale = 1.0; inst_bias_offset = 0.0; min = NA; max = NA; } - -FCST_VAR8_NAME = WIND -FCST_VAR8_LEVELS = P500 -FCST_VAR8_THRESH = ge15, ge21, ge26 -OBS_VAR8_NAME = WIND -OBS_VAR8_LEVELS = P500 -OBS_VAR8_THRESH = ge15, ge21, ge26 -OBS_VAR8_OPTIONS = obs_error = { flag = TRUE; dist_type = NONE; dist_parm = []; inst_bias_scale = 1.0; inst_bias_offset = 0.0; min = NA; max = NA; } - -FCST_VAR9_NAME = WIND -FCST_VAR9_LEVELS = P250 -FCST_VAR9_THRESH = ge26, ge31, ge36, ge46, ge62 -OBS_VAR9_NAME = WIND -OBS_VAR9_LEVELS = P250 -OBS_VAR9_THRESH = ge26, ge31, ge36, ge46, ge62 -OBS_VAR9_OPTIONS = obs_error = { flag = TRUE; dist_type = NONE; dist_parm = []; inst_bias_scale = 1.0; inst_bias_offset = 0.0; min = NA; max = NA; } - -FCST_VAR10_NAME = HGT -FCST_VAR10_LEVELS = P500 -FCST_VAR10_THRESH = ge5400, ge5600, ge5880 -OBS_VAR10_NAME = HGT -OBS_VAR10_LEVELS = P500 -OBS_VAR10_THRESH = ge5400, ge5600, ge5880 -OBS_VAR10_OPTIONS = obs_error = { flag = TRUE; dist_type = NONE; dist_parm = []; inst_bias_scale = 1.0; inst_bias_offset = 0.0; min = NA; max = NA; } - -FCST_VAR11_NAME = CAPE -FCST_VAR11_LEVELS = L0 
-FCST_VAR11_THRESH = le1000, gt1000&<2500, ge2500&<4000, ge2500 -FCST_VAR11_OPTIONS = cnt_thresh = [ >0 ]; -OBS_VAR11_NAME = CAPE -OBS_VAR11_LEVELS = L0-100000 -OBS_VAR11_THRESH = le1000, gt1000&<2500, ge2500&<4000, ge2500 -OBS_VAR11_OPTIONS = cnt_thresh = [ >0 ]; - cnt_logic = UNION; - -FCST_VAR12_NAME = HPBL -FCST_VAR12_LEVELS = Z0 -FCST_VAR12_THRESH = lt500, lt1500, gt1500 -OBS_VAR12_NAME = PBL -OBS_VAR12_LEVELS = L0 -OBS_VAR12_THRESH = lt500, lt1500, gt1500 -OBS_VAR12_OPTIONS = desc = "TKE"; - -[dir] -# -# OUTPUT_BASE apparently has to be set to something; it cannot be left -# to its default value. But it is not explicitly used elsewhere in this -# configuration file. -# -OUTPUT_BASE = {{output_base}} -# -# Point observation input directory for EnsembleStat. -# -OBS_ENSEMBLE_STAT_POINT_INPUT_DIR = {{obs_input_dir}} -# -# Grid observation input directory for EnsembleStat. -# -OBS_ENSEMBLE_STAT_GRID_INPUT_DIR = -# -# Forecast model input directory for EnsembleStat. -# -FCST_ENSEMBLE_STAT_INPUT_DIR = {{fcst_input_dir}} -# -# Directory containing climatology mean input to EnsembleStat. Not used -# in this example -# -ENSEMBLE_STAT_CLIMO_MEAN_INPUT_DIR = -# -# Directory containing climatology mean input to EnsembleStat. Not used -# in this example. -# -ENSEMBLE_STAT_CLIMO_STDEV_INPUT_DIR = -# -# Output directory for EnsembleStat. -# -ENSEMBLE_STAT_OUTPUT_DIR = {{output_dir}} -# -# Directory for staging data. -# -STAGING_DIR = {{staging_dir}} - -# End of [dir] section and start of [filename_templates] section. -[filename_templates] -# -# Template for point observation input to EnsembleStat relative to -# OBS_ENSEMBLE_STAT_POINT_INPUT_DIR. -# -OBS_ENSEMBLE_STAT_POINT_INPUT_TEMPLATE = {{obs_input_fn_template}} -# -# Template for gridded observation input to EnsembleStat relative to -# OBS_ENSEMBLE_STAT_GRID_INPUT_DIR. -# -OBS_ENSEMBLE_STAT_GRID_INPUT_TEMPLATE = -# -# Template for forecast input to EnsembleStat relative to -# FCST_ENSEMBLE_STAT_INPUT_DIR. -# -# Note that this can be a comma separated list of ensemble members -# or a single line, - filename wildcard characters may be used, ? or *. -# -FCST_ENSEMBLE_STAT_INPUT_TEMPLATE = {{fcst_input_fn_template}} -# -# Template for output from EnsembleStat relative to ENSEMBLE_STAT_OUTPUT_DIR. -# -ENSEMBLE_STAT_OUTPUT_TEMPLATE = -ENSEMBLE_STAT_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE}_{lead?fmt=%H%M%S}L -# -# Template for climatology input to EnsembleStat relative to -# ENSEMBLE_STAT_CLIMO_MEAN_INPUT_DIR. Not used in this example. -# -ENSEMBLE_STAT_CLIMO_MEAN_INPUT_TEMPLATE = -# -# Template for climatology input to EnsembleStat relative to -# ENSEMBLE_STAT_CLIMO_STDEV_INPUT_DIR. Not used in this example. -# -ENSEMBLE_STAT_CLIMO_STDEV_INPUT_TEMPLATE = -# -# Variable used to specify one or more verification mask files for -# EnsembleStat. Not used for this example. -# -ENSEMBLE_STAT_VERIFICATION_MASK_TEMPLATE = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly diff --git a/parm/metplus/EnsembleStat_APCP.conf b/parm/metplus/EnsembleStat_APCP.conf deleted file mode 100644 index 7604a90bd7..0000000000 --- a/parm/metplus/EnsembleStat_APCP.conf +++ /dev/null @@ -1,258 +0,0 @@ -# EnsembleStat METplus Configuration - -[config] - -# List of applications (tools) to run. 
-PROCESS_LIST = EnsembleStat - -# time looping - options are INIT, VALID, RETRO, and REALTIME -# If set to INIT or RETRO: -# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set -# If set to VALID or REALTIME: -# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set -LOOP_BY = INIT - -# Format of INIT_BEG and INIT_END using % items -# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. -# see www.strftime.org for more information -# %Y%m%d%H expands to YYYYMMDDHH -INIT_TIME_FMT = %Y%m%d%H - -# Start time for METplus run - must match INIT_TIME_FMT -INIT_BEG = {{cdate}} - -# End time for METplus run - must match INIT_TIME_FMT -INIT_END = {{cdate}} - -# Increment between METplus runs (in seconds if no units are specified). -# Must be >= 60 seconds. -INIT_INCREMENT = 3600 - -# List of forecast leads to process for each run time (init or valid) -# In hours if units are not specified -# If unset, defaults to 0 (don't loop through forecast leads) -LEAD_SEQ = {{fhr_list}} -# -# Order of loops to process data - Options are times, processes -# Not relevant if only one item is in the PROCESS_LIST -# times = run all wrappers in the PROCESS_LIST for a single run time, then -# increment the run time and run all wrappers again until all times have -# been evaluated. -# processes = run the first wrapper in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST until all -# wrappers have been run -# -LOOP_ORDER = times -# -# Specify the name of the METplus log file. -# -LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} -# -# Specify the location and name of the final METplus conf file. -# -METPLUS_CONF = {ENSEMBLE_STAT_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}} -# -# Location of MET configuration file to pass to EnsembleStat. -# -# References PARM_BASE, which is the location of the parm directory -# corresponding to the ush directory of the run_metplus.py script that -# is called or the value of the environment variable METPLUS_PARM_BASE -# if set. -# -ENSEMBLE_STAT_CONFIG_FILE = {PARM_BASE}/met_config/EnsembleStatConfig_wrapped -# -# Name to identify model (forecast) data in output. -# -MODEL = {{vx_fcst_model_name}} - -ENSEMBLE_STAT_DESC = NA -# -# Name to identify observation data in output. -# -OBTYPE = {{obtype}} -# -# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. -# -LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} -# -# Observation data time window(s). -# -OBS_FILE_WINDOW_BEGIN = 0 -OBS_FILE_WINDOW_END = 0 -OBS_ENSEMBLE_STAT_WINDOW_BEGIN = 0 -OBS_ENSEMBLE_STAT_WINDOW_END = 0 - -# number of expected members for ensemble. Should correspond with the -# number of items in the list for FCST_ENSEMBLE_STAT_INPUT_TEMPLATE -ENSEMBLE_STAT_N_MEMBERS = {{num_ens_members}} - -# ens.ens_thresh value in the MET config file -# threshold for ratio of valid files to expected files to allow app to run -ENSEMBLE_STAT_ENS_THRESH = 0.05 - -# ens.vld_thresh value in the MET config file -ENSEMBLE_STAT_ENS_VLD_THRESH = 1.0 - -# ENSEMBLE_STAT_MET_OBS_ERR_TABLE is not required. -# If the variable is not defined, or the value is not set, then the MET -# default is used. 
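
The ens_thresh setting above (threshold for the ratio of valid files to expected files to allow the app to run) amounts to a simple gate evaluated before the tool computes any statistics. A back-of-the-envelope sketch, not MET source:

    def may_run(n_valid_files: int, n_expected: int, ens_thresh: float = 0.05) -> bool:
        """Mirror of the ens.ens_thresh check: run only if enough ensemble
        member files were actually found."""
        return n_expected > 0 and (n_valid_files / n_expected) >= ens_thresh

    print(may_run(1, 10))   # True: 0.10 >= 0.05, one member out of ten suffices
    print(may_run(0, 10))   # False: no valid member files at all
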
-ENSEMBLE_STAT_MET_OBS_ERR_TABLE = {MET_BASE}/table_files/obs_error_table.txt - - -# Used in the MET config file for: regrid to_grid field -ENSEMBLE_STAT_REGRID_TO_GRID = FCST -ENSEMBLE_STAT_REGRID_METHOD = BUDGET -ENSEMBLE_STAT_REGRID_WIDTH = 2 -ENSEMBLE_STAT_REGRID_VLD_THRESH = 0.5 -ENSEMBLE_STAT_REGRID_SHAPE = SQUARE - -ENSEMBLE_STAT_CENSOR_THRESH = -ENSEMBLE_STAT_CENSOR_VAL = - -ENSEMBLE_STAT_MESSAGE_TYPE = -ENSEMBLE_STAT_DUPLICATE_FLAG = UNIQUE -ENSEMBLE_STAT_SKIP_CONST = TRUE -ENSEMBLE_STAT_OBS_ERROR_FLAG = TRUE - -ENSEMBLE_STAT_ENS_SSVAR_BIN_SIZE = 1.0 -ENSEMBLE_STAT_ENS_PHIST_BIN_SIZE = 0.05 - -#ENSEMBLE_STAT_CLIMO_MEAN_FILE_NAME = -#ENSEMBLE_STAT_CLIMO_MEAN_FIELD = -#ENSEMBLE_STAT_CLIMO_MEAN_REGRID_METHOD = -#ENSEMBLE_STAT_CLIMO_MEAN_REGRID_WIDTH = -#ENSEMBLE_STAT_CLIMO_MEAN_REGRID_VLD_THRESH = -#ENSEMBLE_STAT_CLIMO_MEAN_REGRID_SHAPE = -#ENSEMBLE_STAT_CLIMO_MEAN_TIME_INTERP_METHOD = -#ENSEMBLE_STAT_CLIMO_MEAN_MATCH_MONTH = -#ENSEMBLE_STAT_CLIMO_MEAN_DAY_INTERVAL = 31 -#ENSEMBLE_STAT_CLIMO_MEAN_HOUR_INTERVAL = 6 - -#ENSEMBLE_STAT_CLIMO_STDEV_FILE_NAME = -#ENSEMBLE_STAT_CLIMO_STDEV_FIELD = -#ENSEMBLE_STAT_CLIMO_STDEV_REGRID_METHOD = -#ENSEMBLE_STAT_CLIMO_STDEV_REGRID_WIDTH = -#ENSEMBLE_STAT_CLIMO_STDEV_REGRID_VLD_THRESH = -#ENSEMBLE_STAT_CLIMO_STDEV_REGRID_SHAPE = -#ENSEMBLE_STAT_CLIMO_STDEV_TIME_INTERP_METHOD = -#ENSEMBLE_STAT_CLIMO_STDEV_MATCH_MONTH = -#ENSEMBLE_STAT_CLIMO_STDEV_DAY_INTERVAL = 31 -#ENSEMBLE_STAT_CLIMO_STDEV_HOUR_INTERVAL = 6 - - -ENSEMBLE_STAT_CLIMO_CDF_BINS = 1 -ENSEMBLE_STAT_CLIMO_CDF_CENTER_BINS = False -ENSEMBLE_STAT_CLIMO_CDF_WRITE_BINS = False - -ENSEMBLE_STAT_MASK_GRID = - -ENSEMBLE_STAT_CI_ALPHA = 0.05 - -ENSEMBLE_STAT_INTERP_FIELD = BOTH -ENSEMBLE_STAT_INTERP_VLD_THRESH = 1.0 -ENSEMBLE_STAT_INTERP_SHAPE = SQUARE -ENSEMBLE_STAT_INTERP_METHOD = NEAREST -ENSEMBLE_STAT_INTERP_WIDTH = 1 - -ENSEMBLE_STAT_OUTPUT_FLAG_ECNT = STAT -ENSEMBLE_STAT_OUTPUT_FLAG_RPS = NONE -ENSEMBLE_STAT_OUTPUT_FLAG_RHIST = STAT -ENSEMBLE_STAT_OUTPUT_FLAG_PHIST = STAT -ENSEMBLE_STAT_OUTPUT_FLAG_ORANK = STAT -ENSEMBLE_STAT_OUTPUT_FLAG_SSVAR = STAT -ENSEMBLE_STAT_OUTPUT_FLAG_RELP = STAT - -ENSEMBLE_STAT_ENSEMBLE_FLAG_RANK = FALSE -ENSEMBLE_STAT_ENSEMBLE_FLAG_WEIGHT = FALSE -# -# Forecast and observation variables and levels as specified in the fcst -# field dictionary of the MET configuration file. Specify as FCST_VARn_NAME, -# FCST_VARn_LEVELS, (optional) FCST_VARn_OPTION and OBS_VARn_NAME, -# OBS_VARn_LEVELS, (optional) OBS_VARn_OPTION. -# -FCST_VAR1_NAME = {{fieldname_in_met_output}}_{{accum_hh}} -FCST_VAR1_LEVELS = A{{accum_hh}} -FCST_VAR1_OPTIONS = ens_ssvar_bin_size = 50.0; - ens_phist_bin_size = 0.05; -OBS_VAR1_NAME = {{fieldname_in_met_output}}_{{accum_hh}} -OBS_VAR1_LEVELS = A{{accum_hh}} -OBS_VAR1_OPTIONS = {FCST_VAR1_OPTIONS} - -[dir] -# -# OUTPUT_BASE apparently has to be set to something; it cannot be left -# to its default value. But it is not explicitly used elsewhere in this -# configuration file. -# -OUTPUT_BASE = {{output_base}} -# -# Point observation input directory for EnsembleStat. -# -OBS_ENSEMBLE_STAT_POINT_INPUT_DIR = -# -# Grid observation input directory for EnsembleStat. -# -OBS_ENSEMBLE_STAT_GRID_INPUT_DIR = {{obs_input_dir}} -# -# Forecast model input directory for EnsembleStat. -# -FCST_ENSEMBLE_STAT_INPUT_DIR = {{fcst_input_dir}} -# -# Directory containing climatology mean input to EnsembleStat. Not used -# in this example -# -ENSEMBLE_STAT_CLIMO_MEAN_INPUT_DIR = -# -# Directory containing climatology mean input to EnsembleStat. 
Not used -# in this example. -# -ENSEMBLE_STAT_CLIMO_STDEV_INPUT_DIR = -# -# Output directory for EnsembleStat. -# -ENSEMBLE_STAT_OUTPUT_DIR = {{output_dir}} -# -# Directory for staging data. -# -STAGING_DIR = {{staging_dir}} - -# End of [dir] section and start of [filename_templates] section. -[filename_templates] -# -# Template for point observation input to EnsembleStat relative to -# OBS_ENSEMBLE_STAT_POINT_INPUT_DIR. -# -OBS_ENSEMBLE_STAT_POINT_INPUT_TEMPLATE = -# -# Template for gridded observation input to EnsembleStat relative to -# OBS_ENSEMBLE_STAT_GRID_INPUT_DIR. -# -OBS_ENSEMBLE_STAT_GRID_INPUT_TEMPLATE = {{obs_input_fn_template}} -# -# Template for forecast input to EnsembleStat relative to -# FCST_ENSEMBLE_STAT_INPUT_DIR. -# -# Note that this can be a comma separated list of ensemble members -# or a single line, - filename wildcard characters may be used, ? or *. -# -FCST_ENSEMBLE_STAT_INPUT_TEMPLATE = {{fcst_input_fn_template}} -# -# Template for output from EnsembleStat relative to ENSEMBLE_STAT_OUTPUT_DIR. -# -ENSEMBLE_STAT_OUTPUT_TEMPLATE = -ENSEMBLE_STAT_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE}_{lead?fmt=%H%M%S}L -# -# Template for climatology input to EnsembleStat relative to -# ENSEMBLE_STAT_CLIMO_MEAN_INPUT_DIR. Not used in this example. -# -ENSEMBLE_STAT_CLIMO_MEAN_INPUT_TEMPLATE = -# -# Template for climatology input to EnsembleStat relative to -# ENSEMBLE_STAT_CLIMO_STDEV_INPUT_DIR. Not used in this example. -# -ENSEMBLE_STAT_CLIMO_STDEV_INPUT_TEMPLATE = -# -# Variable used to specify one or more verification mask files for -# EnsembleStat. Not used for this example. -# -ENSEMBLE_STAT_VERIFICATION_MASK_TEMPLATE = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly diff --git a/parm/metplus/EnsembleStat_ASNOW.conf b/parm/metplus/EnsembleStat_ASNOW.conf deleted file mode 100644 index 8897b03295..0000000000 --- a/parm/metplus/EnsembleStat_ASNOW.conf +++ /dev/null @@ -1,259 +0,0 @@ -# EnsembleStat METplus Configuration - -[config] - -# List of applications (tools) to run. -PROCESS_LIST = EnsembleStat - -# time looping - options are INIT, VALID, RETRO, and REALTIME -# If set to INIT or RETRO: -# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set -# If set to VALID or REALTIME: -# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set -LOOP_BY = INIT - -# Format of INIT_BEG and INIT_END using % items -# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. -# see www.strftime.org for more information -# %Y%m%d%H expands to YYYYMMDDHH -INIT_TIME_FMT = %Y%m%d%H - -# Start time for METplus run - must match INIT_TIME_FMT -INIT_BEG = {{cdate}} - -# End time for METplus run - must match INIT_TIME_FMT -INIT_END = {{cdate}} - -# Increment between METplus runs (in seconds if no units are specified). -# Must be >= 60 seconds. -INIT_INCREMENT = 3600 - -# List of forecast leads to process for each run time (init or valid) -# In hours if units are not specified -# If unset, defaults to 0 (don't loop through forecast leads) -LEAD_SEQ = {{fhr_list}} -# -# Order of loops to process data - Options are times, processes -# Not relevant if only one item is in the PROCESS_LIST -# times = run all wrappers in the PROCESS_LIST for a single run time, then -# increment the run time and run all wrappers again until all times have -# been evaluated. 
-# processes = run the first wrapper in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST until all -# wrappers have been run -# -LOOP_ORDER = times -# -# Specify the name of the METplus log file. -# -LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} -# -# Specify the location and name of the final METplus conf file. -# -METPLUS_CONF = {ENSEMBLE_STAT_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}} -# -# Location of MET configuration file to pass to EnsembleStat. -# -# References PARM_BASE, which is the location of the parm directory -# corresponding to the ush directory of the run_metplus.py script that -# is called or the value of the environment variable METPLUS_PARM_BASE -# if set. -# -ENSEMBLE_STAT_CONFIG_FILE = {PARM_BASE}/met_config/EnsembleStatConfig_wrapped -# -# Name to identify model (forecast) data in output. -# -MODEL = {{vx_fcst_model_name}} - -ENSEMBLE_STAT_DESC = NA -# -# Name to identify observation data in output. -# -OBTYPE = {{obtype}} -# -# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. -# -LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} -# -# Observation data time window(s). -# -OBS_FILE_WINDOW_BEGIN = 0 -OBS_FILE_WINDOW_END = 0 -OBS_ENSEMBLE_STAT_WINDOW_BEGIN = 0 -OBS_ENSEMBLE_STAT_WINDOW_END = 0 - -# number of expected members for ensemble. Should correspond with the -# number of items in the list for FCST_ENSEMBLE_STAT_INPUT_TEMPLATE -ENSEMBLE_STAT_N_MEMBERS = {{num_ens_members}} - -# ens.ens_thresh value in the MET config file -# threshold for ratio of valid files to expected files to allow app to run -ENSEMBLE_STAT_ENS_THRESH = 0.05 - -# ens.vld_thresh value in the MET config file -ENSEMBLE_STAT_ENS_VLD_THRESH = 1.0 - -# ENSEMBLE_STAT_MET_OBS_ERR_TABLE is not required. -# If the variable is not defined, or the value is not set, then the MET -# default is used. 
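
The times/processes choice for LOOP_ORDER explained above is just two nestings of the same pair of loops, and it is moot here because PROCESS_LIST has a single entry. A schematic sketch with placeholder run times and tools:

    run_times = ["2019061500_f03", "2019061500_f06"]   # placeholders
    process_list = ["EnsembleStat", "GridStat"]        # placeholders

    def run(tool: str, run_time: str) -> None:
        print(f"{tool} @ {run_time}")

    loop_order = "times"
    if loop_order == "times":
        # all wrappers for one run time, then advance the run time
        for run_time in run_times:
            for tool in process_list:
                run(tool, run_time)
    else:
        # "processes": all run times for one wrapper, then the next wrapper
        for tool in process_list:
            for run_time in run_times:
                run(tool, run_time)
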
-ENSEMBLE_STAT_MET_OBS_ERR_TABLE = {MET_BASE}/table_files/obs_error_table.txt - - -# Used in the MET config file for: regrid to_grid field -ENSEMBLE_STAT_REGRID_TO_GRID = FCST -ENSEMBLE_STAT_REGRID_METHOD = BUDGET -ENSEMBLE_STAT_REGRID_WIDTH = 2 -ENSEMBLE_STAT_REGRID_VLD_THRESH = 0.5 -ENSEMBLE_STAT_REGRID_SHAPE = SQUARE - -ENSEMBLE_STAT_CENSOR_THRESH = -ENSEMBLE_STAT_CENSOR_VAL = - -ENSEMBLE_STAT_MESSAGE_TYPE = -ENSEMBLE_STAT_DUPLICATE_FLAG = UNIQUE -ENSEMBLE_STAT_SKIP_CONST = TRUE -ENSEMBLE_STAT_OBS_ERROR_FLAG = TRUE - -ENSEMBLE_STAT_ENS_SSVAR_BIN_SIZE = 1.0 -ENSEMBLE_STAT_ENS_PHIST_BIN_SIZE = 0.05 - -#ENSEMBLE_STAT_CLIMO_MEAN_FILE_NAME = -#ENSEMBLE_STAT_CLIMO_MEAN_FIELD = -#ENSEMBLE_STAT_CLIMO_MEAN_REGRID_METHOD = -#ENSEMBLE_STAT_CLIMO_MEAN_REGRID_WIDTH = -#ENSEMBLE_STAT_CLIMO_MEAN_REGRID_VLD_THRESH = -#ENSEMBLE_STAT_CLIMO_MEAN_REGRID_SHAPE = -#ENSEMBLE_STAT_CLIMO_MEAN_TIME_INTERP_METHOD = -#ENSEMBLE_STAT_CLIMO_MEAN_MATCH_MONTH = -#ENSEMBLE_STAT_CLIMO_MEAN_DAY_INTERVAL = 31 -#ENSEMBLE_STAT_CLIMO_MEAN_HOUR_INTERVAL = 6 - -#ENSEMBLE_STAT_CLIMO_STDEV_FILE_NAME = -#ENSEMBLE_STAT_CLIMO_STDEV_FIELD = -#ENSEMBLE_STAT_CLIMO_STDEV_REGRID_METHOD = -#ENSEMBLE_STAT_CLIMO_STDEV_REGRID_WIDTH = -#ENSEMBLE_STAT_CLIMO_STDEV_REGRID_VLD_THRESH = -#ENSEMBLE_STAT_CLIMO_STDEV_REGRID_SHAPE = -#ENSEMBLE_STAT_CLIMO_STDEV_TIME_INTERP_METHOD = -#ENSEMBLE_STAT_CLIMO_STDEV_MATCH_MONTH = -#ENSEMBLE_STAT_CLIMO_STDEV_DAY_INTERVAL = 31 -#ENSEMBLE_STAT_CLIMO_STDEV_HOUR_INTERVAL = 6 - - -ENSEMBLE_STAT_CLIMO_CDF_BINS = 1 -ENSEMBLE_STAT_CLIMO_CDF_CENTER_BINS = False -ENSEMBLE_STAT_CLIMO_CDF_WRITE_BINS = False - -ENSEMBLE_STAT_MASK_GRID = - -ENSEMBLE_STAT_CI_ALPHA = 0.05 - -ENSEMBLE_STAT_INTERP_FIELD = BOTH -ENSEMBLE_STAT_INTERP_VLD_THRESH = 1.0 -ENSEMBLE_STAT_INTERP_SHAPE = SQUARE -ENSEMBLE_STAT_INTERP_METHOD = NEAREST -ENSEMBLE_STAT_INTERP_WIDTH = 1 - -ENSEMBLE_STAT_OUTPUT_FLAG_ECNT = STAT -ENSEMBLE_STAT_OUTPUT_FLAG_RPS = NONE -ENSEMBLE_STAT_OUTPUT_FLAG_RHIST = STAT -ENSEMBLE_STAT_OUTPUT_FLAG_PHIST = STAT -ENSEMBLE_STAT_OUTPUT_FLAG_ORANK = STAT -ENSEMBLE_STAT_OUTPUT_FLAG_SSVAR = STAT -ENSEMBLE_STAT_OUTPUT_FLAG_RELP = STAT - -ENSEMBLE_STAT_ENSEMBLE_FLAG_RANK = FALSE -ENSEMBLE_STAT_ENSEMBLE_FLAG_WEIGHT = FALSE -# -# Forecast and observation variables and levels as specified in the fcst -# field dictionary of the MET configuration file. Specify as FCST_VARn_NAME, -# FCST_VARn_LEVELS, (optional) FCST_VARn_OPTION and OBS_VARn_NAME, -# OBS_VARn_LEVELS, (optional) OBS_VARn_OPTION. -# -FCST_VAR1_NAME = {{fieldname_in_met_output}}_{{accum_hh}} -FCST_VAR1_LEVELS = A{{accum_hh}} -FCST_VAR1_OPTIONS = ens_ssvar_bin_size = 50.0; - ens_phist_bin_size = 0.05; -OBS_VAR1_NAME = {{fieldname_in_obs_input}} -OBS_VAR1_LEVELS = A{{accum_hh}} -OBS_VAR1_OPTIONS = {FCST_VAR1_OPTIONS}; - convert(x) = 100.0*x; - -[dir] -# -# OUTPUT_BASE apparently has to be set to something; it cannot be left -# to its default value. But it is not explicitly used elsewhere in this -# configuration file. -# -OUTPUT_BASE = {{output_base}} -# -# Point observation input directory for EnsembleStat. -# -OBS_ENSEMBLE_STAT_POINT_INPUT_DIR = -# -# Grid observation input directory for EnsembleStat. -# -OBS_ENSEMBLE_STAT_GRID_INPUT_DIR = {{obs_input_dir}} -# -# Forecast model input directory for EnsembleStat. -# -FCST_ENSEMBLE_STAT_INPUT_DIR = {{fcst_input_dir}} -# -# Directory containing climatology mean input to EnsembleStat. Not used -# in this example -# -ENSEMBLE_STAT_CLIMO_MEAN_INPUT_DIR = -# -# Directory containing climatology mean input to EnsembleStat. 
Not used -# in this example. -# -ENSEMBLE_STAT_CLIMO_STDEV_INPUT_DIR = -# -# Output directory for EnsembleStat. -# -ENSEMBLE_STAT_OUTPUT_DIR = {{output_dir}} -# -# Directory for staging data. -# -STAGING_DIR = {{staging_dir}} - -# End of [dir] section and start of [filename_templates] section. -[filename_templates] -# -# Template for point observation input to EnsembleStat relative to -# OBS_ENSEMBLE_STAT_POINT_INPUT_DIR. -# -OBS_ENSEMBLE_STAT_POINT_INPUT_TEMPLATE = -# -# Template for gridded observation input to EnsembleStat relative to -# OBS_ENSEMBLE_STAT_GRID_INPUT_DIR. -# -OBS_ENSEMBLE_STAT_GRID_INPUT_TEMPLATE = {{obs_input_fn_template}} -# -# Template for forecast input to EnsembleStat relative to -# FCST_ENSEMBLE_STAT_INPUT_DIR. -# -# Note that this can be a comma separated list of ensemble members -# or a single line, - filename wildcard characters may be used, ? or *. -# -FCST_ENSEMBLE_STAT_INPUT_TEMPLATE = {{fcst_input_fn_template}} -# -# Template for output from EnsembleStat relative to ENSEMBLE_STAT_OUTPUT_DIR. -# -ENSEMBLE_STAT_OUTPUT_TEMPLATE = -ENSEMBLE_STAT_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE}_{lead?fmt=%H%M%S}L -# -# Template for climatology input to EnsembleStat relative to -# ENSEMBLE_STAT_CLIMO_MEAN_INPUT_DIR. Not used in this example. -# -ENSEMBLE_STAT_CLIMO_MEAN_INPUT_TEMPLATE = -# -# Template for climatology input to EnsembleStat relative to -# ENSEMBLE_STAT_CLIMO_STDEV_INPUT_DIR. Not used in this example. -# -ENSEMBLE_STAT_CLIMO_STDEV_INPUT_TEMPLATE = -# -# Variable used to specify one or more verification mask files for -# EnsembleStat. Not used for this example. -# -ENSEMBLE_STAT_VERIFICATION_MASK_TEMPLATE = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly diff --git a/parm/metplus/EnsembleStat_REFC.conf b/parm/metplus/EnsembleStat_REFC.conf deleted file mode 100644 index 6de6eddeb8..0000000000 --- a/parm/metplus/EnsembleStat_REFC.conf +++ /dev/null @@ -1,265 +0,0 @@ -# EnsembleStat METplus Configuration - -[config] - -# List of applications (tools) to run. -PROCESS_LIST = EnsembleStat - -# time looping - options are INIT, VALID, RETRO, and REALTIME -# If set to INIT or RETRO: -# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set -# If set to VALID or REALTIME: -# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set -LOOP_BY = INIT - -# Format of INIT_BEG and INIT_END using % items -# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. -# see www.strftime.org for more information -# %Y%m%d%H expands to YYYYMMDDHH -INIT_TIME_FMT = %Y%m%d%H - -# Start time for METplus run - must match INIT_TIME_FMT -INIT_BEG = {{cdate}} - -# End time for METplus run - must match INIT_TIME_FMT -INIT_END = {{cdate}} - -# Increment between METplus runs (in seconds if no units are specified). -# Must be >= 60 seconds. -INIT_INCREMENT = 3600 - -# List of forecast leads to process for each run time (init or valid) -# In hours if units are not specified -# If unset, defaults to 0 (don't loop through forecast leads) -LEAD_SEQ = {{fhr_list}} -# -# Order of loops to process data - Options are times, processes -# Not relevant if only one item is in the PROCESS_LIST -# times = run all wrappers in the PROCESS_LIST for a single run time, then -# increment the run time and run all wrappers again until all times have -# been evaluated. 
-# processes = run the first wrapper in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST until all -# wrappers have been run -# -LOOP_ORDER = times -# -# Specify the name of the METplus log file. -# -LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} -# -# Specify the location and name of the final METplus conf file. -# -METPLUS_CONF = {ENSEMBLE_STAT_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}} -# -# Location of MET configuration file to pass to EnsembleStat. -# -# References PARM_BASE, which is the location of the parm directory -# corresponding to the ush directory of the run_metplus.py script that -# is called or the value of the environment variable METPLUS_PARM_BASE -# if set. -# -ENSEMBLE_STAT_CONFIG_FILE = {PARM_BASE}/met_config/EnsembleStatConfig_wrapped -# -# Name to identify model (forecast) data in output. -# -MODEL = {{vx_fcst_model_name}} - -ENSEMBLE_STAT_DESC = NA -# -# Name to identify observation data in output. -# -OBTYPE = {{obtype}} -# -# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. -# -LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} -# -# Observation data time window(s). -# -OBS_FILE_WINDOW_BEGIN = -300 -OBS_FILE_WINDOW_END = 300 -OBS_ENSEMBLE_STAT_WINDOW_BEGIN = 0 -OBS_ENSEMBLE_STAT_WINDOW_END = 0 - -# number of expected members for ensemble. Should correspond with the -# number of items in the list for FCST_ENSEMBLE_STAT_INPUT_TEMPLATE -ENSEMBLE_STAT_N_MEMBERS = {{num_ens_members}} - -# ens.ens_thresh value in the MET config file -# threshold for ratio of valid files to expected files to allow app to run -ENSEMBLE_STAT_ENS_THRESH = 0.05 - -# ens.vld_thresh value in the MET config file -ENSEMBLE_STAT_ENS_VLD_THRESH = 1.0 - -# ENSEMBLE_STAT_MET_OBS_ERR_TABLE is not required. -# If the variable is not defined, or the value is not set, then the MET -# default is used. -ENSEMBLE_STAT_MET_OBS_ERR_TABLE = - - -# Used in the MET config file for: regrid to_grid field -ENSEMBLE_STAT_REGRID_TO_GRID = FCST -ENSEMBLE_STAT_REGRID_METHOD = BUDGET -ENSEMBLE_STAT_REGRID_WIDTH = 2 -ENSEMBLE_STAT_REGRID_VLD_THRESH = 0.5 -ENSEMBLE_STAT_REGRID_SHAPE = SQUARE - -ENSEMBLE_STAT_CENSOR_THRESH = -ENSEMBLE_STAT_CENSOR_VAL = - -# Should this parameter be set to something other than ADPSFC (maybe -# just leave empty) since we are not verifying surface fields? 
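
The file window above (OBS_FILE_WINDOW_BEGIN = -300, OBS_FILE_WINDOW_END = 300) lets METplus accept an input observation file whose timestamp is within five minutes of the valid time, which matters for gridded obs such as radar mosaics that are not written exactly on the hour. A toy filter over made-up file-time offsets:

    candidate_offsets_s = [-420, -120, 60, 360]   # file time minus valid time (made up)
    begin_s, end_s = -300, 300                    # OBS_FILE_WINDOW_BEGIN/END

    usable = [off for off in candidate_offsets_s if begin_s <= off <= end_s]
    closest = min(usable, key=abs) if usable else None
    print(usable, closest)  # [-120, 60] 60
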
-ENSEMBLE_STAT_MESSAGE_TYPE = ADPSFC -ENSEMBLE_STAT_DUPLICATE_FLAG = NONE -ENSEMBLE_STAT_SKIP_CONST = TRUE -ENSEMBLE_STAT_OBS_ERROR_FLAG = FALSE - -ENSEMBLE_STAT_ENS_SSVAR_BIN_SIZE = 1.0 -ENSEMBLE_STAT_ENS_PHIST_BIN_SIZE = 0.05 - -#ENSEMBLE_STAT_CLIMO_MEAN_FILE_NAME = -#ENSEMBLE_STAT_CLIMO_MEAN_FIELD = -#ENSEMBLE_STAT_CLIMO_MEAN_REGRID_METHOD = -#ENSEMBLE_STAT_CLIMO_MEAN_REGRID_WIDTH = -#ENSEMBLE_STAT_CLIMO_MEAN_REGRID_VLD_THRESH = -#ENSEMBLE_STAT_CLIMO_MEAN_REGRID_SHAPE = -#ENSEMBLE_STAT_CLIMO_MEAN_TIME_INTERP_METHOD = -#ENSEMBLE_STAT_CLIMO_MEAN_MATCH_MONTH = -#ENSEMBLE_STAT_CLIMO_MEAN_DAY_INTERVAL = 31 -#ENSEMBLE_STAT_CLIMO_MEAN_HOUR_INTERVAL = 6 - -#ENSEMBLE_STAT_CLIMO_STDEV_FILE_NAME = -#ENSEMBLE_STAT_CLIMO_STDEV_FIELD = -#ENSEMBLE_STAT_CLIMO_STDEV_REGRID_METHOD = -#ENSEMBLE_STAT_CLIMO_STDEV_REGRID_WIDTH = -#ENSEMBLE_STAT_CLIMO_STDEV_REGRID_VLD_THRESH = -#ENSEMBLE_STAT_CLIMO_STDEV_REGRID_SHAPE = -#ENSEMBLE_STAT_CLIMO_STDEV_TIME_INTERP_METHOD = -#ENSEMBLE_STAT_CLIMO_STDEV_MATCH_MONTH = -#ENSEMBLE_STAT_CLIMO_STDEV_DAY_INTERVAL = 31 -#ENSEMBLE_STAT_CLIMO_STDEV_HOUR_INTERVAL = 6 - - -ENSEMBLE_STAT_CLIMO_CDF_BINS = 1 -ENSEMBLE_STAT_CLIMO_CDF_CENTER_BINS = False -ENSEMBLE_STAT_CLIMO_CDF_WRITE_BINS = False - -ENSEMBLE_STAT_MASK_GRID = FULL - -ENSEMBLE_STAT_CI_ALPHA = 0.05 - -ENSEMBLE_STAT_INTERP_FIELD = BOTH -ENSEMBLE_STAT_INTERP_VLD_THRESH = 1.0 -ENSEMBLE_STAT_INTERP_SHAPE = SQUARE -ENSEMBLE_STAT_INTERP_METHOD = NEAREST -ENSEMBLE_STAT_INTERP_WIDTH = 1 - -ENSEMBLE_STAT_OUTPUT_FLAG_ECNT = STAT -ENSEMBLE_STAT_OUTPUT_FLAG_RPS = NONE -ENSEMBLE_STAT_OUTPUT_FLAG_RHIST = STAT -ENSEMBLE_STAT_OUTPUT_FLAG_PHIST = STAT -ENSEMBLE_STAT_OUTPUT_FLAG_ORANK = STAT -ENSEMBLE_STAT_OUTPUT_FLAG_SSVAR = STAT -ENSEMBLE_STAT_OUTPUT_FLAG_RELP = STAT - -ENSEMBLE_STAT_ENSEMBLE_FLAG_RANK = FALSE -ENSEMBLE_STAT_ENSEMBLE_FLAG_WEIGHT = FALSE -# -# Forecast and observation variables and levels as specified in the fcst -# field dictionary of the MET configuration file. Specify as FCST_VARn_NAME, -# FCST_VARn_LEVELS, (optional) FCST_VARn_OPTION and OBS_VARn_NAME, -# OBS_VARn_LEVELS, (optional) OBS_VARn_OPTION. -# -FCST_VAR1_NAME = {{fieldname_in_fcst_input}} -FCST_VAR1_LEVELS = L0 -FCST_VAR1_OPTIONS = ens_ssvar_bin_size = 50.0; - ens_phist_bin_size = 0.05; -OBS_VAR1_NAME = {{fieldname_in_obs_input}} -OBS_VAR1_LEVELS = Z500 -OBS_VAR1_OPTIONS = censor_thresh = lt-20; - censor_val = -20.0; - cnt_thresh = [ >15 ]; - cnt_logic = UNION; - ens_ssvar_bin_size = 50.0; - ens_phist_bin_size = 0.05; - -[dir] -# -# OUTPUT_BASE apparently has to be set to something; it cannot be left -# to its default value. But it is not explicitly used elsewhere in this -# configuration file. -# -OUTPUT_BASE = {{output_base}} -# -# Point observation input directory for EnsembleStat. -# -OBS_ENSEMBLE_STAT_POINT_INPUT_DIR = -# -# Grid observation input directory for EnsembleStat. -# -OBS_ENSEMBLE_STAT_GRID_INPUT_DIR = {{obs_input_dir}} -# -# Forecast model input directory for EnsembleStat. -# -FCST_ENSEMBLE_STAT_INPUT_DIR = {{fcst_input_dir}} -# -# Directory containing climatology mean input to EnsembleStat. Not used -# in this example -# -ENSEMBLE_STAT_CLIMO_MEAN_INPUT_DIR = -# -# Directory containing climatology mean input to EnsembleStat. Not used -# in this example. -# -ENSEMBLE_STAT_CLIMO_STDEV_INPUT_DIR = -# -# Output directory for EnsembleStat. -# -ENSEMBLE_STAT_OUTPUT_DIR = {{output_dir}} -# -# Directory for staging data. -# -STAGING_DIR = {{staging_dir}} - -# End of [dir] section and start of [filename_templates] section. 
-[filename_templates] -# -# Template for point observation input to EnsembleStat relative to -# OBS_ENSEMBLE_STAT_POINT_INPUT_DIR. -# -OBS_ENSEMBLE_STAT_POINT_INPUT_TEMPLATE = -# -# Template for gridded observation input to EnsembleStat relative to -# OBS_ENSEMBLE_STAT_GRID_INPUT_DIR. -# -OBS_ENSEMBLE_STAT_GRID_INPUT_TEMPLATE = {{obs_input_fn_template}} -# -# Template for forecast input to EnsembleStat relative to -# FCST_ENSEMBLE_STAT_INPUT_DIR. -# -# Note that this can be a comma separated list of ensemble members -# or a single line, - filename wildcard characters may be used, ? or *. -# -FCST_ENSEMBLE_STAT_INPUT_TEMPLATE = {{fcst_input_fn_template}} -# -# Template for output from EnsembleStat relative to ENSEMBLE_STAT_OUTPUT_DIR. -# -ENSEMBLE_STAT_OUTPUT_TEMPLATE = -ENSEMBLE_STAT_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE}_{lead?fmt=%H%M%S}L -# -# Template for climatology input to EnsembleStat relative to -# ENSEMBLE_STAT_CLIMO_MEAN_INPUT_DIR. Not used in this example. -# -ENSEMBLE_STAT_CLIMO_MEAN_INPUT_TEMPLATE = -# -# Template for climatology input to EnsembleStat relative to -# ENSEMBLE_STAT_CLIMO_STDEV_INPUT_DIR. Not used in this example. -# -ENSEMBLE_STAT_CLIMO_STDEV_INPUT_TEMPLATE = -# -# Variable used to specify one or more verification mask files for -# EnsembleStat. Not used for this example. -# -ENSEMBLE_STAT_VERIFICATION_MASK_TEMPLATE = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly diff --git a/parm/metplus/EnsembleStat_RETOP.conf b/parm/metplus/EnsembleStat_RETOP.conf deleted file mode 100644 index abd2dd2a45..0000000000 --- a/parm/metplus/EnsembleStat_RETOP.conf +++ /dev/null @@ -1,267 +0,0 @@ -# EnsembleStat METplus Configuration - -[config] - -# List of applications (tools) to run. -PROCESS_LIST = EnsembleStat - -# time looping - options are INIT, VALID, RETRO, and REALTIME -# If set to INIT or RETRO: -# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set -# If set to VALID or REALTIME: -# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set -LOOP_BY = INIT - -# Format of INIT_BEG and INIT_END using % items -# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. -# see www.strftime.org for more information -# %Y%m%d%H expands to YYYYMMDDHH -INIT_TIME_FMT = %Y%m%d%H - -# Start time for METplus run - must match INIT_TIME_FMT -INIT_BEG = {{cdate}} - -# End time for METplus run - must match INIT_TIME_FMT -INIT_END = {{cdate}} - -# Increment between METplus runs (in seconds if no units are specified). -# Must be >= 60 seconds. -INIT_INCREMENT = 3600 - -# List of forecast leads to process for each run time (init or valid) -# In hours if units are not specified -# If unset, defaults to 0 (don't loop through forecast leads) -LEAD_SEQ = {{fhr_list}} -# -# Order of loops to process data - Options are times, processes -# Not relevant if only one item is in the PROCESS_LIST -# times = run all wrappers in the PROCESS_LIST for a single run time, then -# increment the run time and run all wrappers again until all times have -# been evaluated. -# processes = run the first wrapper in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST until all -# wrappers have been run -# -LOOP_ORDER = times -# -# Specify the name of the METplus log file. -# -LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} -# -# Specify the location and name of the final METplus conf file. 
-# -METPLUS_CONF = {ENSEMBLE_STAT_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}} -# -# Location of MET configuration file to pass to EnsembleStat. -# -# References PARM_BASE, which is the location of the parm directory -# corresponding to the ush directory of the run_metplus.py script that -# is called or the value of the environment variable METPLUS_PARM_BASE -# if set. -# -ENSEMBLE_STAT_CONFIG_FILE = {PARM_BASE}/met_config/EnsembleStatConfig_wrapped -# -# Name to identify model (forecast) data in output. -# -MODEL = {{vx_fcst_model_name}} - -ENSEMBLE_STAT_DESC = NA -# -# Name to identify observation data in output. -# -OBTYPE = {{obtype}} -# -# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. -# -LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} -# -# Observation data time window(s). -# -OBS_FILE_WINDOW_BEGIN = -300 -OBS_FILE_WINDOW_END = 300 -OBS_ENSEMBLE_STAT_WINDOW_BEGIN = 0 -OBS_ENSEMBLE_STAT_WINDOW_END = 0 - -# number of expected members for ensemble. Should correspond with the -# number of items in the list for FCST_ENSEMBLE_STAT_INPUT_TEMPLATE -ENSEMBLE_STAT_N_MEMBERS = {{num_ens_members}} - -# ens.ens_thresh value in the MET config file -# threshold for ratio of valid files to expected files to allow app to run -ENSEMBLE_STAT_ENS_THRESH = 0.05 - -# ens.vld_thresh value in the MET config file -ENSEMBLE_STAT_ENS_VLD_THRESH = 1.0 - -# ENSEMBLE_STAT_MET_OBS_ERR_TABLE is not required. -# If the variable is not defined, or the value is not set, then the MET -# default is used. -ENSEMBLE_STAT_MET_OBS_ERR_TABLE = - - -# Used in the MET config file for: regrid to_grid field -ENSEMBLE_STAT_REGRID_TO_GRID = FCST -ENSEMBLE_STAT_REGRID_METHOD = BUDGET -ENSEMBLE_STAT_REGRID_WIDTH = 2 -ENSEMBLE_STAT_REGRID_VLD_THRESH = 0.5 -ENSEMBLE_STAT_REGRID_SHAPE = SQUARE - -ENSEMBLE_STAT_CENSOR_THRESH = -ENSEMBLE_STAT_CENSOR_VAL = - -# Should this parameter be set to something other than ADPSFC (maybe -# just leave empty) since we are not verifying surface fields? 
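
The {init?...}, {lead?...}, and {valid?...} placeholders used throughout these templates are tied together by one relation: the valid time is the initialization time plus the forecast lead. A two-line check with placeholder values:

    from datetime import datetime, timedelta

    init = datetime(2019, 6, 15, 0)          # from INIT_BEG (placeholder)
    valid = init + timedelta(hours=6)        # one entry of LEAD_SEQ
    print(valid.strftime("%Y%m%d_%H%M%S"))   # 20190615_060000, the "...V" stamp
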
-ENSEMBLE_STAT_MESSAGE_TYPE = ADPSFC -ENSEMBLE_STAT_DUPLICATE_FLAG = NONE -ENSEMBLE_STAT_SKIP_CONST = TRUE -ENSEMBLE_STAT_OBS_ERROR_FLAG = FALSE - -ENSEMBLE_STAT_ENS_SSVAR_BIN_SIZE = 1.0 -ENSEMBLE_STAT_ENS_PHIST_BIN_SIZE = 0.05 - -#ENSEMBLE_STAT_CLIMO_MEAN_FILE_NAME = -#ENSEMBLE_STAT_CLIMO_MEAN_FIELD = -#ENSEMBLE_STAT_CLIMO_MEAN_REGRID_METHOD = -#ENSEMBLE_STAT_CLIMO_MEAN_REGRID_WIDTH = -#ENSEMBLE_STAT_CLIMO_MEAN_REGRID_VLD_THRESH = -#ENSEMBLE_STAT_CLIMO_MEAN_REGRID_SHAPE = -#ENSEMBLE_STAT_CLIMO_MEAN_TIME_INTERP_METHOD = -#ENSEMBLE_STAT_CLIMO_MEAN_MATCH_MONTH = -#ENSEMBLE_STAT_CLIMO_MEAN_DAY_INTERVAL = 31 -#ENSEMBLE_STAT_CLIMO_MEAN_HOUR_INTERVAL = 6 - -#ENSEMBLE_STAT_CLIMO_STDEV_FILE_NAME = -#ENSEMBLE_STAT_CLIMO_STDEV_FIELD = -#ENSEMBLE_STAT_CLIMO_STDEV_REGRID_METHOD = -#ENSEMBLE_STAT_CLIMO_STDEV_REGRID_WIDTH = -#ENSEMBLE_STAT_CLIMO_STDEV_REGRID_VLD_THRESH = -#ENSEMBLE_STAT_CLIMO_STDEV_REGRID_SHAPE = -#ENSEMBLE_STAT_CLIMO_STDEV_TIME_INTERP_METHOD = -#ENSEMBLE_STAT_CLIMO_STDEV_MATCH_MONTH = -#ENSEMBLE_STAT_CLIMO_STDEV_DAY_INTERVAL = 31 -#ENSEMBLE_STAT_CLIMO_STDEV_HOUR_INTERVAL = 6 - - -ENSEMBLE_STAT_CLIMO_CDF_BINS = 1 -ENSEMBLE_STAT_CLIMO_CDF_CENTER_BINS = False -ENSEMBLE_STAT_CLIMO_CDF_WRITE_BINS = False - -ENSEMBLE_STAT_MASK_GRID = FULL - -ENSEMBLE_STAT_CI_ALPHA = 0.05 - -ENSEMBLE_STAT_INTERP_FIELD = BOTH -ENSEMBLE_STAT_INTERP_VLD_THRESH = 1.0 -ENSEMBLE_STAT_INTERP_SHAPE = SQUARE -ENSEMBLE_STAT_INTERP_METHOD = NEAREST -ENSEMBLE_STAT_INTERP_WIDTH = 1 - -ENSEMBLE_STAT_OUTPUT_FLAG_ECNT = STAT -ENSEMBLE_STAT_OUTPUT_FLAG_RPS = NONE -ENSEMBLE_STAT_OUTPUT_FLAG_RHIST = STAT -ENSEMBLE_STAT_OUTPUT_FLAG_PHIST = STAT -ENSEMBLE_STAT_OUTPUT_FLAG_ORANK = STAT -ENSEMBLE_STAT_OUTPUT_FLAG_SSVAR = STAT -ENSEMBLE_STAT_OUTPUT_FLAG_RELP = STAT - -ENSEMBLE_STAT_ENSEMBLE_FLAG_RANK = FALSE -ENSEMBLE_STAT_ENSEMBLE_FLAG_WEIGHT = FALSE -# -# Forecast and observation variables and levels as specified in the fcst -# field dictionary of the MET configuration file. Specify as FCST_VARn_NAME, -# FCST_VARn_LEVELS, (optional) FCST_VARn_OPTION and OBS_VARn_NAME, -# OBS_VARn_LEVELS, (optional) OBS_VARn_OPTION. -# -FCST_VAR1_NAME = {{fieldname_in_fcst_input}} -FCST_VAR1_LEVELS = L0 -FCST_VAR1_OPTIONS = convert(x) = x * 3.28084 * 0.001; ;; Convert from meters to kilofeet. - ens_ssvar_bin_size = 50.0; - ens_phist_bin_size = 0.05; -OBS_VAR1_NAME = {{fieldname_in_obs_input}} -OBS_VAR1_LEVELS = Z500 -OBS_VAR1_OPTIONS = censor_thresh = lt-20; - censor_val = -20.0; - cnt_thresh = [ >15 ]; - cnt_logic = UNION; - convert(x) = x * 3280.84 * 0.001; ;; Convert from kilometers to kilofeet. - ens_ssvar_bin_size = 50.0; - ens_phist_bin_size = 0.05; - -[dir] -# -# OUTPUT_BASE apparently has to be set to something; it cannot be left -# to its default value. But it is not explicitly used elsewhere in this -# configuration file. -# -OUTPUT_BASE = {{output_base}} -# -# Point observation input directory for EnsembleStat. -# -OBS_ENSEMBLE_STAT_POINT_INPUT_DIR = -# -# Grid observation input directory for EnsembleStat. -# -OBS_ENSEMBLE_STAT_GRID_INPUT_DIR = {{obs_input_dir}} -# -# Forecast model input directory for EnsembleStat. -# -FCST_ENSEMBLE_STAT_INPUT_DIR = {{fcst_input_dir}} -# -# Directory containing climatology mean input to EnsembleStat. Not used -# in this example -# -ENSEMBLE_STAT_CLIMO_MEAN_INPUT_DIR = -# -# Directory containing climatology mean input to EnsembleStat. Not used -# in this example. -# -ENSEMBLE_STAT_CLIMO_STDEV_INPUT_DIR = -# -# Output directory for EnsembleStat. 
-# -ENSEMBLE_STAT_OUTPUT_DIR = {{output_dir}} -# -# Directory for staging data. -# -STAGING_DIR = {{staging_dir}} - -# End of [dir] section and start of [filename_templates] section. -[filename_templates] -# -# Template for point observation input to EnsembleStat relative to -# OBS_ENSEMBLE_STAT_POINT_INPUT_DIR. -# -OBS_ENSEMBLE_STAT_POINT_INPUT_TEMPLATE = -# -# Template for gridded observation input to EnsembleStat relative to -# OBS_ENSEMBLE_STAT_GRID_INPUT_DIR. -# -OBS_ENSEMBLE_STAT_GRID_INPUT_TEMPLATE = {{obs_input_fn_template}} -# -# Template for forecast input to EnsembleStat relative to -# FCST_ENSEMBLE_STAT_INPUT_DIR. -# -# Note that this can be a comma separated list of ensemble members -# or a single line, - filename wildcard characters may be used, ? or *. -# -FCST_ENSEMBLE_STAT_INPUT_TEMPLATE = {{fcst_input_fn_template}} -# -# Template for output from EnsembleStat relative to ENSEMBLE_STAT_OUTPUT_DIR. -# -ENSEMBLE_STAT_OUTPUT_TEMPLATE = -ENSEMBLE_STAT_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE}_{lead?fmt=%H%M%S}L -# -# Template for climatology input to EnsembleStat relative to -# ENSEMBLE_STAT_CLIMO_MEAN_INPUT_DIR. Not used in this example. -# -ENSEMBLE_STAT_CLIMO_MEAN_INPUT_TEMPLATE = -# -# Template for climatology input to EnsembleStat relative to -# ENSEMBLE_STAT_CLIMO_STDEV_INPUT_DIR. Not used in this example. -# -ENSEMBLE_STAT_CLIMO_STDEV_INPUT_TEMPLATE = -# -# Variable used to specify one or more verification mask files for -# EnsembleStat. Not used for this example. -# -ENSEMBLE_STAT_VERIFICATION_MASK_TEMPLATE = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly diff --git a/parm/metplus/GenEnsProd.conf b/parm/metplus/GenEnsProd.conf new file mode 100644 index 0000000000..153eae196b --- /dev/null +++ b/parm/metplus/GenEnsProd.conf @@ -0,0 +1,432 @@ +# {{MetplusToolName}} METplus Configuration + +[config] + +# List of applications (tools) to run. +PROCESS_LIST = {{MetplusToolName}} + +# time looping - options are INIT, VALID, RETRO, and REALTIME +# If set to INIT or RETRO: +# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set +# If set to VALID or REALTIME: +# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set +LOOP_BY = INIT + +# Format of INIT_BEG and INIT_END using % items +# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. +# see www.strftime.org for more information +# %Y%m%d%H expands to YYYYMMDDHH +INIT_TIME_FMT = %Y%m%d%H + +# Start time for METplus run - must match INIT_TIME_FMT +INIT_BEG = {{cdate}} + +# End time for METplus run - must match INIT_TIME_FMT +INIT_END = {{cdate}} + +# Increment between METplus runs (in seconds if no units are specified). +# Must be >= 60 seconds. +INIT_INCREMENT = 3600 + +# List of forecast leads to process for each run time (init or valid) +# In hours if units are not specified +# If unset, defaults to 0 (don't loop through forecast leads) +LEAD_SEQ = {{vx_leadhr_list}} +# +# Order of loops to process data - Options are times, processes +# Not relevant if only one item is in the PROCESS_LIST +# times = run all wrappers in the PROCESS_LIST for a single run time, then +# increment the run time and run all wrappers again until all times have +# been evaluated. +# processes = run the first wrapper in the PROCESS_LIST for all times +# specified, then repeat for the next item in the PROCESS_LIST until all +# wrappers have been run +# +LOOP_ORDER = times +# +# Specify the name of the METplus log file. 
+#
+LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}}
+#
+# Specify the location and name of the final METplus conf file.
+#
+METPLUS_CONF = {{ '{' ~ METPLUS_TOOL_NAME ~ '_OUTPUT_DIR}' }}/metplus_final.{{metplus_config_fn}}
+#
+# Name to identify model (forecast) data in output.
+#
+MODEL = {{vx_fcst_model_name}}
+
+{{METPLUS_TOOL_NAME}}_DESC = NA
+#
+# Name to identify observation data in output.
+#
+OBTYPE = {{obtype}}
+#
+# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud.
+#
+LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}}
+
+###
+# File I/O
+###
+
+#
+# Forecast model input directory for {{MetplusToolName}}.
+#
+{{METPLUS_TOOL_NAME}}_INPUT_DIR = {{fcst_input_dir}}
+#
+# Template for forecast input to {{MetplusToolName}} relative to
+# {{METPLUS_TOOL_NAME}}_INPUT_DIR.
+#
+# Note that this can be a comma-separated list of ensemble members or a
+# single entry; filename wildcard characters (? or *) may be used.
+#
+{{METPLUS_TOOL_NAME}}_INPUT_TEMPLATE = {{fcst_input_fn_template}}
+
+# {{METPLUS_TOOL_NAME}}_CTRL_INPUT_DIR = {INPUT_BASE}
+# {{METPLUS_TOOL_NAME}}_CTRL_INPUT_TEMPLATE =
+# {init?fmt=%Y%m%d%H}/mem1/postprd/{ENV[NET]}.t{init?fmt=%H}z.bgdawpf{lead?fmt=%HHH}.tm{init?fmt=%H}.grib2
+
+#
+# Output directory for {{MetplusToolName}}.
+#
+# OUTPUT_BASE apparently has to be set to something; it cannot be left
+# to its default value. But it is not explicitly used elsewhere in this
+# configuration file.
+#
+OUTPUT_BASE = {{output_base}}
+{{METPLUS_TOOL_NAME}}_OUTPUT_DIR = {{output_dir}}
+#
+# Template for output from {{MetplusToolName}} relative to {{METPLUS_TOOL_NAME}}_OUTPUT_DIR.
+#
+{{METPLUS_TOOL_NAME}}_OUTPUT_TEMPLATE = {{metplus_tool_name}}_{MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE}_{lead?fmt=%H%M%S}L_{valid?fmt=%Y%m%d_%H%M%S}V.nc
+#
+# Directory for staging data.
+#
+STAGING_DIR = {{staging_dir}}
+#
+# There are n ensemble members, but one member is used as the control, so
+# specify n-1 members.
+#
+{{METPLUS_TOOL_NAME}}_N_MEMBERS = {{num_ens_members}}
+
+###
+# Field Info
+###
+#
+# Ensemble variables and levels as specified in the ens field dictionary
+# of the MET configuration file. Specify as ENS_VARn_NAME, ENS_VARn_LEVELS,
+# (optional) ENS_VARn_OPTION
+#
+{#-
+Import the file containing jinja macros.
+#}
+{%- import metplus_templates_dir ~ '/metplus_macros.jinja' as metplus_macros %}
+
+{#-
+Jinja requires certain variables to be defined globally within the template
+before they can be used in if-statements and other scopes (see Jinja scoping
+rules). Define such variables.
+#}
+{%- set indx_input_thresh_fcst = '' %}
+{%- set error_msg = '' %}
+{%- set opts_indent = '' %}
+{%- set opts_indent_len = '' %}
+
+{%- set field_fcst = '' %}
+{%- set level_fcst = '' %}
+{%- set thresh_fcst = '' %}
+
+{%- set threshes_fcst = [] %}
+
+{#-
+Get the set of valid field groups and ensure that the specified input
+field group appears in this list.
+#}
+{%- set valid_field_groups = vx_config_dict.keys()|list %}
+{{- metplus_macros.check_field_group(valid_field_groups, input_field_group) }}
+
+{#-
+Reset the input forecast level so that if it happens to be an accumulation
+(e.g. 'A03'), the leading zeros in front of the hour are stripped out (e.g.
+reset to 'A3').
+#}
+{%- set input_level_fcst = metplus_macros.get_accumulation_no_zero_pad(input_level_fcst) %}
+
+{#-
+Extract from the configuration dictionary the set (which itself is a
Then set the delimiter string that separates forecast and +observation values in the various items (i.e. dictionary keys and values +representing field names, levels, and thresholds) in this dictionary. +#} +{%- set fields_levels_threshes_cpld = vx_config_dict[input_field_group] %} +{%- set delim_str = metplus_macros.set_delim_str() %} + +{#- +Loop over the fields and set field names, levels, thresholds, and/or +options for each forecast field in the METplus configuration file. Note +that GenEnsProd only deals with forecasts; it does not consider observations. +#} +{%- set ns = namespace(var_count = 0) %} +{%- for field_cpld, levels_threshes_cpld in fields_levels_threshes_cpld.items() %} + + {%- if delim_str in field_cpld %} + {%- set field_fcst = field_cpld.split(delim_str)[0] %} + {%- else %} + {%- set field_fcst = field_cpld %} + {%- endif %} + +{#- +For convenience, create list of valid forecast levels for the current +field. +#} + {%- set valid_levels_fcst = [] %} + {%- for level_cpld, threshes_cpld in levels_threshes_cpld.items() %} + {%- if delim_str in level_cpld %} + {%- set level_fcst = level_cpld.split(delim_str)[0] %} + {%- else %} + {%- set level_fcst = level_cpld %} + {%- endif %} + {%- set tmp = valid_levels_fcst.append(level_fcst) %} + {%- endfor %} + +{#- +Make sure that the input forecast level (input_level_fcst) is set to a +valid value. +#} + {%- if (input_level_fcst != 'all') and (input_level_fcst not in valid_levels_fcst) %} + {%- set error_msg = '\n' ~ +'The input forecast level (input_level_fcst) must be set either to \'all\'\n' ~ +'or to one of the elements in the list of valid levels (valid_levels_fcst)\n' ~ +'for the current forecast field (field_fcst). This is not the case:\n' ~ +' field_fcst = ' ~ field_fcst ~ '\n' ~ +' valid_levels_fcst = ' ~ valid_levels_fcst ~ '\n' ~ +' input_level_fcst = ' ~ input_level_fcst ~ '\n' %} + {{metplus_macros.print_err_and_quit(error_msg)}} + {%- endif %} + +{#- +Loop over the (coupled) levels and corresponding lists of thresholds. +Extract from these the level values for forecasts and use them to set the +forecast field names, levels, thresholds, and/or options in the METplus +configuration file. +#} + {%- for level_cpld, threshes_cpld in levels_threshes_cpld.items() %} + + {%- if delim_str in level_cpld %} + {%- set level_fcst = level_cpld.split(delim_str)[0] %} + {%- else %} + {%- set level_fcst = level_cpld %} + {%- endif %} + + {%- set valid_threshes_fcst = [] %} + {%- for thresh_cpld in threshes_cpld %} + {%- if delim_str in thresh_cpld %} + {%- set thresh_fcst = thresh_cpld.split(delim_str)[0] %} + {%- else %} + {%- set thresh_fcst = thresh_cpld %} + {%- endif %} + {%- set tmp = valid_threshes_fcst.append(thresh_fcst) %} + {%- endfor %} + + {%- if (input_level_fcst == 'all') or (input_level_fcst == level_fcst) %} +{#- +Increment the METplus variable counter. +#} + {%- set ns.var_count = ns.var_count+1 %} + +{#- +Set forecast field name. Note that this has to exactly match the name +of the field in the input forecast file. + +For accumulated fields, the input forecast file is generated by MET's +PcpCombine tool. In that file, the field name consists of the forecast +field name here (field_fcst) with the accumulation period appended to +it (separated by an underscore), so we must do the same here to get an +exact match. 
+#} + {%- if (input_field_group in ['APCP', 'ASNOW']) %} +ENS_VAR{{ns.var_count}}_NAME = {{field_fcst}}_{{accum_hh}} + {%- else %} +ENS_VAR{{ns.var_count}}_NAME = {{field_fcst}} + {%- endif %} + +{#- +Set forecast field level. +#} +ENS_VAR{{ns.var_count}}_LEVELS = {{level_fcst}} + +{#- +Set forecast field threshold(s). Note that no forecast thresholds are +included in the METplus configuration file if input_thresh_fcst is set +to 'none'. +#} + {%- if (input_thresh_fcst != 'none') %} +{#- +If input_thresh_fcst is set to 'all', set the list of forecast thresholds +to the full set of valid values. +#} + {%- if (input_thresh_fcst == 'all') %} + + {%- set threshes_fcst = valid_threshes_fcst %} +{#- +If input_thresh_fcst is set to a specific value: +* If that value is valid, i.e. it exists in the list of valid forecast + thresholds, get its index in that list and use it to set the forecast + threshold to a one-element list containing that value. +* If the input forecast threshold is not valid, print out a warning message + and exit. +#} + {%- else %} + + {%- if input_thresh_fcst in valid_threshes_fcst %} + {%- set indx_input_thresh_fcst = valid_threshes_fcst.index(input_thresh_fcst) %} + {%- set threshes_fcst = [valid_threshes_fcst[indx_input_thresh_fcst]] %} + {%- else %} + {%- set error_msg = '\n' ~ +'For the current forecast field (field_fcst) and forecast level (level_fcst),\n' ~ +'the input forecast threshold (input_thresh_fcst) does not exist in the list\n' ~ +'of valid forecast thresholds (valid_threshes_fcst):\n' ~ +' field_fcst = ' ~ field_fcst ~ '\n' ~ +' level_fcst = ' ~ level_fcst ~ '\n' ~ +' valid_threshes_fcst = ' ~ valid_threshes_fcst ~ '\n' ~ +' input_thresh_fcst = ' ~ input_thresh_fcst ~ '\n' %} + {{metplus_macros.print_err_and_quit(error_msg)}} + {%- endif %} + + {%- endif %} +{#- +If threshes_fcst has been reset to something other than its default +value of an empty list, then set the forecast thresholds in the METplus +configuration file because that implies threshes_fcst was set above to +a non-empty value. Then reset threshes_fcst to its default value for +proper processing of thresholds for the next field. +#} + {%- if (threshes_fcst != []) %} +ENS_VAR{{ns.var_count}}_THRESH = {{threshes_fcst|join(', ')}} + {%- endif %} + {%- set threshes_fcst = [] %} + + {%- endif %} + +{#- +Set forecast field options. +#} + {%- set opts_indent_len = 19 %} + {%- if (ns.var_count > 9) and (ns.var_count <= 99) %} + {%- set opts_indent_len = opts_indent_len + 1 %} + {%- elif (ns.var_count > 99) and (ns.var_count <= 999) %} + {%- set opts_indent_len = opts_indent_len + 2 %} + {%- elif (ns.var_count > 999) %} + {%- set opts_indent_len = opts_indent_len + 3 %} + {%- endif %} + {%- set opts_indent = ' '*opts_indent_len %} + + {%- if input_field_group == 'RETOP' %} + + {%- if field_fcst == 'RETOP' %} +ENS_VAR{{ns.var_count}}_OPTIONS = convert(x) = x * 3.28084 * 0.001; ;; Convert from meters to kilofeet. 
+ {%- endif %} + + {%- elif input_field_group == 'SFC' %} + + {%- if field_fcst == 'HGT' %} +ENS_VAR{{ns.var_count}}_OPTIONS = GRIB_lvl_typ = 215; +{{opts_indent}}desc = "CEILING"; + {%- elif field_fcst == 'TCDC' %} +ENS_VAR{{ns.var_count}}_OPTIONS = GRIB_lvl_typ = 200; +{{opts_indent}}GRIB2_ipdtmpl_index=[27]; +{{opts_indent}}GRIB2_ipdtmpl_val=[255]; +{{opts_indent}}interp = { type = [ { method = NEAREST; width = 1; } ]; } + {%- elif field_fcst == 'VIS' %} +ENS_VAR{{ns.var_count}}_OPTIONS = interp = { type = [ { method = NEAREST; width = 1; } ]; } + {%- elif field_fcst == 'WIND' %} +ENS_VAR{{ns.var_count}}_OPTIONS = GRIB2_pdt = 0; ;; Derive instantaneous 10-m wind from U/V components, overriding max 10-m wind. + {%- endif %} + + {%- elif input_field_group == 'UPA' %} + + {%- if field_fcst == 'CAPE' %} +ENS_VAR{{ns.var_count}}_OPTIONS = cnt_thresh = [ >0 ]; + {%- endif %} + + {%- endif %} + +{#- +Print out a newline to separate the settings for the current field from +those for the next field. +#} + {{- '\n' }} + + {%- endif %} + + {%- endfor %} +{%- endfor %} +### +# {{MetplusToolName}} +### + +# {{METPLUS_TOOL_NAME}}_REGRID_TO_GRID = NONE +# {{METPLUS_TOOL_NAME}}_REGRID_METHOD = NEAREST +# {{METPLUS_TOOL_NAME}}_REGRID_WIDTH = 1 +# {{METPLUS_TOOL_NAME}}_REGRID_VLD_THRESH = 0.5 +# {{METPLUS_TOOL_NAME}}_REGRID_SHAPE = SQUARE + +# {{METPLUS_TOOL_NAME}}_CENSOR_THRESH = +# {{METPLUS_TOOL_NAME}}_CENSOR_VAL = +# {{METPLUS_TOOL_NAME}}_CAT_THRESH = +# {{METPLUS_TOOL_NAME}}_NC_VAR_STR = + +# Threshold for ratio of valid files to expected files to allow app to run +{{METPLUS_TOOL_NAME}}_ENS_THRESH = 0.05 + +{{METPLUS_TOOL_NAME}}_NBRHD_PROB_WIDTH = 27 +{{METPLUS_TOOL_NAME}}_NBRHD_PROB_SHAPE = CIRCLE +{{METPLUS_TOOL_NAME}}_NBRHD_PROB_VLD_THRESH = 0.0 + +# {{METPLUS_TOOL_NAME}}_NMEP_SMOOTH_VLD_THRESH = 0.0 +# {{METPLUS_TOOL_NAME}}_NMEP_SMOOTH_SHAPE = CIRCLE +# {{METPLUS_TOOL_NAME}}_NMEP_SMOOTH_GAUSSIAN_DX = 81.27 +# {{METPLUS_TOOL_NAME}}_NMEP_SMOOTH_GAUSSIAN_RADIUS = 120 +# {{METPLUS_TOOL_NAME}}_NMEP_SMOOTH_METHOD = GAUSSIAN +# {{METPLUS_TOOL_NAME}}_NMEP_SMOOTH_WIDTH = 1 + +# {{METPLUS_TOOL_NAME}}_CLIMO_MEAN_FILE_NAME = +# {{METPLUS_TOOL_NAME}}_CLIMO_MEAN_FIELD = +# {{METPLUS_TOOL_NAME}}_CLIMO_MEAN_REGRID_METHOD = +# {{METPLUS_TOOL_NAME}}_CLIMO_MEAN_REGRID_WIDTH = +# {{METPLUS_TOOL_NAME}}_CLIMO_MEAN_REGRID_VLD_THRESH = +# {{METPLUS_TOOL_NAME}}_CLIMO_MEAN_REGRID_SHAPE = +# {{METPLUS_TOOL_NAME}}_CLIMO_MEAN_TIME_INTERP_METHOD = +# {{METPLUS_TOOL_NAME}}_CLIMO_MEAN_MATCH_MONTH = +# {{METPLUS_TOOL_NAME}}_CLIMO_MEAN_DAY_INTERVAL = 31 +# {{METPLUS_TOOL_NAME}}_CLIMO_MEAN_HOUR_INTERVAL = 6 + +# {{METPLUS_TOOL_NAME}}_CLIMO_STDEV_FILE_NAME = +# {{METPLUS_TOOL_NAME}}_CLIMO_STDEV_FIELD = +# {{METPLUS_TOOL_NAME}}_CLIMO_STDEV_REGRID_METHOD = +# {{METPLUS_TOOL_NAME}}_CLIMO_STDEV_REGRID_WIDTH = +# {{METPLUS_TOOL_NAME}}_CLIMO_STDEV_REGRID_VLD_THRESH = +# {{METPLUS_TOOL_NAME}}_CLIMO_STDEV_REGRID_SHAPE = +# {{METPLUS_TOOL_NAME}}_CLIMO_STDEV_TIME_INTERP_METHOD = +# {{METPLUS_TOOL_NAME}}_CLIMO_STDEV_MATCH_MONTH = +# {{METPLUS_TOOL_NAME}}_CLIMO_STDEV_DAY_INTERVAL = 31 +# {{METPLUS_TOOL_NAME}}_CLIMO_STDEV_HOUR_INTERVAL = 6 + +{{METPLUS_TOOL_NAME}}_ENSEMBLE_FLAG_LATLON = TRUE +{{METPLUS_TOOL_NAME}}_ENSEMBLE_FLAG_MEAN = TRUE +{{METPLUS_TOOL_NAME}}_ENSEMBLE_FLAG_STDEV = TRUE +{{METPLUS_TOOL_NAME}}_ENSEMBLE_FLAG_MINUS = FALSE +{{METPLUS_TOOL_NAME}}_ENSEMBLE_FLAG_PLUS = FALSE +{{METPLUS_TOOL_NAME}}_ENSEMBLE_FLAG_MIN = FALSE +{{METPLUS_TOOL_NAME}}_ENSEMBLE_FLAG_MAX = FALSE +{{METPLUS_TOOL_NAME}}_ENSEMBLE_FLAG_RANGE = TRUE 
+{{METPLUS_TOOL_NAME}}_ENSEMBLE_FLAG_VLD_COUNT = TRUE +{{METPLUS_TOOL_NAME}}_ENSEMBLE_FLAG_FREQUENCY = TRUE +{{METPLUS_TOOL_NAME}}_ENSEMBLE_FLAG_NEP = TRUE +{{METPLUS_TOOL_NAME}}_ENSEMBLE_FLAG_NMEP = TRUE +{{METPLUS_TOOL_NAME}}_ENSEMBLE_FLAG_CLIMO = FALSE +{{METPLUS_TOOL_NAME}}_ENSEMBLE_FLAG_CLIMO_CDF = FALSE + +# {{METPLUS_TOOL_NAME}}_ENS_MEMBER_IDS = +# {{METPLUS_TOOL_NAME}}_CONTROL_ID = diff --git a/parm/metplus/GenEnsProd_ADPSFC.conf b/parm/metplus/GenEnsProd_ADPSFC.conf deleted file mode 100644 index cb253f575b..0000000000 --- a/parm/metplus/GenEnsProd_ADPSFC.conf +++ /dev/null @@ -1,219 +0,0 @@ -# GenEnsProd METplus Configuration - -[config] - -# List of applications (tools) to run. -PROCESS_LIST = GenEnsProd - -# time looping - options are INIT, VALID, RETRO, and REALTIME -# If set to INIT or RETRO: -# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set -# If set to VALID or REALTIME: -# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set -LOOP_BY = INIT - -# Format of INIT_BEG and INIT_END using % items -# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. -# see www.strftime.org for more information -# %Y%m%d%H expands to YYYYMMDDHH -INIT_TIME_FMT = %Y%m%d%H - -# Start time for METplus run - must match INIT_TIME_FMT -INIT_BEG = {{cdate}} - -# End time for METplus run - must match INIT_TIME_FMT -INIT_END = {{cdate}} - -# Increment between METplus runs (in seconds if no units are specified). -# Must be >= 60 seconds. -INIT_INCREMENT = 3600 - -# List of forecast leads to process for each run time (init or valid) -# In hours if units are not specified -# If unset, defaults to 0 (don't loop through forecast leads) -LEAD_SEQ = {{fhr_list}} -# -# Order of loops to process data - Options are times, processes -# Not relevant if only one item is in the PROCESS_LIST -# times = run all wrappers in the PROCESS_LIST for a single run time, then -# increment the run time and run all wrappers again until all times have -# been evaluated. -# processes = run the first wrapper in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST until all -# wrappers have been run -# -LOOP_ORDER = times -# -# Specify the name of the METplus log file. -# -LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} -# -# Specify the location and name of the final METplus conf file. -# -METPLUS_CONF = {GEN_ENS_PROD_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}} -# -# Name to identify model (forecast) data in output. -# -MODEL = {{vx_fcst_model_name}} - -GEN_ENS_PROD_DESC = NA -# -# Name to identify observation data in output. -# -OBTYPE = {{obtype}} -# -# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. -# -LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} - -### -# File I/O -### - -# -# Forecast model input directory for GenEnsProd. -# -GEN_ENS_PROD_INPUT_DIR = {{fcst_input_dir}} -# -# Template for forecast input to GenEnsProd relative to -# GEN_ENS_PROD_INPUT_DIR. -# -# Note that this can be a comma separated list of ensemble members -# or a single line, - filename wildcard characters may be used, ? or *. -# -GEN_ENS_PROD_INPUT_TEMPLATE = {{fcst_input_fn_template}} - -# GEN_ENS_PROD_CTRL_INPUT_DIR = {INPUT_BASE} -# GEN_ENS_PROD_CTRL_INPUT_TEMPLATE = -# {init?fmt=%Y%m%d%H}/mem1/postprd/{ENV[NET]}.t{init?fmt=%H}z.bgdawpf{lead?fmt=%HHH}.tm{init?fmt=%H}.grib2 - -# -# Output directory for GenEnsProd. -# -# OUTPUT_BASE apparently has to be set to something; it cannot be left -# to its default value. 
But it is not explicitly used elsewhere in this -# configuration file. -# -OUTPUT_BASE = {{output_base}} -GEN_ENS_PROD_OUTPUT_DIR = {{output_dir}} -# -# Template for output from GenEnsProd relative to GEN_ENS_PROD_OUTPUT_DIR. -# -GEN_ENS_PROD_OUTPUT_TEMPLATE = gen_ens_prod_{MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE}_{lead?fmt=%H%M%S}L_{valid?fmt=%Y%m%d_%H%M%S}V.nc -# -# Directory for staging data. -# -STAGING_DIR = {{staging_dir}} -# -# There are n ensembles but 1 is used as control, so specify n-1 members. -# -GEN_ENS_PROD_N_MEMBERS = {{num_ens_members}} - -### -# Field Info -### -# -# Ensemble variables and levels as specified in the ens field dictionary -# of the MET configuration file. Specify as ENS_VARn_NAME, ENS_VARn_LEVELS, -# (optional) ENS_VARn_OPTION -# -ENS_VAR1_NAME = TMP -ENS_VAR1_LEVELS = Z02 -ENS_VAR1_THRESH = ge268, ge273, ge278, ge293, ge298, ge303 - -ENS_VAR2_NAME = DPT -ENS_VAR2_LEVELS = Z2 -ENS_VAR2_THRESH = ge263, ge268, ge273, ge288, ge293, ge298 - -ENS_VAR3_NAME = WIND -ENS_VAR3_LEVELS = Z10 -ENS_VAR3_THRESH = ge5, ge10, ge15 -ENS_VAR3_OPTIONS = GRIB2_pdt = 0; ;; Derive instantaneous 10-m wind from U/V components, overriding max 10-m wind. - -ENS_VAR4_NAME = TCDC -ENS_VAR4_LEVELS = L0 -ENS_VAR4_THRESH = lt25, gt75 -ENS_VAR4_OPTIONS = GRIB_lvl_typ = 200; - GRIB2_ipdtmpl_index=[27]; - GRIB2_ipdtmpl_val=[255]; - interp = { type = [ { method = NEAREST; width = 1; } ]; } - -ENS_VAR5_NAME = VIS -ENS_VAR5_LEVELS = L0 -ENS_VAR5_THRESH = lt1609, lt8045, ge8045 -ENS_VAR5_OPTIONS = interp = { type = [ { method = NEAREST; width = 1; } ]; } - -ENS_VAR6_NAME = HGT -ENS_VAR6_LEVELS = L0 -ENS_VAR6_THRESH = lt152, lt1520, ge914 -ENS_VAR6_OPTIONS = GRIB_lvl_typ = 215; - desc = "CEILING"; - -### -# GenEnsProd -### - -# GEN_ENS_PROD_REGRID_TO_GRID = NONE -# GEN_ENS_PROD_REGRID_METHOD = NEAREST -# GEN_ENS_PROD_REGRID_WIDTH = 1 -# GEN_ENS_PROD_REGRID_VLD_THRESH = 0.5 -# GEN_ENS_PROD_REGRID_SHAPE = SQUARE - -# GEN_ENS_PROD_CENSOR_THRESH = -# GEN_ENS_PROD_CENSOR_VAL = -# GEN_ENS_PROD_CAT_THRESH = -# GEN_ENS_PROD_NC_VAR_STR = - -# Threshold for ratio of valid files to expected files to allow app to run -GEN_ENS_PROD_ENS_THRESH = 0.05 - -GEN_ENS_PROD_NBRHD_PROB_WIDTH = 27 -GEN_ENS_PROD_NBRHD_PROB_SHAPE = CIRCLE -GEN_ENS_PROD_NBRHD_PROB_VLD_THRESH = 0.0 - -# GEN_ENS_PROD_NMEP_SMOOTH_VLD_THRESH = 0.0 -# GEN_ENS_PROD_NMEP_SMOOTH_SHAPE = CIRCLE -# GEN_ENS_PROD_NMEP_SMOOTH_GAUSSIAN_DX = 81.27 -# GEN_ENS_PROD_NMEP_SMOOTH_GAUSSIAN_RADIUS = 120 -# GEN_ENS_PROD_NMEP_SMOOTH_METHOD = GAUSSIAN -# GEN_ENS_PROD_NMEP_SMOOTH_WIDTH = 1 - -# GEN_ENS_PROD_CLIMO_MEAN_FILE_NAME = -# GEN_ENS_PROD_CLIMO_MEAN_FIELD = -# GEN_ENS_PROD_CLIMO_MEAN_REGRID_METHOD = -# GEN_ENS_PROD_CLIMO_MEAN_REGRID_WIDTH = -# GEN_ENS_PROD_CLIMO_MEAN_REGRID_VLD_THRESH = -# GEN_ENS_PROD_CLIMO_MEAN_REGRID_SHAPE = -# GEN_ENS_PROD_CLIMO_MEAN_TIME_INTERP_METHOD = -# GEN_ENS_PROD_CLIMO_MEAN_MATCH_MONTH = -# GEN_ENS_PROD_CLIMO_MEAN_DAY_INTERVAL = 31 -# GEN_ENS_PROD_CLIMO_MEAN_HOUR_INTERVAL = 6 - -# GEN_ENS_PROD_CLIMO_STDEV_FILE_NAME = -# GEN_ENS_PROD_CLIMO_STDEV_FIELD = -# GEN_ENS_PROD_CLIMO_STDEV_REGRID_METHOD = -# GEN_ENS_PROD_CLIMO_STDEV_REGRID_WIDTH = -# GEN_ENS_PROD_CLIMO_STDEV_REGRID_VLD_THRESH = -# GEN_ENS_PROD_CLIMO_STDEV_REGRID_SHAPE = -# GEN_ENS_PROD_CLIMO_STDEV_TIME_INTERP_METHOD = -# GEN_ENS_PROD_CLIMO_STDEV_MATCH_MONTH = -# GEN_ENS_PROD_CLIMO_STDEV_DAY_INTERVAL = 31 -# GEN_ENS_PROD_CLIMO_STDEV_HOUR_INTERVAL = 6 - -GEN_ENS_PROD_ENSEMBLE_FLAG_LATLON = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_MEAN = TRUE 
-GEN_ENS_PROD_ENSEMBLE_FLAG_STDEV = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_MINUS = FALSE -GEN_ENS_PROD_ENSEMBLE_FLAG_PLUS = FALSE -GEN_ENS_PROD_ENSEMBLE_FLAG_MIN = FALSE -GEN_ENS_PROD_ENSEMBLE_FLAG_MAX = FALSE -GEN_ENS_PROD_ENSEMBLE_FLAG_RANGE = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_VLD_COUNT = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_FREQUENCY = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_NEP = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_NMEP = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_CLIMO = FALSE -GEN_ENS_PROD_ENSEMBLE_FLAG_CLIMO_CDF = FALSE - -# GEN_ENS_PROD_ENS_MEMBER_IDS = -# GEN_ENS_PROD_CONTROL_ID = diff --git a/parm/metplus/GenEnsProd_ADPUPA.conf b/parm/metplus/GenEnsProd_ADPUPA.conf deleted file mode 100644 index 863427752f..0000000000 --- a/parm/metplus/GenEnsProd_ADPUPA.conf +++ /dev/null @@ -1,236 +0,0 @@ -# GenEnsProd METplus Configuration - -[config] - -# List of applications (tools) to run. -PROCESS_LIST = GenEnsProd - -# time looping - options are INIT, VALID, RETRO, and REALTIME -# If set to INIT or RETRO: -# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set -# If set to VALID or REALTIME: -# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set -LOOP_BY = INIT - -# Format of INIT_BEG and INIT_END using % items -# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. -# see www.strftime.org for more information -# %Y%m%d%H expands to YYYYMMDDHH -INIT_TIME_FMT = %Y%m%d%H - -# Start time for METplus run - must match INIT_TIME_FMT -INIT_BEG = {{cdate}} - -# End time for METplus run - must match INIT_TIME_FMT -INIT_END = {{cdate}} - -# Increment between METplus runs (in seconds if no units are specified). -# Must be >= 60 seconds. -INIT_INCREMENT = 3600 - -# List of forecast leads to process for each run time (init or valid) -# In hours if units are not specified -# If unset, defaults to 0 (don't loop through forecast leads) -LEAD_SEQ = {{fhr_list}} -# -# Order of loops to process data - Options are times, processes -# Not relevant if only one item is in the PROCESS_LIST -# times = run all wrappers in the PROCESS_LIST for a single run time, then -# increment the run time and run all wrappers again until all times have -# been evaluated. -# processes = run the first wrapper in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST until all -# wrappers have been run -# -LOOP_ORDER = times -# -# Specify the name of the METplus log file. -# -LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} -# -# Specify the location and name of the final METplus conf file. -# -METPLUS_CONF = {GEN_ENS_PROD_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}} -# -# Name to identify model (forecast) data in output. -# -MODEL = {{vx_fcst_model_name}} - -GEN_ENS_PROD_DESC = NA -# -# Name to identify observation data in output. -# -OBTYPE = {{obtype}} -# -# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. -# -LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} - -### -# File I/O -### - -# -# Forecast model input directory for GenEnsProd. -# -GEN_ENS_PROD_INPUT_DIR = {{fcst_input_dir}} -# -# Template for forecast input to GenEnsProd relative to -# GEN_ENS_PROD_INPUT_DIR. -# -# Note that this can be a comma separated list of ensemble members -# or a single line, - filename wildcard characters may be used, ? or *. 
-# -GEN_ENS_PROD_INPUT_TEMPLATE = {{fcst_input_fn_template}} - -# GEN_ENS_PROD_CTRL_INPUT_DIR = {INPUT_BASE} -# GEN_ENS_PROD_CTRL_INPUT_TEMPLATE = -# {init?fmt=%Y%m%d%H}/mem1/postprd/{ENV[NET]}.t{init?fmt=%H}z.bgdawpf{lead?fmt=%HHH}.tm{init?fmt=%H}.grib2 - -# -# Output directory for GenEnsProd. -# -# OUTPUT_BASE apparently has to be set to something; it cannot be left -# to its default value. But it is not explicitly used elsewhere in this -# configuration file. -# -OUTPUT_BASE = {{output_base}} -GEN_ENS_PROD_OUTPUT_DIR = {{output_dir}} -# -# Template for output from GenEnsProd relative to GEN_ENS_PROD_OUTPUT_DIR. -# -GEN_ENS_PROD_OUTPUT_TEMPLATE = gen_ens_prod_{MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE}_{lead?fmt=%H%M%S}L_{valid?fmt=%Y%m%d_%H%M%S}V.nc -# -# Directory for staging data. -# -STAGING_DIR = {{staging_dir}} -# -# There are n ensembles but 1 is used as control, so specify n-1 members. -# -GEN_ENS_PROD_N_MEMBERS = {{num_ens_members}} - -### -# Field Info -### -# -# Ensemble variables and levels as specified in the ens field dictionary -# of the MET configuration file. Specify as ENS_VARn_NAME, ENS_VARn_LEVELS, -# (optional) ENS_VARn_OPTION -# -ENS_VAR1_NAME = TMP -ENS_VAR1_LEVELS = P850 -ENS_VAR1_THRESH = ge288, ge293, ge298 - -ENS_VAR2_NAME = TMP -ENS_VAR2_LEVELS = P700 -ENS_VAR2_THRESH = ge273, ge278, ge283 - -ENS_VAR3_NAME = TMP -ENS_VAR3_LEVELS = P500 -ENS_VAR3_THRESH = ge258, ge263, ge268 - -ENS_VAR4_NAME = DPT -ENS_VAR4_LEVELS = P850 -ENS_VAR4_THRESH = ge273, ge278, ge283 - -ENS_VAR5_NAME = DPT -ENS_VAR5_LEVELS = P700 -ENS_VAR5_THRESH = ge263, ge268, ge273 - -ENS_VAR6_NAME = WIND -ENS_VAR6_LEVELS = P850 -ENS_VAR6_THRESH = ge5, ge10, ge15 - -ENS_VAR7_NAME = WIND -ENS_VAR7_LEVELS = P700 -ENS_VAR7_THRESH = ge10, ge15, ge20 - -ENS_VAR8_NAME = WIND -ENS_VAR8_LEVELS = P500 -ENS_VAR8_THRESH = ge15, ge21, ge26 - -ENS_VAR9_NAME = WIND -ENS_VAR9_LEVELS = P250 -ENS_VAR9_THRESH = ge26, ge31, ge36, ge46, ge62 - -ENS_VAR10_NAME = HGT -ENS_VAR10_LEVELS = P500 -ENS_VAR10_THRESH = ge5400, ge5600, ge5880 - -ENS_VAR11_NAME = CAPE -ENS_VAR11_LEVELS = L0 -ENS_VAR11_THRESH = le1000, gt1000&<2500, gt2500&<4000, gt2500 -ENS_VAR11_OPTIONS = cnt_thresh = [ >0 ]; - -ENS_VAR12_NAME = HPBL -ENS_VAR12_LEVELS = Z0 -ENS_VAR12_THRESH = lt500, lt1500, gt1500 - -### -# GenEnsProd -### - -# GEN_ENS_PROD_REGRID_TO_GRID = NONE -# GEN_ENS_PROD_REGRID_METHOD = NEAREST -# GEN_ENS_PROD_REGRID_WIDTH = 1 -# GEN_ENS_PROD_REGRID_VLD_THRESH = 0.5 -# GEN_ENS_PROD_REGRID_SHAPE = SQUARE - -# GEN_ENS_PROD_CENSOR_THRESH = -# GEN_ENS_PROD_CENSOR_VAL = -# GEN_ENS_PROD_CAT_THRESH = -# GEN_ENS_PROD_NC_VAR_STR = - -# Threshold for ratio of valid files to expected files to allow app to run -GEN_ENS_PROD_ENS_THRESH = 0.05 - -GEN_ENS_PROD_NBRHD_PROB_WIDTH = 27 -GEN_ENS_PROD_NBRHD_PROB_SHAPE = CIRCLE -GEN_ENS_PROD_NBRHD_PROB_VLD_THRESH = 0.0 - -# GEN_ENS_PROD_NMEP_SMOOTH_VLD_THRESH = 0.0 -# GEN_ENS_PROD_NMEP_SMOOTH_SHAPE = CIRCLE -# GEN_ENS_PROD_NMEP_SMOOTH_GAUSSIAN_DX = 81.27 -# GEN_ENS_PROD_NMEP_SMOOTH_GAUSSIAN_RADIUS = 120 -# GEN_ENS_PROD_NMEP_SMOOTH_METHOD = GAUSSIAN -# GEN_ENS_PROD_NMEP_SMOOTH_WIDTH = 1 - -# GEN_ENS_PROD_CLIMO_MEAN_FILE_NAME = -# GEN_ENS_PROD_CLIMO_MEAN_FIELD = -# GEN_ENS_PROD_CLIMO_MEAN_REGRID_METHOD = -# GEN_ENS_PROD_CLIMO_MEAN_REGRID_WIDTH = -# GEN_ENS_PROD_CLIMO_MEAN_REGRID_VLD_THRESH = -# GEN_ENS_PROD_CLIMO_MEAN_REGRID_SHAPE = -# GEN_ENS_PROD_CLIMO_MEAN_TIME_INTERP_METHOD = -# GEN_ENS_PROD_CLIMO_MEAN_MATCH_MONTH = -# GEN_ENS_PROD_CLIMO_MEAN_DAY_INTERVAL = 31 -# GEN_ENS_PROD_CLIMO_MEAN_HOUR_INTERVAL 
= 6 - -# GEN_ENS_PROD_CLIMO_STDEV_FILE_NAME = -# GEN_ENS_PROD_CLIMO_STDEV_FIELD = -# GEN_ENS_PROD_CLIMO_STDEV_REGRID_METHOD = -# GEN_ENS_PROD_CLIMO_STDEV_REGRID_WIDTH = -# GEN_ENS_PROD_CLIMO_STDEV_REGRID_VLD_THRESH = -# GEN_ENS_PROD_CLIMO_STDEV_REGRID_SHAPE = -# GEN_ENS_PROD_CLIMO_STDEV_TIME_INTERP_METHOD = -# GEN_ENS_PROD_CLIMO_STDEV_MATCH_MONTH = -# GEN_ENS_PROD_CLIMO_STDEV_DAY_INTERVAL = 31 -# GEN_ENS_PROD_CLIMO_STDEV_HOUR_INTERVAL = 6 - -GEN_ENS_PROD_ENSEMBLE_FLAG_LATLON = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_MEAN = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_STDEV = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_MINUS = FALSE -GEN_ENS_PROD_ENSEMBLE_FLAG_PLUS = FALSE -GEN_ENS_PROD_ENSEMBLE_FLAG_MIN = FALSE -GEN_ENS_PROD_ENSEMBLE_FLAG_MAX = FALSE -GEN_ENS_PROD_ENSEMBLE_FLAG_RANGE = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_VLD_COUNT = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_FREQUENCY = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_NEP = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_NMEP = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_CLIMO = FALSE -GEN_ENS_PROD_ENSEMBLE_FLAG_CLIMO_CDF = FALSE - -# GEN_ENS_PROD_ENS_MEMBER_IDS = -# GEN_ENS_PROD_CONTROL_ID = diff --git a/parm/metplus/GenEnsProd_APCP.conf b/parm/metplus/GenEnsProd_APCP.conf deleted file mode 100644 index 0d05843a87..0000000000 --- a/parm/metplus/GenEnsProd_APCP.conf +++ /dev/null @@ -1,191 +0,0 @@ -# GenEnsProd METplus Configuration - -[config] - -# List of applications (tools) to run. -PROCESS_LIST = GenEnsProd - -# time looping - options are INIT, VALID, RETRO, and REALTIME -# If set to INIT or RETRO: -# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set -# If set to VALID or REALTIME: -# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set -LOOP_BY = INIT - -# Format of INIT_BEG and INIT_END using % items -# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. -# see www.strftime.org for more information -# %Y%m%d%H expands to YYYYMMDDHH -INIT_TIME_FMT = %Y%m%d%H - -# Start time for METplus run - must match INIT_TIME_FMT -INIT_BEG = {{cdate}} - -# End time for METplus run - must match INIT_TIME_FMT -INIT_END = {{cdate}} - -# Increment between METplus runs (in seconds if no units are specified). -# Must be >= 60 seconds. -INIT_INCREMENT = 3600 - -# List of forecast leads to process for each run time (init or valid) -# In hours if units are not specified -# If unset, defaults to 0 (don't loop through forecast leads) -LEAD_SEQ = {{fhr_list}} -# -# Order of loops to process data - Options are times, processes -# Not relevant if only one item is in the PROCESS_LIST -# times = run all wrappers in the PROCESS_LIST for a single run time, then -# increment the run time and run all wrappers again until all times have -# been evaluated. -# processes = run the first wrapper in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST until all -# wrappers have been run -# -LOOP_ORDER = times -# -# Specify the name of the METplus log file. -# -LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} -# -# Specify the location and name of the final METplus conf file. -# -METPLUS_CONF = {GEN_ENS_PROD_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}} -# -# Name to identify model (forecast) data in output. -# -MODEL = {{vx_fcst_model_name}} - -GEN_ENS_PROD_DESC = NA -# -# Name to identify observation data in output. -# -OBTYPE = {{obtype}} -# -# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. 
-# -LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} - -### -# File I/O -### - -# -# Forecast model input directory for GenEnsProd. -# -GEN_ENS_PROD_INPUT_DIR = {{fcst_input_dir}} -# -# Template for forecast input to GenEnsProd relative to -# GEN_ENS_PROD_INPUT_DIR. -# -# Note that this can be a comma separated list of ensemble members -# or a single line, - filename wildcard characters may be used, ? or *. -# -GEN_ENS_PROD_INPUT_TEMPLATE = {{fcst_input_fn_template}} - -# GEN_ENS_PROD_CTRL_INPUT_DIR = {INPUT_BASE} -# GEN_ENS_PROD_CTRL_INPUT_TEMPLATE = -# {init?fmt=%Y%m%d%H}/mem1/postprd/{ENV[NET]}.t{init?fmt=%H}z.bgdawpf{lead?fmt=%HHH}.tm{init?fmt=%H}.grib2 - -# -# Output directory for GenEnsProd. -# -# OUTPUT_BASE apparently has to be set to something; it cannot be left -# to its default value. But it is not explicitly used elsewhere in this -# configuration file. -# -OUTPUT_BASE = {{output_base}} -GEN_ENS_PROD_OUTPUT_DIR = {{output_dir}} -# -# Template for output from GenEnsProd relative to GEN_ENS_PROD_OUTPUT_DIR. -# -GEN_ENS_PROD_OUTPUT_TEMPLATE = gen_ens_prod_{MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE}_{lead?fmt=%H%M%S}L_{valid?fmt=%Y%m%d_%H%M%S}V.nc -# -# Directory for staging data. -# -STAGING_DIR = {{staging_dir}} -# -# There are n ensembles but 1 is used as control, so specify n-1 members. -# -GEN_ENS_PROD_N_MEMBERS = {{num_ens_members}} - -### -# Field Info -### -# -# Ensemble variables and levels as specified in the ens field dictionary -# of the MET configuration file. Specify as ENS_VARn_NAME, ENS_VARn_LEVELS, -# (optional) ENS_VARn_OPTION -# -ENS_VAR1_NAME = {{fieldname_in_met_output}}_{{accum_hh}} -ENS_VAR1_LEVELS = A{{accum_hh}} -ENS_VAR1_THRESH = {{field_thresholds}} - -### -# GenEnsProd -### - -# GEN_ENS_PROD_REGRID_TO_GRID = NONE -# GEN_ENS_PROD_REGRID_METHOD = NEAREST -# GEN_ENS_PROD_REGRID_WIDTH = 1 -# GEN_ENS_PROD_REGRID_VLD_THRESH = 0.5 -# GEN_ENS_PROD_REGRID_SHAPE = SQUARE - -# GEN_ENS_PROD_CENSOR_THRESH = -# GEN_ENS_PROD_CENSOR_VAL = -# GEN_ENS_PROD_CAT_THRESH = -# GEN_ENS_PROD_NC_VAR_STR = - -# Threshold for ratio of valid files to expected files to allow app to run -GEN_ENS_PROD_ENS_THRESH = 0.05 - -GEN_ENS_PROD_NBRHD_PROB_WIDTH = 27 -GEN_ENS_PROD_NBRHD_PROB_SHAPE = CIRCLE -GEN_ENS_PROD_NBRHD_PROB_VLD_THRESH = 0.0 - -# GEN_ENS_PROD_NMEP_SMOOTH_VLD_THRESH = 0.0 -# GEN_ENS_PROD_NMEP_SMOOTH_SHAPE = CIRCLE -# GEN_ENS_PROD_NMEP_SMOOTH_GAUSSIAN_DX = 81.27 -# GEN_ENS_PROD_NMEP_SMOOTH_GAUSSIAN_RADIUS = 120 -# GEN_ENS_PROD_NMEP_SMOOTH_METHOD = GAUSSIAN -# GEN_ENS_PROD_NMEP_SMOOTH_WIDTH = 1 - -# GEN_ENS_PROD_CLIMO_MEAN_FILE_NAME = -# GEN_ENS_PROD_CLIMO_MEAN_FIELD = -# GEN_ENS_PROD_CLIMO_MEAN_REGRID_METHOD = -# GEN_ENS_PROD_CLIMO_MEAN_REGRID_WIDTH = -# GEN_ENS_PROD_CLIMO_MEAN_REGRID_VLD_THRESH = -# GEN_ENS_PROD_CLIMO_MEAN_REGRID_SHAPE = -# GEN_ENS_PROD_CLIMO_MEAN_TIME_INTERP_METHOD = -# GEN_ENS_PROD_CLIMO_MEAN_MATCH_MONTH = -# GEN_ENS_PROD_CLIMO_MEAN_DAY_INTERVAL = 31 -# GEN_ENS_PROD_CLIMO_MEAN_HOUR_INTERVAL = 6 - -# GEN_ENS_PROD_CLIMO_STDEV_FILE_NAME = -# GEN_ENS_PROD_CLIMO_STDEV_FIELD = -# GEN_ENS_PROD_CLIMO_STDEV_REGRID_METHOD = -# GEN_ENS_PROD_CLIMO_STDEV_REGRID_WIDTH = -# GEN_ENS_PROD_CLIMO_STDEV_REGRID_VLD_THRESH = -# GEN_ENS_PROD_CLIMO_STDEV_REGRID_SHAPE = -# GEN_ENS_PROD_CLIMO_STDEV_TIME_INTERP_METHOD = -# GEN_ENS_PROD_CLIMO_STDEV_MATCH_MONTH = -# GEN_ENS_PROD_CLIMO_STDEV_DAY_INTERVAL = 31 -# GEN_ENS_PROD_CLIMO_STDEV_HOUR_INTERVAL = 6 - -GEN_ENS_PROD_ENSEMBLE_FLAG_LATLON = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_MEAN = TRUE 
-GEN_ENS_PROD_ENSEMBLE_FLAG_STDEV = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_MINUS = FALSE -GEN_ENS_PROD_ENSEMBLE_FLAG_PLUS = FALSE -GEN_ENS_PROD_ENSEMBLE_FLAG_MIN = FALSE -GEN_ENS_PROD_ENSEMBLE_FLAG_MAX = FALSE -GEN_ENS_PROD_ENSEMBLE_FLAG_RANGE = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_VLD_COUNT = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_FREQUENCY = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_NEP = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_NMEP = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_CLIMO = FALSE -GEN_ENS_PROD_ENSEMBLE_FLAG_CLIMO_CDF = FALSE - -# GEN_ENS_PROD_ENS_MEMBER_IDS = -# GEN_ENS_PROD_CONTROL_ID = diff --git a/parm/metplus/GenEnsProd_ASNOW.conf b/parm/metplus/GenEnsProd_ASNOW.conf deleted file mode 100644 index ea9dac02d9..0000000000 --- a/parm/metplus/GenEnsProd_ASNOW.conf +++ /dev/null @@ -1,192 +0,0 @@ -# GenEnsProd METplus Configuration - -[config] - -# List of applications (tools) to run. -PROCESS_LIST = GenEnsProd - -# time looping - options are INIT, VALID, RETRO, and REALTIME -# If set to INIT or RETRO: -# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set -# If set to VALID or REALTIME: -# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set -LOOP_BY = INIT - -# Format of INIT_BEG and INIT_END using % items -# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. -# see www.strftime.org for more information -# %Y%m%d%H expands to YYYYMMDDHH -INIT_TIME_FMT = %Y%m%d%H - -# Start time for METplus run - must match INIT_TIME_FMT -INIT_BEG = {{cdate}} - -# End time for METplus run - must match INIT_TIME_FMT -INIT_END = {{cdate}} - -# Increment between METplus runs (in seconds if no units are specified). -# Must be >= 60 seconds. -INIT_INCREMENT = 3600 - -# List of forecast leads to process for each run time (init or valid) -# In hours if units are not specified -# If unset, defaults to 0 (don't loop through forecast leads) -LEAD_SEQ = {{fhr_list}} -# -# Order of loops to process data - Options are times, processes -# Not relevant if only one item is in the PROCESS_LIST -# times = run all wrappers in the PROCESS_LIST for a single run time, then -# increment the run time and run all wrappers again until all times have -# been evaluated. -# processes = run the first wrapper in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST until all -# wrappers have been run -# -LOOP_ORDER = times -# -# Specify the name of the METplus log file. -# -LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} -# -# Specify the location and name of the final METplus conf file. -# -METPLUS_CONF = {GEN_ENS_PROD_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}} -# -# Name to identify model (forecast) data in output. -# -MODEL = {{vx_fcst_model_name}} - -GEN_ENS_PROD_DESC = NA -# -# Name to identify observation data in output. -# -OBTYPE = {{obtype}} -# -# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. -# -LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} - -### -# File I/O -### - -# -# Forecast model input directory for GenEnsProd. -# -GEN_ENS_PROD_INPUT_DIR = {{fcst_input_dir}} -# -# Template for forecast input to GenEnsProd relative to -# GEN_ENS_PROD_INPUT_DIR. -# -# Note that this can be a comma separated list of ensemble members -# or a single line, - filename wildcard characters may be used, ? or *. 
-# -GEN_ENS_PROD_INPUT_TEMPLATE = {{fcst_input_fn_template}} - -# GEN_ENS_PROD_CTRL_INPUT_DIR = {INPUT_BASE} -# GEN_ENS_PROD_CTRL_INPUT_TEMPLATE = -# {init?fmt=%Y%m%d%H}/mem1/postprd/{ENV[NET]}.t{init?fmt=%H}z.bgdawpf{lead?fmt=%HHH}.tm{init?fmt=%H}.grib2 - -# -# Output directory for GenEnsProd. -# -# OUTPUT_BASE apparently has to be set to something; it cannot be left -# to its default value. But it is not explicitly used elsewhere in this -# configuration file. -# -OUTPUT_BASE = {{output_base}} -GEN_ENS_PROD_OUTPUT_DIR = {{output_dir}} -# -# Template for output from GenEnsProd relative to GEN_ENS_PROD_OUTPUT_DIR. -# -GEN_ENS_PROD_OUTPUT_TEMPLATE = gen_ens_prod_{MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE}_{lead?fmt=%H%M%S}L_{valid?fmt=%Y%m%d_%H%M%S}V.nc -# -# Directory for staging data. -# -STAGING_DIR = {{staging_dir}} -# -# There are n ensembles but 1 is used as control, so specify n-1 members. -# -GEN_ENS_PROD_N_MEMBERS = {{num_ens_members}} - -### -# Field Info -### - -# -# Ensemble variables and levels as specified in the ens field dictionary -# of the MET configuration file. Specify as ENS_VARn_NAME, ENS_VARn_LEVELS, -# (optional) ENS_VARn_OPTION -# -ENS_VAR1_NAME = {{fieldname_in_met_output}}_{{accum_hh}} -ENS_VAR1_LEVELS = A{{accum_hh}} -ENS_VAR1_THRESH = {{field_thresholds}} - -### -# GenEnsProd -### - -# GEN_ENS_PROD_REGRID_TO_GRID = NONE -# GEN_ENS_PROD_REGRID_METHOD = NEAREST -# GEN_ENS_PROD_REGRID_WIDTH = 1 -# GEN_ENS_PROD_REGRID_VLD_THRESH = 0.5 -# GEN_ENS_PROD_REGRID_SHAPE = SQUARE - -# GEN_ENS_PROD_CENSOR_THRESH = -# GEN_ENS_PROD_CENSOR_VAL = -# GEN_ENS_PROD_CAT_THRESH = -# GEN_ENS_PROD_NC_VAR_STR = - -# Threshold for ratio of valid files to expected files to allow app to run -GEN_ENS_PROD_ENS_THRESH = 0.05 - -GEN_ENS_PROD_NBRHD_PROB_WIDTH = 27 -GEN_ENS_PROD_NBRHD_PROB_SHAPE = CIRCLE -GEN_ENS_PROD_NBRHD_PROB_VLD_THRESH = 0.0 - -# GEN_ENS_PROD_NMEP_SMOOTH_VLD_THRESH = 0.0 -# GEN_ENS_PROD_NMEP_SMOOTH_SHAPE = CIRCLE -# GEN_ENS_PROD_NMEP_SMOOTH_GAUSSIAN_DX = 81.27 -# GEN_ENS_PROD_NMEP_SMOOTH_GAUSSIAN_RADIUS = 120 -# GEN_ENS_PROD_NMEP_SMOOTH_METHOD = GAUSSIAN -# GEN_ENS_PROD_NMEP_SMOOTH_WIDTH = 1 - -# GEN_ENS_PROD_CLIMO_MEAN_FILE_NAME = -# GEN_ENS_PROD_CLIMO_MEAN_FIELD = -# GEN_ENS_PROD_CLIMO_MEAN_REGRID_METHOD = -# GEN_ENS_PROD_CLIMO_MEAN_REGRID_WIDTH = -# GEN_ENS_PROD_CLIMO_MEAN_REGRID_VLD_THRESH = -# GEN_ENS_PROD_CLIMO_MEAN_REGRID_SHAPE = -# GEN_ENS_PROD_CLIMO_MEAN_TIME_INTERP_METHOD = -# GEN_ENS_PROD_CLIMO_MEAN_MATCH_MONTH = -# GEN_ENS_PROD_CLIMO_MEAN_DAY_INTERVAL = 31 -# GEN_ENS_PROD_CLIMO_MEAN_HOUR_INTERVAL = 6 - -# GEN_ENS_PROD_CLIMO_STDEV_FILE_NAME = -# GEN_ENS_PROD_CLIMO_STDEV_FIELD = -# GEN_ENS_PROD_CLIMO_STDEV_REGRID_METHOD = -# GEN_ENS_PROD_CLIMO_STDEV_REGRID_WIDTH = -# GEN_ENS_PROD_CLIMO_STDEV_REGRID_VLD_THRESH = -# GEN_ENS_PROD_CLIMO_STDEV_REGRID_SHAPE = -# GEN_ENS_PROD_CLIMO_STDEV_TIME_INTERP_METHOD = -# GEN_ENS_PROD_CLIMO_STDEV_MATCH_MONTH = -# GEN_ENS_PROD_CLIMO_STDEV_DAY_INTERVAL = 31 -# GEN_ENS_PROD_CLIMO_STDEV_HOUR_INTERVAL = 6 - -GEN_ENS_PROD_ENSEMBLE_FLAG_LATLON = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_MEAN = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_STDEV = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_MINUS = FALSE -GEN_ENS_PROD_ENSEMBLE_FLAG_PLUS = FALSE -GEN_ENS_PROD_ENSEMBLE_FLAG_MIN = FALSE -GEN_ENS_PROD_ENSEMBLE_FLAG_MAX = FALSE -GEN_ENS_PROD_ENSEMBLE_FLAG_RANGE = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_VLD_COUNT = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_FREQUENCY = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_NEP = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_NMEP = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_CLIMO = 
FALSE -GEN_ENS_PROD_ENSEMBLE_FLAG_CLIMO_CDF = FALSE - -# GEN_ENS_PROD_ENS_MEMBER_IDS = -# GEN_ENS_PROD_CONTROL_ID = diff --git a/parm/metplus/GenEnsProd_REFC.conf b/parm/metplus/GenEnsProd_REFC.conf deleted file mode 100644 index 553c23f69e..0000000000 --- a/parm/metplus/GenEnsProd_REFC.conf +++ /dev/null @@ -1,191 +0,0 @@ -# GenEnsProd METplus Configuration - -[config] - -# List of applications (tools) to run. -PROCESS_LIST = GenEnsProd - -# time looping - options are INIT, VALID, RETRO, and REALTIME -# If set to INIT or RETRO: -# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set -# If set to VALID or REALTIME: -# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set -LOOP_BY = INIT - -# Format of INIT_BEG and INIT_END using % items -# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. -# see www.strftime.org for more information -# %Y%m%d%H expands to YYYYMMDDHH -INIT_TIME_FMT = %Y%m%d%H - -# Start time for METplus run - must match INIT_TIME_FMT -INIT_BEG = {{cdate}} - -# End time for METplus run - must match INIT_TIME_FMT -INIT_END = {{cdate}} - -# Increment between METplus runs (in seconds if no units are specified). -# Must be >= 60 seconds. -INIT_INCREMENT = 3600 - -# List of forecast leads to process for each run time (init or valid) -# In hours if units are not specified -# If unset, defaults to 0 (don't loop through forecast leads) -LEAD_SEQ = {{fhr_list}} -# -# Order of loops to process data - Options are times, processes -# Not relevant if only one item is in the PROCESS_LIST -# times = run all wrappers in the PROCESS_LIST for a single run time, then -# increment the run time and run all wrappers again until all times have -# been evaluated. -# processes = run the first wrapper in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST until all -# wrappers have been run -# -LOOP_ORDER = times -# -# Specify the name of the METplus log file. -# -LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} -# -# Specify the location and name of the final METplus conf file. -# -METPLUS_CONF = {GEN_ENS_PROD_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}} -# -# Name to identify model (forecast) data in output. -# -MODEL = {{vx_fcst_model_name}} - -GEN_ENS_PROD_DESC = NA -# -# Name to identify observation data in output. -# -OBTYPE = {{obtype}} -# -# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. -# -LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} - -### -# File I/O -### - -# -# Forecast model input directory for GenEnsProd. -# -GEN_ENS_PROD_INPUT_DIR = {{fcst_input_dir}} -# -# Template for forecast input to GenEnsProd relative to -# GEN_ENS_PROD_INPUT_DIR. -# -# Note that this can be a comma separated list of ensemble members -# or a single line, - filename wildcard characters may be used, ? or *. -# -GEN_ENS_PROD_INPUT_TEMPLATE = {{fcst_input_fn_template}} - -# GEN_ENS_PROD_CTRL_INPUT_DIR = {INPUT_BASE} -# GEN_ENS_PROD_CTRL_INPUT_TEMPLATE = -# {init?fmt=%Y%m%d%H}/mem1/postprd/{ENV[NET]}.t{init?fmt=%H}z.bgdawpf{lead?fmt=%HHH}.tm{init?fmt=%H}.grib2 - -# -# Output directory for GenEnsProd. -# -# OUTPUT_BASE apparently has to be set to something; it cannot be left -# to its default value. But it is not explicitly used elsewhere in this -# configuration file. -# -OUTPUT_BASE = {{output_base}} -GEN_ENS_PROD_OUTPUT_DIR = {{output_dir}} -# -# Template for output from GenEnsProd relative to GEN_ENS_PROD_OUTPUT_DIR. 
-# -GEN_ENS_PROD_OUTPUT_TEMPLATE = gen_ens_prod_{MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE}_{lead?fmt=%H%M%S}L_{valid?fmt=%Y%m%d_%H%M%S}V.nc -# -# Directory for staging data. -# -STAGING_DIR = {{staging_dir}} -# -# There are n ensembles but 1 is used as control, so specify n-1 members. -# -GEN_ENS_PROD_N_MEMBERS = {{num_ens_members}} - -### -# Field Info -### -# -# Ensemble variables and levels as specified in the ens field dictionary -# of the MET configuration file. Specify as ENS_VARn_NAME, ENS_VARn_LEVELS, -# (optional) ENS_VARn_OPTION -# -ENS_VAR1_NAME = {{fieldname_in_fcst_input}} -ENS_VAR1_LEVELS = L0 -ENS_VAR1_THRESH = {{field_thresholds}} - -### -# GenEnsProd -### - -# GEN_ENS_PROD_REGRID_TO_GRID = NONE -# GEN_ENS_PROD_REGRID_METHOD = NEAREST -# GEN_ENS_PROD_REGRID_WIDTH = 1 -# GEN_ENS_PROD_REGRID_VLD_THRESH = 0.5 -# GEN_ENS_PROD_REGRID_SHAPE = SQUARE - -# GEN_ENS_PROD_CENSOR_THRESH = -# GEN_ENS_PROD_CENSOR_VAL = -# GEN_ENS_PROD_CAT_THRESH = -# GEN_ENS_PROD_NC_VAR_STR = - -# Threshold for ratio of valid files to expected files to allow app to run -GEN_ENS_PROD_ENS_THRESH = 0.05 - -GEN_ENS_PROD_NBRHD_PROB_WIDTH = 27 -GEN_ENS_PROD_NBRHD_PROB_SHAPE = CIRCLE -GEN_ENS_PROD_NBRHD_PROB_VLD_THRESH = 0.0 - -# GEN_ENS_PROD_NMEP_SMOOTH_VLD_THRESH = 0.0 -# GEN_ENS_PROD_NMEP_SMOOTH_SHAPE = CIRCLE -# GEN_ENS_PROD_NMEP_SMOOTH_GAUSSIAN_DX = 81.27 -# GEN_ENS_PROD_NMEP_SMOOTH_GAUSSIAN_RADIUS = 120 -# GEN_ENS_PROD_NMEP_SMOOTH_METHOD = GAUSSIAN -# GEN_ENS_PROD_NMEP_SMOOTH_WIDTH = 1 - -# GEN_ENS_PROD_CLIMO_MEAN_FILE_NAME = -# GEN_ENS_PROD_CLIMO_MEAN_FIELD = -# GEN_ENS_PROD_CLIMO_MEAN_REGRID_METHOD = -# GEN_ENS_PROD_CLIMO_MEAN_REGRID_WIDTH = -# GEN_ENS_PROD_CLIMO_MEAN_REGRID_VLD_THRESH = -# GEN_ENS_PROD_CLIMO_MEAN_REGRID_SHAPE = -# GEN_ENS_PROD_CLIMO_MEAN_TIME_INTERP_METHOD = -# GEN_ENS_PROD_CLIMO_MEAN_MATCH_MONTH = -# GEN_ENS_PROD_CLIMO_MEAN_DAY_INTERVAL = 31 -# GEN_ENS_PROD_CLIMO_MEAN_HOUR_INTERVAL = 6 - -# GEN_ENS_PROD_CLIMO_STDEV_FILE_NAME = -# GEN_ENS_PROD_CLIMO_STDEV_FIELD = -# GEN_ENS_PROD_CLIMO_STDEV_REGRID_METHOD = -# GEN_ENS_PROD_CLIMO_STDEV_REGRID_WIDTH = -# GEN_ENS_PROD_CLIMO_STDEV_REGRID_VLD_THRESH = -# GEN_ENS_PROD_CLIMO_STDEV_REGRID_SHAPE = -# GEN_ENS_PROD_CLIMO_STDEV_TIME_INTERP_METHOD = -# GEN_ENS_PROD_CLIMO_STDEV_MATCH_MONTH = -# GEN_ENS_PROD_CLIMO_STDEV_DAY_INTERVAL = 31 -# GEN_ENS_PROD_CLIMO_STDEV_HOUR_INTERVAL = 6 - -GEN_ENS_PROD_ENSEMBLE_FLAG_LATLON = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_MEAN = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_STDEV = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_MINUS = FALSE -GEN_ENS_PROD_ENSEMBLE_FLAG_PLUS = FALSE -GEN_ENS_PROD_ENSEMBLE_FLAG_MIN = FALSE -GEN_ENS_PROD_ENSEMBLE_FLAG_MAX = FALSE -GEN_ENS_PROD_ENSEMBLE_FLAG_RANGE = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_VLD_COUNT = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_FREQUENCY = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_NEP = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_NMEP = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_CLIMO = FALSE -GEN_ENS_PROD_ENSEMBLE_FLAG_CLIMO_CDF = FALSE - -# GEN_ENS_PROD_ENS_MEMBER_IDS = -# GEN_ENS_PROD_CONTROL_ID = diff --git a/parm/metplus/GenEnsProd_RETOP.conf b/parm/metplus/GenEnsProd_RETOP.conf deleted file mode 100644 index 49e5e5c3b6..0000000000 --- a/parm/metplus/GenEnsProd_RETOP.conf +++ /dev/null @@ -1,192 +0,0 @@ -# GenEnsProd METplus Configuration - -[config] - -# List of applications (tools) to run. 
-PROCESS_LIST = GenEnsProd - -# time looping - options are INIT, VALID, RETRO, and REALTIME -# If set to INIT or RETRO: -# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set -# If set to VALID or REALTIME: -# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set -LOOP_BY = INIT - -# Format of INIT_BEG and INIT_END using % items -# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. -# see www.strftime.org for more information -# %Y%m%d%H expands to YYYYMMDDHH -INIT_TIME_FMT = %Y%m%d%H - -# Start time for METplus run - must match INIT_TIME_FMT -INIT_BEG = {{cdate}} - -# End time for METplus run - must match INIT_TIME_FMT -INIT_END = {{cdate}} - -# Increment between METplus runs (in seconds if no units are specified). -# Must be >= 60 seconds. -INIT_INCREMENT = 3600 - -# List of forecast leads to process for each run time (init or valid) -# In hours if units are not specified -# If unset, defaults to 0 (don't loop through forecast leads) -LEAD_SEQ = {{fhr_list}} -# -# Order of loops to process data - Options are times, processes -# Not relevant if only one item is in the PROCESS_LIST -# times = run all wrappers in the PROCESS_LIST for a single run time, then -# increment the run time and run all wrappers again until all times have -# been evaluated. -# processes = run the first wrapper in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST until all -# wrappers have been run -# -LOOP_ORDER = times -# -# Specify the name of the METplus log file. -# -LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} -# -# Specify the location and name of the final METplus conf file. -# -METPLUS_CONF = {GEN_ENS_PROD_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}} -# -# Name to identify model (forecast) data in output. -# -MODEL = {{vx_fcst_model_name}} - -GEN_ENS_PROD_DESC = NA -# -# Name to identify observation data in output. -# -OBTYPE = {{obtype}} -# -# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. -# -LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} - -### -# File I/O -### - -# -# Forecast model input directory for GenEnsProd. -# -GEN_ENS_PROD_INPUT_DIR = {{fcst_input_dir}} -# -# Template for forecast input to GenEnsProd relative to -# GEN_ENS_PROD_INPUT_DIR. -# -# Note that this can be a comma separated list of ensemble members -# or a single line, - filename wildcard characters may be used, ? or *. -# -GEN_ENS_PROD_INPUT_TEMPLATE = {{fcst_input_fn_template}} - -# GEN_ENS_PROD_CTRL_INPUT_DIR = {INPUT_BASE} -# GEN_ENS_PROD_CTRL_INPUT_TEMPLATE = -# {init?fmt=%Y%m%d%H}/mem1/postprd/{ENV[NET]}.t{init?fmt=%H}z.bgdawpf{lead?fmt=%HHH}.tm{init?fmt=%H}.grib2 - -# -# Output directory for GenEnsProd. -# -# OUTPUT_BASE apparently has to be set to something; it cannot be left -# to its default value. But it is not explicitly used elsewhere in this -# configuration file. -# -OUTPUT_BASE = {{output_base}} -GEN_ENS_PROD_OUTPUT_DIR = {{output_dir}} -# -# Template for output from GenEnsProd relative to GEN_ENS_PROD_OUTPUT_DIR. -# -GEN_ENS_PROD_OUTPUT_TEMPLATE = gen_ens_prod_{MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE}_{lead?fmt=%H%M%S}L_{valid?fmt=%Y%m%d_%H%M%S}V.nc -# -# Directory for staging data. -# -STAGING_DIR = {{staging_dir}} -# -# There are n ensembles but 1 is used as control, so specify n-1 members. -# -GEN_ENS_PROD_N_MEMBERS = {{num_ens_members}} - -### -# Field Info -### -# -# Ensemble variables and levels as specified in the ens field dictionary -# of the MET configuration file. 
Specify as ENS_VARn_NAME, ENS_VARn_LEVELS, -# (optional) ENS_VARn_OPTION -# -ENS_VAR1_NAME = {{fieldname_in_fcst_input}} -ENS_VAR1_LEVELS = L0 -ENS_VAR1_THRESH = {{field_thresholds}} -ENS_VAR1_OPTIONS = convert(x) = x * 3.28084 * 0.001; ;; Convert from meters to kilofeet. - -### -# GenEnsProd -### - -# GEN_ENS_PROD_REGRID_TO_GRID = NONE -# GEN_ENS_PROD_REGRID_METHOD = NEAREST -# GEN_ENS_PROD_REGRID_WIDTH = 1 -# GEN_ENS_PROD_REGRID_VLD_THRESH = 0.5 -# GEN_ENS_PROD_REGRID_SHAPE = SQUARE - -# GEN_ENS_PROD_CENSOR_THRESH = -# GEN_ENS_PROD_CENSOR_VAL = -# GEN_ENS_PROD_CAT_THRESH = -# GEN_ENS_PROD_NC_VAR_STR = - -# Threshold for ratio of valid files to expected files to allow app to run -GEN_ENS_PROD_ENS_THRESH = 0.05 - -GEN_ENS_PROD_NBRHD_PROB_WIDTH = 27 -GEN_ENS_PROD_NBRHD_PROB_SHAPE = CIRCLE -GEN_ENS_PROD_NBRHD_PROB_VLD_THRESH = 0.0 - -# GEN_ENS_PROD_NMEP_SMOOTH_VLD_THRESH = 0.0 -# GEN_ENS_PROD_NMEP_SMOOTH_SHAPE = CIRCLE -# GEN_ENS_PROD_NMEP_SMOOTH_GAUSSIAN_DX = 81.27 -# GEN_ENS_PROD_NMEP_SMOOTH_GAUSSIAN_RADIUS = 120 -# GEN_ENS_PROD_NMEP_SMOOTH_METHOD = GAUSSIAN -# GEN_ENS_PROD_NMEP_SMOOTH_WIDTH = 1 - -# GEN_ENS_PROD_CLIMO_MEAN_FILE_NAME = -# GEN_ENS_PROD_CLIMO_MEAN_FIELD = -# GEN_ENS_PROD_CLIMO_MEAN_REGRID_METHOD = -# GEN_ENS_PROD_CLIMO_MEAN_REGRID_WIDTH = -# GEN_ENS_PROD_CLIMO_MEAN_REGRID_VLD_THRESH = -# GEN_ENS_PROD_CLIMO_MEAN_REGRID_SHAPE = -# GEN_ENS_PROD_CLIMO_MEAN_TIME_INTERP_METHOD = -# GEN_ENS_PROD_CLIMO_MEAN_MATCH_MONTH = -# GEN_ENS_PROD_CLIMO_MEAN_DAY_INTERVAL = 31 -# GEN_ENS_PROD_CLIMO_MEAN_HOUR_INTERVAL = 6 - -# GEN_ENS_PROD_CLIMO_STDEV_FILE_NAME = -# GEN_ENS_PROD_CLIMO_STDEV_FIELD = -# GEN_ENS_PROD_CLIMO_STDEV_REGRID_METHOD = -# GEN_ENS_PROD_CLIMO_STDEV_REGRID_WIDTH = -# GEN_ENS_PROD_CLIMO_STDEV_REGRID_VLD_THRESH = -# GEN_ENS_PROD_CLIMO_STDEV_REGRID_SHAPE = -# GEN_ENS_PROD_CLIMO_STDEV_TIME_INTERP_METHOD = -# GEN_ENS_PROD_CLIMO_STDEV_MATCH_MONTH = -# GEN_ENS_PROD_CLIMO_STDEV_DAY_INTERVAL = 31 -# GEN_ENS_PROD_CLIMO_STDEV_HOUR_INTERVAL = 6 - -GEN_ENS_PROD_ENSEMBLE_FLAG_LATLON = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_MEAN = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_STDEV = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_MINUS = FALSE -GEN_ENS_PROD_ENSEMBLE_FLAG_PLUS = FALSE -GEN_ENS_PROD_ENSEMBLE_FLAG_MIN = FALSE -GEN_ENS_PROD_ENSEMBLE_FLAG_MAX = FALSE -GEN_ENS_PROD_ENSEMBLE_FLAG_RANGE = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_VLD_COUNT = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_FREQUENCY = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_NEP = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_NMEP = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_CLIMO = FALSE -GEN_ENS_PROD_ENSEMBLE_FLAG_CLIMO_CDF = FALSE - -# GEN_ENS_PROD_ENS_MEMBER_IDS = -# GEN_ENS_PROD_CONTROL_ID = diff --git a/parm/metplus/GridStat_APCP.conf b/parm/metplus/GridStat_APCP.conf deleted file mode 100644 index 51e5125951..0000000000 --- a/parm/metplus/GridStat_APCP.conf +++ /dev/null @@ -1,309 +0,0 @@ -# GridStat METplus Configuration - -[config] - -# List of applications (tools) to run. -PROCESS_LIST = GridStat - -# time looping - options are INIT, VALID, RETRO, and REALTIME -# If set to INIT or RETRO: -# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set -# If set to VALID or REALTIME: -# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set -LOOP_BY = INIT - -# Format of INIT_BEG and INIT_END using % items -# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. 
-# see www.strftime.org for more information -# %Y%m%d%H expands to YYYYMMDDHH -INIT_TIME_FMT = %Y%m%d%H - -# Start time for METplus run - must match INIT_TIME_FMT -INIT_BEG = {{cdate}} - -# End time for METplus run - must match INIT_TIME_FMT -INIT_END = {{cdate}} - -# Increment between METplus runs (in seconds if no units are specified). -# Must be >= 60 seconds. -INIT_INCREMENT = 3600 - -# List of forecast leads to process for each run time (init or valid) -# In hours if units are not specified -# If unset, defaults to 0 (don't loop through forecast leads) -LEAD_SEQ = {{fhr_list}} -# -# Order of loops to process data - Options are times, processes -# Not relevant if only one item is in the PROCESS_LIST -# times = run all wrappers in the PROCESS_LIST for a single run time, then -# increment the run time and run all wrappers again until all times have -# been evaluated. -# processes = run the first wrapper in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST until all -# wrappers have been run -# -LOOP_ORDER = times -# -# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. -# -LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} -# -# Specify the name of the METplus log file. -# -LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} -# -# Specify the location and name of the final METplus conf file. -# -METPLUS_CONF = {GRID_STAT_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}} -# -# Location of MET configuration file to pass to GridStat. -# -# References PARM_BASE, which is the location of the parm directory -# corresponding to the ush directory of the run_metplus.py script that -# is called or the value of the environment variable METPLUS_PARM_BASE -# if set. -# -GRID_STAT_CONFIG_FILE = {PARM_BASE}/met_config/GridStatConfig_wrapped - -# grid to remap data. Value is set as the 'to_grid' variable in the 'regrid' dictionary -# See MET User's Guide for more information -GRID_STAT_REGRID_TO_GRID = FCST -GRID_STAT_REGRID_VLD_THRESH = 0.5 -GRID_STAT_REGRID_METHOD = BUDGET -GRID_STAT_REGRID_WIDTH = 2 -GRID_STAT_REGRID_SHAPE = SQUARE - -#GRID_STAT_INTERP_FIELD = BOTH -#GRID_STAT_INTERP_VLD_THRESH = 1.0 -#GRID_STAT_INTERP_SHAPE = SQUARE -#GRID_STAT_INTERP_TYPE_METHOD = NEAREST -#GRID_STAT_INTERP_TYPE_WIDTH = 1 - -#GRID_STAT_GRID_WEIGHT_FLAG = -# -# Name to identify model (forecast) data in output. -# -# The variable MODEL is recorded in the stat files, and the data in -# these files is then plotted (e.g. using METViewer). Here, we add a -# suffix to MODEL that identifies the forecast ensemble member. This -# makes it easier to identify each curve. -# -MODEL = {{vx_fcst_model_name}}_{{ensmem_name}} -# -# Name to identify observation data in output. -# -OBTYPE = {{obtype}} -# -# Value to enter under the DESC column in the output stat file. Here, -# we store the value of the original lead in this column, i.e. the lead -# with zero corresponding to the actual start time of the forecast (which -# is (cdate - time_lag)), not to cdate. This is just the lead in -# LEAD_SEQ with the time lag (time_lag) of the current forecast member -# added on. -# -# Uncomment this line only after upgrading to METplus 5.x. 
-#GRID_STAT_DESC = {lead?fmt=%H%M%S?shift={{time_lag}}} -GRID_STAT_DESC = NA - -# List of variables to compare in GridStat - FCST_VAR1 variables correspond -# to OBS_VAR1 variables -# Note [FCST/OBS/BOTH]_GRID_STAT_VAR_NAME can be used instead if different evaluations -# are needed for different tools - -GRID_STAT_MET_CONFIG_OVERRIDES = cat_thresh = [NA]; cnt_thresh = [NA]; cnt_logic = UNION; wind_thresh = [NA]; wind_logic = UNION; ci_alpha = [0.05]; rank_corr_flag = FALSE; -# -# List of forecast and corresponding observation fields to process. -# -# Note on use of set_attr_lead and ensemble member time-lagging: -# ------------------------------------------------------------- -# The set_attr_lead parameter appearing below in [FCST|OBS]_VAR_OPTIONS -# specifies the lead to use both in naming of the output .stat and .nc -# files and for setting the lead values contained in those files. This -# option causes MET/METplus to use the lead values in the variable LEAD_SEQ -# set above, which are the same for all ensemble forecast members (i.e. -# regardless of whether members are time lagged with respect to the -# nominal cycle date specified by cdate). If set_attr_lead were not -# specified as below, then MET/METplus would get the lead from the input -# forecast file, and that would in general differ from one ensemble member -# to the next depending on whether the member is time-lagged. That would -# cause confusion, so here, we always use lead values with zero lead -# corresponding to the nominal cdate. -# -FCST_VAR1_NAME = {{fieldname_in_met_output}}_{{accum_hh}} -FCST_VAR1_LEVELS = A{{accum_hh}} -FCST_VAR1_THRESH = {{field_thresholds}} -FCST_VAR1_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}"; -OBS_VAR1_NAME = {{fieldname_in_met_output}}_{{accum_hh}} -OBS_VAR1_LEVELS = A{{accum_hh}} -OBS_VAR1_THRESH = {{field_thresholds}} - -# -# Forecast data time window(s). -# -FCST_GRID_STAT_FILE_WINDOW_BEGIN = 0 -FCST_GRID_STAT_FILE_WINDOW_END = 0 -# -# Observation data time window(s). -# -OBS_GRID_STAT_FILE_WINDOW_BEGIN = 0 -OBS_GRID_STAT_FILE_WINDOW_END = 0 - -# MET GridStat neighborhood values -# See the MET User's Guide GridStat section for more information -GRID_STAT_NEIGHBORHOOD_FIELD = BOTH - -# width value passed to nbrhd dictionary in the MET config file -GRID_STAT_NEIGHBORHOOD_WIDTH = 3,5,7 - -# shape value passed to nbrhd dictionary in the MET config file -GRID_STAT_NEIGHBORHOOD_SHAPE = SQUARE - -# cov thresh list passed to nbrhd dictionary in the MET config file -GRID_STAT_NEIGHBORHOOD_COV_THRESH = >=0.5 - -# Set to true to run GridStat separately for each field specified -# Set to false to create one run of GridStat per run time that -# includes all fields specified. -GRID_STAT_ONCE_PER_FIELD = False -# -# Set to true if forecast data is probabilistic. 
-# -FCST_IS_PROB = False -# -# Only used if FCST_IS_PROB is true - sets probabilistic threshold -# -FCST_GRID_STAT_PROB_THRESH = ==0.1 - -GRID_STAT_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE} - -# Climatology data -#GRID_STAT_CLIMO_MEAN_FILE_NAME = -#GRID_STAT_CLIMO_MEAN_FIELD = -#GRID_STAT_CLIMO_MEAN_REGRID_METHOD = -#GRID_STAT_CLIMO_MEAN_REGRID_WIDTH = -#GRID_STAT_CLIMO_MEAN_REGRID_VLD_THRESH = -#GRID_STAT_CLIMO_MEAN_REGRID_SHAPE = -#GRID_STAT_CLIMO_MEAN_TIME_INTERP_METHOD = -#GRID_STAT_CLIMO_MEAN_MATCH_MONTH = -#GRID_STAT_CLIMO_MEAN_DAY_INTERVAL = -#GRID_STAT_CLIMO_MEAN_HOUR_INTERVAL = - -#GRID_STAT_CLIMO_STDEV_FILE_NAME = -#GRID_STAT_CLIMO_STDEV_FIELD = -#GRID_STAT_CLIMO_STDEV_REGRID_METHOD = -#GRID_STAT_CLIMO_STDEV_REGRID_WIDTH = -#GRID_STAT_CLIMO_STDEV_REGRID_VLD_THRESH = -#GRID_STAT_CLIMO_STDEV_REGRID_SHAPE = -#GRID_STAT_CLIMO_STDEV_TIME_INTERP_METHOD = -#GRID_STAT_CLIMO_STDEV_MATCH_MONTH = -#GRID_STAT_CLIMO_STDEV_DAY_INTERVAL = -#GRID_STAT_CLIMO_STDEV_HOUR_INTERVAL = - -#GRID_STAT_CLIMO_CDF_BINS = 1 -#GRID_STAT_CLIMO_CDF_CENTER_BINS = False -#GRID_STAT_CLIMO_CDF_WRITE_BINS = True - -GRID_STAT_MASK_GRID = - -# Statistical output types -GRID_STAT_OUTPUT_FLAG_FHO = STAT -GRID_STAT_OUTPUT_FLAG_CTC = STAT -GRID_STAT_OUTPUT_FLAG_CTS = STAT -#GRID_STAT_OUTPUT_FLAG_MCTC = NONE -#GRID_STAT_OUTPUT_FLAG_MCTS = NONE -GRID_STAT_OUTPUT_FLAG_CNT = STAT -#GRID_STAT_OUTPUT_FLAG_SL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_SAL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_VL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_VAL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_VCNT = NONE -#GRID_STAT_OUTPUT_FLAG_PCT = NONE -#GRID_STAT_OUTPUT_FLAG_PSTD = NONE -#GRID_STAT_OUTPUT_FLAG_PJC = NONE -#GRID_STAT_OUTPUT_FLAG_PRC = NONE -#GRID_STAT_OUTPUT_FLAG_ECLV = BOTH -GRID_STAT_OUTPUT_FLAG_NBRCTC = STAT -GRID_STAT_OUTPUT_FLAG_NBRCTS = STAT -GRID_STAT_OUTPUT_FLAG_NBRCNT = STAT -#GRID_STAT_OUTPUT_FLAG_GRAD = BOTH -#GRID_STAT_OUTPUT_FLAG_DMAP = NONE - -# NetCDF matched pairs output file -#GRID_STAT_NC_PAIRS_VAR_NAME = -GRID_STAT_NC_PAIRS_FLAG_LATLON = FALSE -GRID_STAT_NC_PAIRS_FLAG_RAW = FALSE -GRID_STAT_NC_PAIRS_FLAG_DIFF = FALSE -GRID_STAT_NC_PAIRS_FLAG_CLIMO = FALSE -#GRID_STAT_NC_PAIRS_FLAG_CLIMO_CDP = FALSE -GRID_STAT_NC_PAIRS_FLAG_WEIGHT = FALSE -GRID_STAT_NC_PAIRS_FLAG_NBRHD = FALSE -#GRID_STAT_NC_PAIRS_FLAG_FOURIER = FALSE -#GRID_STAT_NC_PAIRS_FLAG_GRADIENT = FALSE -#GRID_STAT_NC_PAIRS_FLAG_DISTANCE_MAP = FALSE -GRID_STAT_NC_PAIRS_FLAG_APPLY_MASK = FALSE - -# End of [config] section and start of [dir] section. -[dir] -# -# Directory containing observation input to GridStat. -# -OBS_GRID_STAT_INPUT_DIR = {{obs_input_dir}} -# -# Directory containing forecast input to GridStat. -# -FCST_GRID_STAT_INPUT_DIR = {{fcst_input_dir}} -# -# Directory containing climatology mean input to GridStat. Not used in -# this example. -# -GRID_STAT_CLIMO_MEAN_INPUT_DIR = -# -# Directory containing climatology mean input to GridStat. Not used in -# this example. -# -GRID_STAT_CLIMO_STDEV_INPUT_DIR = -# -# Directory in which to write output from GridStat. -# -# OUTPUT_BASE apparently has to be set to something; it cannot be left -# to its default value. But it is not explicitly used elsewhere in this -# configuration file. -# -OUTPUT_BASE = {{output_base}} -GRID_STAT_OUTPUT_DIR = {{output_dir}} -# -# Directory for staging data. -# -STAGING_DIR = {{staging_dir}} - -# End of [dir] section and start of [filename_templates] section. -[filename_templates] -# -# Template for observation input to GridStat relative to -# OBS_GRID_STAT_INPUT_DIR. 
-# -OBS_GRID_STAT_INPUT_TEMPLATE = {{obs_input_fn_template}} -# -# Template for forecast input to GridStat relative to -# FCST_GRID_STAT_INPUT_DIR. -# -FCST_GRID_STAT_INPUT_TEMPLATE = {{fcst_input_fn_template}} -# -# Template for output from GridStat relative to GRID_STAT_OUTPUT_DIR. -# -GRID_STAT_OUTPUT_TEMPLATE = -# -# Template for climatology input to GridStat relative to -# GRID_STAT_CLIMO_MEAN_INPUT_DIR. Not used in this example. -# -GRID_STAT_CLIMO_MEAN_INPUT_TEMPLATE = -# -# Template for climatology input to GridStat relative to -# GRID_STAT_CLIMO_STDEV_INPUT_DIR. Not used in this example. -# -GRID_STAT_CLIMO_STDEV_INPUT_TEMPLATE = -# -# Variable used to specify one or more verification mask files for -# GridStat. Not used for this example. -# -GRID_STAT_VERIFICATION_MASK_TEMPLATE = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly diff --git a/parm/metplus/GridStat_ASNOW.conf b/parm/metplus/GridStat_ASNOW.conf deleted file mode 100644 index 3960a10c30..0000000000 --- a/parm/metplus/GridStat_ASNOW.conf +++ /dev/null @@ -1,283 +0,0 @@ -# GridStat METplus Configuration - -[config] - -# List of applications (tools) to run. -PROCESS_LIST = GridStat - -# time looping - options are INIT, VALID, RETRO, and REALTIME -# If set to INIT or RETRO: -# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set -# If set to VALID or REALTIME: -# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set -LOOP_BY = INIT - -# Format of INIT_BEG and INIT_END using % items -# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. -# see www.strftime.org for more information -# %Y%m%d%H expands to YYYYMMDDHH -INIT_TIME_FMT = %Y%m%d%H - -# Start time for METplus run - must match INIT_TIME_FMT -INIT_BEG = {{cdate}} - -# End time for METplus run - must match INIT_TIME_FMT -INIT_END = {{cdate}} - -# Increment between METplus runs (in seconds if no units are specified). -# Must be >= 60 seconds. -INIT_INCREMENT = 3600 - -# List of forecast leads to process for each run time (init or valid) -# In hours if units are not specified -# If unset, defaults to 0 (don't loop through forecast leads) -LEAD_SEQ = {{fhr_list}} -# -# Order of loops to process data - Options are times, processes -# Not relevant if only one item is in the PROCESS_LIST -# times = run all wrappers in the PROCESS_LIST for a single run time, then -# increment the run time and run all wrappers again until all times have -# been evaluated. -# processes = run the first wrapper in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST until all -# wrappers have been run -# -LOOP_ORDER = times -# -# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. -# -LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} -# -# Specify the name of the METplus log file. -# -LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} -# -# Specify the location and name of the final METplus conf file. -# -METPLUS_CONF = {GRID_STAT_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}} -# -# Location of MET config file to pass to GridStat. -# -GRID_STAT_CONFIG_FILE = {PARM_BASE}/met_config/GridStatConfig_wrapped - -# grid to remap data. 
Value is set as the 'to_grid' variable in the 'regrid' dictionary -# See MET User's Guide for more information -GRID_STAT_REGRID_TO_GRID = FCST -GRID_STAT_REGRID_VLD_THRESH = 0.5 -GRID_STAT_REGRID_METHOD = BUDGET -GRID_STAT_REGRID_WIDTH = 2 -GRID_STAT_REGRID_SHAPE = SQUARE - -#GRID_STAT_INTERP_FIELD = BOTH -#GRID_STAT_INTERP_VLD_THRESH = 1.0 -#GRID_STAT_INTERP_SHAPE = SQUARE -#GRID_STAT_INTERP_TYPE_METHOD = NEAREST -#GRID_STAT_INTERP_TYPE_WIDTH = 1 - -#GRID_STAT_GRID_WEIGHT_FLAG = - -# -# Name to identify model (forecast) data in output. -# -# The variable MODEL is recorded in the stat files, and the data in -# these files is then plotted (e.g. using METViewer). Here, we add a -# suffix to MODEL that identifies the forecast ensemble member. This -# makes it easier to identify each curve. -# -MODEL = {{vx_fcst_model_name}}_{{ensmem_name}} -# -# Name to identify observation data in output. -# -OBTYPE = {{obtype}} -# -# Value to enter under the DESC column in the output stat file. -# -GRID_STAT_DESC = NA - -# List of variables to compare in GridStat - FCST_VAR1 variables correspond -# to OBS_VAR1 variables -# Note [FCST/OBS/BOTH]_GRID_STAT_VAR_NAME can be used instead if different evaluations -# are needed for different tools - -GRID_STAT_MET_CONFIG_OVERRIDES = cat_thresh = [NA]; cnt_thresh = [NA]; cnt_logic = UNION; wind_thresh = [NA]; wind_logic = UNION; ci_alpha = [0.05]; rank_corr_flag = FALSE; -# -# List of forecast and corresponding observation fields to process. -# -FCST_VAR1_NAME = {{fieldname_in_met_output}}_{{accum_hh}} -FCST_VAR1_LEVELS = A{{accum_hh}} -FCST_VAR1_THRESH = {{field_thresholds}} -FCST_VAR1_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}"; -OBS_VAR1_NAME = {{fieldname_in_obs_input}} -OBS_VAR1_LEVELS = A{{accum_hh}} -OBS_VAR1_THRESH = {{field_thresholds}} -OBS_VAR1_OPTIONS = convert(x) = 100.0*x; -# -# Forecast data time window(s). -# -FCST_GRID_STAT_FILE_WINDOW_BEGIN = 0 -FCST_GRID_STAT_FILE_WINDOW_END = 0 -# -# Observation data time window(s). -# -OBS_GRID_STAT_FILE_WINDOW_BEGIN = 0 -OBS_GRID_STAT_FILE_WINDOW_END = 0 - -# MET GridStat neighborhood values -# See the MET User's Guide GridStat section for more information -GRID_STAT_NEIGHBORHOOD_FIELD = BOTH - -# width value passed to nbrhd dictionary in the MET config file -GRID_STAT_NEIGHBORHOOD_WIDTH = 5 - -# shape value passed to nbrhd dictionary in the MET config file -GRID_STAT_NEIGHBORHOOD_SHAPE = SQUARE - -# cov thresh list passed to nbrhd dictionary in the MET config file -GRID_STAT_NEIGHBORHOOD_COV_THRESH = >=0.5 - -# Set to true to run GridStat separately for each field specified -# Set to false to create one run of GridStat per run time that -# includes all fields specified. -GRID_STAT_ONCE_PER_FIELD = False -# -# Set to true if forecast data is probabilistic. 
-# -FCST_IS_PROB = False -# -# Only used if FCST_IS_PROB is true - sets probabilistic threshold -# -FCST_GRID_STAT_PROB_THRESH = ==0.1 - -GRID_STAT_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE} - -# Climatology data -#GRID_STAT_CLIMO_MEAN_FILE_NAME = -#GRID_STAT_CLIMO_MEAN_FIELD = -#GRID_STAT_CLIMO_MEAN_REGRID_METHOD = -#GRID_STAT_CLIMO_MEAN_REGRID_WIDTH = -#GRID_STAT_CLIMO_MEAN_REGRID_VLD_THRESH = -#GRID_STAT_CLIMO_MEAN_REGRID_SHAPE = -#GRID_STAT_CLIMO_MEAN_TIME_INTERP_METHOD = -#GRID_STAT_CLIMO_MEAN_MATCH_MONTH = -#GRID_STAT_CLIMO_MEAN_DAY_INTERVAL = -#GRID_STAT_CLIMO_MEAN_HOUR_INTERVAL = - -#GRID_STAT_CLIMO_STDEV_FILE_NAME = -#GRID_STAT_CLIMO_STDEV_FIELD = -#GRID_STAT_CLIMO_STDEV_REGRID_METHOD = -#GRID_STAT_CLIMO_STDEV_REGRID_WIDTH = -#GRID_STAT_CLIMO_STDEV_REGRID_VLD_THRESH = -#GRID_STAT_CLIMO_STDEV_REGRID_SHAPE = -#GRID_STAT_CLIMO_STDEV_TIME_INTERP_METHOD = -#GRID_STAT_CLIMO_STDEV_MATCH_MONTH = -#GRID_STAT_CLIMO_STDEV_DAY_INTERVAL = -#GRID_STAT_CLIMO_STDEV_HOUR_INTERVAL = - -#GRID_STAT_CLIMO_CDF_BINS = 1 -#GRID_STAT_CLIMO_CDF_CENTER_BINS = False -#GRID_STAT_CLIMO_CDF_WRITE_BINS = True - -GRID_STAT_MASK_GRID = - -# Statistical output types -GRID_STAT_OUTPUT_FLAG_FHO = STAT -GRID_STAT_OUTPUT_FLAG_CTC = STAT -GRID_STAT_OUTPUT_FLAG_CTS = STAT -#GRID_STAT_OUTPUT_FLAG_MCTC = NONE -#GRID_STAT_OUTPUT_FLAG_MCTS = NONE -GRID_STAT_OUTPUT_FLAG_CNT = STAT -#GRID_STAT_OUTPUT_FLAG_SL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_SAL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_VL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_VAL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_VCNT = NONE -#GRID_STAT_OUTPUT_FLAG_PCT = NONE -#GRID_STAT_OUTPUT_FLAG_PSTD = NONE -#GRID_STAT_OUTPUT_FLAG_PJC = NONE -#GRID_STAT_OUTPUT_FLAG_PRC = NONE -#GRID_STAT_OUTPUT_FLAG_ECLV = BOTH -GRID_STAT_OUTPUT_FLAG_NBRCTC = STAT -GRID_STAT_OUTPUT_FLAG_NBRCTS = STAT -GRID_STAT_OUTPUT_FLAG_NBRCNT = STAT -#GRID_STAT_OUTPUT_FLAG_GRAD = BOTH -#GRID_STAT_OUTPUT_FLAG_DMAP = NONE - -# NetCDF matched pairs output file -#GRID_STAT_NC_PAIRS_VAR_NAME = -GRID_STAT_NC_PAIRS_FLAG_LATLON = FALSE -GRID_STAT_NC_PAIRS_FLAG_RAW = FALSE -GRID_STAT_NC_PAIRS_FLAG_DIFF = FALSE -GRID_STAT_NC_PAIRS_FLAG_CLIMO = FALSE -#GRID_STAT_NC_PAIRS_FLAG_CLIMO_CDP = FALSE -GRID_STAT_NC_PAIRS_FLAG_WEIGHT = FALSE -GRID_STAT_NC_PAIRS_FLAG_NBRHD = FALSE -#GRID_STAT_NC_PAIRS_FLAG_FOURIER = FALSE -#GRID_STAT_NC_PAIRS_FLAG_GRADIENT = FALSE -#GRID_STAT_NC_PAIRS_FLAG_DISTANCE_MAP = FALSE -GRID_STAT_NC_PAIRS_FLAG_APPLY_MASK = FALSE - -# End of [config] section and start of [dir] section. -[dir] -# -# Directory containing observation input to GridStat. -# -OBS_GRID_STAT_INPUT_DIR = {{obs_input_dir}} -# -# Directory containing forecast input to GridStat. -# -FCST_GRID_STAT_INPUT_DIR = {{fcst_input_dir}} -# -# Directory containing climatology mean input to GridStat. Not used in -# this example. -# -GRID_STAT_CLIMO_MEAN_INPUT_DIR = -# -# Directory containing climatology mean input to GridStat. Not used in -# this example. -# -GRID_STAT_CLIMO_STDEV_INPUT_DIR = -# -# Directory in which to write output from GridStat. -# -# OUTPUT_BASE apparently has to be set to something; it cannot be left -# to its default value. But it is not explicitly used elsewhere in this -# configuration file. -# -OUTPUT_BASE = {{output_base}} -GRID_STAT_OUTPUT_DIR = {{output_dir}} -# -# Directory for staging data. -# -STAGING_DIR = {{staging_dir}} - -# End of [dir] section and start of [filename_templates] section. -[filename_templates] -# -# Template for observation input to GridStat relative to -# OBS_GRID_STAT_INPUT_DIR. 
-# -OBS_GRID_STAT_INPUT_TEMPLATE = {{obs_input_fn_template}} -# -# Template for forecast input to GridStat relative to -# FCST_GRID_STAT_INPUT_DIR. -# -FCST_GRID_STAT_INPUT_TEMPLATE = {{fcst_input_fn_template}} -# -# Template for output from GridStat relative to GRID_STAT_OUTPUT_DIR. -# -GRID_STAT_OUTPUT_TEMPLATE = -# -# Template for climatology input to GridStat relative to -# GRID_STAT_CLIMO_MEAN_INPUT_DIR. Not used in this example. -# -GRID_STAT_CLIMO_MEAN_INPUT_TEMPLATE = -# -# Template for climatology input to GridStat relative to -# GRID_STAT_CLIMO_STDEV_INPUT_DIR. Not used in this example. -# -GRID_STAT_CLIMO_STDEV_INPUT_TEMPLATE = -# -# Variable used to specify one or more verification mask files for -# GridStat. Not used for this example. -# -GRID_STAT_VERIFICATION_MASK_TEMPLATE = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly diff --git a/parm/metplus/GridStat_REFC.conf b/parm/metplus/GridStat_REFC.conf deleted file mode 100644 index c7f34d27f9..0000000000 --- a/parm/metplus/GridStat_REFC.conf +++ /dev/null @@ -1,315 +0,0 @@ -# GridStat METplus Configuration - -[config] - -# List of applications (tools) to run. -PROCESS_LIST = GridStat - -# time looping - options are INIT, VALID, RETRO, and REALTIME -# If set to INIT or RETRO: -# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set -# If set to VALID or REALTIME: -# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set -LOOP_BY = INIT - -# Format of INIT_BEG and INIT_END using % items -# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. -# see www.strftime.org for more information -# %Y%m%d%H expands to YYYYMMDDHH -INIT_TIME_FMT = %Y%m%d%H - -# Start time for METplus run - must match INIT_TIME_FMT -INIT_BEG = {{cdate}} - -# End time for METplus run - must match INIT_TIME_FMT -INIT_END = {{cdate}} - -# Increment between METplus runs (in seconds if no units are specified). -# Must be >= 60 seconds. -INIT_INCREMENT = 3600 - -# List of forecast leads to process for each run time (init or valid) -# In hours if units are not specified -# If unset, defaults to 0 (don't loop through forecast leads) -LEAD_SEQ = {{fhr_list}} -# -# Order of loops to process data - Options are times, processes -# Not relevant if only one item is in the PROCESS_LIST -# times = run all wrappers in the PROCESS_LIST for a single run time, then -# increment the run time and run all wrappers again until all times have -# been evaluated. -# processes = run the first wrapper in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST until all -# wrappers have been run -# -LOOP_ORDER = times -# -# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. -# -LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} -# -# Specify the name of the METplus log file. -# -LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} -# -# Specify the location and name of the final METplus conf file. -# -METPLUS_CONF = {GRID_STAT_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}} -# -# Location of MET configuration file to pass to GridStat. -# -# References PARM_BASE, which is the location of the parm directory -# corresponding to the ush directory of the run_metplus.py script that -# is called or the value of the environment variable METPLUS_PARM_BASE -# if set. -# -GRID_STAT_CONFIG_FILE = {PARM_BASE}/met_config/GridStatConfig_wrapped - -# grid to remap data. 
Value is set as the 'to_grid' variable in the 'regrid' dictionary -# See MET User's Guide for more information -GRID_STAT_REGRID_TO_GRID = FCST -GRID_STAT_REGRID_VLD_THRESH = 0.5 -GRID_STAT_REGRID_METHOD = BUDGET -GRID_STAT_REGRID_WIDTH = 2 -GRID_STAT_REGRID_SHAPE = SQUARE - -GRID_STAT_INTERP_FIELD = NONE -GRID_STAT_INTERP_VLD_THRESH = 1.0 -GRID_STAT_INTERP_SHAPE = SQUARE -GRID_STAT_INTERP_TYPE_METHOD = NEAREST -GRID_STAT_INTERP_TYPE_WIDTH = 1 - -GRID_STAT_GRID_WEIGHT_FLAG = NONE -# -# Name to identify model (forecast) data in output. -# -# The variable MODEL is recorded in the stat files, and the data in -# these files is then plotted (e.g. using METViewer). Here, we add a -# suffix to MODEL that identifies the forecast ensemble member. This -# makes it easier to identify each curve. -# -MODEL = {{vx_fcst_model_name}}_{{ensmem_name}} -# -# Name to identify observation data in output. -# -OBTYPE = {{obtype}} -# -# Value to enter under the DESC column in the output stat file. Here, -# we store the value of the original lead in this column, i.e. the lead -# with zero corresponding to the actual start time of the forecast (which -# is (cdate - time_lag)), not to cdate. This is just the lead in -# LEAD_SEQ with the time lag (time_lag) of the current forecast member -# added on. -# -# Uncomment this line only after upgrading to METplus 5.x. -#GRID_STAT_DESC = {lead?fmt=%H%M%S?shift={{time_lag}}} -GRID_STAT_DESC = NA - -# List of variables to compare in GridStat - FCST_VAR1 variables correspond -# to OBS_VAR1 variables -# Note [FCST/OBS/BOTH]_GRID_STAT_VAR_NAME can be used instead if different evaluations -# are needed for different tools - -GRID_STAT_MET_CONFIG_OVERRIDES = cat_thresh = [NA]; cnt_thresh = [NA]; cnt_logic = UNION; wind_thresh = [NA]; wind_logic = UNION; ci_alpha = [0.05]; rank_corr_flag = FALSE; -# -# List of forecast and corresponding observation fields to process. -# -# Note on use of set_attr_lead and ensemble member time-lagging: -# ------------------------------------------------------------- -# The set_attr_lead parameter appearing below in [FCST|OBS]_VAR_OPTIONS -# specifies the lead to use both in naming of the output .stat and .nc -# files and for setting the lead values contained in those files. This -# option causes MET/METplus to use the lead values in the variable LEAD_SEQ -# set above, which are the same for all ensemble forecast members (i.e. -# regardless of whether members are time lagged with respect to the -# nominal cycle date specified by cdate). If set_attr_lead were not -# specified as below, then MET/METplus would get the lead from the input -# forecast file, and that would in general differ from one ensemble member -# to the next depending on whether the member is time-lagged. That would -# cause confusion, so here, we always use lead values with zero lead -# corresponding to the nominal cdate. -# -FCST_VAR1_NAME = {{fieldname_in_fcst_input}} -FCST_VAR1_LEVELS = L0 -FCST_VAR1_THRESH = {{field_thresholds}} -FCST_VAR1_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}"; - cnt_thresh = [ >15 ]; - cnt_logic = UNION; -OBS_VAR1_NAME = {{fieldname_in_obs_input}} -OBS_VAR1_LEVELS = Z500 -OBS_VAR1_THRESH = {{field_thresholds}} -OBS_VAR1_OPTIONS = censor_thresh = [eq-999, <-20]; - censor_val = [-9999, -20]; - cnt_thresh = [ >15 ]; - cnt_logic = UNION; - -# -# Forecast data time window(s). -# -#FCST_GRID_STAT_FILE_WINDOW_BEGIN = 0 -#FCST_GRID_STAT_FILE_WINDOW_END = 0 -# -# Observation data time window(s). 
-# -OBS_GRID_STAT_FILE_WINDOW_BEGIN = -300 -OBS_GRID_STAT_FILE_WINDOW_END = 300 - -# MET GridStat neighborhood values -# See the MET User's Guide GridStat section for more information -GRID_STAT_NEIGHBORHOOD_FIELD = BOTH - -# width value passed to nbrhd dictionary in the MET config file -GRID_STAT_NEIGHBORHOOD_WIDTH = 1,3,5,7 - -# shape value passed to nbrhd dictionary in the MET config file -GRID_STAT_NEIGHBORHOOD_SHAPE = SQUARE - -# cov thresh list passed to nbrhd dictionary in the MET config file -GRID_STAT_NEIGHBORHOOD_COV_THRESH = >=0.5 - -# Set to true to run GridStat separately for each field specified -# Set to false to create one run of GridStat per run time that -# includes all fields specified. -GRID_STAT_ONCE_PER_FIELD = False -# -# Set to true if forecast data is probabilistic. -# -FCST_IS_PROB = False -# -# Only used if FCST_IS_PROB is true - sets probabilistic threshold -# -FCST_GRID_STAT_PROB_THRESH = ==0.1 - -GRID_STAT_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE} - -# Climatology data -#GRID_STAT_CLIMO_MEAN_FILE_NAME = -#GRID_STAT_CLIMO_MEAN_FIELD = -#GRID_STAT_CLIMO_MEAN_REGRID_METHOD = -#GRID_STAT_CLIMO_MEAN_REGRID_WIDTH = -#GRID_STAT_CLIMO_MEAN_REGRID_VLD_THRESH = -#GRID_STAT_CLIMO_MEAN_REGRID_SHAPE = -#GRID_STAT_CLIMO_MEAN_TIME_INTERP_METHOD = -#GRID_STAT_CLIMO_MEAN_MATCH_MONTH = -#GRID_STAT_CLIMO_MEAN_DAY_INTERVAL = -#GRID_STAT_CLIMO_MEAN_HOUR_INTERVAL = - -#GRID_STAT_CLIMO_STDEV_FILE_NAME = -#GRID_STAT_CLIMO_STDEV_FIELD = -#GRID_STAT_CLIMO_STDEV_REGRID_METHOD = -#GRID_STAT_CLIMO_STDEV_REGRID_WIDTH = -#GRID_STAT_CLIMO_STDEV_REGRID_VLD_THRESH = -#GRID_STAT_CLIMO_STDEV_REGRID_SHAPE = -#GRID_STAT_CLIMO_STDEV_TIME_INTERP_METHOD = -#GRID_STAT_CLIMO_STDEV_MATCH_MONTH = -#GRID_STAT_CLIMO_STDEV_DAY_INTERVAL = -#GRID_STAT_CLIMO_STDEV_HOUR_INTERVAL = - -GRID_STAT_CLIMO_CDF_BINS = 1 -#GRID_STAT_CLIMO_CDF_CENTER_BINS = False -#GRID_STAT_CLIMO_CDF_WRITE_BINS = True - -GRID_STAT_MASK_GRID = - -# Statistical output types -GRID_STAT_OUTPUT_FLAG_FHO = STAT -GRID_STAT_OUTPUT_FLAG_CTC = STAT -GRID_STAT_OUTPUT_FLAG_CTS = STAT -#GRID_STAT_OUTPUT_FLAG_MCTC = NONE -#GRID_STAT_OUTPUT_FLAG_MCTS = NONE -GRID_STAT_OUTPUT_FLAG_CNT = STAT -#GRID_STAT_OUTPUT_FLAG_SL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_SAL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_VL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_VAL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_VCNT = NONE -#GRID_STAT_OUTPUT_FLAG_PCT = NONE -#GRID_STAT_OUTPUT_FLAG_PSTD = NONE -#GRID_STAT_OUTPUT_FLAG_PJC = NONE -#GRID_STAT_OUTPUT_FLAG_PRC = NONE -#GRID_STAT_OUTPUT_FLAG_ECLV = BOTH -GRID_STAT_OUTPUT_FLAG_NBRCTC = STAT -GRID_STAT_OUTPUT_FLAG_NBRCTS = STAT -GRID_STAT_OUTPUT_FLAG_NBRCNT = STAT -#GRID_STAT_OUTPUT_FLAG_GRAD = BOTH -#GRID_STAT_OUTPUT_FLAG_DMAP = NONE - -# NetCDF matched pairs output file -#GRID_STAT_NC_PAIRS_VAR_NAME = -GRID_STAT_NC_PAIRS_FLAG_LATLON = FALSE -GRID_STAT_NC_PAIRS_FLAG_RAW = FALSE -GRID_STAT_NC_PAIRS_FLAG_DIFF = FALSE -GRID_STAT_NC_PAIRS_FLAG_CLIMO = FALSE -#GRID_STAT_NC_PAIRS_FLAG_CLIMO_CDP = FALSE -GRID_STAT_NC_PAIRS_FLAG_WEIGHT = FALSE -GRID_STAT_NC_PAIRS_FLAG_NBRHD = FALSE -#GRID_STAT_NC_PAIRS_FLAG_FOURIER = FALSE -#GRID_STAT_NC_PAIRS_FLAG_GRADIENT = FALSE -#GRID_STAT_NC_PAIRS_FLAG_DISTANCE_MAP = FALSE -GRID_STAT_NC_PAIRS_FLAG_APPLY_MASK = FALSE - -# End of [config] section and start of [dir] section. -[dir] -# -# Directory containing observation input to GridStat. -# -OBS_GRID_STAT_INPUT_DIR = {{obs_input_dir}} -# -# Directory containing forecast input to GridStat. 
-# -FCST_GRID_STAT_INPUT_DIR = {{fcst_input_dir}} -# -# Directory containing climatology mean input to GridStat. Not used in -# this example. -# -GRID_STAT_CLIMO_MEAN_INPUT_DIR = -# -# Directory containing climatology mean input to GridStat. Not used in -# this example. -# -GRID_STAT_CLIMO_STDEV_INPUT_DIR = -# -# Directory in which to write output from GridStat. -# -# OUTPUT_BASE apparently has to be set to something; it cannot be left -# to its default value. But it is not explicitly used elsewhere in this -# configuration file. -# -OUTPUT_BASE = {{output_base}} -GRID_STAT_OUTPUT_DIR = {{output_dir}} -# -# Directory for staging data. -# -STAGING_DIR = {{staging_dir}} - -# End of [dir] section and start of [filename_templates] section. -[filename_templates] -# -# Template for observation input to GridStat relative to -# OBS_GRID_STAT_INPUT_DIR. -# -OBS_GRID_STAT_INPUT_TEMPLATE = {{obs_input_fn_template}} -# -# Template for forecast input to GridStat relative to -# FCST_GRID_STAT_INPUT_DIR. -# -FCST_GRID_STAT_INPUT_TEMPLATE = {{fcst_input_fn_template}} -# -# Template for output from GridStat relative to GRID_STAT_OUTPUT_DIR. -# -GRID_STAT_OUTPUT_TEMPLATE = -# -# Template for climatology input to GridStat relative to -# GRID_STAT_CLIMO_MEAN_INPUT_DIR. Not used in this example. -# -GRID_STAT_CLIMO_MEAN_INPUT_TEMPLATE = -# -# Template for climatology input to GridStat relative to -# GRID_STAT_CLIMO_STDEV_INPUT_DIR. Not used in this example. -# -GRID_STAT_CLIMO_STDEV_INPUT_TEMPLATE = -# -# Variable used to specify one or more verification mask files for -# GridStat. Not used for this example. -# -GRID_STAT_VERIFICATION_MASK_TEMPLATE = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly diff --git a/parm/metplus/GridStat_RETOP.conf b/parm/metplus/GridStat_RETOP.conf deleted file mode 100644 index be91a0ba03..0000000000 --- a/parm/metplus/GridStat_RETOP.conf +++ /dev/null @@ -1,317 +0,0 @@ -# GridStat METplus Configuration - -[config] - -# List of applications (tools) to run. -PROCESS_LIST = GridStat - -# time looping - options are INIT, VALID, RETRO, and REALTIME -# If set to INIT or RETRO: -# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set -# If set to VALID or REALTIME: -# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set -LOOP_BY = INIT - -# Format of INIT_BEG and INIT_END using % items -# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. -# see www.strftime.org for more information -# %Y%m%d%H expands to YYYYMMDDHH -INIT_TIME_FMT = %Y%m%d%H - -# Start time for METplus run - must match INIT_TIME_FMT -INIT_BEG = {{cdate}} - -# End time for METplus run - must match INIT_TIME_FMT -INIT_END = {{cdate}} - -# Increment between METplus runs (in seconds if no units are specified). -# Must be >= 60 seconds. -INIT_INCREMENT = 3600 - -# List of forecast leads to process for each run time (init or valid) -# In hours if units are not specified -# If unset, defaults to 0 (don't loop through forecast leads) -LEAD_SEQ = {{fhr_list}} -# -# Order of loops to process data - Options are times, processes -# Not relevant if only one item is in the PROCESS_LIST -# times = run all wrappers in the PROCESS_LIST for a single run time, then -# increment the run time and run all wrappers again until all times have -# been evaluated. -# processes = run the first wrapper in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST until all -# wrappers have been run -# -LOOP_ORDER = times -# -# Verbosity of MET logging output. 
0 to 5; 0 is quiet, 5 is loud. -# -LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} -# -# Specify the name of the METplus log file. -# -LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} -# -# Specify the location and name of the final METplus conf file. -# -METPLUS_CONF = {GRID_STAT_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}} -# -# Location of MET configuration file to pass to GridStat. -# -# References PARM_BASE, which is the location of the parm directory -# corresponding to the ush directory of the run_metplus.py script that -# is called or the value of the environment variable METPLUS_PARM_BASE -# if set. -# -GRID_STAT_CONFIG_FILE = {PARM_BASE}/met_config/GridStatConfig_wrapped - -# grid to remap data. Value is set as the 'to_grid' variable in the 'regrid' dictionary -# See MET User's Guide for more information -GRID_STAT_REGRID_TO_GRID = FCST -GRID_STAT_REGRID_VLD_THRESH = 0.5 -GRID_STAT_REGRID_METHOD = BUDGET -GRID_STAT_REGRID_WIDTH = 2 -GRID_STAT_REGRID_SHAPE = SQUARE - -GRID_STAT_INTERP_FIELD = NONE -GRID_STAT_INTERP_VLD_THRESH = 1.0 -GRID_STAT_INTERP_SHAPE = SQUARE -GRID_STAT_INTERP_TYPE_METHOD = NEAREST -GRID_STAT_INTERP_TYPE_WIDTH = 1 - -GRID_STAT_GRID_WEIGHT_FLAG = NONE -# -# Name to identify model (forecast) data in output. -# -# The variable MODEL is recorded in the stat files, and the data in -# these files is then plotted (e.g. using METViewer). Here, we add a -# suffix to MODEL that identifies the forecast ensemble member. This -# makes it easier to identify each curve. -# -MODEL = {{vx_fcst_model_name}}_{{ensmem_name}} -# -# Name to identify observation data in output. -# -OBTYPE = {{obtype}} -# -# Value to enter under the DESC column in the output stat file. Here, -# we store the value of the original lead in this column, i.e. the lead -# with zero corresponding to the actual start time of the forecast (which -# is (cdate - time_lag)), not to cdate. This is just the lead in -# LEAD_SEQ with the time lag (time_lag) of the current forecast member -# added on. -# -# Uncomment this line only after upgrading to METplus 5.x. -#GRID_STAT_DESC = {lead?fmt=%H%M%S?shift={{time_lag}}} -GRID_STAT_DESC = NA - -# List of variables to compare in GridStat - FCST_VAR1 variables correspond -# to OBS_VAR1 variables -# Note [FCST/OBS/BOTH]_GRID_STAT_VAR_NAME can be used instead if different evaluations -# are needed for different tools - -GRID_STAT_MET_CONFIG_OVERRIDES = cat_thresh = [NA]; cnt_thresh = [NA]; cnt_logic = UNION; wind_thresh = [NA]; wind_logic = UNION; ci_alpha = [0.05]; rank_corr_flag = FALSE; -# -# List of forecast and corresponding observation fields to process. -# -# Note on use of set_attr_lead and ensemble member time-lagging: -# ------------------------------------------------------------- -# The set_attr_lead parameter appearing below in [FCST|OBS]_VAR_OPTIONS -# specifies the lead to use both in naming of the output .stat and .nc -# files and for setting the lead values contained in those files. This -# option causes MET/METplus to use the lead values in the variable LEAD_SEQ -# set above, which are the same for all ensemble forecast members (i.e. -# regardless of whether members are time lagged with respect to the -# nominal cycle date specified by cdate). If set_attr_lead were not -# specified as below, then MET/METplus would get the lead from the input -# forecast file, and that would in general differ from one ensemble member -# to the next depending on whether the member is time-lagged. 
That would -# cause confusion, so here, we always use lead values with zero lead -# corresponding to the nominal cdate. -# -FCST_VAR1_NAME = {{fieldname_in_fcst_input}} -FCST_VAR1_LEVELS = L0 -FCST_VAR1_THRESH = {{field_thresholds}} -FCST_VAR1_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}"; - convert(x) = x * 3.28084 * 0.001; - cnt_thresh = [ >0 ]; - cnt_logic = UNION; -OBS_VAR1_NAME = {{fieldname_in_obs_input}} -OBS_VAR1_LEVELS = Z500 -OBS_VAR1_THRESH = {{field_thresholds}} -OBS_VAR1_OPTIONS = convert(x) = x * 3280.84 * 0.001; - censor_thresh = [<=-9.84252,eq-3.28084]; - censor_val = [-9999,-16.4042]; - cnt_thresh = [ >0 ]; - cnt_logic = UNION; - -# -# Forecast data time window(s). -# -#FCST_GRID_STAT_FILE_WINDOW_BEGIN = 0 -#FCST_GRID_STAT_FILE_WINDOW_END = 0 -# -# Observation data time window(s). -# -OBS_GRID_STAT_FILE_WINDOW_BEGIN = -300 -OBS_GRID_STAT_FILE_WINDOW_END = 300 - -# MET GridStat neighborhood values -# See the MET User's Guide GridStat section for more information -GRID_STAT_NEIGHBORHOOD_FIELD = BOTH - -# width value passed to nbrhd dictionary in the MET config file -GRID_STAT_NEIGHBORHOOD_WIDTH = 1,3,5,7 - -# shape value passed to nbrhd dictionary in the MET config file -GRID_STAT_NEIGHBORHOOD_SHAPE = SQUARE - -# cov thresh list passed to nbrhd dictionary in the MET config file -GRID_STAT_NEIGHBORHOOD_COV_THRESH = >=0.5 - -# Set to true to run GridStat separately for each field specified -# Set to false to create one run of GridStat per run time that -# includes all fields specified. -GRID_STAT_ONCE_PER_FIELD = False -# -# Set to true if forecast data is probabilistic. -# -FCST_IS_PROB = False -# -# Only used if FCST_IS_PROB is true - sets probabilistic threshold -# -FCST_GRID_STAT_PROB_THRESH = ==0.1 - -GRID_STAT_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE} - -# Climatology data -#GRID_STAT_CLIMO_MEAN_FILE_NAME = -#GRID_STAT_CLIMO_MEAN_FIELD = -#GRID_STAT_CLIMO_MEAN_REGRID_METHOD = -#GRID_STAT_CLIMO_MEAN_REGRID_WIDTH = -#GRID_STAT_CLIMO_MEAN_REGRID_VLD_THRESH = -#GRID_STAT_CLIMO_MEAN_REGRID_SHAPE = -#GRID_STAT_CLIMO_MEAN_TIME_INTERP_METHOD = -#GRID_STAT_CLIMO_MEAN_MATCH_MONTH = -#GRID_STAT_CLIMO_MEAN_DAY_INTERVAL = -#GRID_STAT_CLIMO_MEAN_HOUR_INTERVAL = - -#GRID_STAT_CLIMO_STDEV_FILE_NAME = -#GRID_STAT_CLIMO_STDEV_FIELD = -#GRID_STAT_CLIMO_STDEV_REGRID_METHOD = -#GRID_STAT_CLIMO_STDEV_REGRID_WIDTH = -#GRID_STAT_CLIMO_STDEV_REGRID_VLD_THRESH = -#GRID_STAT_CLIMO_STDEV_REGRID_SHAPE = -#GRID_STAT_CLIMO_STDEV_TIME_INTERP_METHOD = -#GRID_STAT_CLIMO_STDEV_MATCH_MONTH = -#GRID_STAT_CLIMO_STDEV_DAY_INTERVAL = -#GRID_STAT_CLIMO_STDEV_HOUR_INTERVAL = - -GRID_STAT_CLIMO_CDF_BINS = 1 -#GRID_STAT_CLIMO_CDF_CENTER_BINS = False -#GRID_STAT_CLIMO_CDF_WRITE_BINS = True - -GRID_STAT_MASK_GRID = - -# Statistical output types -GRID_STAT_OUTPUT_FLAG_FHO = STAT -GRID_STAT_OUTPUT_FLAG_CTC = STAT -GRID_STAT_OUTPUT_FLAG_CTS = STAT -#GRID_STAT_OUTPUT_FLAG_MCTC = NONE -#GRID_STAT_OUTPUT_FLAG_MCTS = NONE -GRID_STAT_OUTPUT_FLAG_CNT = STAT -#GRID_STAT_OUTPUT_FLAG_SL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_SAL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_VL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_VAL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_VCNT = NONE -#GRID_STAT_OUTPUT_FLAG_PCT = NONE -#GRID_STAT_OUTPUT_FLAG_PSTD = NONE -#GRID_STAT_OUTPUT_FLAG_PJC = NONE -#GRID_STAT_OUTPUT_FLAG_PRC = NONE -#GRID_STAT_OUTPUT_FLAG_ECLV = BOTH -GRID_STAT_OUTPUT_FLAG_NBRCTC = STAT -GRID_STAT_OUTPUT_FLAG_NBRCTS = STAT -GRID_STAT_OUTPUT_FLAG_NBRCNT = STAT -#GRID_STAT_OUTPUT_FLAG_GRAD = BOTH -#GRID_STAT_OUTPUT_FLAG_DMAP = NONE - -# 
NetCDF matched pairs output file -#GRID_STAT_NC_PAIRS_VAR_NAME = -GRID_STAT_NC_PAIRS_FLAG_LATLON = FALSE -GRID_STAT_NC_PAIRS_FLAG_RAW = FALSE -GRID_STAT_NC_PAIRS_FLAG_DIFF = FALSE -GRID_STAT_NC_PAIRS_FLAG_CLIMO = FALSE -#GRID_STAT_NC_PAIRS_FLAG_CLIMO_CDP = FALSE -GRID_STAT_NC_PAIRS_FLAG_WEIGHT = FALSE -GRID_STAT_NC_PAIRS_FLAG_NBRHD = FALSE -#GRID_STAT_NC_PAIRS_FLAG_FOURIER = FALSE -#GRID_STAT_NC_PAIRS_FLAG_GRADIENT = FALSE -#GRID_STAT_NC_PAIRS_FLAG_DISTANCE_MAP = FALSE -GRID_STAT_NC_PAIRS_FLAG_APPLY_MASK = FALSE - -# End of [config] section and start of [dir] section. -[dir] -# -# Directory containing observation input to GridStat. -# -OBS_GRID_STAT_INPUT_DIR = {{obs_input_dir}} -# -# Directory containing forecast input to GridStat. -# -FCST_GRID_STAT_INPUT_DIR = {{fcst_input_dir}} -# -# Directory containing climatology mean input to GridStat. Not used in -# this example. -# -GRID_STAT_CLIMO_MEAN_INPUT_DIR = -# -# Directory containing climatology mean input to GridStat. Not used in -# this example. -# -GRID_STAT_CLIMO_STDEV_INPUT_DIR = -# -# Directory in which to write output from GridStat. -# -# OUTPUT_BASE apparently has to be set to something; it cannot be left -# to its default value. But it is not explicitly used elsewhere in this -# configuration file. -# -OUTPUT_BASE = {{output_base}} -GRID_STAT_OUTPUT_DIR = {{output_dir}} -# -# Directory for staging data. -# -STAGING_DIR = {{staging_dir}} - -# End of [dir] section and start of [filename_templates] section. -[filename_templates] -# -# Template for observation input to GridStat relative to -# OBS_GRID_STAT_INPUT_DIR. -# -OBS_GRID_STAT_INPUT_TEMPLATE = {{obs_input_fn_template}} -# -# Template for forecast input to GridStat relative to -# FCST_GRID_STAT_INPUT_DIR. -# -FCST_GRID_STAT_INPUT_TEMPLATE = {{fcst_input_fn_template}} -# -# Template for output from GridStat relative to GRID_STAT_OUTPUT_DIR. -# -GRID_STAT_OUTPUT_TEMPLATE = -# -# Template for climatology input to GridStat relative to -# GRID_STAT_CLIMO_MEAN_INPUT_DIR. Not used in this example. -# -GRID_STAT_CLIMO_MEAN_INPUT_TEMPLATE = -# -# Template for climatology input to GridStat relative to -# GRID_STAT_CLIMO_STDEV_INPUT_DIR. Not used in this example. -# -GRID_STAT_CLIMO_STDEV_INPUT_TEMPLATE = -# -# Variable used to specify one or more verification mask files for -# GridStat. Not used for this example. -# -GRID_STAT_VERIFICATION_MASK_TEMPLATE = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly diff --git a/parm/metplus/GridStat_ensmean.conf b/parm/metplus/GridStat_ensmean.conf new file mode 100644 index 0000000000..7c3b3b7ad9 --- /dev/null +++ b/parm/metplus/GridStat_ensmean.conf @@ -0,0 +1,655 @@ +# Ensemble mean {{MetplusToolName}} METplus Configuration + +[config] + +# List of applications (tools) to run. +PROCESS_LIST = {{MetplusToolName}} + +# time looping - options are INIT, VALID, RETRO, and REALTIME +# If set to INIT or RETRO: +# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set +# If set to VALID or REALTIME: +# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set +LOOP_BY = INIT + +# Format of INIT_BEG and INIT_END using % items +# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. +# see www.strftime.org for more information +# %Y%m%d%H expands to YYYYMMDDHH +INIT_TIME_FMT = %Y%m%d%H + +# Start time for METplus run - must match INIT_TIME_FMT +INIT_BEG = {{cdate}} + +# End time for METplus run - must match INIT_TIME_FMT +INIT_END = {{cdate}} + +# Increment between METplus runs (in seconds if no units are specified). 
+# Must be >= 60 seconds. +INIT_INCREMENT = 3600 + +# List of forecast leads to process for each run time (init or valid) +# In hours if units are not specified +# If unset, defaults to 0 (don't loop through forecast leads) +LEAD_SEQ = {{vx_leadhr_list}} +# +# Order of loops to process data - Options are times, processes +# Not relevant if only one item is in the PROCESS_LIST +# times = run all wrappers in the PROCESS_LIST for a single run time, then +# increment the run time and run all wrappers again until all times have +# been evaluated. +# processes = run the first wrapper in the PROCESS_LIST for all times +# specified, then repeat for the next item in the PROCESS_LIST until all +# wrappers have been run +# +LOOP_ORDER = times +# +# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. +# +LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} +# +# Specify the name of the METplus log file. +# +LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} +# +# Specify the location and name of the final METplus conf file. +# +METPLUS_CONF = {{ '{' ~ METPLUS_TOOL_NAME ~ '_OUTPUT_DIR}' }}/metplus_final.{{metplus_config_fn}} +# +# Location of MET configuration file to pass to {{MetplusToolName}}. +# +# References PARM_BASE, which is the location of the parm directory +# corresponding to the ush directory of the run_metplus.py script that +# is called or the value of the environment variable METPLUS_PARM_BASE +# if set. +# +{{METPLUS_TOOL_NAME}}_CONFIG_FILE = {PARM_BASE}/met_config/{{MetplusToolName}}Config_wrapped + +# grid to remap data. Value is set as the 'to_grid' variable in the 'regrid' dictionary +# See MET User's Guide for more information +{{METPLUS_TOOL_NAME}}_REGRID_TO_GRID = FCST +{{METPLUS_TOOL_NAME}}_REGRID_VLD_THRESH = 0.5 +{{METPLUS_TOOL_NAME}}_REGRID_METHOD = BUDGET +{{METPLUS_TOOL_NAME}}_REGRID_WIDTH = 2 +{{METPLUS_TOOL_NAME}}_REGRID_SHAPE = SQUARE + +#{{METPLUS_TOOL_NAME}}_INTERP_FIELD = BOTH +#{{METPLUS_TOOL_NAME}}_INTERP_VLD_THRESH = 1.0 +#{{METPLUS_TOOL_NAME}}_INTERP_SHAPE = SQUARE +#{{METPLUS_TOOL_NAME}}_INTERP_TYPE_METHOD = NEAREST +#{{METPLUS_TOOL_NAME}}_INTERP_TYPE_WIDTH = 1 + +#{{METPLUS_TOOL_NAME}}_GRID_WEIGHT_FLAG = + +# +# Name to identify model (forecast) data in output. +# +# The variable MODEL is recorded in the stat files, and the data in +# these files is then plotted (e.g. using METViewer). Here, we add a +# suffix to MODEL that identifies the data as that for the ensemble +# mean. This makes it easier to identify each curve. +# +MODEL = {{vx_fcst_model_name}}_ensmean +# +# Name to identify observation data in output. +# +OBTYPE = {{obtype}} +# +# Value to enter under the DESC column in the output stat file. +# +{{METPLUS_TOOL_NAME}}_DESC = NA +# +# Overrides of MET configuration defaults. +# +{{METPLUS_TOOL_NAME}}_MET_CONFIG_OVERRIDES = cat_thresh = [NA]; cnt_thresh = [NA]; cnt_logic = UNION; wind_thresh = [NA]; wind_logic = UNION; ci_alpha = [0.05]; rank_corr_flag = FALSE; +# +# List of forecast and corresponding observation fields to process. +# +{#- +Import the file containing jinja macros. +#} +{%- import metplus_templates_dir ~ '/metplus_macros.jinja' as metplus_macros %} + +{#- +Set the probabilistic threshold to be used for the forecast field. If +necessary, this can be changed to be an input parameter in the calling +script instead of a hard-coded value as below. 
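As an illustration of how the double-brace Jinja fields in this template render, consider the METPLUS_CONF line above, which uses string concatenation ('~') to build a single-brace {..._OUTPUT_DIR} reference that Jinja leaves for METplus itself to resolve. The sketch below renders that line with the jinja2 Python package; the values passed to render() are hypothetical placeholders, not the values the workflow actually supplies:

    # Minimal sketch: render the METPLUS_CONF template line with jinja2.
    # ASSUMPTION: the render() arguments are made-up sample values.
    from jinja2 import Template

    line = ("METPLUS_CONF = "
            "{{ '{' ~ METPLUS_TOOL_NAME ~ '_OUTPUT_DIR}' }}/metplus_final.{{metplus_config_fn}}")

    print(Template(line).render(
        METPLUS_TOOL_NAME="GRID_STAT",
        metplus_config_fn="GridStat_ensmean.conf",
    ))
    # Output: METPLUS_CONF = {GRID_STAT_OUTPUT_DIR}/metplus_final.GridStat_ensmean.conf

Double-brace fields are thus consumed when the workflow generates the final conf file, while single-brace references (e.g. {LOG_DIR}, {PARM_BASE}) survive for METplus to interpret at run time.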
+#} +{%- set thresh_fcst_prob = '==0.1' %} + +{#- +Jinja requires certain variables to be defined globally within the template +before they can be used in if-statements and other scopes (see Jinja scoping +rules). Define such variables. +#} +{%- set indx_level_fcst = '' %} +{%- set indx_input_thresh_fcst = '' %} +{%- set error_msg = '' %} +{%- set opts_indent = '' %} +{%- set opts_indent_len = '' %} +{%- set tmp = '' %} + +{%- set field_fcst = '' %} +{%- set field_obs = '' %} +{%- set level_fcst = '' %} +{%- set level_obs = '' %} +{%- set thresh_fcst = '' %} +{%- set thresh_obs = '' %} + +{%- set threshes_fcst = [] %} +{%- set threshes_obs = [] %} + +{#- +Get the set of valid field groups and ensure that the specified input +field group appears in this list. +#} +{%- set valid_field_groups = vx_config_dict.keys()|list %} +{{- metplus_macros.check_field_group(valid_field_groups, input_field_group) }} + +{#- +Reset the input forecast level so that if it happens to be an accumulation +(e.g. 'A03'), the leading zeros in front of the hour are stripped out (e.g. +reset to 'A3'). +#} +{%- set input_level_fcst = metplus_macros.get_accumulation_no_zero_pad(input_level_fcst) %} + +{#- +Extract from the configuration dictionary the set (which itself is a +dictionary) of fields, levels, and thresholds corresponding to the input +field group. Then set the delimiter string that separates forecast and +observation values in the various items (i.e. dictionary keys and values +representing field names, levels, and thresholds) in this dictionary. +#} +{%- set fields_levels_threshes_cpld = vx_config_dict[input_field_group] %} +{%- set delim_str = metplus_macros.set_delim_str() %} + +{#- +Some fields in the specified field group (input_field_group) may need to +be excluded from the METplus config file because calculating means for +them doesn't make sense. List these (for each input_field_group) in the +following dictionary. +#} +{%- set fields_fcst_to_exclude_by_field_group = + {'APCP': [], + 'ASNOW': [], + 'REFC': [], + 'RETOP': [], + 'SFC': ['TCDC', 'VIS', 'HGT'], + 'UPA': []} %} +{%- set fields_fcst_to_exclude = fields_fcst_to_exclude_by_field_group[input_field_group] %} + +{#- +Remove from the dictionary fields_levels_threshes_cpld any fields that +are in the list to be excluded. +#} +{%- for field_cpld in fields_levels_threshes_cpld.copy() %} + + {%- if delim_str in field_cpld %} + {%- set field_fcst, field_obs = field_cpld.split(delim_str) %} + {%- else %} + {%- set field_fcst = field_cpld %} + {%- set field_obs = field_cpld %} + {%- endif %} + + {%- if field_fcst in fields_fcst_to_exclude %} + {%- set tmp = fields_levels_threshes_cpld.pop(field_cpld) %} + {%- endif %} + +{%- endfor %} + +{#- +Loop over the fields and set field names, levels, thresholds, and/or +options for each field, both for forecasts and for observations, in the +METplus configuration file. +#} +{%- set ns = namespace(var_count = 0) %} +{%- for field_cpld, levels_threshes_cpld in fields_levels_threshes_cpld.items() %} + + {%- if delim_str in field_cpld %} + {%- set field_fcst, field_obs = field_cpld.split(delim_str) %} + {%- else %} + {%- set field_fcst = field_cpld %} + {%- set field_obs = field_cpld %} + {%- endif %} + +{#- +For convenience, create lists of valid forecast and observation levels +for the current field.
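The template above relies on two small normalizations: coupled dictionary keys that carry distinct forecast and observation values separated by a delimiter (obtained from metplus_macros.set_delim_str()), and stripping of leading zeros from accumulation levels. The following plain-Python sketch mirrors that logic; the '%%' delimiter and the sample keys are assumptions for illustration, since the real definitions live in metplus_macros.jinja, which is not part of this diff:

    # Sketch of the per-field normalizations used by the template above.
    # ASSUMPTIONS: the '%%' delimiter and the sample keys are illustrative only;
    # the actual delimiter comes from metplus_macros.set_delim_str().
    import re

    def get_accumulation_no_zero_pad(level: str) -> str:
        """Strip leading zeros from an accumulation level, e.g. 'A03' -> 'A3'."""
        m = re.fullmatch(r"A0*(\d+)", level)
        return f"A{m.group(1)}" if m else level

    def split_coupled(key: str, delim: str = "%%"):
        """Split a coupled 'fcst<delim>obs' key; an uncoupled key applies to both."""
        return tuple(key.split(delim)) if delim in key else (key, key)

    assert get_accumulation_no_zero_pad("A03") == "A3"
    assert split_coupled("APCP") == ("APCP", "APCP")                    # one name for both
    assert split_coupled("RETOP%%EchoTop18") == ("RETOP", "EchoTop18")  # coupled key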
+#} + {%- set valid_levels_fcst = [] %} + {%- set valid_levels_obs = [] %} + {%- for level_cpld, threshes_cpld in levels_threshes_cpld.items() %} + {%- if delim_str in level_cpld %} + {%- set level_fcst, level_obs = level_cpld.split(delim_str) %} + {%- else %} + {%- set level_fcst = level_cpld %} + {%- set level_obs = level_cpld %} + {%- endif %} + {%- set tmp = valid_levels_fcst.append(level_fcst) %} + {%- set tmp = valid_levels_obs.append(level_obs) %} + {%- endfor %} + +{#- +Make sure that the input forecast level (input_level_fcst) is set to a +valid value. +#} + {%- if (input_level_fcst != 'all') and (input_level_fcst not in valid_levels_fcst) %} + {%- set error_msg = '\n' ~ +'The input forecast level (input_level_fcst) must be set either to \'all\'\n' ~ +'or to one of the elements in the list of valid levels (valid_levels_fcst)\n' ~ +'for the current forecast field (field_fcst). This is not the case:\n' ~ +' field_fcst = ' ~ field_fcst ~ '\n' ~ +' valid_levels_fcst = ' ~ valid_levels_fcst ~ '\n' ~ +' input_level_fcst = ' ~ input_level_fcst ~ '\n' %} + {{metplus_macros.print_err_and_quit(error_msg)}} + {%- endif %} + +{#- +Loop over the (coupled) levels and corresponding lists of thresholds. +Extract from these the level values for forecasts and observations and +use them to set the forecast and observation field names, levels, +thresholds, and/or options in the METplus configuration file. +#} + {%- for level_cpld, threshes_cpld in levels_threshes_cpld.items() %} + + {%- if delim_str in level_cpld %} + {%- set level_fcst, level_obs = level_cpld.split(delim_str) %} + {%- else %} + {%- set level_fcst = level_cpld %} + {%- set level_obs = level_cpld %} + {%- endif %} + + {%- set valid_threshes_fcst = [] %} + {%- set valid_threshes_obs = [] %} + {%- for thresh_cpld in threshes_cpld %} + {%- if delim_str in thresh_cpld %} + {%- set thresh_fcst, thresh_obs = thresh_cpld.split(delim_str) %} + {%- else %} + {%- set thresh_fcst = thresh_cpld %} + {%- set thresh_obs = thresh_cpld %} + {%- endif %} + {%- set tmp = valid_threshes_fcst.append(thresh_fcst) %} + {%- set tmp = valid_threshes_obs.append(thresh_obs) %} + {%- endfor %} + + {%- if (input_level_fcst == 'all') or (input_level_fcst == level_fcst) %} +{#- +Increment the METplus variable counter. +#} + {%- set ns.var_count = ns.var_count+1 %} + +{#- +Set forecast field name. Note that this has to exactly match the name +of the field in the input forecast file(s). + +The input forecast files are generated by the MET/METplus GenEnsProd +tool. That tool adds the field's level to the variable names in its +output file to ensure that all variables in the file have distinct names. +For example, if the same field, say APCP, is output at two different +levels, say at A3 and A6 (for APCP, "levels" are really accumulation +periods), there need to be two variables in the output file, and they +obviously can't both be named "APCP", so GenEnsProd names one "APCP_A3" +and the other "APCP_A6". Here, the level is stored in the variable +level_fcst and, below, is included in the name of the forecast field. + +For accumulated fields, the field name in the input forecast file contains +TWO references to the accumulation period. The first is the level of the +forecast field added by GenEnsProd as described above. The second is +another reference to this same level (accumulation period) but added by +the MET/METplus PcpCombine tool (whose output file is the input into +GenEnsProd).
PcpCombine adds this reference to the level (really the +accumulation period) to the field's name for the same reason that +GenEnsProd does, i.e. to ensure that the names of variables in the output +file are distinct. Here, this accumulation period is stored in the +variable accum_hh. Thus, for accumulated fields, below we add both +accum_hh and level_fcst to the field name to get an exact field name +match. +#} + {%- if (input_field_group in ['APCP', 'ASNOW']) %} +FCST_VAR{{ns.var_count}}_NAME = {{field_fcst}}_{{accum_hh}}_{{level_fcst}}_ENS_MEAN + {%- else %} +FCST_VAR{{ns.var_count}}_NAME = {{field_fcst}}_{{level_fcst}}_ENS_MEAN + {%- endif %} + +{#- +Set forecast field level. +#} +FCST_VAR{{ns.var_count}}_LEVELS = {{level_fcst}} + +{#- +Set forecast field threshold(s). Note that no forecast thresholds are +included in the METplus configuration file if input_thresh_fcst is set +to 'none'. +#} + {%- if (input_thresh_fcst != 'none') %} +{#- +If input_thresh_fcst is set to 'all', set the list of forecast thresholds +to the full set of valid values. +#} + {%- if (input_thresh_fcst == 'all') %} + + {%- set threshes_fcst = valid_threshes_fcst %} +{#- +If input_thresh_fcst is set to a specific value: +* If that value is valid, i.e. it exists in the list of valid forecast + thresholds, get its index in that list and use it to set the forecast + threshold to a one-element list containing that value. Note that the + index will be needed later below when setting the observation threshold(s). +* If the input forecast threshold is not valid, print out an error message + and exit. +#} + {%- else %} + + {%- if input_thresh_fcst in valid_threshes_fcst %} + {%- set indx_input_thresh_fcst = valid_threshes_fcst.index(input_thresh_fcst) %} + {%- set threshes_fcst = [valid_threshes_fcst[indx_input_thresh_fcst]] %} + {%- else %} + {%- set error_msg = '\n' ~ +'For the current forecast field (field_fcst) and forecast level (level_fcst),\n' ~ +'the input forecast threshold (input_thresh_fcst) does not exist in the list\n' ~ +'of valid forecast thresholds (valid_threshes_fcst):\n' ~ +' field_fcst = ' ~ field_fcst ~ '\n' ~ +' level_fcst = ' ~ level_fcst ~ '\n' ~ +' valid_threshes_fcst = ' ~ valid_threshes_fcst ~ '\n' ~ +' input_thresh_fcst = ' ~ input_thresh_fcst ~ '\n' %} + {{metplus_macros.print_err_and_quit(error_msg)}} + {%- endif %} + + {%- endif %} +{#- +If threshes_fcst has been reset to something other than its default +value of an empty list, then set the forecast thresholds in the METplus +configuration file because that implies threshes_fcst was set above to +a non-empty value. Then reset threshes_fcst to its default value for +proper processing of thresholds for the next field. +#} + {%- if (threshes_fcst != []) %} +FCST_VAR{{ns.var_count}}_THRESH = {{threshes_fcst|join(', ')}} + {%- endif %} + {%- set threshes_fcst = [] %} + + {%- endif %} + +{#- +Set forecast field options. +#} + {%- set opts_indent_len = 20 %} + {%- if (ns.var_count > 9) and (ns.var_count <= 99) %} + {%- set opts_indent_len = opts_indent_len + 1 %} + {%- elif (ns.var_count > 99) and (ns.var_count <= 999) %} + {%- set opts_indent_len = opts_indent_len + 2 %} + {%- elif (ns.var_count > 999) %} + {%- set opts_indent_len = opts_indent_len + 3 %} + {%- endif %} + {%- set opts_indent = ' '*opts_indent_len %} + + {%- if input_field_group == 'UPA' %} + + {%- if field_fcst == 'CAPE' %} +FCST_VAR{{ns.var_count}}_OPTIONS = cnt_thresh = [ >0 ]; + {%- endif %} + + {%- endif %} + +{#- +Set observation field name.
Note that this has to exactly match the name +of the field in the input observation file. + +For accumulated fields, the input observation file is generated by MET's +PcpCombine tool. In that file, the field name consists of the observation +field name here (field_obs) with the accumulation period appended to it +(separated by an underscore), so we must do the same here to get an exact +match. +#} + {%- if (input_field_group in ['APCP', 'ASNOW']) %} +OBS_VAR{{ns.var_count}}_NAME = {{field_obs}}_{{accum_hh}} + {%- else %} +OBS_VAR{{ns.var_count}}_NAME = {{field_obs}} + {%- endif %} + +{#- +Set observation field level. +#} + {%- set indx_level_fcst = valid_levels_fcst.index(level_fcst) %} + {%- set level_obs = valid_levels_obs[indx_level_fcst] %} +OBS_VAR{{ns.var_count}}_LEVELS = {{level_obs}} + +{#- +Set observation field threshold(s). Note that no observation thresholds +are included in the METplus configuration file if input_thresh_fcst is +set to 'none'. +#} + {%- if (input_thresh_fcst != 'none') %} +{#- +If input_thresh_fcst is set to 'all', set the list of observation thresholds +to the full set of valid values. +#} + {%- if (input_thresh_fcst == 'all') %} + + {%- set threshes_obs = valid_threshes_obs %} +{#- +If input_thresh_fcst is set to a specific forecast threshold, then the +observation threshold is given by the element in the list of valid +observation thresholds that has the same index as that of input_thresh_fcst +in the list of valid forecast thresholds. +#} + {%- else %} + {%- set threshes_obs = [valid_threshes_obs[indx_input_thresh_fcst]] %} + {%- endif %} +{#- +If threshes_obs has been reset to something other than its default value +of an empty list, then set the observation thresholds in the METplus +configuration file because that implies threshes_obs was set above to +a non-empty value. Then reset threshes_obs to its default value for +proper processing of thresholds for the next field. +#} + {%- if (threshes_obs != []) %} +OBS_VAR{{ns.var_count}}_THRESH = {{threshes_obs|join(', ')}} + {%- endif %} + {%- set threshes_obs = [] %} + + {%- endif %} + +{#- +Set observation field options. +#} + {%- set opts_indent_len = opts_indent_len - 1 %} + {%- set opts_indent = ' '*opts_indent_len %} + + {%- if input_field_group == 'ASNOW' %} + + {%- if field_obs == 'ASNOW' %} +OBS_VAR{{ns.var_count}}_OPTIONS = convert(x) = 100.0*x; + {%- endif %} + + {%- elif input_field_group == 'UPA' %} + + {%- if field_obs == 'CAPE' %} +OBS_VAR{{ns.var_count}}_OPTIONS = cnt_thresh = [ >0 ]; +{{opts_indent}}cnt_logic = UNION; + {%- elif field_obs == 'PBL' %} +OBS_VAR{{ns.var_count}}_OPTIONS = desc = "TKE"; + {%- endif %} + + {%- endif %} + +{#- +Print out a newline to separate the settings for the current field (both +forecast and observation settings) from those for the next field. +#} + {{- '\n' }} + + {%- endif %} + + {%- endfor %} +{%- endfor %} +# +# Forecast data time window(s). +# +FCST_{{METPLUS_TOOL_NAME}}_FILE_WINDOW_BEGIN = 0 +FCST_{{METPLUS_TOOL_NAME}}_FILE_WINDOW_END = 0 +# +# Observation data time window(s). 
+# +OBS_{{METPLUS_TOOL_NAME}}_FILE_WINDOW_BEGIN = 0 +OBS_{{METPLUS_TOOL_NAME}}_FILE_WINDOW_END = 0 + +# MET {{MetplusToolName}} neighborhood values +# See the MET User's Guide {{MetplusToolName}} section for more information +{{METPLUS_TOOL_NAME}}_NEIGHBORHOOD_FIELD = BOTH + +# width value passed to nbrhd dictionary in the MET config file +{{METPLUS_TOOL_NAME}}_NEIGHBORHOOD_WIDTH = 3,5,7 + +# shape value passed to nbrhd dictionary in the MET config file +{{METPLUS_TOOL_NAME}}_NEIGHBORHOOD_SHAPE = SQUARE + +# cov thresh list passed to nbrhd dictionary in the MET config file +{{METPLUS_TOOL_NAME}}_NEIGHBORHOOD_COV_THRESH = >=0.5 + +# Set to true to run {{MetplusToolName}} separately for each field specified +# Set to false to create one run of {{MetplusToolName}} per run time that +# includes all fields specified. +{{METPLUS_TOOL_NAME}}_ONCE_PER_FIELD = False +# +# Set to true if forecast data is probabilistic. +# +FCST_IS_PROB = False +# +# Only used if FCST_IS_PROB is true - sets probabilistic threshold +# +FCST_{{METPLUS_TOOL_NAME}}_PROB_THRESH = ==0.1 + +{{METPLUS_TOOL_NAME}}_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE} + +# Climatology data +#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_FILE_NAME = +#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_FIELD = +#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_REGRID_METHOD = +#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_REGRID_WIDTH = +#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_REGRID_VLD_THRESH = +#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_REGRID_SHAPE = +#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_TIME_INTERP_METHOD = +#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_MATCH_MONTH = +#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_DAY_INTERVAL = +#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_HOUR_INTERVAL = + +#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_FILE_NAME = +#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_FIELD = +#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_REGRID_METHOD = +#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_REGRID_WIDTH = +#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_REGRID_VLD_THRESH = +#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_REGRID_SHAPE = +#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_TIME_INTERP_METHOD = +#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_MATCH_MONTH = +#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_DAY_INTERVAL = +#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_HOUR_INTERVAL = + +#{{METPLUS_TOOL_NAME}}_CLIMO_CDF_BINS = 1 +#{{METPLUS_TOOL_NAME}}_CLIMO_CDF_CENTER_BINS = False +#{{METPLUS_TOOL_NAME}}_CLIMO_CDF_WRITE_BINS = True + +{{METPLUS_TOOL_NAME}}_MASK_GRID = + +# Statistical output types +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_FHO = STAT +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_CTC = STAT +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_CTS = STAT +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_MCTC = NONE +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_MCTS = NONE +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_CNT = STAT +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_SL1L2 = NONE +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_SAL1L2 = NONE +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_VL1L2 = NONE +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_VAL1L2 = NONE +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_VCNT = NONE +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_PCT = NONE +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_PSTD = NONE +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_PJC = NONE +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_PRC = NONE +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_ECLV = BOTH +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_NBRCTC = STAT +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_NBRCTS = STAT +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_NBRCNT = STAT +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_GRAD = BOTH +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_DMAP = NONE + +# NetCDF matched pairs output file +#{{METPLUS_TOOL_NAME}}_NC_PAIRS_VAR_NAME = +{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_LATLON = 
FALSE
+{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_RAW = FALSE
+{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_DIFF = FALSE
+{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_CLIMO = FALSE
+#{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_CLIMO_CDP = FALSE
+{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_WEIGHT = FALSE
+{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_NBRHD = FALSE
+#{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_FOURIER = FALSE
+#{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_GRADIENT = FALSE
+#{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_DISTANCE_MAP = FALSE
+{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_APPLY_MASK = FALSE
+
+# End of [config] section and start of [dir] section.
+[dir]
+#
+# Directory containing observation input to {{MetplusToolName}}.
+#
+OBS_{{METPLUS_TOOL_NAME}}_INPUT_DIR = {{obs_input_dir}}
+#
+# Directory containing forecast input to {{MetplusToolName}}.
+#
+FCST_{{METPLUS_TOOL_NAME}}_INPUT_DIR = {{fcst_input_dir}}
+#
+# Directory containing climatology mean input to {{MetplusToolName}}. Not used in
+# this example.
+#
+{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_INPUT_DIR =
+#
+# Directory containing climatology standard deviation input to
+# {{MetplusToolName}}. Not used in this example.
+#
+{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_INPUT_DIR =
+#
+# Directory in which to write output from {{MetplusToolName}}.
+#
+# OUTPUT_BASE apparently has to be set to something; it cannot be left
+# to its default value. But it is not explicitly used elsewhere in this
+# configuration file.
+#
+OUTPUT_BASE = {{output_base}}
+{{METPLUS_TOOL_NAME}}_OUTPUT_DIR = {{output_dir}}
+#
+# Directory for staging data.
+#
+STAGING_DIR = {{staging_dir}}
+
+# End of [dir] section and start of [filename_templates] section.
+[filename_templates]
+#
+# Template for observation input to {{MetplusToolName}} relative to
+# OBS_{{METPLUS_TOOL_NAME}}_INPUT_DIR.
+#
+OBS_{{METPLUS_TOOL_NAME}}_INPUT_TEMPLATE = {{obs_input_fn_template}}
+#
+# Template for forecast input to {{MetplusToolName}} relative to
+# FCST_{{METPLUS_TOOL_NAME}}_INPUT_DIR.
+#
+FCST_{{METPLUS_TOOL_NAME}}_INPUT_TEMPLATE = {{fcst_input_fn_template}}
+#
+# Template for output from {{MetplusToolName}} relative to {{METPLUS_TOOL_NAME}}_OUTPUT_DIR.
+#
+{{METPLUS_TOOL_NAME}}_OUTPUT_TEMPLATE =
+#
+# Template for climatology input to {{MetplusToolName}} relative to
+# {{METPLUS_TOOL_NAME}}_CLIMO_MEAN_INPUT_DIR. Not used in this example.
+#
+{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_INPUT_TEMPLATE =
+#
+# Template for climatology input to {{MetplusToolName}} relative to
+# {{METPLUS_TOOL_NAME}}_CLIMO_STDEV_INPUT_DIR. Not used in this example.
+#
+{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_INPUT_TEMPLATE =
+#
+# Variable used to specify one or more verification mask files for
+# {{MetplusToolName}}. Not used for this example.
+#
+{{METPLUS_TOOL_NAME}}_VERIFICATION_MASK_TEMPLATE = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly
diff --git a/parm/metplus/GridStat_ensmean_APCP.conf b/parm/metplus/GridStat_ensmean_APCP.conf
deleted file mode 100644
index 6d3956c8e6..0000000000
--- a/parm/metplus/GridStat_ensmean_APCP.conf
+++ /dev/null
@@ -1,282 +0,0 @@
-# Ensemble mean GridStat METplus Configuration
-
-[config]
-
-# List of applications (tools) to run.
-PROCESS_LIST = GridStat
-
-# time looping - options are INIT, VALID, RETRO, and REALTIME
-# If set to INIT or RETRO:
-# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set
-# If set to VALID or REALTIME:
-# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set
-LOOP_BY = INIT
-
-# Format of INIT_BEG and INIT_END using % items
-# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc.
-# see www.strftime.org for more information -# %Y%m%d%H expands to YYYYMMDDHH -INIT_TIME_FMT = %Y%m%d%H - -# Start time for METplus run - must match INIT_TIME_FMT -INIT_BEG = {{cdate}} - -# End time for METplus run - must match INIT_TIME_FMT -INIT_END = {{cdate}} - -# Increment between METplus runs (in seconds if no units are specified). -# Must be >= 60 seconds. -INIT_INCREMENT = 3600 - -# List of forecast leads to process for each run time (init or valid) -# In hours if units are not specified -# If unset, defaults to 0 (don't loop through forecast leads) -LEAD_SEQ = {{fhr_list}} -# -# Order of loops to process data - Options are times, processes -# Not relevant if only one item is in the PROCESS_LIST -# times = run all wrappers in the PROCESS_LIST for a single run time, then -# increment the run time and run all wrappers again until all times have -# been evaluated. -# processes = run the first wrapper in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST until all -# wrappers have been run -# -LOOP_ORDER = times -# -# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. -# -LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} -# -# Specify the name of the METplus log file. -# -LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} -# -# Specify the location and name of the final METplus conf file. -# -METPLUS_CONF = {GRID_STAT_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}} -# -# Location of MET configuration file to pass to GridStat. -# -# References PARM_BASE, which is the location of the parm directory -# corresponding to the ush directory of the run_metplus.py script that -# is called or the value of the environment variable METPLUS_PARM_BASE -# if set. -# -GRID_STAT_CONFIG_FILE = {PARM_BASE}/met_config/GridStatConfig_wrapped - -# grid to remap data. Value is set as the 'to_grid' variable in the 'regrid' dictionary -# See MET User's Guide for more information -GRID_STAT_REGRID_TO_GRID = FCST -GRID_STAT_REGRID_VLD_THRESH = 0.5 -GRID_STAT_REGRID_METHOD = BUDGET -GRID_STAT_REGRID_WIDTH = 2 -GRID_STAT_REGRID_SHAPE = SQUARE - -#GRID_STAT_INTERP_FIELD = BOTH -#GRID_STAT_INTERP_VLD_THRESH = 1.0 -#GRID_STAT_INTERP_SHAPE = SQUARE -#GRID_STAT_INTERP_TYPE_METHOD = NEAREST -#GRID_STAT_INTERP_TYPE_WIDTH = 1 - -#GRID_STAT_GRID_WEIGHT_FLAG = - -# -# Name to identify model (forecast) data in output. -# -# The variable MODEL is recorded in the stat files, and the data in -# these files is then plotted (e.g. using METViewer). Here, we add a -# suffix to MODEL that identifies the data as that for the ensemble -# mean. This makes it easier to identify each curve. -# -MODEL = {{vx_fcst_model_name}}_ensmean -# -# Name to identify observation data in output. -# -OBTYPE = {{obtype}} -# -# Value to enter under the DESC column in the output stat file. -# -GRID_STAT_DESC = NA - -GRID_STAT_MET_CONFIG_OVERRIDES = cat_thresh = [NA]; cnt_thresh = [NA]; cnt_logic = UNION; wind_thresh = [NA]; wind_logic = UNION; ci_alpha = [0.05]; rank_corr_flag = FALSE; -# -# List of forecast and corresponding observation fields to process. -# -FCST_VAR1_NAME = {{fieldname_in_met_output}}_{{accum_hh}}_A{{accum_no_pad}}_ENS_MEAN -FCST_VAR1_LEVELS = A{{accum_hh}} -FCST_VAR1_THRESH = {{field_thresholds}} -OBS_VAR1_NAME = {{fieldname_in_met_output}}_{{accum_hh}} -OBS_VAR1_LEVELS = A{{accum_hh}} -OBS_VAR1_THRESH = {{field_thresholds}} - -# -# Forecast data time window(s). 
-# -FCST_GRID_STAT_FILE_WINDOW_BEGIN = 0 -FCST_GRID_STAT_FILE_WINDOW_END = 0 -# -# Observation data time window(s). -# -OBS_GRID_STAT_FILE_WINDOW_BEGIN = 0 -OBS_GRID_STAT_FILE_WINDOW_END = 0 - -# MET GridStat neighborhood values -# See the MET User's Guide GridStat section for more information -GRID_STAT_NEIGHBORHOOD_FIELD = BOTH - -# width value passed to nbrhd dictionary in the MET config file -GRID_STAT_NEIGHBORHOOD_WIDTH = 3,5,7 - -# shape value passed to nbrhd dictionary in the MET config file -GRID_STAT_NEIGHBORHOOD_SHAPE = SQUARE - -# cov thresh list passed to nbrhd dictionary in the MET config file -GRID_STAT_NEIGHBORHOOD_COV_THRESH = >=0.5 - -# Set to true to run GridStat separately for each field specified -# Set to false to create one run of GridStat per run time that -# includes all fields specified. -GRID_STAT_ONCE_PER_FIELD = False -# -# Set to true if forecast data is probabilistic. -# -FCST_IS_PROB = False -# -# Only used if FCST_IS_PROB is true - sets probabilistic threshold -# -FCST_GRID_STAT_PROB_THRESH = ==0.1 - -GRID_STAT_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE} - -# Climatology data -#GRID_STAT_CLIMO_MEAN_FILE_NAME = -#GRID_STAT_CLIMO_MEAN_FIELD = -#GRID_STAT_CLIMO_MEAN_REGRID_METHOD = -#GRID_STAT_CLIMO_MEAN_REGRID_WIDTH = -#GRID_STAT_CLIMO_MEAN_REGRID_VLD_THRESH = -#GRID_STAT_CLIMO_MEAN_REGRID_SHAPE = -#GRID_STAT_CLIMO_MEAN_TIME_INTERP_METHOD = -#GRID_STAT_CLIMO_MEAN_MATCH_MONTH = -#GRID_STAT_CLIMO_MEAN_DAY_INTERVAL = -#GRID_STAT_CLIMO_MEAN_HOUR_INTERVAL = - -#GRID_STAT_CLIMO_STDEV_FILE_NAME = -#GRID_STAT_CLIMO_STDEV_FIELD = -#GRID_STAT_CLIMO_STDEV_REGRID_METHOD = -#GRID_STAT_CLIMO_STDEV_REGRID_WIDTH = -#GRID_STAT_CLIMO_STDEV_REGRID_VLD_THRESH = -#GRID_STAT_CLIMO_STDEV_REGRID_SHAPE = -#GRID_STAT_CLIMO_STDEV_TIME_INTERP_METHOD = -#GRID_STAT_CLIMO_STDEV_MATCH_MONTH = -#GRID_STAT_CLIMO_STDEV_DAY_INTERVAL = -#GRID_STAT_CLIMO_STDEV_HOUR_INTERVAL = - -#GRID_STAT_CLIMO_CDF_BINS = 1 -#GRID_STAT_CLIMO_CDF_CENTER_BINS = False -#GRID_STAT_CLIMO_CDF_WRITE_BINS = True - -GRID_STAT_MASK_GRID = - -# Statistical output types -GRID_STAT_OUTPUT_FLAG_FHO = STAT -GRID_STAT_OUTPUT_FLAG_CTC = STAT -GRID_STAT_OUTPUT_FLAG_CTS = STAT -#GRID_STAT_OUTPUT_FLAG_MCTC = NONE -#GRID_STAT_OUTPUT_FLAG_MCTS = NONE -GRID_STAT_OUTPUT_FLAG_CNT = STAT -#GRID_STAT_OUTPUT_FLAG_SL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_SAL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_VL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_VAL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_VCNT = NONE -#GRID_STAT_OUTPUT_FLAG_PCT = NONE -#GRID_STAT_OUTPUT_FLAG_PSTD = NONE -#GRID_STAT_OUTPUT_FLAG_PJC = NONE -#GRID_STAT_OUTPUT_FLAG_PRC = NONE -#GRID_STAT_OUTPUT_FLAG_ECLV = BOTH -GRID_STAT_OUTPUT_FLAG_NBRCTC = STAT -GRID_STAT_OUTPUT_FLAG_NBRCTS = STAT -GRID_STAT_OUTPUT_FLAG_NBRCNT = STAT -#GRID_STAT_OUTPUT_FLAG_GRAD = BOTH -#GRID_STAT_OUTPUT_FLAG_DMAP = NONE - -# NetCDF matched pairs output file -#GRID_STAT_NC_PAIRS_VAR_NAME = -GRID_STAT_NC_PAIRS_FLAG_LATLON = FALSE -GRID_STAT_NC_PAIRS_FLAG_RAW = FALSE -GRID_STAT_NC_PAIRS_FLAG_DIFF = FALSE -GRID_STAT_NC_PAIRS_FLAG_CLIMO = FALSE -#GRID_STAT_NC_PAIRS_FLAG_CLIMO_CDP = FALSE -GRID_STAT_NC_PAIRS_FLAG_WEIGHT = FALSE -GRID_STAT_NC_PAIRS_FLAG_NBRHD = FALSE -#GRID_STAT_NC_PAIRS_FLAG_FOURIER = FALSE -#GRID_STAT_NC_PAIRS_FLAG_GRADIENT = FALSE -#GRID_STAT_NC_PAIRS_FLAG_DISTANCE_MAP = FALSE -GRID_STAT_NC_PAIRS_FLAG_APPLY_MASK = FALSE - -# End of [config] section and start of [dir] section. -[dir] -# -# Directory containing observation input to GridStat. 
-# -OBS_GRID_STAT_INPUT_DIR = {{obs_input_dir}} -# -# Directory containing forecast input to GridStat. -# -FCST_GRID_STAT_INPUT_DIR = {{fcst_input_dir}} -# -# Directory containing climatology mean input to GridStat. Not used in -# this example. -# -GRID_STAT_CLIMO_MEAN_INPUT_DIR = -# -# Directory containing climatology mean input to GridStat. Not used in -# this example. -# -GRID_STAT_CLIMO_STDEV_INPUT_DIR = -# -# Directory in which to write output from GridStat. -# -# OUTPUT_BASE apparently has to be set to something; it cannot be left -# to its default value. But it is not explicitly used elsewhere in this -# configuration file. -# -OUTPUT_BASE = {{output_base}} -GRID_STAT_OUTPUT_DIR = {{output_dir}} -# -# Directory for staging data. -# -STAGING_DIR = {{staging_dir}} - -# End of [dir] section and start of [filename_templates] section. -[filename_templates] -# -# Template for observation input to GridStat relative to -# OBS_GRID_STAT_INPUT_DIR. -# -OBS_GRID_STAT_INPUT_TEMPLATE = {{obs_input_fn_template}} -# -# Template for forecast input to GridStat relative to -# FCST_GRID_STAT_INPUT_DIR. -# -FCST_GRID_STAT_INPUT_TEMPLATE = {{fcst_input_fn_template}} -# -# Template for output from GridStat relative to GRID_STAT_OUTPUT_DIR. -# -GRID_STAT_OUTPUT_TEMPLATE = -# -# Template for climatology input to GridStat relative to -# GRID_STAT_CLIMO_MEAN_INPUT_DIR. Not used in this example. -# -GRID_STAT_CLIMO_MEAN_INPUT_TEMPLATE = -# -# Template for climatology input to GridStat relative to -# GRID_STAT_CLIMO_STDEV_INPUT_DIR. Not used in this example. -# -GRID_STAT_CLIMO_STDEV_INPUT_TEMPLATE = -# -# Variable used to specify one or more verification mask files for -# GridStat. Not used for this example. -# -GRID_STAT_VERIFICATION_MASK_TEMPLATE = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly diff --git a/parm/metplus/GridStat_ensmean_ASNOW.conf b/parm/metplus/GridStat_ensmean_ASNOW.conf deleted file mode 100644 index 6fb8951a3f..0000000000 --- a/parm/metplus/GridStat_ensmean_ASNOW.conf +++ /dev/null @@ -1,287 +0,0 @@ -# Ensemble mean GridStat METplus Configuration - -[config] - -# List of applications (tools) to run. -PROCESS_LIST = GridStat - -# time looping - options are INIT, VALID, RETRO, and REALTIME -# If set to INIT or RETRO: -# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set -# If set to VALID or REALTIME: -# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set -LOOP_BY = INIT - -# Format of INIT_BEG and INIT_END using % items -# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. -# see www.strftime.org for more information -# %Y%m%d%H expands to YYYYMMDDHH -INIT_TIME_FMT = %Y%m%d%H - -# Start time for METplus run - must match INIT_TIME_FMT -INIT_BEG = {{cdate}} - -# End time for METplus run - must match INIT_TIME_FMT -INIT_END = {{cdate}} - -# Increment between METplus runs (in seconds if no units are specified). -# Must be >= 60 seconds. -INIT_INCREMENT = 3600 - -# List of forecast leads to process for each run time (init or valid) -# In hours if units are not specified -# If unset, defaults to 0 (don't loop through forecast leads) -LEAD_SEQ = {{fhr_list}} -# -# Order of loops to process data - Options are times, processes -# Not relevant if only one item is in the PROCESS_LIST -# times = run all wrappers in the PROCESS_LIST for a single run time, then -# increment the run time and run all wrappers again until all times have -# been evaluated. 
-# processes = run the first wrapper in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST until all -# wrappers have been run -# -LOOP_ORDER = times -# -# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. -# -LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} -# -# Specify the name of the METplus log file. -# -LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} -# -# Specify the location and name of the final METplus conf file. -# -METPLUS_CONF = {GRID_STAT_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}} -# -# Location of MET configuration file to pass to GridStat. -# -# References PARM_BASE, which is the location of the parm directory -# corresponding to the ush directory of the run_metplus.py script that -# is called or the value of the environment variable METPLUS_PARM_BASE -# if set. -# -GRID_STAT_CONFIG_FILE = {PARM_BASE}/met_config/GridStatConfig_wrapped - -# grid to remap data. Value is set as the 'to_grid' variable in the 'regrid' dictionary -# See MET User's Guide for more information -GRID_STAT_REGRID_TO_GRID = FCST -GRID_STAT_REGRID_VLD_THRESH = 0.5 -GRID_STAT_REGRID_METHOD = BUDGET -GRID_STAT_REGRID_WIDTH = 2 -GRID_STAT_REGRID_SHAPE = SQUARE - -#GRID_STAT_INTERP_FIELD = BOTH -#GRID_STAT_INTERP_VLD_THRESH = 1.0 -#GRID_STAT_INTERP_SHAPE = SQUARE -#GRID_STAT_INTERP_TYPE_METHOD = NEAREST -#GRID_STAT_INTERP_TYPE_WIDTH = 1 - -#GRID_STAT_GRID_WEIGHT_FLAG = - -# -# Name to identify model (forecast) data in output. -# -# The variable MODEL is recorded in the stat files, and the data in -# these files is then plotted (e.g. using METViewer). Here, we add a -# suffix to MODEL that identifies the data as that for the ensemble -# mean. This makes it easier to identify each curve. -# -MODEL = {{vx_fcst_model_name}}_ensmean -# -# Name to identify observation data in output. -# -OBTYPE = {{obtype}} -# -# Value to enter under the DESC column in the output stat file. -# -GRID_STAT_DESC = NA - -# List of variables to compare in GridStat - FCST_VAR1 variables correspond -# to OBS_VAR1 variables -# Note [FCST/OBS/BOTH]_GRID_STAT_VAR_NAME can be used instead if different evaluations -# are needed for different tools - -GRID_STAT_MET_CONFIG_OVERRIDES = cat_thresh = [NA]; cnt_thresh = [NA]; cnt_logic = UNION; wind_thresh = [NA]; wind_logic = UNION; ci_alpha = [0.05]; rank_corr_flag = FALSE; -# -# List of forecast and corresponding observation fields to process. -# -FCST_VAR1_NAME = {{fieldname_in_met_output}}_{{accum_hh}}_A{{accum_no_pad}}_ENS_MEAN -FCST_VAR1_LEVELS = A{{accum_hh}} -FCST_VAR1_THRESH = {{field_thresholds}} -OBS_VAR1_NAME = {{fieldname_in_obs_input}} -OBS_VAR1_LEVELS = A{{accum_hh}} -OBS_VAR1_THRESH = {{field_thresholds}} -OBS_VAR1_OPTIONS = convert(x) = 100.0*x; -# -# Forecast data time window(s). -# -FCST_GRID_STAT_FILE_WINDOW_BEGIN = 0 -FCST_GRID_STAT_FILE_WINDOW_END = 0 -# -# Observation data time window(s). 
-# -OBS_GRID_STAT_FILE_WINDOW_BEGIN = 0 -OBS_GRID_STAT_FILE_WINDOW_END = 0 - -# MET GridStat neighborhood values -# See the MET User's Guide GridStat section for more information -GRID_STAT_NEIGHBORHOOD_FIELD = BOTH - -# width value passed to nbrhd dictionary in the MET config file -GRID_STAT_NEIGHBORHOOD_WIDTH = 3,5,7 - -# shape value passed to nbrhd dictionary in the MET config file -GRID_STAT_NEIGHBORHOOD_SHAPE = SQUARE - -# cov thresh list passed to nbrhd dictionary in the MET config file -GRID_STAT_NEIGHBORHOOD_COV_THRESH = >=0.5 - -# Set to true to run GridStat separately for each field specified -# Set to false to create one run of GridStat per run time that -# includes all fields specified. -GRID_STAT_ONCE_PER_FIELD = False -# -# Set to true if forecast data is probabilistic. -# -FCST_IS_PROB = False -# -# Only used if FCST_IS_PROB is true - sets probabilistic threshold -# -FCST_GRID_STAT_PROB_THRESH = ==0.1 - -GRID_STAT_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE} - -# Climatology data -#GRID_STAT_CLIMO_MEAN_FILE_NAME = -#GRID_STAT_CLIMO_MEAN_FIELD = -#GRID_STAT_CLIMO_MEAN_REGRID_METHOD = -#GRID_STAT_CLIMO_MEAN_REGRID_WIDTH = -#GRID_STAT_CLIMO_MEAN_REGRID_VLD_THRESH = -#GRID_STAT_CLIMO_MEAN_REGRID_SHAPE = -#GRID_STAT_CLIMO_MEAN_TIME_INTERP_METHOD = -#GRID_STAT_CLIMO_MEAN_MATCH_MONTH = -#GRID_STAT_CLIMO_MEAN_DAY_INTERVAL = -#GRID_STAT_CLIMO_MEAN_HOUR_INTERVAL = - -#GRID_STAT_CLIMO_STDEV_FILE_NAME = -#GRID_STAT_CLIMO_STDEV_FIELD = -#GRID_STAT_CLIMO_STDEV_REGRID_METHOD = -#GRID_STAT_CLIMO_STDEV_REGRID_WIDTH = -#GRID_STAT_CLIMO_STDEV_REGRID_VLD_THRESH = -#GRID_STAT_CLIMO_STDEV_REGRID_SHAPE = -#GRID_STAT_CLIMO_STDEV_TIME_INTERP_METHOD = -#GRID_STAT_CLIMO_STDEV_MATCH_MONTH = -#GRID_STAT_CLIMO_STDEV_DAY_INTERVAL = -#GRID_STAT_CLIMO_STDEV_HOUR_INTERVAL = - -#GRID_STAT_CLIMO_CDF_BINS = 1 -#GRID_STAT_CLIMO_CDF_CENTER_BINS = False -#GRID_STAT_CLIMO_CDF_WRITE_BINS = True - -GRID_STAT_MASK_GRID = - -# Statistical output types -GRID_STAT_OUTPUT_FLAG_FHO = STAT -GRID_STAT_OUTPUT_FLAG_CTC = STAT -GRID_STAT_OUTPUT_FLAG_CTS = STAT -#GRID_STAT_OUTPUT_FLAG_MCTC = NONE -#GRID_STAT_OUTPUT_FLAG_MCTS = NONE -GRID_STAT_OUTPUT_FLAG_CNT = STAT -#GRID_STAT_OUTPUT_FLAG_SL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_SAL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_VL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_VAL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_VCNT = NONE -#GRID_STAT_OUTPUT_FLAG_PCT = NONE -#GRID_STAT_OUTPUT_FLAG_PSTD = NONE -#GRID_STAT_OUTPUT_FLAG_PJC = NONE -#GRID_STAT_OUTPUT_FLAG_PRC = NONE -#GRID_STAT_OUTPUT_FLAG_ECLV = BOTH -GRID_STAT_OUTPUT_FLAG_NBRCTC = STAT -GRID_STAT_OUTPUT_FLAG_NBRCTS = STAT -GRID_STAT_OUTPUT_FLAG_NBRCNT = STAT -#GRID_STAT_OUTPUT_FLAG_GRAD = BOTH -#GRID_STAT_OUTPUT_FLAG_DMAP = NONE - -# NetCDF matched pairs output file -#GRID_STAT_NC_PAIRS_VAR_NAME = -GRID_STAT_NC_PAIRS_FLAG_LATLON = FALSE -GRID_STAT_NC_PAIRS_FLAG_RAW = FALSE -GRID_STAT_NC_PAIRS_FLAG_DIFF = FALSE -GRID_STAT_NC_PAIRS_FLAG_CLIMO = FALSE -#GRID_STAT_NC_PAIRS_FLAG_CLIMO_CDP = FALSE -GRID_STAT_NC_PAIRS_FLAG_WEIGHT = FALSE -GRID_STAT_NC_PAIRS_FLAG_NBRHD = FALSE -#GRID_STAT_NC_PAIRS_FLAG_FOURIER = FALSE -#GRID_STAT_NC_PAIRS_FLAG_GRADIENT = FALSE -#GRID_STAT_NC_PAIRS_FLAG_DISTANCE_MAP = FALSE -GRID_STAT_NC_PAIRS_FLAG_APPLY_MASK = FALSE - -# End of [config] section and start of [dir] section. -[dir] -# -# Directory containing observation input to GridStat. -# -OBS_GRID_STAT_INPUT_DIR = {{obs_input_dir}} -# -# Directory containing forecast input to GridStat. 
-# -FCST_GRID_STAT_INPUT_DIR = {{fcst_input_dir}} -# -# Directory containing climatology mean input to GridStat. Not used in -# this example. -# -GRID_STAT_CLIMO_MEAN_INPUT_DIR = -# -# Directory containing climatology mean input to GridStat. Not used in -# this example. -# -GRID_STAT_CLIMO_STDEV_INPUT_DIR = -# -# Directory in which to write output from GridStat. -# -# OUTPUT_BASE apparently has to be set to something; it cannot be left -# to its default value. But it is not explicitly used elsewhere in this -# configuration file. -# -OUTPUT_BASE = {{output_base}} -GRID_STAT_OUTPUT_DIR = {{output_dir}} -# -# Directory for staging data. -# -STAGING_DIR = {{staging_dir}} - -# End of [dir] section and start of [filename_templates] section. -[filename_templates] -# -# Template for observation input to GridStat relative to -# OBS_GRID_STAT_INPUT_DIR. -# -OBS_GRID_STAT_INPUT_TEMPLATE = {{obs_input_fn_template}} -# -# Template for forecast input to GridStat relative to -# FCST_GRID_STAT_INPUT_DIR. -# -FCST_GRID_STAT_INPUT_TEMPLATE = {{fcst_input_fn_template}} -# -# Template for output from GridStat relative to GRID_STAT_OUTPUT_DIR. -# -GRID_STAT_OUTPUT_TEMPLATE = -# -# Template for climatology input to GridStat relative to -# GRID_STAT_CLIMO_MEAN_INPUT_DIR. Not used in this example. -# -GRID_STAT_CLIMO_MEAN_INPUT_TEMPLATE = -# -# Template for climatology input to GridStat relative to -# GRID_STAT_CLIMO_STDEV_INPUT_DIR. Not used in this example. -# -GRID_STAT_CLIMO_STDEV_INPUT_TEMPLATE = -# -# Variable used to specify one or more verification mask files for -# GridStat. Not used for this example. -# -GRID_STAT_VERIFICATION_MASK_TEMPLATE = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly diff --git a/parm/metplus/GridStat_ensmean_REFC.conf b/parm/metplus/GridStat_ensmean_REFC.conf deleted file mode 100644 index 451c82dfd5..0000000000 --- a/parm/metplus/GridStat_ensmean_REFC.conf +++ /dev/null @@ -1,313 +0,0 @@ -# Ensemble mean GridStat METplus Configuration - -[config] - -# List of applications (tools) to run. -PROCESS_LIST = GridStat - -# time looping - options are INIT, VALID, RETRO, and REALTIME -# If set to INIT or RETRO: -# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set -# If set to VALID or REALTIME: -# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set -LOOP_BY = INIT - -# Format of INIT_BEG and INIT_END using % items -# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. -# see www.strftime.org for more information -# %Y%m%d%H expands to YYYYMMDDHH -INIT_TIME_FMT = %Y%m%d%H - -# Start time for METplus run - must match INIT_TIME_FMT -INIT_BEG = {{cdate}} - -# End time for METplus run - must match INIT_TIME_FMT -INIT_END = {{cdate}} - -# Increment between METplus runs (in seconds if no units are specified). -# Must be >= 60 seconds. -INIT_INCREMENT = 3600 - -# List of forecast leads to process for each run time (init or valid) -# In hours if units are not specified -# If unset, defaults to 0 (don't loop through forecast leads) -LEAD_SEQ = {{fhr_list}} -# -# Order of loops to process data - Options are times, processes -# Not relevant if only one item is in the PROCESS_LIST -# times = run all wrappers in the PROCESS_LIST for a single run time, then -# increment the run time and run all wrappers again until all times have -# been evaluated. 
-# processes = run the first wrapper in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST until all -# wrappers have been run -# -LOOP_ORDER = times -# -# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. -# -LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} -# -# Specify the name of the METplus log file. -# -LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} -# -# Specify the location and name of the final METplus conf file. -# -METPLUS_CONF = {GRID_STAT_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}} -# -# Location of MET configuration file to pass to GridStat. -# -# References PARM_BASE, which is the location of the parm directory -# corresponding to the ush directory of the run_metplus.py script that -# is called or the value of the environment variable METPLUS_PARM_BASE -# if set. -# -GRID_STAT_CONFIG_FILE = {PARM_BASE}/met_config/GridStatConfig_wrapped - -# grid to remap data. Value is set as the 'to_grid' variable in the 'regrid' dictionary -# See MET User's Guide for more information -GRID_STAT_REGRID_TO_GRID = FCST -GRID_STAT_REGRID_VLD_THRESH = 0.5 -GRID_STAT_REGRID_METHOD = BUDGET -GRID_STAT_REGRID_WIDTH = 2 -GRID_STAT_REGRID_SHAPE = SQUARE - -GRID_STAT_INTERP_FIELD = NONE -GRID_STAT_INTERP_VLD_THRESH = 1.0 -GRID_STAT_INTERP_SHAPE = SQUARE -GRID_STAT_INTERP_TYPE_METHOD = NEAREST -GRID_STAT_INTERP_TYPE_WIDTH = 1 - -GRID_STAT_GRID_WEIGHT_FLAG = NONE -# -# Name to identify model (forecast) data in output. -# -# The variable MODEL is recorded in the stat files, and the data in -# these files is then plotted (e.g. using METViewer). Here, we add a -# suffix to MODEL that identifies the data as that for the ensemble -# mean. This makes it easier to identify each curve. -# -MODEL = {{vx_fcst_model_name}}_ensmean -# -# Name to identify observation data in output. -# -OBTYPE = {{obtype}} -# -# Value to enter under the DESC column in the output stat file. -# -GRID_STAT_DESC = NA - -# List of variables to compare in GridStat - FCST_VAR1 variables correspond -# to OBS_VAR1 variables -# Note [FCST/OBS/BOTH]_GRID_STAT_VAR_NAME can be used instead if different evaluations -# are needed for different tools - -GRID_STAT_MET_CONFIG_OVERRIDES = cat_thresh = [NA]; cnt_thresh = [NA]; cnt_logic = UNION; wind_thresh = [NA]; wind_logic = UNION; ci_alpha = [0.05]; rank_corr_flag = FALSE; -# -# String to search for in the foreast input files for forecast variable -# 1. -# -# Note: -# This is the name of the field in the NetCDF file(s) created by MET's -# gen_ens_prod tool. This tool reads in the grib2 file(s) (in this case -# of forecasts) and outputs NetCDF file(s) in which the array names -# consist of the value of fieldname_in_met_output plus a suffix that -# specifies additional properties of the data in the array such as the -# level, the type of statistic, etc. In this case, this suffix is -# "_L0_ENS_MEAN". Thus, below, FCST_VAR1_NAME must be set to the value -# of fieldname_in_met_output with "_L0_ENS_MEAN" appended to it. -# -FCST_VAR1_NAME = {{fieldname_in_met_output}}_L0_ENS_MEAN -FCST_VAR1_LEVELS = L0 -FCST_VAR1_THRESH = {{field_thresholds}} -# -# String to search for in the observation input files for observation -# variable 1. -# -# Note: -# This is the name of the field in the grib2 observation file. 
Thus, -# it should not be set to {{fieldname_in_met_output}} because the -# value of fieldname_in_met_output is in general not the same as the -# name of the field in the grib2 observation file (although it can be -# for certain fields). If you do and it doesn't match, you may get an -# error like this from METplus: -# ERROR : VarInfoGrib2::set_dict() -> unrecognized GRIB2 field abbreviation ... -# -OBS_VAR1_NAME = {{fieldname_in_obs_input}} -OBS_VAR1_LEVELS = Z500 -OBS_VAR1_THRESH = {{field_thresholds}} -OBS_VAR1_OPTIONS = censor_thresh = lt-20; - censor_val = -20.0; - cnt_thresh = [ >15 ]; - cnt_logic = UNION; -# -# Forecast data time window(s). -# -#FCST_GRID_STAT_FILE_WINDOW_BEGIN = 0 -#FCST_GRID_STAT_FILE_WINDOW_END = 0 -# -# Observation data time window(s). -# -OBS_GRID_STAT_FILE_WINDOW_BEGIN = -300 -OBS_GRID_STAT_FILE_WINDOW_END = 300 - -# MET GridStat neighborhood values -# See the MET User's Guide GridStat section for more information -GRID_STAT_NEIGHBORHOOD_FIELD = BOTH - -# width value passed to nbrhd dictionary in the MET config file -GRID_STAT_NEIGHBORHOOD_WIDTH = 3,5,7 - -# shape value passed to nbrhd dictionary in the MET config file -GRID_STAT_NEIGHBORHOOD_SHAPE = SQUARE - -# cov thresh list passed to nbrhd dictionary in the MET config file -GRID_STAT_NEIGHBORHOOD_COV_THRESH = >=0.5 - -# Set to true to run GridStat separately for each field specified -# Set to false to create one run of GridStat per run time that -# includes all fields specified. -GRID_STAT_ONCE_PER_FIELD = False -# -# Set to true if forecast data is probabilistic. -# -FCST_IS_PROB = False -# -# Only used if FCST_IS_PROB is true - sets probabilistic threshold -# -FCST_GRID_STAT_PROB_THRESH = ==0.1 - -GRID_STAT_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE} - -# Climatology data -#GRID_STAT_CLIMO_MEAN_FILE_NAME = -#GRID_STAT_CLIMO_MEAN_FIELD = -#GRID_STAT_CLIMO_MEAN_REGRID_METHOD = -#GRID_STAT_CLIMO_MEAN_REGRID_WIDTH = -#GRID_STAT_CLIMO_MEAN_REGRID_VLD_THRESH = -#GRID_STAT_CLIMO_MEAN_REGRID_SHAPE = -#GRID_STAT_CLIMO_MEAN_TIME_INTERP_METHOD = -#GRID_STAT_CLIMO_MEAN_MATCH_MONTH = -#GRID_STAT_CLIMO_MEAN_DAY_INTERVAL = -#GRID_STAT_CLIMO_MEAN_HOUR_INTERVAL = - -#GRID_STAT_CLIMO_STDEV_FILE_NAME = -#GRID_STAT_CLIMO_STDEV_FIELD = -#GRID_STAT_CLIMO_STDEV_REGRID_METHOD = -#GRID_STAT_CLIMO_STDEV_REGRID_WIDTH = -#GRID_STAT_CLIMO_STDEV_REGRID_VLD_THRESH = -#GRID_STAT_CLIMO_STDEV_REGRID_SHAPE = -#GRID_STAT_CLIMO_STDEV_TIME_INTERP_METHOD = -#GRID_STAT_CLIMO_STDEV_MATCH_MONTH = -#GRID_STAT_CLIMO_STDEV_DAY_INTERVAL = -#GRID_STAT_CLIMO_STDEV_HOUR_INTERVAL = - -GRID_STAT_CLIMO_CDF_BINS = 1 -#GRID_STAT_CLIMO_CDF_CENTER_BINS = False -#GRID_STAT_CLIMO_CDF_WRITE_BINS = True - -GRID_STAT_MASK_GRID = - -# Statistical output types -GRID_STAT_OUTPUT_FLAG_FHO = STAT -GRID_STAT_OUTPUT_FLAG_CTC = STAT -GRID_STAT_OUTPUT_FLAG_CTS = STAT -#GRID_STAT_OUTPUT_FLAG_MCTC = NONE -#GRID_STAT_OUTPUT_FLAG_MCTS = NONE -GRID_STAT_OUTPUT_FLAG_CNT = STAT -#GRID_STAT_OUTPUT_FLAG_SL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_SAL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_VL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_VAL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_VCNT = NONE -#GRID_STAT_OUTPUT_FLAG_PCT = NONE -#GRID_STAT_OUTPUT_FLAG_PSTD = NONE -#GRID_STAT_OUTPUT_FLAG_PJC = NONE -#GRID_STAT_OUTPUT_FLAG_PRC = NONE -#GRID_STAT_OUTPUT_FLAG_ECLV = BOTH -GRID_STAT_OUTPUT_FLAG_NBRCTC = STAT -GRID_STAT_OUTPUT_FLAG_NBRCTS = STAT -GRID_STAT_OUTPUT_FLAG_NBRCNT = STAT -#GRID_STAT_OUTPUT_FLAG_GRAD = BOTH -#GRID_STAT_OUTPUT_FLAG_DMAP = NONE - -# NetCDF matched pairs output file 
-#GRID_STAT_NC_PAIRS_VAR_NAME = -GRID_STAT_NC_PAIRS_FLAG_LATLON = FALSE -GRID_STAT_NC_PAIRS_FLAG_RAW = FALSE -GRID_STAT_NC_PAIRS_FLAG_DIFF = FALSE -GRID_STAT_NC_PAIRS_FLAG_CLIMO = FALSE -#GRID_STAT_NC_PAIRS_FLAG_CLIMO_CDP = FALSE -GRID_STAT_NC_PAIRS_FLAG_WEIGHT = FALSE -GRID_STAT_NC_PAIRS_FLAG_NBRHD = FALSE -#GRID_STAT_NC_PAIRS_FLAG_FOURIER = FALSE -#GRID_STAT_NC_PAIRS_FLAG_GRADIENT = FALSE -#GRID_STAT_NC_PAIRS_FLAG_DISTANCE_MAP = FALSE -GRID_STAT_NC_PAIRS_FLAG_APPLY_MASK = FALSE - -# End of [config] section and start of [dir] section. -[dir] -# -# Directory containing observation input to GridStat. -# -OBS_GRID_STAT_INPUT_DIR = {{obs_input_dir}} -# -# Directory containing forecast input to GridStat. -# -FCST_GRID_STAT_INPUT_DIR = {{fcst_input_dir}} -# -# Directory containing climatology mean input to GridStat. Not used in -# this example. -# -GRID_STAT_CLIMO_MEAN_INPUT_DIR = -# -# Directory containing climatology mean input to GridStat. Not used in -# this example. -# -GRID_STAT_CLIMO_STDEV_INPUT_DIR = -# -# Directory in which to write output from GridStat. -# -# OUTPUT_BASE apparently has to be set to something; it cannot be left -# to its default value. But it is not explicitly used elsewhere in this -# configuration file. -# -OUTPUT_BASE = {{output_base}} -GRID_STAT_OUTPUT_DIR = {{output_dir}} -# -# Directory for staging data. -# -STAGING_DIR = {{staging_dir}} - -# End of [dir] section and start of [filename_templates] section. -[filename_templates] -# -# Template for observation input to GridStat relative to -# OBS_GRID_STAT_INPUT_DIR. -# -OBS_GRID_STAT_INPUT_TEMPLATE = {{obs_input_fn_template}} -# -# Template for forecast input to GridStat relative to -# FCST_GRID_STAT_INPUT_DIR. -# -FCST_GRID_STAT_INPUT_TEMPLATE = {{fcst_input_fn_template}} -# -# Template for output from GridStat relative to GRID_STAT_OUTPUT_DIR. -# -GRID_STAT_OUTPUT_TEMPLATE = -# -# Template for climatology input to GridStat relative to -# GRID_STAT_CLIMO_MEAN_INPUT_DIR. Not used in this example. -# -GRID_STAT_CLIMO_MEAN_INPUT_TEMPLATE = -# -# Template for climatology input to GridStat relative to -# GRID_STAT_CLIMO_STDEV_INPUT_DIR. Not used in this example. -# -GRID_STAT_CLIMO_STDEV_INPUT_TEMPLATE = -# -# Variable used to specify one or more verification mask files for -# GridStat. Not used for this example. -# -GRID_STAT_VERIFICATION_MASK_TEMPLATE = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly diff --git a/parm/metplus/GridStat_ensmean_RETOP.conf b/parm/metplus/GridStat_ensmean_RETOP.conf deleted file mode 100644 index a881ed3ab5..0000000000 --- a/parm/metplus/GridStat_ensmean_RETOP.conf +++ /dev/null @@ -1,315 +0,0 @@ -# Ensemble mean GridStat METplus Configuration - -[config] - -# List of applications (tools) to run. -PROCESS_LIST = GridStat - -# time looping - options are INIT, VALID, RETRO, and REALTIME -# If set to INIT or RETRO: -# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set -# If set to VALID or REALTIME: -# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set -LOOP_BY = INIT - -# Format of INIT_BEG and INIT_END using % items -# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. -# see www.strftime.org for more information -# %Y%m%d%H expands to YYYYMMDDHH -INIT_TIME_FMT = %Y%m%d%H - -# Start time for METplus run - must match INIT_TIME_FMT -INIT_BEG = {{cdate}} - -# End time for METplus run - must match INIT_TIME_FMT -INIT_END = {{cdate}} - -# Increment between METplus runs (in seconds if no units are specified). -# Must be >= 60 seconds. 
-INIT_INCREMENT = 3600 - -# List of forecast leads to process for each run time (init or valid) -# In hours if units are not specified -# If unset, defaults to 0 (don't loop through forecast leads) -LEAD_SEQ = {{fhr_list}} -# -# Order of loops to process data - Options are times, processes -# Not relevant if only one item is in the PROCESS_LIST -# times = run all wrappers in the PROCESS_LIST for a single run time, then -# increment the run time and run all wrappers again until all times have -# been evaluated. -# processes = run the first wrapper in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST until all -# wrappers have been run -# -LOOP_ORDER = times -# -# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. -# -LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} -# -# Specify the name of the METplus log file. -# -LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} -# -# Specify the location and name of the final METplus conf file. -# -METPLUS_CONF = {GRID_STAT_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}} -# -# Location of MET configuration file to pass to GridStat. -# -# References PARM_BASE, which is the location of the parm directory -# corresponding to the ush directory of the run_metplus.py script that -# is called or the value of the environment variable METPLUS_PARM_BASE -# if set. -# -GRID_STAT_CONFIG_FILE = {PARM_BASE}/met_config/GridStatConfig_wrapped - -# grid to remap data. Value is set as the 'to_grid' variable in the 'regrid' dictionary -# See MET User's Guide for more information -GRID_STAT_REGRID_TO_GRID = FCST -GRID_STAT_REGRID_VLD_THRESH = 0.5 -GRID_STAT_REGRID_METHOD = BUDGET -GRID_STAT_REGRID_WIDTH = 2 -GRID_STAT_REGRID_SHAPE = SQUARE - -GRID_STAT_INTERP_FIELD = NONE -GRID_STAT_INTERP_VLD_THRESH = 1.0 -GRID_STAT_INTERP_SHAPE = SQUARE -GRID_STAT_INTERP_TYPE_METHOD = NEAREST -GRID_STAT_INTERP_TYPE_WIDTH = 1 - -GRID_STAT_GRID_WEIGHT_FLAG = NONE -# -# Name to identify model (forecast) data in output. -# -# The variable MODEL is recorded in the stat files, and the data in -# these files is then plotted (e.g. using METViewer). Here, we add a -# suffix to MODEL that identifies the data as that for the ensemble -# mean. This makes it easier to identify each curve. -# -MODEL = {{vx_fcst_model_name}}_ensmean -# -# Name to identify observation data in output. -# -OBTYPE = {{obtype}} -# -# Value to enter under the DESC column in the output stat file. -# -GRID_STAT_DESC = NA - -# List of variables to compare in GridStat - FCST_VAR1 variables correspond -# to OBS_VAR1 variables -# Note [FCST/OBS/BOTH]_GRID_STAT_VAR_NAME can be used instead if different evaluations -# are needed for different tools - -GRID_STAT_MET_CONFIG_OVERRIDES = cat_thresh = [NA]; cnt_thresh = [NA]; cnt_logic = UNION; wind_thresh = [NA]; wind_logic = UNION; ci_alpha = [0.05]; rank_corr_flag = FALSE; -# -# String to search for in the foreast input files for forecast variable -# 1. -# -# Note: -# This is the name of the field in the NetCDF file(s) created by MET's -# gen_ens_prod tool. This tool reads in the grib2 file(s) (in this case -# of forecasts) and outputs NetCDF file(s) in which the array names -# consist of the value of fieldname_in_met_output plus a suffix that -# specifies additional properties of the data in the array such as the -# level, the type of statistic, etc. In this case, this suffix is -# "_L0_ENS_MEAN". Thus, below, FCST_VAR1_NAME must be set to the value -# of fieldname_in_met_output with "_L0_ENS_MEAN" appended to it. 
-# -FCST_VAR1_NAME = {{fieldname_in_met_output}}_L0_ENS_MEAN -FCST_VAR1_LEVELS = L0 -FCST_VAR1_THRESH = {{field_thresholds}} -FCST_VAR1_OPTIONS = convert(x) = x * 3.28084 * 0.001; -# -# String to search for in the observation input files for observation -# variable 1. -# -# Note: -# This is the name of the field in the grib2 observation file. Thus, -# it should not be set to {{fieldname_in_met_output}} because the -# value of fieldname_in_met_output is in general not the same as the -# name of the field in the grib2 observation file (although it can be -# for certain fields). If you do and it doesn't match, you may get an -# error like this from METplus: -# ERROR : VarInfoGrib2::set_dict() -> unrecognized GRIB2 field abbreviation ... -# -OBS_VAR1_NAME = {{fieldname_in_obs_input}} -OBS_VAR1_LEVELS = Z500 -OBS_VAR1_THRESH = {{field_thresholds}} -OBS_VAR1_OPTIONS = censor_thresh = lt-20.0; - censor_val = -20.0; - cnt_thresh = [ >15 ]; - cnt_logic = UNION; - convert(x) = x * 3280.84 * 0.001; -# -# Forecast data time window(s). -# -#FCST_GRID_STAT_FILE_WINDOW_BEGIN = 0 -#FCST_GRID_STAT_FILE_WINDOW_END = 0 -# -# Observation data time window(s). -# -OBS_GRID_STAT_FILE_WINDOW_BEGIN = -300 -OBS_GRID_STAT_FILE_WINDOW_END = 300 - -# MET GridStat neighborhood values -# See the MET User's Guide GridStat section for more information -GRID_STAT_NEIGHBORHOOD_FIELD = BOTH - -# width value passed to nbrhd dictionary in the MET config file -GRID_STAT_NEIGHBORHOOD_WIDTH = 3,5,7 - -# shape value passed to nbrhd dictionary in the MET config file -GRID_STAT_NEIGHBORHOOD_SHAPE = SQUARE - -# cov thresh list passed to nbrhd dictionary in the MET config file -GRID_STAT_NEIGHBORHOOD_COV_THRESH = >=0.5 - -# Set to true to run GridStat separately for each field specified -# Set to false to create one run of GridStat per run time that -# includes all fields specified. -GRID_STAT_ONCE_PER_FIELD = False -# -# Set to true if forecast data is probabilistic. 
-# -FCST_IS_PROB = False -# -# Only used if FCST_IS_PROB is true - sets probabilistic threshold -# -FCST_GRID_STAT_PROB_THRESH = ==0.1 - -GRID_STAT_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE} - -# Climatology data -#GRID_STAT_CLIMO_MEAN_FILE_NAME = -#GRID_STAT_CLIMO_MEAN_FIELD = -#GRID_STAT_CLIMO_MEAN_REGRID_METHOD = -#GRID_STAT_CLIMO_MEAN_REGRID_WIDTH = -#GRID_STAT_CLIMO_MEAN_REGRID_VLD_THRESH = -#GRID_STAT_CLIMO_MEAN_REGRID_SHAPE = -#GRID_STAT_CLIMO_MEAN_TIME_INTERP_METHOD = -#GRID_STAT_CLIMO_MEAN_MATCH_MONTH = -#GRID_STAT_CLIMO_MEAN_DAY_INTERVAL = -#GRID_STAT_CLIMO_MEAN_HOUR_INTERVAL = - -#GRID_STAT_CLIMO_STDEV_FILE_NAME = -#GRID_STAT_CLIMO_STDEV_FIELD = -#GRID_STAT_CLIMO_STDEV_REGRID_METHOD = -#GRID_STAT_CLIMO_STDEV_REGRID_WIDTH = -#GRID_STAT_CLIMO_STDEV_REGRID_VLD_THRESH = -#GRID_STAT_CLIMO_STDEV_REGRID_SHAPE = -#GRID_STAT_CLIMO_STDEV_TIME_INTERP_METHOD = -#GRID_STAT_CLIMO_STDEV_MATCH_MONTH = -#GRID_STAT_CLIMO_STDEV_DAY_INTERVAL = -#GRID_STAT_CLIMO_STDEV_HOUR_INTERVAL = - -GRID_STAT_CLIMO_CDF_BINS = 1 -#GRID_STAT_CLIMO_CDF_CENTER_BINS = False -#GRID_STAT_CLIMO_CDF_WRITE_BINS = True - -GRID_STAT_MASK_GRID = - -# Statistical output types -GRID_STAT_OUTPUT_FLAG_FHO = STAT -GRID_STAT_OUTPUT_FLAG_CTC = STAT -GRID_STAT_OUTPUT_FLAG_CTS = STAT -#GRID_STAT_OUTPUT_FLAG_MCTC = NONE -#GRID_STAT_OUTPUT_FLAG_MCTS = NONE -GRID_STAT_OUTPUT_FLAG_CNT = STAT -#GRID_STAT_OUTPUT_FLAG_SL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_SAL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_VL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_VAL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_VCNT = NONE -#GRID_STAT_OUTPUT_FLAG_PCT = NONE -#GRID_STAT_OUTPUT_FLAG_PSTD = NONE -#GRID_STAT_OUTPUT_FLAG_PJC = NONE -#GRID_STAT_OUTPUT_FLAG_PRC = NONE -#GRID_STAT_OUTPUT_FLAG_ECLV = BOTH -GRID_STAT_OUTPUT_FLAG_NBRCTC = STAT -GRID_STAT_OUTPUT_FLAG_NBRCTS = STAT -GRID_STAT_OUTPUT_FLAG_NBRCNT = STAT -#GRID_STAT_OUTPUT_FLAG_GRAD = BOTH -#GRID_STAT_OUTPUT_FLAG_DMAP = NONE - -# NetCDF matched pairs output file -#GRID_STAT_NC_PAIRS_VAR_NAME = -GRID_STAT_NC_PAIRS_FLAG_LATLON = FALSE -GRID_STAT_NC_PAIRS_FLAG_RAW = FALSE -GRID_STAT_NC_PAIRS_FLAG_DIFF = FALSE -GRID_STAT_NC_PAIRS_FLAG_CLIMO = FALSE -#GRID_STAT_NC_PAIRS_FLAG_CLIMO_CDP = FALSE -GRID_STAT_NC_PAIRS_FLAG_WEIGHT = FALSE -GRID_STAT_NC_PAIRS_FLAG_NBRHD = FALSE -#GRID_STAT_NC_PAIRS_FLAG_FOURIER = FALSE -#GRID_STAT_NC_PAIRS_FLAG_GRADIENT = FALSE -#GRID_STAT_NC_PAIRS_FLAG_DISTANCE_MAP = FALSE -GRID_STAT_NC_PAIRS_FLAG_APPLY_MASK = FALSE - -# End of [config] section and start of [dir] section. -[dir] -# -# Directory containing observation input to GridStat. -# -OBS_GRID_STAT_INPUT_DIR = {{obs_input_dir}} -# -# Directory containing forecast input to GridStat. -# -FCST_GRID_STAT_INPUT_DIR = {{fcst_input_dir}} -# -# Directory containing climatology mean input to GridStat. Not used in -# this example. -# -GRID_STAT_CLIMO_MEAN_INPUT_DIR = -# -# Directory containing climatology mean input to GridStat. Not used in -# this example. -# -GRID_STAT_CLIMO_STDEV_INPUT_DIR = -# -# Directory in which to write output from GridStat. -# -# OUTPUT_BASE apparently has to be set to something; it cannot be left -# to its default value. But it is not explicitly used elsewhere in this -# configuration file. -# -OUTPUT_BASE = {{output_base}} -GRID_STAT_OUTPUT_DIR = {{output_dir}} -# -# Directory for staging data. -# -STAGING_DIR = {{staging_dir}} - -# End of [dir] section and start of [filename_templates] section. -[filename_templates] -# -# Template for observation input to GridStat relative to -# OBS_GRID_STAT_INPUT_DIR. 
-# -OBS_GRID_STAT_INPUT_TEMPLATE = {{obs_input_fn_template}} -# -# Template for forecast input to GridStat relative to -# FCST_GRID_STAT_INPUT_DIR. -# -FCST_GRID_STAT_INPUT_TEMPLATE = {{fcst_input_fn_template}} -# -# Template for output from GridStat relative to GRID_STAT_OUTPUT_DIR. -# -GRID_STAT_OUTPUT_TEMPLATE = -# -# Template for climatology input to GridStat relative to -# GRID_STAT_CLIMO_MEAN_INPUT_DIR. Not used in this example. -# -GRID_STAT_CLIMO_MEAN_INPUT_TEMPLATE = -# -# Template for climatology input to GridStat relative to -# GRID_STAT_CLIMO_STDEV_INPUT_DIR. Not used in this example. -# -GRID_STAT_CLIMO_STDEV_INPUT_TEMPLATE = -# -# Variable used to specify one or more verification mask files for -# GridStat. Not used for this example. -# -GRID_STAT_VERIFICATION_MASK_TEMPLATE = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly diff --git a/parm/metplus/GridStat_ensprob.conf b/parm/metplus/GridStat_ensprob.conf new file mode 100644 index 0000000000..abde89ef4b --- /dev/null +++ b/parm/metplus/GridStat_ensprob.conf @@ -0,0 +1,646 @@ +# Ensemble probabilistic {{MetplusToolName}} METplus Configuration + +[config] + +# List of applications (tools) to run. +PROCESS_LIST = {{MetplusToolName}} + +# time looping - options are INIT, VALID, RETRO, and REALTIME +# If set to INIT or RETRO: +# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set +# If set to VALID or REALTIME: +# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set +LOOP_BY = INIT + +# Format of INIT_BEG and INIT_END using % items +# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. +# see www.strftime.org for more information +# %Y%m%d%H expands to YYYYMMDDHH +INIT_TIME_FMT = %Y%m%d%H + +# Start time for METplus run - must match INIT_TIME_FMT +INIT_BEG = {{cdate}} + +# End time for METplus run - must match INIT_TIME_FMT +INIT_END = {{cdate}} + +# Increment between METplus runs (in seconds if no units are specified). +# Must be >= 60 seconds. +INIT_INCREMENT = 3600 + +# List of forecast leads to process for each run time (init or valid) +# In hours if units are not specified +# If unset, defaults to 0 (don't loop through forecast leads) +LEAD_SEQ = {{vx_leadhr_list}} +# +# Order of loops to process data - Options are times, processes +# Not relevant if only one item is in the PROCESS_LIST +# times = run all wrappers in the PROCESS_LIST for a single run time, then +# increment the run time and run all wrappers again until all times have +# been evaluated. +# processes = run the first wrapper in the PROCESS_LIST for all times +# specified, then repeat for the next item in the PROCESS_LIST until all +# wrappers have been run +# +LOOP_ORDER = times +# +# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. +# +LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} +# +# Specify the name of the METplus log file. +# +LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} +# +# Specify the location and name of the final METplus conf file. +# +METPLUS_CONF = {{ '{' ~ METPLUS_TOOL_NAME ~ '_OUTPUT_DIR}' }}/metplus_final.{{metplus_config_fn}} +# +# Location of MET configuration file to pass to {{MetplusToolName}}. +# +# References PARM_BASE, which is the location of the parm directory +# corresponding to the ush directory of the run_metplus.py script that +# is called or the value of the environment variable METPLUS_PARM_BASE +# if set. +# +{{METPLUS_TOOL_NAME}}_CONFIG_FILE = {PARM_BASE}/met_config/{{MetplusToolName}}Config_wrapped + +# grid to remap data. 
Value is set as the 'to_grid' variable in the 'regrid' dictionary
+# See MET User's Guide for more information
+{{METPLUS_TOOL_NAME}}_REGRID_TO_GRID = FCST
+{{METPLUS_TOOL_NAME}}_REGRID_VLD_THRESH = 0.5
+{{METPLUS_TOOL_NAME}}_REGRID_METHOD = BUDGET
+{{METPLUS_TOOL_NAME}}_REGRID_WIDTH = 2
+{{METPLUS_TOOL_NAME}}_REGRID_SHAPE = SQUARE
+
+{%- if input_field_group in ['APCP', 'ASNOW'] %}
+
+#{{METPLUS_TOOL_NAME}}_INTERP_FIELD = BOTH
+#{{METPLUS_TOOL_NAME}}_INTERP_VLD_THRESH = 1.0
+#{{METPLUS_TOOL_NAME}}_INTERP_SHAPE = SQUARE
+#{{METPLUS_TOOL_NAME}}_INTERP_TYPE_METHOD = NEAREST
+#{{METPLUS_TOOL_NAME}}_INTERP_TYPE_WIDTH = 1
+
+#{{METPLUS_TOOL_NAME}}_GRID_WEIGHT_FLAG =
+{%- elif input_field_group in ['REFC', 'RETOP'] %}
+
+{{METPLUS_TOOL_NAME}}_INTERP_FIELD = NONE
+{{METPLUS_TOOL_NAME}}_INTERP_VLD_THRESH = 1.0
+{{METPLUS_TOOL_NAME}}_INTERP_SHAPE = SQUARE
+{{METPLUS_TOOL_NAME}}_INTERP_TYPE_METHOD = NEAREST
+{{METPLUS_TOOL_NAME}}_INTERP_TYPE_WIDTH = 1
+
+{{METPLUS_TOOL_NAME}}_GRID_WEIGHT_FLAG = NONE
+{%- endif %}
+#
+# Name to identify model (forecast) data in output.
+#
+# The variable MODEL is recorded in the stat files, and the data in
+# these files is then plotted (e.g. using METViewer). Here, we add a
+# suffix to MODEL that identifies the data as ensemble-probabilistic.
+# This makes it easier to identify each curve.
+#
+MODEL = {{vx_fcst_model_name}}_ensprob
+#
+# Name to identify observation data in output.
+#
+OBTYPE = {{obtype}}
+#
+# Value to enter under the DESC column in the output stat file.
+#
+{{METPLUS_TOOL_NAME}}_DESC = NA
+#
+# Overrides of MET configuration defaults.
+#
+{{METPLUS_TOOL_NAME}}_MET_CONFIG_OVERRIDES = cat_thresh = [NA]; cnt_thresh = [NA]; cnt_logic = UNION; wind_thresh = [NA]; wind_logic = UNION; ci_alpha = [0.05]; rank_corr_flag = FALSE;
+#
+# List of forecast and corresponding observation fields to process.
+#
+{#-
+Import the file containing jinja macros.
+#}
+{%- import metplus_templates_dir ~ '/metplus_macros.jinja' as metplus_macros %}
+
+{#-
+Set the probabilistic threshold to be used for the forecast field. If
+necessary, this can be changed to be an input parameter in the calling
+script instead of a hard-coded value as below.
+#}
+{%- set thresh_fcst_prob = '==0.1' %}
+
+{#-
+Jinja requires certain variables to be defined globally within the template
+before they can be used in if-statements and other scopes (see Jinja
+scoping rules). Define such variables.
+#}
+{%- set indx_level_fcst = '' %}
+{%- set indx_thresh_fcst = '' %}
+{%- set error_msg = '' %}
+{%- set opts_indent = '' %}
+{%- set opts_indent_len = '' %}
+
+{%- set field_fcst = '' %}
+{%- set field_obs = '' %}
+{%- set level_fcst = '' %}
+{%- set level_obs = '' %}
+{%- set thresh_fcst = '' %}
+{%- set thresh_obs = '' %}
+{%- set thresh_fcst_and_or = '' %}
+
+{#-
+Get the set of valid field groups and ensure that the specified input
+field group appears in this list.
+#}
+{%- set valid_field_groups = vx_config_dict.keys()|list %}
+{{- metplus_macros.check_field_group(valid_field_groups, input_field_group) }}
+
+{#-
+Reset the input forecast level so that if it happens to be an accumulation
+(e.g. 'A03'), the leading zeros in front of the hour are stripped out (e.g.
+reset to 'A3').
+#}
+{%- set input_level_fcst = metplus_macros.get_accumulation_no_zero_pad(input_level_fcst) %}
+
+{#-
+Extract from the configuration dictionary the set (which itself is a
+dictionary) of fields, levels, and thresholds corresponding to the input
+field group.
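+
+(As a purely illustrative sketch, for input_field_group = 'APCP' this
+sub-dictionary might look roughly like
+  {'APCP': {'A1': ['gt0.0', 'ge2.54']}}
+with levels as keys and lists of thresholds as values; these particular
+entries are hypothetical, not taken from the workflow's configuration.)
+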
Then set the delimiter string that separates forecast and
+observation values in the various items (i.e. dictionary keys and values
+representing field names, levels, and thresholds) in this dictionary.
+#}
+{%- set fields_levels_threshes_cpld = vx_config_dict[input_field_group] %}
+{%- set delim_str = metplus_macros.set_delim_str() %}
+
+{#-
+Loop over the fields and set field names, levels, thresholds, and/or
+options for each field, both for forecasts and for observations, in the
+METplus configuration file.
+#}
+{%- set ns = namespace(var_count = 0) %}
+{#-
+Loop over each field twice, the first time treating the forecast field
+as probabilistic and the second time treating it as a scalar.
+#}
+{%- for treat_fcst_as_prob in [True, False] %}
+
+ {%- for field_cpld, levels_threshes_cpld in fields_levels_threshes_cpld.items() %}
+
+ {%- if delim_str in field_cpld %}
+ {%- set field_fcst, field_obs = field_cpld.split(delim_str) %}
+ {%- else %}
+ {%- set field_fcst = field_cpld %}
+ {%- set field_obs = field_cpld %}
+ {%- endif %}
+
+{#-
+For convenience, create lists of valid forecast and observation levels
+for the current field.
+#}
+ {%- set valid_levels_fcst = [] %}
+ {%- set valid_levels_obs = [] %}
+ {%- for level_cpld, threshes_cpld in levels_threshes_cpld.items() %}
+ {%- if delim_str in level_cpld %}
+ {%- set level_fcst, level_obs = level_cpld.split(delim_str) %}
+ {%- else %}
+ {%- set level_fcst = level_cpld %}
+ {%- set level_obs = level_cpld %}
+ {%- endif %}
+ {%- set tmp = valid_levels_fcst.append(level_fcst) %}
+ {%- set tmp = valid_levels_obs.append(level_obs) %}
+ {%- endfor %}
+
+{#-
+Make sure that the input forecast level (input_level_fcst) is set to a
+valid value.
+#}
+ {%- if (input_level_fcst != 'all') and (input_level_fcst not in valid_levels_fcst) %}
+ {%- set error_msg = '\n' ~
+'The input forecast level (input_level_fcst) must be set either to \'all\'\n' ~
+'or to one of the elements in the list of valid levels (valid_levels_fcst)\n' ~
+'for the current forecast field (field_fcst). This is not the case:\n' ~
+' field_fcst = ' ~ field_fcst ~ '\n' ~
+' valid_levels_fcst = ' ~ valid_levels_fcst ~ '\n' ~
+' input_level_fcst = ' ~ input_level_fcst ~ '\n' %}
+ {{metplus_macros.print_err_and_quit(error_msg)}}
+ {%- endif %}
+
+{#-
+Add comment depending on whether or not the field is being treated
+probabilistically.
+#}
+ {%- if treat_fcst_as_prob %}
+# FREQ
+# Process as probability
+#
+ {%- else %}
+#
+# Process as scalars for neighborhood methods
+## Note that the number of forecast and obs thresholds must match
+## but won't actually be applied to NBRCNT outputs with "nbrhd.field = OBS;"
+#
+ {%- endif %}
+
+{#-
+Loop over the (coupled) levels and corresponding lists of thresholds.
+Extract from these the level values for forecasts and observations and
+use them to set the forecast and observation field names, levels,
+thresholds, and/or options in the METplus configuration file.
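+
+(For instance, assuming set_delim_str() returns '%%', which is only an
+illustrative guess at the delimiter, a coupled entry such as
+'REFC%%MergedReflectivityQCComposite' splits into field_fcst = 'REFC' and
+field_obs = 'MergedReflectivityQCComposite', while an uncoupled entry is
+used for both the forecast and the observation side.)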
+#}
+ {%- for level_cpld, threshes_cpld in levels_threshes_cpld.items() %}
+
+ {%- if delim_str in level_cpld %}
+ {%- set level_fcst, level_obs = level_cpld.split(delim_str) %}
+ {%- else %}
+ {%- set level_fcst = level_cpld %}
+ {%- set level_obs = level_cpld %}
+ {%- endif %}
+
+ {%- set valid_threshes_fcst = [] %}
+ {%- set valid_threshes_obs = [] %}
+ {%- for thresh_cpld in threshes_cpld %}
+ {%- if delim_str in thresh_cpld %}
+ {%- set thresh_fcst, thresh_obs = thresh_cpld.split(delim_str) %}
+ {%- else %}
+ {%- set thresh_fcst = thresh_cpld %}
+ {%- set thresh_obs = thresh_cpld %}
+ {%- endif %}
+ {%- set tmp = valid_threshes_fcst.append(thresh_fcst) %}
+ {%- set tmp = valid_threshes_obs.append(thresh_obs) %}
+ {%- endfor %}
+
+ {%- if (input_level_fcst == 'all') or (input_level_fcst == level_fcst) %}
+
+ {%- for thresh_fcst in valid_threshes_fcst %}
+
+ {%- if (input_thresh_fcst == 'all') or (input_thresh_fcst == thresh_fcst) %}
+{#-
+Increment the METplus variable counter.
+#}
+ {%- set ns.var_count = ns.var_count+1 %}
+
+{#-
+Set forecast field name. Note that this has to exactly match the name
+of the field in the input forecast file(s).
+
+The input forecast files are generated by the MET/METplus GenEnsProd
+tool. That tool adds the field's level to the variable names in its
+output file to ensure that all variables in the file have distinct names.
+For example, if the same field, say APCP, is output at two different
+levels, say at A3 and A6 (for APCP, "levels" are really accumulation
+periods), there need to be two variables in the output file, and they
+obviously can't both be named "APCP", so GenEnsProd names one "APCP_A3"
+and the other "APCP_A6". Here, the level is stored in the variable
+level_fcst and, below, is included in the name of the forecast field.
+
+For accumulated fields, the field name in the input forecast file contains
+TWO references to the accumulation period. The first is the level of the
+forecast field added by GenEnsProd as described above. The second is
+another reference to this same level (accumulation period) but added by
+the MET/METplus PcpCombine tool (whose output file is the input into
+GenEnsProd). PcpCombine adds this reference to the level (really the
+accumulation period) to the field's name for the same reason that
+GenEnsProd does, i.e. to ensure that the names of variables in the output
+file are distinct. Here, this accumulation period is stored in the
+variable accum_hh. Thus, for accumulated fields, below we add both
+accum_hh and level_fcst to the field name to get an exact field name
+match.
+#}
+ {%- set thresh_fcst_and_or = thresh_fcst|replace("&&", ".and.") %}
+ {%- set thresh_fcst_and_or = thresh_fcst_and_or|replace("||", ".or.") %}
+ {%- if (input_field_group in ['APCP', 'ASNOW']) %}
+FCST_VAR{{ns.var_count}}_NAME = {{field_fcst}}_{{accum_hh}}_{{level_fcst}}_ENS_FREQ_{{thresh_fcst_and_or}}
+ {%- else %}
+FCST_VAR{{ns.var_count}}_NAME = {{field_fcst}}_{{level_fcst}}_ENS_FREQ_{{thresh_fcst_and_or}}
+ {%- endif %}
+
+{#-
+Set forecast field level.
+#}
+FCST_VAR{{ns.var_count}}_LEVELS = {{level_fcst}}
+
+{#-
+Set forecast field threshold.
+Note that since the forecast field being read in is actually a field of
+probabilities, we set the forecast threshold to a probabilistic one
+(thresh_fcst_prob) and not to the physical threshold (thresh_fcst) in
+the dictionary of forecast field names, levels, and thresholds that we
+are looping over.
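+
+(For example, under these settings a hypothetical first APCP entry with
+accum_hh = '01', level_fcst = 'A1', and physical threshold 'gt0.0' would
+render as
+  FCST_VAR1_NAME = APCP_01_A1_ENS_FREQ_gt0.0
+  FCST_VAR1_LEVELS = A1
+  FCST_VAR1_THRESH = ==0.1
+so the physical threshold appears only in the field name, and only the
+probabilistic threshold is applied during verification.)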
+#}
+FCST_VAR{{ns.var_count}}_THRESH = {{thresh_fcst_prob}}
+
+{#-
+Set forecast field options.
+#}
+ {%- set opts_indent_len = 20 %}
+ {%- if (ns.var_count > 9) and (ns.var_count <= 99) %}
+ {%- set opts_indent_len = opts_indent_len + 1 %}
+ {%- elif (ns.var_count > 99) and (ns.var_count <= 999) %}
+ {%- set opts_indent_len = opts_indent_len + 2 %}
+ {%- elif (ns.var_count > 999) %}
+ {%- set opts_indent_len = opts_indent_len + 3 %}
+ {%- endif %}
+ {%- set opts_indent = ' '*opts_indent_len %}
+
+ {%- if not treat_fcst_as_prob %}
+FCST_VAR{{ns.var_count}}_OPTIONS = prob = FALSE;
+ {%- endif %}
+
+{#-
+Set observation field name. Note that this has to exactly match the name
+of the field in the input observation file.
+
+For accumulated fields, the input observation file is generated by MET's
+PcpCombine tool. In that file, the field name consists of the observation
+field name here (field_obs) with the accumulation period appended to it
+(separated by an underscore), so we must do the same here to get an exact
+match.
+#}
+ {%- if (input_field_group in ['APCP', 'ASNOW']) %}
+OBS_VAR{{ns.var_count}}_NAME = {{field_obs}}_{{accum_hh}}
+ {%- else %}
+OBS_VAR{{ns.var_count}}_NAME = {{field_obs}}
+ {%- endif %}
+
+{#-
+Set observation field level.
+#}
+ {%- set indx_level_fcst = valid_levels_fcst.index(level_fcst) %}
+ {%- set level_obs = valid_levels_obs[indx_level_fcst] %}
+OBS_VAR{{ns.var_count}}_LEVELS = {{level_obs}}
+
+{#-
+Set observation field threshold. Note that no observation thresholds
+are included in the METplus configuration file if input_thresh_fcst is
+set to 'none'.
+#}
+ {%- if (input_thresh_fcst != 'none') %}
+{#-
+Set the observation threshold. This is given by the element in the list
+of valid observation thresholds that has the same index as that of the
+current forecast threshold (thresh_fcst) in the list of valid forecast
+thresholds.
+#}
+ {%- set indx_thresh_fcst = valid_threshes_fcst.index(thresh_fcst) %}
+ {%- set thresh_obs = valid_threshes_obs[indx_thresh_fcst] %}
+OBS_VAR{{ns.var_count}}_THRESH = {{thresh_obs}}
+ {%- endif %}
+
+{#-
+Set observation field options.
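+
+(The per-field-group blocks that follow carry over the options from the
+original per-field configuration files: unit conversions for ASNOW and
+RETOP, censoring and count-threshold settings for the REFC and RETOP
+observations, and, on the non-probabilistic pass, a width-1 OBS
+neighborhood, per the NBRCNT note above.)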
+#} + {%- set opts_indent_len = opts_indent_len - 1 %} + {%- set opts_indent = ' '*opts_indent_len %} + + {%- if input_field_group == 'APCP' %} + + {%- if field_obs == 'APCP' %} + {%- if not treat_fcst_as_prob %} +OBS_VAR{{ns.var_count}}_OPTIONS = nbrhd = { field = OBS; shape = SQUARE; width = [ 1 ]; vld_thresh = 1.0; } + {%- endif %} + {%- endif %} + + {%- elif input_field_group == 'ASNOW' %} + + {%- if field_obs == 'ASNOW' %} + {%- if treat_fcst_as_prob %} +OBS_VAR{{ns.var_count}}_OPTIONS = convert(x) = 100.0*x; + {%- else %} +OBS_VAR{{ns.var_count}}_OPTIONS = nbrhd = { field = OBS; shape = SQUARE; width = [ 1 ]; vld_thresh = 1.0; }; +{{opts_indent}}convert(x) = 100.0*x; + {%- endif %} + {%- endif %} + + {%- elif input_field_group == 'REFC' %} + + {%- if field_obs == 'MergedReflectivityQCComposite' %} +OBS_VAR{{ns.var_count}}_OPTIONS = censor_thresh = lt-20; +{{opts_indent}}censor_val = -20.0; +{{opts_indent}}cnt_thresh = [ >15 ]; +{{opts_indent}}cnt_logic = UNION; + {%- if not treat_fcst_as_prob %} +{{opts_indent}}nbrhd = { field = OBS; shape = SQUARE; width = [ 1 ]; vld_thresh = 1.0; } + {%- endif %} + {%- endif %} + + {%- elif input_field_group == 'RETOP' %} + + {%- if field_obs == 'EchoTop18' %} +OBS_VAR{{ns.var_count}}_OPTIONS = censor_thresh = lt-20.0; +{{opts_indent}}censor_val = -20.0; +{{opts_indent}}cnt_thresh = [ >15 ]; +{{opts_indent}}cnt_logic = UNION; +{{opts_indent}}convert(x) = x * 3280.84 * 0.001; + {%- if not treat_fcst_as_prob %} +{{opts_indent}}nbrhd = { field = OBS; shape = SQUARE; width = [ 1 ]; vld_thresh = 1.0; } + {%- endif %} + {%- endif %} + + {%- endif %} + +{#- +Print out a newline to separate the settings for the current field (both +forecast and observation settings) from those for the next field. +#} + {{- '\n' }} + + {%- endif %} + {%- endfor %} + + {%- endif %} + + {%- endfor %} + {%- endfor %} +{%- endfor %} +# +# Forecast data time window(s). +# +{%- set comment_or_null = '' %} +{%- set obs_window_abs_val = '0' %} +{%- if input_field_group in ['REFC', 'RETOP'] %} + {%- set comment_or_null = '#' %} + {%- set obs_window_abs_val = '300' %} +{%- endif %} +{{comment_or_null}}FCST_{{METPLUS_TOOL_NAME}}_FILE_WINDOW_BEGIN = 0 +{{comment_or_null}}FCST_{{METPLUS_TOOL_NAME}}_FILE_WINDOW_END = 0 +# +# Observation data time window(s). +# +{#- +Use integers for seconds, but int can be changed to float if there is a +need to go to sub-seconds. +#} +OBS_{{METPLUS_TOOL_NAME}}_FILE_WINDOW_BEGIN = {{ 0 - obs_window_abs_val|int }} +OBS_{{METPLUS_TOOL_NAME}}_FILE_WINDOW_END = {{ obs_window_abs_val|int }} + +# MET {{MetplusToolName}} neighborhood values +# See the MET User's Guide {{MetplusToolName}} section for more information +{{METPLUS_TOOL_NAME}}_NEIGHBORHOOD_FIELD = + +# width value passed to nbrhd dictionary in the MET config file +{{METPLUS_TOOL_NAME}}_NEIGHBORHOOD_WIDTH = + +# shape value passed to nbrhd dictionary in the MET config file +{{METPLUS_TOOL_NAME}}_NEIGHBORHOOD_SHAPE = + +# cov thresh list passed to nbrhd dictionary in the MET config file +{{METPLUS_TOOL_NAME}}_NEIGHBORHOOD_COV_THRESH = >=0.5 + +# Set to true to run {{MetplusToolName}} separately for each field specified +# Set to false to create one run of {{MetplusToolName}} per run time that +# includes all fields specified. +{{METPLUS_TOOL_NAME}}_ONCE_PER_FIELD = False +# +# Set to true if forecast data is probabilistic. 
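+# (In this template that is always the case: the ENS_FREQ_* fields read
+# in from GenEnsProd are fields of probabilities, as noted in the
+# comments above, so FCST_IS_PROB is set to True unconditionally.)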
+# +FCST_IS_PROB = True +FCST_PROB_IN_GRIB_PDS = False +# +# Only used if FCST_IS_PROB is true - sets probabilistic threshold +# +FCST_{{METPLUS_TOOL_NAME}}_PROB_THRESH = {{thresh_fcst_prob}} + +{{METPLUS_TOOL_NAME}}_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE} + +# Climatology data +#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_FILE_NAME = +#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_FIELD = +#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_REGRID_METHOD = +#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_REGRID_WIDTH = +#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_REGRID_VLD_THRESH = +#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_REGRID_SHAPE = +#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_TIME_INTERP_METHOD = +#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_MATCH_MONTH = +#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_DAY_INTERVAL = +#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_HOUR_INTERVAL = + +#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_FILE_NAME = +#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_FIELD = +#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_REGRID_METHOD = +#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_REGRID_WIDTH = +#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_REGRID_VLD_THRESH = +#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_REGRID_SHAPE = +#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_TIME_INTERP_METHOD = +#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_MATCH_MONTH = +#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_DAY_INTERVAL = +#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_HOUR_INTERVAL = + +{%- set comment_or_null = '' %} +{%- if input_field_group in ['APCP', 'ASNOW'] %} + {%- set comment_or_null = '#' %} +{%- endif %} + +{{comment_or_null}}{{METPLUS_TOOL_NAME}}_CLIMO_CDF_BINS = 1 +#{{METPLUS_TOOL_NAME}}_CLIMO_CDF_CENTER_BINS = False +#{{METPLUS_TOOL_NAME}}_CLIMO_CDF_WRITE_BINS = True + +{{METPLUS_TOOL_NAME}}_MASK_GRID = + +# Statistical output types +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_FHO = NONE +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_CTC = NONE +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_CTS = NONE +{{comment_or_null}}{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_MCTC = NONE +{{comment_or_null}}{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_MCTS = NONE +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_CNT = NONE +{{comment_or_null}}{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_SL1L2 = NONE +{{comment_or_null}}{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_SAL1L2 = NONE +{{comment_or_null}}{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_VL1L2 = NONE +{{comment_or_null}}{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_VAL1L2 = NONE +{{comment_or_null}}{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_VCNT = NONE +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_PCT = STAT +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_PSTD = STAT +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_PJC = STAT +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_PRC = STAT +{%- if input_field_group in ['APCP', 'ASNOW'] %} +{{comment_or_null}}{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_ECLV = BOTH +{%- elif input_field_group in ['REFC', 'RETOP'] %} +{{comment_or_null}}{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_ECLV = NONE +{%- endif %} +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_NBRCTC = NONE +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_NBRCTS = NONE +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_NBRCNT = STAT +{%- if input_field_group in ['APCP', 'ASNOW'] %} +{{comment_or_null}}{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_GRAD = BOTH +{{comment_or_null}}{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_DMAP = NONE +{%- elif input_field_group in ['REFC', 'RETOP'] %} +{{comment_or_null}}{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_GRAD = NONE +{%- endif %} + +# NetCDF matched pairs output file +#{{METPLUS_TOOL_NAME}}_NC_PAIRS_VAR_NAME = +{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_LATLON = FALSE +{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_RAW = FALSE +{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_DIFF = FALSE +{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_CLIMO = FALSE 
+{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_WEIGHT = FALSE
+{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_NBRHD = FALSE
+{{comment_or_null}}{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_FOURIER = FALSE
+{{comment_or_null}}{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_GRADIENT = FALSE
+{{comment_or_null}}{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_DISTANCE_MAP = FALSE
+{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_APPLY_MASK = FALSE
+
+# End of [config] section and start of [dir] section.
+[dir]
+#
+# Directory containing observation input to {{MetplusToolName}}.
+#
+OBS_{{METPLUS_TOOL_NAME}}_INPUT_DIR = {{obs_input_dir}}
+#
+# Directory containing forecast input to {{MetplusToolName}}.
+#
+FCST_{{METPLUS_TOOL_NAME}}_INPUT_DIR = {{fcst_input_dir}}
+#
+# Directory containing climatology mean input to {{MetplusToolName}}. Not used in
+# this example.
+#
+{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_INPUT_DIR =
+#
+# Directory containing climatology standard deviation input to
+# {{MetplusToolName}}. Not used in this example.
+#
+{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_INPUT_DIR =
+#
+# Directory in which to write output from {{MetplusToolName}}.
+#
+# OUTPUT_BASE apparently has to be set to something; it cannot be left
+# to its default value. But it is not explicitly used elsewhere in this
+# configuration file.
+#
+OUTPUT_BASE = {{output_base}}
+{{METPLUS_TOOL_NAME}}_OUTPUT_DIR = {{output_dir}}
+#
+# Directory for staging data.
+#
+STAGING_DIR = {{staging_dir}}
+
+# End of [dir] section and start of [filename_templates] section.
+[filename_templates]
+#
+# Template for observation input to {{MetplusToolName}} relative to
+# OBS_{{METPLUS_TOOL_NAME}}_INPUT_DIR.
+#
+OBS_{{METPLUS_TOOL_NAME}}_INPUT_TEMPLATE = {{obs_input_fn_template}}
+#
+# Template for forecast input to {{MetplusToolName}} relative to
+# FCST_{{METPLUS_TOOL_NAME}}_INPUT_DIR.
+#
+FCST_{{METPLUS_TOOL_NAME}}_INPUT_TEMPLATE = {{fcst_input_fn_template}}
+#
+# Template for output from {{MetplusToolName}} relative to {{METPLUS_TOOL_NAME}}_OUTPUT_DIR.
+#
+{{METPLUS_TOOL_NAME}}_OUTPUT_TEMPLATE =
+#
+# Template for climatology input to {{MetplusToolName}} relative to
+# {{METPLUS_TOOL_NAME}}_CLIMO_MEAN_INPUT_DIR. Not used in this example.
+#
+{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_INPUT_TEMPLATE =
+#
+# Template for climatology input to {{MetplusToolName}} relative to
+# {{METPLUS_TOOL_NAME}}_CLIMO_STDEV_INPUT_DIR. Not used in this example.
+#
+{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_INPUT_TEMPLATE =
+#
+# Variable used to specify one or more verification mask files for
+# {{MetplusToolName}}. Not used for this example.
+#
+{{METPLUS_TOOL_NAME}}_VERIFICATION_MASK_TEMPLATE = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly
diff --git a/parm/metplus/GridStat_ensprob_APCP.conf b/parm/metplus/GridStat_ensprob_APCP.conf
deleted file mode 100644
index 3e16de248d..0000000000
--- a/parm/metplus/GridStat_ensprob_APCP.conf
+++ /dev/null
@@ -1,362 +0,0 @@
-# Ensemble probabilistic GridStat METplus Configuration
-
-[config]
-
-# List of applications (tools) to run.
-PROCESS_LIST = GridStat
-
-# time looping - options are INIT, VALID, RETRO, and REALTIME
-# If set to INIT or RETRO:
-# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set
-# If set to VALID or REALTIME:
-# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set
-LOOP_BY = INIT
-
-# Format of INIT_BEG and INIT_END using % items
-# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc.
-# see www.strftime.org for more information -# %Y%m%d%H expands to YYYYMMDDHH -INIT_TIME_FMT = %Y%m%d%H - -# Start time for METplus run - must match INIT_TIME_FMT -INIT_BEG = {{cdate}} - -# End time for METplus run - must match INIT_TIME_FMT -INIT_END = {{cdate}} - -# Increment between METplus runs (in seconds if no units are specified). -# Must be >= 60 seconds. -INIT_INCREMENT = 3600 - -# List of forecast leads to process for each run time (init or valid) -# In hours if units are not specified -# If unset, defaults to 0 (don't loop through forecast leads) -LEAD_SEQ = {{fhr_list}} -# -# Order of loops to process data - Options are times, processes -# Not relevant if only one item is in the PROCESS_LIST -# times = run all wrappers in the PROCESS_LIST for a single run time, then -# increment the run time and run all wrappers again until all times have -# been evaluated. -# processes = run the first wrapper in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST until all -# wrappers have been run -# -LOOP_ORDER = times -# -# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. -# -LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} -# -# Specify the name of the METplus log file. -# -LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} -# -# Specify the location and name of the final METplus conf file. -# -METPLUS_CONF = {GRID_STAT_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}} -# -# Location of MET configuration file to pass to GridStat. -# -# References PARM_BASE, which is the location of the parm directory -# corresponding to the ush directory of the run_metplus.py script that -# is called or the value of the environment variable METPLUS_PARM_BASE -# if set. -# -GRID_STAT_CONFIG_FILE = {PARM_BASE}/met_config/GridStatConfig_wrapped - -# grid to remap data. Value is set as the 'to_grid' variable in the 'regrid' dictionary -# See MET User's Guide for more information -GRID_STAT_REGRID_TO_GRID = FCST -GRID_STAT_REGRID_VLD_THRESH = 0.5 -GRID_STAT_REGRID_METHOD = BUDGET -GRID_STAT_REGRID_WIDTH = 2 -GRID_STAT_REGRID_SHAPE = SQUARE - -#GRID_STAT_INTERP_FIELD = BOTH -#GRID_STAT_INTERP_VLD_THRESH = 1.0 -#GRID_STAT_INTERP_SHAPE = SQUARE -#GRID_STAT_INTERP_TYPE_METHOD = NEAREST -#GRID_STAT_INTERP_TYPE_WIDTH = 1 - -#GRID_STAT_GRID_WEIGHT_FLAG = -# -# Name to identify model (forecast) data in output. -# -# The variable MODEL is recorded in the stat files, and the data in -# these files is then plotted (e.g. using METViewer). Here, we add a -# suffix to MODEL that identifies the data as ensemble-probabilistic. -# This makes it easier to identify each curve. -# -MODEL = {{vx_fcst_model_name}}_ensprob -# -# Name to identify observation data in output. -# -OBTYPE = {{obtype}} -# -# Value to enter under the DESC column in the output stat file. 
-# -GRID_STAT_DESC = NA - -# List of variables to compare in GridStat - FCST_VAR1 variables correspond -# to OBS_VAR1 variables -# Note [FCST/OBS/BOTH]_GRID_STAT_VAR_NAME can be used instead if different evaluations -# are needed for different tools - -GRID_STAT_MET_CONFIG_OVERRIDES = cat_thresh = [NA]; cnt_thresh = [NA]; cnt_logic = UNION; wind_thresh = [NA]; wind_logic = UNION; ci_alpha = [0.05]; rank_corr_flag = FALSE; - -{%- set field_thresholds = [] %} -{%- if accum_hh == '01' %} - {%- set field_thresholds = ['gt0.0', 'ge0.254', 'ge0.508', 'ge2.54'] %} -{%- elif accum_hh == '03' %} - {%- set field_thresholds = ['gt0.0', 'ge0.508', 'ge2.54', 'ge6.350'] %} -{%- elif accum_hh == '06' %} - {%- set field_thresholds = ['gt0.0', 'ge2.54', 'ge6.350', 'ge12.700'] %} -{%- elif accum_hh == '24' %} - {%- set field_thresholds = ['gt0.0', 'ge6.350', 'ge12.700', 'ge25.400'] %} -{%- endif %} -# -# List of forecast and corresponding observation fields to process. -# -# FREQ -# Process as probability -# -FCST_VAR1_NAME = {{fieldname_in_met_output}}_{{accum_hh}}_A{{accum_no_pad}}_ENS_FREQ_{{field_thresholds[0]}} -FCST_VAR1_LEVELS = A{{accum_hh}} -FCST_VAR1_THRESH = ==0.1 -OBS_VAR1_NAME = {{fieldname_in_met_output}}_{{accum_hh}} -OBS_VAR1_LEVELS = A{{accum_hh}} -OBS_VAR1_THRESH = {{field_thresholds[0]}} - -FCST_VAR2_NAME = {{fieldname_in_met_output}}_{{accum_hh}}_A{{accum_no_pad}}_ENS_FREQ_{{field_thresholds[1]}} -FCST_VAR2_LEVELS = A{{accum_hh}} -FCST_VAR2_THRESH = ==0.1 -OBS_VAR2_NAME = {{fieldname_in_met_output}}_{{accum_hh}} -OBS_VAR2_LEVELS = A{{accum_hh}} -OBS_VAR2_THRESH = {{field_thresholds[1]}} - -FCST_VAR3_NAME = {{fieldname_in_met_output}}_{{accum_hh}}_A{{accum_no_pad}}_ENS_FREQ_{{field_thresholds[2]}} -FCST_VAR3_LEVELS = A{{accum_hh}} -FCST_VAR3_THRESH = ==0.1 -OBS_VAR3_NAME = {{fieldname_in_met_output}}_{{accum_hh}} -OBS_VAR3_LEVELS = A{{accum_hh}} -OBS_VAR3_THRESH = {{field_thresholds[2]}} - -FCST_VAR4_NAME = {{fieldname_in_met_output}}_{{accum_hh}}_A{{accum_no_pad}}_ENS_FREQ_{{field_thresholds[3]}} -FCST_VAR4_LEVELS = A{{accum_hh}} -FCST_VAR4_THRESH = ==0.1 -OBS_VAR4_NAME = {{fieldname_in_met_output}}_{{accum_hh}} -OBS_VAR4_LEVELS = A{{accum_hh}} -OBS_VAR4_THRESH = {{field_thresholds[3]}} - -# -#Process as scalars for neighborhood methods -## Note that the number of forecast and obs thresholds must match -## but won't actually be applied to NBRCNT outputs with "nbrhd.field = OBS;" -# -FCST_VAR5_NAME = {{fieldname_in_met_output}}_{{accum_hh}}_A{{accum_no_pad}}_ENS_FREQ_{{field_thresholds[0]}} -FCST_VAR5_LEVELS = A{{accum_hh}} -FCST_VAR5_THRESH = ==0.1 -FCST_VAR5_OPTIONS = prob = FALSE; -OBS_VAR5_NAME = {{fieldname_in_met_output}}_{{accum_hh}} -OBS_VAR5_LEVELS = A{{accum_hh}} -OBS_VAR5_THRESH = {{field_thresholds[0]}} -OBS_VAR5_OPTIONS = nbrhd = { field = OBS; shape = SQUARE; width = [ 1 ]; vld_thresh = 1.0; } - -FCST_VAR6_NAME = {{fieldname_in_met_output}}_{{accum_hh}}_A{{accum_no_pad}}_ENS_FREQ_{{field_thresholds[1]}} -FCST_VAR6_LEVELS = A{{accum_hh}} -FCST_VAR6_THRESH = ==0.1 -FCST_VAR6_OPTIONS = prob = FALSE; -OBS_VAR6_NAME = {{fieldname_in_met_output}}_{{accum_hh}} -OBS_VAR6_LEVELS = A{{accum_hh}} -OBS_VAR6_THRESH = {{field_thresholds[1]}} -OBS_VAR6_OPTIONS = nbrhd = { field = OBS; shape = SQUARE; width = [ 1 ]; vld_thresh = 1.0; } - -FCST_VAR7_NAME = {{fieldname_in_met_output}}_{{accum_hh}}_A{{accum_no_pad}}_ENS_FREQ_{{field_thresholds[2]}} -FCST_VAR7_LEVELS = A{{accum_hh}} -FCST_VAR7_THRESH = ==0.1 -FCST_VAR7_OPTIONS = prob = FALSE; -OBS_VAR7_NAME = 
{{fieldname_in_met_output}}_{{accum_hh}} -OBS_VAR7_LEVELS = A{{accum_hh}} -OBS_VAR7_THRESH = {{field_thresholds[2]}} -OBS_VAR7_OPTIONS = nbrhd = { field = OBS; shape = SQUARE; width = [ 1 ]; vld_thresh = 1.0; } - -FCST_VAR8_NAME = {{fieldname_in_met_output}}_{{accum_hh}}_A{{accum_no_pad}}_ENS_FREQ_{{field_thresholds[3]}} -FCST_VAR8_LEVELS = A{{accum_hh}} -FCST_VAR8_THRESH = ==0.1 -FCST_VAR8_OPTIONS = prob = FALSE; -OBS_VAR8_NAME = {{fieldname_in_met_output}}_{{accum_hh}} -OBS_VAR8_LEVELS = A{{accum_hh}} -OBS_VAR8_THRESH = {{field_thresholds[3]}} -OBS_VAR8_OPTIONS = nbrhd = { field = OBS; shape = SQUARE; width = [ 1 ]; vld_thresh = 1.0; } - -# -# Forecast data time window(s). -# -FCST_GRID_STAT_FILE_WINDOW_BEGIN = 0 -FCST_GRID_STAT_FILE_WINDOW_END = 0 -# -# Observation data time window(s). -# -OBS_GRID_STAT_FILE_WINDOW_BEGIN = 0 -OBS_GRID_STAT_FILE_WINDOW_END = 0 - -# MET GridStat neighborhood values -# See the MET User's Guide GridStat section for more information -GRID_STAT_NEIGHBORHOOD_FIELD = - -# width value passed to nbrhd dictionary in the MET config file -GRID_STAT_NEIGHBORHOOD_WIDTH = - -# shape value passed to nbrhd dictionary in the MET config file -GRID_STAT_NEIGHBORHOOD_SHAPE = - -# cov thresh list passed to nbrhd dictionary in the MET config file -GRID_STAT_NEIGHBORHOOD_COV_THRESH = >=0.5 - -# Set to true to run GridStat separately for each field specified -# Set to false to create one run of GridStat per run time that -# includes all fields specified. -GRID_STAT_ONCE_PER_FIELD = False -# -# Set to true if forecast data is probabilistic. -# -FCST_IS_PROB = True -FCST_PROB_IN_GRIB_PDS = False -# -# Only used if FCST_IS_PROB is true - sets probabilistic threshold -# -FCST_GRID_STAT_PROB_THRESH = ==0.1 - -GRID_STAT_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE} - -# Climatology data -#GRID_STAT_CLIMO_MEAN_FILE_NAME = -#GRID_STAT_CLIMO_MEAN_FIELD = -#GRID_STAT_CLIMO_MEAN_REGRID_METHOD = -#GRID_STAT_CLIMO_MEAN_REGRID_WIDTH = -#GRID_STAT_CLIMO_MEAN_REGRID_VLD_THRESH = -#GRID_STAT_CLIMO_MEAN_REGRID_SHAPE = -#GRID_STAT_CLIMO_MEAN_TIME_INTERP_METHOD = -#GRID_STAT_CLIMO_MEAN_MATCH_MONTH = -#GRID_STAT_CLIMO_MEAN_DAY_INTERVAL = -#GRID_STAT_CLIMO_MEAN_HOUR_INTERVAL = - -#GRID_STAT_CLIMO_STDEV_FILE_NAME = -#GRID_STAT_CLIMO_STDEV_FIELD = -#GRID_STAT_CLIMO_STDEV_REGRID_METHOD = -#GRID_STAT_CLIMO_STDEV_REGRID_WIDTH = -#GRID_STAT_CLIMO_STDEV_REGRID_VLD_THRESH = -#GRID_STAT_CLIMO_STDEV_REGRID_SHAPE = -#GRID_STAT_CLIMO_STDEV_TIME_INTERP_METHOD = -#GRID_STAT_CLIMO_STDEV_MATCH_MONTH = -#GRID_STAT_CLIMO_STDEV_DAY_INTERVAL = -#GRID_STAT_CLIMO_STDEV_HOUR_INTERVAL = - -#GRID_STAT_CLIMO_CDF_BINS = 1 -#GRID_STAT_CLIMO_CDF_CENTER_BINS = False -#GRID_STAT_CLIMO_CDF_WRITE_BINS = True - -GRID_STAT_MASK_GRID = - -# Statistical output types -GRID_STAT_OUTPUT_FLAG_FHO = NONE -GRID_STAT_OUTPUT_FLAG_CTC = NONE -GRID_STAT_OUTPUT_FLAG_CTS = NONE -#GRID_STAT_OUTPUT_FLAG_MCTC = NONE -#GRID_STAT_OUTPUT_FLAG_MCTS = NONE -GRID_STAT_OUTPUT_FLAG_CNT = NONE -#GRID_STAT_OUTPUT_FLAG_SL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_SAL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_VL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_VAL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_VCNT = NONE -GRID_STAT_OUTPUT_FLAG_PCT = STAT -GRID_STAT_OUTPUT_FLAG_PSTD = STAT -GRID_STAT_OUTPUT_FLAG_PJC = STAT -GRID_STAT_OUTPUT_FLAG_PRC = STAT -#GRID_STAT_OUTPUT_FLAG_ECLV = BOTH -GRID_STAT_OUTPUT_FLAG_NBRCTC = NONE -GRID_STAT_OUTPUT_FLAG_NBRCTS = NONE -GRID_STAT_OUTPUT_FLAG_NBRCNT = STAT -#GRID_STAT_OUTPUT_FLAG_GRAD = BOTH -#GRID_STAT_OUTPUT_FLAG_DMAP = NONE - -# NetCDF 
matched pairs output file -#GRID_STAT_NC_PAIRS_VAR_NAME = -GRID_STAT_NC_PAIRS_FLAG_LATLON = FALSE -GRID_STAT_NC_PAIRS_FLAG_RAW = FALSE -GRID_STAT_NC_PAIRS_FLAG_DIFF = FALSE -GRID_STAT_NC_PAIRS_FLAG_CLIMO = FALSE -GRID_STAT_NC_PAIRS_FLAG_WEIGHT = FALSE -GRID_STAT_NC_PAIRS_FLAG_NBRHD = FALSE -#GRID_STAT_NC_PAIRS_FLAG_FOURIER = FALSE -#GRID_STAT_NC_PAIRS_FLAG_GRADIENT = FALSE -#GRID_STAT_NC_PAIRS_FLAG_DISTANCE_MAP = FALSE -GRID_STAT_NC_PAIRS_FLAG_APPLY_MASK = FALSE - -# End of [config] section and start of [dir] section. -[dir] -# -# Directory containing observation input to GridStat. -# -OBS_GRID_STAT_INPUT_DIR = {{obs_input_dir}} -# -# Directory containing forecast input to GridStat. -# -FCST_GRID_STAT_INPUT_DIR = {{fcst_input_dir}} -# -# Directory containing climatology mean input to GridStat. Not used in -# this example. -# -GRID_STAT_CLIMO_MEAN_INPUT_DIR = -# -# Directory containing climatology mean input to GridStat. Not used in -# this example. -# -GRID_STAT_CLIMO_STDEV_INPUT_DIR = -# -# Directory in which to write output from GridStat. -# -# OUTPUT_BASE apparently has to be set to something; it cannot be left -# to its default value. But it is not explicitly used elsewhere in this -# configuration file. -# -OUTPUT_BASE = {{output_base}} -GRID_STAT_OUTPUT_DIR = {{output_dir}} -# -# Directory for staging data. -# -STAGING_DIR = {{staging_dir}} - -# End of [dir] section and start of [filename_templates] section. -[filename_templates] -# -# Template for observation input to GridStat relative to -# OBS_GRID_STAT_INPUT_DIR. -# -OBS_GRID_STAT_INPUT_TEMPLATE = {{obs_input_fn_template}} -# -# Template for forecast input to GridStat relative to -# FCST_GRID_STAT_INPUT_DIR. -# -FCST_GRID_STAT_INPUT_TEMPLATE = {{fcst_input_fn_template}} -# -# Template for output from GridStat relative to GRID_STAT_OUTPUT_DIR. -# -GRID_STAT_OUTPUT_TEMPLATE = -# -# Template for climatology input to GridStat relative to -# GRID_STAT_CLIMO_MEAN_INPUT_DIR. Not used in this example. -# -GRID_STAT_CLIMO_MEAN_INPUT_TEMPLATE = -# -# Template for climatology input to GridStat relative to -# GRID_STAT_CLIMO_STDEV_INPUT_DIR. Not used in this example. -# -GRID_STAT_CLIMO_STDEV_INPUT_TEMPLATE = -# -# Variable used to specify one or more verification mask files for -# GridStat. Not used for this example. -# -GRID_STAT_VERIFICATION_MASK_TEMPLATE = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly diff --git a/parm/metplus/GridStat_ensprob_ASNOW.conf b/parm/metplus/GridStat_ensprob_ASNOW.conf deleted file mode 100644 index ecd17f681b..0000000000 --- a/parm/metplus/GridStat_ensprob_ASNOW.conf +++ /dev/null @@ -1,384 +0,0 @@ -# Ensemble probabilistic GridStat METplus Configuration - -[config] - -# List of applications (tools) to run. -PROCESS_LIST = GridStat - -# time looping - options are INIT, VALID, RETRO, and REALTIME -# If set to INIT or RETRO: -# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set -# If set to VALID or REALTIME: -# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set -LOOP_BY = INIT - -# Format of INIT_BEG and INIT_END using % items -# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. -# see www.strftime.org for more information -# %Y%m%d%H expands to YYYYMMDDHH -INIT_TIME_FMT = %Y%m%d%H - -# Start time for METplus run - must match INIT_TIME_FMT -INIT_BEG = {{cdate}} - -# End time for METplus run - must match INIT_TIME_FMT -INIT_END = {{cdate}} - -# Increment between METplus runs (in seconds if no units are specified). -# Must be >= 60 seconds. 
-INIT_INCREMENT = 3600 - -# List of forecast leads to process for each run time (init or valid) -# In hours if units are not specified -# If unset, defaults to 0 (don't loop through forecast leads) -LEAD_SEQ = {{fhr_list}} -# -# Order of loops to process data - Options are times, processes -# Not relevant if only one item is in the PROCESS_LIST -# times = run all wrappers in the PROCESS_LIST for a single run time, then -# increment the run time and run all wrappers again until all times have -# been evaluated. -# processes = run the first wrapper in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST until all -# wrappers have been run -# -LOOP_ORDER = times -# -# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. -# -LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} -# -# Specify the name of the METplus log file. -# -LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} -# -# Specify the location and name of the final METplus conf file. -# -METPLUS_CONF = {GRID_STAT_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}} -# -# Location of MET configuration file to pass to GridStat. -# -# References PARM_BASE, which is the location of the parm directory -# corresponding to the ush directory of the run_metplus.py script that -# is called or the value of the environment variable METPLUS_PARM_BASE -# if set. -# -GRID_STAT_CONFIG_FILE = {PARM_BASE}/met_config/GridStatConfig_wrapped - -# grid to remap data. Value is set as the 'to_grid' variable in the 'regrid' dictionary -# See MET User's Guide for more information -GRID_STAT_REGRID_TO_GRID = FCST -GRID_STAT_REGRID_VLD_THRESH = 0.5 -GRID_STAT_REGRID_METHOD = BUDGET -GRID_STAT_REGRID_WIDTH = 2 -GRID_STAT_REGRID_SHAPE = SQUARE - -#GRID_STAT_INTERP_FIELD = BOTH -#GRID_STAT_INTERP_VLD_THRESH = 1.0 -#GRID_STAT_INTERP_SHAPE = SQUARE -#GRID_STAT_INTERP_TYPE_METHOD = NEAREST -#GRID_STAT_INTERP_TYPE_WIDTH = 1 - -#GRID_STAT_GRID_WEIGHT_FLAG = -# -# Name to identify model (forecast) data in output. -# -# The variable MODEL is recorded in the stat files, and the data in -# these files is then plotted (e.g. using METViewer). Here, we add a -# suffix to MODEL that identifies the data as ensemble-probabilistic. -# This makes it easier to identify each curve. -# -MODEL = {{vx_fcst_model_name}}_ensprob -# -# Name to identify observation data in output. -# -OBTYPE = {{obtype}} -# -# Value to enter under the DESC column in the output stat file. -# -GRID_STAT_DESC = NA - -# List of variables to compare in GridStat - FCST_VAR1 variables correspond -# to OBS_VAR1 variables -# Note [FCST/OBS/BOTH]_GRID_STAT_VAR_NAME can be used instead if different evaluations -# are needed for different tools - -GRID_STAT_MET_CONFIG_OVERRIDES = cat_thresh = [NA]; cnt_thresh = [NA]; cnt_logic = UNION; wind_thresh = [NA]; wind_logic = UNION; ci_alpha = [0.05]; rank_corr_flag = FALSE; - -{%- set field_thresholds = [] %} -{%- if accum_hh == '06' %} - {%- set field_thresholds = ['gt0.0', 'ge2.54', 'ge5.08', 'ge10.16', 'ge20.32'] %} -{%- elif accum_hh == '24' %} - {%- set field_thresholds = ['gt0.0', 'ge2.54', 'ge5.08', 'ge10.16', 'ge20.32'] %} -{%- endif %} -# -# List of forecast and corresponding observation fields to process. 
-# -# FREQ -# Process as probability -# -FCST_VAR1_NAME = {{fieldname_in_met_output}}_{{accum_hh}}_A{{accum_no_pad}}_ENS_FREQ_{{field_thresholds[0]}} -FCST_VAR1_LEVELS = A{{accum_hh}} -FCST_VAR1_THRESH = ==0.1 -OBS_VAR1_NAME = {{fieldname_in_obs_input}} -OBS_VAR1_LEVELS = A{{accum_hh}} -OBS_VAR1_THRESH = {{field_thresholds[0]}} -OBS_VAR1_OPTIONS = convert(x) = 100.0*x; - -FCST_VAR2_NAME = {{fieldname_in_met_output}}_{{accum_hh}}_A{{accum_no_pad}}_ENS_FREQ_{{field_thresholds[1]}} -FCST_VAR2_LEVELS = A{{accum_hh}} -FCST_VAR2_THRESH = ==0.1 -OBS_VAR2_NAME = {{fieldname_in_obs_input}} -OBS_VAR2_LEVELS = A{{accum_hh}} -OBS_VAR2_THRESH = {{field_thresholds[1]}} -OBS_VAR2_OPTIONS = convert(x) = 100.0*x; - -FCST_VAR3_NAME = {{fieldname_in_met_output}}_{{accum_hh}}_A{{accum_no_pad}}_ENS_FREQ_{{field_thresholds[2]}} -FCST_VAR3_LEVELS = A{{accum_hh}} -FCST_VAR3_THRESH = ==0.1 -OBS_VAR3_NAME = {{fieldname_in_obs_input}} -OBS_VAR3_LEVELS = A{{accum_hh}} -OBS_VAR3_THRESH = {{field_thresholds[2]}} -OBS_VAR3_OPTIONS = convert(x) = 100.0*x; - -FCST_VAR4_NAME = {{fieldname_in_met_output}}_{{accum_hh}}_A{{accum_no_pad}}_ENS_FREQ_{{field_thresholds[3]}} -FCST_VAR4_LEVELS = A{{accum_hh}} -FCST_VAR4_THRESH = ==0.1 -OBS_VAR4_NAME = {{fieldname_in_obs_input}} -OBS_VAR4_LEVELS = A{{accum_hh}} -OBS_VAR4_THRESH = {{field_thresholds[3]}} -OBS_VAR4_OPTIONS = convert(x) = 100.0*x; - -FCST_VAR5_NAME = {{fieldname_in_met_output}}_{{accum_hh}}_A{{accum_no_pad}}_ENS_FREQ_{{field_thresholds[4]}} -FCST_VAR5_LEVELS = A{{accum_hh}} -FCST_VAR5_THRESH = ==0.1 -OBS_VAR5_NAME = {{fieldname_in_obs_input}} -OBS_VAR5_LEVELS = A{{accum_hh}} -OBS_VAR5_THRESH = {{field_thresholds[4]}} -OBS_VAR5_OPTIONS = convert(x) = 100.0*x; - -# -#Process as scalars for neighborhood methods -## Note that the number of forecast and obs thresholds must match -## but won't actually be applied to NBRCNT outputs with "nbrhd.field = OBS;" -# -FCST_VAR6_NAME = {{fieldname_in_met_output}}_{{accum_hh}}_A{{accum_no_pad}}_ENS_FREQ_{{field_thresholds[0]}} -FCST_VAR6_LEVELS = A{{accum_hh}} -FCST_VAR6_THRESH = ==0.1 -FCST_VAR6_OPTIONS = prob = FALSE; -OBS_VAR6_NAME = {{fieldname_in_obs_input}} -OBS_VAR6_LEVELS = A{{accum_hh}} -OBS_VAR6_THRESH = {{field_thresholds[0]}} -OBS_VAR6_OPTIONS = nbrhd = { field = OBS; shape = SQUARE; width = [ 1 ]; vld_thresh = 1.0; }; - convert(x) = 100.0*x; - -FCST_VAR7_NAME = {{fieldname_in_met_output}}_{{accum_hh}}_A{{accum_no_pad}}_ENS_FREQ_{{field_thresholds[1]}} -FCST_VAR7_LEVELS = A{{accum_hh}} -FCST_VAR7_THRESH = ==0.1 -FCST_VAR7_OPTIONS = prob = FALSE; -OBS_VAR7_NAME = {{fieldname_in_obs_input}} -OBS_VAR7_LEVELS = A{{accum_hh}} -OBS_VAR7_THRESH = {{field_thresholds[1]}} -OBS_VAR7_OPTIONS = nbrhd = { field = OBS; shape = SQUARE; width = [ 1 ]; vld_thresh = 1.0; }; - convert(x) = 100.0*x; - -FCST_VAR8_NAME = {{fieldname_in_met_output}}_{{accum_hh}}_A{{accum_no_pad}}_ENS_FREQ_{{field_thresholds[2]}} -FCST_VAR8_LEVELS = A{{accum_hh}} -FCST_VAR8_THRESH = ==0.1 -FCST_VAR8_OPTIONS = prob = FALSE; -OBS_VAR8_NAME = {{fieldname_in_obs_input}} -OBS_VAR8_LEVELS = A{{accum_hh}} -OBS_VAR8_THRESH = {{field_thresholds[2]}} -OBS_VAR8_OPTIONS = nbrhd = { field = OBS; shape = SQUARE; width = [ 1 ]; vld_thresh = 1.0; }; - convert(x) = 100.0*x; - -FCST_VAR9_NAME = {{fieldname_in_met_output}}_{{accum_hh}}_A{{accum_no_pad}}_ENS_FREQ_{{field_thresholds[3]}} -FCST_VAR9_LEVELS = A{{accum_hh}} -FCST_VAR9_THRESH = ==0.1 -FCST_VAR9_OPTIONS = prob = FALSE; -OBS_VAR9_NAME = {{fieldname_in_obs_input}} -OBS_VAR9_LEVELS = A{{accum_hh}} -OBS_VAR9_THRESH = 
{{field_thresholds[3]}} -OBS_VAR9_OPTIONS = nbrhd = { field = OBS; shape = SQUARE; width = [ 1 ]; vld_thresh = 1.0; }; - convert(x) = 100.0*x; - -FCST_VAR10_NAME = {{fieldname_in_met_output}}_{{accum_hh}}_A{{accum_no_pad}}_ENS_FREQ_{{field_thresholds[4]}} -FCST_VAR10_LEVELS = A{{accum_hh}} -FCST_VAR10_THRESH = ==0.1 -FCST_VAR10_OPTIONS = prob = FALSE; -OBS_VAR10_NAME = {{fieldname_in_obs_input}} -OBS_VAR10_LEVELS = A{{accum_hh}} -OBS_VAR10_THRESH = {{field_thresholds[4]}} -OBS_VAR10_OPTIONS = nbrhd = { field = OBS; shape = SQUARE; width = [ 1 ]; vld_thresh = 1.0; }; - convert(x) = 100.0*x; - -# -# Forecast data time window(s). -# -FCST_GRID_STAT_FILE_WINDOW_BEGIN = 0 -FCST_GRID_STAT_FILE_WINDOW_END = 0 -# -# Observation data time window(s). -# -OBS_GRID_STAT_FILE_WINDOW_BEGIN = 0 -OBS_GRID_STAT_FILE_WINDOW_END = 0 - -# MET GridStat neighborhood values -# See the MET User's Guide GridStat section for more information -GRID_STAT_NEIGHBORHOOD_FIELD = - -# width value passed to nbrhd dictionary in the MET config file -GRID_STAT_NEIGHBORHOOD_WIDTH = - -# shape value passed to nbrhd dictionary in the MET config file -GRID_STAT_NEIGHBORHOOD_SHAPE = - -# cov thresh list passed to nbrhd dictionary in the MET config file -GRID_STAT_NEIGHBORHOOD_COV_THRESH = >=0.5 - -# Set to true to run GridStat separately for each field specified -# Set to false to create one run of GridStat per run time that -# includes all fields specified. -GRID_STAT_ONCE_PER_FIELD = False -# -# Set to true if forecast data is probabilistic. -# -FCST_IS_PROB = True -FCST_PROB_IN_GRIB_PDS = False -# -# Only used if FCST_IS_PROB is true - sets probabilistic threshold -# -FCST_GRID_STAT_PROB_THRESH = ==0.1 - -GRID_STAT_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE} - -# Climatology data -#GRID_STAT_CLIMO_MEAN_FILE_NAME = -#GRID_STAT_CLIMO_MEAN_FIELD = -#GRID_STAT_CLIMO_MEAN_REGRID_METHOD = -#GRID_STAT_CLIMO_MEAN_REGRID_WIDTH = -#GRID_STAT_CLIMO_MEAN_REGRID_VLD_THRESH = -#GRID_STAT_CLIMO_MEAN_REGRID_SHAPE = -#GRID_STAT_CLIMO_MEAN_TIME_INTERP_METHOD = -#GRID_STAT_CLIMO_MEAN_MATCH_MONTH = -#GRID_STAT_CLIMO_MEAN_DAY_INTERVAL = -#GRID_STAT_CLIMO_MEAN_HOUR_INTERVAL = - -#GRID_STAT_CLIMO_STDEV_FILE_NAME = -#GRID_STAT_CLIMO_STDEV_FIELD = -#GRID_STAT_CLIMO_STDEV_REGRID_METHOD = -#GRID_STAT_CLIMO_STDEV_REGRID_WIDTH = -#GRID_STAT_CLIMO_STDEV_REGRID_VLD_THRESH = -#GRID_STAT_CLIMO_STDEV_REGRID_SHAPE = -#GRID_STAT_CLIMO_STDEV_TIME_INTERP_METHOD = -#GRID_STAT_CLIMO_STDEV_MATCH_MONTH = -#GRID_STAT_CLIMO_STDEV_DAY_INTERVAL = -#GRID_STAT_CLIMO_STDEV_HOUR_INTERVAL = - -#GRID_STAT_CLIMO_CDF_BINS = 1 -#GRID_STAT_CLIMO_CDF_CENTER_BINS = False -#GRID_STAT_CLIMO_CDF_WRITE_BINS = True - -GRID_STAT_MASK_GRID = - -# Statistical output types -GRID_STAT_OUTPUT_FLAG_FHO = NONE -GRID_STAT_OUTPUT_FLAG_CTC = NONE -GRID_STAT_OUTPUT_FLAG_CTS = NONE -#GRID_STAT_OUTPUT_FLAG_MCTC = NONE -#GRID_STAT_OUTPUT_FLAG_MCTS = NONE -GRID_STAT_OUTPUT_FLAG_CNT = NONE -#GRID_STAT_OUTPUT_FLAG_SL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_SAL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_VL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_VAL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_VCNT = NONE -GRID_STAT_OUTPUT_FLAG_PCT = STAT -GRID_STAT_OUTPUT_FLAG_PSTD = STAT -GRID_STAT_OUTPUT_FLAG_PJC = STAT -GRID_STAT_OUTPUT_FLAG_PRC = STAT -#GRID_STAT_OUTPUT_FLAG_ECLV = BOTH -GRID_STAT_OUTPUT_FLAG_NBRCTC = NONE -GRID_STAT_OUTPUT_FLAG_NBRCTS = NONE -GRID_STAT_OUTPUT_FLAG_NBRCNT = STAT -#GRID_STAT_OUTPUT_FLAG_GRAD = BOTH -#GRID_STAT_OUTPUT_FLAG_DMAP = NONE - -# NetCDF matched pairs output file 
-#GRID_STAT_NC_PAIRS_VAR_NAME = -GRID_STAT_NC_PAIRS_FLAG_LATLON = FALSE -GRID_STAT_NC_PAIRS_FLAG_RAW = FALSE -GRID_STAT_NC_PAIRS_FLAG_DIFF = FALSE -GRID_STAT_NC_PAIRS_FLAG_CLIMO = FALSE -GRID_STAT_NC_PAIRS_FLAG_WEIGHT = FALSE -GRID_STAT_NC_PAIRS_FLAG_NBRHD = FALSE -#GRID_STAT_NC_PAIRS_FLAG_FOURIER = FALSE -#GRID_STAT_NC_PAIRS_FLAG_GRADIENT = FALSE -#GRID_STAT_NC_PAIRS_FLAG_DISTANCE_MAP = FALSE -GRID_STAT_NC_PAIRS_FLAG_APPLY_MASK = FALSE - -# End of [config] section and start of [dir] section. -[dir] -# -# Directory containing observation input to GridStat. -# -OBS_GRID_STAT_INPUT_DIR = {{obs_input_dir}} -# -# Directory containing forecast input to GridStat. -# -FCST_GRID_STAT_INPUT_DIR = {{fcst_input_dir}} -# -# Directory containing climatology mean input to GridStat. Not used in -# this example. -# -GRID_STAT_CLIMO_MEAN_INPUT_DIR = -# -# Directory containing climatology mean input to GridStat. Not used in -# this example. -# -GRID_STAT_CLIMO_STDEV_INPUT_DIR = -# -# Directory in which to write output from GridStat. -# -# OUTPUT_BASE apparently has to be set to something; it cannot be left -# to its default value. But it is not explicitly used elsewhere in this -# configuration file. -# -OUTPUT_BASE = {{output_base}} -GRID_STAT_OUTPUT_DIR = {{output_dir}} -# -# Directory for staging data. -# -STAGING_DIR = {{staging_dir}} - -# End of [dir] section and start of [filename_templates] section. -[filename_templates] -# -# Template for observation input to GridStat relative to -# OBS_GRID_STAT_INPUT_DIR. -# -OBS_GRID_STAT_INPUT_TEMPLATE = {{obs_input_fn_template}} -# -# Template for forecast input to GridStat relative to -# FCST_GRID_STAT_INPUT_DIR. -# -FCST_GRID_STAT_INPUT_TEMPLATE = {{fcst_input_fn_template}} -# -# Template for output from GridStat relative to GRID_STAT_OUTPUT_DIR. -# -GRID_STAT_OUTPUT_TEMPLATE = -# -# Template for climatology input to GridStat relative to -# GRID_STAT_CLIMO_MEAN_INPUT_DIR. Not used in this example. -# -GRID_STAT_CLIMO_MEAN_INPUT_TEMPLATE = -# -# Template for climatology input to GridStat relative to -# GRID_STAT_CLIMO_STDEV_INPUT_DIR. Not used in this example. -# -GRID_STAT_CLIMO_STDEV_INPUT_TEMPLATE = -# -# Variable used to specify one or more verification mask files for -# GridStat. Not used for this example. -# -GRID_STAT_VERIFICATION_MASK_TEMPLATE = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly diff --git a/parm/metplus/GridStat_ensprob_REFC.conf b/parm/metplus/GridStat_ensprob_REFC.conf deleted file mode 100644 index 95e19af1ce..0000000000 --- a/parm/metplus/GridStat_ensprob_REFC.conf +++ /dev/null @@ -1,382 +0,0 @@ -# Ensemble probabilistic GridStat METplus Configuration - -[config] - -# List of applications (tools) to run. -PROCESS_LIST = GridStat - -# time looping - options are INIT, VALID, RETRO, and REALTIME -# If set to INIT or RETRO: -# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set -# If set to VALID or REALTIME: -# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set -LOOP_BY = INIT - -# Format of INIT_BEG and INIT_END using % items -# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. -# see www.strftime.org for more information -# %Y%m%d%H expands to YYYYMMDDHH -INIT_TIME_FMT = %Y%m%d%H - -# Start time for METplus run - must match INIT_TIME_FMT -INIT_BEG = {{cdate}} - -# End time for METplus run - must match INIT_TIME_FMT -INIT_END = {{cdate}} - -# Increment between METplus runs (in seconds if no units are specified). -# Must be >= 60 seconds. 
-INIT_INCREMENT = 3600 - -# List of forecast leads to process for each run time (init or valid) -# In hours if units are not specified -# If unset, defaults to 0 (don't loop through forecast leads) -LEAD_SEQ = {{fhr_list}} -# -# Order of loops to process data - Options are times, processes -# Not relevant if only one item is in the PROCESS_LIST -# times = run all wrappers in the PROCESS_LIST for a single run time, then -# increment the run time and run all wrappers again until all times have -# been evaluated. -# processes = run the first wrapper in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST until all -# wrappers have been run -# -LOOP_ORDER = times -# -# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. -# -LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} -# -# Specify the name of the METplus log file. -# -LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} -# -# Specify the location and name of the final METplus conf file. -# -METPLUS_CONF = {GRID_STAT_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}} -# -# Location of MET configuration file to pass to GridStat. -# -# References PARM_BASE, which is the location of the parm directory -# corresponding to the ush directory of the run_metplus.py script that -# is called or the value of the environment variable METPLUS_PARM_BASE -# if set. -# -GRID_STAT_CONFIG_FILE = {PARM_BASE}/met_config/GridStatConfig_wrapped - -# grid to remap data. Value is set as the 'to_grid' variable in the 'regrid' dictionary -# See MET User's Guide for more information -GRID_STAT_REGRID_TO_GRID = FCST -GRID_STAT_REGRID_VLD_THRESH = 0.5 -GRID_STAT_REGRID_METHOD = BUDGET -GRID_STAT_REGRID_WIDTH = 2 -GRID_STAT_REGRID_SHAPE = SQUARE - -GRID_STAT_INTERP_FIELD = NONE -GRID_STAT_INTERP_VLD_THRESH = 1.0 -GRID_STAT_INTERP_SHAPE = SQUARE -GRID_STAT_INTERP_TYPE_METHOD = NEAREST -GRID_STAT_INTERP_TYPE_WIDTH = 1 - -GRID_STAT_GRID_WEIGHT_FLAG = NONE -# -# Name to identify model (forecast) data in output. -# -# The variable MODEL is recorded in the stat files, and the data in -# these files is then plotted (e.g. using METViewer). Here, we add a -# suffix to MODEL that identifies the data as ensemble-probabilistic. -# This makes it easier to identify each curve. -# -MODEL = {{vx_fcst_model_name}}_ensprob -# -# Name to identify observation data in output. -# -OBTYPE = {{obtype}} -# -# Value to enter under the DESC column in the output stat file. -# -GRID_STAT_DESC = NA - -# List of variables to compare in GridStat - FCST_VAR1 variables correspond -# to OBS_VAR1 variables -# Note [FCST/OBS/BOTH]_GRID_STAT_VAR_NAME can be used instead if different evaluations -# are needed for different tools - -GRID_STAT_MET_CONFIG_OVERRIDES = cat_thresh = [NA]; cnt_thresh = [NA]; cnt_logic = UNION; wind_thresh = [NA]; wind_logic = UNION; ci_alpha = [0.05]; rank_corr_flag = FALSE; -# -# List of forecast and corresponding observation fields to process. 
-# -# FREQ -# Process as probability -# -FCST_VAR1_NAME = {{fieldname_in_met_output}}_L0_ENS_FREQ_ge20 -FCST_VAR1_LEVELS = L0 -FCST_VAR1_THRESH = ==0.1 -OBS_VAR1_NAME = MergedReflectivityQCComposite -OBS_VAR1_LEVELS = Z500 -OBS_VAR1_THRESH = ge20 -OBS_VAR1_OPTIONS = censor_thresh = lt-20; - censor_val = -20.0; - cnt_thresh = [ >15 ]; - cnt_logic = UNION; - -FCST_VAR2_NAME = {{fieldname_in_met_output}}_L0_ENS_FREQ_ge30 -FCST_VAR2_LEVELS = L0 -FCST_VAR2_THRESH = ==0.1 -OBS_VAR2_NAME = MergedReflectivityQCComposite -OBS_VAR2_LEVELS = Z500 -OBS_VAR2_THRESH = ge30 -OBS_VAR2_OPTIONS = censor_thresh = lt-20; - censor_val = -20.0; - cnt_thresh = [ >15 ]; - cnt_logic = UNION; - -FCST_VAR3_NAME = {{fieldname_in_met_output}}_L0_ENS_FREQ_ge40 -FCST_VAR3_LEVELS = L0 -FCST_VAR3_THRESH = ==0.1 -OBS_VAR3_NAME = MergedReflectivityQCComposite -OBS_VAR3_LEVELS = Z500 -OBS_VAR3_THRESH = ge40 -OBS_VAR3_OPTIONS = censor_thresh = lt-20; - censor_val = -20.0; - cnt_thresh = [ >15 ]; - cnt_logic = UNION; - -FCST_VAR4_NAME = {{fieldname_in_met_output}}_L0_ENS_FREQ_ge50 -FCST_VAR4_LEVELS = L0 -FCST_VAR4_THRESH = ==0.1 -OBS_VAR4_NAME = MergedReflectivityQCComposite -OBS_VAR4_LEVELS = Z500 -OBS_VAR4_THRESH = ge50 -OBS_VAR4_OPTIONS = censor_thresh = lt-20; - censor_val = -20.0; - cnt_thresh = [ >15 ]; - cnt_logic = UNION; - -# -#Process as scalars for neighborhood methods -## Note that the number of forecast and obs thresholds must match -## but won't actually be applied to NBRCNT outputs with "nbrhd.field = OBS;" -# -FCST_VAR5_NAME = {{fieldname_in_met_output}}_L0_ENS_FREQ_ge20 -FCST_VAR5_LEVELS = L0 -FCST_VAR5_THRESH = ==0.1 -FCST_VAR5_OPTIONS = prob = FALSE; -OBS_VAR5_NAME = MergedReflectivityQCComposite -OBS_VAR5_LEVELS = Z500 -OBS_VAR5_THRESH = ge20 -OBS_VAR5_OPTIONS = censor_thresh = lt-20; - censor_val = -20.0; - cnt_thresh = [ >15 ]; - cnt_logic = UNION; - nbrhd = { field = OBS; shape = SQUARE; width = [ 1 ]; vld_thresh = 1.0; } - -FCST_VAR6_NAME = {{fieldname_in_met_output}}_L0_ENS_FREQ_ge30 -FCST_VAR6_LEVELS = L0 -FCST_VAR6_THRESH = ==0.1 -FCST_VAR6_OPTIONS = prob = FALSE; -OBS_VAR6_NAME = MergedReflectivityQCComposite -OBS_VAR6_LEVELS = Z500 -OBS_VAR6_THRESH = ge30 -OBS_VAR6_OPTIONS = censor_thresh = lt-20; - censor_val = -20.0; - cnt_thresh = [ >15 ]; - cnt_logic = UNION; - nbrhd = { field = OBS; shape = SQUARE; width = [ 1 ]; vld_thresh = 1.0; } - -FCST_VAR7_NAME = {{fieldname_in_met_output}}_L0_ENS_FREQ_ge40 -FCST_VAR7_LEVELS = L0 -FCST_VAR7_THRESH = ==0.1 -FCST_VAR7_OPTIONS = prob = FALSE; -OBS_VAR7_NAME = MergedReflectivityQCComposite -OBS_VAR7_LEVELS = Z500 -OBS_VAR7_THRESH = ge40 -OBS_VAR7_OPTIONS = censor_thresh = lt-20; - censor_val = -20.0; - cnt_thresh = [ >15 ]; - cnt_logic = UNION; - nbrhd = { field = OBS; shape = SQUARE; width = [ 1 ]; vld_thresh = 1.0; } - -FCST_VAR8_NAME = {{fieldname_in_met_output}}_L0_ENS_FREQ_ge50 -FCST_VAR8_LEVELS = L0 -FCST_VAR8_THRESH = ==0.1 -FCST_VAR8_OPTIONS = prob = FALSE; -OBS_VAR8_NAME = MergedReflectivityQCComposite -OBS_VAR8_LEVELS = Z500 -OBS_VAR8_THRESH = ge50 -OBS_VAR8_OPTIONS = censor_thresh = lt-20; - censor_val = -20.0; - cnt_thresh = [ >15 ]; - cnt_logic = UNION; - nbrhd = { field = OBS; shape = SQUARE; width = [ 1 ]; vld_thresh = 1.0; } - -# -# Forecast data time window(s). -# -#FCST_GRID_STAT_FILE_WINDOW_BEGIN = 0 -#FCST_GRID_STAT_FILE_WINDOW_END = 0 -# -# Observation data time window(s). 
-# -OBS_GRID_STAT_FILE_WINDOW_BEGIN = -300 -OBS_GRID_STAT_FILE_WINDOW_END = 300 - -# MET GridStat neighborhood values -# See the MET User's Guide GridStat section for more information -GRID_STAT_NEIGHBORHOOD_FIELD = - -# width value passed to nbrhd dictionary in the MET config file -GRID_STAT_NEIGHBORHOOD_WIDTH = - -# shape value passed to nbrhd dictionary in the MET config file -GRID_STAT_NEIGHBORHOOD_SHAPE = - -# cov thresh list passed to nbrhd dictionary in the MET config file -GRID_STAT_NEIGHBORHOOD_COV_THRESH = >=0.5 - -# Set to true to run GridStat separately for each field specified -# Set to false to create one run of GridStat per run time that -# includes all fields specified. -GRID_STAT_ONCE_PER_FIELD = False -# -# Set to true if forecast data is probabilistic. -# -FCST_IS_PROB = True -FCST_PROB_IN_GRIB_PDS = False -# -# Only used if FCST_IS_PROB is true - sets probabilistic threshold -# -FCST_GRID_STAT_PROB_THRESH = ==0.1 - -GRID_STAT_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE} - -# Climatology data -#GRID_STAT_CLIMO_MEAN_FILE_NAME = -#GRID_STAT_CLIMO_MEAN_FIELD = -#GRID_STAT_CLIMO_MEAN_REGRID_METHOD = -#GRID_STAT_CLIMO_MEAN_REGRID_WIDTH = -#GRID_STAT_CLIMO_MEAN_REGRID_VLD_THRESH = -#GRID_STAT_CLIMO_MEAN_REGRID_SHAPE = -#GRID_STAT_CLIMO_MEAN_TIME_INTERP_METHOD = -#GRID_STAT_CLIMO_MEAN_MATCH_MONTH = -#GRID_STAT_CLIMO_MEAN_DAY_INTERVAL = -#GRID_STAT_CLIMO_MEAN_HOUR_INTERVAL = - -#GRID_STAT_CLIMO_STDEV_FILE_NAME = -#GRID_STAT_CLIMO_STDEV_FIELD = -#GRID_STAT_CLIMO_STDEV_REGRID_METHOD = -#GRID_STAT_CLIMO_STDEV_REGRID_WIDTH = -#GRID_STAT_CLIMO_STDEV_REGRID_VLD_THRESH = -#GRID_STAT_CLIMO_STDEV_REGRID_SHAPE = -#GRID_STAT_CLIMO_STDEV_TIME_INTERP_METHOD = -#GRID_STAT_CLIMO_STDEV_MATCH_MONTH = -#GRID_STAT_CLIMO_STDEV_DAY_INTERVAL = -#GRID_STAT_CLIMO_STDEV_HOUR_INTERVAL = - -GRID_STAT_CLIMO_CDF_BINS = 1 -#GRID_STAT_CLIMO_CDF_CENTER_BINS = False -#GRID_STAT_CLIMO_CDF_WRITE_BINS = True - -GRID_STAT_MASK_GRID = - -# Statistical output types -GRID_STAT_OUTPUT_FLAG_FHO = NONE -GRID_STAT_OUTPUT_FLAG_CTC = NONE -GRID_STAT_OUTPUT_FLAG_CTS = NONE -GRID_STAT_OUTPUT_FLAG_MCTC = NONE -GRID_STAT_OUTPUT_FLAG_MCTS = NONE -GRID_STAT_OUTPUT_FLAG_CNT = NONE -GRID_STAT_OUTPUT_FLAG_SL1L2 = NONE -GRID_STAT_OUTPUT_FLAG_SAL1L2 = NONE -GRID_STAT_OUTPUT_FLAG_VL1L2 = NONE -GRID_STAT_OUTPUT_FLAG_VAL1L2 = NONE -GRID_STAT_OUTPUT_FLAG_VCNT = NONE -GRID_STAT_OUTPUT_FLAG_PCT = STAT -GRID_STAT_OUTPUT_FLAG_PSTD = STAT -GRID_STAT_OUTPUT_FLAG_PJC = STAT -GRID_STAT_OUTPUT_FLAG_PRC = STAT -GRID_STAT_OUTPUT_FLAG_ECLV = NONE -GRID_STAT_OUTPUT_FLAG_NBRCTC = NONE -GRID_STAT_OUTPUT_FLAG_NBRCTS = NONE -GRID_STAT_OUTPUT_FLAG_NBRCNT = STAT -GRID_STAT_OUTPUT_FLAG_GRAD = NONE - -# NetCDF matched pairs output file -#GRID_STAT_NC_PAIRS_VAR_NAME = -GRID_STAT_NC_PAIRS_FLAG_LATLON = FALSE -GRID_STAT_NC_PAIRS_FLAG_RAW = FALSE -GRID_STAT_NC_PAIRS_FLAG_DIFF = FALSE -GRID_STAT_NC_PAIRS_FLAG_CLIMO = FALSE -GRID_STAT_NC_PAIRS_FLAG_WEIGHT = FALSE -GRID_STAT_NC_PAIRS_FLAG_NBRHD = FALSE -GRID_STAT_NC_PAIRS_FLAG_FOURIER = FALSE -GRID_STAT_NC_PAIRS_FLAG_GRADIENT = FALSE -GRID_STAT_NC_PAIRS_FLAG_DISTANCE_MAP = FALSE -GRID_STAT_NC_PAIRS_FLAG_APPLY_MASK = FALSE - -# End of [config] section and start of [dir] section. -[dir] -# -# Directory containing observation input to GridStat. -# -OBS_GRID_STAT_INPUT_DIR = {{obs_input_dir}} -# -# Directory containing forecast input to GridStat. -# -FCST_GRID_STAT_INPUT_DIR = {{fcst_input_dir}} -# -# Directory containing climatology mean input to GridStat. Not used in -# this example. 
-# -GRID_STAT_CLIMO_MEAN_INPUT_DIR = -# -# Directory containing climatology mean input to GridStat. Not used in -# this example. -# -GRID_STAT_CLIMO_STDEV_INPUT_DIR = -# -# Directory in which to write output from GridStat. -# -# OUTPUT_BASE apparently has to be set to something; it cannot be left -# to its default value. But it is not explicitly used elsewhere in this -# configuration file. -# -OUTPUT_BASE = {{output_base}} -GRID_STAT_OUTPUT_DIR = {{output_dir}} -# -# Directory for staging data. -# -STAGING_DIR = {{staging_dir}} - -# End of [dir] section and start of [filename_templates] section. -[filename_templates] -# -# Template for observation input to GridStat relative to -# OBS_GRID_STAT_INPUT_DIR. -# -OBS_GRID_STAT_INPUT_TEMPLATE = {{obs_input_fn_template}} -# -# Template for forecast input to GridStat relative to -# FCST_GRID_STAT_INPUT_DIR. -# -FCST_GRID_STAT_INPUT_TEMPLATE = {{fcst_input_fn_template}} -# -# Template for output from GridStat relative to GRID_STAT_OUTPUT_DIR. -# -GRID_STAT_OUTPUT_TEMPLATE = -# -# Template for climatology input to GridStat relative to -# GRID_STAT_CLIMO_MEAN_INPUT_DIR. Not used in this example. -# -GRID_STAT_CLIMO_MEAN_INPUT_TEMPLATE = -# -# Template for climatology input to GridStat relative to -# GRID_STAT_CLIMO_STDEV_INPUT_DIR. Not used in this example. -# -GRID_STAT_CLIMO_STDEV_INPUT_TEMPLATE = -# -# Variable used to specify one or more verification mask files for -# GridStat. Not used for this example. -# -GRID_STAT_VERIFICATION_MASK_TEMPLATE = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly diff --git a/parm/metplus/GridStat_ensprob_RETOP.conf b/parm/metplus/GridStat_ensprob_RETOP.conf deleted file mode 100644 index d1f218bea8..0000000000 --- a/parm/metplus/GridStat_ensprob_RETOP.conf +++ /dev/null @@ -1,390 +0,0 @@ -# Ensemble probabilistic GridStat METplus Configuration - -[config] - -# List of applications (tools) to run. -PROCESS_LIST = GridStat - -# time looping - options are INIT, VALID, RETRO, and REALTIME -# If set to INIT or RETRO: -# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set -# If set to VALID or REALTIME: -# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set -LOOP_BY = INIT - -# Format of INIT_BEG and INIT_END using % items -# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. -# see www.strftime.org for more information -# %Y%m%d%H expands to YYYYMMDDHH -INIT_TIME_FMT = %Y%m%d%H - -# Start time for METplus run - must match INIT_TIME_FMT -INIT_BEG = {{cdate}} - -# End time for METplus run - must match INIT_TIME_FMT -INIT_END = {{cdate}} - -# Increment between METplus runs (in seconds if no units are specified). -# Must be >= 60 seconds. -INIT_INCREMENT = 3600 - -# List of forecast leads to process for each run time (init or valid) -# In hours if units are not specified -# If unset, defaults to 0 (don't loop through forecast leads) -LEAD_SEQ = {{fhr_list}} -# -# Order of loops to process data - Options are times, processes -# Not relevant if only one item is in the PROCESS_LIST -# times = run all wrappers in the PROCESS_LIST for a single run time, then -# increment the run time and run all wrappers again until all times have -# been evaluated. -# processes = run the first wrapper in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST until all -# wrappers have been run -# -LOOP_ORDER = times -# -# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. 
-# -LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} -# -# Specify the name of the METplus log file. -# -LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} -# -# Specify the location and name of the final METplus conf file. -# -METPLUS_CONF = {GRID_STAT_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}} -# -# Location of MET configuration file to pass to GridStat. -# -# References PARM_BASE, which is the location of the parm directory -# corresponding to the ush directory of the run_metplus.py script that -# is called or the value of the environment variable METPLUS_PARM_BASE -# if set. -# -GRID_STAT_CONFIG_FILE = {PARM_BASE}/met_config/GridStatConfig_wrapped - -# grid to remap data. Value is set as the 'to_grid' variable in the 'regrid' dictionary -# See MET User's Guide for more information -GRID_STAT_REGRID_TO_GRID = FCST -GRID_STAT_REGRID_VLD_THRESH = 0.5 -GRID_STAT_REGRID_METHOD = BUDGET -GRID_STAT_REGRID_WIDTH = 2 -GRID_STAT_REGRID_SHAPE = SQUARE - -GRID_STAT_INTERP_FIELD = NONE -GRID_STAT_INTERP_VLD_THRESH = 1.0 -GRID_STAT_INTERP_SHAPE = SQUARE -GRID_STAT_INTERP_TYPE_METHOD = NEAREST -GRID_STAT_INTERP_TYPE_WIDTH = 1 - -GRID_STAT_GRID_WEIGHT_FLAG = NONE -# -# Name to identify model (forecast) data in output. -# -# The variable MODEL is recorded in the stat files, and the data in -# these files is then plotted (e.g. using METViewer). Here, we add a -# suffix to MODEL that identifies the data as ensemble-probabilistic. -# This makes it easier to identify each curve. -# -MODEL = {{vx_fcst_model_name}}_ensprob -# -# Name to identify observation data in output. -# -OBTYPE = {{obtype}} -# -# Value to enter under the DESC column in the output stat file. -# -GRID_STAT_DESC = NA - -# List of variables to compare in GridStat - FCST_VAR1 variables correspond -# to OBS_VAR1 variables -# Note [FCST/OBS/BOTH]_GRID_STAT_VAR_NAME can be used instead if different evaluations -# are needed for different tools - -GRID_STAT_MET_CONFIG_OVERRIDES = cat_thresh = [NA]; cnt_thresh = [NA]; cnt_logic = UNION; wind_thresh = [NA]; wind_logic = UNION; ci_alpha = [0.05]; rank_corr_flag = FALSE; -# -# List of forecast and corresponding observation fields to process. 
-# -# FREQ -# Process as probability -# -FCST_VAR1_NAME = {{fieldname_in_met_output}}_L0_ENS_FREQ_ge20 -FCST_VAR1_LEVELS = L0 -FCST_VAR1_THRESH = ==0.1 -OBS_VAR1_NAME = EchoTop18 -OBS_VAR1_LEVELS = Z500 -OBS_VAR1_THRESH = ge20 -OBS_VAR1_OPTIONS = censor_thresh = lt-20.0; - censor_val = -20.0; - cnt_thresh = [ >15 ]; - cnt_logic = UNION; - convert(x) = x * 3280.84 * 0.001; - -FCST_VAR2_NAME = {{fieldname_in_met_output}}_L0_ENS_FREQ_ge30 -FCST_VAR2_LEVELS = L0 -FCST_VAR2_THRESH = ==0.1 -OBS_VAR2_NAME = EchoTop18 -OBS_VAR2_LEVELS = Z500 -OBS_VAR2_THRESH = ge30 -OBS_VAR2_OPTIONS = censor_thresh = lt-20.0; - censor_val = -20.0; - cnt_thresh = [ >15 ]; - cnt_logic = UNION; - convert(x) = x * 3280.84 * 0.001; - -FCST_VAR3_NAME = {{fieldname_in_met_output}}_L0_ENS_FREQ_ge40 -FCST_VAR3_LEVELS = L0 -FCST_VAR3_THRESH = ==0.1 -OBS_VAR3_NAME = EchoTop18 -OBS_VAR3_LEVELS = Z500 -OBS_VAR3_THRESH = ge40 -OBS_VAR3_OPTIONS = censor_thresh = lt-20.0; - censor_val = -20.0; - cnt_thresh = [ >15 ]; - cnt_logic = UNION; - convert(x) = x * 3280.84 * 0.001; - -FCST_VAR4_NAME = {{fieldname_in_met_output}}_L0_ENS_FREQ_ge50 -FCST_VAR4_LEVELS = L0 -FCST_VAR4_THRESH = ==0.1 -OBS_VAR4_NAME = EchoTop18 -OBS_VAR4_LEVELS = Z500 -OBS_VAR4_THRESH = ge50 -OBS_VAR4_OPTIONS = censor_thresh = lt-20.0; - censor_val = -20.0; - cnt_thresh = [ >15 ]; - cnt_logic = UNION; - convert(x) = x * 3280.84 * 0.001; - -# -#Process as scalars for neighborhood methods -## Note that the number of forecast and obs thresholds must match -## but won't actually be applied to NBRCNT outputs with "nbrhd.field = OBS;" -# -FCST_VAR5_NAME = {{fieldname_in_met_output}}_L0_ENS_FREQ_ge20 -FCST_VAR5_LEVELS = L0 -FCST_VAR5_THRESH = ==0.1 -FCST_VAR5_OPTIONS = prob = FALSE; -OBS_VAR5_NAME = EchoTop18 -OBS_VAR5_LEVELS = Z500 -OBS_VAR5_THRESH = ge20 -OBS_VAR5_OPTIONS = censor_thresh = lt-20.0; - censor_val = -20.0; - cnt_thresh = [ >15 ]; - cnt_logic = UNION; - convert(x) = x * 3280.84 * 0.001; - nbrhd = { field = OBS; shape = SQUARE; width = [ 1 ]; vld_thresh = 1.0; } - -FCST_VAR6_NAME = {{fieldname_in_met_output}}_L0_ENS_FREQ_ge30 -FCST_VAR6_LEVELS = L0 -FCST_VAR6_THRESH = ==0.1 -FCST_VAR6_OPTIONS = prob = FALSE; -OBS_VAR6_NAME = EchoTop18 -OBS_VAR6_LEVELS = Z500 -OBS_VAR6_THRESH = ge30 -OBS_VAR6_OPTIONS = censor_thresh = lt-20.0; - censor_val = -20.0; - cnt_thresh = [ >15 ]; - cnt_logic = UNION; - convert(x) = x * 3280.84 * 0.001; - nbrhd = { field = OBS; shape = SQUARE; width = [ 1 ]; vld_thresh = 1.0; } - -FCST_VAR7_NAME = {{fieldname_in_met_output}}_L0_ENS_FREQ_ge40 -FCST_VAR7_LEVELS = L0 -FCST_VAR7_THRESH = ==0.1 -FCST_VAR7_OPTIONS = prob = FALSE; -OBS_VAR7_NAME = EchoTop18 -OBS_VAR7_LEVELS = Z500 -OBS_VAR7_THRESH = ge40 -OBS_VAR7_OPTIONS = censor_thresh = lt-20.0; - censor_val = -20.0; - cnt_thresh = [ >15 ]; - cnt_logic = UNION; - convert(x) = x * 3280.84 * 0.001; - nbrhd = { field = OBS; shape = SQUARE; width = [ 1 ]; vld_thresh = 1.0; } - -FCST_VAR8_NAME = {{fieldname_in_met_output}}_L0_ENS_FREQ_ge50 -FCST_VAR8_LEVELS = L0 -FCST_VAR8_THRESH = ==0.1 -FCST_VAR8_OPTIONS = prob = FALSE; -OBS_VAR8_NAME = EchoTop18 -OBS_VAR8_LEVELS = Z500 -OBS_VAR8_THRESH = ge50 -OBS_VAR8_OPTIONS = censor_thresh = lt-20.0; - censor_val = -20.0; - cnt_thresh = [ >15 ]; - cnt_logic = UNION; - convert(x) = x * 3280.84 * 0.001; - nbrhd = { field = OBS; shape = SQUARE; width = [ 1 ]; vld_thresh = 1.0; } - -# -# Forecast data time window(s). -# -#FCST_GRID_STAT_FILE_WINDOW_BEGIN = 0 -#FCST_GRID_STAT_FILE_WINDOW_END = 0 -# -# Observation data time window(s). 
-# -OBS_GRID_STAT_FILE_WINDOW_BEGIN = -300 -OBS_GRID_STAT_FILE_WINDOW_END = 300 - -# MET GridStat neighborhood values -# See the MET User's Guide GridStat section for more information -GRID_STAT_NEIGHBORHOOD_FIELD = - -# width value passed to nbrhd dictionary in the MET config file -GRID_STAT_NEIGHBORHOOD_WIDTH = - -# shape value passed to nbrhd dictionary in the MET config file -GRID_STAT_NEIGHBORHOOD_SHAPE = - -# cov thresh list passed to nbrhd dictionary in the MET config file -GRID_STAT_NEIGHBORHOOD_COV_THRESH = >=0.5 - -# Set to true to run GridStat separately for each field specified -# Set to false to create one run of GridStat per run time that -# includes all fields specified. -GRID_STAT_ONCE_PER_FIELD = False -# -# Set to true if forecast data is probabilistic. -# -FCST_IS_PROB = True -FCST_PROB_IN_GRIB_PDS = False -# -# Only used if FCST_IS_PROB is true - sets probabilistic threshold -# -FCST_GRID_STAT_PROB_THRESH = ==0.1 - -GRID_STAT_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE} - -# Climatology data -#GRID_STAT_CLIMO_MEAN_FILE_NAME = -#GRID_STAT_CLIMO_MEAN_FIELD = -#GRID_STAT_CLIMO_MEAN_REGRID_METHOD = -#GRID_STAT_CLIMO_MEAN_REGRID_WIDTH = -#GRID_STAT_CLIMO_MEAN_REGRID_VLD_THRESH = -#GRID_STAT_CLIMO_MEAN_REGRID_SHAPE = -#GRID_STAT_CLIMO_MEAN_TIME_INTERP_METHOD = -#GRID_STAT_CLIMO_MEAN_MATCH_MONTH = -#GRID_STAT_CLIMO_MEAN_DAY_INTERVAL = -#GRID_STAT_CLIMO_MEAN_HOUR_INTERVAL = - -#GRID_STAT_CLIMO_STDEV_FILE_NAME = -#GRID_STAT_CLIMO_STDEV_FIELD = -#GRID_STAT_CLIMO_STDEV_REGRID_METHOD = -#GRID_STAT_CLIMO_STDEV_REGRID_WIDTH = -#GRID_STAT_CLIMO_STDEV_REGRID_VLD_THRESH = -#GRID_STAT_CLIMO_STDEV_REGRID_SHAPE = -#GRID_STAT_CLIMO_STDEV_TIME_INTERP_METHOD = -#GRID_STAT_CLIMO_STDEV_MATCH_MONTH = -#GRID_STAT_CLIMO_STDEV_DAY_INTERVAL = -#GRID_STAT_CLIMO_STDEV_HOUR_INTERVAL = - -GRID_STAT_CLIMO_CDF_BINS = 1 -#GRID_STAT_CLIMO_CDF_CENTER_BINS = False -#GRID_STAT_CLIMO_CDF_WRITE_BINS = True - -GRID_STAT_MASK_GRID = - -# Statistical output types -GRID_STAT_OUTPUT_FLAG_FHO = NONE -GRID_STAT_OUTPUT_FLAG_CTC = NONE -GRID_STAT_OUTPUT_FLAG_CTS = NONE -GRID_STAT_OUTPUT_FLAG_MCTC = NONE -GRID_STAT_OUTPUT_FLAG_MCTS = NONE -GRID_STAT_OUTPUT_FLAG_CNT = NONE -GRID_STAT_OUTPUT_FLAG_SL1L2 = NONE -GRID_STAT_OUTPUT_FLAG_SAL1L2 = NONE -GRID_STAT_OUTPUT_FLAG_VL1L2 = NONE -GRID_STAT_OUTPUT_FLAG_VAL1L2 = NONE -GRID_STAT_OUTPUT_FLAG_VCNT = NONE -GRID_STAT_OUTPUT_FLAG_PCT = STAT -GRID_STAT_OUTPUT_FLAG_PSTD = STAT -GRID_STAT_OUTPUT_FLAG_PJC = STAT -GRID_STAT_OUTPUT_FLAG_PRC = STAT -GRID_STAT_OUTPUT_FLAG_ECLV = NONE -GRID_STAT_OUTPUT_FLAG_NBRCTC = NONE -GRID_STAT_OUTPUT_FLAG_NBRCTS = NONE -GRID_STAT_OUTPUT_FLAG_NBRCNT = STAT -GRID_STAT_OUTPUT_FLAG_GRAD = NONE - -# NetCDF matched pairs output file -#GRID_STAT_NC_PAIRS_VAR_NAME = -GRID_STAT_NC_PAIRS_FLAG_LATLON = FALSE -GRID_STAT_NC_PAIRS_FLAG_RAW = FALSE -GRID_STAT_NC_PAIRS_FLAG_DIFF = FALSE -GRID_STAT_NC_PAIRS_FLAG_CLIMO = FALSE -GRID_STAT_NC_PAIRS_FLAG_WEIGHT = FALSE -GRID_STAT_NC_PAIRS_FLAG_NBRHD = FALSE -GRID_STAT_NC_PAIRS_FLAG_FOURIER = FALSE -GRID_STAT_NC_PAIRS_FLAG_GRADIENT = FALSE -GRID_STAT_NC_PAIRS_FLAG_DISTANCE_MAP = FALSE -GRID_STAT_NC_PAIRS_FLAG_APPLY_MASK = FALSE - -# End of [config] section and start of [dir] section. -[dir] -# -# Directory containing observation input to GridStat. -# -OBS_GRID_STAT_INPUT_DIR = {{obs_input_dir}} -# -# Directory containing forecast input to GridStat. -# -FCST_GRID_STAT_INPUT_DIR = {{fcst_input_dir}} -# -# Directory containing climatology mean input to GridStat. Not used in -# this example. 
-# -GRID_STAT_CLIMO_MEAN_INPUT_DIR = -# -# Directory containing climatology mean input to GridStat. Not used in -# this example. -# -GRID_STAT_CLIMO_STDEV_INPUT_DIR = -# -# Directory in which to write output from GridStat. -# -# OUTPUT_BASE apparently has to be set to something; it cannot be left -# to its default value. But it is not explicitly used elsewhere in this -# configuration file. -# -OUTPUT_BASE = {{output_base}} -GRID_STAT_OUTPUT_DIR = {{output_dir}} -# -# Directory for staging data. -# -STAGING_DIR = {{staging_dir}} - -# End of [dir] section and start of [filename_templates] section. -[filename_templates] -# -# Template for observation input to GridStat relative to -# OBS_GRID_STAT_INPUT_DIR. -# -OBS_GRID_STAT_INPUT_TEMPLATE = {{obs_input_fn_template}} -# -# Template for forecast input to GridStat relative to -# FCST_GRID_STAT_INPUT_DIR. -# -FCST_GRID_STAT_INPUT_TEMPLATE = {{fcst_input_fn_template}} -# -# Template for output from GridStat relative to GRID_STAT_OUTPUT_DIR. -# -GRID_STAT_OUTPUT_TEMPLATE = -# -# Template for climatology input to GridStat relative to -# GRID_STAT_CLIMO_MEAN_INPUT_DIR. Not used in this example. -# -GRID_STAT_CLIMO_MEAN_INPUT_TEMPLATE = -# -# Template for climatology input to GridStat relative to -# GRID_STAT_CLIMO_STDEV_INPUT_DIR. Not used in this example. -# -GRID_STAT_CLIMO_STDEV_INPUT_TEMPLATE = -# -# Variable used to specify one or more verification mask files for -# GridStat. Not used for this example. -# -GRID_STAT_VERIFICATION_MASK_TEMPLATE = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly diff --git a/parm/metplus/GridStat_or_PointStat.conf b/parm/metplus/GridStat_or_PointStat.conf new file mode 100644 index 0000000000..155b028291 --- /dev/null +++ b/parm/metplus/GridStat_or_PointStat.conf @@ -0,0 +1,856 @@ +# {{MetplusToolName}} METplus Configuration + +[config] + +# List of applications (tools) to run. +PROCESS_LIST = {{MetplusToolName}} + +# time looping - options are INIT, VALID, RETRO, and REALTIME +# If set to INIT or RETRO: +# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set +# If set to VALID or REALTIME: +# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set +LOOP_BY = INIT + +# Format of INIT_BEG and INIT_END using % items +# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. +# see www.strftime.org for more information +# %Y%m%d%H expands to YYYYMMDDHH +INIT_TIME_FMT = %Y%m%d%H + +# Start time for METplus run - must match INIT_TIME_FMT +INIT_BEG = {{cdate}} + +# End time for METplus run - must match INIT_TIME_FMT +INIT_END = {{cdate}} + +# Increment between METplus runs (in seconds if no units are specified). +# Must be >= 60 seconds. +INIT_INCREMENT = 3600 + +# List of forecast leads to process for each run time (init or valid) +# In hours if units are not specified +# If unset, defaults to 0 (don't loop through forecast leads) +LEAD_SEQ = {{vx_leadhr_list}} +# +# Order of loops to process data - Options are times, processes +# Not relevant if only one item is in the PROCESS_LIST +# times = run all wrappers in the PROCESS_LIST for a single run time, then +# increment the run time and run all wrappers again until all times have +# been evaluated. +# processes = run the first wrapper in the PROCESS_LIST for all times +# specified, then repeat for the next item in the PROCESS_LIST until all +# wrappers have been run +# +LOOP_ORDER = times +# +# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. 
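+#
+# (Hedged illustration, not part of the original template: when this file
+# is rendered for GridStat, i.e. with METPLUS_TOOL_NAME = 'GRID_STAT' and,
+# say, metplus_verbosity_level = 2, the setting below expands to
+# "LOG_GRID_STAT_VERBOSITY = 2".)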
+# +LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} +# +# Specify the name of the METplus log file. +# +LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} +# +# Specify the location and name of the final METplus conf file. +# +METPLUS_CONF = {{ '{' ~ METPLUS_TOOL_NAME ~ '_OUTPUT_DIR}' }}/metplus_final.{{metplus_config_fn}} +# +# Location of MET configuration file to pass to {{MetplusToolName}}. +# +# References PARM_BASE, which is the location of the parm directory +# corresponding to the ush directory of the run_metplus.py script that +# is called or the value of the environment variable METPLUS_PARM_BASE +# if set. +# +{{METPLUS_TOOL_NAME}}_CONFIG_FILE = {PARM_BASE}/met_config/{{MetplusToolName}}Config_wrapped + +{%- if (METPLUS_TOOL_NAME == 'GRID_STAT') %} +# +# Grid to remap data. Value is set as the 'to_grid' variable in the +# 'regrid' dictionary. See MET User's Guide for more information. +# +{{METPLUS_TOOL_NAME}}_REGRID_TO_GRID = FCST +{{METPLUS_TOOL_NAME}}_REGRID_VLD_THRESH = 0.5 +{{METPLUS_TOOL_NAME}}_REGRID_METHOD = BUDGET +{{METPLUS_TOOL_NAME}}_REGRID_WIDTH = 2 +{{METPLUS_TOOL_NAME}}_REGRID_SHAPE = SQUARE +{%- elif (METPLUS_TOOL_NAME == 'POINT_STAT') %} +# +# Regrid to specified grid. Indicate NONE if no regridding, or the grid id +# (e.g. G212) +# +{{METPLUS_TOOL_NAME}}_REGRID_TO_GRID = NONE +{{METPLUS_TOOL_NAME}}_REGRID_METHOD = BILIN +{{METPLUS_TOOL_NAME}}_REGRID_WIDTH = 2 +{%- endif %} + +{%- if (METPLUS_TOOL_NAME == 'POINT_STAT') %} +# +{{METPLUS_TOOL_NAME}}_OBS_QUALITY_INC = 0, 1, 2, 3, 9, NA +#{{METPLUS_TOOL_NAME}}_OBS_QUALITY_EXC = +{%- endif %} + +{%- if (METPLUS_TOOL_NAME == 'GRID_STAT') %} + + {%- if input_field_group in ['APCP', 'ASNOW'] %} + +#{{METPLUS_TOOL_NAME}}_INTERP_FIELD = BOTH +#{{METPLUS_TOOL_NAME}}_INTERP_VLD_THRESH = 1.0 +#{{METPLUS_TOOL_NAME}}_INTERP_SHAPE = SQUARE +#{{METPLUS_TOOL_NAME}}_INTERP_TYPE_METHOD = NEAREST +#{{METPLUS_TOOL_NAME}}_INTERP_TYPE_WIDTH = 1 + +#{{METPLUS_TOOL_NAME}}_GRID_WEIGHT_FLAG = + {%- elif input_field_group in ['REFC', 'RETOP'] %} + +{{METPLUS_TOOL_NAME}}_INTERP_FIELD = NONE +{{METPLUS_TOOL_NAME}}_INTERP_VLD_THRESH = 1.0 +{{METPLUS_TOOL_NAME}}_INTERP_SHAPE = SQUARE +{{METPLUS_TOOL_NAME}}_INTERP_TYPE_METHOD = NEAREST +{{METPLUS_TOOL_NAME}}_INTERP_TYPE_WIDTH = 1 + +{{METPLUS_TOOL_NAME}}_GRID_WEIGHT_FLAG = NONE + {%- endif %} + +{%- elif (METPLUS_TOOL_NAME == 'POINT_STAT') %} + +#{{METPLUS_TOOL_NAME}}_INTERP_VLD_THRESH = +#{{METPLUS_TOOL_NAME}}_INTERP_SHAPE = +{{METPLUS_TOOL_NAME}}_INTERP_TYPE_METHOD = BILIN +{{METPLUS_TOOL_NAME}}_INTERP_TYPE_WIDTH = 2 + +{%- endif %} +# +# Name to identify model (forecast) data in output. +# +# The variable MODEL is recorded in the stat files, and the data in +# these files is then plotted (e.g. using METViewer). Here, we add a +# suffix to MODEL that identifies the forecast ensemble member. This +# makes it easier to identify each curve. +# +MODEL = {{vx_fcst_model_name}}_{{ensmem_name}} +# +# Name to identify observation data in output. +# +OBTYPE = {{obtype}} +# +# Value to enter under the DESC column in the output stat file. Here, +# we store the value of the original lead in this column, i.e. the lead +# with zero corresponding to the actual start time of the forecast (which +# is (cdate - time_lag)), not to cdate. This is just the lead in +# LEAD_SEQ with the time lag (time_lag) of the current forecast member +# added on. +# +# Uncomment this line only after upgrading to METplus 5.x. 
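+# (Hedged example, not part of the original file: per the explanation
+# above, for a lead of 6 hours in LEAD_SEQ and a member time lag of 3600
+# seconds, the commented-out line below would render DESC as 070000, i.e.
+# the lead measured from the member's actual, time-lagged start, rather
+# than 060000.)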
+#{{METPLUS_TOOL_NAME}}_DESC = {lead?fmt=%H%M%S?shift={{time_lag}}}
+{{METPLUS_TOOL_NAME}}_DESC = NA
+#
+# Verification Masking regions
+# Indicate which grid and polygon masking region, if applicable
+#
+{{METPLUS_TOOL_NAME}}_MASK_GRID =
+
+{%- if (METPLUS_TOOL_NAME == 'POINT_STAT') %}
+#
+# List of full paths to poly masking files. NOTE: Only short lists of poly
+# files work (those that fit on one line); a long list will result in an
+# environment variable that is too long, causing an error. For long
+# lists of poly masking files (i.e. all the mask files in the NCEP_mask
+# directory), define these in the METplus {{MetplusToolName}} configuration file.
+#
+{{METPLUS_TOOL_NAME}}_POLY = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly
+{{METPLUS_TOOL_NAME}}_STATION_ID =
+
+# Message types: if all message types are to be returned, leave this empty;
+# otherwise, indicate the message types of interest.
+{{METPLUS_TOOL_NAME}}_MESSAGE_TYPE = {{fieldname_in_met_filedir_names}}
+{%- endif %}
+{%- set overrides_indent_len = 0 %}
+{%- set overrides_indent = '' %}
+{%- if (METPLUS_TOOL_NAME == 'GRID_STAT') %}
+ {%- set overrides_indent_len = 33 %}
+ {%- set overrides_indent = ' '*overrides_indent_len %}
+#
+# Overrides of MET configuration defaults.
+#
+{{METPLUS_TOOL_NAME}}_MET_CONFIG_OVERRIDES = cat_thresh = [NA];
+{{overrides_indent}}cnt_thresh = [NA];
+{{overrides_indent}}cnt_logic = UNION;
+{{overrides_indent}}wind_thresh = [NA];
+{{overrides_indent}}wind_logic = UNION;
+{{overrides_indent}}ci_alpha = [0.05];
+{{overrides_indent}}rank_corr_flag = FALSE;
+{%- endif %}
+#
+# List of forecast and corresponding observation fields to process.
+#
+{%- if (METPLUS_TOOL_NAME == 'GRID_STAT') %}
+ {%- if input_field_group in ['APCP', 'ASNOW'] %}
+# Note that for accumulated fields such as APCP and ASNOW, in the input
+# forecast and observation files (which are generated by MET's PcpCombine
+# tool) the accumulation period is appended to the field name, so the
+# same is done here.
+#
+ {%- endif %}
+{%- endif %}
+# Note on use of set_attr_lead and ensemble member time-lagging:
+# -------------------------------------------------------------
+# The set_attr_lead parameter appearing below in [FCST|OBS]_VAR_OPTIONS
+# specifies the lead to use both in naming of the output .stat and .nc
+# files and for setting the lead values contained in those files. This
+# option causes MET/METplus to use the lead values in the variable LEAD_SEQ
+# set above, which are the same for all ensemble forecast members (i.e.
+# regardless of whether members are time lagged with respect to the
+# nominal cycle date specified by cdate). If set_attr_lead were not
+# specified as below, then MET/METplus would get the lead from the input
+# forecast file, and that would in general differ from one ensemble member
+# to the next depending on whether the member is time-lagged. That would
+# cause confusion, so here, we always use lead values with zero lead
+# corresponding to the nominal cdate.
+#
+{#-
+Import the file containing jinja macros.
+#}
+{%- import metplus_templates_dir ~ '/metplus_macros.jinja' as metplus_macros %}
+
+{#-
+Jinja requires certain variables to be defined globally within the template
+before they can be used in if-statements and other scopes (see Jinja scoping
+rules). Define such variables.
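+
+(Hedged illustrative note, not part of the original: in Jinja, a variable
+assigned with set inside an if- or for-block is local to that block, which
+is why the names below are pre-declared at top level; loop-carried state
+such as the field counter instead lives on a namespace() object -- see the
+"set ns = namespace(var_count = 0)" statement further below.)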
+#}
+{%- set indx_input_level_fcst = '' %}
+{%- set indx_input_thresh_fcst = '' %}
+{%- set error_msg = '' %}
+
+{%- set field_fcst = '' %}
+{%- set field_obs = '' %}
+{%- set level_fcst = '' %}
+{%- set level_obs = '' %}
+{%- set thresh_fcst = '' %}
+{%- set thresh_obs = '' %}
+
+{%- set levels_fcst = '' %}
+{%- set levels_obs = '' %}
+{%- set threshes_cpld = [] %}
+{%- set valid_threshes_fcst = [] %}
+{%- set valid_threshes_obs = [] %}
+{%- set threshes_fcst = [] %}
+{%- set threshes_obs = [] %}
+
+{#-
+Get the set of valid field groups and ensure that the specified input
+field group appears in this list.
+#}
+{%- set valid_field_groups = vx_config_dict.keys()|list %}
+{{- metplus_macros.check_field_group(valid_field_groups, input_field_group) }}
+
+{#-
+Reset the input forecast level so that if it happens to be an accumulation
+(e.g. 'A03'), the leading zeros in front of the hour are stripped out (e.g.
+reset to 'A3').
+#}
+{%- set input_level_fcst = metplus_macros.get_accumulation_no_zero_pad(input_level_fcst) %}
+
+{#-
+Extract from the configuration dictionary the set (which itself is a
+dictionary) of fields, levels, and thresholds corresponding to the input
+field group. Then set the delimiter string that separates forecast and
+observation values in the various items (i.e. dictionary keys and values
+representing field names, levels, and thresholds) in this dictionary.
+#}
+{%- set fields_levels_threshes_cpld = vx_config_dict[input_field_group] %}
+{%- set delim_str = metplus_macros.set_delim_str() %}
+
+{#-
+Loop over the fields and set field names, levels, thresholds, and/or
+options for each field, both for forecasts and for observations, in the
+METplus configuration file.
+#}
+{%- set ns = namespace(var_count = 0) %}
+{%- for field_cpld, levels_threshes_cpld in fields_levels_threshes_cpld.items() %}
+
+ {%- if delim_str in field_cpld %}
+ {%- set field_fcst, field_obs = field_cpld.split(delim_str) %}
+ {%- else %}
+ {%- set field_fcst = field_cpld %}
+ {%- set field_obs = field_cpld %}
+ {%- endif %}
+
+ {%- set levels_cpld = levels_threshes_cpld.keys()|list %}
+ {%- set num_levels = levels_cpld|length %}
+
+{#-
+For convenience, create lists of valid forecast and observation levels
+for the current field.
+#}
+ {%- set valid_levels_fcst = [] %}
+ {%- set valid_levels_obs = [] %}
+ {%- for level_cpld, threshes_cpld in levels_threshes_cpld.items() %}
+ {%- if delim_str in level_cpld %}
+ {%- set level_fcst, level_obs = level_cpld.split(delim_str) %}
+ {%- else %}
+ {%- set level_fcst = level_cpld %}
+ {%- set level_obs = level_cpld %}
+ {%- endif %}
+ {%- set tmp = valid_levels_fcst.append(level_fcst) %}
+ {%- set tmp = valid_levels_obs.append(level_obs) %}
+ {%- endfor %}
+
+{#-
+Make sure that the input forecast level (input_level_fcst) is set to a
+valid value.
+#}
+ {%- if (input_level_fcst != 'all') and (input_level_fcst not in valid_levels_fcst) %}
+ {%- set error_msg = '\n' ~
+'The input forecast level (input_level_fcst) must be set either to \'all\'\n' ~
+'or to one of the elements in the list of valid levels (valid_levels_fcst)\n' ~
+'for the current forecast field (field_fcst). This is not the case:\n' ~
+' field_fcst = ' ~ field_fcst ~ '\n' ~
+' valid_levels_fcst = ' ~ valid_levels_fcst ~ '\n' ~
+' input_level_fcst = ' ~ input_level_fcst ~ '\n' %}
+ {{metplus_macros.print_err_and_quit(error_msg)}}
+ {%- endif %}
+
+{#-
+Increment the METplus variable counter.
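+
+(Hedged note, not part of the original: ns.var_count supplies the "n" in
+the FCST_VARn_* and OBS_VARn_* settings written further below, so it is
+incremented once per field processed.)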
+#} + {%- set ns.var_count = ns.var_count+1 %} + +{#- +Set jinja parameters needed in setting the forecast and observation field +level(s). +#} + {%- if (input_level_fcst == 'all') %} + {%- set levels_fcst = valid_levels_fcst %} + {%- set levels_obs = valid_levels_obs %} +{#- +If input_level_fcst is set to 'all' and there is more than one level to +be verified for the current field, then the list of forecast thresholds +for each forecast level must be identical to every other. Check for this. +Note that this restriction includes the order of the thresholds, i.e. the +set of thresholds for each level must be in the same order as for all +other levels. Once this is verified, we can set the index of the level +to use when obtaining thresholds to that of the first (index 0), which +will be valid both for the case of num_levels = 1 and num_levels > 1. +#} + {%- if (num_levels > 1) %} + {{- metplus_macros.check_for_identical_threshes_by_level( + field_cpld, levels_threshes_cpld) }} + {%- endif %} + {%- set indx_input_level_fcst = 0 %} +{#- +If input_level_fcst is set to a specific value: + 1) Ensure that input_level_fcst exists in the list of valid forecast + levels. + 2) Get the index of input_level_fcst in the list of valid forecast + levels. + 3) Use this index to set the forecast and observation levels to one- + element lists containing the appropriate level values. +#} + {%- else %} + + {%- if input_level_fcst in valid_levels_fcst %} + {%- set indx_input_level_fcst = valid_levels_fcst.index(input_level_fcst) %} + {%- set levels_fcst = [valid_levels_fcst[indx_input_level_fcst]] %} + {%- set levels_obs = [valid_levels_obs[indx_input_level_fcst]] %} + {%- else %} + {%- set error_msg = '\n' ~ +'For the current forecast field (field_fcst), the input forecast level\n' ~ +'(input_level_fcst) does not exist in the list of valid forecast levels\n' ~ +'(valid_levels_fcst):\n' ~ +' field_fcst = ' ~ field_fcst ~ '\n' ~ +' input_level_fcst = ' ~ input_level_fcst ~ '\n' ~ +' valid_levels_fcst = ' ~ valid_levels_fcst ~ '\n' %} + {{metplus_macros.print_err_and_quit(error_msg)}} + {%- endif %} + + {%- endif %} + +{#- +Set jinja parameters needed in setting the forecast and observation field +threshold(s). +#} + {%- if (input_thresh_fcst != 'none') %} +{#- +Now set the list of valid forecast thresholds to the one corresponding +to the first (zeroth) forecast level in the list of forecast levels set +above. We can do this because, for the case of a single forecast level, +there is only one list of forecast thresholds to consider (the first +one), and for the case of all levels, all levels have the same set of +thresholds (as verified by the check above). +#} + {%- set threshes_cpld = levels_threshes_cpld[levels_cpld[indx_input_level_fcst]] %} + {%- set valid_threshes_fcst = [] %} + {%- set valid_threshes_obs = [] %} + {%- for thresh_cpld in threshes_cpld %} + {%- if delim_str in thresh_cpld %} + {%- set thresh_fcst, thresh_obs = thresh_cpld.split(delim_str) %} + {%- else %} + {%- set thresh_fcst = thresh_cpld %} + {%- set thresh_obs = thresh_cpld %} + {%- endif %} + {%- set tmp = valid_threshes_fcst.append(thresh_fcst) %} + {%- set tmp = valid_threshes_obs.append(thresh_obs) %} + {%- endfor %} +{#- +If input_thresh_fcst is set to 'all', set the list of forecast and +observation thresholds to the full set of valid values. 
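+
+(Hedged example, not part of the original: if a coupled threshold string
+were, say, 'ge20&&ge25' with delim_str equal to '&&' -- the actual
+delimiter is defined in metplus_macros.jinja -- it would split into a
+forecast threshold 'ge20' and an observation threshold 'ge25'; a string
+without the delimiter is used verbatim for both.)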
+#}
+ {%- if (input_thresh_fcst == 'all') %}
+
+ {%- set threshes_fcst = valid_threshes_fcst %}
+ {%- set threshes_obs = valid_threshes_obs %}
+{#-
+If input_thresh_fcst is set to a specific value:
+ 1) Ensure that input_thresh_fcst exists in the list of valid forecast
+ thresholds.
+ 2) Get the index of input_thresh_fcst in the list of valid forecast
+ thresholds.
+ 3) Use this index to set the forecast and observation threshold to one-
+ element lists containing the appropriate threshold values.
+#}
+ {%- else %}
+
+ {%- if input_thresh_fcst in valid_threshes_fcst %}
+ {%- set indx_input_thresh_fcst = valid_threshes_fcst.index(input_thresh_fcst) %}
+ {%- set threshes_fcst = [valid_threshes_fcst[indx_input_thresh_fcst]] %}
+ {%- set threshes_obs = [valid_threshes_obs[indx_input_thresh_fcst]] %}
+ {%- else %}
+ {%- set error_msg = '\n' ~
+'For the current forecast field (field_fcst) and list of forecast levels\n' ~
+'(levels_fcst), the input forecast threshold (input_thresh_fcst) does not\n' ~
+'exist in the list of valid forecast thresholds (valid_threshes_fcst):\n' ~
+' field_fcst = ' ~ field_fcst ~ '\n' ~
+' levels_fcst = ' ~ levels_fcst ~ '\n' ~
+' valid_threshes_fcst = ' ~ valid_threshes_fcst ~ '\n' ~
+' input_thresh_fcst = ' ~ input_thresh_fcst ~ '\n' %}
+ {{metplus_macros.print_err_and_quit(error_msg)}}
+ {%- endif %}
+
+ {%- endif %}
+
+ {%- endif %}
+
+{#-
+Set forecast field name. Note that this has to exactly match the name
+of the field in the input forecast file.
+
+For accumulated fields, the input forecast file is generated by MET's
+PcpCombine tool. In that file, the field name consists of the forecast
+field name here (field_fcst) with the accumulation period appended to
+it (separated by an underscore), so we must do the same here to get an
+exact match.
+#}
+ {%- if (input_field_group in ['APCP', 'ASNOW']) %}
+FCST_VAR{{ns.var_count}}_NAME = {{field_fcst}}_{{accum_hh}}
+ {%- else %}
+FCST_VAR{{ns.var_count}}_NAME = {{field_fcst}}
+ {%- endif %}
+
+{#-
+Set forecast field level(s).
+#}
+FCST_VAR{{ns.var_count}}_LEVELS = {{levels_fcst|join(', ')}}
+
+{#-
+Set forecast field threshold(s). Note that:
+1) No forecast thresholds are included in the METplus configuration file
+ if input_thresh_fcst is set to 'none'.
+2) If threshes_fcst has been reset to something other than its default value
+ of an empty list, then we set the forecast thresholds in the METplus
+ configuration file because that implies threshes_fcst was set above to
+ a non-empty value. Then reset threshes_fcst to its default value for
+ proper processing of thresholds for the next field.
+#}
+ {%- if (input_thresh_fcst != 'none') %}
+ {%- if (threshes_fcst != []) %}
+FCST_VAR{{ns.var_count}}_THRESH = {{threshes_fcst|join(', ')}}
+ {%- endif %}
+ {%- set threshes_fcst = [] %}
+ {%- endif %}
+
+{#-
+Set forecast field options.
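+
+(Hedged rendered example, not part of the original: for the first field of
+the REFC group, the lines below would expand to something like
+
+  FCST_VAR1_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}";
+                      cnt_thresh = [ >15 ];
+                      cnt_logic = UNION;
+
+with continuation lines indented by opts_indent so that they line up under
+the first option.)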
+#} +FCST_VAR{{ns.var_count}}_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}"; + + {%- set opts_indent_len = 20 %} + {%- if (ns.var_count > 9) and (ns.var_count <= 99) %} + {%- set opts_indent_len = opts_indent_len + 1 %} + {%- elif (ns.var_count > 99) and (ns.var_count <= 999) %} + {%- set opts_indent_len = opts_indent_len + 2 %} + {%- elif (ns.var_count > 999) %} + {%- set opts_indent_len = opts_indent_len + 3 %} + {%- endif %} + {%- set opts_indent = ' '*opts_indent_len %} + + {%- if (input_field_group == 'REFC') %} + + {%- if (field_fcst == 'REFC') %} +{{opts_indent}}cnt_thresh = [ >15 ]; +{{opts_indent}}cnt_logic = UNION; + {%- endif %} + + {%- elif (input_field_group == 'RETOP') %} + + {%- if (field_fcst == 'RETOP') %} +{{opts_indent}}convert(x) = x * 3.28084 * 0.001; +{{opts_indent}}cnt_thresh = [ >0 ]; +{{opts_indent}}cnt_logic = UNION; + {%- endif %} + + {%- elif (input_field_group == 'SFC') %} + + {%- if (field_fcst in ['WIND']) %} +{{opts_indent}}GRIB2_pdt = 0; ;; Derive instantaneous 10-m wind from U/V components, overriding max 10-m wind. + {%- elif (field_fcst in ['TCDC']) %} +{{opts_indent}}GRIB_lvl_typ = 200; +{{opts_indent}}GRIB2_ipdtmpl_index=[27]; +{{opts_indent}}GRIB2_ipdtmpl_val=[255]; +{{opts_indent}}interp = { type = [ { method = NEAREST; width = 1; } ]; } + {%- elif (field_fcst in ['VIS']) %} +{{opts_indent}}censor_thresh = [>16090]; +{{opts_indent}}censor_val = [16090]; +{{opts_indent}}interp = { type = [ { method = NEAREST; width = 1; } ]; } + {%- elif (field_fcst in ['HGT']) %} +{{opts_indent}}GRIB_lvl_typ = 215; +{{opts_indent}}desc = "CEILING"; + {%- endif %} + + {%- elif (input_field_group == 'UPA') %} + + {%- if (field_fcst in ['HGT']) %} + {%- if (levels_fcst[0] in ['L0']) %} +{{opts_indent}}GRIB_lvl_typ = 220; + {%- endif %} + {%- elif (field_fcst in ['CAPE']) %} +{{opts_indent}}cnt_thresh = [ >0 ]; + {%- endif %} + + {%- endif %} + +{#- +Set observation field name. Note that this has to exactly match the name +of the field in the input observation file. + +For accumulated fields, the input observation file is generated by MET's +PcpCombine tool. In that file, the field name consists of the observation +field name here (field_obs) with the accumulation period appended to it +(separated by an underscore), so we must do the same here to get an exact +match. +#} + {%- if (input_field_group in ['APCP', 'ASNOW']) %} +OBS_VAR{{ns.var_count}}_NAME = {{field_obs}}_{{accum_hh}} + {%- else %} +OBS_VAR{{ns.var_count}}_NAME = {{field_obs}} + {%- endif %} + +{#- +Set observation field level(s). +#} +OBS_VAR{{ns.var_count}}_LEVELS = {{levels_obs|join(', ')}} + +{#- +Set observation field threshold(s). Note that: +1) No observation thresholds are included in the METplus configuration + file if input_thresh_fcst is set to 'none'. +2) If threshes_obs has been reset to something other than its default value + of an empty list, then we set the observation thresholds in the METplus + configuration file because that implies threshes_obs was set above to + a non-empty value. Then reset threshes_obs to its default value for + proper processing of thresholds for the next field. +#} + {%- if (input_thresh_fcst != 'none') %} + {%- if (threshes_obs != []) %} +OBS_VAR{{ns.var_count}}_THRESH = {{threshes_obs|join(', ')}} + {%- endif %} + {%- set threshes_obs = [] %} + {%- endif %} + +{#- +Set observation field options. 
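+
+(Hedged note, not part of the original: opts_indent_len is reduced by one
+just below because "OBS_VARn_OPTIONS = " is one character shorter than
+"FCST_VARn_OPTIONS = ", which keeps continuation lines aligned under the
+first option.)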
+#} + {%- set opts_indent_len = opts_indent_len - 1 %} + {%- set opts_indent = ' '*opts_indent_len %} + + {%- if (input_field_group == 'ASNOW') %} + + {%- if (field_obs == 'ASNOW') %} +OBS_VAR{{ns.var_count}}_OPTIONS = convert(x) = 100.0*x; + {%- endif %} + + {%- elif (input_field_group == 'REFC') %} + + {%- if (field_obs == 'MergedReflectivityQCComposite') %} +OBS_VAR{{ns.var_count}}_OPTIONS = censor_thresh = [eq-999, <-20]; +{{opts_indent}}censor_val = [-9999, -20]; +{{opts_indent}}cnt_thresh = [ >15 ]; +{{opts_indent}}cnt_logic = UNION; + {%- endif %} + + {%- elif (input_field_group == 'RETOP') %} + + {%- if (field_obs in ['EchoTop18']) %} +OBS_VAR{{ns.var_count}}_OPTIONS = convert(x) = x * 3280.84 * 0.001; +{{opts_indent}}censor_thresh = [<=-9.84252,eq-3.28084]; +{{opts_indent}}censor_val = [-9999,-16.4042]; +{{opts_indent}}cnt_thresh = [ >0 ]; +{{opts_indent}}cnt_logic = UNION; + {%- endif %} + + {%- elif (input_field_group == 'SFC') %} + + {%- if (field_obs in ['WIND']) %} +OBS_VAR{{ns.var_count}}_OPTIONS = GRIB2_pdt = 0; ;; Derive instantaneous 10-m wind from U/V components, overriding max 10-m wind. + {%- elif (field_obs in ['VIS']) %} +OBS_VAR{{ns.var_count}}_OPTIONS = censor_thresh = [>16090]; +{{opts_indent}}censor_val = [16090]; +{{opts_indent}}interp = { type = [ { method = NEAREST; width = 1; } ]; } + {%- elif (field_obs in ['CEILING']) %} +OBS_VAR{{ns.var_count}}_OPTIONS = GRIB_lvl_typ = 215; +{{opts_indent}}interp = { type = [ { method = NEAREST; width = 1; } ]; } + {%- endif %} + + {%- elif (input_field_group == 'UPA') %} + + {%- if (field_obs in ['CAPE', 'MLCAPE']) %} +OBS_VAR{{ns.var_count}}_OPTIONS = cnt_thresh = [ >0 ]; +{{opts_indent}}cnt_logic = UNION; + {%- elif (field_obs in ['PBL']) %} + {%- if (field_fcst in ['HPBL']) %} +OBS_VAR{{ns.var_count}}_OPTIONS = desc = "TKE"; + {%- elif (field_fcst in ['HGT']) %} +OBS_VAR{{ns.var_count}}_OPTIONS = desc = "RI"; + {%- endif %} + {%- endif %} + + {%- endif %} + +{#- +Print out a newline to separate the settings for the current field (both +forecast and observation settings) from those for the next field. +#} + {{- '\n' }} + +{%- endfor %} + + + +{%- if (METPLUS_TOOL_NAME == 'GRID_STAT') %} + {%- if (input_field_group in ['APCP', 'ASNOW']) %} +# +# Forecast data time window(s). +# +FCST_{{METPLUS_TOOL_NAME}}_FILE_WINDOW_BEGIN = 0 +FCST_{{METPLUS_TOOL_NAME}}_FILE_WINDOW_END = 0 + {%- endif %} +{%- endif %} +# +# Observation data time window(s). 
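+#
+# (Hedged summary of the logic below, not part of the original file:
+# GridStat uses a file window of +/-300 seconds for the REFC and RETOP
+# field groups and 0 seconds otherwise, while PointStat matches point
+# observations within a window of -1799 to +1800 seconds around the
+# valid time.)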
+# +{%- set obs_window_begin = 0 %} +{%- set obs_window_end = 0 %} +{%- if (METPLUS_TOOL_NAME == 'GRID_STAT') %} + {%- if (input_field_group in ['REFC', 'RETOP']) %} + {%- set obs_window_begin = -300 %} + {%- set obs_window_end = 300 %} + {%- endif %} +OBS_{{METPLUS_TOOL_NAME}}_FILE_WINDOW_BEGIN = {{obs_window_begin}} +OBS_{{METPLUS_TOOL_NAME}}_FILE_WINDOW_END = {{obs_window_end}} +{%- elif (METPLUS_TOOL_NAME == 'POINT_STAT') %} + {%- set obs_window_begin = -1799 %} + {%- set obs_window_end = 1800 %} +OBS_WINDOW_BEGIN = {{obs_window_begin}} +OBS_WINDOW_END = {{obs_window_end}} +OBS_{{METPLUS_TOOL_NAME}}_WINDOW_BEGIN = {OBS_WINDOW_BEGIN} +OBS_{{METPLUS_TOOL_NAME}}_WINDOW_END = {OBS_WINDOW_END} +# +# Optional list of offsets to look for point observation data +# +{{METPLUS_TOOL_NAME}}_OFFSETS = 0 +{%- endif %} + +{%- if (METPLUS_TOOL_NAME == 'GRID_STAT') %} + +# MET {{MetplusToolName}} neighborhood values +# See the MET User's Guide {{MetplusToolName}} section for more information +{{METPLUS_TOOL_NAME}}_NEIGHBORHOOD_FIELD = BOTH + +# width value passed to nbrhd dictionary in the MET config file +{%- if (input_field_group in ['APCP']) %} +{{METPLUS_TOOL_NAME}}_NEIGHBORHOOD_WIDTH = 3,5,7 +{%- elif (input_field_group in ['ASNOW']) %} +{{METPLUS_TOOL_NAME}}_NEIGHBORHOOD_WIDTH = 5 +{%- elif (input_field_group in ['REFC', 'RETOP']) %} +{{METPLUS_TOOL_NAME}}_NEIGHBORHOOD_WIDTH = 1,3,5,7 +{%- endif %} + +# shape value passed to nbrhd dictionary in the MET config file +{{METPLUS_TOOL_NAME}}_NEIGHBORHOOD_SHAPE = SQUARE + +# cov thresh list passed to nbrhd dictionary in the MET config file +{{METPLUS_TOOL_NAME}}_NEIGHBORHOOD_COV_THRESH = >=0.5 +{%- endif %} +# +# Set to True to run {{MetplusToolName}} separately for each field specified; +# set to False to run {{MetplusToolName}} once per run time that includes all +# fields specified. 
+# +{{METPLUS_TOOL_NAME}}_ONCE_PER_FIELD = False + +{{METPLUS_TOOL_NAME}}_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE} + +# Climatology data +{%- set comment_or_null = '' %} +{%- if (METPLUS_TOOL_NAME == 'GRID_STAT') %} +#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_TIME_INTERP_METHOD = + {%- if (input_field_group in ['APCP', 'ASNOW']) %} + {%- set comment_or_null = '#' %} + {%- endif %} + +{{comment_or_null}}{{METPLUS_TOOL_NAME}}_CLIMO_CDF_BINS = 1 +#{{METPLUS_TOOL_NAME}}_CLIMO_CDF_CENTER_BINS = False +#{{METPLUS_TOOL_NAME}}_CLIMO_CDF_WRITE_BINS = True +{%- elif (METPLUS_TOOL_NAME == 'POINT_STAT') %} +{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_TIME_INTERP_METHOD = NEAREST + +{{METPLUS_TOOL_NAME}}_CLIMO_CDF_BINS = 1 +#{{METPLUS_TOOL_NAME}}_CLIMO_CDF_CENTER_BINS = False +#{{METPLUS_TOOL_NAME}}_CLIMO_CDF_WRITE_BINS = True +{%- endif %} + +# Statistical output types +{%- if (METPLUS_TOOL_NAME == 'GRID_STAT') %} +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_FHO = STAT +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_CTC = STAT +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_CTS = STAT +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_MCTC = NONE +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_MCTS = NONE +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_CNT = STAT +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_SL1L2 = NONE +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_SAL1L2 = NONE +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_VL1L2 = NONE +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_VAL1L2 = NONE +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_VCNT = NONE +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_PCT = NONE +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_PSTD = NONE +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_PJC = NONE +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_PRC = NONE +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_ECLV = BOTH +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_NBRCTC = STAT +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_NBRCTS = STAT +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_NBRCNT = STAT +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_GRAD = BOTH +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_DMAP = NONE +{%- elif (METPLUS_TOOL_NAME == 'POINT_STAT') %} +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_FHO = STAT +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_CTC = STAT +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_CTS = STAT +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_MCTC = +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_MCTS = +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_CNT = STAT +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_SL1L2 = STAT +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_SAL1L2 = +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_VL1L2 = STAT +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_VAL1L2 = +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_VCNT = STAT +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_PCT = +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_PSTD = +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_PJC = +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_PRC = +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_ECNT = +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_RPS = +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_ECLV = +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_MPR = +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_ORANK = +{%- endif %} + +{%- if (METPLUS_TOOL_NAME == 'GRID_STAT') %} + +# NetCDF matched pairs output file +#{{METPLUS_TOOL_NAME}}_NC_PAIRS_VAR_NAME = +{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_LATLON = FALSE +{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_RAW = FALSE +{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_DIFF = FALSE +{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_CLIMO = FALSE +#{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_CLIMO_CDP = FALSE +{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_WEIGHT = FALSE +{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_NBRHD = FALSE +#{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_FOURIER = FALSE +#{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_GRADIENT = FALSE +#{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_DISTANCE_MAP = FALSE 
+{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_APPLY_MASK = FALSE
+{%- endif %}
+
+# End of [config] section and start of [dir] section.
+[dir]
+#
+# Directory containing observation input to {{MetplusToolName}}.
+#
+OBS_{{METPLUS_TOOL_NAME}}_INPUT_DIR = {{obs_input_dir}}
+#
+# Directory containing forecast input to {{MetplusToolName}}.
+#
+FCST_{{METPLUS_TOOL_NAME}}_INPUT_DIR = {{fcst_input_dir}}
+#
+# Directory containing climatology mean input to {{MetplusToolName}}. Not used in
+# this example.
+#
+{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_INPUT_DIR =
+#
+# Directory containing climatology standard deviation input to
+# {{MetplusToolName}}. Not used in this example.
+#
+{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_INPUT_DIR =
+#
+# Directory in which to write output from {{MetplusToolName}}.
+#
+# OUTPUT_BASE apparently has to be set to something; it cannot be left
+# to its default value. But it is not explicitly used elsewhere in this
+# configuration file.
+#
+OUTPUT_BASE = {{output_base}}
+{{METPLUS_TOOL_NAME}}_OUTPUT_DIR = {{output_dir}}
+#
+# Directory for staging data.
+#
+STAGING_DIR = {{staging_dir}}
+
+# End of [dir] section and start of [filename_templates] section.
+[filename_templates]
+#
+# Template for observation input to {{MetplusToolName}} relative to
+# OBS_{{METPLUS_TOOL_NAME}}_INPUT_DIR.
+#
+OBS_{{METPLUS_TOOL_NAME}}_INPUT_TEMPLATE = {{obs_input_fn_template}}
+#
+# Template for forecast input to {{MetplusToolName}} relative to
+# FCST_{{METPLUS_TOOL_NAME}}_INPUT_DIR.
+#
+FCST_{{METPLUS_TOOL_NAME}}_INPUT_TEMPLATE = {{fcst_input_fn_template}}
+#
+# Template for output from {{MetplusToolName}} relative to {{METPLUS_TOOL_NAME}}_OUTPUT_DIR.
+#
+{{METPLUS_TOOL_NAME}}_OUTPUT_TEMPLATE =
+#
+# Template for climatology input to {{MetplusToolName}} relative to
+# {{METPLUS_TOOL_NAME}}_CLIMO_MEAN_INPUT_DIR. Not used in this example.
+#
+{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_INPUT_TEMPLATE =
+#
+# Template for climatology input to {{MetplusToolName}} relative to
+# {{METPLUS_TOOL_NAME}}_CLIMO_STDEV_INPUT_DIR. Not used in this example.
+#
+{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_INPUT_TEMPLATE =
+{#-
+Not sure if the following section for ..._VERIFICATION_MASK_TEMPLATE
+is also necessary for PointStat.
+#}
+{%- if (METPLUS_TOOL_NAME == 'GRID_STAT') %}
+#
+# Variable used to specify one or more verification mask files for
+# {{MetplusToolName}}. Not used for this example.
+#
+{{METPLUS_TOOL_NAME}}_VERIFICATION_MASK_TEMPLATE = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly
+{%- endif %}
diff --git a/parm/metplus/Pb2nc_obs.conf b/parm/metplus/Pb2nc_obs.conf
index 729bf2ba06..24d469602f 100644
--- a/parm/metplus/Pb2nc_obs.conf
+++ b/parm/metplus/Pb2nc_obs.conf
@@ -31,7 +31,7 @@ INIT_INCREMENT = 3600
# List of forecast leads to process for each run time (init or valid)
# In hours if units are not specified
# If unset, defaults to 0 (don't loop through forecast leads)
-LEAD_SEQ = {{fhr_list}}
+LEAD_SEQ = {{leadhr_list}}

# Order of loops to process data - Options are times, processes
# Not relevant if only one item is in the PROCESS_LIST
diff --git a/parm/metplus/PcpCombine.conf b/parm/metplus/PcpCombine.conf
new file mode 100644
index 0000000000..04562dc14b
--- /dev/null
+++ b/parm/metplus/PcpCombine.conf
@@ -0,0 +1,225 @@
+{%- if FCST_OR_OBS == 'FCST' -%}
+# PcpCombine METplus Configuration for Forecasts
+{%- elif FCST_OR_OBS == 'OBS' -%}
+# PcpCombine METplus Configuration for Observations
+{%- endif %}
+
+[config]
+
+# List of applications (tools) to run.
+PROCESS_LIST = PcpCombine
+
+# time looping - options are INIT, VALID, RETRO, and REALTIME
+# If set to INIT or RETRO:
+# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set
+# If set to VALID or REALTIME:
+# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set
+LOOP_BY = INIT
+
+# Format of INIT_BEG and INIT_END using % items
+# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc.
+# see www.strftime.org for more information
+# %Y%m%d%H expands to YYYYMMDDHH
+INIT_TIME_FMT = %Y%m%d%H
+
+# Start time for METplus run - must match INIT_TIME_FMT
+INIT_BEG = {{cdate}}
+
+# End time for METplus run - must match INIT_TIME_FMT
+INIT_END = {{cdate}}
+
+# Increment between METplus runs (in seconds if no units are specified).
+# Must be >= 60 seconds.
+INIT_INCREMENT = 3600
+
+# List of forecast leads to process for each run time (init or valid)
+# In hours if units are not specified
+# If unset, defaults to 0 (don't loop through forecast leads)
+LEAD_SEQ = {{vx_leadhr_list}}
+
+# Order of loops to process data - Options are times, processes
+# Not relevant if only one item is in the PROCESS_LIST
+# times = run all wrappers in the PROCESS_LIST for a single run time, then
+# increment the run time and run all wrappers again until all times have
+# been evaluated.
+# processes = run the first wrapper in the PROCESS_LIST for all times
+# specified, then repeat for the next item in the PROCESS_LIST until all
+# wrappers have been run
+LOOP_ORDER = times
+#
+# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud.
+#
+LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}}
+#
+# Specify the name of the METplus log file.
+#
+LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}}
+#
+# Specify the location and name of the final METplus conf file.
+#
+METPLUS_CONF = {% raw %}{{% endraw %}{{FCST_OR_OBS}}_PCP_COMBINE_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}}

+{%- if FCST_OR_OBS == 'FCST' %}
+#
+# Run PcpCombine on forecast data but not observations (observation input
+# files are not provided).
+#
+OBS_PCP_COMBINE_RUN = False
+FCST_PCP_COMBINE_RUN = True
+{%- elif FCST_OR_OBS == 'OBS' %}
+#
+# Run PcpCombine on observation data but not forecasts (forecast input
+# files are not provided).
+#
+OBS_PCP_COMBINE_RUN = True
+FCST_PCP_COMBINE_RUN = False
+{%- endif %}
+#
+# Mode of PcpCombine to use (SUM, ADD, SUBTRACT, DERIVE, or CUSTOM).
+#
+{{FCST_OR_OBS}}_PCP_COMBINE_METHOD = ADD

+{%- if (FCST_OR_OBS == 'FCST') %}
+ {%- if (input_field_group == 'ASNOW') %}
+#
+# Specify the name of the variable for snowfall accumulation.
+#
+# NOTE:
+# For forecasts, currently TSNOWP is used, which is a constant-density
+# estimate of snowfall accumulation. In future RRFS development, a GSL
+# product with variable-density snowfall accumulation is planned for UPP.
+# When that is included and turned on in post, this variable may be
+# changed to ASNOW.
+#
+{{FCST_OR_OBS}}_PCP_COMBINE_INPUT_NAMES = TSNOWP
+{{FCST_OR_OBS}}_PCP_COMBINE_INPUT_LEVELS = A{{input_accum_hh}}
+ {%- endif %}
+{%- elif (FCST_OR_OBS == 'OBS') %}
+ {%- if (input_field_group == 'ASNOW') %}
+{{FCST_OR_OBS}}_PCP_COMBINE_INPUT_NAMES = ASNOW
+{{FCST_OR_OBS}}_PCP_COMBINE_INPUT_LEVELS = A{{input_accum_hh}}
+ {%- endif %}
+{%- endif %}
+#
+# Specify how to name the array in the NetCDF file that PcpCombine
+# generates.
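+#
+# (Hedged example, not part of the original file: for the APCP field group
+# with fieldname_in_met_output assumed to be APCP and output_accum_hh = 03,
+# the setting below renders as "FCST_PCP_COMBINE_OUTPUT_NAME = APCP_03"
+# when run on forecasts.)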
+#
+# For accumulation variables (which is the only type of variable that we
+# run PcpCombine on), we add the accumulation period to the variable name
+# because this is how METplus normally sets names. This is because,
+# depending on the settings in the METplus configuration file, it is
+# possible for a single NetCDF output file to contain output for multiple
+# accumulations, so even though the "level" attribute of each accumulation
+# variable in the output file will contain the level (e.g. "A1" or "A3"),
+# the variable names for, say, the 1-hour and 3-hour accumulations would be
+# the same (e.g. both would be "APCP"), which is not allowed and/or would
+# cause overwriting of data. To avoid this, METplus includes the level
+# as part of the variable name, so we do the same here (even though in
+# our case, it is not required because there will only be one variable in
+# the output NetCDF file).
+#
+{%- if (input_field_group in ['APCP', 'ASNOW']) %}
+{{FCST_OR_OBS}}_PCP_COMBINE_OUTPUT_NAME = {{fieldname_in_met_output}}_{{output_accum_hh}}
+{%- else %}
+{{FCST_OR_OBS}}_PCP_COMBINE_OUTPUT_NAME = {{fieldname_in_met_output}}
+{%- endif %}
+#
+# Accumulation interval available in the input data.
+#
+{{FCST_OR_OBS}}_PCP_COMBINE_INPUT_ACCUMS = {{input_accum_hh}}
+#
+# Accumulation interval to generate in the output file.
+#
+{{FCST_OR_OBS}}_PCP_COMBINE_OUTPUT_ACCUM = {{output_accum_hh}}
+#
+# If the output NetCDF file already exists, specify whether or not to
+# skip the call to PcpCombine.
+#
+# In general, relaunching a task in the SRW App should recreate all the
+# output from that task regardless of whether or not that output already
+# exists. This is the case when running the PcpCombine task on forecasts.
+# Thus, for forecasts, we set the skip flag to False. However, it turns
+# out that when running PcpCombine on observations, it is necessary to
+# skip the call to PcpCombine (i.e. NOT recreate output files) because
+# in the SRW App's workflow, more than one cycle may want to create the
+# same output observation file. This can happen if the forecast periods
+# from two or more forecasts overlap, e.g. forecast 1 starts at 00Z of
+# day one and forecast 2 starts at 00Z of day 2, and the forecasts are
+# both 36 hours long, so the last 12 hours of forecast 1 overlap with the
+# first 12 hours of forecast 2. In this case, there will be two workflow
+# tasks that will try to create the observation APCP files for those 12
+# hours, and the files will be named exactly the same (because the output
+# naming convention in this conf file is based on valid times). Thus, in
+# order to avoid (1) duplicating work and (2) having two tasks accidentally
+# trying to write to the same file (which will cause at least one task to
+# fail), when running PcpCombine on observations we want to skip the call
+# if the output observation file(s) (for a given forecast hour) already
+# exist. For this reason, we set the skip flag to True for observations
+# but to False for forecasts.
+#
+{%- if FCST_OR_OBS == 'FCST' %}
+# Since this METplus configuration file takes forecast files as inputs,
+# we set this flag to False.
+#
+PCP_COMBINE_SKIP_IF_OUTPUT_EXISTS = False
+{%- elif FCST_OR_OBS == 'OBS' %}
+# Since this METplus configuration file takes observation files as inputs,
+# we set this flag to True.
+#
+PCP_COMBINE_SKIP_IF_OUTPUT_EXISTS = True
+{%- endif %}

+{%- if FCST_OR_OBS == 'FCST' %}
+#
+# Maximum forecast lead to allow when searching for model data to use in
+# PcpCombine. 
Default is a very large time (4000 years) so setting this +# to a valid maximum value can speed up execution time of numerous runs. +# +FCST_PCP_COMBINE_MAX_FORECAST = 2d +# +# Keep initialization time constant. +# +FCST_PCP_COMBINE_CONSTANT_INIT = True +{%- endif %} + +{%- if FCST_OR_OBS == 'OBS' %} +# +# Name to identify observation data in output. +# +OBTYPE = {{obtype}} +{%- endif %} +# +# Specify file type of input data. +# +{{FCST_OR_OBS}}_PCP_COMBINE_INPUT_DATATYPE = GRIB + +# End of [config] section and start of [dir] section. +[dir] +# +# Directory containing input files. +# +{{FCST_OR_OBS}}_PCP_COMBINE_INPUT_DIR = {{input_dir}} +# +# Directory in which to write output from PcpCombine. +# +# OUTPUT_BASE apparently has to be set to something; it cannot be left +# to its default value. But it is not explicitly used elsewhere in this +# configuration file. +# +OUTPUT_BASE = {{output_base}} +{{FCST_OR_OBS}}_PCP_COMBINE_OUTPUT_DIR = {{output_dir}} +# +# Directory for staging data. +# +STAGING_DIR = {{staging_dir}} + +# End of [dir] section and start of [filename_templates] section. +[filename_templates] +# +# Input file name template relative to {{FCST_OR_OBS}}_PCP_COMBINE_INPUT_DIR. +# +{{FCST_OR_OBS}}_PCP_COMBINE_INPUT_TEMPLATE = {{input_fn_template}} +# +# Output file name template relative to {{FCST_OR_OBS}}_PCP_COMBINE_OUTPUT_DIR. +# +{{FCST_OR_OBS}}_PCP_COMBINE_OUTPUT_TEMPLATE = {{output_fn_template}} diff --git a/parm/metplus/PcpCombine_fcst_APCP.conf b/parm/metplus/PcpCombine_fcst_APCP.conf deleted file mode 100644 index 64fe0b4fcf..0000000000 --- a/parm/metplus/PcpCombine_fcst_APCP.conf +++ /dev/null @@ -1,130 +0,0 @@ -# PcpCombine METplus Configuration for Forecasts - -[config] - -# List of applications (tools) to run. -PROCESS_LIST = PcpCombine - -# time looping - options are INIT, VALID, RETRO, and REALTIME -# If set to INIT or RETRO: -# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set -# If set to VALID or REALTIME: -# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set -LOOP_BY = INIT - -# Format of INIT_BEG and INIT_END using % items -# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. -# see www.strftime.org for more information -# %Y%m%d%H expands to YYYYMMDDHH -INIT_TIME_FMT = %Y%m%d%H - -# Start time for METplus run - must match INIT_TIME_FMT -INIT_BEG = {{cdate}} - -# End time for METplus run - must match INIT_TIME_FMT -INIT_END = {{cdate}} - -# Increment between METplus runs (in seconds if no units are specified). -# Must be >= 60 seconds. -INIT_INCREMENT = 3600 - -# List of forecast leads to process for each run time (init or valid) -# In hours if units are not specified -# If unset, defaults to 0 (don't loop through forecast leads) -LEAD_SEQ = {{fhr_list}} - -# Order of loops to process data - Options are times, processes -# Not relevant if only one item is in the PROCESS_LIST -# times = run all wrappers in the PROCESS_LIST for a single run time, then -# increment the run time and run all wrappers again until all times have -# been evaluated. -# processes = run the first wrapper in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST until all -# wrappers have been run -LOOP_ORDER = times -# -# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. -# -LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} -# -# Specify the name of the METplus log file. 
-# -LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} -# -# Specify the location and name of the final METplus conf file. -# -METPLUS_CONF = {FCST_PCP_COMBINE_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}} -# -# Run PcpCombine on forecast data but not observation (observation input -# files are not provided). -# -OBS_PCP_COMBINE_RUN = False -FCST_PCP_COMBINE_RUN = True -# -# Mode of PcpCombine to use (SUM, ADD, SUBTRACT, DERIVE, or CUSTOM). -# -FCST_PCP_COMBINE_METHOD = ADD -# -# Specify how to name the array in the NetCDF file that PcpCombine -# generates. -# -FCST_PCP_COMBINE_OUTPUT_NAME = {{fieldname_in_met_output}}_{{accum_hh}} -# -# Accumulation interval available in the forecast input data. -# -FCST_PCP_COMBINE_INPUT_ACCUMS = 01 -# -# Accumulation interval to generate in the output file. -# -FCST_PCP_COMBINE_OUTPUT_ACCUM = {{accum_hh}} -# -# If the "bucket" output NetCDF file already exists, DON'T skip the call -# to PcpCombine. -# -# In general, we want to recreate the files when the SRW App workflow -# task that uses this METplus configuration file is relaunched. -# -PCP_COMBINE_SKIP_IF_OUTPUT_EXISTS = False - -# Maximum forecast lead to allow when searching for model data to use in -# PcpCombine. Default is a very large time (4000 years) so setting this -# to a valid maximum value can speed up execution time of numerous runs. -FCST_PCP_COMBINE_MAX_FORECAST = 2d - -# Keep initialization time constant. -FCST_PCP_COMBINE_CONSTANT_INIT = True - -FCST_PCP_COMBINE_INPUT_DATATYPE = GRIB - -# End of [config] section and start of [dir] section. -[dir] -# -# Directory containing forecast input to PcpCombine. -# -FCST_PCP_COMBINE_INPUT_DIR = {{fcst_input_dir}} -# -# Directory in which to write output from PcpCombine. -# -# OUTPUT_BASE apparently has to be set to something; it cannot be left -# to its default value. But it is not explicitly used elsewhere in this -# configuration file. -# -OUTPUT_BASE = {{output_base}} -FCST_PCP_COMBINE_OUTPUT_DIR = {{output_dir}} -# -# Directory for staging data. -# -STAGING_DIR = {{staging_dir}} - -# End of [dir] section and start of [filename_templates] section. -[filename_templates] -# -# Template for forecast input to PcpCombine relative to -# FCST_PCP_COMBINE_INPUT_DIR. -# -FCST_PCP_COMBINE_INPUT_TEMPLATE = {{fcst_input_fn_template}} -# -# Template for output from PcpCombine relative to -# FCST_PCP_COMBINE_OUTPUT_DIR. -# -FCST_PCP_COMBINE_OUTPUT_TEMPLATE = {{output_fn_template}} diff --git a/parm/metplus/PcpCombine_fcst_ASNOW.conf b/parm/metplus/PcpCombine_fcst_ASNOW.conf deleted file mode 100644 index 91a6a70abb..0000000000 --- a/parm/metplus/PcpCombine_fcst_ASNOW.conf +++ /dev/null @@ -1,141 +0,0 @@ -# PcpCombine METplus Configuration for Forecasts - -[config] - -# List of applications (tools) to run. -PROCESS_LIST = PcpCombine - -# time looping - options are INIT, VALID, RETRO, and REALTIME -# If set to INIT or RETRO: -# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set -# If set to VALID or REALTIME: -# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set -LOOP_BY = INIT - -# Format of INIT_BEG and INIT_END using % items -# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. 
-# see www.strftime.org for more information -# %Y%m%d%H expands to YYYYMMDDHH -INIT_TIME_FMT = %Y%m%d%H - -# Start time for METplus run - must match INIT_TIME_FMT -INIT_BEG = {{cdate}} - -# End time for METplus run - must match INIT_TIME_FMT -INIT_END = {{cdate}} - -# Increment between METplus runs (in seconds if no units are specified). -# Must be >= 60 seconds. -INIT_INCREMENT = 3600 - -# List of forecast leads to process for each run time (init or valid) -# In hours if units are not specified -# If unset, defaults to 0 (don't loop through forecast leads) -LEAD_SEQ = {{fhr_list}} - -# Order of loops to process data - Options are times, processes -# Not relevant if only one item is in the PROCESS_LIST -# times = run all wrappers in the PROCESS_LIST for a single run time, then -# increment the run time and run all wrappers again until all times have -# been evaluated. -# processes = run the first wrapper in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST until all -# wrappers have been run -LOOP_ORDER = times -# -# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. -# -LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} -# -# Specify the name of the METplus log file. -# -LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} -# -# Specify the location and name of the final METplus conf file. -# -METPLUS_CONF = {FCST_PCP_COMBINE_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}} -# -# Run PcpCombine on forecast data but not observation (observation input -# files are not provided). -# -OBS_PCP_COMBINE_RUN = False -FCST_PCP_COMBINE_RUN = True -# -# Mode of PcpCombine to use (SUM, ADD, SUBTRACT, DERIVE, or CUSTOM). -# -FCST_PCP_COMBINE_METHOD = ADD -# -# Specify name of variable for Snowfall Accumulation. -# NOTE: Currently TSNOWP is used which is a constant-density estimate of snowfall accumulation. -# In future RRFS development, a GSL product with variable-density snowfall accumulation -# is planned for UPP. When that is included and turned on in post, this variable may be changed -# to ASNOW. -# -FCST_PCP_COMBINE_INPUT_NAMES=TSNOWP - -FCST_PCP_COMBINE_INPUT_LEVELS = A01 -# -# Specify how to name the array in the NetCDF file that PcpCombine -# generates. -# -FCST_PCP_COMBINE_OUTPUT_NAME = {{fieldname_in_met_output}}_{{accum_hh}} -# -# Accumulation interval available in the forecast input data. -# -FCST_PCP_COMBINE_INPUT_ACCUMS = 01 -# -# Accumulation interval to generate in the output file. -# -FCST_PCP_COMBINE_OUTPUT_ACCUM = {{accum_hh}} - -# If the "bucket" output NetCDF file already exists, DON'T skip the call -# to PcpCombine. -# -# In general, we want to recreate the files when the SRW App workflow -# task that uses this METplus configuration file is relaunched. -# -PCP_COMBINE_SKIP_IF_OUTPUT_EXISTS = False - -# Maximum forecast lead to allow when searching for model data to use in -# PcpCombine. Default is a very large time (4000 years) so setting this -# to a valid maximum value can speed up execution time of numerous runs. -FCST_PCP_COMBINE_MAX_FORECAST = 2d - -# Keep initialization time constant. -FCST_PCP_COMBINE_CONSTANT_INIT = True - -FCST_PCP_COMBINE_INPUT_DATATYPE = GRIB -#FCST_NATIVE_DATA_TYPE = GRIB - -# End of [config] section and start of [dir] section. -[dir] -# -# Directory containing forecast input to PcpCombine. -# -FCST_PCP_COMBINE_INPUT_DIR = {{fcst_input_dir}} -# -# Directory in which to write output from PcpCombine. 
-# -# OUTPUT_BASE apparently has to be set to something; it cannot be left -# to its default value. But it is not explicitly used elsewhere in this -# configuration file. -# -OUTPUT_BASE = {{output_base}} -FCST_PCP_COMBINE_OUTPUT_DIR = {{output_dir}} -# -# Directory for staging data. -# -STAGING_DIR = {{staging_dir}} - -# End of [dir] section and start of [filename_templates] section. -[filename_templates] -# -# Template for forecast input to PcpCombine relative to -# FCST_PCP_COMBINE_INPUT_DIR. -# -FCST_PCP_COMBINE_INPUT_TEMPLATE = {{fcst_input_fn_template}} -# -# Template for output from PCPCOMBINE relative to -# FCST_PCP_COMBINE_OUTPUT_DIR. -# -FCST_PCP_COMBINE_OUTPUT_TEMPLATE = {{output_fn_template}} diff --git a/parm/metplus/PcpCombine_obs_APCP.conf b/parm/metplus/PcpCombine_obs_APCP.conf deleted file mode 100644 index cea6809597..0000000000 --- a/parm/metplus/PcpCombine_obs_APCP.conf +++ /dev/null @@ -1,139 +0,0 @@ -# PcpCombine METplus Configuration for Observations - -[config] - -# List of applications (tools) to run. -PROCESS_LIST = PcpCombine - -# time looping - options are INIT, VALID, RETRO, and REALTIME -# If set to INIT or RETRO: -# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set -# If set to VALID or REALTIME: -# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set -LOOP_BY = INIT - -# Format of INIT_BEG and INIT_END using % items -# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. -# see www.strftime.org for more information -# %Y%m%d%H expands to YYYYMMDDHH -INIT_TIME_FMT = %Y%m%d%H - -# Start time for METplus run - must match INIT_TIME_FMT -INIT_BEG = {{cdate}} - -# End time for METplus run - must match INIT_TIME_FMT -INIT_END = {{cdate}} - -# Increment between METplus runs (in seconds if no units are specified). -# Must be >= 60 seconds. -INIT_INCREMENT = 3600 - -# List of forecast leads to process for each run time (init or valid) -# In hours if units are not specified -# If unset, defaults to 0 (don't loop through forecast leads) -LEAD_SEQ = {{fhr_list}} - -# Order of loops to process data - Options are times, processes -# Not relevant if only one item is in the PROCESS_LIST -# times = run all wrappers in the PROCESS_LIST for a single run time, then -# increment the run time and run all wrappers again until all times have -# been evaluated. -# processes = run the first wrapper in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST until all -# wrappers have been run -LOOP_ORDER = times -# -# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. -# -LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} -# -# Specify the name of the METplus log file. -# -LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} -# -# Specify the location and name of the final METplus conf file. -# -METPLUS_CONF = {OBS_PCP_COMBINE_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}} -# -# Run PcpCombine on observation data but not forecast (forecast input -# files are not provided). -# -OBS_PCP_COMBINE_RUN = True -FCST_PCP_COMBINE_RUN = False -# -# Mode of PcpCombine to use (SUM, ADD, SUBTRACT, DERIVE, or CUSTOM). -# -OBS_PCP_COMBINE_METHOD = ADD -# -# Specify how to name the array in the NetCDF file that PcpCombine -# generates. -# -OBS_PCP_COMBINE_OUTPUT_NAME = {{fieldname_in_met_output}}_{{accum_hh}} -# -# Accumulation interval available in the observation input data. -# -OBS_PCP_COMBINE_INPUT_ACCUMS = 01 -# -# Accumulation interval to generate in the output file. 
-# -OBS_PCP_COMBINE_OUTPUT_ACCUM = {{accum_hh}} -# -# If the "bucket" output NetCDF file already exists, skip the call to -# PcpCombine. -# -# In general, we want to recreate the files when the SRW App workflow -# task that uses this METplus configuration file is relaunched. In this -# case, however, it is necessary to skip the call to PcpCombine because -# in the SRW App's workflow, more than one cycle may want to create the -# same file. This can happen if the forecast periods from two or more -# forecasts overlap, e.g. forecast 1 starts at 00Z of day one and forecast -# 2 starts at 00Z of day 2, and the forecasts are both 36 hours long, so -# the last 12 hours of forecast 1 overlap with the first 12 hours of -# forecast 2. In this case, there will be two workflow tasks that will -# try to create the observation APCP files for those 12 hours, and the -# files will be named exactly the same (because the output naming convention -# in this conf file uses valid times). In order to (1) avoid duplicating -# work and (2) having two tasks accidentally trying to write to the same -# file (which will cause at least one task to fail), we do not call -# PcpCombine if the output file (for a given forecast hour) already -# exists. -# -PCP_COMBINE_SKIP_IF_OUTPUT_EXISTS = True -# -# Name to identify observation data in output. -# -OBTYPE = CCPA -OBS_PCP_COMBINE_INPUT_DATA_TYPE = GRIB - -# End of [config] section and start of [dir] section. -[dir] -# -# Directory containing observation input to PcpCombine. -# -OBS_PCP_COMBINE_INPUT_DIR = {{obs_input_dir}} -# -# Directory in which to write output from PcpCombine. -# -# OUTPUT_BASE apparently has to be set to something; it cannot be left -# to its default value. But it is not explicitly used elsewhere in this -# configuration file. -# -OUTPUT_BASE = {{output_base}} -OBS_PCP_COMBINE_OUTPUT_DIR = {{output_dir}} -# -# Directory for staging data. -# -STAGING_DIR = {{staging_dir}} - -# End of [dir] section and start of [filename_templates] section. -[filename_templates] -# -# Template for observation input to PcpCombine relative to -# OBS_PCP_COMBINE_INPUT_DIR. -# -OBS_PCP_COMBINE_INPUT_TEMPLATE = {{obs_input_fn_template}} -# -# Template for output from PcpCombine relative to -# OBS_PCP_COMBINE_OUTPUT_DIR. -# -OBS_PCP_COMBINE_OUTPUT_TEMPLATE = {{output_fn_template}} diff --git a/parm/metplus/PointStat_ADPSFC.conf b/parm/metplus/PointStat_ADPSFC.conf deleted file mode 100644 index 6d94e0bed9..0000000000 --- a/parm/metplus/PointStat_ADPSFC.conf +++ /dev/null @@ -1,378 +0,0 @@ -# PointStat METplus Configuration - -[config] - -# List of applications (tools) to run. -PROCESS_LIST = PointStat - -# time looping - options are INIT, VALID, RETRO, and REALTIME -# If set to INIT or RETRO: -# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set -# If set to VALID or REALTIME: -# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set -LOOP_BY = INIT - -# Format of INIT_BEG and INIT_END using % items -# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. -# see www.strftime.org for more information -# %Y%m%d%H expands to YYYYMMDDHH -INIT_TIME_FMT = %Y%m%d%H - -# Start time for METplus run - must match INIT_TIME_FMT -INIT_BEG = {{cdate}} - -# End time for METplus run - must match INIT_TIME_FMT -INIT_END = {{cdate}} - -# Increment between METplus runs (in seconds if no units are specified). -# Must be >= 60 seconds. 
-INIT_INCREMENT = 3600 - -# List of forecast leads to process for each run time (init or valid) -# In hours if units are not specified -# If unset, defaults to 0 (don't loop through forecast leads) -LEAD_SEQ = {{fhr_list}} -# -# Order of loops to process data - Options are times, processes -# Not relevant if only one item is in the PROCESS_LIST -# times = run all wrappers in the PROCESS_LIST for a single run time, then -# increment the run time and run all wrappers again until all times have -# been evaluated. -# processes = run the first wrapper in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST until all -# wrappers have been run -# -LOOP_ORDER = times -# -# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. -# -LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} -# -# Specify the name of the METplus log file. -# -LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} -# -# Specify the location and name of the final METplus conf file. -# -METPLUS_CONF = {POINT_STAT_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}} -# -# Location of MET configuration file to pass to PointStat. -# -# References PARM_BASE, which is the location of the parm directory -# corresponding to the ush directory of the run_metplus.py script that -# is called or the value of the environment variable METPLUS_PARM_BASE -# if set. -# -POINT_STAT_CONFIG_FILE = {PARM_BASE}/met_config/PointStatConfig_wrapped - -POINT_STAT_OBS_QUALITY_INC = 0, 1, 2, 3, 9, NA -#POINT_STAT_OBS_QUALITY_EXC = - -POINT_STAT_CLIMO_MEAN_TIME_INTERP_METHOD = NEAREST -#POINT_STAT_CLIMO_STDEV_TIME_INTERP_METHOD = - -#POINT_STAT_INTERP_VLD_THRESH = -#POINT_STAT_INTERP_SHAPE = -POINT_STAT_INTERP_TYPE_METHOD = BILIN -POINT_STAT_INTERP_TYPE_WIDTH = 2 - -POINT_STAT_OUTPUT_FLAG_FHO = STAT -POINT_STAT_OUTPUT_FLAG_CTC = STAT -POINT_STAT_OUTPUT_FLAG_CTS = STAT -#POINT_STAT_OUTPUT_FLAG_MCTC = -#POINT_STAT_OUTPUT_FLAG_MCTS = -POINT_STAT_OUTPUT_FLAG_CNT = STAT -POINT_STAT_OUTPUT_FLAG_SL1L2 = STAT -#POINT_STAT_OUTPUT_FLAG_SAL1L2 = -POINT_STAT_OUTPUT_FLAG_VL1L2 = STAT -#POINT_STAT_OUTPUT_FLAG_VAL1L2 = -POINT_STAT_OUTPUT_FLAG_VCNT = STAT -#POINT_STAT_OUTPUT_FLAG_PCT = -#POINT_STAT_OUTPUT_FLAG_PSTD = -#POINT_STAT_OUTPUT_FLAG_PJC = -#POINT_STAT_OUTPUT_FLAG_PRC = -#POINT_STAT_OUTPUT_FLAG_ECNT = -#POINT_STAT_OUTPUT_FLAG_RPS = -#POINT_STAT_OUTPUT_FLAG_ECLV = -#POINT_STAT_OUTPUT_FLAG_MPR = -#POINT_STAT_OUTPUT_FLAG_ORANK = - -POINT_STAT_CLIMO_CDF_BINS = 1 -#POINT_STAT_CLIMO_CDF_CENTER_BINS = False -#POINT_STAT_CLIMO_CDF_WRITE_BINS = True - -#POINT_STAT_HSS_EC_VALUE = - -# -# Observation data time window(s). -# -OBS_WINDOW_BEGIN = -1799 -OBS_WINDOW_END = 1800 -OBS_POINT_STAT_WINDOW_BEGIN = {OBS_WINDOW_BEGIN} -OBS_POINT_STAT_WINDOW_END = {OBS_WINDOW_END} - -# Optional list of offsets to look for point observation data -POINT_STAT_OFFSETS = 0 -# -# Name to identify model (forecast) data in output. -# -# The variable MODEL is recorded in the stat files, and the data in -# these files is then plotted (e.g. using METViewer). Here, we add a -# suffix to MODEL that identifies the forecast ensemble member. This -# makes it easier to identify each curve. -# -MODEL = {{vx_fcst_model_name}}_{{ensmem_name}} -# -# Name to identify observation data in output. -# -OBTYPE = {{obtype}} -# -# Value to enter under the DESC column in the output stat file. Here, -# we store the value of the original lead in this column, i.e. 
the lead -# with zero corresponding to the actual start time of the forecast (which -# is (cdate - time_lag)), not to cdate. This is just the lead in -# LEAD_SEQ with the time lag (time_lag) of the current forecast member -# added on. -# -# Uncomment this line only after upgrading to METplus 5.x. -#POINT_STAT_DESC = {lead?fmt=%H%M%S?shift={{time_lag}}} -POINT_STAT_DESC = NA - -# Regrid to specified grid. Indicate NONE if no regridding, or the grid id -# (e.g. G212) -POINT_STAT_REGRID_TO_GRID = NONE -POINT_STAT_REGRID_METHOD = BILIN -POINT_STAT_REGRID_WIDTH = 2 - -POINT_STAT_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE} - -# sets the -obs_valid_beg command line argument (optional) -# not used for this example -#POINT_STAT_OBS_VALID_BEG = {valid?fmt=%Y%m%d_%H} - -# sets the -obs_valid_end command line argument (optional) -# not used for this example -#POINT_STAT_OBS_VALID_END = {valid?fmt=%Y%m%d_%H} - -# Verification Masking regions -# Indicate which grid and polygon masking region, if applicable -POINT_STAT_GRID = - -# List of full path to poly masking files. NOTE: Only short lists of poly -# files work (those that fit on one line), a long list will result in an -# environment variable that is too long, resulting in an error. For long -# lists of poly masking files (i.e. all the mask files in the NCEP_mask -# directory), define these in the METplus PointStat configuration file. -POINT_STAT_POLY = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly -POINT_STAT_STATION_ID = - -# Message types, if all message types are to be returned, leave this empty, -# otherwise indicate the message types of interest. -POINT_STAT_MESSAGE_TYPE = {{fieldname_in_met_filedir_names}} - -# set to True to run PointStat once for each name/level combination -# set to False to run PointStat once per run time including all fields -POINT_STAT_ONCE_PER_FIELD = False -# -# List of forecast and corresponding observation fields to process. -# -# Note on use of set_attr_lead and ensemble member time-lagging: -# ------------------------------------------------------------- -# The set_attr_lead parameter appearing below in [FCST|OBS]_VAR_OPTIONS -# specifies the lead to use both in naming of the output .stat and .nc -# files and for setting the lead values contained in those files. This -# option causes MET/METplus to use the lead values in the variable LEAD_SEQ -# set above, which are the same for all ensemble forecast members (i.e. -# regardless of whether members are time lagged with respect to the -# nominal cycle date specified by cdate). If set_attr_lead were not -# specified as below, then MET/METplus would get the lead from the input -# forecast file, and that would in general differ from one ensemble member -# to the next depending on whether the member is time-lagged. That would -# cause confusion, so here, we always use lead values with zero lead -# corresponding to the nominal cdate. 
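The lead-time bookkeeping described in the comment above can be sketched outside of MET. The Python below is illustrative only (the cycle date and lag are made-up values, and none of these names exist in the SRW App or METplus): it contrasts the nominal lead from LEAD_SEQ with the lead a time-lagged member would natively report, which is what set_attr_lead overrides.

from datetime import datetime, timedelta

# Hypothetical nominal cycle (cdate) and a member lagged by 6 hours,
# i.e. a member that actually initialized at cdate - time_lag.
cdate = datetime(2024, 5, 1, 0)
time_lag = timedelta(hours=6)
member_init = cdate - time_lag

for nominal_lead_h in (0, 12, 36):  # entries of LEAD_SEQ, in hours
    valid = cdate + timedelta(hours=nominal_lead_h)
    # Lead stored in the member's own output files (nominal lead + lag):
    native_lead_h = int((valid - member_init).total_seconds()) // 3600
    print(f"valid {valid:%Y%m%d%H}: nominal lead {nominal_lead_h:3d} h, "
          f"native lead {native_lead_h:3d} h")

With set_attr_lead = "{lead?fmt=%H%M%S}", every member is labeled with the nominal lead (the first value above), so statistics from lagged and unlagged members line up under the same lead in the .stat files.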
-# -FCST_VAR1_NAME = TMP -FCST_VAR1_LEVELS = Z2 -FCST_VAR1_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}"; -OBS_VAR1_NAME = TMP -OBS_VAR1_LEVELS = Z2 - -FCST_VAR2_NAME = DPT -FCST_VAR2_LEVELS = Z2 -FCST_VAR2_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}"; -OBS_VAR2_NAME = DPT -OBS_VAR2_LEVELS = Z2 - -FCST_VAR3_NAME = RH -FCST_VAR3_LEVELS = Z2 -FCST_VAR3_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}"; -OBS_VAR3_NAME = RH -OBS_VAR3_LEVELS = Z2 - -FCST_VAR4_NAME = UGRD -FCST_VAR4_LEVELS = Z10 -FCST_VAR4_THRESH = ge2.572 -FCST_VAR4_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}"; -OBS_VAR4_NAME = UGRD -OBS_VAR4_LEVELS = Z10 -OBS_VAR4_THRESH = ge2.572 - -FCST_VAR5_NAME = VGRD -FCST_VAR5_LEVELS = Z10 -FCST_VAR5_THRESH = ge2.572 -FCST_VAR5_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}"; -OBS_VAR5_NAME = VGRD -OBS_VAR5_LEVELS = Z10 -OBS_VAR5_THRESH = ge2.572 - -FCST_VAR6_NAME = WIND -FCST_VAR6_LEVELS = Z10 -FCST_VAR6_THRESH = ge2.572, ge2.572&<5.144, ge5.144, ge10.288, ge15.433 -FCST_VAR6_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}"; - GRIB2_pdt = 0; ;; Derive instantaneous 10-m wind from U/V components, overriding max 10-m wind. -OBS_VAR6_NAME = WIND -OBS_VAR6_LEVELS = Z10 -OBS_VAR6_THRESH = ge2.572, ge2.572&<5.144, ge5.144, ge10.288, ge15.433 -OBS_VAR6_OPTIONS = GRIB2_pdt = 0; ;; Derive instantaneous 10-m wind from U/V components, overriding max 10-m wind. - -FCST_VAR7_NAME = PRMSL -FCST_VAR7_LEVELS = Z0 -FCST_VAR7_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}"; -OBS_VAR7_NAME = PRMSL -OBS_VAR7_LEVELS = Z0 - -FCST_VAR8_NAME = TCDC -FCST_VAR8_LEVELS = L0 -FCST_VAR8_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}"; - GRIB_lvl_typ = 200; - GRIB2_ipdtmpl_index=[27]; - GRIB2_ipdtmpl_val=[255]; - interp = { type = [ { method = NEAREST; width = 1; } ]; } -OBS_VAR8_NAME = TCDC -OBS_VAR8_LEVELS = L0 - -FCST_VAR9_NAME = VIS -FCST_VAR9_LEVELS = L0 -FCST_VAR9_THRESH = lt805, lt1609, lt4828, lt8045, ge8045, lt16090 -FCST_VAR9_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}"; - censor_thresh = [>16090]; - censor_val = [16090]; - interp = { type = [ { method = NEAREST; width = 1; } ]; } -OBS_VAR9_NAME = VIS -OBS_VAR9_LEVELS = L0 -OBS_VAR9_THRESH = lt805, lt1609, lt4828, lt8045, ge8045, lt16090 -OBS_VAR9_OPTIONS = censor_thresh = [>16090]; - censor_val = [16090]; - interp = { type = [ { method = NEAREST; width = 1; } ]; } - -FCST_VAR10_NAME = GUST -FCST_VAR10_LEVELS = Z0 -FCST_VAR10_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}"; -OBS_VAR10_NAME = GUST -OBS_VAR10_LEVELS = Z0 - -FCST_VAR11_NAME = HGT -FCST_VAR11_LEVELS = L0 -FCST_VAR11_THRESH = lt152, lt305, lt914, lt1520, lt3040, ge914 -FCST_VAR11_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}"; - GRIB_lvl_typ = 215; - desc = "CEILING"; -OBS_VAR11_NAME = CEILING -OBS_VAR11_LEVELS = L0 -OBS_VAR11_THRESH = lt152, lt305, lt914, lt1520, lt3040, ge914 -OBS_VAR11_OPTIONS = GRIB_lvl_typ = 215; - interp = { type = [ { method = NEAREST; width = 1; } ]; } - -FCST_VAR12_NAME = SPFH -FCST_VAR12_LEVELS = Z2 -OBS_VAR12_NAME = SPFH -OBS_VAR12_LEVELS = Z2 - -FCST_VAR13_NAME = CRAIN -FCST_VAR13_LEVELS = L0 -FCST_VAR13_THRESH = ge1.0 -OBS_VAR13_NAME = PRWE -OBS_VAR13_LEVELS = Z0 -OBS_VAR13_THRESH = ge161&&le163 - -FCST_VAR14_NAME = CSNOW -FCST_VAR14_LEVELS = L0 -FCST_VAR14_THRESH = ge1.0 -OBS_VAR14_NAME = PRWE -OBS_VAR14_LEVELS = Z0 -OBS_VAR14_THRESH = ge171&&le173 - -FCST_VAR15_NAME = CFRZR -FCST_VAR15_LEVELS = L0 -FCST_VAR15_THRESH = ge1.0 -OBS_VAR15_NAME = PRWE -OBS_VAR15_LEVELS = Z0 -OBS_VAR15_THRESH = ge164&&le166 - -FCST_VAR16_NAME = CICEP -FCST_VAR16_LEVELS = L0 -FCST_VAR16_THRESH = 
ge1.0 -OBS_VAR16_NAME = PRWE -OBS_VAR16_LEVELS = Z0 -OBS_VAR16_THRESH = ge174&&le176 - -# End of [config] section and start of [dir] section. -[dir] -# -# Directory containing observation input to PointStat. -# -OBS_POINT_STAT_INPUT_DIR = {{obs_input_dir}} -# -# Directory containing forecast input to PointStat. -# -FCST_POINT_STAT_INPUT_DIR = {{fcst_input_dir}} -# -# Directory containing climatology mean input to PointStat. Not used in -# this example. -# -POINT_STAT_CLIMO_MEAN_INPUT_DIR = -# -# Directory containing climatology mean input to PointStat. Not used in -# this example. -# -POINT_STAT_CLIMO_STDEV_INPUT_DIR = -# -# Directory in which to write output from PointStat. -# -# OUTPUT_BASE apparently has to be set to something; it cannot be left -# to its default value. But it is not explicitly used elsewhere in this -# configuration file. -# -OUTPUT_BASE = {{output_base}} -POINT_STAT_OUTPUT_DIR = {{output_dir}} -# -# Directory for staging data. -# -STAGING_DIR = {{staging_dir}} - -# End of [dir] section and start of [filename_templates] section. -[filename_templates] -# -# Template for observation input to PointStat relative to -# OBS_POINT_STAT_INPUT_DIR. -# -OBS_POINT_STAT_INPUT_TEMPLATE = {{obs_input_fn_template}} -# -# Template for forecast input to PointStat relative to -# FCST_POINT_STAT_INPUT_DIR. -# -FCST_POINT_STAT_INPUT_TEMPLATE = {{fcst_input_fn_template}} -# -# Template for output from PointStat relative to POINT_STAT_OUTPUT_DIR. -# -POINT_STAT_OUTPUT_TEMPLATE = -# -# Template for climatology input to PointStat relative to -# POINT_STAT_CLIMO_MEAN_INPUT_DIR. Not used in this example. -# -POINT_STAT_CLIMO_MEAN_INPUT_TEMPLATE = -# -# Template for climatology input to PointStat relative to -# POINT_STAT_CLIMO_STDEV_INPUT_DIR. Not used in this example. -# -POINT_STAT_CLIMO_STDEV_INPUT_TEMPLATE = diff --git a/parm/metplus/PointStat_ADPUPA.conf b/parm/metplus/PointStat_ADPUPA.conf deleted file mode 100644 index 519767a51e..0000000000 --- a/parm/metplus/PointStat_ADPUPA.conf +++ /dev/null @@ -1,343 +0,0 @@ -# PointStat METplus Configuration - -[config] - -# List of applications (tools) to run. -PROCESS_LIST = PointStat - -# time looping - options are INIT, VALID, RETRO, and REALTIME -# If set to INIT or RETRO: -# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set -# If set to VALID or REALTIME: -# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set -LOOP_BY = INIT - -# Format of INIT_BEG and INIT_END using % items -# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. -# see www.strftime.org for more information -# %Y%m%d%H expands to YYYYMMDDHH -INIT_TIME_FMT = %Y%m%d%H - -# Start time for METplus run - must match INIT_TIME_FMT -INIT_BEG = {{cdate}} - -# End time for METplus run - must match INIT_TIME_FMT -INIT_END = {{cdate}} - -# Increment between METplus runs (in seconds if no units are specified). -# Must be >= 60 seconds. -INIT_INCREMENT = 3600 - -# List of forecast leads to process for each run time (init or valid) -# In hours if units are not specified -# If unset, defaults to 0 (don't loop through forecast leads) -LEAD_SEQ = {{fhr_list}} -# -# Order of loops to process data - Options are times, processes -# Not relevant if only one item is in the PROCESS_LIST -# times = run all wrappers in the PROCESS_LIST for a single run time, then -# increment the run time and run all wrappers again until all times have -# been evaluated. 
-# processes = run the first wrapper in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST until all -# wrappers have been run -# -LOOP_ORDER = times -# -# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. -# -LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} -# -# Specify the name of the METplus log file. -# -LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} -# -# Specify the location and name of the final METplus conf file. -# -METPLUS_CONF = {POINT_STAT_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}} -# -# Location of MET configuration file to pass to PointStat. -# -# References PARM_BASE, which is the location of the parm directory -# corresponding to the ush directory of the run_metplus.py script that -# is called or the value of the environment variable METPLUS_PARM_BASE -# if set. -# -POINT_STAT_CONFIG_FILE = {PARM_BASE}/met_config/PointStatConfig_wrapped - -POINT_STAT_OBS_QUALITY_INC = 0, 1, 2, 3, 9, NA -#POINT_STAT_OBS_QUALITY_EXC = - -POINT_STAT_CLIMO_MEAN_TIME_INTERP_METHOD = NEAREST -#POINT_STAT_CLIMO_STDEV_TIME_INTERP_METHOD = - -#POINT_STAT_INTERP_VLD_THRESH = -#POINT_STAT_INTERP_SHAPE = -POINT_STAT_INTERP_TYPE_METHOD = BILIN -POINT_STAT_INTERP_TYPE_WIDTH = 2 - -POINT_STAT_OUTPUT_FLAG_FHO = STAT -POINT_STAT_OUTPUT_FLAG_CTC = STAT -POINT_STAT_OUTPUT_FLAG_CTS = STAT -#POINT_STAT_OUTPUT_FLAG_MCTC = -#POINT_STAT_OUTPUT_FLAG_MCTS = -POINT_STAT_OUTPUT_FLAG_CNT = STAT -POINT_STAT_OUTPUT_FLAG_SL1L2 = STAT -#POINT_STAT_OUTPUT_FLAG_SAL1L2 = -POINT_STAT_OUTPUT_FLAG_VL1L2 = STAT -#POINT_STAT_OUTPUT_FLAG_VAL1L2 = -POINT_STAT_OUTPUT_FLAG_VCNT = STAT -#POINT_STAT_OUTPUT_FLAG_PCT = -#POINT_STAT_OUTPUT_FLAG_PSTD = -#POINT_STAT_OUTPUT_FLAG_PJC = -#POINT_STAT_OUTPUT_FLAG_PRC = -#POINT_STAT_OUTPUT_FLAG_ECNT = -#POINT_STAT_OUTPUT_FLAG_RPS = -#POINT_STAT_OUTPUT_FLAG_ECLV = -#POINT_STAT_OUTPUT_FLAG_MPR = -#POINT_STAT_OUTPUT_FLAG_ORANK = - -POINT_STAT_CLIMO_CDF_BINS = 1 -#POINT_STAT_CLIMO_CDF_CENTER_BINS = False -#POINT_STAT_CLIMO_CDF_WRITE_BINS = True - -#POINT_STAT_HSS_EC_VALUE = - -# -# Observation data time window(s). -# -OBS_WINDOW_BEGIN = -1799 -OBS_WINDOW_END = 1800 -OBS_POINT_STAT_WINDOW_BEGIN = {OBS_WINDOW_BEGIN} -OBS_POINT_STAT_WINDOW_END = {OBS_WINDOW_END} - -# Optional list of offsets to look for point observation data -POINT_STAT_OFFSETS = 0 -# -# Name to identify model (forecast) data in output. -# -# The variable MODEL is recorded in the stat files, and the data in -# these files is then plotted (e.g. using METViewer). Here, we add a -# suffix to MODEL that identifies the forecast ensemble member. This -# makes it easier to identify each curve. -# -MODEL = {{vx_fcst_model_name}}_{{ensmem_name}} -# -# Name to identify observation data in output. -# -OBTYPE = {{obtype}} -# -# Value to enter under the DESC column in the output stat file. Here, -# we store the value of the original lead in this column, i.e. the lead -# with zero corresponding to the actual start time of the forecast (which -# is (cdate - time_lag)), not to cdate. This is just the lead in -# LEAD_SEQ with the time lag (time_lag) of the current forecast member -# added on. -# -# Uncomment this line only after upgrading to METplus 5.x. -#POINT_STAT_DESC = {lead?fmt=%H%M%S?shift={{time_lag}}} -POINT_STAT_DESC = NA - -# Regrid to specified grid. Indicate NONE if no regridding, or the grid id -# (e.g. 
G212) -POINT_STAT_REGRID_TO_GRID = NONE -POINT_STAT_REGRID_METHOD = BILIN -POINT_STAT_REGRID_WIDTH = 2 - -POINT_STAT_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE} - -# sets the -obs_valid_beg command line argument (optional) -# not used for this example -#POINT_STAT_OBS_VALID_BEG = {valid?fmt=%Y%m%d_%H} - -# sets the -obs_valid_end command line argument (optional) -# not used for this example -#POINT_STAT_OBS_VALID_END = {valid?fmt=%Y%m%d_%H} - -# Verification Masking regions -# Indicate which grid and polygon masking region, if applicable -POINT_STAT_GRID = - -# List of full path to poly masking files. NOTE: Only short lists of poly -# files work (those that fit on one line), a long list will result in an -# environment variable that is too long, resulting in an error. For long -# lists of poly masking files (i.e. all the mask files in the NCEP_mask -# directory), define these in the METplus PointStat configuration file. -POINT_STAT_POLY = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly -POINT_STAT_STATION_ID = - -# Message types, if all message types are to be returned, leave this empty, -# otherwise indicate the message types of interest. -POINT_STAT_MESSAGE_TYPE = {{fieldname_in_met_filedir_names}} - -# set to True to run PointStat once for each name/level combination -# set to False to run PointStat once per run time including all fields -POINT_STAT_ONCE_PER_FIELD = False -# -# List of forecast and corresponding observation fields to process. -# -# Note on use of set_attr_lead and ensemble member time-lagging: -# ------------------------------------------------------------- -# The set_attr_lead parameter appearing below in [FCST|OBS]_VAR_OPTIONS -# specifies the lead to use both in naming of the output .stat and .nc -# files and for setting the lead values contained in those files. This -# option causes MET/METplus to use the lead values in the variable LEAD_SEQ -# set above, which are the same for all ensemble forecast members (i.e. -# regardless of whether members are time lagged with respect to the -# nominal cycle date specified by cdate). If set_attr_lead were not -# specified as below, then MET/METplus would get the lead from the input -# forecast file, and that would in general differ from one ensemble member -# to the next depending on whether the member is time-lagged. That would -# cause confusion, so here, we always use lead values with zero lead -# corresponding to the nominal cdate. 
-# -FCST_VAR1_NAME = TMP -FCST_VAR1_LEVELS = P1000, P925, P850, P700, P500, P400, P300, P250, P200, P150, P100, P50, P20, P10 -FCST_VAR1_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}"; -OBS_VAR1_NAME = TMP -OBS_VAR1_LEVELS = P1000, P925, P850, P700, P500, P400, P300, P250, P200, P150, P100, P50, P20, P10 - -FCST_VAR2_NAME = RH -FCST_VAR2_LEVELS = P1000, P925, P850, P700, P500, P400, P300, P250 -FCST_VAR2_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}"; -OBS_VAR2_NAME = RH -OBS_VAR2_LEVELS = P1000, P925, P850, P700, P500, P400, P300, P250 - -FCST_VAR3_NAME = DPT -FCST_VAR3_LEVELS = P1000, P925, P850, P700, P500, P400, P300 -FCST_VAR3_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}"; -OBS_VAR3_NAME = DPT -OBS_VAR3_LEVELS = P1000, P925, P850, P700, P500, P400, P300 - -FCST_VAR4_NAME = UGRD -FCST_VAR4_LEVELS = P1000, P925, P850, P700, P500, P400, P300, P250, P200, P150, P100, P50, P20, P10 -FCST_VAR4_THRESH = ge2.572 -FCST_VAR4_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}"; -OBS_VAR4_NAME = UGRD -OBS_VAR4_LEVELS = P1000, P925, P850, P700, P500, P400, P300, P250, P200, P150, P100, P50, P20, P10 -OBS_VAR4_THRESH = ge2.572 - -FCST_VAR5_NAME = VGRD -FCST_VAR5_LEVELS = P1000, P925, P850, P700, P500, P400, P300, P250, P200, P150, P100, P50, P20, P10 -FCST_VAR5_THRESH = ge2.572 -FCST_VAR5_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}"; -OBS_VAR5_NAME = VGRD -OBS_VAR5_LEVELS = P1000, P925, P850, P700, P500, P400, P300, P250, P200, P150, P100, P50, P20, P10 -OBS_VAR5_THRESH = ge2.572 - -FCST_VAR6_NAME = WIND -FCST_VAR6_LEVELS = P1000, P925, P850, P700, P500, P400, P300, P250, P200, P150, P100, P50, P20, P10 -FCST_VAR6_THRESH = ge2.572, ge2.572&<5.144, ge5.144, ge10.288, ge15.433, ge20.577, ge25.722 -FCST_VAR6_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}"; -OBS_VAR6_NAME = WIND -OBS_VAR6_LEVELS = P1000, P925, P850, P700, P500, P400, P300, P250, P200, P150, P100, P50, P20, P10 -OBS_VAR6_THRESH = ge2.572, ge2.572&<5.144, ge5.144, ge10.288, ge15.433, ge20.577, ge25.722 - -FCST_VAR7_NAME = HGT -FCST_VAR7_LEVELS = P1000, P950, P925, P850, P700, P500, P400, P300, P250, P200, P150, P100, P50, P20, P10 -FCST_VAR7_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}"; -OBS_VAR7_NAME = HGT -OBS_VAR7_LEVELS = P1000, P950, P925, P850, P700, P500, P400, P300, P250, P200, P150, P100, P50, P20, P10 - -FCST_VAR8_NAME = SPFH -FCST_VAR8_LEVELS = P1000, P850, P700, P500, P400, P300 -FCST_VAR8_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}"; -OBS_VAR8_NAME = SPFH -OBS_VAR8_LEVELS = P1000, P850, P700, P500, P400, P300 - -FCST_VAR9_NAME = CAPE -FCST_VAR9_LEVELS = L0 -FCST_VAR9_THRESH = gt500, gt1000, gt1500, gt2000, gt3000, gt4000 -FCST_VAR9_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}"; - cnt_thresh = [ >0 ]; -OBS_VAR9_NAME = CAPE -OBS_VAR9_LEVELS = L0-100000 -OBS_VAR9_THRESH = gt500, gt1000, gt1500, gt2000, gt3000, gt4000 -OBS_VAR9_OPTIONS = cnt_thresh = [ >0 ]; - cnt_logic = UNION; - -FCST_VAR10_NAME = HPBL -FCST_VAR10_LEVELS = Z0 -FCST_VAR10_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}"; -OBS_VAR10_NAME = PBL -OBS_VAR10_LEVELS = L0 -OBS_VAR10_OPTIONS = desc = "TKE"; - -FCST_VAR11_NAME = HGT -FCST_VAR11_LEVELS = L0 -FCST_VAR11_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}"; - GRIB_lvl_typ = 220; -OBS_VAR11_NAME = PBL -OBS_VAR11_LEVELS = L0 -OBS_VAR11_OPTIONS = desc = "RI"; - -FCST_VAR12_NAME = CAPE -FCST_VAR12_LEVELS = L0-90 -FCST_VAR12_THRESH = gt500, gt1000, gt1500, gt2000, gt3000, gt4000 -FCST_VAR12_OPTIONS = cnt_thresh = [ >0 ]; -OBS_VAR12_NAME = MLCAPE -OBS_VAR12_LEVELS = L0 -OBS_VAR12_THRESH = gt500, gt1000, gt1500, gt2000, gt3000, 
gt4000 -OBS_VAR12_OPTIONS = cnt_thresh = [ >0 ]; - cnt_logic = UNION; - -# End of [config] section and start of [dir] section. -[dir] -# -# Directory containing observation input to PointStat. -# -OBS_POINT_STAT_INPUT_DIR = {{obs_input_dir}} -# -# Directory containing forecast input to PointStat. -# -FCST_POINT_STAT_INPUT_DIR = {{fcst_input_dir}} -# -# Directory containing climatology mean input to PointStat. Not used in -# this example. -# -POINT_STAT_CLIMO_MEAN_INPUT_DIR = -# -# Directory containing climatology mean input to PointStat. Not used in -# this example. -# -POINT_STAT_CLIMO_STDEV_INPUT_DIR = -# -# Directory in which to write output from PointStat. -# -# OUTPUT_BASE apparently has to be set to something; it cannot be left -# to its default value. But it is not explicitly used elsewhere in this -# configuration file. -# -OUTPUT_BASE = {{output_base}} -POINT_STAT_OUTPUT_DIR = {{output_dir}} -# -# Directory for staging data. -# -STAGING_DIR = {{staging_dir}} - -# End of [dir] section and start of [filename_templates] section. -[filename_templates] -# -# Template for observation input to PointStat relative to -# OBS_POINT_STAT_INPUT_DIR. -# -OBS_POINT_STAT_INPUT_TEMPLATE = {{obs_input_fn_template}} -# -# Template for forecast input to PointStat relative to -# FCST_POINT_STAT_INPUT_DIR. -# -FCST_POINT_STAT_INPUT_TEMPLATE = {{fcst_input_fn_template}} -# -# Template for output from PointStat relative to POINT_STAT_OUTPUT_DIR. -# -POINT_STAT_OUTPUT_TEMPLATE = -# -# Template for climatology input to PointStat relative to -# POINT_STAT_CLIMO_MEAN_INPUT_DIR. Not used in this example. -# -POINT_STAT_CLIMO_MEAN_INPUT_TEMPLATE = -# -# Template for climatology input to PointStat relative to -# POINT_STAT_CLIMO_STDEV_INPUT_DIR. Not used in this example. -# -POINT_STAT_CLIMO_STDEV_INPUT_TEMPLATE = diff --git a/parm/metplus/PointStat_ensmean.conf b/parm/metplus/PointStat_ensmean.conf new file mode 100644 index 0000000000..fc9ccec85b --- /dev/null +++ b/parm/metplus/PointStat_ensmean.conf @@ -0,0 +1,564 @@ +# Ensemble mean {{MetplusToolName}} METplus Configuration + +[config] + +# List of applications (tools) to run. +PROCESS_LIST = {{MetplusToolName}} + +# time looping - options are INIT, VALID, RETRO, and REALTIME +# If set to INIT or RETRO: +# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set +# If set to VALID or REALTIME: +# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set +LOOP_BY = INIT + +# Format of INIT_BEG and INIT_END using % items +# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. +# see www.strftime.org for more information +# %Y%m%d%H expands to YYYYMMDDHH +INIT_TIME_FMT = %Y%m%d%H + +# Start time for METplus run - must match INIT_TIME_FMT +INIT_BEG = {{cdate}} + +# End time for METplus run - must match INIT_TIME_FMT +INIT_END = {{cdate}} + +# Increment between METplus runs (in seconds if no units are specified). +# Must be >= 60 seconds. +INIT_INCREMENT = 3600 + +# List of forecast leads to process for each run time (init or valid) +# In hours if units are not specified +# If unset, defaults to 0 (don't loop through forecast leads) +LEAD_SEQ = {{vx_leadhr_list}} +# +# Order of loops to process data - Options are times, processes +# Not relevant if only one item is in the PROCESS_LIST +# times = run all wrappers in the PROCESS_LIST for a single run time, then +# increment the run time and run all wrappers again until all times have +# been evaluated. 
+# processes = run the first wrapper in the PROCESS_LIST for all times +# specified, then repeat for the next item in the PROCESS_LIST until all +# wrappers have been run +# +LOOP_ORDER = times +# +# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. +# +LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} +# +# Specify the name of the METplus log file. +# +LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} +# +# Specify the location and name of the final METplus conf file. +# +METPLUS_CONF = {{ '{' ~ METPLUS_TOOL_NAME ~ '_OUTPUT_DIR}' }}/metplus_final.{{metplus_config_fn}} +# +# Location of MET configuration file to pass to {{MetplusToolName}}. +# +# References PARM_BASE, which is the location of the parm directory +# corresponding to the ush directory of the run_metplus.py script that +# is called or the value of the environment variable METPLUS_PARM_BASE +# if set. +# +{{METPLUS_TOOL_NAME}}_CONFIG_FILE = {PARM_BASE}/met_config/{{MetplusToolName}}Config_wrapped + +{{METPLUS_TOOL_NAME}}_OBS_QUALITY_INC = 0, 1, 2, 3, 9, NA +#{{METPLUS_TOOL_NAME}}_OBS_QUALITY_EXC = + +{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_TIME_INTERP_METHOD = NEAREST +#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_TIME_INTERP_METHOD = + +#{{METPLUS_TOOL_NAME}}_INTERP_VLD_THRESH = +#{{METPLUS_TOOL_NAME}}_INTERP_SHAPE = +{{METPLUS_TOOL_NAME}}_INTERP_TYPE_METHOD = BILIN +{{METPLUS_TOOL_NAME}}_INTERP_TYPE_WIDTH = 2 + +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_FHO = +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_CTC = +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_CTS = +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_MCTC = +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_MCTS = +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_CNT = STAT +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_SL1L2 = STAT +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_SAL1L2 = +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_VL1L2 = STAT +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_VAL1L2 = +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_VCNT = STAT +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_PCT = +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_PSTD = +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_PJC = +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_PRC = +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_ECNT = +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_RPS = +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_ECLV = +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_MPR = +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_ORANK = + +{{METPLUS_TOOL_NAME}}_CLIMO_CDF_BINS = 1 +#{{METPLUS_TOOL_NAME}}_CLIMO_CDF_CENTER_BINS = False +#{{METPLUS_TOOL_NAME}}_CLIMO_CDF_WRITE_BINS = True + +#{{METPLUS_TOOL_NAME}}_HSS_EC_VALUE = + +# +# Observation data time window(s). +# +OBS_WINDOW_BEGIN = -1799 +OBS_WINDOW_END = 1800 +OBS_{{METPLUS_TOOL_NAME}}_WINDOW_BEGIN = {OBS_WINDOW_BEGIN} +OBS_{{METPLUS_TOOL_NAME}}_WINDOW_END = {OBS_WINDOW_END} + +# Optional list of offsets to look for point observation data +{{METPLUS_TOOL_NAME}}_OFFSETS = 0 +# +# Name to identify model (forecast) data in output. +# +# The variable MODEL is recorded in the stat files, and the data in +# these files is then plotted (e.g. using METViewer). Here, we add a +# suffix to MODEL that identifies the data as that for the ensemble +# mean. This makes it easier to identify each curve. +# +MODEL = {{vx_fcst_model_name}}_ensmean +# +# Name to identify observation data in output. +# +OBTYPE = {{obtype}} +# +# Value to enter under the DESC column in the output stat file. +# +{{METPLUS_TOOL_NAME}}_DESC = NA + +# Regrid to specified grid. Indicate NONE if no regridding, or the grid id +# (e.g. 
G212) +{{METPLUS_TOOL_NAME}}_REGRID_TO_GRID = NONE +{{METPLUS_TOOL_NAME}}_REGRID_METHOD = BILIN +{{METPLUS_TOOL_NAME}}_REGRID_WIDTH = 2 + +{{METPLUS_TOOL_NAME}}_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE} + +# sets the -obs_valid_beg command line argument (optional) +# not used for this example +#{{METPLUS_TOOL_NAME}}_OBS_VALID_BEG = {valid?fmt=%Y%m%d_%H} + +# sets the -obs_valid_end command line argument (optional) +# not used for this example +#{{METPLUS_TOOL_NAME}}_OBS_VALID_END = {valid?fmt=%Y%m%d_%H} + +# Verification Masking regions +# Indicate which grid and polygon masking region, if applicable +{{METPLUS_TOOL_NAME}}_GRID = + +# List of full paths to poly masking files. NOTE: Only short lists of poly +# files work (those that fit on one line), a long list will result in an +# environment variable that is too long, resulting in an error. For long +# lists of poly masking files (i.e. all the mask files in the NCEP_mask +# directory), define these in the METplus {{MetplusToolName}} configuration file. +{{METPLUS_TOOL_NAME}}_POLY = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly +{{METPLUS_TOOL_NAME}}_STATION_ID = + +# Message types, if all message types are to be returned, leave this empty, +# otherwise indicate the message types of interest. +{{METPLUS_TOOL_NAME}}_MESSAGE_TYPE = {{fieldname_in_met_filedir_names}} + +# set to True to run {{MetplusToolName}} once for each name/level combination +# set to False to run {{MetplusToolName}} once per run time including all fields +{{METPLUS_TOOL_NAME}}_ONCE_PER_FIELD = False +# +# List of forecast and corresponding observation fields to process. +# +{#- Import the file containing jinja macros. #} {%- import metplus_templates_dir ~ '/metplus_macros.jinja' as metplus_macros %} {#- Set the probabilistic threshold to be used for the forecast field. If +necessary, this can be changed to be an input parameter in the calling +script instead of a hard-coded value as below. +#} {%- set thresh_fcst_prob = '==0.1' %} {#- Jinja requires certain variables to be defined globally within the template +before they can be used in if-statements and other scopes (see Jinja scoping +rules). Define such variables. +#} {%- set indx_level_fcst = '' %} +{%- set indx_input_thresh_fcst = '' %} +{%- set error_msg = '' %} +{%- set opts_indent = '' %} +{%- set opts_indent_len = '' %} +{%- set tmp = '' %} + +{%- set field_fcst = '' %} +{%- set field_obs = '' %} +{%- set level_fcst = '' %} +{%- set level_obs = '' %} +{%- set thresh_fcst = '' %} +{%- set thresh_obs = '' %} + +{%- set threshes_fcst = [] %} +{%- set threshes_obs = [] %} + +{#- +Get the set of valid field groups and ensure that the specified input +field group appears in this list. +#} +{%- set valid_field_groups = vx_config_dict.keys()|list %} +{{- metplus_macros.check_field_group(valid_field_groups, input_field_group) }} + +{#- +Reset the input forecast level so that if it happens to be an accumulation +(e.g. 'A03'), the leading zeros in front of the hour are stripped out (e.g. +reset to 'A3'). +#} +{%- set input_level_fcst = metplus_macros.get_accumulation_no_zero_pad(input_level_fcst) %} + +{#- +Extract from the configuration dictionary the set (which itself is a +dictionary) of fields, levels, and thresholds corresponding to the input +field group. Then set the delimiter string that separates forecast and +observation values in the various items (i.e. dictionary keys and values +representing field names, levels, and thresholds) in this dictionary.
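A rough Python rendering of the coupled-key convention just described may help. This is a sketch only: '%%' stands in for whatever string metplus_macros.set_delim_str() actually returns, and the dictionary contents are invented.

# Coupled keys pair a forecast value with an observation value; a key
# without the delimiter applies to both.
delim_str = "%%"  # placeholder for the real delimiter
fields_levels_threshes_cpld = {
    "TMP": {"Z2": ["ge273"]},                      # same name for fcst and obs
    "HPBL%%PBL": {"Z0%%L0": ["lt500", "lt1500"]},  # fcst name differs from obs
}

for field_cpld, levels_threshes_cpld in fields_levels_threshes_cpld.items():
    if delim_str in field_cpld:
        field_fcst, field_obs = field_cpld.split(delim_str)
    else:
        field_fcst = field_obs = field_cpld
    print(field_fcst, "->", field_obs, levels_threshes_cpld)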
+#} +{%- set fields_levels_threshes_cpld = vx_config_dict[input_field_group] %} +{%- set delim_str = metplus_macros.set_delim_str() %} + +{#- +Some fields in the specified field group (input_field_group) may need to +be excluded from the METplus config file because calculating means for +them doesn't make sense. List these (for each input_field_group) in the +following dictionary. +#} +{%- set fields_fcst_to_exclude_by_field_group = + {'APCP': [], + 'ASNOW': [], + 'REFC': [], + 'RETOP': [], + 'SFC': ['TCDC', 'VIS', 'HGT'], + 'UPA': []} %} +{%- set fields_fcst_to_exclude = fields_fcst_to_exclude_by_field_group[input_field_group] %} + +{#- +Remove from the dictionary fields_levels_threshes_cpld any fields that +are in the list to be excluded. +#} +{%- for field_cpld in fields_levels_threshes_cpld.copy() %} + + {%- if delim_str in field_cpld %} + {%- set field_fcst, field_obs = field_cpld.split(delim_str) %} + {%- else %} + {%- set field_fcst = field_cpld %} + {%- set field_obs = field_cpld %} + {%- endif %} + + {%- if field_fcst in fields_fcst_to_exclude %} + {%- set tmp = fields_levels_threshes_cpld.pop(field_cpld) %} + {%- endif %} + +{%- endfor %} + +{#- +Loop over the fields and set field names, levels, thresholds, and/or +options for each field, both for forecasts and for observations, in the +METplus configuration file. +#} +{%- set ns = namespace(var_count = 0) %} +{%- for field_cpld, levels_threshes_cpld in fields_levels_threshes_cpld.items() %} + + {%- if delim_str in field_cpld %} + {%- set field_fcst, field_obs = field_cpld.split(delim_str) %} + {%- else %} + {%- set field_fcst = field_cpld %} + {%- set field_obs = field_cpld %} + {%- endif %} + +{#- +For convenience, create lists of valid forecast and observation levels +for the current field. +#} + {%- set valid_levels_fcst = [] %} + {%- set valid_levels_obs = [] %} + {%- for level_cpld, threshes_cpld in levels_threshes_cpld.items() %} + {%- if delim_str in level_cpld %} + {%- set level_fcst, level_obs = level_cpld.split(delim_str) %} + {%- else %} + {%- set level_fcst = level_cpld %} + {%- set level_obs = level_cpld %} + {%- endif %} + {%- set tmp = valid_levels_fcst.append(level_fcst) %} + {%- set tmp = valid_levels_obs.append(level_obs) %} + {%- endfor %} + +{#- +Make sure that the input forecast level (input_level_fcst) is set to a +valid value. +#} + {%- if (input_level_fcst != 'all') and (input_level_fcst not in valid_levels_fcst) %} + {%- set error_msg = '\n' ~ +'The input forecast level (input_level_fcst) must be set either to \'all\'\n' ~ +'or to one of the elements in the list of valid levels (valid_levels_fcst)\n' ~ +'for the current forecast field (field_fcst). This is not the case:\n' ~ +' field_fcst = ' ~ field_fcst ~ '\n' ~ +' valid_levels_fcst = ' ~ valid_levels_fcst ~ '\n' ~ +' input_level_fcst = ' ~ input_level_fcst ~ '\n' %} + {{metplus_macros.print_err_and_quit(error_msg)}} + {%- endif %} + +{#- +Loop over the (coupled) levels and corresponding lists of thresholds. +Extract from these the level values for forecasts and observations and +use them to set the forecast and observation field names, levels, +thresholds, and/or options in the METplus configuration file. 
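The pairing of forecast and observation levels relies on list position, since the loop described above appends to the two level lists in the same order. A minimal sketch, assuming the same hypothetical '%%' delimiter and invented data as before:

delim_str = "%%"
levels_threshes_cpld = {"Z0%%L0": ["lt500"], "Z2": ["ge273"]}

valid_levels_fcst, valid_levels_obs = [], []
for level_cpld in levels_threshes_cpld:
    if delim_str in level_cpld:
        level_fcst, level_obs = level_cpld.split(delim_str)
    else:
        level_fcst = level_obs = level_cpld
    valid_levels_fcst.append(level_fcst)
    valid_levels_obs.append(level_obs)

# A forecast level's matching obs level shares its index, which is how the
# template later resolves OBS_VAR<n>_LEVELS from the forecast level.
level_obs = valid_levels_obs[valid_levels_fcst.index("Z0")]
assert level_obs == "L0"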
+#} + {%- for level_cpld, threshes_cpld in levels_threshes_cpld.items() %} + + {%- if delim_str in level_cpld %} + {%- set level_fcst, level_obs = level_cpld.split(delim_str) %} + {%- else %} + {%- set level_fcst = level_cpld %} + {%- set level_obs = level_cpld %} + {%- endif %} + + {%- set valid_threshes_fcst = [] %} + {%- set valid_threshes_obs = [] %} + {%- for thresh_cpld in threshes_cpld %} + {%- if delim_str in thresh_cpld %} + {%- set thresh_fcst, thresh_obs = thresh_cpld.split(delim_str) %} + {%- else %} + {%- set thresh_fcst = thresh_cpld %} + {%- set thresh_obs = thresh_cpld %} + {%- endif %} + {%- set tmp = valid_threshes_fcst.append(thresh_fcst) %} + {%- set tmp = valid_threshes_obs.append(thresh_obs) %} + {%- endfor %} + + {%- if (input_level_fcst == 'all') or (input_level_fcst == level_fcst) %} +{#- +Increment the METplus variable counter. +#} + {%- set ns.var_count = ns.var_count+1 %} + +{#- +Set forecast field name. +#} +FCST_VAR{{ns.var_count}}_NAME = {{field_fcst}}_{{level_fcst}}_ENS_MEAN + +{#- +Set forecast field level. +#} +FCST_VAR{{ns.var_count}}_LEVELS = {{level_fcst}} + +{#- +Set forecast field threshold(s). Note that no forecast thresholds are +included in the METplus configuration file if input_thresh_fcst is set +to 'none'. +#} + {%- if (input_thresh_fcst != 'none') %} +{#- +If input_thresh_fcst is set to 'all', set the list of forecast thresholds +to the full set of valid values. +#} + {%- if (input_thresh_fcst == 'all') %} + + {%- set threshes_fcst = valid_threshes_fcst %} +{#- +If input_thresh_fcst is set to a specific value: +* If that value is valid, i.e. it exists in the list of valid forecast + thresholds, get its index in that list and use it to set the forecast + threshold to a one-element list containing that value. Note that the + index will be needed later below when setting the observation threshold(s). +* If the input forecast threshold is not valid, print out a warning message + and exit. +#} + {%- else %} + + {%- if input_thresh_fcst in valid_threshes_fcst %} + {%- set indx_input_thresh_fcst = valid_threshes_fcst.index(input_thresh_fcst) %} + {%- set threshes_fcst = [valid_threshes_fcst[indx_input_thresh_fcst]] %} + {%- else %} + {%- set error_msg = '\n' ~ +'For the current forecast field (field_fcst) and forecast level (level_fcst),\n' ~ +'the input forecast threshold (input_thresh_fcst) does not exist in the list\n' ~ +'of valid forecast thresholds (valid_threshes_fcst):\n' ~ +' field_fcst = ' ~ field_fcst ~ '\n' ~ +' level_fcst = ' ~ level_fcst ~ '\n' ~ +' valid_threshes_fcst = ' ~ valid_threshes_fcst ~ '\n' ~ +' input_thresh_fcst = ' ~ input_thresh_fcst ~ '\n' %} + {{metplus_macros.print_err_and_quit(error_msg)}} + {%- endif %} + + {%- endif %} +{#- +If threshes_fcst has been reset to something other than its default +value of an empty list, then set the forecast thresholds in the METplus +configuration file because that implies threshes_fcst was set above to +a non-empty value. Then reset threshes_fcst to its default value for +proper processing of thresholds for the next field. +#} + {%- if (threshes_fcst != []) %} +FCST_VAR{{ns.var_count}}_THRESH = {{threshes_fcst|join(', ')}} + {%- endif %} + {%- set threshes_fcst = [] %} + + {%- endif %} + +{#- +Set forecast field options. 
+#} + {%- set opts_indent_len = 20 %} + {%- if (ns.var_count > 9) and (ns.var_count <= 99) %} + {%- set opts_indent_len = opts_indent_len + 1 %} + {%- elif (ns.var_count > 99) and (ns.var_count <= 999) %} + {%- set opts_indent_len = opts_indent_len + 2 %} + {%- elif (ns.var_count > 999) %} + {%- set opts_indent_len = opts_indent_len + 3 %} + {%- endif %} + {%- set opts_indent = ' '*opts_indent_len %} + + {%- if input_field_group == 'UPA' %} + + {%- if field_fcst == 'CAPE' %} +FCST_VAR{{ns.var_count}}_OPTIONS = cnt_thresh = [ >0 ]; + {%- endif %} + + {%- endif %} + +{#- +Set observation field name. +#} +OBS_VAR{{ns.var_count}}_NAME = {{field_obs}} + +{#- +Set observation field level. +#} + {%- set indx_level_fcst = valid_levels_fcst.index(level_fcst) %} + {%- set level_obs = valid_levels_obs[indx_level_fcst] %} +OBS_VAR{{ns.var_count}}_LEVELS = {{level_obs}} + +{#- +Set observation field threshold(s). Note that no observation thresholds +are included in the METplus configuration file if input_thresh_fcst is +set to 'none'. +#} + {%- if (input_thresh_fcst != 'none') %} +{#- +If input_thresh_fcst is set to 'all', set the list of observation thresholds +to the full set of valid values. +#} + {%- if (input_thresh_fcst == 'all') %} + + {%- set threshes_obs = valid_threshes_obs %} +{#- +If input_thresh_fcst is set to a specific forecast threshold, then the +observation threshold is given by the element in the list of valid +observation thresholds that has the same index as that of input_thresh_fcst +in the list of valid forecast thresholds. +#} + {%- else %} + {%- set threshes_obs = [valid_threshes_obs[indx_input_thresh_fcst]] %} + {%- endif %} +{#- +If threshes_obs has been reset to something other than its default value +of an empty list, then set the observation thresholds in the METplus +configuration file because that implies threshes_obs was set above to +a non-empty value. Then reset threshes_obs to its default value for +proper processing of thresholds for the next field. +#} + {%- if (threshes_obs != []) %} +OBS_VAR{{ns.var_count}}_THRESH = {{threshes_obs|join(', ')}} + {%- endif %} + {%- set threshes_obs = [] %} + + {%- endif %} + +{#- +Set observation field options. +#} + {%- set opts_indent_len = opts_indent_len - 1 %} + {%- set opts_indent = ' '*opts_indent_len %} + + {%- if input_field_group == 'UPA' %} + + {%- if field_obs == 'CAPE' %} +OBS_VAR{{ns.var_count}}_OPTIONS = cnt_thresh = [ >0 ]; +{{opts_indent}}cnt_logic = UNION; + {%- elif field_obs == 'PBL' %} +OBS_VAR{{ns.var_count}}_OPTIONS = desc = "TKE"; + {%- endif %} + + {%- endif %} + +{#- +Print out a newline to separate the settings for the current field (both +forecast and observation settings) from those for the next field. +#} + {{- '\n' }} + + {%- endif %} + + {%- endfor %} +{%- endfor %} +# End of [config] section and start of [dir] section. +[dir] +# +# Directory containing observation input to {{MetplusToolName}}. +# +OBS_{{METPLUS_TOOL_NAME}}_INPUT_DIR = {{obs_input_dir}} +# +# Directory containing forecast input to {{MetplusToolName}}. +# +FCST_{{METPLUS_TOOL_NAME}}_INPUT_DIR = {{fcst_input_dir}} +# +# Directory containing climatology mean input to {{MetplusToolName}}. Not used in +# this example. +# +{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_INPUT_DIR = +# +# Directory containing climatology standard deviation input to {{MetplusToolName}}. Not used in +# this example. +# +{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_INPUT_DIR = +# +# Directory in which to write output from {{MetplusToolName}}.
+# +# OUTPUT_BASE apparently has to be set to something; it cannot be left +# to its default value. But it is not explicitly used elsewhere in this +# configuration file. +# +OUTPUT_BASE = {{output_base}} +{{METPLUS_TOOL_NAME}}_OUTPUT_DIR = {{output_dir}} +# +# Directory for staging data. +# +STAGING_DIR = {{staging_dir}} + +# End of [dir] section and start of [filename_templates] section. +[filename_templates] +# +# Template for observation input to {{MetplusToolName}} relative to +# OBS_{{METPLUS_TOOL_NAME}}_INPUT_DIR. +# +OBS_{{METPLUS_TOOL_NAME}}_INPUT_TEMPLATE = {{obs_input_fn_template}} +# +# Template for forecast input to {{MetplusToolName}} relative to +# FCST_{{METPLUS_TOOL_NAME}}_INPUT_DIR. +# +FCST_{{METPLUS_TOOL_NAME}}_INPUT_TEMPLATE = {{fcst_input_fn_template}} +# +# Template for output from {{MetplusToolName}} relative to {{METPLUS_TOOL_NAME}}_OUTPUT_DIR. +# +{{METPLUS_TOOL_NAME}}_OUTPUT_TEMPLATE = +# +# Template for climatology input to {{MetplusToolName}} relative to +# {{METPLUS_TOOL_NAME}}_CLIMO_MEAN_INPUT_DIR. Not used in this example. +# +{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_INPUT_TEMPLATE = +# +# Template for climatology input to {{MetplusToolName}} relative to +# {{METPLUS_TOOL_NAME}}_CLIMO_STDEV_INPUT_DIR. Not used in this example. +# +{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_INPUT_TEMPLATE = diff --git a/parm/metplus/PointStat_ensmean_ADPSFC.conf b/parm/metplus/PointStat_ensmean_ADPSFC.conf deleted file mode 100644 index 6b7e7e9cff..0000000000 --- a/parm/metplus/PointStat_ensmean_ADPSFC.conf +++ /dev/null @@ -1,252 +0,0 @@ -# Ensemble mean PointStat METplus Configuration - -[config] - -# List of applications (tools) to run. -PROCESS_LIST = PointStat - -# time looping - options are INIT, VALID, RETRO, and REALTIME -# If set to INIT or RETRO: -# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set -# If set to VALID or REALTIME: -# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set -LOOP_BY = INIT - -# Format of INIT_BEG and INIT_END using % items -# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. -# see www.strftime.org for more information -# %Y%m%d%H expands to YYYYMMDDHH -INIT_TIME_FMT = %Y%m%d%H - -# Start time for METplus run - must match INIT_TIME_FMT -INIT_BEG = {{cdate}} - -# End time for METplus run - must match INIT_TIME_FMT -INIT_END = {{cdate}} - -# Increment between METplus runs (in seconds if no units are specified). -# Must be >= 60 seconds. -INIT_INCREMENT = 3600 - -# List of forecast leads to process for each run time (init or valid) -# In hours if units are not specified -# If unset, defaults to 0 (don't loop through forecast leads) -LEAD_SEQ = {{fhr_list}} -# -# Order of loops to process data - Options are times, processes -# Not relevant if only one item is in the PROCESS_LIST -# times = run all wrappers in the PROCESS_LIST for a single run time, then -# increment the run time and run all wrappers again until all times have -# been evaluated. -# processes = run the first wrapper in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST until all -# wrappers have been run -# -LOOP_ORDER = times -# -# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. -# -LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} -# -# Specify the name of the METplus log file. -# -LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} -# -# Specify the location and name of the final METplus conf file. 
-# -METPLUS_CONF = {POINT_STAT_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}} -# -# Location of MET configuration file to pass to PointStat. -# -# References PARM_BASE, which is the location of the parm directory -# corresponding to the ush directory of the run_metplus.py script that -# is called or the value of the environment variable METPLUS_PARM_BASE -# if set. -# -POINT_STAT_CONFIG_FILE = {PARM_BASE}/met_config/PointStatConfig_wrapped - -POINT_STAT_OBS_QUALITY_INC = 0, 1, 2, 3, 9, NA -#POINT_STAT_OBS_QUALITY_EXC = - -POINT_STAT_CLIMO_MEAN_TIME_INTERP_METHOD = NEAREST -#POINT_STAT_CLIMO_STDEV_TIME_INTERP_METHOD = - -#POINT_STAT_INTERP_VLD_THRESH = -#POINT_STAT_INTERP_SHAPE = -POINT_STAT_INTERP_TYPE_METHOD = BILIN -POINT_STAT_INTERP_TYPE_WIDTH = 2 - -#POINT_STAT_OUTPUT_FLAG_FHO = -#POINT_STAT_OUTPUT_FLAG_CTC = -#POINT_STAT_OUTPUT_FLAG_CTS = -#POINT_STAT_OUTPUT_FLAG_MCTC = -#POINT_STAT_OUTPUT_FLAG_MCTS = -POINT_STAT_OUTPUT_FLAG_CNT = STAT -POINT_STAT_OUTPUT_FLAG_SL1L2 = STAT -#POINT_STAT_OUTPUT_FLAG_SAL1L2 = -POINT_STAT_OUTPUT_FLAG_VL1L2 = STAT -#POINT_STAT_OUTPUT_FLAG_VAL1L2 = -POINT_STAT_OUTPUT_FLAG_VCNT = STAT -#POINT_STAT_OUTPUT_FLAG_PCT = -#POINT_STAT_OUTPUT_FLAG_PSTD = -#POINT_STAT_OUTPUT_FLAG_PJC = -#POINT_STAT_OUTPUT_FLAG_PRC = -#POINT_STAT_OUTPUT_FLAG_ECNT = -#POINT_STAT_OUTPUT_FLAG_RPS = -#POINT_STAT_OUTPUT_FLAG_ECLV = -#POINT_STAT_OUTPUT_FLAG_MPR = -#POINT_STAT_OUTPUT_FLAG_ORANK = - -POINT_STAT_CLIMO_CDF_BINS = 1 -#POINT_STAT_CLIMO_CDF_CENTER_BINS = False -#POINT_STAT_CLIMO_CDF_WRITE_BINS = True - -#POINT_STAT_HSS_EC_VALUE = - -# -# Observation data time window(s). -# -OBS_WINDOW_BEGIN = -1799 -OBS_WINDOW_END = 1800 -OBS_POINT_STAT_WINDOW_BEGIN = {OBS_WINDOW_BEGIN} -OBS_POINT_STAT_WINDOW_END = {OBS_WINDOW_END} - -# Optional list of offsets to look for point observation data -POINT_STAT_OFFSETS = 0 -# -# Name to identify model (forecast) data in output. -# -# The variable MODEL is recorded in the stat files, and the data in -# these files is then plotted (e.g. using METViewer). Here, we add a -# suffix to MODEL that identifies the data as that for the ensemble -# mean. This makes it easier to identify each curve. -# -MODEL = {{vx_fcst_model_name}}_ensmean -# -# Name to identify observation data in output. -# -OBTYPE = {{obtype}} -# -# Value to enter under the DESC column in the output stat file. -# -POINT_STAT_DESC = NA - -# Regrid to specified grid. Indicate NONE if no regridding, or the grid id -# (e.g. G212) -POINT_STAT_REGRID_TO_GRID = NONE -POINT_STAT_REGRID_METHOD = BILIN -POINT_STAT_REGRID_WIDTH = 2 - -POINT_STAT_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE} - -# sets the -obs_valid_beg command line argument (optional) -# not used for this example -#POINT_STAT_OBS_VALID_BEG = {valid?fmt=%Y%m%d_%H} - -# sets the -obs_valid_end command line argument (optional) -# not used for this example -#POINT_STAT_OBS_VALID_END = {valid?fmt=%Y%m%d_%H} - -# Verification Masking regions -# Indicate which grid and polygon masking region, if applicable -POINT_STAT_GRID = - -# List of full path to poly masking files. NOTE: Only short lists of poly -# files work (those that fit on one line), a long list will result in an -# environment variable that is too long, resulting in an error. For long -# lists of poly masking files (i.e. all the mask files in the NCEP_mask -# directory), define these in the METplus PointStat configuration file. 
-POINT_STAT_POLY = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly -POINT_STAT_STATION_ID = - -# Message types, if all message types are to be returned, leave this empty, -# otherwise indicate the message types of interest. -POINT_STAT_MESSAGE_TYPE = {{fieldname_in_met_filedir_names}} - -# set to True to run PointStat once for each name/level combination -# set to False to run PointStat once per run time including all fields -POINT_STAT_ONCE_PER_FIELD = False -# -# List of forecast and corresponding observation fields to process. -# -FCST_VAR1_NAME = TMP_Z2_ENS_MEAN -FCST_VAR1_LEVELS = Z2 -FCST_VAR1_THRESH = ge268, ge273, ge278, ge293, ge298, ge303 -OBS_VAR1_NAME = TMP -OBS_VAR1_LEVELS = Z2 -OBS_VAR1_THRESH = ge268, ge273, ge278, ge293, ge298, ge303 - -FCST_VAR2_NAME = DPT_Z2_ENS_MEAN -FCST_VAR2_LEVELS = Z2 -FCST_VAR2_THRESH = ge263, ge268, ge273, ge288, ge293, ge298 -OBS_VAR2_NAME = DPT -OBS_VAR2_LEVELS = Z2 -OBS_VAR2_THRESH = ge263, ge268, ge273, ge288, ge293, ge298 - -FCST_VAR3_NAME = WIND_Z10_ENS_MEAN -FCST_VAR3_LEVELS = Z10 -FCST_VAR3_THRESH = ge5, ge10, ge15 -OBS_VAR3_NAME = WIND -OBS_VAR3_LEVELS = Z10 -OBS_VAR3_THRESH = ge5, ge10, ge15 - -# End of [config] section and start of [dir] section. -[dir] -# -# Directory containing observation input to PointStat. -# -OBS_POINT_STAT_INPUT_DIR = {{obs_input_dir}} -# -# Directory containing forecast input to PointStat. -# -FCST_POINT_STAT_INPUT_DIR = {{fcst_input_dir}} -# -# Directory containing climatology mean input to PointStat. Not used in -# this example. -# -POINT_STAT_CLIMO_MEAN_INPUT_DIR = -# -# Directory containing climatology mean input to PointStat. Not used in -# this example. -# -POINT_STAT_CLIMO_STDEV_INPUT_DIR = -# -# Directory in which to write output from PointStat. -# -# OUTPUT_BASE apparently has to be set to something; it cannot be left -# to its default value. But it is not explicitly used elsewhere in this -# configuration file. -# -OUTPUT_BASE = {{output_base}} -POINT_STAT_OUTPUT_DIR = {{output_dir}} -# -# Directory for staging data. -# -STAGING_DIR = {{staging_dir}} - -# End of [dir] section and start of [filename_templates] section. -[filename_templates] -# -# Template for observation input to PointStat relative to -# OBS_POINT_STAT_INPUT_DIR. -# -OBS_POINT_STAT_INPUT_TEMPLATE = {{obs_input_fn_template}} -# -# Template for forecast input to PointStat relative to -# FCST_POINT_STAT_INPUT_DIR. -# -FCST_POINT_STAT_INPUT_TEMPLATE = {{fcst_input_fn_template}} -# -# Template for output from PointStat relative to POINT_STAT_OUTPUT_DIR. -# -POINT_STAT_OUTPUT_TEMPLATE = -# -# Template for climatology input to PointStat relative to -# POINT_STAT_CLIMO_MEAN_INPUT_DIR. Not used in this example. -# -POINT_STAT_CLIMO_MEAN_INPUT_TEMPLATE = -# -# Template for climatology input to PointStat relative to -# POINT_STAT_CLIMO_STDEV_INPUT_DIR. Not used in this example. -# -POINT_STAT_CLIMO_STDEV_INPUT_TEMPLATE = diff --git a/parm/metplus/PointStat_ensmean_ADPUPA.conf b/parm/metplus/PointStat_ensmean_ADPUPA.conf deleted file mode 100644 index b54c775b46..0000000000 --- a/parm/metplus/PointStat_ensmean_ADPUPA.conf +++ /dev/null @@ -1,319 +0,0 @@ -# Ensemble mean PointStat METplus Configuration - -[config] - -# List of applications (tools) to run. 
-PROCESS_LIST = PointStat - -# time looping - options are INIT, VALID, RETRO, and REALTIME -# If set to INIT or RETRO: -# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set -# If set to VALID or REALTIME: -# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set -LOOP_BY = INIT - -# Format of INIT_BEG and INIT_END using % items -# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. -# see www.strftime.org for more information -# %Y%m%d%H expands to YYYYMMDDHH -INIT_TIME_FMT = %Y%m%d%H - -# Start time for METplus run - must match INIT_TIME_FMT -INIT_BEG = {{cdate}} - -# End time for METplus run - must match INIT_TIME_FMT -INIT_END = {{cdate}} - -# Increment between METplus runs (in seconds if no units are specified). -# Must be >= 60 seconds. -INIT_INCREMENT = 3600 - -# List of forecast leads to process for each run time (init or valid) -# In hours if units are not specified -# If unset, defaults to 0 (don't loop through forecast leads) -LEAD_SEQ = {{fhr_list}} -# -# Order of loops to process data - Options are times, processes -# Not relevant if only one item is in the PROCESS_LIST -# times = run all wrappers in the PROCESS_LIST for a single run time, then -# increment the run time and run all wrappers again until all times have -# been evaluated. -# processes = run the first wrapper in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST until all -# wrappers have been run -# -LOOP_ORDER = times -# -# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. -# -LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} -# -# Specify the name of the METplus log file. -# -LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} -# -# Specify the location and name of the final METplus conf file. -# -METPLUS_CONF = {POINT_STAT_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}} -# -# Location of MET configuration file to pass to PointStat. -# -# References PARM_BASE, which is the location of the parm directory -# corresponding to the ush directory of the run_metplus.py script that -# is called or the value of the environment variable METPLUS_PARM_BASE -# if set. -# -POINT_STAT_CONFIG_FILE = {PARM_BASE}/met_config/PointStatConfig_wrapped - -POINT_STAT_OBS_QUALITY_INC = 0, 1, 2, 3, 9, NA -#POINT_STAT_OBS_QUALITY_EXC = - -POINT_STAT_CLIMO_MEAN_TIME_INTERP_METHOD = NEAREST -#POINT_STAT_CLIMO_STDEV_TIME_INTERP_METHOD = - -#POINT_STAT_INTERP_VLD_THRESH = -#POINT_STAT_INTERP_SHAPE = -POINT_STAT_INTERP_TYPE_METHOD = BILIN -POINT_STAT_INTERP_TYPE_WIDTH = 2 - -#POINT_STAT_OUTPUT_FLAG_FHO = -#POINT_STAT_OUTPUT_FLAG_CTC = -#POINT_STAT_OUTPUT_FLAG_CTS = -#POINT_STAT_OUTPUT_FLAG_MCTC = -#POINT_STAT_OUTPUT_FLAG_MCTS = -POINT_STAT_OUTPUT_FLAG_CNT = STAT -POINT_STAT_OUTPUT_FLAG_SL1L2 = STAT -#POINT_STAT_OUTPUT_FLAG_SAL1L2 = -POINT_STAT_OUTPUT_FLAG_VL1L2 = STAT -#POINT_STAT_OUTPUT_FLAG_VAL1L2 = -POINT_STAT_OUTPUT_FLAG_VCNT = STAT -#POINT_STAT_OUTPUT_FLAG_PCT = -#POINT_STAT_OUTPUT_FLAG_PSTD = -#POINT_STAT_OUTPUT_FLAG_PJC = -#POINT_STAT_OUTPUT_FLAG_PRC = -#POINT_STAT_OUTPUT_FLAG_ECNT = -#POINT_STAT_OUTPUT_FLAG_RPS = -#POINT_STAT_OUTPUT_FLAG_ECLV = -#POINT_STAT_OUTPUT_FLAG_MPR = -#POINT_STAT_OUTPUT_FLAG_ORANK = - -POINT_STAT_CLIMO_CDF_BINS = 1 -#POINT_STAT_CLIMO_CDF_CENTER_BINS = False -#POINT_STAT_CLIMO_CDF_WRITE_BINS = True - -#POINT_STAT_HSS_EC_VALUE = - -# -# Observation data time window(s). 
-# -OBS_WINDOW_BEGIN = -1799 -OBS_WINDOW_END = 1800 -OBS_POINT_STAT_WINDOW_BEGIN = {OBS_WINDOW_BEGIN} -OBS_POINT_STAT_WINDOW_END = {OBS_WINDOW_END} - -# Optional list of offsets to look for point observation data -POINT_STAT_OFFSETS = 0 -# -# Name to identify model (forecast) data in output. -# -# The variable MODEL is recorded in the stat files, and the data in -# these files is then plotted (e.g. using METViewer). Here, we add a -# suffix to MODEL that identifies the data as that for the ensemble -# mean. This makes it easier to identify each curve. -# -MODEL = {{vx_fcst_model_name}}_ensmean -# -# Name to identify observation data in output. -# -OBTYPE = {{obtype}} -# -# Value to enter under the DESC column in the output stat file. -# -POINT_STAT_DESC = NA - -# Regrid to specified grid. Indicate NONE if no regridding, or the grid id -# (e.g. G212) -POINT_STAT_REGRID_TO_GRID = NONE -POINT_STAT_REGRID_METHOD = BILIN -POINT_STAT_REGRID_WIDTH = 2 - -POINT_STAT_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE} - -# sets the -obs_valid_beg command line argument (optional) -# not used for this example -#POINT_STAT_OBS_VALID_BEG = {valid?fmt=%Y%m%d_%H} - -# sets the -obs_valid_end command line argument (optional) -# not used for this example -#POINT_STAT_OBS_VALID_END = {valid?fmt=%Y%m%d_%H} - -# Verification Masking regions -# Indicate which grid and polygon masking region, if applicable -POINT_STAT_GRID = - -# List of full path to poly masking files. NOTE: Only short lists of poly -# files work (those that fit on one line), a long list will result in an -# environment variable that is too long, resulting in an error. For long -# lists of poly masking files (i.e. all the mask files in the NCEP_mask -# directory), define these in the METplus PointStat configuration file. -POINT_STAT_POLY = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly -POINT_STAT_STATION_ID = - -# Message types, if all message types are to be returned, leave this empty, -# otherwise indicate the message types of interest. -POINT_STAT_MESSAGE_TYPE = {{fieldname_in_met_filedir_names}} - -# set to True to run PointStat once for each name/level combination -# set to False to run PointStat once per run time including all fields -POINT_STAT_ONCE_PER_FIELD = False -# -# List of forecast and corresponding observation fields to process. 
-# -FCST_VAR1_NAME = TMP_P850_ENS_MEAN -FCST_VAR1_LEVELS = P850 -FCST_VAR1_THRESH = ge288, ge293, ge298 -OBS_VAR1_NAME = TMP -OBS_VAR1_LEVELS = P850 -OBS_VAR1_THRESH = ge288, ge293, ge298 - -FCST_VAR2_NAME = TMP_P700_ENS_MEAN -FCST_VAR2_LEVELS = P700 -FCST_VAR2_THRESH = ge273, ge278, ge283 -OBS_VAR2_NAME = TMP -OBS_VAR2_LEVELS = P700 -OBS_VAR2_THRESH = ge273, ge278, ge283 - -FCST_VAR3_NAME = TMP_P500_ENS_MEAN -FCST_VAR3_LEVELS = P500 -FCST_VAR3_THRESH = ge258, ge263, ge268 -OBS_VAR3_NAME = TMP -OBS_VAR3_LEVELS = P500 -OBS_VAR3_THRESH = ge258, ge263, ge268 - -FCST_VAR4_NAME = DPT_P850_ENS_MEAN -FCST_VAR4_LEVELS = P850 -FCST_VAR4_THRESH = ge273, ge278, ge283 -OBS_VAR4_NAME = DPT -OBS_VAR4_LEVELS = P850 -OBS_VAR4_THRESH = ge273, ge278, ge283 - -FCST_VAR5_NAME = DPT_P700_ENS_MEAN -FCST_VAR5_LEVELS = P700 -FCST_VAR5_THRESH = ge263, ge286, ge273 -OBS_VAR5_NAME = DPT -OBS_VAR5_LEVELS = P700 -OBS_VAR5_THRESH = ge263, ge286, ge273 - -FCST_VAR6_NAME = WIND_P850_ENS_MEAN -FCST_VAR6_LEVELS = P850 -FCST_VAR6_THRESH = ge5, ge10, ge15 -OBS_VAR6_NAME = WIND -OBS_VAR6_LEVELS = P850 -OBS_VAR6_THRESH = ge5, ge10, ge15 - -FCST_VAR7_NAME = WIND_P700_ENS_MEAN -FCST_VAR7_LEVELS = P700 -FCST_VAR7_THRESH = ge10, ge15, ge20 -OBS_VAR7_NAME = WIND -OBS_VAR7_LEVELS = P700 -OBS_VAR7_THRESH = ge10, ge15, ge20 - -FCST_VAR8_NAME = WIND_P500_ENS_MEAN -FCST_VAR8_LEVELS = P500 -FCST_VAR8_THRESH = ge15, ge21, ge26 -OBS_VAR8_NAME = WIND -OBS_VAR8_LEVELS = P500 -OBS_VAR8_THRESH = ge15, ge21, ge26 - -FCST_VAR9_NAME = WIND_P250_ENS_MEAN -FCST_VAR9_LEVELS = P250 -FCST_VAR9_THRESH = ge26, ge31, ge46, ge62 -OBS_VAR9_NAME = WIND -OBS_VAR9_LEVELS = P250 -OBS_VAR9_THRESH = ge26, ge31, ge46, ge62 - -FCST_VAR10_NAME = HGT_P500_ENS_MEAN -FCST_VAR10_LEVELS = P500 -FCST_VAR10_THRESH = ge5400, ge5600, ge5880 -OBS_VAR10_NAME = HGT -OBS_VAR10_LEVELS = P500 -OBS_VAR10_THRESH = ge5400, ge5600, ge5880 - -FCST_VAR11_NAME = CAPE_L0_ENS_MEAN -FCST_VAR11_LEVELS = L0 -FCST_VAR11_THRESH = le1000, gt1000&<2500, gt2500&<4000, gt2500 -FCST_VAR11_OPTIONS = cnt_thresh = [ >0 ]; -OBS_VAR11_NAME = CAPE -OBS_VAR11_LEVELS = L0-100000 -OBS_VAR11_THRESH = le1000, gt1000&<2500, gt2500&<4000, gt2500 -OBS_VAR11_OPTIONS = cnt_thresh = [ >0 ]; - cnt_logic = UNION; - -FCST_VAR12_NAME = HPBL_Z0_ENS_MEAN -FCST_VAR12_LEVELS = Z0 -FCST_VAR12_THRESH = lt500, lt1500, gt1500 -OBS_VAR12_NAME = PBL -OBS_VAR12_LEVELS = L0 -OBS_VAR12_THRESH = lt500, lt1500, gt1500 -OBS_VAR12_OPTIONS = desc = "TKE"; - -# End of [config] section and start of [dir] section. -[dir] -# -# Directory containing observation input to PointStat. -# -OBS_POINT_STAT_INPUT_DIR = {{obs_input_dir}} -# -# Directory containing forecast input to PointStat. -# -FCST_POINT_STAT_INPUT_DIR = {{fcst_input_dir}} -# -# Directory containing climatology mean input to PointStat. Not used in -# this example. -# -POINT_STAT_CLIMO_MEAN_INPUT_DIR = -# -# Directory containing climatology mean input to PointStat. Not used in -# this example. -# -POINT_STAT_CLIMO_STDEV_INPUT_DIR = -# -# Directory in which to write output from PointStat. -# -# OUTPUT_BASE apparently has to be set to something; it cannot be left -# to its default value. But it is not explicitly used elsewhere in this -# configuration file. -# -OUTPUT_BASE = {{output_base}} -POINT_STAT_OUTPUT_DIR = {{output_dir}} -# -# Directory for staging data. -# -STAGING_DIR = {{staging_dir}} - -# End of [dir] section and start of [filename_templates] section. -[filename_templates] -# -# Template for observation input to PointStat relative to -# OBS_POINT_STAT_INPUT_DIR. 
-# -OBS_POINT_STAT_INPUT_TEMPLATE = {{obs_input_fn_template}} -# -# Template for forecast input to PointStat relative to -# FCST_POINT_STAT_INPUT_DIR. -# -FCST_POINT_STAT_INPUT_TEMPLATE = {{fcst_input_fn_template}} -# -# Template for output from PointStat relative to POINT_STAT_OUTPUT_DIR. -# -POINT_STAT_OUTPUT_TEMPLATE = -# -# Template for climatology input to PointStat relative to -# POINT_STAT_CLIMO_MEAN_INPUT_DIR. Not used in this example. -# -POINT_STAT_CLIMO_MEAN_INPUT_TEMPLATE = -# -# Template for climatology input to PointStat relative to -# POINT_STAT_CLIMO_STDEV_INPUT_DIR. Not used in this example. -# -POINT_STAT_CLIMO_STDEV_INPUT_TEMPLATE = diff --git a/parm/metplus/PointStat_ensprob.conf b/parm/metplus/PointStat_ensprob.conf new file mode 100644 index 0000000000..42ac254a4b --- /dev/null +++ b/parm/metplus/PointStat_ensprob.conf @@ -0,0 +1,502 @@ +# Ensemble probabilistic {{MetplusToolName}} METplus Configuration + +[config] + +# List of applications (tools) to run. +PROCESS_LIST = {{MetplusToolName}} + +# time looping - options are INIT, VALID, RETRO, and REALTIME +# If set to INIT or RETRO: +# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set +# If set to VALID or REALTIME: +# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set +LOOP_BY = INIT + +# Format of INIT_BEG and INIT_END using % items +# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. +# see www.strftime.org for more information +# %Y%m%d%H expands to YYYYMMDDHH +INIT_TIME_FMT = %Y%m%d%H + +# Start time for METplus run - must match INIT_TIME_FMT +INIT_BEG = {{cdate}} + +# End time for METplus run - must match INIT_TIME_FMT +INIT_END = {{cdate}} + +# Increment between METplus runs (in seconds if no units are specified). +# Must be >= 60 seconds. +INIT_INCREMENT = 3600 + +# List of forecast leads to process for each run time (init or valid) +# In hours if units are not specified +# If unset, defaults to 0 (don't loop through forecast leads) +LEAD_SEQ = {{vx_leadhr_list}} +# +# Order of loops to process data - Options are times, processes +# Not relevant if only one item is in the PROCESS_LIST +# times = run all wrappers in the PROCESS_LIST for a single run time, then +# increment the run time and run all wrappers again until all times have +# been evaluated. +# processes = run the first wrapper in the PROCESS_LIST for all times +# specified, then repeat for the next item in the PROCESS_LIST until all +# wrappers have been run +# +LOOP_ORDER = times +# +# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. +# +LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} +# +# Specify the name of the METplus log file. +# +LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} +# +# Specify the location and name of the final METplus conf file. +# +METPLUS_CONF = {{ '{' ~ METPLUS_TOOL_NAME ~ '_OUTPUT_DIR}' }}/metplus_final.{{metplus_config_fn}} +# +# Location of MET configuration file to pass to {{MetplusToolName}}. +# +# References PARM_BASE, which is the location of the parm directory +# corresponding to the ush directory of the run_metplus.py script that +# is called or the value of the environment variable METPLUS_PARM_BASE +# if set. 
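The METPLUS_CONF line in the new template above uses a small Jinja trick: concatenating a quoted '{' with the tool name emits a literal METplus-style {VAR} cross-reference rather than being parsed as Jinja syntax. A minimal rendering sketch, assuming jinja2 is available and using illustrative values for METPLUS_TOOL_NAME and metplus_config_fn (the real values are supplied by the vx workflow):

    # Minimal sketch; the rendered values here are illustrative assumptions.
    from jinja2 import Template

    line = ("METPLUS_CONF = {{ '{' ~ METPLUS_TOOL_NAME ~ '_OUTPUT_DIR}' }}"
            "/metplus_final.{{ metplus_config_fn }}")
    print(Template(line).render(METPLUS_TOOL_NAME="POINT_STAT",
                                metplus_config_fn="PointStat_ensprob.conf"))
    # -> METPLUS_CONF = {POINT_STAT_OUTPUT_DIR}/metplus_final.PointStat_ensprob.conf

This reproduces the literal {POINT_STAT_OUTPUT_DIR} reference that the deleted per-obtype files hard-coded.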
+# +{{METPLUS_TOOL_NAME}}_CONFIG_FILE = {PARM_BASE}/met_config/{{MetplusToolName}}Config_wrapped + +{{METPLUS_TOOL_NAME}}_OBS_QUALITY_INC = 0, 1, 2, 3, 9, NA +#{{METPLUS_TOOL_NAME}}_OBS_QUALITY_EXC = + +{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_TIME_INTERP_METHOD = NEAREST +#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_TIME_INTERP_METHOD = + +#{{METPLUS_TOOL_NAME}}_INTERP_VLD_THRESH = +#{{METPLUS_TOOL_NAME}}_INTERP_SHAPE = +{{METPLUS_TOOL_NAME}}_INTERP_TYPE_METHOD = BILIN +{{METPLUS_TOOL_NAME}}_INTERP_TYPE_WIDTH = 2 + +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_FHO = +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_CTC = +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_CTS = +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_MCTC = +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_MCTS = +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_CNT = +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_SL1L2 = +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_SAL1L2 = +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_VL1L2 = +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_VAL1L2 = +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_VCNT = +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_PCT = STAT +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_PSTD = STAT +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_PJC = STAT +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_PRC = STAT +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_ECNT = +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_RPS = +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_ECLV = +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_MPR = +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_ORANK = + +{{METPLUS_TOOL_NAME}}_CLIMO_CDF_BINS = 1 +#{{METPLUS_TOOL_NAME}}_CLIMO_CDF_CENTER_BINS = False +#{{METPLUS_TOOL_NAME}}_CLIMO_CDF_WRITE_BINS = True + +#{{METPLUS_TOOL_NAME}}_HSS_EC_VALUE = + +# +# Observation data time window(s). +# +OBS_WINDOW_BEGIN = -1799 +OBS_WINDOW_END = 1800 +OBS_{{METPLUS_TOOL_NAME}}_WINDOW_BEGIN = {OBS_WINDOW_BEGIN} +OBS_{{METPLUS_TOOL_NAME}}_WINDOW_END = {OBS_WINDOW_END} + +# Optional list of offsets to look for point observation data +{{METPLUS_TOOL_NAME}}_OFFSETS = 0 +# +# Name to identify model (forecast) data in output. +# +# The variable MODEL is recorded in the stat files, and the data in +# these files is then plotted (e.g. using METViewer). Here, we add a +# suffix to MODEL that identifies the data as ensemble-probabilistic. +# This makes it easier to identify each curve. +# +MODEL = {{vx_fcst_model_name}}_ensprob +# +# Name to identify observation data in output. +# +OBTYPE = {{obtype}} +# +# Value to enter under the DESC column in the output stat file. +# +{{METPLUS_TOOL_NAME}}_DESC = NA + +# Regrid to specified grid. Indicate NONE if no regridding, or the grid id +# (e.g. G212) +{{METPLUS_TOOL_NAME}}_REGRID_TO_GRID = NONE +{{METPLUS_TOOL_NAME}}_REGRID_METHOD = BILIN +{{METPLUS_TOOL_NAME}}_REGRID_WIDTH = 2 + +{{METPLUS_TOOL_NAME}}_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE} + +# sets the -obs_valid_beg command line argument (optional) +# not used for this example +#{{METPLUS_TOOL_NAME}}_OBS_VALID_BEG = {valid?fmt=%Y%m%d_%H} + +# sets the -obs_valid_end command line argument (optional) +# not used for this example +#{{METPLUS_TOOL_NAME}}_OBS_VALID_END = {valid?fmt=%Y%m%d_%H} + +# Verification Masking regions +# Indicate which grid and polygon masking region, if applicable +{{METPLUS_TOOL_NAME}}_GRID = + +# List of full path to poly masking files. NOTE: Only short lists of poly +# files work (those that fit on one line), a long list will result in an +# environment variable that is too long, resulting in an error. For long +# lists of poly masking files (i.e. all the mask files in the NCEP_mask +# directory), define these in the METplus {{MetplusToolName}} configuration file. 
+{{METPLUS_TOOL_NAME}}_POLY = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly
+{{METPLUS_TOOL_NAME}}_STATION_ID =
+
+# Message types, if all message types are to be returned, leave this empty,
+# otherwise indicate the message types of interest.
+{{METPLUS_TOOL_NAME}}_MESSAGE_TYPE = {{fieldname_in_met_filedir_names}}
+
+# set to True to run {{MetplusToolName}} once for each name/level combination
+# set to False to run {{MetplusToolName}} once per run time including all fields
+{{METPLUS_TOOL_NAME}}_ONCE_PER_FIELD = False
+#
+# List of forecast and corresponding observation fields to process.
+# Note that the forecast variable name must exactly match the name of a
+# variable in the forecast input file(s).
+#
+{#-
+Import the file containing jinja macros.
+#}
+{%- import metplus_templates_dir ~ '/metplus_macros.jinja' as metplus_macros %}
+
+{#-
+Set the probabilistic threshold to be used for the forecast field. If
+necessary, this can be changed to be an input parameter in the calling
+script instead of a hard-coded value as below.
+#}
+{%- set thresh_fcst_prob = '==0.1' %}
+
+{#-
+Jinja requires certain variables to be defined globally within the template
+before they can be used in if-statements and other scopes (see Jinja scoping
+rules). Define such variables.
+#}
+{%- set indx_level_fcst = '' %}
+{%- set indx_thresh_fcst = '' %}
+{%- set error_msg = '' %}
+{%- set opts_indent = '' %}
+{%- set opts_indent_len = '' %}
+
+{%- set field_fcst = '' %}
+{%- set field_obs = '' %}
+{%- set level_fcst = '' %}
+{%- set level_obs = '' %}
+{%- set thresh_fcst = '' %}
+{%- set thresh_obs = '' %}
+{%- set thresh_fcst_and_or = '' %}
+
+{#-
+Get the set of valid field groups and ensure that the specified input
+field group appears in this list.
+#}
+{%- set valid_field_groups = vx_config_dict.keys()|list %}
+{{- metplus_macros.check_field_group(valid_field_groups, input_field_group) }}
+
+{#-
+Reset the input forecast level so that if it happens to be an accumulation
+(e.g. 'A03'), the leading zeros in front of the hour are stripped out (e.g.
+reset to 'A3').
+#}
+{%- set input_level_fcst = metplus_macros.get_accumulation_no_zero_pad(input_level_fcst) %}
+
+{#-
+Extract from the configuration dictionary the set (which itself is a
+dictionary) of fields, levels, and thresholds corresponding to the input
+field group. Then set the delimiter string that separates forecast and
+observation values in the various items (i.e. dictionary keys and values
+representing field names, levels, and thresholds) in this dictionary.
+#}
+{%- set fields_levels_threshes_cpld = vx_config_dict[input_field_group] %}
+{%- set delim_str = metplus_macros.set_delim_str() %}
+
+{#-
+Loop over the fields and set field names, levels, thresholds, and/or
+options for each field, both for forecasts and for observations, in the
+METplus configuration file.
+#}
+{%- set ns = namespace(var_count = 0) %}
+{#-
+This outer for-loop is included to make this code as similar as possible
+to the one in GridStat_ensprob.conf. There, treat_fcst_as_prob takes on
+both True and False values, although here it only takes on the value
+True (which makes the loop redundant). It is not clear why it does not
+also need to be set to False here; this is being investigated (12/13/2023).
+#} +{%- for treat_fcst_as_prob in [True] %} + + {%- for field_cpld, levels_threshes_cpld in fields_levels_threshes_cpld.items() %} + + {%- if delim_str in field_cpld %} + {%- set field_fcst, field_obs = field_cpld.split(delim_str) %} + {%- else %} + {%- set field_fcst = field_cpld %} + {%- set field_obs = field_cpld %} + {%- endif %} + +{#- +For convenience, create lists of valid forecast and observation levels +for the current field. +#} + {%- set valid_levels_fcst = [] %} + {%- set valid_levels_obs = [] %} + {%- for level_cpld, threshes_cpld in levels_threshes_cpld.items() %} + {%- if delim_str in level_cpld %} + {%- set level_fcst, level_obs = level_cpld.split(delim_str) %} + {%- else %} + {%- set level_fcst = level_cpld %} + {%- set level_obs = level_cpld %} + {%- endif %} + {%- set tmp = valid_levels_fcst.append(level_fcst) %} + {%- set tmp = valid_levels_obs.append(level_obs) %} + {%- endfor %} + +{#- +Make sure that the input forecast level (input_level_fcst) is set to a +valid value. +#} + {%- if (input_level_fcst != 'all') and (input_level_fcst not in valid_levels_fcst) %} + {%- set error_msg = '\n' ~ +'The input forecast level (input_level_fcst) must be set either to \'all\'\n' ~ +'or to one of the elements in the list of valid levels (valid_levels_fcst)\n' ~ +'for the current forecast field (field_fcst). This is not the case:\n' ~ +' field_fcst = ' ~ field_fcst ~ '\n' ~ +' valid_levels_fcst = ' ~ valid_levels_fcst ~ '\n' ~ +' input_level_fcst = ' ~ input_level_fcst ~ '\n' %} + {{metplus_macros.print_err_and_quit(error_msg)}} + {%- endif %} + +{#- +Loop over the (coupled) levels and corresponding lists of thresholds. +Extract from these the level values for forecasts and observations and +use them to set the forecast and observation field names, levels, +thresholds, and/or options in the METplus configuration file. +#} + {%- for level_cpld, threshes_cpld in levels_threshes_cpld.items() %} + + {%- if delim_str in level_cpld %} + {%- set level_fcst, level_obs = level_cpld.split(delim_str) %} + {%- else %} + {%- set level_fcst = level_cpld %} + {%- set level_obs = level_cpld %} + {%- endif %} + + {%- set valid_threshes_fcst = [] %} + {%- set valid_threshes_obs = [] %} + {%- for thresh_cpld in threshes_cpld %} + {%- if delim_str in thresh_cpld %} + {%- set thresh_fcst, thresh_obs = thresh_cpld.split(delim_str) %} + {%- else %} + {%- set thresh_fcst = thresh_cpld %} + {%- set thresh_obs = thresh_cpld %} + {%- endif %} + {%- set tmp = valid_threshes_fcst.append(thresh_fcst) %} + {%- set tmp = valid_threshes_obs.append(thresh_obs) %} + {%- endfor %} + + {%- if (input_level_fcst == 'all') or (input_level_fcst == level_fcst) %} + + {%- for thresh_fcst in valid_threshes_fcst %} + + {%- if (input_thresh_fcst == 'all') or (input_thresh_fcst == thresh_fcst) %} +{#- +Increment the METplus variable counter. +#} + {%- set ns.var_count = ns.var_count+1 %} + +{#- +Set forecast field name. +#} + {%- set thresh_fcst_and_or = thresh_fcst|replace("&&", ".and.") %} + {%- set thresh_fcst_and_or = thresh_fcst_and_or|replace("||", ".or.") %} +FCST_VAR{{ns.var_count}}_NAME = {{field_fcst}}_{{level_fcst}}_ENS_FREQ_{{thresh_fcst_and_or}} + +{#- +Set forecast field level. +#} +FCST_VAR{{ns.var_count}}_LEVELS = {{level_fcst}} + +{#- +Set forecast field threshold. 
+Note that since the forecast field being read in is actually a field of
+probabilities, we set the forecast threshold to a probabilistic one
+(thresh_fcst_prob) and not to the physical threshold (thresh_fcst) in
+the dictionary of forecast field names, levels, and thresholds that we
+are looping over.
+#}
+FCST_VAR{{ns.var_count}}_THRESH = {{thresh_fcst_prob}}
+
+{#-
+Set forecast field options.
+#}
+      {%- set opts_indent_len = 20 %}
+      {%- if (ns.var_count > 9) and (ns.var_count <= 99) %}
+        {%- set opts_indent_len = opts_indent_len + 1 %}
+      {%- elif (ns.var_count > 99) and (ns.var_count <= 999) %}
+        {%- set opts_indent_len = opts_indent_len + 2 %}
+      {%- elif (ns.var_count > 999) %}
+        {%- set opts_indent_len = opts_indent_len + 3 %}
+      {%- endif %}
+      {%- set opts_indent = ' '*opts_indent_len %}
+
+      {%- if input_field_group == 'SFC' %}
+
+        {%- if field_fcst == 'HGT' %}
+FCST_VAR{{ns.var_count}}_OPTIONS = desc = "CEILING";
+        {%- elif field_fcst == 'VIS' %}
+FCST_VAR{{ns.var_count}}_OPTIONS = interp = { type = [ { method = NEAREST; width = 1; } ]; }
+        {%- endif %}
+
+      {%- endif %}
+
+{#-
+Set observation field name.
+#}
+OBS_VAR{{ns.var_count}}_NAME = {{field_obs}}
+
+{#-
+Set observation field level.
+#}
+      {%- set indx_level_fcst = valid_levels_fcst.index(level_fcst) %}
+      {%- set level_obs = valid_levels_obs[indx_level_fcst] %}
+OBS_VAR{{ns.var_count}}_LEVELS = {{level_obs}}
+
+{#-
+Set observation field threshold. Note that no observation thresholds
+are included in the METplus configuration file if input_thresh_fcst is
+set to 'none'.
+#}
+      {%- if (input_thresh_fcst != 'none') %}
+{#-
+Set the observation threshold. This is given by the element in the list
+of valid observation thresholds that has the same index as that of the
+current forecast threshold (thresh_fcst) in the list of valid forecast
+thresholds.
+#}
+      {%- set indx_thresh_fcst = valid_threshes_fcst.index(thresh_fcst) %}
+      {%- set thresh_obs = valid_threshes_obs[indx_thresh_fcst] %}
+OBS_VAR{{ns.var_count}}_THRESH = {{thresh_obs}}
+      {%- endif %}
+
+{#-
+Set observation field options.
+#}
+      {%- set opts_indent_len = opts_indent_len - 1 %}
+      {%- set opts_indent = ' '*opts_indent_len %}
+
+      {%- if input_field_group == 'SFC' %}
+
+        {%- if field_obs == 'CEILING' %}
+OBS_VAR{{ns.var_count}}_OPTIONS = GRIB_lvl_typ = 215;
+{{opts_indent}}interp = { type = [ { method = NEAREST; width = 1; } ]; }
+        {%- elif field_obs == 'VIS' %}
+OBS_VAR{{ns.var_count}}_OPTIONS = interp = { type = [ { method = NEAREST; width = 1; } ]; }
+        {%- endif %}
+
+      {%- elif input_field_group == 'UPA' %}
+
+        {%- if field_obs == 'CAPE' %}
+OBS_VAR{{ns.var_count}}_OPTIONS = cnt_thresh = [ >0 ];
+{{opts_indent}}cnt_logic = UNION;
+        {%- elif field_obs == 'PBL' %}
+OBS_VAR{{ns.var_count}}_OPTIONS = desc = "TKE";
+        {%- endif %}
+
+      {%- endif %}
+
+{#-
+Print out a newline to separate the settings for the current field (both
+forecast and observation settings) from those for the next field.
+#}
+  {{- '\n' }}
+
+      {%- endif %}
+    {%- endfor %}
+
+  {%- endif %}
+
+  {%- endfor %}
+  {%- endfor %}
+{%- endfor %}
+#
+# Forecast data description variables
+#
+FCST_IS_PROB = True
+FCST_PROB_IN_GRIB_PDS = False
+
+# End of [config] section and start of [dir] section.
+[dir]
+#
+# Directory containing observation input to {{MetplusToolName}}.
+#
+OBS_{{METPLUS_TOOL_NAME}}_INPUT_DIR = {{obs_input_dir}}
+#
+# Directory containing forecast input to {{MetplusToolName}}.
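One non-obvious step in the Jinja block earlier in this template: forecast and observation thresholds are paired positionally, by looking up the index of the current forecast threshold and taking the observation threshold at the same index. A plain-Python sketch of that logic (illustrative only; the real pairing happens inside the template), using the coupled CRAIN%%PRWE threshold from vx_config_det.yaml further below as sample data:

    # Positional fcst->obs threshold pairing, mirroring the Jinja logic.
    delim_str = "%%"
    threshes_cpld = ["ge1.0%%ge161&&le163"]  # sample coupled threshold

    valid_threshes_fcst, valid_threshes_obs = [], []
    for thresh_cpld in threshes_cpld:
        if delim_str in thresh_cpld:
            thresh_fcst, thresh_obs = thresh_cpld.split(delim_str)
        else:
            thresh_fcst = thresh_obs = thresh_cpld
        valid_threshes_fcst.append(thresh_fcst)
        valid_threshes_obs.append(thresh_obs)

    indx_thresh_fcst = valid_threshes_fcst.index("ge1.0")
    print(valid_threshes_obs[indx_thresh_fcst])  # -> ge161&&le163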
+# +FCST_{{METPLUS_TOOL_NAME}}_INPUT_DIR = {{fcst_input_dir}} +# +# Directory containing climatology mean input to {{MetplusToolName}}. Not used in +# this example. +# +{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_INPUT_DIR = +# +# Directory containing climatology mean input to {{MetplusToolName}}. Not used in +# this example. +# +{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_INPUT_DIR = +# +# Directory in which to write output from {{MetplusToolName}}. +# +# OUTPUT_BASE apparently has to be set to something; it cannot be left +# to its default value. But it is not explicitly used elsewhere in this +# configuration file. +# +OUTPUT_BASE = {{output_base}} +{{METPLUS_TOOL_NAME}}_OUTPUT_DIR = {{output_dir}} +# +# Directory for staging data. +# +STAGING_DIR = {{staging_dir}} + +# End of [dir] section and start of [filename_templates] section. +[filename_templates] +# +# Template for observation input to {{MetplusToolName}} relative to +# OBS_{{METPLUS_TOOL_NAME}}_INPUT_DIR. +# +OBS_{{METPLUS_TOOL_NAME}}_INPUT_TEMPLATE = {{obs_input_fn_template}} +# +# Template for forecast input to {{MetplusToolName}} relative to +# FCST_{{METPLUS_TOOL_NAME}}_INPUT_DIR. +# +FCST_{{METPLUS_TOOL_NAME}}_INPUT_TEMPLATE = {{fcst_input_fn_template}} +# +# Template for output from {{MetplusToolName}} relative to {{METPLUS_TOOL_NAME}}_OUTPUT_DIR. +# +{{METPLUS_TOOL_NAME}}_OUTPUT_TEMPLATE = +# +# Template for climatology input to {{MetplusToolName}} relative to +# {{METPLUS_TOOL_NAME}}_CLIMO_MEAN_INPUT_DIR. Not used in this example. +# +{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_INPUT_TEMPLATE = +# +# Template for climatology input to {{MetplusToolName}} relative to +# {{METPLUS_TOOL_NAME}}_CLIMO_STDEV_INPUT_DIR. Not used in this example. +# +{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_INPUT_TEMPLATE = diff --git a/parm/metplus/PointStat_ensprob_ADPSFC.conf b/parm/metplus/PointStat_ensprob_ADPSFC.conf deleted file mode 100644 index c9333b2c81..0000000000 --- a/parm/metplus/PointStat_ensprob_ADPSFC.conf +++ /dev/null @@ -1,415 +0,0 @@ -# Ensemble probabilistic PointStat METplus Configuration - -[config] - -# List of applications (tools) to run. -PROCESS_LIST = PointStat - -# time looping - options are INIT, VALID, RETRO, and REALTIME -# If set to INIT or RETRO: -# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set -# If set to VALID or REALTIME: -# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set -LOOP_BY = INIT - -# Format of INIT_BEG and INIT_END using % items -# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. -# see www.strftime.org for more information -# %Y%m%d%H expands to YYYYMMDDHH -INIT_TIME_FMT = %Y%m%d%H - -# Start time for METplus run - must match INIT_TIME_FMT -INIT_BEG = {{cdate}} - -# End time for METplus run - must match INIT_TIME_FMT -INIT_END = {{cdate}} - -# Increment between METplus runs (in seconds if no units are specified). -# Must be >= 60 seconds. -INIT_INCREMENT = 3600 - -# List of forecast leads to process for each run time (init or valid) -# In hours if units are not specified -# If unset, defaults to 0 (don't loop through forecast leads) -LEAD_SEQ = {{fhr_list}} -# -# Order of loops to process data - Options are times, processes -# Not relevant if only one item is in the PROCESS_LIST -# times = run all wrappers in the PROCESS_LIST for a single run time, then -# increment the run time and run all wrappers again until all times have -# been evaluated. 
-# processes = run the first wrapper in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST until all -# wrappers have been run -# -LOOP_ORDER = times -# -# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. -# -LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} -# -# Specify the name of the METplus log file. -# -LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} -# -# Specify the location and name of the final METplus conf file. -# -METPLUS_CONF = {POINT_STAT_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}} -# -# Location of MET configuration file to pass to PointStat. -# -# References PARM_BASE, which is the location of the parm directory -# corresponding to the ush directory of the run_metplus.py script that -# is called or the value of the environment variable METPLUS_PARM_BASE -# if set. -# -POINT_STAT_CONFIG_FILE = {PARM_BASE}/met_config/PointStatConfig_wrapped - -POINT_STAT_OBS_QUALITY_INC = 0, 1, 2, 3, 9, NA -#POINT_STAT_OBS_QUALITY_EXC = - -POINT_STAT_CLIMO_MEAN_TIME_INTERP_METHOD = NEAREST -#POINT_STAT_CLIMO_STDEV_TIME_INTERP_METHOD = - -#POINT_STAT_INTERP_VLD_THRESH = -#POINT_STAT_INTERP_SHAPE = -POINT_STAT_INTERP_TYPE_METHOD = BILIN -POINT_STAT_INTERP_TYPE_WIDTH = 2 - -#POINT_STAT_OUTPUT_FLAG_FHO = -#POINT_STAT_OUTPUT_FLAG_CTC = -#POINT_STAT_OUTPUT_FLAG_CTS = -#POINT_STAT_OUTPUT_FLAG_MCTC = -#POINT_STAT_OUTPUT_FLAG_MCTS = -#POINT_STAT_OUTPUT_FLAG_CNT = -#POINT_STAT_OUTPUT_FLAG_SL1L2 = -#POINT_STAT_OUTPUT_FLAG_SAL1L2 = -#POINT_STAT_OUTPUT_FLAG_VL1L2 = -#POINT_STAT_OUTPUT_FLAG_VAL1L2 = -#POINT_STAT_OUTPUT_FLAG_VCNT = -POINT_STAT_OUTPUT_FLAG_PCT = STAT -POINT_STAT_OUTPUT_FLAG_PSTD = STAT -POINT_STAT_OUTPUT_FLAG_PJC = STAT -POINT_STAT_OUTPUT_FLAG_PRC = STAT -#POINT_STAT_OUTPUT_FLAG_ECNT = -#POINT_STAT_OUTPUT_FLAG_RPS = -#POINT_STAT_OUTPUT_FLAG_ECLV = -#POINT_STAT_OUTPUT_FLAG_MPR = -#POINT_STAT_OUTPUT_FLAG_ORANK = - -POINT_STAT_CLIMO_CDF_BINS = 1 -#POINT_STAT_CLIMO_CDF_CENTER_BINS = False -#POINT_STAT_CLIMO_CDF_WRITE_BINS = True - -#POINT_STAT_HSS_EC_VALUE = - -# -# Observation data time window(s). -# -OBS_WINDOW_BEGIN = -1799 -OBS_WINDOW_END = 1800 -OBS_POINT_STAT_WINDOW_BEGIN = {OBS_WINDOW_BEGIN} -OBS_POINT_STAT_WINDOW_END = {OBS_WINDOW_END} - -# Optional list of offsets to look for point observation data -POINT_STAT_OFFSETS = 0 -# -# Name to identify model (forecast) data in output. -# -# The variable MODEL is recorded in the stat files, and the data in -# these files is then plotted (e.g. using METViewer). Here, we add a -# suffix to MODEL that identifies the data as ensemble-probabilistic. -# This makes it easier to identify each curve. -# -MODEL = {{vx_fcst_model_name}}_ensprob -# -# Name to identify observation data in output. -# -OBTYPE = {{obtype}} -# -# Value to enter under the DESC column in the output stat file. -# -POINT_STAT_DESC = NA - -# Regrid to specified grid. Indicate NONE if no regridding, or the grid id -# (e.g. 
G212) -POINT_STAT_REGRID_TO_GRID = NONE -POINT_STAT_REGRID_METHOD = BILIN -POINT_STAT_REGRID_WIDTH = 2 - -POINT_STAT_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE} - -# sets the -obs_valid_beg command line argument (optional) -# not used for this example -#POINT_STAT_OBS_VALID_BEG = {valid?fmt=%Y%m%d_%H} - -# sets the -obs_valid_end command line argument (optional) -# not used for this example -#POINT_STAT_OBS_VALID_END = {valid?fmt=%Y%m%d_%H} - -# Verification Masking regions -# Indicate which grid and polygon masking region, if applicable -POINT_STAT_GRID = - -# List of full path to poly masking files. NOTE: Only short lists of poly -# files work (those that fit on one line), a long list will result in an -# environment variable that is too long, resulting in an error. For long -# lists of poly masking files (i.e. all the mask files in the NCEP_mask -# directory), define these in the METplus PointStat configuration file. -POINT_STAT_POLY = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly -POINT_STAT_STATION_ID = - -# Message types, if all message types are to be returned, leave this empty, -# otherwise indicate the message types of interest. -POINT_STAT_MESSAGE_TYPE = {{fieldname_in_met_filedir_names}} - -# set to True to run PointStat once for each name/level combination -# set to False to run PointStat once per run time including all fields -POINT_STAT_ONCE_PER_FIELD = False -# -# List of forecast and corresponding observation fields to process. -# Note that the forecast variable name must exactly match the name of a -# variable in the forecast input file(s). -# -FCST_VAR1_NAME = TMP_Z2_ENS_FREQ_ge268 -FCST_VAR1_LEVELS = Z2 -FCST_VAR1_THRESH = ==0.1 -OBS_VAR1_NAME = TMP -OBS_VAR1_LEVELS = Z2 -OBS_VAR1_THRESH = ge268 - -FCST_VAR2_NAME = TMP_Z2_ENS_FREQ_ge273 -FCST_VAR2_LEVELS = Z2 -FCST_VAR2_THRESH = ==0.1 -OBS_VAR2_NAME = TMP -OBS_VAR2_LEVELS = Z2 -OBS_VAR2_THRESH = ge273 - -FCST_VAR3_NAME = TMP_Z2_ENS_FREQ_ge278 -FCST_VAR3_LEVELS = Z2 -FCST_VAR3_THRESH = ==0.1 -OBS_VAR3_NAME = TMP -OBS_VAR3_LEVELS = Z2 -OBS_VAR3_THRESH = ge278 - -FCST_VAR4_NAME = TMP_Z2_ENS_FREQ_ge293 -FCST_VAR4_LEVELS = Z2 -FCST_VAR4_THRESH = ==0.1 -OBS_VAR4_NAME = TMP -OBS_VAR4_LEVELS = Z2 -OBS_VAR4_THRESH = ge293 - -FCST_VAR5_NAME = TMP_Z2_ENS_FREQ_ge298 -FCST_VAR5_LEVELS = Z2 -FCST_VAR5_THRESH = ==0.1 -OBS_VAR5_NAME = TMP -OBS_VAR5_LEVELS = Z2 -OBS_VAR5_THRESH = ge298 - -FCST_VAR6_NAME = TMP_Z2_ENS_FREQ_ge303 -FCST_VAR6_LEVELS = Z2 -FCST_VAR6_THRESH = ==0.1 -OBS_VAR6_NAME = TMP -OBS_VAR6_LEVELS = Z2 -OBS_VAR6_THRESH = ge303 - -FCST_VAR7_NAME = DPT_Z2_ENS_FREQ_ge263 -FCST_VAR7_LEVELS = Z2 -FCST_VAR7_THRESH = ==0.1 -OBS_VAR7_NAME = DPT -OBS_VAR7_LEVELS = Z2 -OBS_VAR7_THRESH = ge263 - -FCST_VAR8_NAME = DPT_Z2_ENS_FREQ_ge268 -FCST_VAR8_LEVELS = Z2 -FCST_VAR8_THRESH = ==0.1 -OBS_VAR8_NAME = DPT -OBS_VAR8_LEVELS = Z2 -OBS_VAR8_THRESH = ge268 - -FCST_VAR9_NAME = DPT_Z2_ENS_FREQ_ge273 -FCST_VAR9_LEVELS = Z2 -FCST_VAR9_THRESH = ==0.1 -OBS_VAR9_NAME = DPT -OBS_VAR9_LEVELS = Z2 -OBS_VAR9_THRESH = ge273 - -FCST_VAR10_NAME = DPT_Z2_ENS_FREQ_ge288 -FCST_VAR10_LEVELS = Z2 -FCST_VAR10_THRESH = ==0.1 -OBS_VAR10_NAME = DPT -OBS_VAR10_LEVELS = Z2 -OBS_VAR10_THRESH = ge288 - -FCST_VAR11_NAME = DPT_Z2_ENS_FREQ_ge293 -FCST_VAR11_LEVELS = Z2 -FCST_VAR11_THRESH = ==0.1 -OBS_VAR11_NAME = DPT -OBS_VAR11_LEVELS = Z2 -OBS_VAR11_THRESH = ge293 - -FCST_VAR12_NAME = DPT_Z2_ENS_FREQ_ge298 -FCST_VAR12_LEVELS = Z2 -FCST_VAR12_THRESH = ==0.1 -OBS_VAR12_NAME = DPT -OBS_VAR12_LEVELS = Z2 -OBS_VAR12_THRESH = ge298 - -FCST_VAR13_NAME = 
WIND_Z10_ENS_FREQ_ge5 -FCST_VAR13_LEVELS = Z10 -FCST_VAR13_THRESH = ==0.1 -OBS_VAR13_NAME = WIND -OBS_VAR13_LEVELS = Z10 -OBS_VAR13_THRESH = ge5 - -FCST_VAR14_NAME = WIND_Z10_ENS_FREQ_ge10 -FCST_VAR14_LEVELS = Z10 -FCST_VAR14_THRESH = ==0.1 -OBS_VAR14_NAME = WIND -OBS_VAR14_LEVELS = Z10 -OBS_VAR14_THRESH = ge10 - -FCST_VAR15_NAME = WIND_Z10_ENS_FREQ_ge15 -FCST_VAR15_LEVELS = Z10 -FCST_VAR15_THRESH = ==0.1 -OBS_VAR15_NAME = WIND -OBS_VAR15_LEVELS = Z10 -OBS_VAR15_THRESH = ge15 - -FCST_VAR16_NAME = TCDC_L0_ENS_FREQ_lt25 -FCST_VAR16_LEVELS = L0 -FCST_VAR16_THRESH = ==0.1 -OBS_VAR16_NAME = TCDC -OBS_VAR16_LEVELS = L0 -OBS_VAR16_THRESH = lt25 - -FCST_VAR17_NAME = TCDC_L0_ENS_FREQ_gt75 -FCST_VAR17_LEVELS = L0 -FCST_VAR17_THRESH = ==0.1 -OBS_VAR17_NAME = TCDC -OBS_VAR17_LEVELS = L0 -OBS_VAR17_THRESH = gt75 - -FCST_VAR18_NAME = VIS_L0_ENS_FREQ_lt1609 -FCST_VAR18_LEVELS = L0 -FCST_VAR18_THRESH = ==0.1 -FCST_VAR18_OPTIONS = interp = { type = [ { method = NEAREST; width = 1; } ]; } -OBS_VAR18_NAME = VIS -OBS_VAR18_LEVELS = L0 -OBS_VAR18_THRESH = lt1609 -OBS_VAR18_OPTIONS = interp = { type = [ { method = NEAREST; width = 1; } ]; } - -FCST_VAR19_NAME = VIS_L0_ENS_FREQ_lt8045 -FCST_VAR19_LEVELS = L0 -FCST_VAR19_THRESH = ==0.1 -FCST_VAR19_OPTIONS = interp = { type = [ { method = NEAREST; width = 1; } ]; } -OBS_VAR19_NAME = VIS -OBS_VAR19_LEVELS = L0 -OBS_VAR19_THRESH = lt8045 -OBS_VAR19_OPTIONS = interp = { type = [ { method = NEAREST; width = 1; } ]; } - -FCST_VAR20_NAME = VIS_L0_ENS_FREQ_ge8045 -FCST_VAR20_LEVELS = L0 -FCST_VAR20_THRESH = ==0.1 -FCST_VAR20_OPTIONS = interp = { type = [ { method = NEAREST; width = 1; } ]; } -OBS_VAR20_NAME = VIS -OBS_VAR20_LEVELS = L0 -OBS_VAR20_THRESH = ge8045 -OBS_VAR20_OPTIONS = interp = { type = [ { method = NEAREST; width = 1; } ]; } - -FCST_VAR21_NAME = HGT_L0_ENS_FREQ_lt152 -FCST_VAR21_LEVELS = L0 -FCST_VAR21_THRESH = ==0.1 -FCST_VAR21_OPTIONS = desc = "CEILING"; -OBS_VAR21_NAME = CEILING -OBS_VAR21_LEVELS = L0 -OBS_VAR21_THRESH = lt152 -OBS_VAR21_OPTIONS = GRIB_lvl_typ = 215; - interp = { type = [ { method = NEAREST; width = 1; } ]; } - -FCST_VAR22_NAME = HGT_L0_ENS_FREQ_lt1520 -FCST_VAR22_LEVELS = L0 -FCST_VAR22_THRESH = ==0.1 -FCST_VAR22_OPTIONS = desc = "CEILING"; -OBS_VAR22_NAME = CEILING -OBS_VAR22_LEVELS = L0 -OBS_VAR22_THRESH = lt1520 -OBS_VAR22_OPTIONS = GRIB_lvl_typ = 215; - interp = { type = [ { method = NEAREST; width = 1; } ]; } - -FCST_VAR23_NAME = HGT_L0_ENS_FREQ_ge914 -FCST_VAR23_LEVELS = L0 -FCST_VAR23_THRESH = ==0.1 -FCST_VAR23_OPTIONS = desc = "CEILING"; -OBS_VAR23_NAME = CEILING -OBS_VAR23_LEVELS = L0 -OBS_VAR23_THRESH = ge914 -OBS_VAR23_OPTIONS = GRIB_lvl_typ = 215; - interp = { type = [ { method = NEAREST; width = 1; } ]; } - -# -# Forecast data description variables -# -FCST_IS_PROB = True -FCST_PROB_IN_GRIB_PDS = False - -# End of [config] section and start of [dir] section. -[dir] -# -# Directory containing observation input to PointStat. -# -OBS_POINT_STAT_INPUT_DIR = {{obs_input_dir}} -# -# Directory containing forecast input to PointStat. -# -FCST_POINT_STAT_INPUT_DIR = {{fcst_input_dir}} -# -# Directory containing climatology mean input to PointStat. Not used in -# this example. -# -POINT_STAT_CLIMO_MEAN_INPUT_DIR = -# -# Directory containing climatology mean input to PointStat. Not used in -# this example. -# -POINT_STAT_CLIMO_STDEV_INPUT_DIR = -# -# Directory in which to write output from PointStat. -# -# OUTPUT_BASE apparently has to be set to something; it cannot be left -# to its default value. 
But it is not explicitly used elsewhere in this -# configuration file. -# -OUTPUT_BASE = {{output_base}} -POINT_STAT_OUTPUT_DIR = {{output_dir}} -# -# Directory for staging data. -# -STAGING_DIR = {{staging_dir}} - -# End of [dir] section and start of [filename_templates] section. -[filename_templates] -# -# Template for observation input to PointStat relative to -# OBS_POINT_STAT_INPUT_DIR. -# -OBS_POINT_STAT_INPUT_TEMPLATE = {{obs_input_fn_template}} -# -# Template for forecast input to PointStat relative to -# FCST_POINT_STAT_INPUT_DIR. -# -FCST_POINT_STAT_INPUT_TEMPLATE = {{fcst_input_fn_template}} -# -# Template for output from PointStat relative to POINT_STAT_OUTPUT_DIR. -# -POINT_STAT_OUTPUT_TEMPLATE = -# -# Template for climatology input to PointStat relative to -# POINT_STAT_CLIMO_MEAN_INPUT_DIR. Not used in this example. -# -POINT_STAT_CLIMO_MEAN_INPUT_TEMPLATE = -# -# Template for climatology input to PointStat relative to -# POINT_STAT_CLIMO_STDEV_INPUT_DIR. Not used in this example. -# -POINT_STAT_CLIMO_STDEV_INPUT_TEMPLATE = diff --git a/parm/metplus/PointStat_ensprob_ADPUPA.conf b/parm/metplus/PointStat_ensprob_ADPUPA.conf deleted file mode 100644 index eab0270c69..0000000000 --- a/parm/metplus/PointStat_ensprob_ADPUPA.conf +++ /dev/null @@ -1,523 +0,0 @@ -# Ensemble probabilistic PointStat METplus Configuration - -[config] - -# List of applications (tools) to run. -PROCESS_LIST = PointStat - -# time looping - options are INIT, VALID, RETRO, and REALTIME -# If set to INIT or RETRO: -# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set -# If set to VALID or REALTIME: -# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set -LOOP_BY = INIT - -# Format of INIT_BEG and INIT_END using % items -# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. -# see www.strftime.org for more information -# %Y%m%d%H expands to YYYYMMDDHH -INIT_TIME_FMT = %Y%m%d%H - -# Start time for METplus run - must match INIT_TIME_FMT -INIT_BEG = {{cdate}} - -# End time for METplus run - must match INIT_TIME_FMT -INIT_END = {{cdate}} - -# Increment between METplus runs (in seconds if no units are specified). -# Must be >= 60 seconds. -INIT_INCREMENT = 3600 - -# List of forecast leads to process for each run time (init or valid) -# In hours if units are not specified -# If unset, defaults to 0 (don't loop through forecast leads) -LEAD_SEQ = {{fhr_list}} -# -# Order of loops to process data - Options are times, processes -# Not relevant if only one item is in the PROCESS_LIST -# times = run all wrappers in the PROCESS_LIST for a single run time, then -# increment the run time and run all wrappers again until all times have -# been evaluated. -# processes = run the first wrapper in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST until all -# wrappers have been run -# -LOOP_ORDER = times -# -# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. -# -LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} -# -# Specify the name of the METplus log file. -# -LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} -# -# Specify the location and name of the final METplus conf file. -# -METPLUS_CONF = {POINT_STAT_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}} -# -# Location of MET configuration file to pass to PointStat. 
-# -# References PARM_BASE, which is the location of the parm directory -# corresponding to the ush directory of the run_metplus.py script that -# is called or the value of the environment variable METPLUS_PARM_BASE -# if set. -# -POINT_STAT_CONFIG_FILE = {PARM_BASE}/met_config/PointStatConfig_wrapped - -POINT_STAT_OBS_QUALITY_INC = 0, 1, 2, 3, 9, NA -#POINT_STAT_OBS_QUALITY_EXC = - -POINT_STAT_CLIMO_MEAN_TIME_INTERP_METHOD = NEAREST -#POINT_STAT_CLIMO_STDEV_TIME_INTERP_METHOD = - -#POINT_STAT_INTERP_VLD_THRESH = -#POINT_STAT_INTERP_SHAPE = -POINT_STAT_INTERP_TYPE_METHOD = BILIN -POINT_STAT_INTERP_TYPE_WIDTH = 2 - -#POINT_STAT_OUTPUT_FLAG_FHO = -#POINT_STAT_OUTPUT_FLAG_CTC = -#POINT_STAT_OUTPUT_FLAG_CTS = -#POINT_STAT_OUTPUT_FLAG_MCTC = -#POINT_STAT_OUTPUT_FLAG_MCTS = -#POINT_STAT_OUTPUT_FLAG_CNT = -#POINT_STAT_OUTPUT_FLAG_SL1L2 = -#POINT_STAT_OUTPUT_FLAG_SAL1L2 = -#POINT_STAT_OUTPUT_FLAG_VL1L2 = -#POINT_STAT_OUTPUT_FLAG_VAL1L2 = -#POINT_STAT_OUTPUT_FLAG_VCNT = -POINT_STAT_OUTPUT_FLAG_PCT = STAT -POINT_STAT_OUTPUT_FLAG_PSTD = STAT -POINT_STAT_OUTPUT_FLAG_PJC = STAT -POINT_STAT_OUTPUT_FLAG_PRC = STAT -#POINT_STAT_OUTPUT_FLAG_ECNT = -#POINT_STAT_OUTPUT_FLAG_RPS = -#POINT_STAT_OUTPUT_FLAG_ECLV = -#POINT_STAT_OUTPUT_FLAG_MPR = -#POINT_STAT_OUTPUT_FLAG_ORANK = - -POINT_STAT_CLIMO_CDF_BINS = 1 -#POINT_STAT_CLIMO_CDF_CENTER_BINS = False -#POINT_STAT_CLIMO_CDF_WRITE_BINS = True - -#POINT_STAT_HSS_EC_VALUE = - -# -# Observation data time window(s). -# -OBS_WINDOW_BEGIN = -1799 -OBS_WINDOW_END = 1800 -OBS_POINT_STAT_WINDOW_BEGIN = {OBS_WINDOW_BEGIN} -OBS_POINT_STAT_WINDOW_END = {OBS_WINDOW_END} - -# Optional list of offsets to look for point observation data -POINT_STAT_OFFSETS = 0 -# -# Name to identify model (forecast) data in output. -# -# The variable MODEL is recorded in the stat files, and the data in -# these files is then plotted (e.g. using METViewer). Here, we add a -# suffix to MODEL that identifies the data as ensemble-probabilistic. -# This makes it easier to identify each curve. -# -MODEL = {{vx_fcst_model_name}}_ensprob -# -# Name to identify observation data in output. -# -OBTYPE = {{obtype}} -# -# Value to enter under the DESC column in the output stat file. -# -POINT_STAT_DESC = NA - -# Regrid to specified grid. Indicate NONE if no regridding, or the grid id -# (e.g. G212) -POINT_STAT_REGRID_TO_GRID = NONE -POINT_STAT_REGRID_METHOD = BILIN -POINT_STAT_REGRID_WIDTH = 2 - -POINT_STAT_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE} - -# sets the -obs_valid_beg command line argument (optional) -# not used for this example -#POINT_STAT_OBS_VALID_BEG = {valid?fmt=%Y%m%d_%H} - -# sets the -obs_valid_end command line argument (optional) -# not used for this example -#POINT_STAT_OBS_VALID_END = {valid?fmt=%Y%m%d_%H} - -# Verification Masking regions -# Indicate which grid and polygon masking region, if applicable -POINT_STAT_GRID = - -# List of full path to poly masking files. NOTE: Only short lists of poly -# files work (those that fit on one line), a long list will result in an -# environment variable that is too long, resulting in an error. For long -# lists of poly masking files (i.e. all the mask files in the NCEP_mask -# directory), define these in the METplus PointStat configuration file. -POINT_STAT_POLY = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly -POINT_STAT_STATION_ID = - -# Message types, if all message types are to be returned, leave this empty, -# otherwise indicate the message types of interest. 
-POINT_STAT_MESSAGE_TYPE = {{fieldname_in_met_filedir_names}} - -# set to True to run PointStat once for each name/level combination -# set to False to run PointStat once per run time including all fields -POINT_STAT_ONCE_PER_FIELD = False -# -# List of forecast and corresponding observation fields to process. -# Note that the forecast variable name must exactly match the name of a -# variable in the forecast input file(s). -# -FCST_VAR1_NAME = TMP_P850_ENS_FREQ_ge288 -FCST_VAR1_LEVELS = P850 -FCST_VAR1_THRESH = ==0.1 -OBS_VAR1_NAME = TMP -OBS_VAR1_LEVELS = P850 -OBS_VAR1_THRESH = ge288 - -FCST_VAR2_NAME = TMP_P850_ENS_FREQ_ge293 -FCST_VAR2_LEVELS = P850 -FCST_VAR2_THRESH = ==0.1 -OBS_VAR2_NAME = TMP -OBS_VAR2_LEVELS = P850 -OBS_VAR2_THRESH = ge293 - -FCST_VAR3_NAME = TMP_P850_ENS_FREQ_ge298 -FCST_VAR3_LEVELS = P850 -FCST_VAR3_THRESH = ==0.1 -OBS_VAR3_NAME = TMP -OBS_VAR3_LEVELS = P850 -OBS_VAR3_THRESH = ge298 - -FCST_VAR4_NAME = TMP_P700_ENS_FREQ_ge273 -FCST_VAR4_LEVELS = P700 -FCST_VAR4_THRESH = ==0.1 -OBS_VAR4_NAME = TMP -OBS_VAR4_LEVELS = P700 -OBS_VAR4_THRESH = ge273 - -FCST_VAR5_NAME = TMP_P700_ENS_FREQ_ge278 -FCST_VAR5_LEVELS = P700 -FCST_VAR5_THRESH = ==0.1 -OBS_VAR5_NAME = TMP -OBS_VAR5_LEVELS = P700 -OBS_VAR5_THRESH = ge278 - -FCST_VAR6_NAME = TMP_P700_ENS_FREQ_ge283 -FCST_VAR6_LEVELS = P700 -FCST_VAR6_THRESH = ==0.1 -OBS_VAR6_NAME = TMP -OBS_VAR6_LEVELS = P700 -OBS_VAR6_THRESH = ge283 - -FCST_VAR7_NAME = TMP_P500_ENS_FREQ_ge258 -FCST_VAR7_LEVELS = P500 -FCST_VAR7_THRESH = ==0.1 -OBS_VAR7_NAME = TMP -OBS_VAR7_LEVELS = P500 -OBS_VAR7_THRESH = ge258 - -FCST_VAR8_NAME = TMP_P500_ENS_FREQ_ge263 -FCST_VAR8_LEVELS = P500 -FCST_VAR8_THRESH = ==0.1 -OBS_VAR8_NAME = TMP -OBS_VAR8_LEVELS = P500 -OBS_VAR8_THRESH = ge263 - -FCST_VAR9_NAME = TMP_P500_ENS_FREQ_ge268 -FCST_VAR9_LEVELS = P500 -FCST_VAR9_THRESH = ==0.1 -OBS_VAR9_NAME = TMP -OBS_VAR9_LEVELS = P500 -OBS_VAR9_THRESH = ge268 - -FCST_VAR10_NAME = DPT_P850_ENS_FREQ_ge273 -FCST_VAR10_LEVELS = P850 -FCST_VAR10_THRESH = ==0.1 -OBS_VAR10_NAME = DPT -OBS_VAR10_LEVELS = P850 -OBS_VAR10_THRESH = ge273 - -FCST_VAR11_NAME = DPT_P850_ENS_FREQ_ge278 -FCST_VAR11_LEVELS = P850 -FCST_VAR11_THRESH = ==0.1 -OBS_VAR11_NAME = DPT -OBS_VAR11_LEVELS = P850 -OBS_VAR11_THRESH = ge278 - -FCST_VAR12_NAME = DPT_P850_ENS_FREQ_ge283 -FCST_VAR12_LEVELS = P850 -FCST_VAR12_THRESH = ==0.1 -OBS_VAR12_NAME = DPT -OBS_VAR12_LEVELS = P850 -OBS_VAR12_THRESH = ge283 - -FCST_VAR13_NAME = DPT_P700_ENS_FREQ_ge263 -FCST_VAR13_LEVELS = P700 -FCST_VAR13_THRESH = ==0.1 -OBS_VAR13_NAME = DPT -OBS_VAR13_LEVELS = P700 -OBS_VAR13_THRESH = ge263 - -FCST_VAR14_NAME = DPT_P700_ENS_FREQ_ge268 -FCST_VAR14_LEVELS = P700 -FCST_VAR14_THRESH = ==0.1 -OBS_VAR14_NAME = DPT -OBS_VAR14_LEVELS = P700 -OBS_VAR14_THRESH = ge268 - -FCST_VAR15_NAME = DPT_P700_ENS_FREQ_ge273 -FCST_VAR15_LEVELS = P700 -FCST_VAR15_THRESH = ==0.1 -OBS_VAR15_NAME = DPT -OBS_VAR15_LEVELS = P700 -OBS_VAR15_THRESH = ge273 - -FCST_VAR16_NAME = WIND_P850_ENS_FREQ_ge5 -FCST_VAR16_LEVELS = P850 -FCST_VAR16_THRESH = ==0.1 -OBS_VAR16_NAME = WIND -OBS_VAR16_LEVELS = P850 -OBS_VAR16_THRESH = ge5 - -FCST_VAR17_NAME = WIND_P850_ENS_FREQ_ge10 -FCST_VAR17_LEVELS = P850 -FCST_VAR17_THRESH = ==0.1 -OBS_VAR17_NAME = WIND -OBS_VAR17_LEVELS = P850 -OBS_VAR17_THRESH = ge10 - -FCST_VAR18_NAME = WIND_P850_ENS_FREQ_ge15 -FCST_VAR18_LEVELS = P850 -FCST_VAR18_THRESH = ==0.1 -OBS_VAR18_NAME = WIND -OBS_VAR18_LEVELS = P850 -OBS_VAR18_THRESH = ge15 - -FCST_VAR19_NAME = WIND_P700_ENS_FREQ_ge10 -FCST_VAR19_LEVELS = P700 -FCST_VAR19_THRESH = ==0.1 
-OBS_VAR19_NAME = WIND -OBS_VAR19_LEVELS = P700 -OBS_VAR19_THRESH = ge10 - -FCST_VAR20_NAME = WIND_P700_ENS_FREQ_ge15 -FCST_VAR20_LEVELS = P700 -FCST_VAR20_THRESH = ==0.1 -OBS_VAR20_NAME = WIND -OBS_VAR20_LEVELS = P700 -OBS_VAR20_THRESH = ge15 - -FCST_VAR21_NAME = WIND_P700_ENS_FREQ_ge20 -FCST_VAR21_LEVELS = P700 -FCST_VAR21_THRESH = ==0.1 -OBS_VAR21_NAME = WIND -OBS_VAR21_LEVELS = P700 -OBS_VAR21_THRESH = ge20 - -FCST_VAR22_NAME = WIND_P500_ENS_FREQ_ge15 -FCST_VAR22_LEVELS = P500 -FCST_VAR22_THRESH = ==0.1 -OBS_VAR22_NAME = WIND -OBS_VAR22_LEVELS = P500 -OBS_VAR22_THRESH = ge15 - -FCST_VAR23_NAME = WIND_P500_ENS_FREQ_ge21 -FCST_VAR23_LEVELS = P500 -FCST_VAR23_THRESH = ==0.1 -OBS_VAR23_NAME = WIND -OBS_VAR23_LEVELS = P500 -OBS_VAR23_THRESH = ge21 - -FCST_VAR24_NAME = WIND_P500_ENS_FREQ_ge26 -FCST_VAR24_LEVELS = P500 -FCST_VAR24_THRESH = ==0.1 -OBS_VAR24_NAME = WIND -OBS_VAR24_LEVELS = P500 -OBS_VAR24_THRESH = ge26 - -FCST_VAR25_NAME = WIND_P250_ENS_FREQ_ge26 -FCST_VAR25_LEVELS = P250 -FCST_VAR25_THRESH = ==0.1 -OBS_VAR25_NAME = WIND -OBS_VAR25_LEVELS = P250 -OBS_VAR25_THRESH = ge26 - -FCST_VAR26_NAME = WIND_P250_ENS_FREQ_ge31 -FCST_VAR26_LEVELS = P250 -FCST_VAR26_THRESH = ==0.1 -OBS_VAR26_NAME = WIND -OBS_VAR26_LEVELS = P250 -OBS_VAR26_THRESH = ge31 - -FCST_VAR27_NAME = WIND_P250_ENS_FREQ_ge36 -FCST_VAR27_LEVELS = P250 -FCST_VAR27_THRESH = ==0.1 -OBS_VAR27_NAME = WIND -OBS_VAR27_LEVELS = P250 -OBS_VAR27_THRESH = ge36 - -FCST_VAR28_NAME = WIND_P250_ENS_FREQ_ge46 -FCST_VAR28_LEVELS = P250 -FCST_VAR28_THRESH = ==0.1 -OBS_VAR28_NAME = WIND -OBS_VAR28_LEVELS = P250 -OBS_VAR28_THRESH = ge46 - -FCST_VAR29_NAME = WIND_P250_ENS_FREQ_ge62 -FCST_VAR29_LEVELS = P250 -FCST_VAR29_THRESH = ==0.1 -OBS_VAR29_NAME = WIND -OBS_VAR29_LEVELS = P250 -OBS_VAR29_THRESH = ge62 - -FCST_VAR30_NAME = HGT_P500_ENS_FREQ_ge5400 -FCST_VAR30_LEVELS = P500 -FCST_VAR30_THRESH = ==0.1 -OBS_VAR30_NAME = HGT -OBS_VAR30_LEVELS = P500 -OBS_VAR30_THRESH = ge5400 - -FCST_VAR31_NAME = HGT_P500_ENS_FREQ_ge5600 -FCST_VAR31_LEVELS = P500 -FCST_VAR31_THRESH = ==0.1 -OBS_VAR31_NAME = HGT -OBS_VAR31_LEVELS = P500 -OBS_VAR31_THRESH = ge5600 - -FCST_VAR32_NAME = HGT_P500_ENS_FREQ_ge5880 -FCST_VAR32_LEVELS = P500 -FCST_VAR32_THRESH = ==0.1 -OBS_VAR32_NAME = HGT -OBS_VAR32_LEVELS = P500 -OBS_VAR32_THRESH = ge5880 - -FCST_VAR33_NAME = CAPE_L0_ENS_FREQ_le1000 -FCST_VAR33_LEVELS = L0 -FCST_VAR33_THRESH = ==0.1 -OBS_VAR33_NAME = CAPE -OBS_VAR33_LEVELS = L0-100000 -OBS_VAR33_THRESH = le1000 -OBS_VAR33_OPTIONS = cnt_thresh = [ >0 ]; - cnt_logic = UNION; - -FCST_VAR34_NAME = CAPE_L0_ENS_FREQ_gt1000.and.lt2500 -FCST_VAR34_LEVELS = L0 -FCST_VAR34_THRESH = ==0.1 -OBS_VAR34_NAME = CAPE -OBS_VAR34_LEVELS = L0-100000 -OBS_VAR34_THRESH = gt1000&<2500 -OBS_VAR34_OPTIONS = cnt_thresh = [ >0 ]; - cnt_logic = UNION; - -FCST_VAR35_NAME = CAPE_L0_ENS_FREQ_gt2500.and.lt4000 -FCST_VAR35_LEVELS = L0 -FCST_VAR35_THRESH = ==0.1 -OBS_VAR35_NAME = CAPE -OBS_VAR35_LEVELS = L0-100000 -OBS_VAR35_THRESH = gt2500&<4000 -OBS_VAR35_OPTIONS = cnt_thresh = [ >0 ]; - cnt_logic = UNION; - -FCST_VAR36_NAME = CAPE_L0_ENS_FREQ_gt2500 -FCST_VAR36_LEVELS = L0 -FCST_VAR36_THRESH = ==0.1 -OBS_VAR36_NAME = CAPE -OBS_VAR36_LEVELS = L0-100000 -OBS_VAR36_THRESH = gt2500 -OBS_VAR36_OPTIONS = cnt_thresh = [ >0 ]; - cnt_logic = UNION; - -FCST_VAR37_NAME = HPBL_Z0_ENS_FREQ_lt500 -FCST_VAR37_LEVELS = Z0 -FCST_VAR37_THRESH = ==0.1 -OBS_VAR37_NAME = PBL -OBS_VAR37_LEVELS = L0 -OBS_VAR37_THRESH = lt500 -OBS_VAR37_OPTIONS = desc = "TKE"; - -FCST_VAR38_NAME = HPBL_Z0_ENS_FREQ_lt1500 
-FCST_VAR38_LEVELS = Z0 -FCST_VAR38_THRESH = ==0.1 -OBS_VAR38_NAME = PBL -OBS_VAR38_LEVELS = L0 -OBS_VAR38_THRESH = lt1500 -OBS_VAR38_OPTIONS = desc = "TKE"; - -FCST_VAR39_NAME = HPBL_Z0_ENS_FREQ_gt1500 -FCST_VAR39_LEVELS = Z0 -FCST_VAR39_THRESH = ==0.1 -OBS_VAR39_NAME = PBL -OBS_VAR39_LEVELS = L0 -OBS_VAR39_THRESH = gt1500 -OBS_VAR39_OPTIONS = desc = "TKE"; - -# -# Forecast data description variables -# -FCST_IS_PROB = True -FCST_PROB_IN_GRIB_PDS = False - -# End of [config] section and start of [dir] section. -[dir] -# -# Directory containing observation input to PointStat. -# -OBS_POINT_STAT_INPUT_DIR = {{obs_input_dir}} -# -# Directory containing forecast input to PointStat. -# -FCST_POINT_STAT_INPUT_DIR = {{fcst_input_dir}} -# -# Directory containing climatology mean input to PointStat. Not used in -# this example. -# -POINT_STAT_CLIMO_MEAN_INPUT_DIR = -# -# Directory containing climatology mean input to PointStat. Not used in -# this example. -# -POINT_STAT_CLIMO_STDEV_INPUT_DIR = -# -# Directory in which to write output from PointStat. -# -# OUTPUT_BASE apparently has to be set to something; it cannot be left -# to its default value. But it is not explicitly used elsewhere in this -# configuration file. -# -OUTPUT_BASE = {{output_base}} -POINT_STAT_OUTPUT_DIR = {{output_dir}} -# -# Directory for staging data. -# -STAGING_DIR = {{staging_dir}} - -# End of [dir] section and start of [filename_templates] section. -[filename_templates] -# -# Template for observation input to PointStat relative to -# OBS_POINT_STAT_INPUT_DIR. -# -OBS_POINT_STAT_INPUT_TEMPLATE = {{obs_input_fn_template}} -# -# Template for forecast input to PointStat relative to -# FCST_POINT_STAT_INPUT_DIR. -# -FCST_POINT_STAT_INPUT_TEMPLATE = {{fcst_input_fn_template}} -# -# Template for output from PointStat relative to POINT_STAT_OUTPUT_DIR. -# -POINT_STAT_OUTPUT_TEMPLATE = -# -# Template for climatology input to PointStat relative to -# POINT_STAT_CLIMO_MEAN_INPUT_DIR. Not used in this example. -# -POINT_STAT_CLIMO_MEAN_INPUT_TEMPLATE = -# -# Template for climatology input to PointStat relative to -# POINT_STAT_CLIMO_STDEV_INPUT_DIR. Not used in this example. -# -POINT_STAT_CLIMO_STDEV_INPUT_TEMPLATE = diff --git a/parm/metplus/metplus_macros.jinja b/parm/metplus/metplus_macros.jinja new file mode 100644 index 0000000000..4dc8c599ce --- /dev/null +++ b/parm/metplus/metplus_macros.jinja @@ -0,0 +1,78 @@ +{#- +Set the string delimiter that separates the forecast value of an item +(e.g. a field name, level, or threshold) from its observation value in the +various items in the deterministic and ensemble verification configuration +files. +#} +{%- macro set_delim_str() %} + {{-'%%'}} +{%- endmacro %} + +{#- +This macro prints out an error message and quits the jinja templater. +#} +{%- macro print_err_and_quit(error_msg) %} + {%- include 'ERROR: ' ~ error_msg %} +{%- endmacro %} +{#- +Given a specified field level that is really an accumulation period, this +macro prints out an "A" followed by the accumulation period (an integer) +with any leading zeros removed. For example, if the level is 'A03', it +prints out 'A3'. 
+#}
+{%- macro get_accumulation_no_zero_pad(level) %}
+  {%- set first_char = level[0] %}
+  {%- set the_rest = level[1:] %}
+  {%- if (first_char == 'A') %}
+    {{- first_char ~ '%d'%the_rest|int }}
+  {%- else %}
+    {{- level }}
+  {%- endif %}
+{%- endmacro %}
+
+{#-
+This macro checks whether the specified input field group
+(input_field_group) is one of the valid field groups
+(valid_field_groups) and errors out if it is not.
+#}
+{%- macro check_field_group(valid_field_groups, input_field_group) %}
+  {%- if input_field_group not in valid_field_groups %}
+    {%- set error_msg = '\n' ~
+        'The specified input field group (input_field_group) is not in the list of\n' ~
+        'valid field groups (valid_field_groups):\n' ~
+        '  input_field_group = \'' ~ input_field_group ~ '\'\n' ~
+        '  valid_field_groups = ' ~ valid_field_groups ~ '\n' ~
+        'Reset input_field_group to one of the elements in valid_field_groups and\n' ~
+        'rerun.' %}
+    {{print_err_and_quit(error_msg)}}
+  {%- endif %}
+{%- endmacro %}
+
+{#-
+This macro checks whether, for the given field, the lists of thresholds
+for all levels are identical. If not, it prints out an error message
+and errors out.
+#}
+{%- macro check_for_identical_threshes_by_level(field, levels_threshes) %}
+  {%- set avail_levels = levels_threshes.keys()|list %}
+  {%- set num_avail_levels = avail_levels|length %}
+  {%- set threshes_by_avail_level = levels_threshes.values()|list %}
+  {%- for i in range(1,num_avail_levels) %}
+    {%- set level = avail_levels[i-1] %}
+    {%- set threshes = threshes_by_avail_level[i-1] %}
+    {%- set level_next = avail_levels[i] %}
+    {%- set threshes_next = threshes_by_avail_level[i] %}
+    {%- if (threshes_next != threshes) %}
+      {%- set error_msg = '\n\n' ~
+'For the given field (field), the set of thresholds for the next level\n' ~
+'(level_next, threshes_next) is not equal to that of the current level\n' ~
+'(level, threshes) (note that order of thresholds matters here):\n' ~
+'  field = \'' ~ field ~ '\'\n' ~
+'  num_avail_levels = ' ~ num_avail_levels ~ '\n' ~
+'  level = \'' ~ level ~ '\'\n' ~
+'  threshes = ' ~ threshes ~ '\n' ~
+'  level_next = \'' ~ level_next ~ '\'\n' ~
+'  threshes_next = ' ~ threshes_next ~ '\n'
+      %}
+      {{print_err_and_quit(error_msg)}}
+    {%- endif %}
+  {%- endfor %}
+{%- endmacro %}
diff --git a/parm/metplus/vx_configs/vx_config_det.obs_gdas.model_aiml.yaml b/parm/metplus/vx_configs/vx_config_det.obs_gdas.model_aiml.yaml
new file mode 100644
index 0000000000..81425cc1a1
--- /dev/null
+++ b/parm/metplus/vx_configs/vx_config_det.obs_gdas.model_aiml.yaml
@@ -0,0 +1,54 @@
+#
+# This configuration file specifies the field groups, fields, levels,
+# and thresholds to use for DETERMINISTIC verification. The format is
+# as follows:
+#
+# FIELD_GROUP1:
+#   FIELD1:
+#     LEVEL1: list_of_thresholds
+#     LEVEL2: list_of_thresholds
+#     ...
+#   FIELD2:
+#     LEVEL1: list_of_thresholds
+#     LEVEL2: list_of_thresholds
+#     ...
+#   ...
+#
+# FIELD_GROUP2:
+#   FIELD1:
+#     LEVEL1: list_of_thresholds
+#     LEVEL2: list_of_thresholds
+#     ...
+#   FIELD2:
+#     LEVEL1: list_of_thresholds
+#     LEVEL2: list_of_thresholds
+#     ...
+#   ...
+#
+# ...
+#
+# If the threshold list for a given combination of field group, field,
+# and level is set to the empty list ([]), then all values of that
+# field will be included in the verification.
+#
+# Both the keys that represent field groups, fields, and levels and the
+# strings in the list of thresholds may contain the separator string "%%"
+# that separates the value of the quantity for the forecast from that for
+# the observations.
For example, if a field is set to
+#
+#   RETOP%%EchoTop18
+#
+# it means the name of the field in the forecast data is RETOP while its
+# name in the observations is EchoTop18.
+#
+SFC:
+  TMP:
+    Z2: []
+  UGRD:
+    Z10: ['ge2.572']
+  VGRD:
+    Z10: ['ge2.572']
+  WIND:
+    Z10: ['ge2.572', 'ge2.572&<5.144', 'ge5.144', 'ge10.288', 'ge15.433']
+  PRES%%PRMSL:
+    Z0: []
diff --git a/parm/metplus/vx_configs/vx_config_det.obs_gdas.model_gfs.yaml b/parm/metplus/vx_configs/vx_config_det.obs_gdas.model_gfs.yaml
new file mode 100644
index 0000000000..dde2dd3302
--- /dev/null
+++ b/parm/metplus/vx_configs/vx_config_det.obs_gdas.model_gfs.yaml
@@ -0,0 +1,54 @@
+#
+# This configuration file specifies the field groups, fields, levels,
+# and thresholds to use for DETERMINISTIC verification. The format is
+# as follows:
+#
+# FIELD_GROUP1:
+#   FIELD1:
+#     LEVEL1: list_of_thresholds
+#     LEVEL2: list_of_thresholds
+#     ...
+#   FIELD2:
+#     LEVEL1: list_of_thresholds
+#     LEVEL2: list_of_thresholds
+#     ...
+#   ...
+#
+# FIELD_GROUP2:
+#   FIELD1:
+#     LEVEL1: list_of_thresholds
+#     LEVEL2: list_of_thresholds
+#     ...
+#   FIELD2:
+#     LEVEL1: list_of_thresholds
+#     LEVEL2: list_of_thresholds
+#     ...
+#   ...
+#
+# ...
+#
+# If the threshold list for a given combination of field group, field,
+# and level is set to the empty list ([]), then all values of that
+# field will be included in the verification.
+#
+# Both the keys that represent field groups, fields, and levels and the
+# strings in the list of thresholds may contain the separator string "%%"
+# that separates the value of the quantity for the forecast from that for
+# the observations. For example, if a field is set to
+#
+#   RETOP%%EchoTop18
+#
+# it means the name of the field in the forecast data is RETOP while its
+# name in the observations is EchoTop18.
+#
+SFC:
+  TMP:
+    Z2: []
+  UGRD:
+    Z10: ['ge2.572']
+  VGRD:
+    Z10: ['ge2.572']
+  WIND:
+    Z10: ['ge2.572', 'ge2.572&<5.144', 'ge5.144', 'ge10.288', 'ge15.433']
+  PRMSL:
+    Z0: []
diff --git a/parm/metplus/vx_configs/vx_config_det.yaml b/parm/metplus/vx_configs/vx_config_det.yaml
new file mode 100644
index 0000000000..48b8aff97b
--- /dev/null
+++ b/parm/metplus/vx_configs/vx_config_det.yaml
@@ -0,0 +1,209 @@
+#
+# This configuration file specifies the field groups, fields, levels,
+# and thresholds to use for DETERMINISTIC verification. The format is
+# as follows:
+#
+# FIELD_GROUP1:
+#   FIELD1:
+#     LEVEL1: list_of_thresholds
+#     LEVEL2: list_of_thresholds
+#     ...
+#   FIELD2:
+#     LEVEL1: list_of_thresholds
+#     LEVEL2: list_of_thresholds
+#     ...
+#   ...
+#
+# FIELD_GROUP2:
+#   FIELD1:
+#     LEVEL1: list_of_thresholds
+#     LEVEL2: list_of_thresholds
+#     ...
+#   FIELD2:
+#     LEVEL1: list_of_thresholds
+#     LEVEL2: list_of_thresholds
+#     ...
+#   ...
+#
+# ...
+#
+# If the threshold list for a given combination of field group, field,
+# and level is set to the empty list ([]), then all values of that
+# field will be included in the verification.
+#
+# Both the keys that represent field groups, fields, and levels and the
+# strings in the list of thresholds may contain the separator string "%%"
+# that separates the value of the quantity for the forecast from that for
+# the observations. For example, if a field is set to
+#
+#   RETOP%%EchoTop18
+#
+# it means the name of the field in the forecast data is RETOP while its
+# name in the observations is EchoTop18.
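Since this "%%" convention recurs throughout the new vx_config_*.yaml files, here is a compact sketch of how a coupled key resolves into its forecast and observation halves (plain Python for clarity; the workflow performs this split inside its Jinja templates via set_delim_str):

    # Resolve a "%%"-coupled key into (forecast, observation) names.
    def split_cpld(key, delim="%%"):
        return tuple(key.split(delim)) if delim in key else (key, key)

    print(split_cpld("RETOP%%EchoTop18"))  # ('RETOP', 'EchoTop18')
    print(split_cpld("L0%%Z500"))          # ('L0', 'Z500')
    print(split_cpld("TMP"))               # ('TMP', 'TMP')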
+# +APCP: + APCP: + A1: ['gt0.0', 'ge0.254', 'ge0.508', 'ge1.27', 'ge2.54'] + A3: ['gt0.0', 'ge0.254', 'ge0.508', 'ge1.27', 'ge2.54', 'ge3.810', 'ge6.350'] + A6: ['gt0.0', 'ge0.254', 'ge0.508', 'ge1.27', 'ge2.54', 'ge3.810', 'ge6.350', 'ge8.890', 'ge12.700'] + A24: ['gt0.0', 'ge0.254', 'ge0.508', 'ge1.27', 'ge2.54', 'ge3.810', 'ge6.350', 'ge8.890', 'ge12.700', 'ge25.400'] +ASNOW: + ASNOW: + A6: ['gt0.0', 'ge2.54', 'ge5.08', 'ge10.16', 'ge20.32'] + A24: ['gt0.0', 'ge2.54', 'ge10.16', 'ge20.32', 'ge30.48'] +REFC: + REFC%%MergedReflectivityQCComposite: + L0%%Z500: ['ge20', 'ge30', 'ge40', 'ge50'] +RETOP: + RETOP%%EchoTop18: + L0%%Z500: ['ge20', 'ge30', 'ge40', 'ge50'] +SFC: + TMP: + Z2: [] + DPT: + Z2: [] + RH: + Z2: [] + UGRD: + Z10: ['ge2.572'] + VGRD: + Z10: ['ge2.572'] + WIND: + Z10: ['ge2.572', 'ge2.572&<5.144', 'ge5.144', 'ge10.288', 'ge15.433'] + PRMSL: + Z0: [] + TCDC: + L0: [] + VIS: + L0: ['lt805', 'lt1609', 'lt4828', 'lt8045', 'ge8045', 'lt16090'] + GUST: + Z0: [] + HGT%%CEILING: + L0: ['lt152', 'lt305', 'lt914', 'lt1520', 'lt3040', 'ge914'] + SPFH: + Z2: [] + CRAIN%%PRWE: + L0%%Z0: ['ge1.0%%ge161&&le163'] + CSNOW%%PRWE: + L0%%Z0: ['ge1.0%%ge171&&le173'] + CFRZR%%PRWE: + L0%%Z0: ['ge1.0%%ge164&&le166'] + CICEP%%PRWE: + L0%%Z0: ['ge1.0%%ge174&&le176'] +UPA: + TMP: + P1000: &adpupa_tmp_threshes + [] + P925: *adpupa_tmp_threshes + P850: *adpupa_tmp_threshes + P700: *adpupa_tmp_threshes + P500: *adpupa_tmp_threshes + P400: *adpupa_tmp_threshes + P300: *adpupa_tmp_threshes + P250: *adpupa_tmp_threshes + P200: *adpupa_tmp_threshes + P150: *adpupa_tmp_threshes + P100: *adpupa_tmp_threshes + P50: *adpupa_tmp_threshes + P20: *adpupa_tmp_threshes + P10: *adpupa_tmp_threshes + RH: + P1000: &adpupa_rh_threshes + [] + P925: *adpupa_rh_threshes + P850: *adpupa_rh_threshes + P700: *adpupa_rh_threshes + P500: *adpupa_rh_threshes + P400: *adpupa_rh_threshes + P300: *adpupa_rh_threshes + P250: *adpupa_rh_threshes + DPT: + P1000: &adpupa_dpt_threshes + [] + P925: *adpupa_dpt_threshes + P850: *adpupa_dpt_threshes + P700: *adpupa_dpt_threshes + P500: *adpupa_dpt_threshes + P400: *adpupa_dpt_threshes + P300: *adpupa_dpt_threshes + UGRD: + P1000: &adpupa_ugrd_threshes + ['ge2.572'] + P925: *adpupa_ugrd_threshes + P850: *adpupa_ugrd_threshes + P700: *adpupa_ugrd_threshes + P500: *adpupa_ugrd_threshes + P400: *adpupa_ugrd_threshes + P300: *adpupa_ugrd_threshes + P250: *adpupa_ugrd_threshes + P200: *adpupa_ugrd_threshes + P150: *adpupa_ugrd_threshes + P100: *adpupa_ugrd_threshes + P50: *adpupa_ugrd_threshes + P20: *adpupa_ugrd_threshes + P10: *adpupa_ugrd_threshes + VGRD: + P1000: &adpupa_vgrd_threshes + ['ge2.572'] + P925: *adpupa_vgrd_threshes + P850: *adpupa_vgrd_threshes + P700: *adpupa_vgrd_threshes + P500: *adpupa_vgrd_threshes + P400: *adpupa_vgrd_threshes + P300: *adpupa_vgrd_threshes + P250: *adpupa_vgrd_threshes + P200: *adpupa_vgrd_threshes + P150: *adpupa_vgrd_threshes + P100: *adpupa_vgrd_threshes + P50: *adpupa_vgrd_threshes + P20: *adpupa_vgrd_threshes + P10: *adpupa_vgrd_threshes + WIND: + P1000: &adpupa_wind_threshes + ['ge2.572', 'ge2.572&<5.144', 'ge5.144', 'ge10.288', 'ge15.433', 'ge20.577', 'ge25.722'] + P925: *adpupa_wind_threshes + P850: *adpupa_wind_threshes + P700: *adpupa_wind_threshes + P500: *adpupa_wind_threshes + P400: *adpupa_wind_threshes + P300: *adpupa_wind_threshes + P250: *adpupa_wind_threshes + P200: *adpupa_wind_threshes + P150: *adpupa_wind_threshes + P100: *adpupa_wind_threshes + P50: *adpupa_wind_threshes + P20: *adpupa_wind_threshes + P10: *adpupa_wind_threshes + 
HGT: + P1000: &adpupa_hgt_threshes + [] + P950: *adpupa_hgt_threshes + P925: *adpupa_hgt_threshes + P850: *adpupa_hgt_threshes + P700: *adpupa_hgt_threshes + P500: *adpupa_hgt_threshes + P400: *adpupa_hgt_threshes + P300: *adpupa_hgt_threshes + P250: *adpupa_hgt_threshes + P200: *adpupa_hgt_threshes + P150: *adpupa_hgt_threshes + P100: *adpupa_hgt_threshes + P50: *adpupa_hgt_threshes + P20: *adpupa_hgt_threshes + P10: *adpupa_hgt_threshes + SPFH: + P1000: &adpupa_spfh_threshes + [] + P850: *adpupa_spfh_threshes + P700: *adpupa_spfh_threshes + P500: *adpupa_spfh_threshes + P400: *adpupa_spfh_threshes + P300: *adpupa_spfh_threshes + CAPE: + L0%%L0-100000: ['gt500', 'gt1000', 'gt1500', 'gt2000', 'gt3000', 'gt4000'] + HPBL%%PBL: + Z0%%L0: [] + HGT%%PBL: + L0: [] + CAPE%%MLCAPE: + L0-90%%L0: ['gt500', 'gt1000', 'gt1500', 'gt2000', 'gt3000', 'gt4000'] diff --git a/parm/metplus/vx_configs/vx_config_ens.yaml b/parm/metplus/vx_configs/vx_config_ens.yaml new file mode 100644 index 0000000000..4eb1524648 --- /dev/null +++ b/parm/metplus/vx_configs/vx_config_ens.yaml @@ -0,0 +1,55 @@ +# +# This configuration file specifies the field groups, fields, levels, +# and thresholds to use for ENSEMBLE verification. The format is the +# same as the one used in the configuration file for deterministic +# verification (vx_config_det.yaml); please see the documentation in +# that file for details. +# +APCP: + APCP: + A1: ['gt0.0', 'ge0.254', 'ge0.508', 'ge2.54'] + A3: ['gt0.0', 'ge0.508', 'ge2.54', 'ge6.350'] + A6: ['gt0.0', 'ge2.54', 'ge6.350', 'ge12.700'] + A24: ['gt0.0', 'ge6.350', 'ge12.700', 'ge25.400'] +ASNOW: + ASNOW: + A6: ['gt0.0', 'ge2.54', 'ge5.08', 'ge10.16', 'ge20.32'] + A24: ['gt0.0', 'ge2.54', 'ge10.16', 'ge20.32', 'ge30.48'] +REFC: + REFC%%MergedReflectivityQCComposite: + L0%%Z500: ['ge20', 'ge30', 'ge40', 'ge50'] +RETOP: + RETOP%%EchoTop18: + L0%%Z500: ['ge20', 'ge30', 'ge40', 'ge50'] +SFC: + TMP: + Z2: ['ge268', 'ge273', 'ge278', 'ge293', 'ge298', 'ge303'] + DPT: + Z2: ['ge263', 'ge268', 'ge273', 'ge288', 'ge293', 'ge298'] + WIND: + Z10: ['ge5', 'ge10', 'ge15'] + TCDC: + L0: ['lt25', 'gt75'] + VIS: + L0: ['lt1609', 'lt8045', 'ge8045'] + HGT%%CEILING: + L0: ['lt152', 'lt305', 'lt914'] +UPA: + TMP: + P850: ['ge288', 'ge293', 'ge298'] + P700: ['ge273', 'ge278', 'ge283'] + P500: ['ge258', 'ge263', 'ge268'] + DPT: + P850: ['ge273', 'ge278', 'ge283'] + P700: ['ge263', 'ge268', 'ge273'] + WIND: + P850: ['ge5', 'ge10', 'ge15'] + P700: ['ge10', 'ge15', 'ge20'] + P500: ['ge15', 'ge21', 'ge26'] + P250: ['ge26', 'ge31', 'ge36', 'ge46', 'ge62'] + HGT: + P500: ['ge5400', 'ge5600', 'ge5880'] + CAPE: + L0%%L0-100000: ['le1000', 'gt1000&<2500', 'ge2500&<4000', 'ge2500'] + HPBL%%PBL: + Z0%%L0: ['lt500', 'lt1500', 'gt1500'] diff --git a/parm/model_configure b/parm/model_configure index aeb45f4719..d22adf3f3a 100644 --- a/parm/model_configure +++ b/parm/model_configure @@ -1,5 +1,3 @@ -total_member: 1 -PE_MEMBER01: {{ PE_MEMBER01 }} start_year: {{ start_year }} start_month: {{ start_month }} start_day: {{ start_day }} @@ -13,7 +11,6 @@ ENS_SPS: .false. dt_atmos: {{ dt_atmos }} calendar: 'julian' memuse_verbose: .false. -atmos_nthreads: {{ atmos_nthreads }} restart_interval: {{ restart_interval }} output_1st_tstep_rst: .false. 
write_dopost: {{ write_dopost }} diff --git a/parm/namelist.fire b/parm/namelist.fire new file mode 100644 index 0000000000..4ef039996b --- /dev/null +++ b/parm/namelist.fire @@ -0,0 +1,28 @@ + &time + dt = 0.5 + interval_output = 300 +/ + + &atm + interval_atm = 1 + kde = 65 + / + + &fire + fire_num_ignitions = 1, + fire_ignition_ros1 = 0.05, + fire_ignition_start_lat1 = 40.609, + fire_ignition_start_lon1 = -105.879, + fire_ignition_end_lat1 = 40.609, + fire_ignition_end_lon1 = -105.879, + fire_ignition_radius1 = 250, + fire_ignition_start_time1 = 6480, + fire_ignition_end_time1 = 7000, + fire_wind_height = 5.0, ! height to interpolate winds to for calculating fire spread rate + fire_print_msg = 1, ! 1 print fire debugging messages + fire_atm_feedback = 1.0, ! real, multiplier for heat fluxes, 1.=normal, 0.=turn off two-way coupling + fire_viscosity = 0.4, ! artificial viscosity in level set method (max 1, needed with fire_upwinding=0) + fire_upwinding = 9, ! 0=none, 1=standard, 2=godunov, 3=eno, 4=sethian + fire_lsm_zcoupling = .false., + fire_lsm_zcoupling_ref = 60.0, +/ diff --git a/parm/ufs.configure b/parm/ufs.configure index d90b7447f4..74bd602231 100644 --- a/parm/ufs.configure +++ b/parm/ufs.configure @@ -3,50 +3,47 @@ ############################################# # ESMF # -{%- if print_esmf %} -logKindFlag: ESMF_LOGKIND_MULTI -{%- else %} -logKindFlag: ESMF_LOGKIND_MULTI_ON_ERROR -{%- endif %} +logKindFlag: {{ logKindFlag }} globalResourceControl: true -{% if cpl_aqm %} # EARTH # -EARTH_component_list: ATM AQM +EARTH_component_list: {{ EARTH_cl }} EARTH_attributes:: Verbosity = 0 :: # ATM # ATM_model: fv3 -ATM_petlist_bounds: -1 -1 +ATM_petlist_bounds: {{ ATM_pb }}{{ +ATM_omp_num_threads_line }} ATM_attributes:: - Verbosity = 0 + Verbosity = 0{{ +ATM_diag_line }} :: +{% if cpl_aqm %} # AQM # AQM_model: aqm -AQM_petlist_bounds: -1 -1 +AQM_petlist_bounds: {{ AQM_pb }} AQM_attributes:: Verbosity = 0 Diagnostic = 0 :: +{% endif %} +{% if ufs_fire %} +# FIRE # +FIRE_model: fire_behavior +FIRE_petlist_bounds: {{ FIRE_pb }} +FIRE_attributes:: + Verbosity = high + Diagnostic = 0 +:: +{% endif %} # Run Sequence # runSeq:: - @{{ dt_atmos }} - ATM phase1 - ATM -> AQM - AQM - AQM -> ATM - ATM phase2 - @ +{% for line in runseq -%} +{{ line }} +{%- endfor %} :: -{% else %} -# EARTH # -EARTH_component_list: ATM - ATM_model: fv3 - runSeq:: - ATM - :: -{% endif %} + diff --git a/parm/wflow/aqm_post.yaml b/parm/wflow/aqm_post.yaml index 5f307184d3..48a0761fef 100644 --- a/parm/wflow/aqm_post.yaml +++ b/parm/wflow/aqm_post.yaml @@ -22,7 +22,7 @@ default_aqm_task: &default_aqm task_pre_post_stat: <<: *default_aqm - command: '&LOAD_MODULES_RUN_TASK_FP; "pre_post_stat" "&HOMEdir;/jobs/JSRW_PRE_POST_STAT"' + command: '&LOAD_MODULES_RUN_TASK; "pre_post_stat" "&HOMEdir;/jobs/JSRW_PRE_POST_STAT"' join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' dependency: or: @@ -36,7 +36,7 @@ task_pre_post_stat: task_post_stat_o3: <<: *default_aqm - command: '&LOAD_MODULES_RUN_TASK_FP; "post_stat_o3" "&HOMEdir;/jobs/JSRW_POST_STAT_O3"' + command: '&LOAD_MODULES_RUN_TASK; "post_stat_o3" "&HOMEdir;/jobs/JSRW_POST_STAT_O3"' join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' memory: 120G dependency: @@ -46,7 +46,7 @@ task_post_stat_o3: task_post_stat_pm25: <<: *default_aqm - command: '&LOAD_MODULES_RUN_TASK_FP; "post_stat_pm25" "&HOMEdir;/jobs/JSRW_POST_STAT_PM25"' + command: '&LOAD_MODULES_RUN_TASK; "post_stat_pm25" "&HOMEdir;/jobs/JSRW_POST_STAT_PM25"' join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' 
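+    # NOTE: the &LOAD_MODULES_RUN_TASK; entity (defined in
+    # default_workflow.yaml) now expands to the LOAD_MODULES_RUN_TASK_FP
+    # script path followed by the machine name, which is why the task
+    # commands in these files drop the _FP suffix.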
memory: 120G dependency: @@ -56,7 +56,7 @@ task_post_stat_pm25: task_bias_correction_o3: <<: *default_aqm - command: '&LOAD_MODULES_RUN_TASK_FP; "bias_correction_o3" "&HOMEdir;/jobs/JSRW_BIAS_CORRECTION_O3"' + command: '&LOAD_MODULES_RUN_TASK; "bias_correction_o3" "&HOMEdir;/jobs/JSRW_BIAS_CORRECTION_O3"' join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' memory: 120G dependency: @@ -66,7 +66,7 @@ task_bias_correction_o3: task_bias_correction_pm25: <<: *default_aqm - command: '&LOAD_MODULES_RUN_TASK_FP; "bias_correction_pm25" "&HOMEdir;/jobs/JSRW_BIAS_CORRECTION_PM25"' + command: '&LOAD_MODULES_RUN_TASK; "bias_correction_pm25" "&HOMEdir;/jobs/JSRW_BIAS_CORRECTION_PM25"' join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' memory: 120G dependency: diff --git a/parm/wflow/aqm_prep.yaml b/parm/wflow/aqm_prep.yaml index d8f01d2c82..d90bbde60f 100644 --- a/parm/wflow/aqm_prep.yaml +++ b/parm/wflow/aqm_prep.yaml @@ -29,7 +29,7 @@ default_aqm_task: &default_aqm task_nexus_gfs_sfc: <<: *default_aqm - command: '&LOAD_MODULES_RUN_TASK_FP; "nexus_gfs_sfc" "&HOMEdir;/jobs/JSRW_NEXUS_GFS_SFC"' + command: '&LOAD_MODULES_RUN_TASK; "nexus_gfs_sfc" "&HOMEdir;/jobs/JSRW_NEXUS_GFS_SFC"' native: '{% if platform.get("SCHED_NATIVE_CMD_HPSS") %}{{ platform.SCHED_NATIVE_CMD_HPSS }}{% else %}{{ platform.SCHED_NATIVE_CMD}}{% endif %}' partition: '{% if platform.get("PARTITION_HPSS") %}&PARTITION_HPSS;{% else %}None{% endif %}' join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' @@ -53,7 +53,7 @@ metatask_nexus_emission: nspt: '{% for h in range(0, cpl_aqm_parm.NUM_SPLIT_NEXUS) %}{{ " %02d" % h }}{% endfor %}' task_nexus_emission_#nspt#: <<: *default_aqm - command: '&LOAD_MODULES_RUN_TASK_FP; "nexus_emission" "&HOMEdir;/jobs/JSRW_NEXUS_EMISSION"' + command: '&LOAD_MODULES_RUN_TASK; "nexus_emission" "&HOMEdir;/jobs/JSRW_NEXUS_EMISSION"' join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' nnodes: '{{ task_nexus_emission.NNODES_NEXUS_EMISSION }}' ppn: '{{ task_nexus_emission.PPN_NEXUS_EMISSION // 1 }}' @@ -68,7 +68,7 @@ metatask_nexus_emission: task_nexus_post_split: <<: *default_aqm - command: '&LOAD_MODULES_RUN_TASK_FP; "nexus_post_split" "&HOMEdir;/jobs/JSRW_NEXUS_POST_SPLIT"' + command: '&LOAD_MODULES_RUN_TASK; "nexus_post_split" "&HOMEdir;/jobs/JSRW_NEXUS_POST_SPLIT"' join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' dependency: metataskdep: @@ -77,13 +77,13 @@ task_nexus_post_split: task_fire_emission: <<: *default_aqm - command: '&LOAD_MODULES_RUN_TASK_FP; "fire_emission" "&HOMEdir;/jobs/JSRW_FIRE_EMISSION"' + command: '&LOAD_MODULES_RUN_TASK; "fire_emission" "&HOMEdir;/jobs/JSRW_FIRE_EMISSION"' join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' memory: 2G task_point_source: <<: *default_aqm - command: '&LOAD_MODULES_RUN_TASK_FP; "point_source" "&HOMEdir;/jobs/JSRW_POINT_SOURCE"' + command: '&LOAD_MODULES_RUN_TASK; "point_source" "&HOMEdir;/jobs/JSRW_POINT_SOURCE"' join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' walltime: 01:00:00 dependency: @@ -101,11 +101,12 @@ task_aqm_ics_ext: attrs: cycledefs: at_start maxtries: '2' - command: '&LOAD_MODULES_RUN_TASK_FP; "aqm_ics" "&HOMEdir;/jobs/JSRW_AQM_ICS"' + command: '&LOAD_MODULES_RUN_TASK; "aqm_ics" "&HOMEdir;/jobs/JSRW_AQM_ICS"' envars: <<: *default_vars PREV_CYCLE_DIR: '&WARMSTART_CYCLE_DIR;' join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' + memory: 64G dependency: and: taskdep: @@ -126,11 +127,12 @@ task_aqm_ics: attrs: cycledefs: cycled_from_second maxtries: '2' - command: '&LOAD_MODULES_RUN_TASK_FP; "aqm_ics" 
"&HOMEdir;/jobs/JSRW_AQM_ICS"' + command: '&LOAD_MODULES_RUN_TASK; "aqm_ics" "&HOMEdir;/jobs/JSRW_AQM_ICS"' envars: <<: *default_vars PREV_CYCLE_DIR: '&COMIN_DIR;' join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' + memory: 64G dependency: and: taskdep: @@ -148,7 +150,7 @@ task_aqm_ics: task_aqm_lbcs: <<: *default_aqm - command: '&LOAD_MODULES_RUN_TASK_FP; "aqm_lbcs" "&HOMEdir;/jobs/JSRW_AQM_LBCS"' + command: '&LOAD_MODULES_RUN_TASK; "aqm_lbcs" "&HOMEdir;/jobs/JSRW_AQM_LBCS"' join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' ppn: 24 dependency: diff --git a/parm/wflow/coldstart.yaml b/parm/wflow/coldstart.yaml index ceefe865e6..6fad0b8d83 100644 --- a/parm/wflow/coldstart.yaml +++ b/parm/wflow/coldstart.yaml @@ -20,7 +20,7 @@ default_task: &default_task task_get_extrn_ics: <<: *default_task - command: '&LOAD_MODULES_RUN_TASK_FP; "get_extrn_ics" "&JOBSdir;/JREGIONAL_GET_EXTRN_MDL_FILES"' + command: '&LOAD_MODULES_RUN_TASK; "get_extrn_ics" "&JOBSdir;/JREGIONAL_GET_EXTRN_MDL_FILES"' attrs: cycledefs: forecast maxtries: '2' @@ -51,7 +51,7 @@ task_get_extrn_ics: task_get_extrn_lbcs: <<: *default_task - command: '&LOAD_MODULES_RUN_TASK_FP; "get_extrn_lbcs" "&JOBSdir;/JREGIONAL_GET_EXTRN_MDL_FILES"' + command: '&LOAD_MODULES_RUN_TASK; "get_extrn_lbcs" "&JOBSdir;/JREGIONAL_GET_EXTRN_MDL_FILES"' attrs: cycledefs: forecast maxtries: '2' @@ -85,7 +85,7 @@ metatask_run_ensemble: mem: '{% if global.DO_ENSEMBLE %}{%- for m in range(1, global.NUM_ENS_MEMBERS+1) -%}{{ "%03d "%m }}{%- endfor -%} {% else %}{{ "000"|string }}{% endif %}' task_make_ics_mem#mem#: <<: *default_task - command: '&LOAD_MODULES_RUN_TASK_FP; "make_ics" "&JOBSdir;/JREGIONAL_MAKE_ICS"' + command: '&LOAD_MODULES_RUN_TASK; "make_ics" "&JOBSdir;/JREGIONAL_MAKE_ICS"' envars: <<: *default_vars SLASH_ENSMEM_SUBDIR: '&SLASH_ENSMEM_SUBDIR;' @@ -124,7 +124,7 @@ metatask_run_ensemble: task_make_lbcs_mem#mem#: <<: *default_task - command: '&LOAD_MODULES_RUN_TASK_FP; "make_lbcs" "&JOBSdir;/JREGIONAL_MAKE_LBCS"' + command: '&LOAD_MODULES_RUN_TASK; "make_lbcs" "&JOBSdir;/JREGIONAL_MAKE_LBCS"' envars: <<: *default_vars SLASH_ENSMEM_SUBDIR: '&SLASH_ENSMEM_SUBDIR;' @@ -142,7 +142,7 @@ metatask_run_ensemble: task_run_fcst_mem#mem#: <<: *default_task - command: '&LOAD_MODULES_RUN_TASK_FP; "run_fcst" "&JOBSdir;/JREGIONAL_RUN_FCST"' + command: '&LOAD_MODULES_RUN_TASK; "run_fcst" "&JOBSdir;/JREGIONAL_RUN_FCST"' envars: <<: *default_vars SLASH_ENSMEM_SUBDIR: '&SLASH_ENSMEM_SUBDIR;' diff --git a/parm/wflow/default_workflow.yaml b/parm/wflow/default_workflow.yaml index c79415b3be..4ffb6f288a 100644 --- a/parm/wflow/default_workflow.yaml +++ b/parm/wflow/default_workflow.yaml @@ -4,20 +4,20 @@ rocoto: entities: ACCOUNT: '{{ user.ACCOUNT }}' - CCPA_OBS_DIR: '{{ platform.CCPA_OBS_DIR }}' + CCPA_OBS_DIR: '{{ verification.CCPA_OBS_DIR }}' COLDSTART: '{{ workflow.COLDSTART }}' COMINgfs: '{{ platform.get("COMINgfs") }}' GLOBAL_VAR_DEFNS_FP: '{{ workflow.GLOBAL_VAR_DEFNS_FP }}' HOMEdir: '{{ user.HOMEdir }}' JOBSdir: '{{ user.JOBSdir }}' KEEPDATA: '{{ nco.KEEPDATA_default }}' - LOAD_MODULES_RUN_TASK_FP: '{{ workflow.LOAD_MODULES_RUN_TASK_FP }}' + LOAD_MODULES_RUN_TASK: '{{ workflow.LOAD_MODULES_RUN_TASK_FP }} {{ user.MACHINE }}' LOGEXT: ".log" NET: '{{ nco.NET_default }}' - MRMS_OBS_DIR: '{{ platform.MRMS_OBS_DIR }}' + MRMS_OBS_DIR: '{{ verification.MRMS_OBS_DIR }}' NCORES_PER_NODE: '{{ platform.NCORES_PER_NODE }}' - NDAS_OBS_DIR: '{{ platform.NDAS_OBS_DIR }}' - NOHRSC_OBS_DIR: '{{ platform.NOHRSC_OBS_DIR }}' + NDAS_OBS_DIR: '{{ verification.NDAS_OBS_DIR 
}}'
+      NOHRSC_OBS_DIR: '{{ verification.NOHRSC_OBS_DIR }}'
       PARTITION_DEFAULT: '{{ platform.get("PARTITION_DEFAULT") }}'
       PARTITION_FCST: '{{ platform.get("PARTITION_FCST") }}'
       PARTITION_HPSS: '{{ platform.get("PARTITION_HPSS") }}'
@@ -51,7 +51,13 @@ rocoto:
     forecast:
     - !startstopfreq ['{{workflow.DATE_FIRST_CYCL}}', '{{workflow.DATE_LAST_CYCL}}', '{{workflow.INCR_CYCL_FREQ}}']
     cycled_from_second:
-    - !startstopfreq ['{%- if workflow.DATE_FIRST_CYCL != workflow.DATE_LAST_CYCL %}{{ [workflow.DATE_FIRST_CYCL[0:8], "{:02d}".format(workflow.INCR_CYCL_FREQ)]|join }}{%- else %}{{workflow.DATE_FIRST_CYCL}}{%- endif %}', '{{workflow.DATE_LAST_CYCL}}', '{{workflow.INCR_CYCL_FREQ}}']
+    - !startstopfreq ['{%- if workflow.DATE_FIRST_CYCL != workflow.DATE_LAST_CYCL %}
+                      {{- workflow.DATE_SECOND_CYCL }}
+                      {%- else %}
+                      {{- workflow.DATE_FIRST_CYCL }}
+                      {%- endif %}',
+                      '{{ workflow.DATE_LAST_CYCL }}',
+                      '{{ workflow.INCR_CYCL_FREQ }}']
   log: !cycstr '&LOGDIR;/FV3LAM_wflow.{% if user.RUN_ENVIR == "nco" %}{{ workflow.WORKFLOW_ID + "." }}{% endif %}log'
   tasks:
     taskgroups: '{{ ["parm/wflow/prep.yaml", "parm/wflow/coldstart.yaml", "parm/wflow/post.yaml", "parm/wflow/test.yaml"]|include }}'
diff --git a/parm/wflow/plot.yaml b/parm/wflow/plot.yaml
index 6dad3e0dfa..8448bc3f9e 100644
--- a/parm/wflow/plot.yaml
+++ b/parm/wflow/plot.yaml
@@ -12,10 +12,12 @@ default_task_plot: &default_task
     PDY: !cycstr "@Y@m@d"
     cyc: !cycstr "@H"
     subcyc: !cycstr "@M"
+    fhr: '#fhr#'
     LOGDIR: !cycstr "&LOGDIR;"
     SLASH_ENSMEM_SUBDIR: '&SLASH_ENSMEM_SUBDIR;'
     ENSMEM_INDX: '#mem#'
-    nprocs: '{{ nnodes * ppn }}'
+    nprocs: '{{ parent.nnodes * parent.ppn }}'
+  join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;'
   native: '{{ platform.SCHED_NATIVE_CMD }}'
   nnodes: 1
   nodes: '{{ nnodes }}:ppn={{ ppn }}'
@@ -24,25 +26,31 @@ default_task_plot: &default_task
   queue: '&QUEUE_DEFAULT;'
   walltime: 01:00:00

-task_plot_allvars:
-  <<: *default_task
-  command: '&LOAD_MODULES_RUN_TASK_FP; "plot_allvars" "&JOBSdir;/JREGIONAL_PLOT_ALLVARS"'
-  join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;'
-  dependency:
-    or_do_post: &post_files_exist
-      and_run_post: # If post was meant to run, wait on the whole post metatask
-        taskvalid:
-          attrs:
-            task: run_post_mem000_f000
-        metataskdep:
-          attrs:
-            metatask: run_ens_post
-      and_inline_post: # If inline post ran, wait on the forecast task to complete
-        not:
-          taskvalid:
-            attrs:
-              task: run_post_mem000_f000
-        taskdep:
-          attrs:
-            task: run_fcst_mem000
+metatask_plot_allvars:
+  var:
+    mem: '{% if global.DO_ENSEMBLE %}{%- for m in range(1, global.NUM_ENS_MEMBERS+1) -%}{{ "%03d "%m }}{%- endfor -%} {% else %}{{ "000"|string }}{% endif %}'
+  metatask_plot_allvars_mem#mem#_all_fhrs:
+    var:
+      fhr: '{% for h in range(0, workflow.LONG_FCST_LEN+1) %}{{ " %03d" % h }}{% endfor %}'
+      cycledef: '{% for h in range(0, workflow.LONG_FCST_LEN+1) %}{% if h <= workflow.FCST_LEN_CYCL|min %}forecast {% else %}long_forecast {% endif %}{% endfor %}'
+    task_plot_allvars_mem#mem#_f#fhr#:
+      <<: *default_task
+      command: '&LOAD_MODULES_RUN_TASK; "plot_allvars" "&JOBSdir;/JREGIONAL_PLOT_ALLVARS"'
+      dependency:
+        or_do_post: &post_files_exist
+          and_run_post: # If post was meant to run, wait on the whole post metatask
+            taskvalid:
+              attrs:
+                task: run_post_mem#mem#_f#fhr#
+            metataskdep:
+              attrs:
+                metatask: run_ens_post
+          and_inline_post: # If inline post ran, wait on the forecast task to complete
+            not:
+              taskvalid:
+                attrs:
+                  task: run_post_mem#mem#_f#fhr#
+            taskdep:
+              attrs:
+                task: run_fcst_mem#mem#
diff --git a/parm/wflow/post.yaml
b/parm/wflow/post.yaml index 5672e7343f..114e5de377 100644 --- a/parm/wflow/post.yaml +++ b/parm/wflow/post.yaml @@ -3,7 +3,7 @@ default_task_post: &default_task attrs: cycledefs: '#cycledef#' maxtries: '2' - command: '&LOAD_MODULES_RUN_TASK_FP; "run_post" "&JOBSdir;/JREGIONAL_RUN_POST"' + command: '&LOAD_MODULES_RUN_TASK; "run_post" "&JOBSdir;/JREGIONAL_RUN_POST"' envars: &default_vars GLOBAL_VAR_DEFNS_FP: '&GLOBAL_VAR_DEFNS_FP;' USHdir: '&USHdir;' diff --git a/parm/wflow/prdgen.yaml b/parm/wflow/prdgen.yaml index 6b9f7cd4f6..3f2026a45f 100644 --- a/parm/wflow/prdgen.yaml +++ b/parm/wflow/prdgen.yaml @@ -10,7 +10,7 @@ metatask_run_prdgen: attrs: cycledefs: '#cycledef#' maxtries: 1 - command: '&LOAD_MODULES_RUN_TASK_FP; "run_prdgen" "&JOBSdir;/JREGIONAL_RUN_PRDGEN"' + command: '&LOAD_MODULES_RUN_TASK; "run_prdgen" "&JOBSdir;/JREGIONAL_RUN_PRDGEN"' envars: GLOBAL_VAR_DEFNS_FP: '&GLOBAL_VAR_DEFNS_FP;' USHdir: '&USHdir;' diff --git a/parm/wflow/prep.yaml b/parm/wflow/prep.yaml index c9d5549909..a0c6e3119a 100644 --- a/parm/wflow/prep.yaml +++ b/parm/wflow/prep.yaml @@ -24,12 +24,12 @@ default_task_prep: &default_task task_make_grid: <<: *default_task - command: '&LOAD_MODULES_RUN_TASK_FP; "make_grid" "&JOBSdir;/JREGIONAL_MAKE_GRID"' + command: '&LOAD_MODULES_RUN_TASK; "make_grid" "&JOBSdir;/JREGIONAL_MAKE_GRID"' join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' task_make_orog: <<: *default_task - command: '&LOAD_MODULES_RUN_TASK_FP; "make_orog" "&JOBSdir;/JREGIONAL_MAKE_OROG"' + command: '&LOAD_MODULES_RUN_TASK; "make_orog" "&JOBSdir;/JREGIONAL_MAKE_OROG"' join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' dependency: or: &make_grid_satisfied @@ -47,7 +47,7 @@ task_make_orog: task_make_sfc_climo: <<: *default_task - command: '&LOAD_MODULES_RUN_TASK_FP; "make_sfc_climo" "&JOBSdir;/JREGIONAL_MAKE_SFC_CLIMO"' + command: '&LOAD_MODULES_RUN_TASK; "make_sfc_climo" "&JOBSdir;/JREGIONAL_MAKE_SFC_CLIMO"' envars: <<: *default_envars join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' diff --git a/parm/wflow/test.yaml b/parm/wflow/test.yaml index 716665b228..9c084d6875 100644 --- a/parm/wflow/test.yaml +++ b/parm/wflow/test.yaml @@ -29,7 +29,7 @@ metatask_integration_test: mem: '{% if global.DO_ENSEMBLE %}{%- for m in range(1, global.NUM_ENS_MEMBERS+1) -%}{{ "%03d "%m }}{%- endfor -%} {% else %}{{ "000"|string }}{% endif %}' task_integration_test_mem#mem#: <<: *default_task - command: '&LOAD_MODULES_RUN_TASK_FP; "integration_test" "&JOBSdir;/JREGIONAL_INTEGRATION_TEST"' + command: '&LOAD_MODULES_RUN_TASK; "integration_test" "&JOBSdir;/JREGIONAL_INTEGRATION_TEST"' join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' dependency: and_run_fcst: diff --git a/parm/wflow/verify_det.yaml b/parm/wflow/verify_det.yaml index 4c6b43ca25..c090ea8b0c 100644 --- a/parm/wflow/verify_det.yaml +++ b/parm/wflow/verify_det.yaml @@ -1,6 +1,6 @@ default_task_verify_det: &default_task_verify_det account: '&ACCOUNT;' - attrs: + attrs: &default_attrs cycledefs: forecast maxtries: '1' envars: &default_vars @@ -21,119 +21,163 @@ default_task_verify_det: &default_task_verify_det queue: '&QUEUE_DEFAULT;' walltime: 00:30:00 -metatask_GridStat_CCPA_all_accums_all_mems: +metatask_GridStat_APCP_all_accums_all_mems: var: ACCUM_HH: '{% for ah in verification.VX_APCP_ACCUMS_HRS %}{% if workflow.FCST_LEN_HRS >= ah %}{{ "%02d " % ah }}{% endif %}{% endfor %}' - metatask_GridStat_CCPA_APCP#ACCUM_HH#h_all_mems: + metatask_GridStat_APCP#ACCUM_HH#h_all_mems: var: mem: '{% if global.DO_ENSEMBLE %}{% for m in range(1, 
global.NUM_ENS_MEMBERS+1) %}{{ "%03d "%m }}{%- endfor -%} {% else %}{{ "000"|string }}{% endif %}' task_run_MET_GridStat_vx_APCP#ACCUM_HH#h_mem#mem#: <<: *default_task_verify_det attrs: + <<: *default_attrs maxtries: '2' - command: '&LOAD_MODULES_RUN_TASK_FP; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX"' + command: '&LOAD_MODULES_RUN_TASK; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX"' envars: <<: *default_vars OBS_DIR: '&CCPA_OBS_DIR;' - VAR: APCP + FIELD_GROUP: 'APCP' ACCUM_HH: '#ACCUM_HH#' METPLUSTOOLNAME: 'GRIDSTAT' OBTYPE: 'CCPA' ENSMEM_INDX: "#mem#" SLASH_ENSMEM_SUBDIR_OR_NULL: '{% if global.DO_ENSEMBLE %}{{ "/mem#mem#" }}{% endif %}' + OBS_AVAIL_INTVL_HRS: '{{- verification.CCPA_OBS_AVAIL_INTVL_HRS }}' + FCST_LEVEL: 'A#ACCUM_HH#' + FCST_THRESH: 'all' walltime: 02:00:00 dependency: and: taskdep_pcpcombine_obs: attrs: - task: run_MET_PcpCombine_obs_APCP#ACCUM_HH#h + task: run_MET_PcpCombine_APCP#ACCUM_HH#h_obs_CCPA taskdep_pcpcombine_fcst: attrs: - task: run_MET_PcpCombine_fcst_APCP#ACCUM_HH#h_mem#mem# + task: run_MET_PcpCombine_APCP#ACCUM_HH#h_fcst_mem#mem# -metatask_GridStat_NOHRSC_all_accums_all_mems: +metatask_GridStat_ASNOW_all_accums_all_mems: var: ACCUM_HH: '{% for ah in verification.VX_ASNOW_ACCUMS_HRS %}{% if workflow.FCST_LEN_HRS >= ah %}{{ "%02d " % ah }}{% endif %}{% endfor %}' - metatask_GridStat_NOHRSC_ASNOW#ACCUM_HH#h_all_mems: + metatask_GridStat_ASNOW#ACCUM_HH#h_all_mems: var: mem: '{% if global.DO_ENSEMBLE %}{% for m in range(1, global.NUM_ENS_MEMBERS+1) %}{{ "%03d "%m }}{%- endfor -%} {% else %}{{ "000"|string }}{% endif %}' task_run_MET_GridStat_vx_ASNOW#ACCUM_HH#h_mem#mem#: <<: *default_task_verify_det attrs: + <<: *default_attrs maxtries: '2' - command: '&LOAD_MODULES_RUN_TASK_FP; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX"' + command: '&LOAD_MODULES_RUN_TASK; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX"' envars: <<: *default_vars OBS_DIR: '&NOHRSC_OBS_DIR;' - VAR: ASNOW + FIELD_GROUP: 'ASNOW' ACCUM_HH: '#ACCUM_HH#' METPLUSTOOLNAME: 'GRIDSTAT' OBTYPE: 'NOHRSC' ENSMEM_INDX: "#mem#" SLASH_ENSMEM_SUBDIR_OR_NULL: '{% if global.DO_ENSEMBLE %}{{ "/mem#mem#" }}{% endif %}' + OBS_AVAIL_INTVL_HRS: '{{- verification.NOHRSC_OBS_AVAIL_INTVL_HRS }}' + FCST_LEVEL: 'A#ACCUM_HH#' + FCST_THRESH: 'all' walltime: 02:00:00 dependency: and: - taskdep: + taskdep_pcpcombine_obs: attrs: - task: get_obs_mrms + task: run_MET_PcpCombine_ASNOW#ACCUM_HH#h_obs_NOHRSC taskdep_pcpcombine_fcst: attrs: - task: run_MET_PcpCombine_fcst_ASNOW#ACCUM_HH#h_mem#mem# + task: run_MET_PcpCombine_ASNOW#ACCUM_HH#h_fcst_mem#mem# -metatask_GridStat_MRMS_all_mems: +metatask_GridStat_REFC_RETOP_all_mems: var: mem: '{% if global.DO_ENSEMBLE %}{% for m in range(1, global.NUM_ENS_MEMBERS+1) %}{{ "%03d "%m }}{%- endfor -%} {% else %}{{ "000"|string }}{% endif %}' - metatask_GridStat_MRMS_mem#mem#: + metatask_GridStat_REFC_RETOP_mem#mem#: var: - VAR: '{% for var in verification.VX_FIELDS %}{% if var in ["REFC", "RETOP"] %}{{ "%s " % var }}{% endif %}{% endfor %}' - task_run_MET_GridStat_vx_#VAR#_mem#mem#: + FIELD_GROUP: '{% for var in verification.VX_FIELD_GROUPS %}{% if var in ["REFC", "RETOP"] %}{{ "%s " % var }}{% endif %}{% endfor %}' + task_run_MET_GridStat_vx_#FIELD_GROUP#_mem#mem#: <<: *default_task_verify_det - command: '&LOAD_MODULES_RUN_TASK_FP; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX"' + command: '&LOAD_MODULES_RUN_TASK; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX"' envars: <<: 
*default_vars
          OBS_DIR: '&MRMS_OBS_DIR;'
-          VAR: '#VAR#'
-          ACCUM_HH: '01'
+          FIELD_GROUP: '#FIELD_GROUP#'
          METPLUSTOOLNAME: 'GRIDSTAT'
          OBTYPE: 'MRMS'
          ENSMEM_INDX: "#mem#"
          SLASH_ENSMEM_SUBDIR_OR_NULL: '{% if global.DO_ENSEMBLE %}{{ "/mem#mem#" }}{% endif %}'
+          OBS_AVAIL_INTVL_HRS: '{{- verification.MRMS_OBS_AVAIL_INTVL_HRS }}'
+          FCST_LEVEL: 'L0'
+          FCST_THRESH: 'all'
        walltime: 02:00:00
        dependency:
          and:
-            taskdep:
+            datadep_all_get_obs_mrms_complete:
              attrs:
-                task: get_obs_mrms
+                age: 00:00:00:30
+              # Check that the flag files that indicate that the get_obs_mrms tasks
+              # are complete are all present before launching any GridStat task.
+              text: '{%- set num_obs_days = workflow.OBS_DAYS_ALL_CYCLES_INST|length %}
+                     {%- set indent = "          " %}
+                     {%- set indent_p2 = indent + "  " %}
+                     {%- for n in range(0, num_obs_days) %}
+                       {%- set yyyymmdd = workflow.OBS_DAYS_ALL_CYCLES_INST[n] %}
+                       {%- if n == 0 %}
+                         {{- workflow.WFLOW_FLAG_FILES_DIR ~ "/get_obs_mrms_" ~ yyyymmdd ~ "_complete.txt" }}
+                       {%- else %}
+                         {{- indent ~ "</datadep>\n" }}
+                         {{- indent ~ "<datadep age=\"00:00:00:30\">\n" }}
+                         {{- indent_p2 ~ workflow.WFLOW_FLAG_FILES_DIR ~ "/get_obs_mrms_" ~ yyyymmdd ~ "_complete.txt" }}
+                       {%- endif %}
+                       {%- if n != num_obs_days-1 %} {{- "\n" }} {%- endif %}
+                     {%- endfor %}'
            datadep_post_files_exist:
              attrs:
                age: 00:00:00:30
              text: !cycstr '{{ workflow.EXPTDIR }}/@Y@m@d@H/post_files_exist_mem#mem#.txt'

-metatask_PointStat_NDAS_all_mems:
+metatask_PointStat_SFC_UPA_all_mems:
  var:
    mem: '{% if global.DO_ENSEMBLE %}{% for m in range(1, global.NUM_ENS_MEMBERS+1) %}{{ "%03d "%m }}{%- endfor -%} {% else %}{{ "000"|string }}{% endif %}'
-  metatask_PointStat_NDAS_mem#mem#:
+  metatask_PointStat_SFC_UPA_mem#mem#:
    var:
-      VAR: '{% for var in verification.VX_FIELDS %}{% if var in ["ADPSFC", "ADPUPA"] %}{{ "%s " % var }}{% endif %}{% endfor %}'
-    task_run_MET_PointStat_vx_#VAR#_mem#mem#:
+      FIELD_GROUP: '{% for var in verification.VX_FIELD_GROUPS %}{% if var in ["SFC", "UPA"] %}{{ "%s " % var }}{% endif %}{% endfor %}'
+    task_run_MET_PointStat_vx_#FIELD_GROUP#_mem#mem#:
      <<: *default_task_verify_det
-      command: '&LOAD_MODULES_RUN_TASK_FP; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX"'
+      command: '&LOAD_MODULES_RUN_TASK; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX"'
      envars:
        <<: *default_vars
        OBS_DIR: '&NDAS_OBS_DIR;'
-        VAR: '#VAR#'
+        FIELD_GROUP: '#FIELD_GROUP#'
        METPLUSTOOLNAME: 'POINTSTAT'
        OBTYPE: 'NDAS'
-        ACCUM_HH: '01'
        ENSMEM_INDX: "#mem#"
        SLASH_ENSMEM_SUBDIR_OR_NULL: '{% if global.DO_ENSEMBLE %}{{ "/mem#mem#" }}{% endif %}'
+        OBS_AVAIL_INTVL_HRS: '{{- verification.NDAS_OBS_AVAIL_INTVL_HRS }}'
+        FCST_LEVEL: 'all'
+        FCST_THRESH: 'all'
      walltime: 01:00:00
      dependency:
        and:
-          taskdep_pb2nc:
+          datadep_all_pb2nc_obs_ndas_complete:
            attrs:
-              task: run_MET_Pb2nc_obs
+              age: 00:00:00:30
+            # Check that the flag files that indicate that the Pb2NC tasks are
+            # complete are all present before launching any PointStat task.
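+            # (One flag file is expected per obs day, named
+            # run_met_pb2nc_obs_ndas_<YYYYMMDD>_complete.txt.)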
+            text: '{%- set num_obs_days = workflow.OBS_DAYS_ALL_CYCLES_INST|length %}
+                   {%- set indent = "          " %}
+                   {%- set indent_p2 = indent + "  " %}
+                   {%- for n in range(0, num_obs_days) %}
+                     {%- set yyyymmdd = workflow.OBS_DAYS_ALL_CYCLES_INST[n] %}
+                     {%- if n == 0 %}
+                       {{- workflow.WFLOW_FLAG_FILES_DIR ~ "/run_met_pb2nc_obs_ndas_" ~ yyyymmdd ~ "_complete.txt" }}
+                     {%- else %}
+                       {{- indent ~ "</datadep>\n" }}
+                       {{- indent ~ "<datadep age=\"00:00:00:30\">\n" }}
+                       {{- indent_p2 ~ workflow.WFLOW_FLAG_FILES_DIR ~ "/run_met_pb2nc_obs_ndas_" ~ yyyymmdd ~ "_complete.txt" }}
+                     {%- endif %}
+                     {%- if n != num_obs_days-1 %} {{- "\n" }} {%- endif %}
+                   {%- endfor %}'
          datadep_post_files_exist:
            attrs:
              age: 00:00:00:30
diff --git a/parm/wflow/verify_ens.yaml b/parm/wflow/verify_ens.yaml
index cf0a8d1dac..d3601c14d8 100644
--- a/parm/wflow/verify_ens.yaml
+++ b/parm/wflow/verify_ens.yaml
@@ -21,222 +21,277 @@ default_task_verify_ens: &default_task_verify_ens
  queue: '&QUEUE_DEFAULT;'
  walltime: 01:00:00

-metatask_GenEnsProd_EnsembleStat_CCPA:
+metatask_GenEnsProd_EnsembleStat_APCP_all_accums:
  var:
    ACCUM_HH: '{% for ah in verification.VX_APCP_ACCUMS_HRS %}{% if workflow.FCST_LEN_HRS >= ah %}{{ "%02d " % ah }}{% endif %}{% endfor %}'
  task_run_MET_GenEnsProd_vx_APCP#ACCUM_HH#h: &task_GenEnsProd_CCPA
    <<: *default_task_verify_ens
-    command: '&LOAD_MODULES_RUN_TASK_FP; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GENENSPROD_OR_ENSEMBLESTAT"'
+    command: '&LOAD_MODULES_RUN_TASK; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GENENSPROD_OR_ENSEMBLESTAT"'
    envars: &envars_GenEnsProd_CCPA
      <<: *default_vars
      ACCUM_HH: '#ACCUM_HH#'
      OBS_DIR: '&CCPA_OBS_DIR;'
-      VAR: APCP
+      FIELD_GROUP: 'APCP'
      METPLUSTOOLNAME: 'GENENSPROD'
      OBTYPE: 'CCPA'
+      FCST_LEVEL: 'A#ACCUM_HH#'
+      FCST_THRESH: 'all'
    dependency:
-      and:
-        # The PcpCombine task for obs must be complete because this GenEnsProd
-        # task checks to see the forecast hours for which obs are available before
-        # processing the forecast for those hours.
- taskdep_pcpcombine_obs: - attrs: - task: run_MET_PcpCombine_obs_APCP#ACCUM_HH#h - metataskdep_pcpcombine_fcst: - attrs: - metatask: PcpCombine_fcst_APCP#ACCUM_HH#h_all_mems + metataskdep_pcpcombine_fcst: + attrs: + metatask: PcpCombine_APCP#ACCUM_HH#h_all_mems task_run_MET_EnsembleStat_vx_APCP#ACCUM_HH#h: <<: *task_GenEnsProd_CCPA envars: <<: *envars_GenEnsProd_CCPA METPLUSTOOLNAME: 'ENSEMBLESTAT' + FCST_THRESH: 'none' dependency: - taskdep_genensprod: - attrs: - task: run_MET_GenEnsProd_vx_APCP#ACCUM_HH#h + and: + taskdep_pcpcombine_obs_ccpa: &taskdep_pcpcombine_obs_ccpa + attrs: + task: run_MET_PcpCombine_APCP#ACCUM_HH#h_obs_CCPA + taskdep_genensprod: + attrs: + task: run_MET_GenEnsProd_vx_APCP#ACCUM_HH#h -metatask_GenEnsProd_EnsembleStat_NOHRSC: +metatask_GenEnsProd_EnsembleStat_ASNOW_all_accums: var: ACCUM_HH: '{% for ah in verification.VX_ASNOW_ACCUMS_HRS %}{% if workflow.FCST_LEN_HRS >= ah %}{{ "%02d " % ah }}{% endif %}{% endfor %}' task_run_MET_GenEnsProd_vx_ASNOW#ACCUM_HH#h: &task_GenEnsProd_NOHRSC <<: *default_task_verify_ens - command: '&LOAD_MODULES_RUN_TASK_FP; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GENENSPROD_OR_ENSEMBLESTAT"' + command: '&LOAD_MODULES_RUN_TASK; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GENENSPROD_OR_ENSEMBLESTAT"' envars: &envars_GenEnsProd_NOHRSC <<: *default_vars ACCUM_HH: '#ACCUM_HH#' OBS_DIR: '&NOHRSC_OBS_DIR;' - VAR: ASNOW + FIELD_GROUP: 'ASNOW' METPLUSTOOLNAME: 'GENENSPROD' OBTYPE: 'NOHRSC' + FCST_LEVEL: 'A#ACCUM_HH#' + FCST_THRESH: 'all' dependency: and: - # The PcpCombine task for obs must be complete because this GenEnsProd - # task checks to see the forecast hours for which obs are available before - # processing the forecast for those hours. metataskdep_pcpcombine_fcst: attrs: - metatask: PcpCombine_fcst_ASNOW#ACCUM_HH#h_all_mems + metatask: PcpCombine_ASNOW#ACCUM_HH#h_all_mems task_run_MET_EnsembleStat_vx_ASNOW#ACCUM_HH#h: <<: *task_GenEnsProd_NOHRSC envars: <<: *envars_GenEnsProd_NOHRSC METPLUSTOOLNAME: 'ENSEMBLESTAT' + FCST_THRESH: 'none' dependency: and: - taskdep: + taskdep_pcpcombine_obs_nohrsc: &taskdep_pcpcombine_obs_nohrsc attrs: - task: get_obs_nohrsc + task: run_MET_PcpCombine_ASNOW#ACCUM_HH#h_obs_NOHRSC taskdep_genensprod: attrs: task: run_MET_GenEnsProd_vx_ASNOW#ACCUM_HH#h -metatask_GenEnsProd_EnsembleStat_MRMS: +metatask_GenEnsProd_EnsembleStat_REFC_RETOP: var: - VAR: '{% for var in verification.VX_FIELDS %}{% if var in ["REFC", "RETOP"] %}{{ "%s " % var }}{% endif %}{% endfor %}' - task_run_MET_GenEnsProd_vx_#VAR#: &task_GenEnsProd_MRMS + FIELD_GROUP: '{% for var in verification.VX_FIELD_GROUPS %}{% if var in ["REFC", "RETOP"] %}{{ "%s " % var }}{% endif %}{% endfor %}' + task_run_MET_GenEnsProd_vx_#FIELD_GROUP#: &task_GenEnsProd_MRMS <<: *default_task_verify_ens - command: '&LOAD_MODULES_RUN_TASK_FP; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GENENSPROD_OR_ENSEMBLESTAT"' + command: '&LOAD_MODULES_RUN_TASK; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GENENSPROD_OR_ENSEMBLESTAT"' envars: &envars_GenEnsProd_MRMS <<: *default_vars ACCUM_HH: '01' OBS_DIR: '&MRMS_OBS_DIR;' - VAR: '#VAR#' + FIELD_GROUP: '#FIELD_GROUP#' METPLUSTOOLNAME: 'GENENSPROD' OBTYPE: 'MRMS' + FCST_LEVEL: 'L0' + FCST_THRESH: 'all' dependency: and: - taskdep: - attrs: - task: get_obs_mrms - metataskdep_post_files_exist: &post_files_exist + metataskdep_check_post_output: &check_post_output attrs: metatask: check_post_output_all_mems - - task_run_MET_EnsembleStat_vx_#VAR#: + task_run_MET_EnsembleStat_vx_#FIELD_GROUP#: <<: *task_GenEnsProd_MRMS envars: <<: *envars_GenEnsProd_MRMS 
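+      # YAML merge: the alias above pulls in all of the GenEnsProd envars;
+      # keys set explicitly below override the merged-in values.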
      METPLUSTOOLNAME: 'ENSEMBLESTAT'
+      FCST_LEVEL: 'L0'
+      FCST_THRESH: 'none'
    dependency:
-      taskdep:
-        attrs:
-          task: run_MET_GenEnsProd_vx_#VAR#
+      and:
+        datadep_all_get_obs_mrms_complete: &all_get_obs_mrms_complete
+          attrs:
+            age: 00:00:00:30
+          # Check that the flag files that indicate that the get_obs_mrms tasks
+          # are complete are all present before launching any EnsembleStat task.
+          text: '{%- set num_obs_days = workflow.OBS_DAYS_ALL_CYCLES_INST|length %}
+                 {%- set indent = "          " %}
+                 {%- set indent_p2 = indent + "  " %}
+                 {%- for n in range(0, num_obs_days) %}
+                   {%- set yyyymmdd = workflow.OBS_DAYS_ALL_CYCLES_INST[n] %}
+                   {%- if n == 0 %}
+                     {{- workflow.WFLOW_FLAG_FILES_DIR ~ "/get_obs_mrms_" ~ yyyymmdd ~ "_complete.txt" }}
+                   {%- else %}
+                     {{- indent ~ "</datadep>\n" }}
+                     {{- indent ~ "<datadep age=\"00:00:00:30\">\n" }}
+                     {{- indent_p2 ~ workflow.WFLOW_FLAG_FILES_DIR ~ "/get_obs_mrms_" ~ yyyymmdd ~ "_complete.txt" }}
+                   {%- endif %}
+                   {%- if n != num_obs_days-1 %} {{- "\n" }} {%- endif %}
+                 {%- endfor %}'
+        taskdep_genensprod:
+          attrs:
+            task: run_MET_GenEnsProd_vx_#FIELD_GROUP#

-metatask_GenEnsProd_EnsembleStat_NDAS:
+metatask_GenEnsProd_EnsembleStat_SFC_UPA:
  var:
-    VAR: '{% for var in verification.VX_FIELDS %}{% if var in ["ADPSFC", "ADPUPA"] %}{{ "%s " % var }}{% endif %}{% endfor %}'
-  task_run_MET_GenEnsProd_vx_#VAR#: &task_GenEnsProd_NDAS
+    FIELD_GROUP: '{% for var in verification.VX_FIELD_GROUPS %}{% if var in ["SFC", "UPA"] %}{{ "%s " % var }}{% endif %}{% endfor %}'
+  task_run_MET_GenEnsProd_vx_#FIELD_GROUP#: &task_GenEnsProd_NDAS
    <<: *default_task_verify_ens
-    command: '&LOAD_MODULES_RUN_TASK_FP; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GENENSPROD_OR_ENSEMBLESTAT"'
+    command: '&LOAD_MODULES_RUN_TASK; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GENENSPROD_OR_ENSEMBLESTAT"'
    envars: &envars_GenEnsProd_NDAS
      <<: *default_vars
      OBS_DIR: '&NDAS_OBS_DIR;'
-      VAR: '#VAR#'
+      FIELD_GROUP: '#FIELD_GROUP#'
      METPLUSTOOLNAME: 'GENENSPROD'
      OBTYPE: 'NDAS'
      ACCUM_HH: '01'
-    walltime: 02:30:00
+      FCST_LEVEL: 'all'
+      FCST_THRESH: 'all'
+    walltime: 04:15:00
    dependency:
-      and:
-        # The Pb2nc task (which is run only for obs) must be complete because
-        # this GenEnsProd task checks to see the forecast hours for which obs
-        # are available before processing the forecast for those hours.
-        taskdep_pb2nc:
-          attrs:
-            task: run_MET_Pb2nc_obs
-        metataskdep_post_files_exist:
-          <<: *post_files_exist
-  task_run_MET_EnsembleStat_vx_#VAR#:
+      metataskdep_check_post_output:
+        <<: *check_post_output
+  task_run_MET_EnsembleStat_vx_#FIELD_GROUP#:
    <<: *task_GenEnsProd_NDAS
    envars:
      <<: *envars_GenEnsProd_NDAS
      METPLUSTOOLNAME: 'ENSEMBLESTAT'
    walltime: 01:00:00
    dependency:
-      taskdep_genensprod:
-        attrs:
-          task: run_MET_GenEnsProd_vx_#VAR#
+      and:
+        datadep_all_pb2nc_obs_ndas_complete: &all_pb2nc_obs_ndas_complete
+          attrs:
+            age: 00:00:00:30
+          # Check that the flag files that indicate that the Pb2NC tasks are
+          # complete are all present before launching any EnsembleStat task.
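+          # (These are the same per-obs-day flag files that the deterministic
+          # PointStat tasks in verify_det.yaml wait on.)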
+          text: '{%- set num_obs_days = workflow.OBS_DAYS_ALL_CYCLES_INST|length %}
+                 {%- set indent = "          " %}
+                 {%- set indent_p2 = indent + "  " %}
+                 {%- for n in range(0, num_obs_days) %}
+                   {%- set yyyymmdd = workflow.OBS_DAYS_ALL_CYCLES_INST[n] %}
+                   {%- if n == 0 %}
+                     {{- workflow.WFLOW_FLAG_FILES_DIR ~ "/run_met_pb2nc_obs_ndas_" ~ yyyymmdd ~ "_complete.txt" }}
+                   {%- else %}
+                     {{- indent ~ "</datadep>\n" }}
+                     {{- indent ~ "<datadep age=\"00:00:00:30\">\n" }}
+                     {{- indent_p2 ~ workflow.WFLOW_FLAG_FILES_DIR ~ "/run_met_pb2nc_obs_ndas_" ~ yyyymmdd ~ "_complete.txt" }}
+                   {%- endif %}
+                   {%- if n != num_obs_days-1 %} {{- "\n" }} {%- endif %}
+                 {%- endfor %}'
+        taskdep_genensprod:
+          attrs:
+            task: run_MET_GenEnsProd_vx_#FIELD_GROUP#

-metatask_GridStat_CCPA_ensmeanprob_all_accums:
+metatask_GridStat_APCP_all_accums_ensmeanprob:
  var:
    stat: MEAN PROB
    statlc: mean prob
-  metatask_GridStat_CCPA_ens#statlc#_all_accums:
+  metatask_GridStat_APCP_all_accums_ens#statlc#:
    var:
      ACCUM_HH: '{% for ah in verification.VX_APCP_ACCUMS_HRS %}{% if workflow.FCST_LEN_HRS >= ah %}{{ "%02d " % ah }}{% endif %}{% endfor %}'
-    task_run_MET_GridStat_vx_ens#statlc#_APCP#ACCUM_HH#h:
+    task_run_MET_GridStat_vx_APCP#ACCUM_HH#h_ens#statlc#:
      <<: *default_task_verify_ens
-      command: '&LOAD_MODULES_RUN_TASK_FP; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX_ENS#stat#"'
+      command: '&LOAD_MODULES_RUN_TASK; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX_ENS#stat#"'
      envars:
        <<: *default_vars
        OBS_DIR: '&CCPA_OBS_DIR;'
-        VAR: APCP
+        FIELD_GROUP: 'APCP'
        METPLUSTOOLNAME: 'GRIDSTAT'
        OBTYPE: 'CCPA'
        ACCUM_HH: '#ACCUM_HH#'
+        FCST_LEVEL: 'A#ACCUM_HH#'
+        FCST_THRESH: 'all'
      dependency:
-        taskdep:
-          attrs:
-            task: run_MET_GenEnsProd_vx_APCP#ACCUM_HH#h
+        and:
+          taskdep_pcpcombine_obs_ccpa:
+            <<: *taskdep_pcpcombine_obs_ccpa
+          taskdep_genensprod:
+            attrs:
+              task: run_MET_GenEnsProd_vx_APCP#ACCUM_HH#h

-metatask_GridStat_NOHRSC_ensmeanprob_all_accums:
+metatask_GridStat_ASNOW_all_accums_ensmeanprob:
  var:
    stat: MEAN PROB
    statlc: mean prob
-  metatask_GridStat_NOHRSC_ens#statlc#_all_accums:
+  metatask_GridStat_ASNOW_all_accums_ens#statlc#:
    var:
      ACCUM_HH: '{% for ah in verification.VX_ASNOW_ACCUMS_HRS %}{% if workflow.FCST_LEN_HRS >= ah %}{{ "%02d " % ah }}{% endif %}{% endfor %}'
-    task_run_MET_GridStat_vx_ens#statlc#_ASNOW#ACCUM_HH#h:
+    task_run_MET_GridStat_vx_ASNOW#ACCUM_HH#h_ens#statlc#:
      <<: *default_task_verify_ens
-      command: '&LOAD_MODULES_RUN_TASK_FP;
"run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX_ENSPROB"' + command: '&LOAD_MODULES_RUN_TASK; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX_ENSPROB"' envars: <<: *default_vars ACCUM_HH: '01' OBS_DIR: '&MRMS_OBS_DIR;' - VAR: '#VAR#' + FIELD_GROUP: '#FIELD_GROUP#' METPLUSTOOLNAME: 'GRIDSTAT' OBTYPE: 'MRMS' + FCST_LEVEL: 'L0' + FCST_THRESH: 'all' dependency: - taskdep: - attrs: - task: run_MET_GenEnsProd_vx_#VAR# + and: + datadep_all_get_obs_mrms_complete: + <<: *all_get_obs_mrms_complete + taskdep_genensprod: + attrs: + task: run_MET_GenEnsProd_vx_#FIELD_GROUP# -metatask_PointStat_NDAS_ensmeanprob: +metatask_PointStat_SFC_UPA_ensmeanprob: var: stat: MEAN PROB statlc: mean prob - metatask_PointStat_NDAS_ens#statlc#: + metatask_PointStat_SFC_UPA_ens#statlc#: var: - VAR: '{% for var in verification.VX_FIELDS %}{% if var in ["ADPSFC", "ADPUPA"] %}{{ "%s " % var }}{% endif %}{% endfor %}' - task_run_MET_PointStat_vx_ens#statlc#_#VAR#: + FIELD_GROUP: '{% for var in verification.VX_FIELD_GROUPS %}{% if var in ["SFC", "UPA"] %}{{ "%s " % var }}{% endif %}{% endfor %}' + task_run_MET_PointStat_vx_#FIELD_GROUP#_ens#statlc#: <<: *default_task_verify_ens - command: '&LOAD_MODULES_RUN_TASK_FP; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX_ENS#stat#"' + command: '&LOAD_MODULES_RUN_TASK; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX_ENS#stat#"' envars: <<: *default_vars OBS_DIR: '&NDAS_OBS_DIR;' - VAR: '#VAR#' + FIELD_GROUP: '#FIELD_GROUP#' METPLUSTOOLNAME: 'POINTSTAT' OBTYPE: 'NDAS' ACCUM_HH: '01' + FCST_LEVEL: 'all' + FCST_THRESH: 'all' dependency: - taskdep: - attrs: - task: run_MET_GenEnsProd_vx_#VAR# + and: + datadep_all_pb2nc_obs_ndas_complete: + <<: *all_pb2nc_obs_ndas_complete + taskdep_genensprod: + attrs: + task: run_MET_GenEnsProd_vx_#FIELD_GROUP# diff --git a/parm/wflow/verify_pre.yaml b/parm/wflow/verify_pre.yaml index eb1a7eb796..2b86772565 100644 --- a/parm/wflow/verify_pre.yaml +++ b/parm/wflow/verify_pre.yaml @@ -1,7 +1,7 @@ default_task_verify_pre: &default_task_verify_pre account: '&ACCOUNT;' attrs: - cycledefs: forecast + cycledefs: cycledefs_obs_days_inst maxtries: '1' envars: &default_vars GLOBAL_VAR_DEFNS_FP: '&GLOBAL_VAR_DEFNS_FP;' @@ -23,26 +23,27 @@ default_task_verify_pre: &default_task_verify_pre task_get_obs_ccpa: <<: *default_task_verify_pre - command: '&LOAD_MODULES_RUN_TASK_FP; "get_obs" "&JOBSdir;/JREGIONAL_GET_VERIF_OBS"' + attrs: + cycledefs: cycledefs_obs_days_cumul + maxtries: '1' + command: '&LOAD_MODULES_RUN_TASK; "get_obs" "&JOBSdir;/JREGIONAL_GET_VERIF_OBS"' envars: <<: *default_vars - ACCUM_HH: '01' - OBS_DIR: '&CCPA_OBS_DIR;' OBTYPE: 'CCPA' - FHR: '{% for h in range(0, workflow.FCST_LEN_HRS+1) %}{{ " %02d" % h }}{% endfor %}' native: '{% if platform.get("SCHED_NATIVE_CMD_HPSS") %}{{ platform.SCHED_NATIVE_CMD_HPSS }}{% else %}{{ platform.SCHED_NATIVE_CMD}}{% endif %}' partition: '{% if platform.get("PARTITION_HPSS") %}&PARTITION_HPSS;{% else %}None{% endif %}' queue: "&QUEUE_HPSS;" - walltime: 00:45:00 + walltime: 02:00:00 task_get_obs_nohrsc: <<: *default_task_verify_pre - command: '&LOAD_MODULES_RUN_TASK_FP; "get_obs" "&JOBSdir;/JREGIONAL_GET_VERIF_OBS"' + attrs: + cycledefs: cycledefs_obs_days_cumul + maxtries: '1' + command: '&LOAD_MODULES_RUN_TASK; "get_obs" "&JOBSdir;/JREGIONAL_GET_VERIF_OBS"' envars: <<: *default_vars - OBS_DIR: '&NOHRSC_OBS_DIR;' OBTYPE: 'NOHRSC' - FHR: '{% for h in range(0, workflow.FCST_LEN_HRS+1) %}{{ " %02d" % h }}{% endfor %}' native: '{% if 
platform.get("SCHED_NATIVE_CMD_HPSS") %}{{ platform.SCHED_NATIVE_CMD_HPSS }}{% else %}{{ platform.SCHED_NATIVE_CMD}}{% endif %}' partition: '{% if platform.get("PARTITION_HPSS") %}&PARTITION_HPSS;{% else %}None{% endif %}' queue: "&QUEUE_HPSS;" @@ -50,13 +51,10 @@ task_get_obs_nohrsc: task_get_obs_mrms: <<: *default_task_verify_pre - command: '&LOAD_MODULES_RUN_TASK_FP; "get_obs" "&JOBSdir;/JREGIONAL_GET_VERIF_OBS"' + command: '&LOAD_MODULES_RUN_TASK; "get_obs" "&JOBSdir;/JREGIONAL_GET_VERIF_OBS"' envars: <<: *default_vars - OBS_DIR: '&MRMS_OBS_DIR;' OBTYPE: 'MRMS' - VAR: 'REFC RETOP' - FHR: '{% for h in range(0, workflow.FCST_LEN_HRS+1) %}{{ " %02d" % h }}{% endfor %}' native: '{% if platform.get("SCHED_NATIVE_CMD_HPSS") %}{{ platform.SCHED_NATIVE_CMD_HPSS }}{% else %}{{ platform.SCHED_NATIVE_CMD}}{% endif %}' partition: '{% if platform.get("PARTITION_HPSS") %}&PARTITION_HPSS;{% else %}None{% endif %}' queue: "&QUEUE_HPSS;" @@ -64,28 +62,26 @@ task_get_obs_mrms: task_get_obs_ndas: <<: *default_task_verify_pre + command: '&LOAD_MODULES_RUN_TASK; "get_obs" "&JOBSdir;/JREGIONAL_GET_VERIF_OBS"' envars: <<: *default_vars - OBS_DIR: '&NDAS_OBS_DIR;' OBTYPE: 'NDAS' - FHR: '{% for h in range(0, workflow.FCST_LEN_HRS+1) %}{{ " %02d" % h }}{% endfor %}' - command: '&LOAD_MODULES_RUN_TASK_FP; "get_obs" "&JOBSdir;/JREGIONAL_GET_VERIF_OBS"' queue: "&QUEUE_HPSS;" native: '{% if platform.get("SCHED_NATIVE_CMD_HPSS") %}{{ platform.SCHED_NATIVE_CMD_HPSS }}{% else %}{{ platform.SCHED_NATIVE_CMD}}{% endif %}' partition: '{% if platform.get("PARTITION_HPSS") %}&PARTITION_HPSS;{% else %}None{% endif %}' walltime: 02:00:00 -task_run_MET_Pb2nc_obs: +task_run_MET_Pb2nc_obs_NDAS: <<: *default_task_verify_pre attrs: - cycledefs: forecast + cycledefs: cycledefs_obs_days_inst maxtries: '2' - command: '&LOAD_MODULES_RUN_TASK_FP; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_PB2NC_OBS"' + command: '&LOAD_MODULES_RUN_TASK; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_PB2NC_OBS"' envars: <<: *default_vars - VAR: ADPSFC + FIELD_GROUP: 'SFC' ACCUM_HH: '01' - obs_or_fcst: obs + FCST_OR_OBS: OBS OBTYPE: NDAS OBS_DIR: '&NDAS_OBS_DIR;' METPLUSTOOLNAME: 'PB2NC' @@ -102,35 +98,89 @@ task_run_MET_Pb2nc_obs: attrs: task: get_obs_ndas -metatask_PcpCombine_obs: +metatask_PcpCombine_APCP_all_accums_obs_CCPA: var: ACCUM_HH: '{% for ah in verification.VX_APCP_ACCUMS_HRS %}{% if workflow.FCST_LEN_HRS >= ah %}{{ "%02d " % ah }}{% endif %}{% endfor %}' - task_run_MET_PcpCombine_obs_APCP#ACCUM_HH#h: + task_run_MET_PcpCombine_APCP#ACCUM_HH#h_obs_CCPA: <<: *default_task_verify_pre attrs: cycledefs: forecast maxtries: '2' - command: '&LOAD_MODULES_RUN_TASK_FP; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_PCPCOMBINE"' + command: '&LOAD_MODULES_RUN_TASK; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_PCPCOMBINE"' envars: <<: *default_vars - VAR: APCP + FIELD_GROUP: 'APCP' ACCUM_HH: '#ACCUM_HH#' - obs_or_fcst: obs + FCST_OR_OBS: OBS OBTYPE: CCPA OBS_DIR: '&CCPA_OBS_DIR;' + OBS_AVAIL_INTVL_HRS: '{{- verification.CCPA_OBS_AVAIL_INTVL_HRS }}' METPLUSTOOLNAME: 'PCPCOMBINE' dependency: and: datadep: text: "&CCPA_OBS_DIR;" - or: - not: - taskvalid: - attrs: - task: get_obs_ccpa - taskdep: - attrs: - task: get_obs_ccpa + datadep_all_get_obs_ccpa_complete: + attrs: + age: 00:00:00:30 + # Check that the flag files that indicate that the get_obs_ccpa tasks + # are complete are all present before launching any PcpCombine task. 
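+          # (For a hypothetical obs day of 20230217 this expects the flag file
+          # get_obs_ccpa_20230217_complete.txt; a rendered example of this
+          # Jinja appears in an aside further below.)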
+          text: '{%- set num_obs_days = workflow.OBS_DAYS_ALL_CYCLES_CUMUL|length %}
+                 {%- set indent = "          " %}
+                 {%- set indent_p2 = indent + "  " %}
+                 {%- for n in range(0, num_obs_days) %}
+                   {%- set yyyymmdd = workflow.OBS_DAYS_ALL_CYCLES_CUMUL[n] %}
+                   {%- if n == 0 %}
+                     {{- workflow.WFLOW_FLAG_FILES_DIR ~ "/get_obs_ccpa_" ~ yyyymmdd ~ "_complete.txt" }}
+                   {%- else %}
+                     {{- indent ~ "</datadep>\n" }}
+                     {{- indent ~ "<datadep age=\"00:00:00:30\">\n" }}
+                     {{- indent_p2 ~ workflow.WFLOW_FLAG_FILES_DIR ~ "/get_obs_ccpa_" ~ yyyymmdd ~ "_complete.txt" }}
+                   {%- endif %}
+                   {%- if n != num_obs_days-1 %} {{- "\n" }} {%- endif %}
+                 {%- endfor %}'
+
+metatask_PcpCombine_ASNOW_all_accums_obs_NOHRSC:
+  var:
+    ACCUM_HH: '{% for ah in verification.VX_ASNOW_ACCUMS_HRS %}{% if workflow.FCST_LEN_HRS >= ah %}{{ "%02d " % ah }}{% endif %}{% endfor %}'
+  task_run_MET_PcpCombine_ASNOW#ACCUM_HH#h_obs_NOHRSC:
+    <<: *default_task_verify_pre
+    attrs:
+      cycledefs: forecast
+      maxtries: '2'
+    command: '&LOAD_MODULES_RUN_TASK; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_PCPCOMBINE"'
+    envars:
+      <<: *default_vars
+      FIELD_GROUP: 'ASNOW'
+      ACCUM_HH: '#ACCUM_HH#'
+      FCST_OR_OBS: OBS
+      OBTYPE: NOHRSC
+      OBS_DIR: '&NOHRSC_OBS_DIR;'
+      OBS_AVAIL_INTVL_HRS: '{{- verification.NOHRSC_OBS_AVAIL_INTVL_HRS }}'
+      METPLUSTOOLNAME: 'PCPCOMBINE'
+    dependency:
+      and:
+        datadep:
+          text: "&NOHRSC_OBS_DIR;"
+        datadep_all_get_obs_nohrsc_complete:
+          attrs:
+            age: 00:00:00:30
+          # Check that the flag files that indicate that the get_obs_nohrsc tasks
+          # are complete are all present before launching any PcpCombine task.
+          text: '{%- set num_obs_days = workflow.OBS_DAYS_ALL_CYCLES_CUMUL|length %}
+                 {%- set indent = "          " %}
+                 {%- set indent_p2 = indent + "  " %}
+                 {%- for n in range(0, num_obs_days) %}
+                   {%- set yyyymmdd = workflow.OBS_DAYS_ALL_CYCLES_CUMUL[n] %}
+                   {%- if n == 0 %}
+                     {{- workflow.WFLOW_FLAG_FILES_DIR ~ "/get_obs_nohrsc_" ~ yyyymmdd ~ "_complete.txt" }}
+                   {%- else %}
+                     {{- indent ~ "</datadep>\n" }}
+                     {{- indent ~ "<datadep age=\"00:00:00:30\">\n" }}
+                     {{- indent_p2 ~ workflow.WFLOW_FLAG_FILES_DIR ~ "/get_obs_nohrsc_" ~ yyyymmdd ~ "_complete.txt" }}
+                   {%- endif %}
+                   {%- if n != num_obs_days-1 %} {{- "\n" }} {%- endif %}
+                 {%- endfor %}'

 metatask_check_post_output_all_mems:
   var:
@@ -140,11 +190,9 @@ metatask_check_post_output_all_mems:
    attrs:
      cycledefs: forecast
      maxtries: '1'
-    command: '&LOAD_MODULES_RUN_TASK_FP; "run_vx" "&JOBSdir;/JREGIONAL_CHECK_POST_OUTPUT"'
+    command: '&LOAD_MODULES_RUN_TASK; "run_vx" "&JOBSdir;/JREGIONAL_CHECK_POST_OUTPUT"'
    envars:
      <<: *default_vars
-      VAR: APCP
-      ACCUM_HH: '01'
      ENSMEM_INDX: '#mem#'
    dependency:
      # This "or" checks that the necessary stand-alone post tasks or forecast
@@ -183,7 +231,15 @@ metatask_check_post_output_all_mems:
#          metatask: run_post_mem#mem#_all_fhrs
          taskdep:
            attrs:
-              task: '{% for h in range(0, workflow.LONG_FCST_LEN+1) %}{% if h > 0 %}{{" \n"}}{% endif %}{%- endfor -%}'
+              task: '{%- for h in range(0, workflow.LONG_FCST_LEN+1) %}
+                     {%- if h > 0 %}
+                     {{- " \n" }}
+                     {%- endif %}
+                     {%- endfor %}'
        # This "and" is to check whether post is being run inline (i.e. as part of
        # the weather model), and if so, to ensure that the forecast task for the
        # current member has completed.
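(Aside on the flag-file dependencies introduced in this file: each "text:"
Jinja block above expands to one <datadep> element per obs day. Assuming
two hypothetical obs days, 20230217 and 20230218, the get_obs_ccpa block
renders roughly as

  <datadep age="00:00:00:30">{WFLOW_FLAG_FILES_DIR}/get_obs_ccpa_20230217_complete.txt
  </datadep>
  <datadep age="00:00:00:30">
    {WFLOW_FLAG_FILES_DIR}/get_obs_ccpa_20230218_complete.txt</datadep>

where {WFLOW_FLAG_FILES_DIR} stands in for the configured flag-file
directory, so the PcpCombine tasks launch only once every obs day's
get_obs task has written its completion flag.)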
@@ -210,23 +266,23 @@ metatask_check_post_output_all_mems: taskvalid: <<: *fcst_task_exists -metatask_PcpCombine_fcst_APCP_all_accums_all_mems: +metatask_PcpCombine_APCP_all_accums_all_mems: var: ACCUM_HH: '{% for ah in verification.VX_APCP_ACCUMS_HRS %}{% if workflow.FCST_LEN_HRS >= ah %}{{ "%02d " % ah }}{% endif %}{% endfor %}' - metatask_PcpCombine_fcst_APCP#ACCUM_HH#h_all_mems: + metatask_PcpCombine_APCP#ACCUM_HH#h_all_mems: var: mem: '{% if global.DO_ENSEMBLE %}{% for m in range(1, global.NUM_ENS_MEMBERS+1) %}{{ "%03d "%m }}{%- endfor -%} {% else %}{{ "000"|string }}{% endif %}' - task_run_MET_PcpCombine_fcst_APCP#ACCUM_HH#h_mem#mem#: + task_run_MET_PcpCombine_APCP#ACCUM_HH#h_fcst_mem#mem#: <<: *default_task_verify_pre attrs: cycledefs: forecast maxtries: '2' - command: '&LOAD_MODULES_RUN_TASK_FP; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_PCPCOMBINE"' + command: '&LOAD_MODULES_RUN_TASK; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_PCPCOMBINE"' envars: <<: *default_vars - VAR: APCP + FIELD_GROUP: 'APCP' ACCUM_HH: '#ACCUM_HH#' - obs_or_fcst: fcst + FCST_OR_OBS: FCST OBTYPE: CCPA OBS_DIR: '&CCPA_OBS_DIR;' METPLUSTOOLNAME: 'PCPCOMBINE' @@ -236,25 +292,25 @@ metatask_PcpCombine_fcst_APCP_all_accums_all_mems: attrs: age: 00:00:00:30 text: !cycstr '{{ workflow.EXPTDIR }}/@Y@m@d@H/post_files_exist_mem#mem#.txt' - walltime: 00:10:00 + walltime: 00:30:00 -metatask_PcpCombine_fcst_ASNOW_all_accums_all_mems: +metatask_PcpCombine_ASNOW_all_accums_all_mems: var: ACCUM_HH: '{% for ah in verification.VX_ASNOW_ACCUMS_HRS %}{% if workflow.FCST_LEN_HRS >= ah %}{{ "%02d " % ah }}{% endif %}{% endfor %}' - metatask_PcpCombine_fcst_ASNOW#ACCUM_HH#h_all_mems: + metatask_PcpCombine_ASNOW#ACCUM_HH#h_all_mems: var: mem: '{% if global.DO_ENSEMBLE %}{% for m in range(1, global.NUM_ENS_MEMBERS+1) %}{{ "%03d "%m }}{%- endfor -%} {% else %}{{ "000"|string }}{% endif %}' - task_run_MET_PcpCombine_fcst_ASNOW#ACCUM_HH#h_mem#mem#: + task_run_MET_PcpCombine_ASNOW#ACCUM_HH#h_fcst_mem#mem#: <<: *default_task_verify_pre attrs: cycledefs: forecast maxtries: '2' - command: '&LOAD_MODULES_RUN_TASK_FP; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_PCPCOMBINE"' + command: '&LOAD_MODULES_RUN_TASK; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_PCPCOMBINE"' envars: <<: *default_vars - VAR: ASNOW + FIELD_GROUP: 'ASNOW' ACCUM_HH: '#ACCUM_HH#' - obs_or_fcst: fcst + FCST_OR_OBS: FCST OBTYPE: NOHRSC OBS_DIR: '&NOHRSC_OBS_DIR;' METPLUSTOOLNAME: 'PCPCOMBINE' @@ -264,4 +320,4 @@ metatask_PcpCombine_fcst_ASNOW_all_accums_all_mems: attrs: age: 00:00:00:30 text: !cycstr '{{ workflow.EXPTDIR }}/@Y@m@d@H/post_files_exist_mem#mem#.txt' - walltime: 00:10:00 + walltime: 00:30:00 diff --git a/scripts/exregional_check_post_output.sh b/scripts/exregional_check_post_output.sh index 1352d38789..4d5836519c 100755 --- a/scripts/exregional_check_post_output.sh +++ b/scripts/exregional_check_post_output.sh @@ -3,20 +3,52 @@ # #----------------------------------------------------------------------- # -# Source the variable definitions file and the bash utility functions. +# The ex-script for checking the post output. 
+# +# Run-time environment variables: +# +# ACCUM_HH +# CDATE +# ENSMEM_INDX +# GLOBAL_VAR_DEFNS_FP +# METPLUS_ROOT (used by ush/set_leadhrs.py) +# +# Experiment variables +# +# user: +# USHdir +# +# workflow: +# FCST_LEN_HRS +# +# global: +# DO_ENSEMBLE +# ENS_TIME_LAG_HRS +# +# verification: +# FCST_FN_TEMPLATE +# FCST_SUBDIR_TEMPLATE +# NUM_MISSING_FCST_FILES_MAX +# VX_FCST_INPUT_BASEDIR +# VX_NDIGITS_ENSMEM_NAMES +# +# constants: +# SECS_PER_HOUR # #----------------------------------------------------------------------- # -. $USHdir/source_util_funcs.sh -source_config_for_task "task_run_met_pcpcombine|task_run_post" ${GLOBAL_VAR_DEFNS_FP} + # #----------------------------------------------------------------------- # -# Source files defining auxiliary functions for verification. +# Source the variable definitions file and the bash utility functions. # #----------------------------------------------------------------------- # -. $USHdir/set_vx_fhr_list.sh +. $USHdir/source_util_funcs.sh +for sect in user nco workflow global verification constants task_run_post ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # @@ -50,10 +82,11 @@ print_info_msg " Entering script: \"${scrfunc_fn}\" In directory: \"${scrfunc_dir}\" -This is the ex-script for the task that checks that all the post-processed -output files in fact exist and are at least a certain age. These files -may have been generated by UPP as part of the current SRW App workflow, -or they may be user-staged. +This is the ex-script for the task that checks that no more than +NUM_MISSING_FCST_FILES_MAX of each forecast's (ensemble member's) post- +processed output files are missing. Note that such files may have been +generated by UPP as part of the current SRW App workflow, or they may be +user-staged. ========================================================================" # #----------------------------------------------------------------------- @@ -63,38 +96,34 @@ or they may be user-staged. #----------------------------------------------------------------------- # i="0" -if [ "${DO_ENSEMBLE}" = "TRUE" ]; then +if [[ $(boolify "${DO_ENSEMBLE}") == "TRUE" ]]; then i=$( bc -l <<< "${ENSMEM_INDX}-1" ) fi time_lag=$( bc -l <<< "${ENS_TIME_LAG_HRS[$i]}*${SECS_PER_HOUR}" ) # #----------------------------------------------------------------------- # -# Get the list of forecast hours for which there is a post-processed -# output file. Note that: -# -# 1) CDATE (in YYYYMMDDHH format) is already available via the call to -# the job_preamble.sh script in the j-job of this ex-script. -# 2) VAR is set to "APCP" and ACCUM_HH is set to "01" because we assume -# the output files are hourly, so these settings will result in the -# function set_vx_fhr_list checking for existence of hourly post output -# files. +# Check to ensure that all the expected post-processed forecast output +# files are present on disk. This is done by the set_leadhrs function +# below. Note that CDATE (in YYYYMMDDHH format) is already available via +# the call to the job_preamble.sh script in the j-job of this ex-script. 
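+# For example (values are illustrative only), with CDATE=2023021700,
+# FCST_LEN_HRS=6, and an hourly forecast output interval, set_leadhrs.py
+# looks for the files for lead hours 0 through 6 under VX_FCST_INPUT_BASEDIR
+# using the FCST_INPUT_FN_TEMPLATE set below, and fails if more than
+# NUM_MISSING_FCST_FILES_MAX of them are missing.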
# #----------------------------------------------------------------------- # ensmem_indx=$(printf "%0${VX_NDIGITS_ENSMEM_NAMES}d" $(( 10#${ENSMEM_INDX}))) ensmem_name="mem${ensmem_indx}" FCST_INPUT_FN_TEMPLATE=$( eval echo ${FCST_SUBDIR_TEMPLATE:+${FCST_SUBDIR_TEMPLATE}/}${FCST_FN_TEMPLATE} ) -set_vx_fhr_list \ - cdate="${CDATE}" \ - fcst_len_hrs="${FCST_LEN_HRS}" \ - field="$VAR" \ - accum_hh="${ACCUM_HH}" \ - base_dir="${VX_FCST_INPUT_BASEDIR}" \ - fn_template="${FCST_INPUT_FN_TEMPLATE}" \ - check_accum_contrib_files="FALSE" \ - num_missing_files_max="${NUM_MISSING_FCST_FILES_MAX}" \ - outvarname_fhr_list="FHR_LIST" + +FHR_LIST=$( python3 $USHdir/set_leadhrs.py \ + --date_init="${CDATE}" \ + --lhr_min="0" \ + --lhr_max="${FCST_LEN_HRS}" \ + --lhr_intvl="${VX_FCST_OUTPUT_INTVL_HRS}" \ + --base_dir="${VX_FCST_INPUT_BASEDIR}" \ + --fn_template="${FCST_INPUT_FN_TEMPLATE}" \ + --num_missing_files_max="${NUM_MISSING_FCST_FILES_MAX}" \ + --time_lag="${time_lag%.*}") || \ +print_err_msg_exit "Call to set_leadhrs.py failed with return code: $?" # #----------------------------------------------------------------------- # diff --git a/scripts/exregional_get_extrn_mdl_files.sh b/scripts/exregional_get_extrn_mdl_files.sh index 018a30c285..96c3136e33 100755 --- a/scripts/exregional_get_extrn_mdl_files.sh +++ b/scripts/exregional_get_extrn_mdl_files.sh @@ -1,5 +1,65 @@ #!/usr/bin/env bash + +# +#----------------------------------------------------------------------- +# +# The ex-script for getting the model files that will be used for either +# initial conditions or lateral boundary conditions for the experiment. +# +# Run-time environment variables: +# +# CDATE +# COMIN +# cyc +# DATA +# EXTRN_MDL_CDATE +# EXTRN_MDL_NAME +# EXTRN_MDL_STAGING_DIR +# GLOBAL_VAR_DEFNS_FP +# ICS_OR_LBCS +# NET +# PDY +# TIME_OFFSET_HRS +# +# Experiment variables +# +# user: +# MACHINE +# PARMdir +# RUN_ENVIR +# USHdir +# +# platform: +# EXTRN_MDL_DATA_STORES +# +# workflow: +# DATE_FIRST_CYCL +# EXTRN_MDL_VAR_DEFNS_FN +# FCST_LEN_CYCL +# INCR_CYCL_FREQ +# SYMLINK_FIX_FILES +# +# task_get_extrn_lbcs: +# EXTRN_MDL_FILES_LBCS +# EXTRN_MDL_SOURCE_BASEDIR_LBCS +# EXTRN_MDL_SYSBASEDIR_LBCS +# FV3GFS_FILE_FMT_LBCS +# LBC_SPEC_INTVL_HRS +# +# task_get_extrn_ics: +# EXTRN_MDL_FILES_ICS +# EXTRN_MDL_SOURCE_BASEDIR_ICS +# EXTRN_MDL_SYSBASEDIR_ICS +# FV3GFS_FILE_FMT_ICS +# +# global: +# DO_ENSEMBLE +# NUM_ENS_MEMBERS +# +#----------------------------------------------------------------------- +# + # #----------------------------------------------------------------------- # @@ -8,7 +68,11 @@ #----------------------------------------------------------------------- # . 
$USHdir/source_util_funcs.sh -source_config_for_task "task_get_extrn_ics|task_get_extrn_lbcs" ${GLOBAL_VAR_DEFNS_FP} + +for sect in user nco platform workflow global task_get_extrn_lbcs \ + task_get_extrn_ics ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # @@ -147,12 +211,12 @@ if [ -n "${input_file_path:-}" ] ; then --input_file_path ${input_file_path}" fi -if [ $SYMLINK_FIX_FILES = "TRUE" ]; then +if [ $(boolify $SYMLINK_FIX_FILES) = "TRUE" ]; then additional_flags="$additional_flags \ --symlink" fi -if [ $DO_ENSEMBLE == "TRUE" ] ; then +if [ $(boolify $DO_ENSEMBLE) = "TRUE" ] ; then mem_dir="/mem{mem:03d}" member_list=(1 ${NUM_ENS_MEMBERS}) additional_flags="$additional_flags \ @@ -222,7 +286,7 @@ if [ "${EXTRN_MDL_NAME}" = "GEFS" ]; then for num in $(seq -f "%02g" ${NUM_ENS_MEMBERS}); do sorted_fn=( ) for fcst_hr in "${all_fcst_hrs_array[@]}"; do - # Read in filenames from $EXTRN_MDL_FNS and sort them + # Read in filenames from EXTRN_MDL_FNS and sort them base_path="${EXTRN_MDL_STAGING_DIR}/mem`printf %03d $num`" filenames_array=`awk -F= '/EXTRN_MDL_FNS/{print $2}' $base_path/${EXTRN_DEFNS}` for filename in ${filenames_array[@]}; do diff --git a/scripts/exregional_get_verif_obs.sh b/scripts/exregional_get_verif_obs.sh index a74f11cd3a..d457a6b5d8 100755 --- a/scripts/exregional_get_verif_obs.sh +++ b/scripts/exregional_get_verif_obs.sh @@ -3,547 +3,83 @@ # #----------------------------------------------------------------------- # -# Source the variable definitions file and the bash utility functions. +# The ex-script that checks, pulls, and stages observation data for +# model verification. # -#----------------------------------------------------------------------- +# Run-time environment variables: # -. $USHdir/source_util_funcs.sh -source_config_for_task " " ${GLOBAL_VAR_DEFNS_FP} +# FHR +# GLOBAL_VAR_DEFNS_FP +# OBS_DIR +# OBTYPE +# PDY +# VAR # -#----------------------------------------------------------------------- +# Experiment variables # -# Save current shell options (in a global array). Then set new options -# for this script/function. +# user: +# USHdir +# PARMdir # #----------------------------------------------------------------------- -# -{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1 -set -x + # #----------------------------------------------------------------------- # -# This script performs several important tasks for preparing data for -# verification tasks. Depending on the value of the environment variable -# OBTYPE=(CCPA|MRMS|NDAS|NOHRSC), the script will prepare that particular data -# set. -# -# If data is not available on disk (in the location specified by -# CCPA_OBS_DIR, MRMS_OBS_DIR, NDAS_OBS_DIR, or NOHRSC_OBS_DIR respectively), -# the script attempts to retrieve the data from HPSS using the retrieve_data.py -# script. Depending on the data set, there are a few strange quirks and/or -# bugs in the way data is organized; see in-line comments for details. -# -# -# CCPA (Climatology-Calibrated Precipitation Analysis) precipitation accumulation obs -# ---------- -# If data is available on disk, it must be in the following -# directory structure and file name conventions expected by verification -# tasks: -# -# {CCPA_OBS_DIR}/{YYYYMMDD}/ccpa.t{HH}z.01h.hrap.conus.gb2 -# -# If data is retrieved from HPSS, it will automatically staged by this -# this script. -# -# Notes about the data and how it's used for verification: -# -# 1. 
Accumulation is currently hardcoded to 01h. The verification will -# use MET/pcp-combine to sum 01h files into desired accumulations. +# Source the variable definitions file and the bash utility functions. # -# 2. There is a problem with the valid time in the metadata for files -# valid from 19 - 00 UTC (or files under the '00' directory). This is -# accounted for in this script for data retrieved from HPSS, but if you -# have manually staged data on disk you should be sure this is accouned -# for. See in-line comments below for details. +#----------------------------------------------------------------------- # +. $USHdir/source_util_funcs.sh +for sect in user workflow nco ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # -# MRMS (Multi-Radar Multi-Sensor) radar observations -# ---------- -# If data is available on disk, it must be in the following -# directory structure and file name conventions expected by verification -# tasks: +#----------------------------------------------------------------------- # -# {MRMS_OBS_DIR}/{YYYYMMDD}/[PREFIX]{YYYYMMDD}-{HH}0000.grib2, -# -# Where [PREFIX] is MergedReflectivityQCComposite_00.50_ for reflectivity -# data and EchoTop_18_00.50_ for echo top data. If data is not available -# at the top of the hour, you should rename the file closest in time to -# your hour(s) of interest to the above naming format. A script -# "ush/mrms_pull_topofhour.py" is provided for this purpose. +# Save current shell options (in a global array). Then set new options +# for this script/function. # -# If data is retrieved from HPSS, it will automatically staged by this -# this script. +#----------------------------------------------------------------------- # +{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1 # -# NDAS (NAM Data Assimilation System) conventional observations -# ---------- -# If data is available on disk, it must be in the following -# directory structure and file name conventions expected by verification -# tasks: +#----------------------------------------------------------------------- # -# {NDAS_OBS_DIR}/{YYYYMMDD}/prepbufr.ndas.{YYYYMMDDHH} -# -# Note that data retrieved from HPSS and other sources may be in a -# different format: nam.t{hh}z.prepbufr.tm{prevhour}.nr, where hh is -# either 00, 06, 12, or 18, and prevhour is the number of hours prior to -# hh (00 through 05). If using custom staged data, you will have to -# rename the files accordingly. -# -# If data is retrieved from HPSS, it will automatically staged by this -# this script. +# Make sure the obs type is valid. Then call a python script to check +# for the presence of obs files on disk and get them if needed. # +#----------------------------------------------------------------------- # -# NOHRSC snow accumulation observations -# ---------- -# If data is available on disk, it must be in the following -# directory structure and file name conventions expected by verification -# tasks: +valid_obtypes=("CCPA" "MRMS" "NDAS" "NOHRSC") +if [[ ! 
${valid_obtypes[@]} =~ ${OBTYPE} ]]; then + print_err_msg_exit "\ +Invalid observation type (OBTYPE) specified for script: + OBTYPE = \"${OBTYPE}\" +Valid observation types are: + $(printf "\"%s\" " ${valid_obtypes[@]}) +" +fi + +cmd="\ +python3 -u ${USHdir}/get_obs.py \ +--var_defns_path "${GLOBAL_VAR_DEFNS_FP}" \ +--obtype ${OBTYPE} \ +--obs_day ${PDY}" +print_info_msg " +CALLING: ${cmd}" +${cmd} || print_err_msg_exit "Error calling get_obs.py" # -# {NOHRSC_OBS_DIR}/{YYYYMMDD}/sfav2_CONUS_{AA}h_{YYYYMMDD}{HH}_grid184.grb2 -# -# where AA is the 2-digit accumulation duration in hours: 06 or 24 +#----------------------------------------------------------------------- # -# METplus is configured to verify snowfall using 06- and 24-h accumulated -# snowfall from 6- and 12-hourly NOHRSC files, respectively. +# Create flag file that indicates completion of task. This is needed by +# the workflow. # -# If data is retrieved from HPSS, it will automatically staged by this -# this script. - #----------------------------------------------------------------------- -# Create and enter top-level obs directory (so temporary data from HPSS won't collide with other tasks) -mkdir -p ${OBS_DIR} -cd ${OBS_DIR} - -# Set log file for retrieving obs -logfile=retrieve_data.log - -# PDY and cyc are defined in rocoto XML...they are the yyyymmdd and hh for initial forecast hour respectively -iyyyy=$(echo ${PDY} | cut -c1-4) -imm=$(echo ${PDY} | cut -c5-6) -idd=$(echo ${PDY} | cut -c7-8) -ihh=${cyc} - -# Unix date utility needs dates in yyyy-mm-dd hh:mm:ss format -unix_init_DATE="${iyyyy}-${imm}-${idd} ${ihh}:00:00" - -# This awk expression gets the last item of the list $FHR -fcst_length=$(echo ${FHR} | awk '{ print $NF }') -# Make sure fcst_length isn't octal (leading zero) -fcst_length=$((10#${fcst_length})) - -current_fcst=0 -while [[ ${current_fcst} -le ${fcst_length} ]]; do - # Calculate valid date info using date utility - vdate=$($DATE_UTIL -d "${unix_init_DATE} ${current_fcst} hours" +%Y%m%d%H) - unix_vdate=$($DATE_UTIL -d "${unix_init_DATE} ${current_fcst} hours" "+%Y-%m-%d %H:00:00") - vyyyymmdd=$(echo ${vdate} | cut -c1-8) - vhh=$(echo ${vdate} | cut -c9-10) - - # Calculate valid date + 1 day; this is needed because some obs files - # are stored in the *next* day's 00h directory - vdate_p1=$($DATE_UTIL -d "${unix_init_DATE} ${current_fcst} hours 1 day" +%Y%m%d%H) - vyyyymmdd_p1=$(echo ${vdate_p1} | cut -c1-8) - - #remove leading zero again, this time keep original - vhh_noZero=$((10#${vhh})) - - # Retrieve CCPA observations - if [[ ${OBTYPE} == "CCPA" ]]; then - - #CCPA is accumulation observations, so none to retrieve for hour zero - if [[ ${current_fcst} -eq 0 ]]; then - current_fcst=$((${current_fcst} + 1)) - continue - fi - - # Staging location for raw CCPA data from HPSS - ccpa_raw=${OBS_DIR}/raw - - # Reorganized CCPA location - ccpa_proc=${OBS_DIR} - - # Accumulation is for accumulation of CCPA data to pull (hardcoded to 01h, see note above.) - accum=01 - - # Check if file exists on disk; if not, pull it. - ccpa_file="$ccpa_proc/${vyyyymmdd}/ccpa.t${vhh}z.${accum}h.hrap.conus.gb2" - if [[ -f "${ccpa_file}" ]]; then - echo "${OBTYPE} file exists on disk:" - echo "${ccpa_file}" - else - echo "${OBTYPE} file does not exist on disk:" - echo "${ccpa_file}" - echo "Will attempt to retrieve from remote locations" - - # Create necessary raw and prop directories - if [[ ! -d "$ccpa_raw/${vyyyymmdd}" ]]; then - mkdir -p $ccpa_raw/${vyyyymmdd} - fi - if [[ ! 
-d "$ccpa_raw/${vyyyymmdd_p1}" ]]; then - mkdir -p $ccpa_raw/${vyyyymmdd_p1} - fi - if [[ ! -d "$ccpa_proc/${vyyyymmdd}" ]]; then - mkdir -p $ccpa_proc/${vyyyymmdd} - fi - # Check if valid hour is 00 - if [[ ${vhh_noZero} -ge 19 && ${vhh_noZero} -le 23 ]]; then - # Pull CCPA data from HPSS - cmd=" - python3 -u ${USHdir}/retrieve_data.py \ - --debug \ - --file_set obs \ - --config ${PARMdir}/data_locations.yml \ - --cycle_date ${vyyyymmdd_p1}${vhh} \ - --data_stores hpss \ - --data_type CCPA_obs \ - --output_path $ccpa_raw/${vyyyymmdd_p1} \ - --summary_file ${logfile}" - - echo "CALLING: ${cmd}" - $cmd || print_err_msg_exit "\ - Could not retrieve CCPA data from HPSS - - The following command exited with a non-zero exit status: - ${cmd} -" - - else - # Pull CCPA data from HPSS - cmd=" - python3 -u ${USHdir}/retrieve_data.py \ - --debug \ - --file_set obs \ - --config ${PARMdir}/data_locations.yml \ - --cycle_date ${vyyyymmdd}${vhh} \ - --data_stores hpss \ - --data_type CCPA_obs \ - --output_path $ccpa_raw/${vyyyymmdd} \ - --summary_file ${logfile}" - - echo "CALLING: ${cmd}" - $cmd || print_err_msg_exit "\ - Could not retrieve CCPA data from HPSS - - The following command exited with a non-zero exit status: - ${cmd} -" - fi - - # One hour CCPA files have incorrect metadata in the files under the "00" directory from 20180718 to 20210504. - # After data is pulled, reorganize into correct valid yyyymmdd structure. - if [[ ${vhh_noZero} -ge 1 && ${vhh_noZero} -le 6 ]]; then - cp $ccpa_raw/${vyyyymmdd}/ccpa.t${vhh}z.${accum}h.hrap.conus.gb2 $ccpa_proc/${vyyyymmdd} - elif [[ ${vhh_noZero} -ge 7 && ${vhh_noZero} -le 12 ]]; then - cp $ccpa_raw/${vyyyymmdd}/ccpa.t${vhh}z.${accum}h.hrap.conus.gb2 $ccpa_proc/${vyyyymmdd} - elif [[ ${vhh_noZero} -ge 13 && ${vhh_noZero} -le 18 ]]; then - cp $ccpa_raw/${vyyyymmdd}/ccpa.t${vhh}z.${accum}h.hrap.conus.gb2 $ccpa_proc/${vyyyymmdd} - elif [[ ${vhh_noZero} -ge 19 && ${vhh_noZero} -le 23 ]]; then - if [[ ${vyyyymmdd} -ge 20180718 && ${vyyyymmdd} -le 20210504 ]]; then - wgrib2 $ccpa_raw/${vyyyymmdd_p1}/ccpa.t${vhh}z.${accum}h.hrap.conus.gb2 -set_date -24hr -grib $ccpa_proc/${vyyyymmdd}/ccpa.t${vhh}z.${accum}h.hrap.conus.gb2 -s - else - cp $ccpa_raw/${vyyyymmdd_p1}/ccpa.t${vhh}z.${accum}h.hrap.conus.gb2 $ccpa_proc/${vyyyymmdd} - fi - elif [[ ${vhh_noZero} -eq 0 ]]; then - # One hour CCPA files on HPSS have incorrect metadata in the files under the "00" directory from 20180718 to 20210504. - if [[ ${vyyyymmdd} -ge 20180718 && ${vyyyymmdd} -le 20210504 ]]; then - wgrib2 $ccpa_raw/${vyyyymmdd}/ccpa.t${vhh}z.${accum}h.hrap.conus.gb2 -set_date -24hr -grib $ccpa_proc/${vyyyymmdd}/ccpa.t${vhh}z.${accum}h.hrap.conus.gb2 -s - else - cp $ccpa_raw/${vyyyymmdd}/ccpa.t${vhh}z.${accum}h.hrap.conus.gb2 $ccpa_proc/${vyyyymmdd} - fi - fi - - fi - # Retrieve MRMS observations - elif [[ ${OBTYPE} == "MRMS" ]]; then - # Top-level MRMS directory - # raw MRMS data from HPSS - mrms_raw=${OBS_DIR}/raw - - # Reorganized MRMS location - mrms_proc=${OBS_DIR} - - # For each field (REFC and RETOP), check if file exists on disk; if not, pull it. - for field in ${VAR[@]}; do - if [ "${field}" = "REFC" ]; then - field_base_name="MergedReflectivityQCComposite" - level="_00.50_" - elif [ "${field}" = "RETOP" ]; then - field_base_name="EchoTop" - level="_18_00.50_" - else - echo "Invalid field: ${field}" - print_err_msg_exit "\ - Invalid field specified: ${field} - - Valid options are 'REFC', 'RETOP'. 
-" - fi - - mrms_file="$mrms_proc/${vyyyymmdd}/${field_base_name}${level}${vyyyymmdd}-${vhh}0000.grib2" - - if [[ -f "${mrms_file}" ]]; then - echo "${OBTYPE} file exists on disk for field ${field}:\n${mrms_file}" - else - echo "${OBTYPE} file does not exist on disk for field ${field}:\n${mrms_file}" - echo "Will attempt to retrieve from remote locations" - # Create directories if necessary - if [[ ! -d "$mrms_raw/${vyyyymmdd}" ]]; then - mkdir -p $mrms_raw/${vyyyymmdd} - fi - if [[ ! -d "$mrms_proc/${vyyyymmdd}" ]]; then - mkdir -p $mrms_proc/${vyyyymmdd} - fi - - - # Pull MRMS data from HPSS - cmd=" - python3 -u ${USHdir}/retrieve_data.py \ - --debug \ - --file_set obs \ - --config ${PARMdir}/data_locations.yml \ - --cycle_date ${vyyyymmdd}${vhh} \ - --data_stores hpss \ - --data_type MRMS_obs \ - --output_path $mrms_raw/${vyyyymmdd} \ - --summary_file ${logfile}" - - echo "CALLING: ${cmd}" - - $cmd || print_err_msg_exit "\ - Could not retrieve MRMS data from HPSS - - The following command exited with a non-zero exit status: - ${cmd} -" - - hour=0 - while [[ ${hour} -le 23 ]]; do - HH=$(printf "%02d" $hour) - echo "hour=${hour}" - python ${USHdir}/mrms_pull_topofhour.py --valid_time ${vyyyymmdd}${HH} --outdir ${mrms_proc} --source ${mrms_raw} --product ${field_base_name} - hour=$((${hour} + 1)) # hourly increment - done - - fi - done - - # Retrieve NDAS observations - elif [[ ${OBTYPE} == "NDAS" ]]; then - # raw NDAS data from HPSS - ndas_raw=${OBS_DIR}/raw - - # Reorganized NDAS location - ndas_proc=${OBS_DIR} - - # Check if file exists on disk - ndas_file="$ndas_proc/prepbufr.ndas.${vyyyymmdd}${vhh}" - if [[ -f "${ndas_file}" ]]; then - echo "${OBTYPE} file exists on disk:" - echo "${ndas_file}" - else - echo "${OBTYPE} file does not exist on disk:" - echo "${ndas_file}" - echo "Will attempt to retrieve from remote locations" - # NDAS data is available in 6-hourly combined tar files, each with 7 1-hour prepbufr files: - # nam.tHHz.prepbufr.tm00.nr, nam.tHHz.prepbufr.tm01.nr, ... , nam.tHHz.prepbufr.tm06.nr - # - # The "tm" here means "time minus", so nam.t12z.prepbufr.tm00.nr is valid for 12z, - # nam.t00z.prepbufr.tm03.nr is valid for 21z the previous day, etc. - # This means that every six hours we have to obs files valid for the same time: - # nam.tHHz.prepbufr.tm00.nr and nam.t[HH+6]z.prepbufr.tm06.nr - # We want to use the tm06 file because it contains more/better obs (confirmed with EMC: even - # though the earlier files are larger, this is because the time window is larger) - - # The current logic of this script will likely stage more files than you need, but will never - # pull more HPSS tarballs than necessary - - if [[ ${current_fcst} -eq 0 && ${current_fcst} -ne ${fcst_length} ]]; then - # If at forecast hour zero, skip to next hour. - current_fcst=$((${current_fcst} + 1)) - continue - fi - - if [[ ${vhh_noZero} -eq 0 || ${vhh_noZero} -eq 6 || ${vhh_noZero} -eq 12 || ${vhh_noZero} -eq 18 ]]; then - - if [[ ! 
-d "$ndas_raw/${vyyyymmdd}${vhh}" ]]; then - mkdir -p $ndas_raw/${vyyyymmdd}${vhh} - fi - - # Pull NDAS data from HPSS - cmd=" - python3 -u ${USHdir}/retrieve_data.py \ - --debug \ - --file_set obs \ - --config ${PARMdir}/data_locations.yml \ - --cycle_date ${vyyyymmdd}${vhh} \ - --data_stores hpss \ - --data_type NDAS_obs \ - --output_path $ndas_raw/${vyyyymmdd}${vhh} \ - --summary_file ${logfile}" - - echo "CALLING: ${cmd}" - - $cmd || print_err_msg_exit "\ - Could not retrieve NDAS data from HPSS - - The following command exited with a non-zero exit status: - ${cmd} -" - - if [[ ! -d "$ndas_proc" ]]; then - mkdir -p $ndas_proc - fi - - # copy files from the previous 6 hours ("tm" means "time minus") - # The tm06 files contain more/better observations than tm00 for the equivalent time - for tm in $(seq 1 6); do - vyyyymmddhh_tm=$($DATE_UTIL -d "${unix_vdate} ${tm} hours ago" +%Y%m%d%H) - tm2=$(echo $tm | awk '{printf "%02d\n", $0;}') - - cp $ndas_raw/${vyyyymmdd}${vhh}/nam.t${vhh}z.prepbufr.tm${tm2}.nr $ndas_proc/prepbufr.ndas.${vyyyymmddhh_tm} - done - - fi - - # If at last forecast hour, make sure we're getting the last observations - if [[ ${current_fcst} -eq ${fcst_length} ]]; then - echo "Retrieving NDAS obs for final forecast hour" - vhh_noZero=$((vhh_noZero + 6 - (vhh_noZero % 6))) - if [[ ${vhh_noZero} -eq 24 ]]; then - vyyyymmdd=${vyyyymmdd_p1} - vhh=00 - elif [[ ${vhh_noZero} -eq 6 ]]; then - vhh=06 - else - vhh=${vhh_noZero} - fi - - if [[ ! -d "$ndas_raw/${vyyyymmdd}${vhh}" ]]; then - mkdir -p $ndas_raw/${vyyyymmdd}${vhh} - fi - - # Pull NDAS data from HPSS - cmd=" - python3 -u ${USHdir}/retrieve_data.py \ - --debug \ - --file_set obs \ - --config ${PARMdir}/data_locations.yml \ - --cycle_date ${vyyyymmdd}${vhh} \ - --data_stores hpss \ - --data_type NDAS_obs \ - --output_path $ndas_raw/${vyyyymmdd}${vhh} \ - --summary_file ${logfile}" - - echo "CALLING: ${cmd}" - - $cmd || print_err_msg_exit "\ - Could not retrieve NDAS data from HPSS - - The following command exited with a non-zero exit status: - ${cmd} -" - - if [[ ! -d "$ndas_proc" ]]; then - mkdir -p $ndas_proc - fi - - for tm in $(seq 1 6); do - last_fhr=$((fcst_length + 6 - (vhh_noZero % 6))) - unix_fdate=$($DATE_UTIL -d "${unix_init_DATE} ${last_fhr} hours" "+%Y-%m-%d %H:00:00") - vyyyymmddhh_tm=$($DATE_UTIL -d "${unix_fdate} ${tm} hours ago" +%Y%m%d%H) - tm2=$(echo $tm | awk '{printf "%02d\n", $0;}') - - cp $ndas_raw/${vyyyymmdd}${vhh}/nam.t${vhh}z.prepbufr.tm${tm2}.nr $ndas_proc/prepbufr.ndas.${vyyyymmddhh_tm} - done - - fi - - fi - - # Retrieve NOHRSC observations - elif [[ ${OBTYPE} == "NOHRSC" ]]; then - - #NOHRSC is accumulation observations, so none to retrieve for hour zero - if [[ ${current_fcst} -eq 0 ]]; then - current_fcst=$((${current_fcst} + 1)) - continue - fi - - # Reorganized NOHRSC location (no need for raw data dir) - nohrsc_proc=${OBS_DIR} - - nohrsc06h_file="$nohrsc_proc/${vyyyymmdd}/sfav2_CONUS_06h_${vyyyymmdd}${vhh}_grid184.grb2" - nohrsc24h_file="$nohrsc_proc/${vyyyymmdd}/sfav2_CONUS_24h_${vyyyymmdd}${vhh}_grid184.grb2" - retrieve=0 - # If 24-hour files should be available (at 00z and 12z) then look for both files - # Otherwise just look for 6hr file - if (( ${current_fcst} % 12 == 0 )) && (( ${current_fcst} >= 24 )) ; then - if [[ ! -f "${nohrsc06h_file}" || ! 
-f "${nohrsc24h_file}" ]] ; then - retrieve=1 - echo "${OBTYPE} files do not exist on disk:" - echo "${nohrsc06h_file}" - echo "${nohrsc24h_file}" - echo "Will attempt to retrieve from remote locations" - else - echo "${OBTYPE} files exist on disk:" - echo "${nohrsc06h_file}" - echo "${nohrsc24h_file}" - fi - elif (( ${current_fcst} % 6 == 0 )) ; then - if [[ ! -f "${nohrsc06h_file}" ]]; then - retrieve=1 - echo "${OBTYPE} file does not exist on disk:" - echo "${nohrsc06h_file}" - echo "Will attempt to retrieve from remote locations" - else - echo "${OBTYPE} file exists on disk:" - echo "${nohrsc06h_file}" - fi - fi - if [ $retrieve == 1 ]; then - if [[ ! -d "$nohrsc_proc/${vyyyymmdd}" ]]; then - mkdir -p $nohrsc_proc/${vyyyymmdd} - fi - - # Pull NOHRSC data from HPSS; script will retrieve all files so only call once - cmd=" - python3 -u ${USHdir}/retrieve_data.py \ - --debug \ - --file_set obs \ - --config ${PARMdir}/data_locations.yml \ - --cycle_date ${vyyyymmdd}${vhh} \ - --data_stores hpss \ - --data_type NOHRSC_obs \ - --output_path $nohrsc_proc/${vyyyymmdd} \ - --summary_file ${logfile}" - - echo "CALLING: ${cmd}" - - $cmd || print_err_msg_exit "\ - Could not retrieve NOHRSC data from HPSS - - The following command exited with a non-zero exit status: - ${cmd} -" - # 6-hour forecast needs to be renamed - mv $nohrsc_proc/${vyyyymmdd}/sfav2_CONUS_6h_${vyyyymmdd}${vhh}_grid184.grb2 ${nohrsc06h_file} - fi - - else - print_err_msg_exit "\ - Invalid OBTYPE specified for script; valid options are CCPA, MRMS, NDAS, and NOHRSC - " - fi # Increment to next forecast hour - # Increment to next forecast hour - echo "Finished fcst hr=${current_fcst}" - current_fcst=$((${current_fcst} + 1)) - -done - - -# Clean up raw, unprocessed observation files -rm -rf ${OBS_DIR}/raw - +# +mkdir -p ${WFLOW_FLAG_FILES_DIR} +file_bn="get_obs_$(echo_lowercase ${OBTYPE})" +touch "${WFLOW_FLAG_FILES_DIR}/${file_bn}_${PDY}_complete.txt" # #----------------------------------------------------------------------- # diff --git a/scripts/exregional_integration_test.py b/scripts/exregional_integration_test.py index f0ac3d9af6..996cf6320e 100755 --- a/scripts/exregional_integration_test.py +++ b/scripts/exregional_integration_test.py @@ -4,16 +4,16 @@ #### Python Script Documentation Block # # Script name: exregional_integration_test.py -# Script description: Ensures the correct number of netcdf files are generated +# Script description: Ensures the correct number of netcdf files are generated # for each experiment # # Author: Eddie Snyder Org: NOAA EPIC Date: 2024-02-05 -# +# # Instructions: 1. Pass the appropriate info for the required arguments: # --fcst_dir=/path/to/forecast/files # --fcst_len= # 2. Run script with arguments -# +# # Notes/future work: - Currently SRW App only accepts netcdf as the UFS WM # output file format. If that changes, then additional # logic is needed to address the other file formats. diff --git a/scripts/exregional_make_grid.sh b/scripts/exregional_make_grid.sh index 1f95ea8f91..104875f8dc 100755 --- a/scripts/exregional_make_grid.sh +++ b/scripts/exregional_make_grid.sh @@ -1,5 +1,99 @@ #!/usr/bin/env bash + +# +#----------------------------------------------------------------------- +# +# This script generates NetCDF-formatted grid files required as input +# the FV3 model configured for the regional domain. +# +# The output of this script is placed in a directory defined by GRID_DIR. 
+# +# More about the grid for regional configurations of FV3: +# +# a) This script creates grid files for tile 7 (reserved for the +# regional grid, located somewhere within tile 6 of the 6 global +# tiles). +# +# b) Regional configurations of FV3 need two grid files, one with 3 +# halo cells and one with 4 halo cells. The width of the halo is +# the number of cells in the direction perpendicular to the +# boundary. +# +# c) The tile 7 grid file that this script creates includes a halo +# at least 4 cells wide to accommodate this requirement. The halo +# is made thinner in a subsequent step called "shave". +# +# d) We will let NHW denote the width of the wide halo that is wider +# than the required 3- or 4-cell halos. (NHW; N=number of cells, +# H=halo, W=wide halo) +# +# e) T7 indicates the cell count on tile 7. +# +# +# This script does the following: +# +# - Create the grid, either an ESGgrid with the regional_esg_grid +# executable or a GFDL-type grid with the hgrid executable +# - Calculate the regional grid's global uniform cubed-sphere grid +# equivalent resolution with the global_equiv_resol executable +# - Use the shave executable to reduce the halo to 3 and 4 cells +# - Call a ush script that runs the make_solo_mosaic executable +# +# Run-time environment variables: +# +# DATA +# GLOBAL_VAR_DEFNS_FP +# REDIRECT_OUT_ERR +# +# Experiment variables +# +# user: +# EXECdir +# USHdir +# +# platform: +# PRE_TASK_CMDS +# RUN_CMD_SERIAL +# +# workflow: +# DOT_OR_USCORE +# GRID_GEN_METHOD +# RES_IN_FIXLAM_FILENAMES +# RGNL_GRID_NML_FN +# VERBOSE +# +# task_make_grid: +# GFDLgrid_NUM_CELLS +# GFDLgrid_USE_NUM_CELLS_IN_FILENAMES +# GRID_DIR +# +# constants: +# NH3 +# NH4 +# TILE_RGNL +# +# grid_params: +# DEL_ANGLE_X_SG +# DEL_ANGLE_Y_SG +# GFDLgrid_REFINE_RATIO +# IEND_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG +# ISTART_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG +# JEND_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG +# JSTART_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG +# LAT_CTR +# LON_CTR +# NEG_NX_OF_DOM_WITH_WIDE_HALO +# NEG_NY_OF_DOM_WITH_WIDE_HALO +# NHW +# NX +# NY +# PAZI +# STRETCH_FAC +# +#----------------------------------------------------------------------- +# + # #----------------------------------------------------------------------- # @@ -8,7 +102,9 @@ #----------------------------------------------------------------------- # . $USHdir/source_util_funcs.sh -source_config_for_task "task_make_grid" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow constants grid_params task_make_grid ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # @@ -196,7 +292,7 @@ fi # # Change location to the temporary (work) directory. # -cd_vrfy "$DATA" +cd "$DATA" print_info_msg "$VERBOSE" " Starting grid file generation..." @@ -276,6 +372,7 @@ generation executable (exec_fp): 'pazi': ${PAZI} " + # UW takes input from stdin when no -i/--input-config flag is provided (cat << EOF $settings EOF @@ -313,7 +410,7 @@ fi # to the original directory. # grid_fp="$DATA/${grid_fn}" -cd_vrfy - +cd - print_info_msg "$VERBOSE" " Grid file generation completed successfully."
@@ -372,7 +469,7 @@ res_equiv=${res_equiv//$'\n'/} #----------------------------------------------------------------------- # if [ "${GRID_GEN_METHOD}" = "GFDLgrid" ]; then - if [ "${GFDLgrid_USE_NUM_CELLS_IN_FILENAMES}" = "TRUE" ]; then + if [ $(boolify "${GFDLgrid_USE_NUM_CELLS_IN_FILENAMES}") = "TRUE" ]; then CRES="C${GFDLgrid_NUM_CELLS}" else CRES="C${res_equiv}" @@ -380,7 +477,15 @@ if [ "${GRID_GEN_METHOD}" = "GFDLgrid" ]; then elif [ "${GRID_GEN_METHOD}" = "ESGgrid" ]; then CRES="C${res_equiv}" fi -set_file_param "${GLOBAL_VAR_DEFNS_FP}" "CRES" "'$CRES'" + + # UW takes the update values from stdin when no --update-file flag is + # provided. It needs --update-format to do it correctly, though. +echo "workflow: {CRES: ${CRES}}" | uw config realize \ + --input-file $GLOBAL_VAR_DEFNS_FP \ + --update-format yaml \ + --output-file $GLOBAL_VAR_DEFNS_FP \ + --verbose + # #----------------------------------------------------------------------- # @@ -392,7 +497,7 @@ set_file_param "${GLOBAL_VAR_DEFNS_FP}" "CRES" "'$CRES'" grid_fp_orig="${grid_fp}" grid_fn="${CRES}${DOT_OR_USCORE}grid.tile${TILE_RGNL}.halo${NHW}.nc" grid_fp="${GRID_DIR}/${grid_fn}" -mv_vrfy "${grid_fp_orig}" "${grid_fp}" +mv "${grid_fp_orig}" "${grid_fp}" # #----------------------------------------------------------------------- # @@ -449,7 +554,7 @@ unshaved_fp="${grid_fp}" # Once it is complete, we will move the resultant file from DATA to # GRID_DIR. # -cd_vrfy "$DATA" +cd "$DATA" # # Create an input namelist file for the shave executable to generate a # grid file with a 3-cell-wide halo from the one with a wide halo. Then @@ -477,7 +582,7 @@ The namelist file (nml_fn) used in this call is in directory DATA: nml_fn = \"${nml_fn}\" DATA = \"${DATA}\"" POST_STEP -mv_vrfy ${shaved_fp} ${GRID_DIR} +mv ${shaved_fp} ${GRID_DIR} # # Create an input namelist file for the shave executable to generate a # grid file with a 4-cell-wide halo from the one with a wide halo. Then @@ -505,7 +610,7 @@ The namelist file (nml_fn) used in this call is in directory DATA: nml_fn = \"${nml_fn}\" DATA = \"${DATA}\"" POST_STEP -mv_vrfy ${shaved_fp} ${GRID_DIR} +mv ${shaved_fp} ${GRID_DIR} # # Create an input namelist file for the shave executable to generate a # grid file without halo from the one with a wide halo. Then @@ -532,11 +637,11 @@ The namelist file (nml_fn) used in this call is in directory DATA: nml_fn = \"${nml_fn}\" DATA = \"${DATA}\"" POST_STEP -mv_vrfy ${shaved_fp} ${GRID_DIR} +mv ${shaved_fp} ${GRID_DIR} # # Change location to the original directory. 
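Many of the flag tests in these scripts now pass values through boolify before comparing against "TRUE". A minimal sketch of the assumed normalization (illustrative only; the repo's actual helper may differ):

# Illustrative sketch of a boolify-style normalizer (assumed behavior:
# map common truthy/falsy spellings to "TRUE"/"FALSE", pass anything
# else through unchanged).
boolify() {
  case "$(echo "$1" | tr '[:upper:]' '[:lower:]')" in
    true|yes|1)  echo "TRUE"  ;;
    false|no|0)  echo "FALSE" ;;
    *)           echo "$1"    ;;
  esac
}

This makes tests such as [ $(boolify "${CPL_AQM}") = "TRUE" ] insensitive to case and to yes/no spellings in the YAML config.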
# -cd_vrfy - +cd - # #----------------------------------------------------------------------- # diff --git a/scripts/exregional_make_ics.sh b/scripts/exregional_make_ics.sh index 84d73696eb..debf526798 100755 --- a/scripts/exregional_make_ics.sh +++ b/scripts/exregional_make_ics.sh @@ -1,5 +1,83 @@ #!/usr/bin/env bash +# +#----------------------------------------------------------------------- +# +# The ex-script that sets up and runs chgres_cube for preparing initial +# conditions for the FV3 forecast. +# +# Run-time environment variables: +# +# COMIN +# COMOUT +# COMROOT +# DATA +# DATAROOT +# DATA_SHARE +# EXTRN_MDL_CDATE +# GLOBAL_VAR_DEFNS_FP +# INPUT_DATA +# NET +# PDY +# REDIRECT_OUT_ERR +# SLASH_ENSMEM_SUBDIR +# +# Experiment variables +# +# user: +# EXECdir +# MACHINE +# PARMdir +# RUN_ENVIR +# USHdir +# +# platform: +# FIXgsm +# PRE_TASK_CMDS +# RUN_CMD_UTILS +# +# workflow: +# CCPP_PHYS_SUITE +# COLDSTART +# CRES +# DATE_FIRST_CYCL +# DOT_OR_USCORE +# EXTRN_MDL_VAR_DEFNS_FN +# FIXlam +# SDF_USES_RUC_LSM +# SDF_USES_THOMPSON_MP +# THOMPSON_MP_CLIMO_FP +# VERBOSE +# +# task_make_ics: +# FVCOM_DIR +# FVCOM_FILE +# FVCOM_WCSTART +# KMP_AFFINITY_MAKE_ICS +# OMP_NUM_THREADS_MAKE_ICS +# OMP_STACKSIZE_MAKE_ICS +# USE_FVCOM +# VCOORD_FILE +# +# task_get_extrn_ics: +# EXTRN_MDL_NAME_ICS +# FV3GFS_FILE_FMT_ICS +# +# global: +# HALO_BLEND +# +# cpl_aqm_parm: +# CPL_AQM +# +# constants: +# NH0 +# NH4 +# TILE_RGNL +# +#----------------------------------------------------------------------- +# + + # #----------------------------------------------------------------------- # @@ -8,7 +86,9 @@ #----------------------------------------------------------------------- # . $USHdir/source_util_funcs.sh -source_config_for_task "task_make_ics|task_get_extrn_ics" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow global cpl_aqm_parm constants task_get_extrn_ics task_make_ics ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # @@ -117,6 +197,7 @@ case "${CCPP_PHYS_SUITE}" in "FV3_HRRR" | \ "FV3_RAP" ) if [ "${EXTRN_MDL_NAME_ICS}" = "RAP" ] || \ + [ "${EXTRN_MDL_NAME_ICS}" = "RRFS" ] || \ [ "${EXTRN_MDL_NAME_ICS}" = "HRRR" ]; then varmap_file="GSDphys_var_map.txt" elif [ "${EXTRN_MDL_NAME_ICS}" = "NAM" ] || \ @@ -165,7 +246,7 @@ esac # # fn_grib2: # Name (not including path) of the grib2 file generated by the external -# model. Currently used for NAM, RAP, and HRRR external model data. +# model. Currently used for NAM, RAP, and HRRR/RRFS external model data. # # input_type: # The "type" of input being provided to chgres_cube. This contains a combi- @@ -241,7 +322,7 @@ esac # tracers_input(:), it must also be 3rd in tracers(:). How can this be checked? # # NOTE: Really should use a varmap table for GFS, just like we do for -# RAP/HRRR. +# RAP/HRRR/RRFS. # # A non-prognostic variable that appears in the field_table for GSD physics # is cld_amt. Why is that in the field_table at all (since it is a non- @@ -274,7 +355,7 @@ convert_nst="" # # If the external model is not one that uses the RUC land surface model # (LSM) -- which currently includes all valid external models except the -# HRRR and the RAP -- then we set the number of soil levels to include +# HRRR/RRFS and the RAP -- then we set the number of soil levels to include # in the output NetCDF file that chgres_cube generates (nsoill_out; this # is a variable in the namelist that chgres_cube reads in) to 4.
This # is because FV3 can handle this regardless of the LSM that it is using @@ -285,7 +366,7 @@ convert_nst="" # 4 soil layers to the 9 layers that it uses. # # On the other hand, if the external model is one that uses the RUC LSM -# (currently meaning that it is either the HRRR or the RAP), then what +# (currently meaning that it is either the HRRR/RRFS or the RAP), then what # we set nsoill_out to depends on whether the RUC or the Noah/Noah MP # LSM is used in the SDF. If the SDF uses RUC, then both the external # model and FV3 use RUC (which expects 9 soil levels), so we simply set @@ -299,21 +380,22 @@ convert_nst="" # 9 to 4 levels. # # In summary, we can set nsoill_out to 4 unless the external model is -# the HRRR or RAP AND the forecast model is using the RUC LSM. +# the HRRR/RRFS or RAP AND the forecast model is using the RUC LSM. # #----------------------------------------------------------------------- # nsoill_out="4" if [ "${EXTRN_MDL_NAME_ICS}" = "HRRR" -o \ + "${EXTRN_MDL_NAME_ICS}" = "RRFS" -o \ "${EXTRN_MDL_NAME_ICS}" = "RAP" ] && \ - [ "${SDF_USES_RUC_LSM}" = "TRUE" ]; then + [ $(boolify "${SDF_USES_RUC_LSM}") = "TRUE" ]; then nsoill_out="9" fi # #----------------------------------------------------------------------- # # If the external model for ICs is one that does not provide the aerosol -# fields needed by Thompson microphysics (currently only the HRRR and +# fields needed by Thompson microphysics (currently only the HRRR/RRFS and # RAP provide aerosol data) and if the physics suite uses Thompson # microphysics, set the variable thomp_mp_climo_file in the chgres_cube # namelist to the full path of the file containing aerosol climatology @@ -325,8 +407,9 @@ fi # thomp_mp_climo_file="" if [ "${EXTRN_MDL_NAME_ICS}" != "HRRR" -a \ + "${EXTRN_MDL_NAME_ICS}" != "RRFS" -a \ "${EXTRN_MDL_NAME_ICS}" != "RAP" ] && \ - [ "${SDF_USES_THOMPSON_MP}" = "TRUE" ]; then + [ $(boolify "${SDF_USES_THOMPSON_MP}") = "TRUE" ]; then thomp_mp_climo_file="${THOMPSON_MP_CLIMO_FP}" fi # @@ -439,8 +522,9 @@ case "${EXTRN_MDL_NAME_ICS}" in tg3_from_soil=False ;; -"HRRR") +"HRRR"|"RRFS") external_model="HRRR" + fn_grib2="${EXTRN_MDL_FNS[0]}" input_type="grib2" # @@ -643,9 +727,9 @@ POST_STEP # #----------------------------------------------------------------------- # -if [ "${CPL_AQM}" = "TRUE" ]; then +if [ $(boolify "${CPL_AQM}") = "TRUE" ]; then COMOUT="${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}" #temporary path, should be removed later - if [ "${COLDSTART}" = "TRUE" ] && [ "${PDY}${cyc}" = "${DATE_FIRST_CYCL:0:10}" ]; then + if [ $(boolify "${COLDSTART}") = "TRUE" ] && [ "${PDY}${cyc}" = "${DATE_FIRST_CYCL:0:10}" ]; then data_trans_path="${COMOUT}" else data_trans_path="${DATA_SHARE}" @@ -655,10 +739,10 @@ if [ "${CPL_AQM}" = "TRUE" ]; then cp -p gfs_ctrl.nc "${COMOUT}/${NET}.${cycle}${dot_ensmem}.gfs_ctrl.nc" cp -p gfs.bndy.nc "${DATA_SHARE}/${NET}.${cycle}${dot_ensmem}.gfs_bndy.tile${TILE_RGNL}.f000.nc" else - mv_vrfy out.atm.tile${TILE_RGNL}.nc ${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.gfs_data.tile${TILE_RGNL}.halo${NH0}.nc - mv_vrfy out.sfc.tile${TILE_RGNL}.nc ${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.sfc_data.tile${TILE_RGNL}.halo${NH0}.nc - mv_vrfy gfs_ctrl.nc ${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.gfs_ctrl.nc - mv_vrfy gfs.bndy.nc ${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.gfs_bndy.tile${TILE_RGNL}.f000.nc + mv out.atm.tile${TILE_RGNL}.nc ${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.gfs_data.tile${TILE_RGNL}.halo${NH0}.nc + mv 
out.sfc.tile${TILE_RGNL}.nc ${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.sfc_data.tile${TILE_RGNL}.halo${NH0}.nc + mv gfs_ctrl.nc ${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.gfs_ctrl.nc + mv gfs.bndy.nc ${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.gfs_bndy.tile${TILE_RGNL}.f000.nc fi # #----------------------------------------------------------------------- @@ -667,7 +751,7 @@ fi # #----------------------------------------------------------------------- # -if [ "${USE_FVCOM}" = "TRUE" ]; then +if [ $(boolify "${USE_FVCOM}") = "TRUE" ]; then #Format for fvcom_time: YYYY-MM-DDTHH:00:00.000000 fvcom_exec_fn="fvcom_to_FV3" @@ -684,7 +768,7 @@ Please ensure that you've built this executable." print_err_msg_exit "${message_txt}" fi fi - cp_vrfy ${fvcom_exec_fp} ${INPUT_DATA}/. + cp ${fvcom_exec_fp} ${INPUT_DATA}/. fvcom_data_fp="${FVCOM_DIR}/${FVCOM_FILE}" if [ ! -f "${fvcom_data_fp}" ]; then message_txt="The file or path (fvcom_data_fp) does not exist: @@ -699,8 +783,8 @@ Please check the following user defined variables: fi fi - cp_vrfy ${fvcom_data_fp} ${INPUT_DATA}/fvcom.nc - cd_vrfy ${INPUT_DATA} + cp ${fvcom_data_fp} ${INPUT_DATA}/fvcom.nc + cd ${INPUT_DATA} PREP_STEP eval ${RUN_CMD_UTILS} ${fvcom_exec_fn} \ ${NET}.${cycle}${dot_ensmem}.sfc_data.tile${TILE_RGNL}.halo${NH0}.nc fvcom.nc ${FVCOM_WCSTART} ${fvcom_time} \ diff --git a/scripts/exregional_make_lbcs.sh b/scripts/exregional_make_lbcs.sh index ca3f6401cb..acbe97a56b 100755 --- a/scripts/exregional_make_lbcs.sh +++ b/scripts/exregional_make_lbcs.sh @@ -1,5 +1,83 @@ #!/usr/bin/env bash +# +#----------------------------------------------------------------------- +# +# The ex-script that sets up and runs chgres_cube for preparing lateral +# boundary conditions for the FV3 forecast. +# +# Run-time environment variables: +# +# COMIN +# COMOUT +# COMROOT +# DATA +# DATAROOT +# DATA_SHARE +# EXTRN_MDL_CDATE +# INPUT_DATA +# GLOBAL_VAR_DEFNS_FP +# NET +# PDY +# REDIRECT_OUT_ERR +# SLASH_ENSMEM_SUBDIR +# +# Experiment variables +# +# user: +# EXECdir +# MACHINE +# PARMdir +# RUN_ENVIR +# USHdir +# +# platform: +# FIXgsm +# PRE_TASK_CMDS +# RUN_CMD_UTILS +# +# workflow: +# CCPP_PHYS_SUITE +# COLDSTART +# CRES +# DATE_FIRST_CYCL +# DOT_OR_USCORE +# EXTRN_MDL_VAR_DEFNS_FN +# FIXlam +# SDF_USES_RUC_LSM +# SDF_USES_THOMPSON_MP +# THOMPSON_MP_CLIMO_FP +# VERBOSE +# +# task_get_extrn_lbcs: +# EXTRN_MDL_NAME_LBCS +# FV3GFS_FILE_FMT_LBCS +# +# task_make_lbcs: +# FVCOM_DIR +# FVCOM_FILE +# FVCOM_WCSTART +# KMP_AFFINITY_MAKE_LBCS +# OMP_NUM_THREADS_MAKE_LBCS +# OMP_STACKSIZE_MAKE_LBCS +# USE_FVCOM +# VCOORD_FILE +# +# global: +# HALO_BLEND +# +# cpl_aqm_parm: +# CPL_AQM +# +# constants: +# NH0 +# NH4 +# TILE_RGNL +# +#----------------------------------------------------------------------- +# + + # #----------------------------------------------------------------------- # @@ -8,7 +86,10 @@ #----------------------------------------------------------------------- # . 
$USHdir/source_util_funcs.sh -source_config_for_task "task_make_lbcs|task_get_extrn_lbcs" ${GLOBAL_VAR_DEFNS_FP} +set -x +for sect in user nco platform workflow global cpl_aqm_parm constants task_get_extrn_lbcs task_make_lbcs ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # @@ -115,6 +196,7 @@ case "${CCPP_PHYS_SUITE}" in "FV3_HRRR" | \ "FV3_RAP") if [ "${EXTRN_MDL_NAME_LBCS}" = "RAP" ] || \ + [ "${EXTRN_MDL_NAME_LBCS}" = "RRFS" ] || \ [ "${EXTRN_MDL_NAME_LBCS}" = "HRRR" ]; then varmap_file="GSDphys_var_map.txt" elif [ "${EXTRN_MDL_NAME_LBCS}" = "NAM" ] || \ @@ -158,7 +240,7 @@ esac # # fn_grib2: # Name (not including path) of the grib2 file generated by the external -# model. Currently used for NAM, RAP, and HRRR external model data. +# model. Currently used for NAM, RAP, and HRRR/RRFS external model data. # # input_type: # The "type" of input being provided to chgres_cube. This contains a combi- @@ -213,7 +295,7 @@ esac # tracers_input(:), it must also be 3rd in tracers(:). How can this be checked? # # NOTE: Really should use a varmap table for GFS, just like we do for -# RAP/HRRR. +# RAP/HRRR/RRFS. # # A non-prognostic variable that appears in the field_table for GSD physics @@ -237,7 +319,7 @@ tracers="\"\"" #----------------------------------------------------------------------- # # If the external model for LBCs is one that does not provide the aerosol -# fields needed by Thompson microphysics (currently only the HRRR and +# fields needed by Thompson microphysics (currently only the HRRR/RRFS and # RAP provide aerosol data) and if the physics suite uses Thompson # microphysics, set the variable thomp_mp_climo_file in the chgres_cube # namelist to the full path of the file containing aerosol climatology @@ -249,8 +331,9 @@ tracers="\"\"" # thomp_mp_climo_file="" if [ "${EXTRN_MDL_NAME_LBCS}" != "HRRR" -a \ + "${EXTRN_MDL_NAME_LBCS}" != "RRFS" -a \ "${EXTRN_MDL_NAME_LBCS}" != "RAP" ] && \ - [ "${SDF_USES_THOMPSON_MP}" = "TRUE" ]; then + [ $(boolify "${SDF_USES_THOMPSON_MP}") = "TRUE" ]; then thomp_mp_climo_file="${THOMPSON_MP_CLIMO_FP}" fi # @@ -320,7 +403,7 @@ case "${EXTRN_MDL_NAME_LBCS}" in input_type="grib2" ;; -"HRRR") +"HRRR"|"RRFS") external_model="HRRR" input_type="grib2" ;; @@ -421,6 +504,9 @@ for (( ii=0; ii<${num_fhrs}; ii=ii+bcgrpnum10 )); do "HRRR") fn_grib2="${EXTRN_MDL_FNS[$i]}" ;; + "RRFS") + fn_grib2="${EXTRN_MDL_FNS[$i]}" + ;; "NAM") fn_grib2="${EXTRN_MDL_FNS[$i]}" ;; @@ -495,6 +581,7 @@ FORTRAN namelist file has not specified for this external LBC model (EXTRN_MDL_N " nml_fn="fort.41" + # UW takes input from stdin when no -i/--input-config flag is provided (cat << EOF $settings EOF @@ -559,10 +646,10 @@ located in the following directory: lbc_spec_fhrs=( "${EXTRN_MDL_FHRS[$i]}" ) fcst_hhh=$(( ${lbc_spec_fhrs} - ${EXTRN_MDL_LBCS_OFFSET_HRS} )) fcst_hhh_FV3LAM=$( printf "%03d" "$fcst_hhh" ) - if [ "${CPL_AQM}" = "TRUE" ]; then + if [ $(boolify "${CPL_AQM}") = "TRUE" ]; then cp -p gfs.bndy.nc ${DATA_SHARE}/${NET}.${cycle}${dot_ensmem}.gfs_bndy.tile7.f${fcst_hhh_FV3LAM}.nc else - mv_vrfy gfs.bndy.nc ${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.gfs_bndy.tile7.f${fcst_hhh_FV3LAM}.nc + mv gfs.bndy.nc ${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.gfs_bndy.tile7.f${fcst_hhh_FV3LAM}.nc fi fi diff --git a/scripts/exregional_make_orog.sh b/scripts/exregional_make_orog.sh index 47430a802d..34b1675d8c 100755 --- a/scripts/exregional_make_orog.sh +++ b/scripts/exregional_make_orog.sh @@ -1,5 +1,86 @@ 
#!/usr/bin/env bash +# +#----------------------------------------------------------------------- +# +# This ex-script is responsible for creating orography files for the FV3 +# forecast. +# +# The output of this script is placed in a directory defined by OROG_DIR. +# +# More about the orography for the regional configuration of FV3: +# +# a) Only the tile 7 orography file is created. +# +# b) This orography file contains a halo of the same width (NHW) +# as the grid file for tile 7 generated by the make_grid script. +# +# c) Filtered versions of the orography files are created with the +# same halo width (NHW) as the unfiltered orography file and the grid +# file. FV3 requires two filtered orography files, one with no +# halo cells and one with 4 halo cells. +# +# This script does the following: +# +# - Create the raw orography files by running the orog executable. +# - Run the orog_gsl executable if any of several GSL-developed +# physics suites is chosen by the user. +# - Run the filter_topo executable on the raw orography files +# - Run the shave executable for the 0- and 4-cell halo orography +# files +# +# Run-time environment variables: +# +# DATA +# GLOBAL_VAR_DEFNS_FP +# REDIRECT_OUT_ERR +# +# Experiment variables +# +# user: +# EXECdir +# USHdir +# +# platform: +# FIXorg +# PRE_TASK_CMDS +# RUN_CMD_SERIAL +# +# workflow: +# CCPP_PHYS_SUITE +# CRES +# DOT_OR_USCORE +# FIXam +# FIXlam +# GRID_GEN_METHOD +# PREEXISTING_DIR_METHOD +# VERBOSE +# +# task_make_orog: +# KMP_AFFINITY_MAKE_OROG +# OMP_NUM_THREADS_MAKE_OROG +# OMP_STACKSIZE_MAKE_OROG +# OROG_DIR +# +# task_make_grid: +# GFDLgrid_NUM_CELLS +# GFDLgrid_STRETCH_FAC +# GFDLgrid_REFINE_RATIO +# +# constants: +# NH0 +# NH4 +# TILE_RGNL +# +# grid_params: +# NHW +# NX +# NY +# STRETCH_FAC +# +#----------------------------------------------------------------------- +# + # #----------------------------------------------------------------------- # @@ -8,7 +89,10 @@ #----------------------------------------------------------------------- # . $USHdir/source_util_funcs.sh -source_config_for_task "task_make_orog|task_make_grid" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow constants grid_params task_make_grid task_make_orog ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done + # #----------------------------------------------------------------------- # @@ -30,13 +114,7 @@ source_config_for_task "task_make_orog|task_make_grid" ${GLOBAL_VAR_DEFNS_FP} scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" ) scrfunc_fn=$( basename "${scrfunc_fp}" ) scrfunc_dir=$( dirname "${scrfunc_fp}" ) -# -#----------------------------------------------------------------------- -# -# Print message indicating entry into script. -# -#----------------------------------------------------------------------- -# + print_info_msg " ======================================================================== Entering script: \"${scrfunc_fn}\" @@ -54,17 +132,7 @@ This is the ex-script for the task that generates orography files. export KMP_AFFINITY=${KMP_AFFINITY_MAKE_OROG} export OMP_NUM_THREADS=${OMP_NUM_THREADS_MAKE_OROG} export OMP_STACKSIZE=${OMP_STACKSIZE_MAKE_OROG} -# -#----------------------------------------------------------------------- -# -# Load modules and set various computational parameters and directories. -# -# Note: -# These module loads should all be moved to modulefiles. This has been -# done for Hera but must still be done for other machines.
-# -#----------------------------------------------------------------------- -# + eval ${PRE_TASK_CMDS} if [ -z "${RUN_CMD_SERIAL:-}" ] ; then @@ -85,16 +153,16 @@ fi #----------------------------------------------------------------------- # check_for_preexist_dir_file "${OROG_DIR}" "${PREEXISTING_DIR_METHOD}" -mkdir_vrfy -p "${OROG_DIR}" +mkdir -p "${OROG_DIR}" raw_dir="${OROG_DIR}/raw_topo" -mkdir_vrfy -p "${raw_dir}" +mkdir -p "${raw_dir}" filter_dir="${OROG_DIR}/filtered_topo" -mkdir_vrfy -p "${filter_dir}" +mkdir -p "${filter_dir}" shave_dir="${OROG_DIR}/shave_tmp" -mkdir_vrfy -p "${shave_dir}" +mkdir -p "${shave_dir}" # # #----------------------------------------------------------------------- @@ -103,9 +171,6 @@ mkdir_vrfy -p "${shave_dir}" # #----------------------------------------------------------------------- # -# Set the name and path to the executable that generates the raw orography -# file and make sure that it exists. -# exec_fn="orog" exec_fp="$EXECdir/${exec_fn}" if [ ! -f "${exec_fp}" ]; then @@ -114,32 +179,21 @@ The executable (exec_fp) for generating the orography file does not exist: exec_fp = \"${exec_fp}\" Please ensure that you've built this executable." fi -# -# Create a temporary (work) directory in which to generate the raw orography -# file and change location to it. -# + DATA="${DATA:-${raw_dir}/tmp}" -mkdir_vrfy -p "${DATA}" -cd_vrfy "${DATA}" +mkdir -p "${DATA}" +cd "${DATA}" # # Copy topography and related data files from the system directory (FIXorg) # to the temporary directory. # -cp_vrfy ${FIXorg}/thirty.second.antarctic.new.bin fort.15 -cp_vrfy ${FIXorg}/landcover30.fixed . -cp_vrfy ${FIXorg}/gmted2010.30sec.int fort.235 +cp ${FIXorg}/thirty.second.antarctic.new.bin fort.15 +cp ${FIXorg}/landcover30.fixed . +cp ${FIXorg}/gmted2010.30sec.int fort.235 # #----------------------------------------------------------------------- # -# The orography filtering code reads in from the grid mosaic file the -# the number of tiles, the name of the grid file for each tile, and the -# dimensions (nx and ny) of each tile. Next, set the name of the grid -# mosaic file and create a symlink to it in filter_dir. -# -# Note that in the namelist file for the orography filtering code (created -# later below), the mosaic file name is saved in a variable called -# "grid_file". It would have been better to call this "mosaic_file" -# instead so it doesn't get confused with the grid file for a given tile... +# Get the grid file info from the mosaic file # #----------------------------------------------------------------------- # @@ -152,21 +206,15 @@ grid_fp="${FIXlam}/${grid_fn}" # #----------------------------------------------------------------------- # -# Set input parameters for the orography generation executable and write -# them to a text file. +# Set input parameters for the orog executable in a formatted text file. +# The executable takes its parameters via the command line. # -# Note that it doesn't matter what lonb and latb are set to below because -# if we specify an input grid file to the executable read in (which is -# what we do below), then if lonb and latb are not set to the dimensions -# of the grid specified in that file (divided by 2 since the grid file -# specifies a "supergrid"), then lonb and latb effectively get reset to -# the dimensions specified in the grid file. +# Note: lonb and latb are placeholders in this case since the program +# uses the ones obtained from the grid file. 
# #----------------------------------------------------------------------- # mtnres=1 -#lonb=$res -#latb=$res lonb=0 latb=0 jcap=0 @@ -195,15 +243,13 @@ cat "${input_redirect_fn}" # Call the executable to generate the raw orography file corresponding # to tile 7 (the regional domain) only. # -# The following will create an orography file named +# The script moves the output file from its temporary directory to the +# raw_topo subdirectory of OROG_DIR and names it: # -# oro.${CRES}.tile7.nc +# ${CRES}${DOT_OR_USCORE}raw_orog.tile7.halo${NHW}.nc # -# and will place it in OROG_DIR. Note that this file will include -# orography for a halo of width NHW cells around tile 7. The follow- -# ing will also create a work directory called tile7 under OROG_DIR. -# This work directory can be removed after the orography file has been -# created (it is currently not deleted). +# Note that this file will include orography for a halo of width NHW +# cells around tile 7. # #----------------------------------------------------------------------- # @@ -221,13 +267,11 @@ POST_STEP # # Change location to the original directory. # -cd_vrfy - +cd - # #----------------------------------------------------------------------- # -# Move the raw orography file from the temporary directory to raw_dir. -# In the process, rename it such that its name includes CRES and the halo -# width. +# Move the raw orography file and rename it. # #----------------------------------------------------------------------- # @@ -236,21 +280,21 @@ raw_orog_fn_prefix="${CRES}${DOT_OR_USCORE}raw_orog" fn_suffix_with_halo="tile${TILE_RGNL}.halo${NHW}.nc" raw_orog_fn="${raw_orog_fn_prefix}.${fn_suffix_with_halo}" raw_orog_fp="${raw_dir}/${raw_orog_fn}" -mv_vrfy "${raw_orog_fp_orig}" "${raw_orog_fp}" +mv "${raw_orog_fp_orig}" "${raw_orog_fp}" # #----------------------------------------------------------------------- # -# Call the code to generate the two orography statistics files (large- -# and small-scale) needed for the drag suite in the FV3_HRRR physics -# suite. +# Call the orog_gsl executable to generate the two orography statistics +# files (large- and small-scale) needed for the drag suite in certain +# GSL physics suites.
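The create_symlink_to_file calls in the next hunk switch from keyword arguments to positional ones. A minimal sketch of a helper with the assumed positional semantics (target, link path, relative flag; illustrative only, not the repo's actual implementation):

# Illustrative sketch only: create a symlink at $2 pointing to $1,
# made relative if $3 is "TRUE" (uses GNU ln's -r/--relative).
create_symlink_to_file() {
  local target="$1" symlink="$2" relative="${3:-FALSE}"
  local opts="-sf"
  if [ "${relative}" = "TRUE" ]; then
    opts="-sfr"
  fi
  ln ${opts} "${target}" "${symlink}"
}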
# #----------------------------------------------------------------------- # suites=( "FV3_RAP" "FV3_HRRR" "FV3_GFS_v15_thompson_mynn_lam3km" "FV3_GFS_v17_p8" ) if [[ ${suites[@]} =~ "${CCPP_PHYS_SUITE}" ]] ; then DATA="${DATA:-${OROG_DIR}/temp_orog_data}" - mkdir_vrfy -p ${DATA} - cd_vrfy ${DATA} + mkdir -p ${DATA} + cd ${DATA} mosaic_fn_gwd="${CRES}${DOT_OR_USCORE}mosaic.halo${NH4}.nc" mosaic_fp_gwd="${FIXlam}/${mosaic_fn_gwd}" grid_fn_gwd=$( get_charvar_from_netcdf "${mosaic_fp_gwd}" "gridfiles" ) || \ @@ -258,12 +302,9 @@ if [[ ${suites[@]} =~ "${CCPP_PHYS_SUITE}" ]] ; then grid_fp_gwd="${FIXlam}/${grid_fn_gwd}" ls_fn="geo_em.d01.lat-lon.2.5m.HGT_M.nc" ss_fn="HGT.Beljaars_filtered.lat-lon.30s_res.nc" - create_symlink_to_file target="${grid_fp_gwd}" symlink="${DATA}/${grid_fn_gwd}" \ - relative="TRUE" - create_symlink_to_file target="${FIXam}/${ls_fn}" symlink="${DATA}/${ls_fn}" \ - relative="TRUE" - create_symlink_to_file target="${FIXam}/${ss_fn}" symlink="${DATA}/${ss_fn}" \ - relative="TRUE" + create_symlink_to_file ${grid_fp_gwd} ${DATA}/${grid_fn_gwd} TRUE + create_symlink_to_file ${FIXam}/${ls_fn} ${DATA}/${ls_fn} TRUE + create_symlink_to_file ${FIXam}/${ss_fn} ${DATA}/${ss_fn} TRUE input_redirect_fn="grid_info.dat" cat > "${input_redirect_fn}" < "${filter_dir}/input.nml" < "${filter_dir}/input.nml" < ${nml_fn} + > ${ascii_fn} PREP_STEP -eval ${RUN_CMD_SERIAL} ${exec_fp} < ${nml_fn} ${REDIRECT_OUT_ERR} || \ +eval ${RUN_CMD_SERIAL} ${exec_fp} < ${ascii_fn} ${REDIRECT_OUT_ERR} || \ print_err_msg_exit "\ Call to executable (exec_fp) to generate a (filtered) orography file with a ${NH0}-cell-wide halo from the orography file with a {NHW}-cell-wide halo returned with nonzero exit code: exec_fp = \"${exec_fp}\" -The namelist file (nml_fn) used in this call is in directory shave_dir: - nml_fn = \"${nml_fn}\" +The config file (ascii_fn) used in this call is in directory shave_dir: + ascii_fn = \"${ascii_fn}\" shave_dir = \"${shave_dir}\"" POST_STEP -mv_vrfy ${shaved_fp} ${OROG_DIR} +mv ${shaved_fp} ${OROG_DIR} # -# Create an input namelist file for the shave executable to generate an +# Create an input config file for the shave executable to generate an # orography file with a 4-cell-wide halo from the one with a wide halo. # Then call the shave executable. Finally, move the resultant file to # the OROG_DIR directory. @@ -528,33 +554,33 @@ print_info_msg "$VERBOSE" " \"Shaving\" filtered orography file with a ${NHW}-cell-wide halo to obtain a filtered orography file with a ${NH4}-cell-wide halo..." 
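For reference, the one-line config consumed by the shave executable (written by the printf calls in these hunks) lays out: nx, ny, halo width, quoted input path, quoted output path. A standalone rendering with hypothetical values (NX=396, NY=232, CRES=C403, NHW=6):

# Hypothetical example of the one-line config that shave reads on stdin:
# nx ny halo_width "input_file" "output_file" (all values made up here).
printf "%s %s %s %s %s\n" 396 232 0 \
  '"C403_oro_data.tile7.halo6.nc"' '"C403_oro_data.tile7.halo0.nc"' \
  > input.shave.orog.halo0
# Yields: 396 232 0 "C403_oro_data.tile7.halo6.nc" "C403_oro_data.tile7.halo0.nc"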
-nml_fn="input.shave.orog.halo${NH4}" +ascii_fn="input.shave.orog.halo${NH4}" shaved_fp="${shave_dir}/${CRES}${DOT_OR_USCORE}oro_data.tile${TILE_RGNL}.halo${NH4}.nc" printf "%s %s %s %s %s\n" \ $NX $NY ${NH4} \"${unshaved_fp}\" \"${shaved_fp}\" \ - > ${nml_fn} + > ${ascii_fn} PREP_STEP -eval ${RUN_CMD_SERIAL} ${exec_fp} < ${nml_fn} ${REDIRECT_OUT_ERR} || \ +eval ${RUN_CMD_SERIAL} ${exec_fp} < ${ascii_fn} ${REDIRECT_OUT_ERR} || \ print_err_msg_exit "\ Call to executable (exec_fp) to generate a (filtered) orography file with a ${NH4}-cell-wide halo from the orography file with a {NHW}-cell-wide halo returned with nonzero exit code: exec_fp = \"${exec_fp}\" -The namelist file (nml_fn) used in this call is in directory shave_dir: - nml_fn = \"${nml_fn}\" +The namelist file (ascii_fn) used in this call is in directory shave_dir: + ascii_fn = \"${ascii_fn}\" shave_dir = \"${shave_dir}\"" POST_STEP -mv_vrfy "${shaved_fp}" "${OROG_DIR}" +mv "${shaved_fp}" "${OROG_DIR}" # # Change location to the original directory. # -cd_vrfy - +cd - # #----------------------------------------------------------------------- # -# Add link in ORIG_DIR directory to the orography file with a 4-cell-wide -# halo such that the link name do not contain the halo width. These links +# Add link in OROG_DIR directory to the orography file with a 4-cell-wide +# halo such that the link name does not contain the halo width. These links # are needed by the make_sfc_climo task. # # NOTE: It would be nice to modify the sfc_climo_gen_code to read in @@ -567,13 +593,7 @@ python3 $USHdir/link_fix.py \ --file-group "orog" || \ print_err_msg_exit "\ Call to function to create links to orography files failed." -# -#----------------------------------------------------------------------- -# -# Print message indicating successful completion of script. -# -#----------------------------------------------------------------------- -# + print_info_msg " ======================================================================== Orography files with various halo widths generated successfully!!! diff --git a/scripts/exregional_make_sfc_climo.sh b/scripts/exregional_make_sfc_climo.sh index 868029a488..a916228b1f 100755 --- a/scripts/exregional_make_sfc_climo.sh +++ b/scripts/exregional_make_sfc_climo.sh @@ -1,5 +1,52 @@ #!/usr/bin/env bash +# +#----------------------------------------------------------------------- +# +# This ex-script generates surface climatology files needed to run FV3 +# forecasts. +# +# The script runs the sfc_climo_gen UFS Utils program, and links the +# output to the SFC_CLIMO_GEN directory +# +# Run-time environment variables: +# +# DATA +# GLOBAL_VAR_DEFNS_FP +# REDIRECT_OUT_ERR +# +# Experiment variables +# +# user: +# EXECdir +# USHdir +# +# platform: +# FIXsfc +# PRE_TASK_CMDS +# RUN_CMD_UTILS +# +# workflow: +# CRES +# DOT_OR_USCORE +# FIXlam +# VERBOSE +# +# task_make_sfc_climo: +# KMP_AFFINITY_MAKE_SFC_CLIMO +# OMP_NUM_THREADS_MAKE_SFC_CLIMO +# OMP_STACKSIZE_MAKE_SFC_CLIMO +# SFC_CLIMO_DIR +# +# constants: +# GTYPE +# NH0 +# NH4 +# TILE_RGNL +# +#----------------------------------------------------------------------- +# + # #----------------------------------------------------------------------- # @@ -8,7 +55,9 @@ #----------------------------------------------------------------------- # . 
$USHdir/source_util_funcs.sh -source_config_for_task "task_make_sfc_climo" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow constants task_make_sfc_climo ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # @@ -70,7 +119,7 @@ ulimit -s unlimited # #----------------------------------------------------------------------- # -cd_vrfy $DATA +cd $DATA # #----------------------------------------------------------------------- # @@ -162,7 +211,7 @@ case "$GTYPE" in # for fn in *.nc; do if [[ -f $fn ]]; then - mv_vrfy $fn ${SFC_CLIMO_DIR}/${CRES}_${fn} + mv $fn ${SFC_CLIMO_DIR}/${CRES}_${fn} fi done ;; @@ -181,7 +230,7 @@ case "$GTYPE" in for fn in *.halo.nc; do if [ -f $fn ]; then bn="${fn%.halo.nc}" - mv_vrfy $fn ${SFC_CLIMO_DIR}/${CRES}.${bn}.halo${NH4}.nc + mv $fn ${SFC_CLIMO_DIR}/${CRES}.${bn}.halo${NH4}.nc fi done # @@ -194,7 +243,7 @@ case "$GTYPE" in for fn in *.nc; do if [ -f $fn ]; then bn="${fn%.nc}" - mv_vrfy $fn ${SFC_CLIMO_DIR}/${CRES}.${bn}.halo${NH0}.nc + mv $fn ${SFC_CLIMO_DIR}/${CRES}.${bn}.halo${NH0}.nc fi done ;; diff --git a/scripts/exregional_plot_allvars.py b/scripts/exregional_plot_allvars.py index 27eff0f4b0..14d15c07f0 100755 --- a/scripts/exregional_plot_allvars.py +++ b/scripts/exregional_plot_allvars.py @@ -429,7 +429,7 @@ def setup_logging(debug=False): t1a = time.perf_counter() # Sea level pressure - slp = data1.select(name="Pressure reduced to MSL")[0].values * 0.01 + slp = data1.select(name="MSLP (Eta model reduction)")[0].values * 0.01 slpsmooth = ndimage.gaussian_filter(slp, 13.78) # 2-m temperature @@ -484,7 +484,13 @@ def setup_logging(debug=False): ) # Composite reflectivity - refc = data1.select(name="Maximum/Composite radar reflectivity")[0].values + # refc is the 37th entry in the GRIB2 post output file + # First rewind to the start of the GRIB2 file + data1.rewind() + # Advance 36 entries in the GRIB2 file + data1.seek(36) + # Read values from the 37th entry in the GRIB2 file + refc = data1.readline().values if fhr > 0: # Max/Min Hourly 2-5 km Updraft Helicity @@ -577,6 +583,7 @@ def plot_all(dom): facecolor="none", linewidth=fline_wd, alpha=falpha, + zorder=4, ) coastline = cfeature.NaturalEarthFeature( "physical", @@ -586,6 +593,7 @@ def plot_all(dom): facecolor="none", linewidth=fline_wd, alpha=falpha, + zorder=4, ) states = cfeature.NaturalEarthFeature( "cultural", @@ -596,6 +604,7 @@ def plot_all(dom): linewidth=fline_wd, linestyle=":", alpha=falpha, + zorder=4, ) borders = cfeature.NaturalEarthFeature( "cultural", @@ -605,6 +614,7 @@ def plot_all(dom): facecolor="none", linewidth=fline_wd, alpha=falpha, + zorder=4, ) # All lat lons are earth relative, so setup the associated projection correct for that data diff --git a/scripts/exregional_plot_allvars_diff.py b/scripts/exregional_plot_allvars_diff.py index e51a3a6b57..c493f68f65 100755 --- a/scripts/exregional_plot_allvars_diff.py +++ b/scripts/exregional_plot_allvars_diff.py @@ -446,9 +446,9 @@ def setup_logging(debug=False): t1a = time.perf_counter() # Sea level pressure - slp_1 = data1.select(name="Pressure reduced to MSL")[0].values * 0.01 + slp_1 = data1.select(name="MSLP (Eta model reduction)")[0].values * 0.01 slpsmooth_1 = ndimage.gaussian_filter(slp_1, 13.78) - slp_2 = data2.select(name="Pressure reduced to MSL")[0].values * 0.01 + slp_2 = data2.select(name="MSLP (Eta model reduction)")[0].values * 0.01 slpsmooth_2 = ndimage.gaussian_filter(slp_2, 13.78) slp_diff = slp_2 - slp_1 @@ -544,7 +544,12 @@ 
def setup_logging(debug=False): qpf_diff = qpf_2 - qpf_1 # Composite reflectivity + # refc is the 37th entry in the GRIB2 post output file + data1.rewind() + data1.seek(36) refc_1 = data1.select(name="Maximum/Composite radar reflectivity")[0].values + data2.rewind() + data2.seek(36) refc_2 = data2.select(name="Maximum/Composite radar reflectivity")[0].values if fhr > 0: @@ -652,6 +657,7 @@ def plot_all(dom): facecolor="none", linewidth=fline_wd, alpha=falpha, + zorder=4, ) coastline = cfeature.NaturalEarthFeature( "physical", @@ -661,6 +667,7 @@ def plot_all(dom): facecolor="none", linewidth=fline_wd, alpha=falpha, + zorder=4, ) states = cfeature.NaturalEarthFeature( "cultural", @@ -671,6 +678,7 @@ def plot_all(dom): linewidth=fline_wd, linestyle=":", alpha=falpha, + zorder=4, ) borders = cfeature.NaturalEarthFeature( "cultural", @@ -680,6 +688,7 @@ def plot_all(dom): facecolor="none", linewidth=fline_wd, alpha=falpha, + zorder=4, ) # All lat lons are earth relative, so setup the associated projection correct for that data diff --git a/scripts/exregional_run_fcst.sh b/scripts/exregional_run_fcst.sh index 9e6dc38584..c5e7f6601e 100755 --- a/scripts/exregional_run_fcst.sh +++ b/scripts/exregional_run_fcst.sh @@ -1,5 +1,113 @@ #!/usr/bin/env bash + +# +#----------------------------------------------------------------------- +# +# This ex-script is responsible for running the FV3 regional forecast. +# +# Run-time environment variables: +# +# CDATE +# COMIN +# COMOUT +# COMROOT +# DATA +# DBNROOT +# GLOBAL_VAR_DEFNS_FP +# INPUT_DATA +# NET +# PDY +# REDIRECT_OUT_ERR +# RUN +# SENDDBN +# SLASH_ENSMEM_SUBDIR +# +# Experiment variables +# +# user: +# MACHINE +# PARMdir +# RUN_ENVIR +# USHdir +# +# platform: +# PRE_TASK_CMDS +# RUN_CMD_FCST +# +# workflow: +# CCPP_PHYS_DIR +# CCPP_PHYS_SUITE +# COLDSTART +# CRES +# DATA_TABLE_FN +# DATA_TABLE_FP +# DATE_FIRST_CYCL +# DOT_OR_USCORE +# EXPTDIR +# FCST_LEN_CYCL +# FCST_LEN_HRS +# FIELD_DICT_FP +# FIELD_DICT_FN +# FIELD_TABLE_FN +# FIELD_TABLE_FP +# FIXam +# FIXclim +# FIXlam +# FV3_NML_FN +# FV3_NML_FP +# FV3_NML_STOCH_FP +# INCR_CYCL_FREQ +# PREDEF_GRID_NAME +# SYMLINK_FIX_FILES +# VERBOSE +# +# task_get_extrn_lbcs: +# LBC_SPEC_INTVL_HRS +# +# task_run_fcst: +# DO_FCST_RESTART +# DT_ATMOS +# FV3_EXEC_FP +# KMP_AFFINITY_RUN_FCST +# OMP_NUM_THREADS_RUN_FCST +# OMP_STACKSIZE_RUN_FCST +# PRINT_ESMF +# RESTART_INTERVAL +# USE_MERRA_CLIMO +# WRITE_DOPOST +# +# task_run_post: +# CUSTOM_POST_CONFIG_FP +# DT_SUBHOURLY_POST_MNTS +# POST_OUTPUT_DOMAIN_NAME +# SUB_HOURLY_POST +# USE_CUSTOM_POST_CONFIG_FILE +# +# global: +# DO_ENSEMBLE +# DO_LSM_SPP +# DO_SHUM +# DO_SKEB +# DO_SPP +# DO_SPPT +# +# cpl_aqm_parm: +# AQM_RC_PRODUCT_FN +# CPL_AQM +# +# constants: +# NH0 +# NH3 +# NH4 +# TILE_RGNL +# +# fixed_files: +# CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING +# +#----------------------------------------------------------------------- +# + # #----------------------------------------------------------------------- # @@ -8,7 +116,11 @@ #----------------------------------------------------------------------- # . 
$USHdir/source_util_funcs.sh -source_config_for_task "task_run_fcst|task_run_post|task_get_extrn_ics|task_get_extrn_lbcs" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow global cpl_aqm_parm constants fixed_files \ + task_get_extrn_lbcs task_run_fcst task_run_post fire; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done + # #----------------------------------------------------------------------- # @@ -57,7 +169,7 @@ export OMP_NUM_THREADS=${OMP_NUM_THREADS_RUN_FCST} export OMP_STACKSIZE=${OMP_STACKSIZE_RUN_FCST} export MPI_TYPE_DEPTH=20 export ESMF_RUNTIME_COMPLIANCECHECK=OFF:depth=4 -if [ "${PRINT_ESMF}" = "TRUE" ]; then +if [ $(boolify "${PRINT_ESMF}") = "TRUE" ]; then export ESMF_RUNTIME_PROFILE=ON export ESMF_RUNTIME_PROFILE_OUTPUT="SUMMARY" fi @@ -84,7 +196,6 @@ if [ ${#FCST_LEN_CYCL[@]} -gt 1 ]; then CYCLE_IDX=$(( ${cyc_mod} / ${INCR_CYCL_FREQ} )) FCST_LEN_HRS=${FCST_LEN_CYCL[$CYCLE_IDX]} fi - # #----------------------------------------------------------------------- # @@ -98,7 +209,7 @@ Creating links in the INPUT subdirectory of the current run directory to the grid and (filtered) orography files ..." # Create links to fix files in the FIXlam directory. -cd_vrfy ${DATA}/INPUT +cd ${DATA}/INPUT # # For experiments in which the TN_MAKE_GRID task is run, we make the @@ -121,8 +232,7 @@ fi #target="${FIXlam}/${CRES}${DOT_OR_USCORE}mosaic.halo${NH4}.nc" # Should this point to this halo4 file or a halo3 file??? target="${FIXlam}/${CRES}${DOT_OR_USCORE}mosaic.halo${NH3}.nc" # Should this point to this halo4 file or a halo3 file??? symlink="grid_spec.nc" -create_symlink_to_file target="$target" symlink="$symlink" \ - relative="${relative_link_flag}" +create_symlink_to_file $target $symlink ${relative_link_flag} # Symlink to halo-3 grid file with "halo3" stripped from name. mosaic_fn="grid_spec.nc" @@ -130,8 +240,7 @@ grid_fn=$( get_charvar_from_netcdf "${mosaic_fn}" "gridfiles" ) target="${FIXlam}/${grid_fn}" symlink="${grid_fn}" -create_symlink_to_file target="$target" symlink="$symlink" \ - relative="${relative_link_flag}" +create_symlink_to_file $target $symlink ${relative_link_flag} # Symlink to halo-4 grid file with "${CRES}_" stripped from name. # @@ -147,8 +256,7 @@ create_symlink_to_file target="$target" symlink="$symlink" \ # target="${FIXlam}/${CRES}${DOT_OR_USCORE}grid.tile${TILE_RGNL}.halo${NH4}.nc" symlink="grid.tile${TILE_RGNL}.halo${NH4}.nc" -create_symlink_to_file target="$target" symlink="$symlink" \ - relative="${relative_link_flag}" +create_symlink_to_file $target $symlink ${relative_link_flag} # @@ -165,8 +273,7 @@ fi # Symlink to halo-0 orography file with "${CRES}_" and "halo0" stripped from name. target="${FIXlam}/${CRES}${DOT_OR_USCORE}oro_data.tile${TILE_RGNL}.halo${NH0}.nc" symlink="oro_data.nc" -create_symlink_to_file target="$target" symlink="$symlink" \ - relative="${relative_link_flag}" +create_symlink_to_file $target $symlink ${relative_link_flag} # # Symlink to halo-4 orography file with "${CRES}_" stripped from name. 
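The calls above assume the helper's new positional interface: target path, then link path, then a TRUE/FALSE relative flag. A minimal stand-in with that interface, for illustration only (the real SRW helper in ush/bash_utils presumably also validates its arguments):

create_symlink_to_file() {
  # Positional stand-in: $1 = target, $2 = symlink, $3 = relative flag.
  local target="$1" symlink="$2" relative="${3:-FALSE}"
  local opts="-sf"
  if [ "${relative}" = "TRUE" ]; then
    opts="-sfr"   # GNU ln --relative: store a relative path in the link
  fi
  ln ${opts} -- "${target}" "${symlink}"
}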
# @@ -182,8 +289,7 @@ create_symlink_to_file target="$target" symlink="$symlink" \ # target="${FIXlam}/${CRES}${DOT_OR_USCORE}oro_data.tile${TILE_RGNL}.halo${NH4}.nc" symlink="oro_data.tile${TILE_RGNL}.halo${NH4}.nc" -create_symlink_to_file target="$target" symlink="$symlink" \ - relative="${relative_link_flag}" +create_symlink_to_file $target $symlink ${relative_link_flag} # # If using the FV3_HRRR physics suite, there are two files (that contain # statistics of the orography) that are needed by the gravity wave drag @@ -198,8 +304,7 @@ if [[ ${suites[@]} =~ "${CCPP_PHYS_SUITE}" ]] ; then for file_id in "${file_ids[@]}"; do target="${FIXlam}/${CRES}${DOT_OR_USCORE}oro_data_${file_id}.tile${TILE_RGNL}.halo${NH0}.nc" symlink="oro_data_${file_id}.nc" - create_symlink_to_file target="$target" symlink="$symlink" \ - relative="${relative_link_flag}" + create_symlink_to_file $target $symlink ${relative_link_flag} done fi # @@ -225,7 +330,7 @@ of the current run directory (DATA), where DATA = \"${DATA}\" ..." -cd_vrfy ${DATA}/INPUT +cd ${DATA}/INPUT # # The symlinks to be created point to files in the same directory (INPUT), @@ -233,58 +338,54 @@ cd_vrfy ${DATA}/INPUT # relative_link_flag="FALSE" -if [ "${CPL_AQM}" = "TRUE" ]; then +if [ $(boolify "${CPL_AQM}") = "TRUE" ]; then COMIN="${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}" #temporary path, should be removed later target="${COMIN}/${NET}.${cycle}${dot_ensmem}.gfs_data.tile${TILE_RGNL}.halo${NH0}.nc" symlink="gfs_data.nc" - create_symlink_to_file target="$target" symlink="$symlink" relative="${relative_link_flag}" + create_symlink_to_file $target $symlink ${relative_link_flag} target="${COMIN}/${NET}.${cycle}${dot_ensmem}.sfc_data.tile${TILE_RGNL}.halo${NH0}.nc" symlink="sfc_data.nc" - create_symlink_to_file target="$target" symlink="$symlink" relative="${relative_link_flag}" + create_symlink_to_file $target $symlink ${relative_link_flag} target="${COMIN}/${NET}.${cycle}${dot_ensmem}.gfs_ctrl.nc" symlink="gfs_ctrl.nc" - create_symlink_to_file target="$target" symlink="$symlink" relative="${relative_link_flag}" + create_symlink_to_file $target $symlink ${relative_link_flag} for fhr in $(seq -f "%03g" 0 ${LBC_SPEC_INTVL_HRS} ${FCST_LEN_HRS}); do target="${COMIN}/${NET}.${cycle}${dot_ensmem}.gfs_bndy.tile${TILE_RGNL}.f${fhr}.nc" symlink="gfs_bndy.tile${TILE_RGNL}.${fhr}.nc" - create_symlink_to_file target="$target" symlink="$symlink" relative="${relative_link_flag}" + create_symlink_to_file $target $symlink ${relative_link_flag} done target="${COMIN}/${NET}.${cycle}${dot_ensmem}.NEXUS_Expt.nc" symlink="NEXUS_Expt.nc" - create_symlink_to_file target="$target" symlink="$symlink" relative="${relative_link_flag}" + create_symlink_to_file $target $symlink ${relative_link_flag} # create symlink to PT for point source in SRW-AQM target="${COMIN}/${NET}.${cycle}${dot_ensmem}.PT.nc" if [ -f ${target} ]; then symlink="PT.nc" - create_symlink_to_file target="$target" symlink="$symlink" relative="${relative_link_flag}" + create_symlink_to_file $target $symlink ${relative_link_flag} fi else target="${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.gfs_data.tile${TILE_RGNL}.halo${NH0}.nc" symlink="gfs_data.nc" - create_symlink_to_file target="$target" symlink="$symlink" \ - relative="${relative_link_flag}" + create_symlink_to_file $target $symlink ${relative_link_flag} target="${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.sfc_data.tile${TILE_RGNL}.halo${NH0}.nc" symlink="sfc_data.nc" - create_symlink_to_file target="$target" 
symlink="$symlink" \ - relative="${relative_link_flag}" + create_symlink_to_file $target $symlink ${relative_link_flag} target="${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.gfs_ctrl.nc" symlink="gfs_ctrl.nc" - create_symlink_to_file target="$target" symlink="$symlink" \ - relative="${relative_link_flag}" + create_symlink_to_file $target $symlink ${relative_link_flag} for fhr in $(seq -f "%03g" 0 ${LBC_SPEC_INTVL_HRS} ${FCST_LEN_HRS}); do target="${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.gfs_bndy.tile${TILE_RGNL}.f${fhr}.nc" symlink="gfs_bndy.tile${TILE_RGNL}.${fhr}.nc" - create_symlink_to_file target="$target" symlink="$symlink" \ - relative="${relative_link_flag}" + create_symlink_to_file $target $symlink ${relative_link_flag} done fi # @@ -298,7 +399,7 @@ fi # #----------------------------------------------------------------------- # -cd_vrfy ${DATA} +cd ${DATA} print_info_msg "$VERBOSE" " Creating links in the current run directory (DATA) to fixed (i.e. @@ -317,7 +418,7 @@ static) files in the FIXam directory: # isn't really an advantage to using relative symlinks, so we use symlinks # with absolute paths. # -if [ "${SYMLINK_FIX_FILES}" == "FALSE" ]; then +if [ $(boolify "${SYMLINK_FIX_FILES}") = "FALSE" ]; then relative_link_flag="TRUE" else relative_link_flag="FALSE" @@ -335,8 +436,7 @@ for (( i=0; i<${num_symlinks}; i++ )); do symlink="${DATA}/$symlink" target="$FIXam/$target" - create_symlink_to_file target="$target" symlink="$symlink" \ - relative="${relative_link_flag}" + create_symlink_to_file $target $symlink ${relative_link_flag} done # @@ -347,7 +447,7 @@ done # #----------------------------------------------------------------------- # -if [ "${USE_MERRA_CLIMO}" = "TRUE" ]; then +if [ $(boolify "${USE_MERRA_CLIMO}") = "TRUE" ]; then for f_nm_path in ${FIXclim}/*; do f_nm=$( basename "${f_nm_path}" ) pre_f="${f_nm%%.*}" @@ -359,8 +459,7 @@ if [ "${USE_MERRA_CLIMO}" = "TRUE" ]; then symlink="${DATA}/${pre_f}.dat" fi target="${f_nm_path}" - create_symlink_to_file target="$target" symlink="$symlink" \ - relative="${relative_link_flag}" + create_symlink_to_file $target $symlink ${relative_link_flag} done fi # @@ -372,8 +471,8 @@ fi # #----------------------------------------------------------------------- # -cd_vrfy ${DATA} -rm_vrfy -f time_stamp.out +cd ${DATA} +rm -f time_stamp.out # #----------------------------------------------------------------------- # @@ -403,28 +502,22 @@ else relative_link_flag="FALSE" fi -create_symlink_to_file target="${DATA_TABLE_FP}" \ - symlink="${DATA}/${DATA_TABLE_FN}" \ - relative="${relative_link_flag}" +create_symlink_to_file ${DATA_TABLE_FP} ${DATA}/${DATA_TABLE_FN} ${relative_link_flag} -create_symlink_to_file target="${FIELD_TABLE_FP}" \ - symlink="${DATA}/${FIELD_TABLE_FN}" \ - relative="${relative_link_flag}" +create_symlink_to_file ${FIELD_TABLE_FP} ${DATA}/${FIELD_TABLE_FN} ${relative_link_flag} -create_symlink_to_file target="${FIELD_DICT_FP}" \ - symlink="${DATA}/${FIELD_DICT_FN}" \ - relative="${relative_link_flag}" +create_symlink_to_file ${FIELD_DICT_FP} ${DATA}/${FIELD_DICT_FN} ${relative_link_flag} -if [ ${WRITE_DOPOST} = "TRUE" ]; then - cp_vrfy ${PARMdir}/upp/nam_micro_lookup.dat ./eta_micro_lookup.dat - if [ ${USE_CUSTOM_POST_CONFIG_FILE} = "TRUE" ]; then +if [ $(boolify ${WRITE_DOPOST}) = "TRUE" ]; then + cp ${PARMdir}/upp/nam_micro_lookup.dat ./eta_micro_lookup.dat + if [ $(boolify ${USE_CUSTOM_POST_CONFIG_FILE}) = "TRUE" ]; then post_config_fp="${CUSTOM_POST_CONFIG_FP}" print_info_msg " 
==================================================================== CUSTOM_POST_CONFIG_FP = \"${CUSTOM_POST_CONFIG_FP}\" ====================================================================" else - if [ "${CPL_AQM}" = "TRUE" ]; then + if [ $(boolify "${CPL_AQM}") = "TRUE" ]; then post_config_fp="${PARMdir}/upp/postxconfig-NT-AQM.txt" else post_config_fp="${PARMdir}/upp/postxconfig-NT-fv3lam.txt" @@ -434,11 +527,11 @@ if [ ${WRITE_DOPOST} = "TRUE" ]; then post_config_fp = \"${post_config_fp}\" ====================================================================" fi - cp_vrfy ${post_config_fp} ./postxconfig-NT_FH00.txt - cp_vrfy ${post_config_fp} ./postxconfig-NT.txt - cp_vrfy ${PARMdir}/upp/params_grib2_tbl_new . + cp ${post_config_fp} ./postxconfig-NT_FH00.txt + cp ${post_config_fp} ./postxconfig-NT.txt + cp ${PARMdir}/upp/params_grib2_tbl_new . # Set itag for inline-post: - if [ "${CPL_AQM}" = "TRUE" ]; then + if [ $(boolify "${CPL_AQM}") = "TRUE" ]; then post_itag_add="aqf_on=.true.," else post_itag_add="" @@ -463,7 +556,7 @@ fi #---------------------------------------------------------------------- # -cp_vrfy ${CCPP_PHYS_DIR}/noahmptable.tbl . +cp ${CCPP_PHYS_DIR}/noahmptable.tbl . # #----------------------------------------------------------------------- @@ -473,14 +566,17 @@ cp_vrfy ${CCPP_PHYS_DIR}/noahmptable.tbl . #----------------------------------------------------------------------- # STOCH="FALSE" -if ([ "${DO_SPP}" = "TRUE" ] || [ "${DO_SPPT}" = "TRUE" ] || [ "${DO_SHUM}" = "TRUE" ] || \ - [ "${DO_SKEB}" = "TRUE" ] || [ "${DO_LSM_SPP}" = "TRUE" ]); then +if ([ $(boolify "${DO_SPP}") = "TRUE" ] || \ + [ $(boolify "${DO_SPPT}") = "TRUE" ] || \ + [ $(boolify "${DO_SHUM}") = "TRUE" ] || \ + [ $(boolify "${DO_SKEB}") = "TRUE" ] || \ + [ $(boolify "${DO_LSM_SPP}") = "TRUE" ]); then STOCH="TRUE" fi -if [ "${STOCH}" == "TRUE" ]; then - cp_vrfy ${FV3_NML_STOCH_FP} ${DATA}/${FV3_NML_FN} +if [ "${STOCH}" = "TRUE" ]; then + cp ${FV3_NML_STOCH_FP} ${DATA}/${FV3_NML_FN} else - ln_vrfy -sf ${FV3_NML_FP} ${DATA}/${FV3_NML_FN} + ln -sf ${FV3_NML_FP} ${DATA}/${FV3_NML_FN} fi # @@ -490,7 +586,7 @@ fi # #----------------------------------------------------------------------- # -if ([ "$STOCH" == "TRUE" ] && [ "${DO_ENSEMBLE}" = "TRUE" ]); then +if ([ "$STOCH" == "TRUE" ] && [ $(boolify "${DO_ENSEMBLE}") = "TRUE" ]); then python3 $USHdir/set_fv3nml_ens_stoch_seeds.py \ --path-to-defns ${GLOBAL_VAR_DEFNS_FP} \ --cdate "$CDATE" || print_err_msg_exit "\ @@ -507,7 +603,7 @@ fi # #----------------------------------------------------------------------- # -if [ "${CPL_AQM}" = "TRUE" ] && [ "${PREDEF_GRID_NAME}" = "AQM_NA_13km" ]; then +if [ $(boolify "${CPL_AQM}") = "TRUE" ] && [ "${PREDEF_GRID_NAME}" = "AQM_NA_13km" ]; then python3 $USHdir/update_input_nml.py \ --namelist "${DATA}/${FV3_NML_FN}" \ --aqm_na_13km || print_err_msg_exit "\ @@ -525,11 +621,11 @@ fi #----------------------------------------------------------------------- # flag_fcst_restart="FALSE" -if [ "${DO_FCST_RESTART}" = "TRUE" ] && [ "$(ls -A ${DATA}/RESTART )" ]; then - cp_vrfy input.nml input.nml_orig - cp_vrfy model_configure model_configure_orig - if [ "${CPL_AQM}" = "TRUE" ]; then - cp_vrfy aqm.rc aqm.rc_orig +if [ $(boolify "${DO_FCST_RESTART}") = "TRUE" ] && [ "$(ls -A ${DATA}/RESTART )" ]; then + cp input.nml input.nml_orig + cp model_configure model_configure_orig + if [ $(boolify "${CPL_AQM}") = "TRUE" ]; then + cp aqm.rc aqm.rc_orig fi relative_link_flag="FALSE" flag_fcst_restart="TRUE" @@ -576,14 +672,14 @@ for the current 
cycle's (cdate) run directory (DATA) failed: done # Create soft-link of restart files in INPUT directory - cd_vrfy ${DATA}/INPUT + cd ${DATA}/INPUT for file_id in "${file_ids[@]}"; do - rm_vrfy "${file_id}" + rm "${file_id}" target="${DATA}/RESTART/${rst_yyyymmdd}.${rst_hh}0000.${file_id}" symlink="${file_id}" - create_symlink_to_file target="$target" symlink="$symlink" relative="${relative_link_flag}" + create_symlink_to_file $target $symlink ${relative_link_flag} done - cd_vrfy ${DATA} + cd ${DATA} fi # #----------------------------------------------------------------------- @@ -592,8 +688,10 @@ fi # #----------------------------------------------------------------------- # -if [ "${CPL_AQM}" = "TRUE" ]; then - if [ "${COLDSTART}" = "TRUE" ] && [ "${PDY}${cyc}" = "${DATE_FIRST_CYCL:0:10}" ] && [ "${flag_fcst_restart}" = "FALSE" ]; then +if [ $(boolify "${CPL_AQM}") = "TRUE" ]; then + if [ $(boolify "${COLDSTART}") = "TRUE" ] && \ + [ "${PDY}${cyc}" = "${DATE_FIRST_CYCL:0:10}" ] && \ + [ $(boolify "${flag_fcst_restart}") = "FALSE" ]; then init_concentrations="true" else init_concentrations="false" @@ -657,6 +755,49 @@ fi # #----------------------------------------------------------------------- # +# Call the function for updating the &time section of namelist.fire +# +#----------------------------------------------------------------------- +# +if [ $(boolify "${UFS_FIRE}") = "TRUE" ]; then + FCST_END_DATE=$( $DATE_UTIL --utc --date "${PDY} ${cyc} UTC + ${FCST_LEN_HRS} hours" "+%Y%m%d%H%M%S" ) + # This horrible syntax $((10#$VARNAME)) is to force bash to treat numbers as decimal instead of + # trying to octal all up in our business + settings=" + &time + start_year = $((10#${CDATE:0:4})), + start_month = $((10#${CDATE:4:2})), + start_day = $((10#${CDATE:6:2})), + start_hour = $((10#${CDATE:8:2})), + start_minute = 00, + start_second = 00, + end_year = $((10#${FCST_END_DATE:0:4})), + end_month = $((10#${FCST_END_DATE:4:2})), + end_day = $((10#${FCST_END_DATE:6:2})), + end_hour = $((10#${FCST_END_DATE:8:2})), + end_minute = $((10#${FCST_END_DATE:10:2})), + end_second = $((10#${FCST_END_DATE:12:2})), + / +" + + echo $settings | uw config realize --update-format nml --input-format nml --output-format nml --input-file "${FIRE_NML_FP}" -o "${FIRE_NML_FN}" + err=$? + if [ $err -ne 0 ]; then + print_err_msg_exit "\ + Call to uw config realize to create ${FIRE_NML_FN} failed. + Parameters passed to this script are: + FIRE_NML_FN = \"${FIRE_NML_FN}\" + FIRE_NML_FP = \"${FIRE_NML_FP}\" + Namelist settings specified on command line: + settings = +$settings" + fi + # Link fire input file + create_symlink_to_file ${FIRE_INPUT_DIR}/geo_em.d01.nc geo_em.d01.nc FALSE +fi +# +#----------------------------------------------------------------------- +# # Call the function that creates the diag_table file within each cycle # directory. 
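A quick aside on the $((10#...)) idiom in the fire-namelist block above: without the explicit base, bash treats a zero-padded substring such as "09" as octal and errors out. A small demonstration with a hypothetical cycle hour:

hh="09"                 # e.g., what ${CDATE:8:2} yields for a 09Z cycle
# echo $(( hh ))        # would fail: "value too great for base" (leading 0 => octal)
echo $(( 10#$hh ))      # -> 9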
# @@ -684,9 +825,9 @@ fi # #----------------------------------------------------------------------- # -if [ "${RUN_ENVIR}" = "nco" ] && [ "${CPL_AQM}" = "TRUE" ]; then +if [ "${RUN_ENVIR}" = "nco" ] && [ $(boolify "${CPL_AQM}") = "TRUE" ]; then # create an intermediate symlink to RESTART - ln_vrfy -sf "${DATA}/RESTART" "${COMIN}/RESTART" + ln -sf "${DATA}/RESTART" "${COMIN}/RESTART" fi # #----------------------------------------------------------------------- @@ -743,17 +884,17 @@ POST_STEP # #----------------------------------------------------------------------- # -if [ "${CPL_AQM}" = "TRUE" ]; then +if [ $(boolify "${CPL_AQM}") = "TRUE" ]; then if [ "${RUN_ENVIR}" = "nco" ]; then if [ -d "${COMIN}/RESTART" ] && [ "$(ls -A ${DATA}/RESTART)" ]; then - rm_vrfy -rf "${COMIN}/RESTART" + rm -rf "${COMIN}/RESTART" fi if [ "$(ls -A ${DATA}/RESTART)" ]; then - cp_vrfy -Rp ${DATA}/RESTART ${COMIN} + cp -Rp ${DATA}/RESTART ${COMIN} fi fi - cp_vrfy -p ${DATA}/${AQM_RC_PRODUCT_FN} ${COMOUT}/${NET}.${cycle}${dot_ensmem}.${AQM_RC_PRODUCT_FN} + cp -p ${DATA}/${AQM_RC_PRODUCT_FN} ${COMOUT}/${NET}.${cycle}${dot_ensmem}.${AQM_RC_PRODUCT_FN} fhr_ct=0 fhr=0 @@ -763,8 +904,8 @@ if [ "${CPL_AQM}" = "TRUE" ]; then source_phy="${DATA}/phyf${fhr_ct}.nc" target_dyn="${COMIN}/${NET}.${cycle}${dot_ensmem}.dyn.f${fhr_ct}.nc" target_phy="${COMIN}/${NET}.${cycle}${dot_ensmem}.phy.f${fhr_ct}.nc" - [ -f ${source_dyn} ] && cp_vrfy -p ${source_dyn} ${target_dyn} - [ -f ${source_phy} ] && cp_vrfy -p ${source_phy} ${target_phy} + [ -f ${source_dyn} ] && cp -p ${source_dyn} ${target_dyn} + [ -f ${source_phy} ] && cp -p ${source_phy} ${target_phy} (( fhr=fhr+1 )) done fi @@ -776,8 +917,8 @@ fi # #----------------------------------------------------------------------- # -if [ ${WRITE_DOPOST} = "TRUE" ]; then - +if [ $(boolify ${WRITE_DOPOST}) = "TRUE" ]; then + yyyymmdd=${PDY} hh=${cyc} fmn="00" @@ -785,9 +926,9 @@ if [ ${WRITE_DOPOST} = "TRUE" ]; then if [ "${RUN_ENVIR}" != "nco" ]; then export COMOUT="${DATA}/postprd" fi - mkdir_vrfy -p "${COMOUT}" + mkdir -p "${COMOUT}" - cd_vrfy ${COMOUT} + cd ${COMOUT} for fhr in $(seq -f "%03g" 0 ${FCST_LEN_HRS}); do @@ -803,7 +944,7 @@ if [ ${WRITE_DOPOST} = "TRUE" ]; then post_fn_suffix="GrbF${fhr_d}" post_renamed_fn_suffix="f${fhr}${post_mn_or_null}.${POST_OUTPUT_DOMAIN_NAME}.grib2" - if [ "${CPL_AQM}" = "TRUE" ]; then + if [ $(boolify "${CPL_AQM}") = "TRUE" ]; then fids=( "cmaq" ) else fids=( "prslev" "natlev" ) @@ -814,23 +955,21 @@ if [ ${WRITE_DOPOST} = "TRUE" ]; then post_orig_fn="${FID}.${post_fn_suffix}" post_renamed_fn="${NET}.${cycle}${dot_ensmem}.${fid}.${post_renamed_fn_suffix}" - mv_vrfy ${DATA}/${post_orig_fn} ${post_renamed_fn} + mv ${DATA}/${post_orig_fn} ${post_renamed_fn} if [ $RUN_ENVIR != "nco" ]; then basetime=$( $DATE_UTIL --date "$yyyymmdd $hh" +%y%j%H%M ) symlink_suffix="_${basetime}f${fhr}${post_mn}" - create_symlink_to_file target="${post_renamed_fn}" \ - symlink="${FID}${symlink_suffix}" \ - relative="TRUE" + create_symlink_to_file ${post_renamed_fn} ${FID}${symlink_suffix} TRUE fi # DBN alert - if [ $SENDDBN = "TRUE" ]; then + if [ "$SENDDBN" = "TRUE" ]; then $DBNROOT/bin/dbn_alert MODEL rrfs_post ${job} ${COMOUT}/${post_renamed_fn} fi done - if [ "${CPL_AQM}" = "TRUE" ]; then - mv_vrfy ${DATA}/dynf${fhr}.nc ${COMIN}/${NET}.${cycle}${dot_ensmem}.dyn.f${fhr}.nc - mv_vrfy ${DATA}/phyf${fhr}.nc ${COMIN}/${NET}.${cycle}${dot_ensmem}.phy.f${fhr}.nc + if [ $(boolify "${CPL_AQM}") = "TRUE" ]; then + mv ${DATA}/dynf${fhr}.nc 
${COMIN}/${NET}.${cycle}${dot_ensmem}.dyn.f${fhr}.nc + mv ${DATA}/phyf${fhr}.nc ${COMIN}/${NET}.${cycle}${dot_ensmem}.phy.f${fhr}.nc fi done diff --git a/scripts/exregional_run_met_genensprod_or_ensemblestat.sh b/scripts/exregional_run_met_genensprod_or_ensemblestat.sh index fe0e119b19..a7ec52ad6a 100755 --- a/scripts/exregional_run_met_genensprod_or_ensemblestat.sh +++ b/scripts/exregional_run_met_genensprod_or_ensemblestat.sh @@ -8,7 +8,11 @@ #----------------------------------------------------------------------- # . $USHdir/source_util_funcs.sh -source_config_for_task "task_run_vx_ensgrid|task_run_post" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow nco global verification cpl_aqm_parm \ + constants fixed_files grid_params \ + task_run_post ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # @@ -18,7 +22,6 @@ source_config_for_task "task_run_vx_ensgrid|task_run_post" ${GLOBAL_VAR_DEFNS_FP # . $USHdir/get_metplus_tool_name.sh . $USHdir/set_vx_params.sh -. $USHdir/set_vx_fhr_list.sh # #----------------------------------------------------------------------- # @@ -104,7 +107,7 @@ FIELDNAME_IN_MET_FILEDIR_NAMES="" set_vx_params \ obtype="${OBTYPE}" \ - field="$VAR" \ + field_group="${FIELD_GROUP}" \ accum_hh="${ACCUM_HH}" \ outvarname_grid_or_point="grid_or_point" \ outvarname_fieldname_in_obs_input="FIELDNAME_IN_OBS_INPUT" \ @@ -114,53 +117,6 @@ set_vx_params \ # #----------------------------------------------------------------------- # -# Set additional field-dependent verification parameters. -# -#----------------------------------------------------------------------- -# -if [ "${grid_or_point}" = "grid" ]; then - - case "${FIELDNAME_IN_MET_FILEDIR_NAMES}" in - "APCP01h") - FIELD_THRESHOLDS="gt0.0, ge0.254, ge0.508, ge2.54" - ;; - "APCP03h") - FIELD_THRESHOLDS="gt0.0, ge0.508, ge2.54, ge6.350" - ;; - "APCP06h") - FIELD_THRESHOLDS="gt0.0, ge2.54, ge6.350, ge12.700" - ;; - "APCP24h") - FIELD_THRESHOLDS="gt0.0, ge6.350, ge12.700, ge25.400" - ;; - "ASNOW06h") - FIELD_THRESHOLDS="gt0.0, ge2.54, ge5.08, ge10.16, ge20.32" - ;; - "ASNOW24h") - FIELD_THRESHOLDS="gt0.0, ge2.54, ge5.08, ge10.16, ge20.32" - ;; - "REFC") - FIELD_THRESHOLDS="ge20, ge30, ge40, ge50" - ;; - "RETOP") - FIELD_THRESHOLDS="ge20, ge30, ge40, ge50" - ;; - *) - print_err_msg_exit "\ -Verification parameters have not been defined for this field -(FIELDNAME_IN_MET_FILEDIR_NAMES): - FIELDNAME_IN_MET_FILEDIR_NAMES = \"${FIELDNAME_IN_MET_FILEDIR_NAMES}\"" - ;; - esac - -elif [ "${grid_or_point}" = "point" ]; then - - FIELD_THRESHOLDS="" - -fi -# -#----------------------------------------------------------------------- -# # Set paths and file templates for input to and output from the MET/ # METplus tool to be run as well as other file/directory parameters. 
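The ensemble-member loop below builds FCST_INPUT_FN_TEMPLATE as a comma-separated list by expanding a stored template once per member with "eval echo". A self-contained sketch of that pattern, with illustrative names only:

template='${ensmem_name}/metprd/PcpCombine_fcst/out.nc'   # literal ${...}, expanded later
FCST_INPUT_FN_TEMPLATE=""
for ensmem_name in mem001 mem002; do
  if [ -z "${FCST_INPUT_FN_TEMPLATE}" ]; then
    FCST_INPUT_FN_TEMPLATE="$(eval echo ${template})"
  else
    FCST_INPUT_FN_TEMPLATE="${FCST_INPUT_FN_TEMPLATE}, $(eval echo ${template})"
  fi
done
echo "${FCST_INPUT_FN_TEMPLATE}"
# -> mem001/metprd/PcpCombine_fcst/out.nc, mem002/metprd/PcpCombine_fcst/out.nc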
# @@ -178,23 +134,23 @@ if [ "${grid_or_point}" = "grid" ]; then case "${FIELDNAME_IN_MET_FILEDIR_NAMES}" in "APCP"*) - OBS_INPUT_DIR="${vx_output_basedir}/metprd/PcpCombine_obs" + OBS_INPUT_DIR="${vx_output_basedir}${slash_cdate_or_null}/obs/metprd/PcpCombine_obs" OBS_INPUT_FN_TEMPLATE="${OBS_CCPA_APCP_FN_TEMPLATE_PCPCOMBINE_OUTPUT}" FCST_INPUT_DIR="${vx_output_basedir}" ;; "ASNOW"*) - OBS_INPUT_DIR="${OBS_DIR}" - OBS_INPUT_FN_TEMPLATE="${OBS_NOHRSC_ASNOW_FN_TEMPLATE}" + OBS_INPUT_DIR="${vx_output_basedir}${slash_cdate_or_null}/obs/metprd/PcpCombine_obs" + OBS_INPUT_FN_TEMPLATE="${OBS_NOHRSC_ASNOW_FN_TEMPLATE_PCPCOMBINE_OUTPUT}" FCST_INPUT_DIR="${vx_output_basedir}" ;; "REFC") OBS_INPUT_DIR="${OBS_DIR}" - OBS_INPUT_FN_TEMPLATE="${OBS_MRMS_REFC_FN_TEMPLATE}" + OBS_INPUT_FN_TEMPLATE="${OBS_MRMS_FN_TEMPLATES[1]}" FCST_INPUT_DIR="${vx_fcst_input_basedir}" ;; "RETOP") OBS_INPUT_DIR="${OBS_DIR}" - OBS_INPUT_FN_TEMPLATE="${OBS_MRMS_RETOP_FN_TEMPLATE}" + OBS_INPUT_FN_TEMPLATE="${OBS_MRMS_FN_TEMPLATES[3]}" FCST_INPUT_DIR="${vx_fcst_input_basedir}" ;; esac @@ -202,7 +158,7 @@ elif [ "${grid_or_point}" = "point" ]; then OBS_INPUT_DIR="${vx_output_basedir}/metprd/Pb2nc_obs" - OBS_INPUT_FN_TEMPLATE="${OBS_NDAS_ADPSFCorADPUPA_FN_TEMPLATE_PB2NC_OUTPUT}" + OBS_INPUT_FN_TEMPLATE="${OBS_NDAS_SFCandUPA_FN_TEMPLATE_PB2NC_OUTPUT}" FCST_INPUT_DIR="${vx_fcst_input_basedir}" fi @@ -227,19 +183,16 @@ for (( i=0; i<${NUM_ENS_MEMBERS}; i++ )); do time_lag=$( bc -l <<< "${ENS_TIME_LAG_HRS[$i]}*${SECS_PER_HOUR}" ) - if [ "${VAR}" = "APCP" ] || [ "${VAR}" = "ASNOW" ]; then + if [ "${FIELD_GROUP}" = "APCP" ] || [ "${FIELD_GROUP}" = "ASNOW" ]; then template="${cdate_ensmem_subdir_or_null:+${cdate_ensmem_subdir_or_null}/}metprd/PcpCombine_fcst/${FCST_FN_TEMPLATE_PCPCOMBINE_OUTPUT}" else template="${FCST_SUBDIR_TEMPLATE}/${FCST_FN_TEMPLATE}" fi - slash_ensmem_subdir_or_null="/${ensmem_name}" if [ -z "${FCST_INPUT_FN_TEMPLATE}" ]; then FCST_INPUT_FN_TEMPLATE="$(eval echo ${template})" else - FCST_INPUT_FN_TEMPLATE="\ -${FCST_INPUT_FN_TEMPLATE}, -$(eval echo ${template})" + FCST_INPUT_FN_TEMPLATE="${FCST_INPUT_FN_TEMPLATE}, $(eval echo ${template})" fi done @@ -250,20 +203,54 @@ STAGING_DIR="${OUTPUT_BASE}/stage/${FIELDNAME_IN_MET_FILEDIR_NAMES}" # #----------------------------------------------------------------------- # -# Set the array of forecast hours for which to run the MET/METplus tool. +# Generate the list of forecast hours for which to run the specified +# METplus tool. +# +# If running the GenEnsProd tool, we set this to the list of forecast +# output times without filtering for the existence of observation files +# corresponding to those times. This is because GenEnsProd operates +# only on forecasts; it does not need observations. +# +# On the other hand, if running the EnsembleStat tool, we set the list of +# forecast hours to a set of times that takes into consideration whether +# or not observations exist. We do this by starting with the full list +# of forecast times for which there is forecast output and then removing +# from that list any times for which there are no corresponding observations.
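# For example, for a 12-hour forecast verifying 6-hourly accumulated
# precipitation (OBTYPE="CCPA", ACCUM_HH="06"), the case statement below
# yields vx_intvl=6 and vx_hr_start=6, so the candidate lead hours are 6
# and 12. GenEnsProd keeps that list as is (--skip_check_files), while
# EnsembleStat checks for the corresponding observation files and removes
# hours whose files are missing (presumably erroring out once more than
# NUM_MISSING_OBS_FILES_MAX files are absent).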
# #----------------------------------------------------------------------- # -set_vx_fhr_list \ - cdate="${CDATE}" \ - fcst_len_hrs="${FCST_LEN_HRS}" \ - field="$VAR" \ - accum_hh="${ACCUM_HH}" \ - base_dir="${OBS_INPUT_DIR}" \ - fn_template="${OBS_INPUT_FN_TEMPLATE}" \ - check_accum_contrib_files="FALSE" \ - num_missing_files_max="${NUM_MISSING_OBS_FILES_MAX}" \ - outvarname_fhr_list="FHR_LIST" +case "$OBTYPE" in + "CCPA"|"NOHRSC") + vx_intvl="$((10#${ACCUM_HH}))" + vx_hr_start="${vx_intvl}" + ;; + *) + vx_intvl="$((${VX_FCST_OUTPUT_INTVL_HRS}))" + vx_hr_start="0" + ;; +esac +vx_hr_end="${FCST_LEN_HRS}" + +if [ "${MetplusToolName}" = "GenEnsProd" ]; then + VX_LEADHR_LIST=$( python3 $USHdir/set_leadhrs.py \ + --lhr_min="${vx_hr_start}" \ + --lhr_max="${vx_hr_end}" \ + --lhr_intvl="${vx_intvl}" \ + --skip_check_files ) || \ + print_err_msg_exit "Call to set_leadhrs.py failed with return code: $?" + +elif [ "${MetplusToolName}" = "EnsembleStat" ]; then + VX_LEADHR_LIST=$( python3 $USHdir/set_leadhrs.py \ + --date_init="${CDATE}" \ + --lhr_min="${vx_hr_start}" \ + --lhr_max="${vx_hr_end}" \ + --lhr_intvl="${vx_intvl}" \ + --base_dir="${OBS_INPUT_DIR}" \ + --fn_template="${OBS_INPUT_FN_TEMPLATE}" \ + --num_missing_files_max="${NUM_MISSING_OBS_FILES_MAX}" \ + --time_lag="${time_lag%.*}" ) || \ + print_err_msg_exit "Call to set_leadhrs.py failed with return code: $?" +fi # #----------------------------------------------------------------------- # @@ -271,7 +258,7 @@ set_vx_fhr_list \ # #----------------------------------------------------------------------- # -mkdir_vrfy -p "${OUTPUT_DIR}" +mkdir -p "${OUTPUT_DIR}" # #----------------------------------------------------------------------- # @@ -297,15 +284,15 @@ export LOGDIR # #----------------------------------------------------------------------- # -# Do not run METplus if there isn't at least one valid forecast hour for -# which to run it. +# Do not run METplus if there isn't at least one lead hour for which to +# run it. # #----------------------------------------------------------------------- # -if [ -z "${FHR_LIST}" ]; then +if [ -z "${VX_LEADHR_LIST}" ]; then print_err_msg_exit "\ -The list of forecast hours for which to run METplus is empty: - FHR_LIST = [${FHR_LIST}]" +The list of lead hours for which to run METplus is empty: + VX_LEADHR_LIST = [${VX_LEADHR_LIST}]" fi # #----------------------------------------------------------------------- @@ -318,16 +305,28 @@ fi # # First, set the base file names. # -metplus_config_tmpl_fn="${VAR}" -metplus_config_tmpl_fn="${MetplusToolName}_${metplus_config_tmpl_fn}" -metplus_config_fn="${MetplusToolName}_${FIELDNAME_IN_MET_FILEDIR_NAMES}" -metplus_log_fn="${metplus_config_fn}" +metplus_config_tmpl_bn="${MetplusToolName}" +metplus_config_bn="${MetplusToolName}_${FIELDNAME_IN_MET_FILEDIR_NAMES}" +metplus_log_bn="${metplus_config_bn}_$CDATE" # # Add prefixes and suffixes (extensions) to the base file names. # -metplus_config_tmpl_fn="${metplus_config_tmpl_fn}.conf" -metplus_config_fn="${metplus_config_fn}.conf" -metplus_log_fn="metplus.log.${metplus_log_fn}" +metplus_config_tmpl_fn="${metplus_config_tmpl_bn}.conf" +metplus_config_fn="${metplus_config_bn}.conf" +metplus_log_fn="metplus.log.${metplus_log_bn}" +# +#----------------------------------------------------------------------- +# +# Load the yaml-like file containing the configuration for ensemble +# verification. 
+# +#----------------------------------------------------------------------- +# +vx_config_fp="${METPLUS_CONF}/${VX_CONFIG_ENS_FN}" +vx_config_dict=$(<"${vx_config_fp}") +# Indent each line of vx_config_dict so that it is aligned properly when +# included in the yaml-formatted variable "settings" below. +vx_config_dict=$( printf "%s\n" "${vx_config_dict}" | sed 's/^/ /' ) # #----------------------------------------------------------------------- # @@ -348,59 +347,65 @@ settings="\ # # MET/METplus information. # - 'metplus_tool_name': '${metplus_tool_name}' - 'MetplusToolName': '${MetplusToolName}' - 'METPLUS_TOOL_NAME': '${METPLUS_TOOL_NAME}' - 'metplus_verbosity_level': '${METPLUS_VERBOSITY_LEVEL}' +'metplus_tool_name': '${metplus_tool_name}' +'MetplusToolName': '${MetplusToolName}' +'METPLUS_TOOL_NAME': '${METPLUS_TOOL_NAME}' +'metplus_verbosity_level': '${METPLUS_VERBOSITY_LEVEL}' # # Date and forecast hour information. # - 'cdate': '$CDATE' - 'fhr_list': '${FHR_LIST}' +'cdate': '$CDATE' +'vx_leadhr_list': '${VX_LEADHR_LIST}' # # Input and output directory/file information. # - 'metplus_config_fn': '${metplus_config_fn:-}' - 'metplus_log_fn': '${metplus_log_fn:-}' - 'obs_input_dir': '${OBS_INPUT_DIR:-}' - 'obs_input_fn_template': '${OBS_INPUT_FN_TEMPLATE:-}' - 'fcst_input_dir': '${FCST_INPUT_DIR:-}' - 'fcst_input_fn_template': '${FCST_INPUT_FN_TEMPLATE:-}' - 'output_base': '${OUTPUT_BASE}' - 'output_dir': '${OUTPUT_DIR}' - 'output_fn_template': '${OUTPUT_FN_TEMPLATE:-}' - 'staging_dir': '${STAGING_DIR}' - 'vx_fcst_model_name': '${VX_FCST_MODEL_NAME}' +'metplus_config_fn': '${metplus_config_fn:-}' +'metplus_log_fn': '${metplus_log_fn:-}' +'obs_input_dir': '${OBS_INPUT_DIR:-}' +'obs_input_fn_template': '${OBS_INPUT_FN_TEMPLATE:-}' +'fcst_input_dir': '${FCST_INPUT_DIR:-}' +'fcst_input_fn_template': '${FCST_INPUT_FN_TEMPLATE:-}' +'output_base': '${OUTPUT_BASE}' +'output_dir': '${OUTPUT_DIR}' +'output_fn_template': '${OUTPUT_FN_TEMPLATE:-}' +'staging_dir': '${STAGING_DIR}' +'vx_fcst_model_name': '${VX_FCST_MODEL_NAME}' # # Ensemble and member-specific information. # - 'num_ens_members': '${NUM_ENS_MEMBERS}' - 'ensmem_name': '${ensmem_name:-}' - 'time_lag': '${time_lag:-}' +'num_ens_members': '${NUM_ENS_MEMBERS}' +'ensmem_name': '${ensmem_name:-}' +'time_lag': '${time_lag:-}' # # Field information. # - 'fieldname_in_obs_input': '${FIELDNAME_IN_OBS_INPUT}' - 'fieldname_in_fcst_input': '${FIELDNAME_IN_FCST_INPUT}' - 'fieldname_in_met_output': '${FIELDNAME_IN_MET_OUTPUT}' - 'fieldname_in_met_filedir_names': '${FIELDNAME_IN_MET_FILEDIR_NAMES}' - 'obtype': '${OBTYPE}' - 'accum_hh': '${ACCUM_HH:-}' - 'accum_no_pad': '${ACCUM_NO_PAD:-}' - 'field_thresholds': '${FIELD_THRESHOLDS:-}' +'fieldname_in_obs_input': '${FIELDNAME_IN_OBS_INPUT}' +'fieldname_in_fcst_input': '${FIELDNAME_IN_FCST_INPUT}' +'fieldname_in_met_output': '${FIELDNAME_IN_MET_OUTPUT}' +'fieldname_in_met_filedir_names': '${FIELDNAME_IN_MET_FILEDIR_NAMES}' +'obtype': '${OBTYPE}' +'accum_hh': '${ACCUM_HH:-}' +'accum_no_pad': '${ACCUM_NO_PAD:-}' +'metplus_templates_dir': '${METPLUS_CONF:-}' +'input_field_group': '${FIELD_GROUP:-}' +'input_level_fcst': '${FCST_LEVEL:-}' +'input_thresh_fcst': '${FCST_THRESH:-}' +# +# Verification configuration dictionary. 
+# +'vx_config_dict': +${vx_config_dict:-} " # Render the template to create a METplus configuration file tmpfile=$( $READLINK -f "$(mktemp ./met_plus_settings.XXXXXX.yaml)") -cat > $tmpfile << EOF -$settings -EOF - +printf "%s" "$settings" > "$tmpfile" uw template render \ -i ${metplus_config_tmpl_fp} \ -o ${metplus_config_fp} \ --verbose \ - --values-file "${tmpfile}" + --values-file "${tmpfile}" \ + --search-path "/" err=$? rm $tmpfile diff --git a/scripts/exregional_run_met_gridstat_or_pointstat_vx.sh b/scripts/exregional_run_met_gridstat_or_pointstat_vx.sh index 7eb1ce4605..0531d21755 100755 --- a/scripts/exregional_run_met_gridstat_or_pointstat_vx.sh +++ b/scripts/exregional_run_met_gridstat_or_pointstat_vx.sh @@ -8,7 +8,11 @@ #----------------------------------------------------------------------- # . $USHdir/source_util_funcs.sh -source_config_for_task "task_run_vx_gridstat|task_run_vx_pointstat|task_run_post" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow nco global verification cpl_aqm_parm \ + constants fixed_files grid_params \ + task_run_post ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # @@ -18,7 +22,6 @@ source_config_for_task "task_run_vx_gridstat|task_run_vx_pointstat|task_run_post # . $USHdir/get_metplus_tool_name.sh . $USHdir/set_vx_params.sh -. $USHdir/set_vx_fhr_list.sh # #----------------------------------------------------------------------- # @@ -66,8 +69,8 @@ Entering script: \"${scrfunc_fn}\" In directory: \"${scrfunc_dir}\" This is the ex-script for the task that runs the METplus ${MetplusToolName} -tool to perform deterministic verification of the specified field (VAR) -for a single forecast. +tool to perform deterministic verification of the specified field group +(FIELD_GROUP) for a single forecast. ========================================================================" # #----------------------------------------------------------------------- @@ -91,10 +94,12 @@ FIELDNAME_IN_FCST_INPUT="" FIELDNAME_IN_MET_OUTPUT="" FIELDNAME_IN_MET_FILEDIR_NAMES="" +# Note that ACCUM_HH will not be defined for the REFC, RETOP, SFC, and +# UPA field groups. set_vx_params \ obtype="${OBTYPE}" \ - field="$VAR" \ - accum_hh="${ACCUM_HH}" \ + field_group="${FIELD_GROUP}" \ + accum_hh="${ACCUM_HH:-}" \ outvarname_grid_or_point="grid_or_point" \ outvarname_fieldname_in_obs_input="FIELDNAME_IN_OBS_INPUT" \ outvarname_fieldname_in_fcst_input="FIELDNAME_IN_FCST_INPUT" \ @@ -118,60 +123,13 @@ set_vx_params \ #----------------------------------------------------------------------- # i="0" -if [ "${DO_ENSEMBLE}" = "TRUE" ]; then +if [ $(boolify "${DO_ENSEMBLE}") = "TRUE" ]; then i=$( bc -l <<< "${ENSMEM_INDX}-1" ) fi time_lag=$( bc -l <<< "${ENS_TIME_LAG_HRS[$i]}*${SECS_PER_HOUR}" ) # #----------------------------------------------------------------------- # -# Set additional field-dependent verification parameters. 
-# -#----------------------------------------------------------------------- -# -if [ "${grid_or_point}" = "grid" ]; then - - case "${FIELDNAME_IN_MET_FILEDIR_NAMES}" in - "APCP01h") - FIELD_THRESHOLDS="gt0.0, ge0.254, ge0.508, ge1.27, ge2.54" - ;; - "APCP03h") - FIELD_THRESHOLDS="gt0.0, ge0.254, ge0.508, ge1.27, ge2.54, ge3.810, ge6.350" - ;; - "APCP06h") - FIELD_THRESHOLDS="gt0.0, ge0.254, ge0.508, ge1.27, ge2.54, ge3.810, ge6.350, ge8.890, ge12.700" - ;; - "APCP24h") - FIELD_THRESHOLDS="gt0.0, ge0.254, ge0.508, ge1.27, ge2.54, ge3.810, ge6.350, ge8.890, ge12.700, ge25.400" - ;; - "ASNOW06h") - FIELD_THRESHOLDS="gt0.0, ge2.54, ge5.08, ge10.16, ge20.32" - ;; - "ASNOW24h") - FIELD_THRESHOLDS="gt0.0, ge2.54, ge5.08, ge10.16, ge20.32" - ;; - "REFC") - FIELD_THRESHOLDS="ge20, ge30, ge40, ge50" - ;; - "RETOP") - FIELD_THRESHOLDS="ge20, ge30, ge40, ge50" - ;; - *) - print_err_msg_exit "\ -Verification parameters have not been defined for this field -(FIELDNAME_IN_MET_FILEDIR_NAMES): - FIELDNAME_IN_MET_FILEDIR_NAMES = \"${FIELDNAME_IN_MET_FILEDIR_NAMES}\"" - ;; - esac - -elif [ "${grid_or_point}" = "point" ]; then - - FIELD_THRESHOLDS="" - -fi -# -#----------------------------------------------------------------------- -# # Set paths and file templates for input to and output from the MET/ # METplus tool to be run as well as other file/directory parameters. # @@ -179,11 +137,13 @@ fi # vx_fcst_input_basedir=$( eval echo "${VX_FCST_INPUT_BASEDIR}" ) vx_output_basedir=$( eval echo "${VX_OUTPUT_BASEDIR}" ) + ensmem_indx=$(printf "%0${VX_NDIGITS_ENSMEM_NAMES}d" $(( 10#${ENSMEM_INDX}))) ensmem_name="mem${ensmem_indx}" if [ "${RUN_ENVIR}" = "nco" ]; then slash_cdate_or_null="" slash_ensmem_subdir_or_null="" + slash_obs_or_null="" else slash_cdate_or_null="/${CDATE}" # @@ -198,10 +158,12 @@ else # or, better, just remove this variale and code "/${ensmem_name}" where # slash_ensmem_subdir_or_null currently appears below. 
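A quick check of the zero-padded member-name construction above, assuming for illustration that VX_NDIGITS_ENSMEM_NAMES is 3:

ENSMEM_INDX="08"
ensmem_indx=$(printf "%03d" $(( 10#${ENSMEM_INDX} )))
echo "mem${ensmem_indx}"   # -> mem008; the 10# keeps "08" from being read as octal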
# - if [ "${DO_ENSEMBLE}" = "TRUE" ]; then + if [ $(boolify "${DO_ENSEMBLE}") = "TRUE" ]; then slash_ensmem_subdir_or_null="/${ensmem_name}" + slash_obs_or_null="/obs" else slash_ensmem_subdir_or_null="" + slash_obs_or_null="" fi fi @@ -209,26 +171,26 @@ if [ "${grid_or_point}" = "grid" ]; then case "${FIELDNAME_IN_MET_FILEDIR_NAMES}" in "APCP"*) - OBS_INPUT_DIR="${vx_output_basedir}/metprd/PcpCombine_obs" + OBS_INPUT_DIR="${vx_output_basedir}${slash_cdate_or_null}${slash_obs_or_null}/metprd/PcpCombine_obs" OBS_INPUT_FN_TEMPLATE="${OBS_CCPA_APCP_FN_TEMPLATE_PCPCOMBINE_OUTPUT}" - FCST_INPUT_DIR="${vx_output_basedir}${slash_cdate_or_null}/${slash_ensmem_subdir_or_null}/metprd/PcpCombine_fcst" + FCST_INPUT_DIR="${vx_output_basedir}${slash_cdate_or_null}${slash_ensmem_subdir_or_null}/metprd/PcpCombine_fcst" FCST_INPUT_FN_TEMPLATE="${FCST_FN_TEMPLATE_PCPCOMBINE_OUTPUT}" ;; "ASNOW"*) - OBS_INPUT_DIR="${OBS_DIR}" - OBS_INPUT_FN_TEMPLATE="${OBS_NOHRSC_ASNOW_FN_TEMPLATE}" - FCST_INPUT_DIR="${vx_output_basedir}${slash_cdate_or_null}/${slash_ensmem_subdir_or_null}/metprd/PcpCombine_fcst" + OBS_INPUT_DIR="${vx_output_basedir}${slash_cdate_or_null}${slash_obs_or_null}/metprd/PcpCombine_obs" + OBS_INPUT_FN_TEMPLATE="${OBS_NOHRSC_ASNOW_FN_TEMPLATE_PCPCOMBINE_OUTPUT}" + FCST_INPUT_DIR="${vx_output_basedir}${slash_cdate_or_null}${slash_ensmem_subdir_or_null}/metprd/PcpCombine_fcst" FCST_INPUT_FN_TEMPLATE="${FCST_FN_TEMPLATE_PCPCOMBINE_OUTPUT}" ;; "REFC") OBS_INPUT_DIR="${OBS_DIR}" - OBS_INPUT_FN_TEMPLATE="${OBS_MRMS_REFC_FN_TEMPLATE}" + OBS_INPUT_FN_TEMPLATE="${OBS_MRMS_FN_TEMPLATES[1]}" FCST_INPUT_DIR="${vx_fcst_input_basedir}" FCST_INPUT_FN_TEMPLATE="${FCST_SUBDIR_TEMPLATE:+${FCST_SUBDIR_TEMPLATE}/}${FCST_FN_TEMPLATE}" ;; "RETOP") OBS_INPUT_DIR="${OBS_DIR}" - OBS_INPUT_FN_TEMPLATE="${OBS_MRMS_RETOP_FN_TEMPLATE}" + OBS_INPUT_FN_TEMPLATE="${OBS_MRMS_FN_TEMPLATES[3]}" FCST_INPUT_DIR="${vx_fcst_input_basedir}" FCST_INPUT_FN_TEMPLATE="${FCST_SUBDIR_TEMPLATE:+${FCST_SUBDIR_TEMPLATE}/}${FCST_FN_TEMPLATE}" ;; @@ -237,7 +199,7 @@ if [ "${grid_or_point}" = "grid" ]; then elif [ "${grid_or_point}" = "point" ]; then OBS_INPUT_DIR="${vx_output_basedir}/metprd/Pb2nc_obs" - OBS_INPUT_FN_TEMPLATE="${OBS_NDAS_ADPSFCorADPUPA_FN_TEMPLATE_PB2NC_OUTPUT}" + OBS_INPUT_FN_TEMPLATE="${OBS_NDAS_SFCandUPA_FN_TEMPLATE_PB2NC_OUTPUT}" FCST_INPUT_DIR="${vx_fcst_input_basedir}" FCST_INPUT_FN_TEMPLATE="${FCST_SUBDIR_TEMPLATE:+${FCST_SUBDIR_TEMPLATE}/}${FCST_FN_TEMPLATE}" @@ -245,26 +207,41 @@ fi OBS_INPUT_FN_TEMPLATE=$( eval echo ${OBS_INPUT_FN_TEMPLATE} ) FCST_INPUT_FN_TEMPLATE=$( eval echo ${FCST_INPUT_FN_TEMPLATE} ) -OUTPUT_BASE="${vx_output_basedir}${slash_cdate_or_null}/${slash_ensmem_subdir_or_null}" +OUTPUT_BASE="${vx_output_basedir}${slash_cdate_or_null}${slash_ensmem_subdir_or_null}" OUTPUT_DIR="${OUTPUT_BASE}/metprd/${MetplusToolName}" STAGING_DIR="${OUTPUT_BASE}/stage/${FIELDNAME_IN_MET_FILEDIR_NAMES}" # #----------------------------------------------------------------------- # -# Set the array of forecast hours for which to run the MET/METplus tool. +# Set the lead hours for which to run the MET/METplus tool. This is done +# by starting with the full list of lead hours for which we expect to +# find forecast output and then removing from that list any hours for +# which there is no corresponding observation data. 
# #----------------------------------------------------------------------- # -set_vx_fhr_list \ - cdate="${CDATE}" \ - fcst_len_hrs="${FCST_LEN_HRS}" \ - field="$VAR" \ - accum_hh="${ACCUM_HH}" \ - base_dir="${OBS_INPUT_DIR}" \ - fn_template="${OBS_INPUT_FN_TEMPLATE}" \ - check_accum_contrib_files="FALSE" \ - num_missing_files_max="${NUM_MISSING_OBS_FILES_MAX}" \ - outvarname_fhr_list="FHR_LIST" +case "$OBTYPE" in + "CCPA"|"NOHRSC") + vx_intvl="$((10#${ACCUM_HH}))" + vx_hr_start="${vx_intvl}" + ;; + *) + vx_intvl="$((${VX_FCST_OUTPUT_INTVL_HRS}))" + vx_hr_start="0" + ;; +esac +vx_hr_end="${FCST_LEN_HRS}" + +VX_LEADHR_LIST=$( python3 $USHdir/set_leadhrs.py \ + --date_init="${CDATE}" \ + --lhr_min="${vx_hr_start}" \ + --lhr_max="${vx_hr_end}" \ + --lhr_intvl="${vx_intvl}" \ + --base_dir="${OBS_INPUT_DIR}" \ + --fn_template="${OBS_INPUT_FN_TEMPLATE}" \ + --num_missing_files_max="${NUM_MISSING_OBS_FILES_MAX}" \ + --time_lag="${time_lag%.*}") || \ + print_err_msg_exit "Call to set_leadhrs.py failed with return code: $?" # #----------------------------------------------------------------------- # @@ -272,7 +249,7 @@ set_vx_fhr_list \ # #----------------------------------------------------------------------- # -mkdir_vrfy -p "${OUTPUT_DIR}" +mkdir -p "${OUTPUT_DIR}" # #----------------------------------------------------------------------- # @@ -298,15 +275,15 @@ export LOGDIR # #----------------------------------------------------------------------- # -# Do not run METplus if there isn't at least one valid forecast hour for -# which to run it. +# Do not run METplus if there isn't at least one lead hour for which to +# run it. # #----------------------------------------------------------------------- # -if [ -z "${FHR_LIST}" ]; then +if [ -z "${VX_LEADHR_LIST}" ]; then print_err_msg_exit "\ -The list of forecast hours for which to run METplus is empty: - FHR_LIST = [${FHR_LIST}]" +The list of lead hours for which to run METplus is empty: + VX_LEADHR_LIST = [${VX_LEADHR_LIST}]" fi # #----------------------------------------------------------------------- @@ -319,16 +296,28 @@ fi # # First, set the base file names. # -metplus_config_tmpl_fn="${VAR}" -metplus_config_tmpl_fn="${MetplusToolName}_${metplus_config_tmpl_fn}" -metplus_config_fn="${MetplusToolName}_${FIELDNAME_IN_MET_FILEDIR_NAMES}_${ensmem_name}" -metplus_log_fn="${metplus_config_fn}" +metplus_config_tmpl_bn="GridStat_or_PointStat" +metplus_config_bn="${MetplusToolName}_${FIELDNAME_IN_MET_FILEDIR_NAMES}_${ensmem_name}" +metplus_log_bn="${metplus_config_bn}_$CDATE" # # Add prefixes and suffixes (extensions) to the base file names. # -metplus_config_tmpl_fn="${metplus_config_tmpl_fn}.conf" -metplus_config_fn="${metplus_config_fn}.conf" -metplus_log_fn="metplus.log.${metplus_log_fn}" +metplus_config_tmpl_fn="${metplus_config_tmpl_bn}.conf" +metplus_config_fn="${metplus_config_bn}.conf" +metplus_log_fn="metplus.log.${metplus_log_bn}" +# +#----------------------------------------------------------------------- +# +# Load the yaml-like file containing the configuration for deterministic +# verification. +# +#----------------------------------------------------------------------- +# +vx_config_fp="${METPLUS_CONF}/${VX_CONFIG_DET_FN}" +vx_config_dict=$(<"${vx_config_fp}") +# Indent each line of vx_config_dict so that it is aligned properly when +# included in the yaml-formatted variable "settings" below. 
+vx_config_dict=$( printf "%s\n" "${vx_config_dict}" | sed 's/^/ /' ) # #----------------------------------------------------------------------- # @@ -349,59 +338,65 @@ settings="\ # # MET/METplus information. # - 'metplus_tool_name': '${metplus_tool_name}' - 'MetplusToolName': '${MetplusToolName}' - 'METPLUS_TOOL_NAME': '${METPLUS_TOOL_NAME}' - 'metplus_verbosity_level': '${METPLUS_VERBOSITY_LEVEL}' +'metplus_tool_name': '${metplus_tool_name}' +'MetplusToolName': '${MetplusToolName}' +'METPLUS_TOOL_NAME': '${METPLUS_TOOL_NAME}' +'metplus_verbosity_level': '${METPLUS_VERBOSITY_LEVEL}' # # Date and forecast hour information. # - 'cdate': '$CDATE' - 'fhr_list': '${FHR_LIST}' +'cdate': '$CDATE' +'vx_leadhr_list': '${VX_LEADHR_LIST}' # # Input and output directory/file information. # - 'metplus_config_fn': '${metplus_config_fn:-}' - 'metplus_log_fn': '${metplus_log_fn:-}' - 'obs_input_dir': '${OBS_INPUT_DIR:-}' - 'obs_input_fn_template': '${OBS_INPUT_FN_TEMPLATE:-}' - 'fcst_input_dir': '${FCST_INPUT_DIR:-}' - 'fcst_input_fn_template': '${FCST_INPUT_FN_TEMPLATE:-}' - 'output_base': '${OUTPUT_BASE}' - 'output_dir': '${OUTPUT_DIR}' - 'output_fn_template': '${OUTPUT_FN_TEMPLATE:-}' - 'staging_dir': '${STAGING_DIR}' - 'vx_fcst_model_name': '${VX_FCST_MODEL_NAME}' +'metplus_config_fn': '${metplus_config_fn:-}' +'metplus_log_fn': '${metplus_log_fn:-}' +'obs_input_dir': '${OBS_INPUT_DIR:-}' +'obs_input_fn_template': '${OBS_INPUT_FN_TEMPLATE:-}' +'fcst_input_dir': '${FCST_INPUT_DIR:-}' +'fcst_input_fn_template': '${FCST_INPUT_FN_TEMPLATE:-}' +'output_base': '${OUTPUT_BASE}' +'output_dir': '${OUTPUT_DIR}' +'output_fn_template': '${OUTPUT_FN_TEMPLATE:-}' +'staging_dir': '${STAGING_DIR}' +'vx_fcst_model_name': '${VX_FCST_MODEL_NAME}' # # Ensemble and member-specific information. # - 'num_ens_members': '${NUM_ENS_MEMBERS}' - 'ensmem_name': '${ensmem_name:-}' - 'time_lag': '${time_lag:-}' +'num_ens_members': '${NUM_ENS_MEMBERS}' +'ensmem_name': '${ensmem_name:-}' +'time_lag': '${time_lag:-}' # # Field information. # - 'fieldname_in_obs_input': '${FIELDNAME_IN_OBS_INPUT}' - 'fieldname_in_fcst_input': '${FIELDNAME_IN_FCST_INPUT}' - 'fieldname_in_met_output': '${FIELDNAME_IN_MET_OUTPUT}' - 'fieldname_in_met_filedir_names': '${FIELDNAME_IN_MET_FILEDIR_NAMES}' - 'obtype': '${OBTYPE}' - 'accum_hh': '${ACCUM_HH:-}' - 'accum_no_pad': '${ACCUM_NO_PAD:-}' - 'field_thresholds': '${FIELD_THRESHOLDS:-}' +'fieldname_in_obs_input': '${FIELDNAME_IN_OBS_INPUT}' +'fieldname_in_fcst_input': '${FIELDNAME_IN_FCST_INPUT}' +'fieldname_in_met_output': '${FIELDNAME_IN_MET_OUTPUT}' +'fieldname_in_met_filedir_names': '${FIELDNAME_IN_MET_FILEDIR_NAMES}' +'obtype': '${OBTYPE}' +'accum_hh': '${ACCUM_HH:-}' +'accum_no_pad': '${ACCUM_NO_PAD:-}' +'metplus_templates_dir': '${METPLUS_CONF:-}' +'input_field_group': '${FIELD_GROUP:-}' +'input_level_fcst': '${FCST_LEVEL:-}' +'input_thresh_fcst': '${FCST_THRESH:-}' +# +# Verification configuration dictionary. +# +'vx_config_dict': +${vx_config_dict:-} " # Render the template to create a METplus configuration file tmpfile=$( $READLINK -f "$(mktemp ./met_plus_settings.XXXXXX.yaml)") -cat > $tmpfile << EOF -$settings -EOF - +printf "%s" "$settings" > "$tmpfile" uw template render \ -i ${metplus_config_tmpl_fp} \ -o ${metplus_config_fp} \ --verbose \ - --values-file "${tmpfile}" + --values-file "${tmpfile}" \ + --search-path "/" err=$? 
rm $tmpfile @@ -415,7 +410,6 @@ $settings" print_err_msg_exit "${message_txt}" fi fi - # #----------------------------------------------------------------------- # diff --git a/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensmean.sh b/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensmean.sh index 458dcec33f..5ecc588316 100755 --- a/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensmean.sh +++ b/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensmean.sh @@ -8,7 +8,11 @@ #----------------------------------------------------------------------- # . $USHdir/source_util_funcs.sh -source_config_for_task "task_run_vx_ensgrid_mean|task_run_vx_enspoint_mean|task_run_post" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow nco global verification cpl_aqm_parm \ + constants fixed_files grid_params \ + task_run_post ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # @@ -18,7 +22,6 @@ source_config_for_task "task_run_vx_ensgrid_mean|task_run_vx_enspoint_mean|task_ # . $USHdir/get_metplus_tool_name.sh . $USHdir/set_vx_params.sh -. $USHdir/set_vx_fhr_list.sh # #----------------------------------------------------------------------- # @@ -66,8 +69,8 @@ Entering script: \"${scrfunc_fn}\" In directory: \"${scrfunc_dir}\" This is the ex-script for the task that runs the METplus ${MetplusToolName} -tool to perform verification of the specified field (VAR) on the ensemble -mean. +tool to perform verification of the specified field group (FIELD_GROUP) +on the ensemble mean. ========================================================================" # #----------------------------------------------------------------------- @@ -93,7 +96,7 @@ FIELDNAME_IN_MET_FILEDIR_NAMES="" set_vx_params \ obtype="${OBTYPE}" \ - field="$VAR" \ + field_group="${FIELD_GROUP}" \ accum_hh="${ACCUM_HH}" \ outvarname_grid_or_point="grid_or_point" \ outvarname_fieldname_in_obs_input="FIELDNAME_IN_OBS_INPUT" \ @@ -103,53 +106,6 @@ set_vx_params \ # #----------------------------------------------------------------------- # -# Set additional field-dependent verification parameters. -# -#----------------------------------------------------------------------- -# -if [ "${grid_or_point}" = "grid" ]; then - - case "${FIELDNAME_IN_MET_FILEDIR_NAMES}" in - "APCP01h") - FIELD_THRESHOLDS="gt0.0, ge0.254, ge0.508, ge2.54" - ;; - "APCP03h") - FIELD_THRESHOLDS="gt0.0, ge0.508, ge2.54, ge6.350" - ;; - "APCP06h") - FIELD_THRESHOLDS="gt0.0, ge2.54, ge6.350, ge12.700" - ;; - "APCP24h") - FIELD_THRESHOLDS="gt0.0, ge6.350, ge12.700, ge25.400" - ;; - "ASNOW06h") - FIELD_THRESHOLDS="gt0.0, ge2.54, ge5.08, ge10.16, ge20.32" - ;; - "ASNOW24h") - FIELD_THRESHOLDS="gt0.0, ge2.54, ge5.08, ge10.16, ge20.32" - ;; - "REFC") - FIELD_THRESHOLDS="ge20, ge30, ge40, ge50" - ;; - "RETOP") - FIELD_THRESHOLDS="ge20, ge30, ge40, ge50" - ;; - *) - print_err_msg_exit "\ -Verification parameters have not been defined for this field -(FIELDNAME_IN_MET_FILEDIR_NAMES): - FIELDNAME_IN_MET_FILEDIR_NAMES = \"${FIELDNAME_IN_MET_FILEDIR_NAMES}\"" - ;; - esac - -elif [ "${grid_or_point}" = "point" ]; then - - FIELD_THRESHOLDS="" - -fi -# -#----------------------------------------------------------------------- -# # Set paths and file templates for input to and output from the MET/ # METplus tool to be run as well as other file/directory parameters. 
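# The hard-coded threshold lists removed above use MET's comparison
# syntax ("gt" = greater than, "ge" = greater than or equal) in the
# field's native units, e.g. for REFC (composite reflectivity):
#   "ge20, ge30, ge40, ge50"              -> >=20 ... >=50 dBZ
# and for 24-h APCP (accumulated precipitation):
#   "gt0.0, ge6.350, ge12.700, ge25.400"  -> >0 mm, >=0.25/0.5/1.0 in
# These thresholds now presumably live in the central verification
# configuration files (VX_CONFIG_DET_FN / VX_CONFIG_ENS_FN) loaded
# further below.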
# @@ -166,20 +122,20 @@ if [ "${grid_or_point}" = "grid" ]; then case "${FIELDNAME_IN_MET_FILEDIR_NAMES}" in "APCP"*) - OBS_INPUT_DIR="${vx_output_basedir}/metprd/PcpCombine_obs" + OBS_INPUT_DIR="${vx_output_basedir}${slash_cdate_or_null}/obs/metprd/PcpCombine_obs" OBS_INPUT_FN_TEMPLATE="${OBS_CCPA_APCP_FN_TEMPLATE_PCPCOMBINE_OUTPUT}" ;; "ASNOW"*) - OBS_INPUT_DIR="${OBS_DIR}" - OBS_INPUT_FN_TEMPLATE="${OBS_NOHRSC_ASNOW_FN_TEMPLATE}" + OBS_INPUT_DIR="${vx_output_basedir}${slash_cdate_or_null}/obs/metprd/PcpCombine_obs" + OBS_INPUT_FN_TEMPLATE="${OBS_NOHRSC_ASNOW_FN_TEMPLATE_PCPCOMBINE_OUTPUT}" ;; "REFC") OBS_INPUT_DIR="${OBS_DIR}" - OBS_INPUT_FN_TEMPLATE="${OBS_MRMS_REFC_FN_TEMPLATE}" + OBS_INPUT_FN_TEMPLATE="${OBS_MRMS_FN_TEMPLATES[1]}" ;; "RETOP") OBS_INPUT_DIR="${OBS_DIR}" - OBS_INPUT_FN_TEMPLATE="${OBS_MRMS_RETOP_FN_TEMPLATE}" + OBS_INPUT_FN_TEMPLATE="${OBS_MRMS_FN_TEMPLATES[3]}" ;; esac FCST_INPUT_DIR="${vx_output_basedir}${slash_cdate_or_null}/metprd/GenEnsProd" @@ -187,7 +143,7 @@ if [ "${grid_or_point}" = "grid" ]; then elif [ "${grid_or_point}" = "point" ]; then OBS_INPUT_DIR="${vx_output_basedir}/metprd/Pb2nc_obs" - OBS_INPUT_FN_TEMPLATE="${OBS_NDAS_ADPSFCorADPUPA_FN_TEMPLATE_PB2NC_OUTPUT}" + OBS_INPUT_FN_TEMPLATE="${OBS_NDAS_SFCandUPA_FN_TEMPLATE_PB2NC_OUTPUT}" FCST_INPUT_DIR="${vx_output_basedir}${slash_cdate_or_null}/metprd/GenEnsProd" fi @@ -200,20 +156,35 @@ STAGING_DIR="${OUTPUT_BASE}/stage/${FIELDNAME_IN_MET_FILEDIR_NAMES}_ensmean" # #----------------------------------------------------------------------- # -# Set the array of forecast hours for which to run the MET/METplus tool. +# Set the lead hours for which to run the MET/METplus tool. This is done +# by starting with the full list of lead hours for which we expect to +# find forecast output and then removing from that list any hours for +# which there is no corresponding observation data. # #----------------------------------------------------------------------- # -set_vx_fhr_list \ - cdate="${CDATE}" \ - fcst_len_hrs="${FCST_LEN_HRS}" \ - field="$VAR" \ - accum_hh="${ACCUM_HH}" \ - base_dir="${OBS_INPUT_DIR}" \ - fn_template="${OBS_INPUT_FN_TEMPLATE}" \ - check_accum_contrib_files="FALSE" \ - num_missing_files_max="${NUM_MISSING_OBS_FILES_MAX}" \ - outvarname_fhr_list="FHR_LIST" +case "$OBTYPE" in + "CCPA"|"NOHRSC") + vx_intvl="$((10#${ACCUM_HH}))" + vx_hr_start="${vx_intvl}" + ;; + *) + vx_intvl="$((${VX_FCST_OUTPUT_INTVL_HRS}))" + vx_hr_start="0" + ;; +esac +vx_hr_end="${FCST_LEN_HRS}" + +VX_LEADHR_LIST=$( python3 $USHdir/set_leadhrs.py \ + --date_init="${CDATE}" \ + --lhr_min="${vx_hr_start}" \ + --lhr_max="${vx_hr_end}" \ + --lhr_intvl="${vx_intvl}" \ + --base_dir="${OBS_INPUT_DIR}" \ + --fn_template="${OBS_INPUT_FN_TEMPLATE}" \ + --num_missing_files_max="${NUM_MISSING_OBS_FILES_MAX}" ) || \ + print_err_msg_exit "Call to set_leadhrs.py failed with return code: $?" + # #----------------------------------------------------------------------- # @@ -221,7 +192,7 @@ set_vx_fhr_list \ # #----------------------------------------------------------------------- # -mkdir_vrfy -p "${OUTPUT_DIR}" +mkdir -p "${OUTPUT_DIR}" # #----------------------------------------------------------------------- # @@ -256,15 +227,15 @@ export LOGDIR # #----------------------------------------------------------------------- # -# Do not run METplus if there isn't at least one valid forecast hour for -# which to run it. +# Do not run METplus if there isn't at least one lead hour for which to +# run it. 
# #----------------------------------------------------------------------- # -if [ -z "${FHR_LIST}" ]; then +if [ -z "${VX_LEADHR_LIST}" ]; then print_err_msg_exit "\ -The list of forecast hours for which to run METplus is empty: - FHR_LIST = [${FHR_LIST}]" +The list of lead hours for which to run METplus is empty: + VX_LEADHR_LIST = [${VX_LEADHR_LIST}]" fi # #----------------------------------------------------------------------- @@ -277,16 +248,28 @@ fi # # First, set the base file names. # -metplus_config_tmpl_fn="${VAR}" -metplus_config_tmpl_fn="${MetplusToolName}_ensmean_${metplus_config_tmpl_fn}" -metplus_config_fn="${MetplusToolName}_ensmean_${FIELDNAME_IN_MET_FILEDIR_NAMES}" -metplus_log_fn="${metplus_config_fn}" +metplus_config_tmpl_bn="${MetplusToolName}_ensmean" +metplus_config_bn="${MetplusToolName}_${FIELDNAME_IN_MET_FILEDIR_NAMES}_ensmean" +metplus_log_bn="${metplus_config_bn}_$CDATE" # # Add prefixes and suffixes (extensions) to the base file names. # -metplus_config_tmpl_fn="${metplus_config_tmpl_fn}.conf" -metplus_config_fn="${metplus_config_fn}.conf" -metplus_log_fn="metplus.log.${metplus_log_fn}" +metplus_config_tmpl_fn="${metplus_config_tmpl_bn}.conf" +metplus_config_fn="${metplus_config_bn}.conf" +metplus_log_fn="metplus.log.${metplus_log_bn}" +# +#----------------------------------------------------------------------- +# +# Load the yaml-like file containing the configuration for ensemble +# verification. +# +#----------------------------------------------------------------------- +# +vx_config_fp="${METPLUS_CONF}/${VX_CONFIG_ENS_FN}" +vx_config_dict=$(<"${vx_config_fp}") +# Indent each line of vx_config_dict so that it is aligned properly when +# included in the yaml-formatted variable "settings" below. +vx_config_dict=$( printf "%s\n" "${vx_config_dict}" | sed 's/^/ /' ) # #----------------------------------------------------------------------- # @@ -307,59 +290,65 @@ settings="\ # # MET/METplus information. # - 'metplus_tool_name': '${metplus_tool_name}' - 'MetplusToolName': '${MetplusToolName}' - 'METPLUS_TOOL_NAME': '${METPLUS_TOOL_NAME}' - 'metplus_verbosity_level': '${METPLUS_VERBOSITY_LEVEL}' +'metplus_tool_name': '${metplus_tool_name}' +'MetplusToolName': '${MetplusToolName}' +'METPLUS_TOOL_NAME': '${METPLUS_TOOL_NAME}' +'metplus_verbosity_level': '${METPLUS_VERBOSITY_LEVEL}' # # Date and forecast hour information. # - 'cdate': '$CDATE' - 'fhr_list': '${FHR_LIST}' +'cdate': '$CDATE' +'vx_leadhr_list': '${VX_LEADHR_LIST}' # # Input and output directory/file information. # - 'metplus_config_fn': '${metplus_config_fn:-}' - 'metplus_log_fn': '${metplus_log_fn:-}' - 'obs_input_dir': '${OBS_INPUT_DIR:-}' - 'obs_input_fn_template': '${OBS_INPUT_FN_TEMPLATE:-}' - 'fcst_input_dir': '${FCST_INPUT_DIR:-}' - 'fcst_input_fn_template': '${FCST_INPUT_FN_TEMPLATE:-}' - 'output_base': '${OUTPUT_BASE}' - 'output_dir': '${OUTPUT_DIR}' - 'output_fn_template': '${OUTPUT_FN_TEMPLATE:-}' - 'staging_dir': '${STAGING_DIR}' - 'vx_fcst_model_name': '${VX_FCST_MODEL_NAME}' +'metplus_config_fn': '${metplus_config_fn:-}' +'metplus_log_fn': '${metplus_log_fn:-}' +'obs_input_dir': '${OBS_INPUT_DIR:-}' +'obs_input_fn_template': '${OBS_INPUT_FN_TEMPLATE:-}' +'fcst_input_dir': '${FCST_INPUT_DIR:-}' +'fcst_input_fn_template': '${FCST_INPUT_FN_TEMPLATE:-}' +'output_base': '${OUTPUT_BASE}' +'output_dir': '${OUTPUT_DIR}' +'output_fn_template': '${OUTPUT_FN_TEMPLATE:-}' +'staging_dir': '${STAGING_DIR}' +'vx_fcst_model_name': '${VX_FCST_MODEL_NAME}' # # Ensemble and member-specific information. 
# - 'num_ens_members': '${NUM_ENS_MEMBERS}' - 'ensmem_name': '${ensmem_name:-}' - 'time_lag': '${time_lag:-}' +'num_ens_members': '${NUM_ENS_MEMBERS}' +'ensmem_name': '${ensmem_name:-}' +'time_lag': '${time_lag:-}' # # Field information. # - 'fieldname_in_obs_input': '${FIELDNAME_IN_OBS_INPUT}' - 'fieldname_in_fcst_input': '${FIELDNAME_IN_FCST_INPUT}' - 'fieldname_in_met_output': '${FIELDNAME_IN_MET_OUTPUT}' - 'fieldname_in_met_filedir_names': '${FIELDNAME_IN_MET_FILEDIR_NAMES}' - 'obtype': '${OBTYPE}' - 'accum_hh': '${ACCUM_HH:-}' - 'accum_no_pad': '${ACCUM_NO_PAD:-}' - 'field_thresholds': '${FIELD_THRESHOLDS:-}' +'fieldname_in_obs_input': '${FIELDNAME_IN_OBS_INPUT}' +'fieldname_in_fcst_input': '${FIELDNAME_IN_FCST_INPUT}' +'fieldname_in_met_output': '${FIELDNAME_IN_MET_OUTPUT}' +'fieldname_in_met_filedir_names': '${FIELDNAME_IN_MET_FILEDIR_NAMES}' +'obtype': '${OBTYPE}' +'accum_hh': '${ACCUM_HH:-}' +'accum_no_pad': '${ACCUM_NO_PAD:-}' +'metplus_templates_dir': '${METPLUS_CONF:-}' +'input_field_group': '${FIELD_GROUP:-}' +'input_level_fcst': '${FCST_LEVEL:-}' +'input_thresh_fcst': '${FCST_THRESH:-}' +# +# Verification configuration dictionary. +# +'vx_config_dict': +${vx_config_dict:-} " # Render the template to create a METplus configuration file tmpfile=$( $READLINK -f "$(mktemp ./met_plus_settings.XXXXXX.yaml)") -cat > $tmpfile << EOF -$settings -EOF - +printf "%s" "$settings" > "$tmpfile" uw template render \ -i ${metplus_config_tmpl_fp} \ -o ${metplus_config_fp} \ --verbose \ - --values-file "${tmpfile}" + --values-file "${tmpfile}" \ + --search-path "/" err=$? rm $tmpfile @@ -373,7 +362,6 @@ $settings" print_err_msg_exit "${message_txt}" fi fi - # #----------------------------------------------------------------------- # diff --git a/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensprob.sh b/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensprob.sh index fc735845c9..c7693fe06c 100755 --- a/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensprob.sh +++ b/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensprob.sh @@ -8,7 +8,11 @@ #----------------------------------------------------------------------- # . $USHdir/source_util_funcs.sh -source_config_for_task "task_run_vx_ensgrid_prob|task_run_vx_enspoint_prob|task_run_post" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow nco global verification cpl_aqm_parm \ + constants fixed_files grid_params \ + task_run_post ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # @@ -18,7 +22,6 @@ source_config_for_task "task_run_vx_ensgrid_prob|task_run_vx_enspoint_prob|task_ # . $USHdir/get_metplus_tool_name.sh . $USHdir/set_vx_params.sh -. $USHdir/set_vx_fhr_list.sh # #----------------------------------------------------------------------- # @@ -66,7 +69,8 @@ Entering script: \"${scrfunc_fn}\" In directory: \"${scrfunc_dir}\" This is the ex-script for the task that runs the METplus ${MetplusToolName} -tool to perform verification of the specified field (VAR) on the ensemble +tool to perform verification of the specified field group (FIELD_GROUP) +on the ensemble frequencies/probabilities. 
========================================================================" # @@ -93,7 +97,7 @@ FIELDNAME_IN_MET_FILEDIR_NAMES="" set_vx_params \ obtype="${OBTYPE}" \ - field="$VAR" \ + field_group="${FIELD_GROUP}" \ accum_hh="${ACCUM_HH}" \ outvarname_grid_or_point="grid_or_point" \ outvarname_fieldname_in_obs_input="FIELDNAME_IN_OBS_INPUT" \ @@ -119,27 +123,27 @@ if [ "${grid_or_point}" = "grid" ]; then case "${FIELDNAME_IN_MET_FILEDIR_NAMES}" in "APCP"*) - OBS_INPUT_DIR="${vx_output_basedir}/metprd/PcpCombine_obs" + OBS_INPUT_DIR="${vx_output_basedir}${slash_cdate_or_null}/obs/metprd/PcpCombine_obs" OBS_INPUT_FN_TEMPLATE="${OBS_CCPA_APCP_FN_TEMPLATE_PCPCOMBINE_OUTPUT}" ;; "ASNOW"*) - OBS_INPUT_DIR="${OBS_DIR}" - OBS_INPUT_FN_TEMPLATE="${OBS_NOHRSC_ASNOW_FN_TEMPLATE}" + OBS_INPUT_DIR="${vx_output_basedir}${slash_cdate_or_null}/obs/metprd/PcpCombine_obs" + OBS_INPUT_FN_TEMPLATE="${OBS_NOHRSC_ASNOW_FN_TEMPLATE_PCPCOMBINE_OUTPUT}" ;; "REFC") OBS_INPUT_DIR="${OBS_DIR}" - OBS_INPUT_FN_TEMPLATE="${OBS_MRMS_REFC_FN_TEMPLATE}" + OBS_INPUT_FN_TEMPLATE="${OBS_MRMS_FN_TEMPLATES[1]}" ;; "RETOP") OBS_INPUT_DIR="${OBS_DIR}" - OBS_INPUT_FN_TEMPLATE="${OBS_MRMS_RETOP_FN_TEMPLATE}" + OBS_INPUT_FN_TEMPLATE="${OBS_MRMS_FN_TEMPLATES[3]}" ;; esac elif [ "${grid_or_point}" = "point" ]; then OBS_INPUT_DIR="${vx_output_basedir}/metprd/Pb2nc_obs" - OBS_INPUT_FN_TEMPLATE="${OBS_NDAS_ADPSFCorADPUPA_FN_TEMPLATE_PB2NC_OUTPUT}" + OBS_INPUT_FN_TEMPLATE="${OBS_NDAS_SFCandUPA_FN_TEMPLATE_PB2NC_OUTPUT}" fi OBS_INPUT_FN_TEMPLATE=$( eval echo ${OBS_INPUT_FN_TEMPLATE} ) @@ -152,20 +156,35 @@ STAGING_DIR="${OUTPUT_BASE}/stage/${FIELDNAME_IN_MET_FILEDIR_NAMES}_ensprob" # #----------------------------------------------------------------------- # -# Set the array of forecast hours for which to run the MET/METplus tool. +# Set the lead hours for which to run the MET/METplus tool. This is done +# by starting with the full list of lead hours for which we expect to +# find forecast output and then removing from that list any hours for +# which there is no corresponding observation data. # #----------------------------------------------------------------------- # -set_vx_fhr_list \ - cdate="${CDATE}" \ - fcst_len_hrs="${FCST_LEN_HRS}" \ - field="$VAR" \ - accum_hh="${ACCUM_HH}" \ - base_dir="${OBS_INPUT_DIR}" \ - fn_template="${OBS_INPUT_FN_TEMPLATE}" \ - check_accum_contrib_files="FALSE" \ - num_missing_files_max="${NUM_MISSING_OBS_FILES_MAX}" \ - outvarname_fhr_list="FHR_LIST" +case "$OBTYPE" in + "CCPA"|"NOHRSC") + vx_intvl="$((10#${ACCUM_HH}))" + vx_hr_start="${vx_intvl}" + ;; + *) + vx_intvl="$((${VX_FCST_OUTPUT_INTVL_HRS}))" + vx_hr_start="0" + ;; +esac +vx_hr_end="${FCST_LEN_HRS}" + +VX_LEADHR_LIST=$( python3 $USHdir/set_leadhrs.py \ + --date_init="${CDATE}" \ + --lhr_min="${vx_hr_start}" \ + --lhr_max="${vx_hr_end}" \ + --lhr_intvl="${vx_intvl}" \ + --base_dir="${OBS_INPUT_DIR}" \ + --fn_template="${OBS_INPUT_FN_TEMPLATE}" \ + --num_missing_files_max="${NUM_MISSING_OBS_FILES_MAX}" ) || \ + print_err_msg_exit "Call to set_leadhrs.py failed with return code: $?" 
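For context, a standalone invocation of the new set_leadhrs.py helper might look as follows. All values below are invented for illustration; the flags are the ones used in the calls above, and the assumption, consistent with how the callers capture stdout into VX_LEADHR_LIST, is that the script prints the surviving lead hours as a comma-separated list:

python3 $USHdir/set_leadhrs.py \
  --date_init="2019061500" \
  --lhr_min="6" \
  --lhr_max="36" \
  --lhr_intvl="6" \
  --base_dir="/path/to/staged/obs" \
  --fn_template="{valid?fmt=%Y%m%d}/ccpa.t{valid?fmt=%H}z.06h.hrap.conus.gb2" \
  --num_missing_files_max="2"
# Expected stdout: something like "6,12,18,24,30,36". Lead hours whose
# templated obs file is missing on disk are dropped, and the helper is
# assumed to fail once more than --num_missing_files_max files are absent.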
+ # #----------------------------------------------------------------------- # @@ -173,7 +192,7 @@ set_vx_fhr_list \ # #----------------------------------------------------------------------- # -mkdir_vrfy -p "${OUTPUT_DIR}" +mkdir -p "${OUTPUT_DIR}" # #----------------------------------------------------------------------- # @@ -208,15 +227,15 @@ export LOGDIR # #----------------------------------------------------------------------- # -# Do not run METplus if there isn't at least one valid forecast hour for -# which to run it. +# Do not run METplus if there isn't at least one lead hour for which to +# run it. # #----------------------------------------------------------------------- # -if [ -z "${FHR_LIST}" ]; then +if [ -z "${VX_LEADHR_LIST}" ]; then print_err_msg_exit "\ -The list of forecast hours for which to run METplus is empty: - FHR_LIST = [${FHR_LIST}]" +The list of lead hours for which to run METplus is empty: + VX_LEADHR_LIST = [${VX_LEADHR_LIST}]" fi # #----------------------------------------------------------------------- @@ -229,16 +248,28 @@ fi # # First, set the base file names. # -metplus_config_tmpl_fn="${VAR}" -metplus_config_tmpl_fn="${MetplusToolName}_ensprob_${metplus_config_tmpl_fn}" -metplus_config_fn="${MetplusToolName}_ensprob_${FIELDNAME_IN_MET_FILEDIR_NAMES}" -metplus_log_fn="${metplus_config_fn}" +metplus_config_tmpl_bn="${MetplusToolName}_ensprob" +metplus_config_bn="${MetplusToolName}_${FIELDNAME_IN_MET_FILEDIR_NAMES}_ensprob" +metplus_log_bn="${metplus_config_bn}_$CDATE" # # Add prefixes and suffixes (extensions) to the base file names. # -metplus_config_tmpl_fn="${metplus_config_tmpl_fn}.conf" -metplus_config_fn="${metplus_config_fn}.conf" -metplus_log_fn="metplus.log.${metplus_log_fn}" +metplus_config_tmpl_fn="${metplus_config_tmpl_bn}.conf" +metplus_config_fn="${metplus_config_bn}.conf" +metplus_log_fn="metplus.log.${metplus_log_bn}" +# +#----------------------------------------------------------------------- +# +# Load the yaml-like file containing the configuration for ensemble +# verification. +# +#----------------------------------------------------------------------- +# +vx_config_fp="${METPLUS_CONF}/${VX_CONFIG_ENS_FN}" +vx_config_dict=$(<"${vx_config_fp}") +# Indent each line of vx_config_dict so that it is aligned properly when +# included in the yaml-formatted variable "settings" below. +vx_config_dict=$( printf "%s\n" "${vx_config_dict}" | sed 's/^/ /' ) # #----------------------------------------------------------------------- # @@ -259,59 +290,65 @@ settings="\ # # MET/METplus information. # - 'metplus_tool_name': '${metplus_tool_name}' - 'MetplusToolName': '${MetplusToolName}' - 'METPLUS_TOOL_NAME': '${METPLUS_TOOL_NAME}' - 'metplus_verbosity_level': '${METPLUS_VERBOSITY_LEVEL}' +'metplus_tool_name': '${metplus_tool_name}' +'MetplusToolName': '${MetplusToolName}' +'METPLUS_TOOL_NAME': '${METPLUS_TOOL_NAME}' +'metplus_verbosity_level': '${METPLUS_VERBOSITY_LEVEL}' # # Date and forecast hour information. # - 'cdate': '$CDATE' - 'fhr_list': '${FHR_LIST}' +'cdate': '$CDATE' +'vx_leadhr_list': '${VX_LEADHR_LIST}' # # Input and output directory/file information. 
# - 'metplus_config_fn': '${metplus_config_fn:-}' - 'metplus_log_fn': '${metplus_log_fn:-}' - 'obs_input_dir': '${OBS_INPUT_DIR:-}' - 'obs_input_fn_template': '${OBS_INPUT_FN_TEMPLATE:-}' - 'fcst_input_dir': '${FCST_INPUT_DIR:-}' - 'fcst_input_fn_template': '${FCST_INPUT_FN_TEMPLATE:-}' - 'output_base': '${OUTPUT_BASE}' - 'output_dir': '${OUTPUT_DIR}' - 'output_fn_template': '${OUTPUT_FN_TEMPLATE:-}' - 'staging_dir': '${STAGING_DIR}' - 'vx_fcst_model_name': '${VX_FCST_MODEL_NAME}' +'metplus_config_fn': '${metplus_config_fn:-}' +'metplus_log_fn': '${metplus_log_fn:-}' +'obs_input_dir': '${OBS_INPUT_DIR:-}' +'obs_input_fn_template': '${OBS_INPUT_FN_TEMPLATE:-}' +'fcst_input_dir': '${FCST_INPUT_DIR:-}' +'fcst_input_fn_template': '${FCST_INPUT_FN_TEMPLATE:-}' +'output_base': '${OUTPUT_BASE}' +'output_dir': '${OUTPUT_DIR}' +'output_fn_template': '${OUTPUT_FN_TEMPLATE:-}' +'staging_dir': '${STAGING_DIR}' +'vx_fcst_model_name': '${VX_FCST_MODEL_NAME}' # # Ensemble and member-specific information. # - 'num_ens_members': '${NUM_ENS_MEMBERS}' - 'ensmem_name': '${ensmem_name:-}' - 'time_lag': '${time_lag:-}' +'num_ens_members': '${NUM_ENS_MEMBERS}' +'ensmem_name': '${ensmem_name:-}' +'time_lag': '${time_lag:-}' # # Field information. # - 'fieldname_in_obs_input': '${FIELDNAME_IN_OBS_INPUT}' - 'fieldname_in_fcst_input': '${FIELDNAME_IN_FCST_INPUT}' - 'fieldname_in_met_output': '${FIELDNAME_IN_MET_OUTPUT}' - 'fieldname_in_met_filedir_names': '${FIELDNAME_IN_MET_FILEDIR_NAMES}' - 'obtype': '${OBTYPE}' - 'accum_hh': '${ACCUM_HH:-}' - 'accum_no_pad': '${ACCUM_NO_PAD:-}' - 'field_thresholds': '${FIELD_THRESHOLDS:-}' +'fieldname_in_obs_input': '${FIELDNAME_IN_OBS_INPUT}' +'fieldname_in_fcst_input': '${FIELDNAME_IN_FCST_INPUT}' +'fieldname_in_met_output': '${FIELDNAME_IN_MET_OUTPUT}' +'fieldname_in_met_filedir_names': '${FIELDNAME_IN_MET_FILEDIR_NAMES}' +'obtype': '${OBTYPE}' +'accum_hh': '${ACCUM_HH:-}' +'accum_no_pad': '${ACCUM_NO_PAD:-}' +'metplus_templates_dir': '${METPLUS_CONF:-}' +'input_field_group': '${FIELD_GROUP:-}' +'input_level_fcst': '${FCST_LEVEL:-}' +'input_thresh_fcst': '${FCST_THRESH:-}' +# +# Verification configuration dictionary. +# +'vx_config_dict': +${vx_config_dict:-} " # Render the template to create a METplus configuration file tmpfile=$( $READLINK -f "$(mktemp ./met_plus_settings.XXXXXX.yaml)") -cat > $tmpfile << EOF -$settings -EOF - +printf "%s" "$settings" > "$tmpfile" uw template render \ -i ${metplus_config_tmpl_fp} \ -o ${metplus_config_fp} \ --verbose \ - --values-file "${tmpfile}" + --values-file "${tmpfile}" \ + --search-path "/" err=$? rm $tmpfile @@ -325,8 +362,6 @@ $settings" print_err_msg_exit "${message_txt}" fi fi - - # #----------------------------------------------------------------------- # diff --git a/scripts/exregional_run_met_pb2nc_obs.sh b/scripts/exregional_run_met_pb2nc_obs.sh index 92d39102fc..63ea5ca760 100755 --- a/scripts/exregional_run_met_pb2nc_obs.sh +++ b/scripts/exregional_run_met_pb2nc_obs.sh @@ -8,7 +8,10 @@ #----------------------------------------------------------------------- # . $USHdir/source_util_funcs.sh -source_config_for_task "task_run_met_pb2nc_obs" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow nco global verification cpl_aqm_parm \ + constants fixed_files grid_params ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # @@ -18,7 +21,6 @@ source_config_for_task "task_run_met_pb2nc_obs" ${GLOBAL_VAR_DEFNS_FP} # . 
$USHdir/get_metplus_tool_name.sh . $USHdir/set_vx_params.sh -. $USHdir/set_vx_fhr_list.sh # #----------------------------------------------------------------------- # @@ -71,6 +73,30 @@ to convert NDAS prep buffer observation files to NetCDF format. # #----------------------------------------------------------------------- # +# The day (in the form YYYYMMDD) associated with the current task via the +# task's cycledefs attribute in the ROCOTO XML. +# +#----------------------------------------------------------------------- +# +yyyymmdd_task=${PDY} + +# Seconds since the reference time used by the DATE_UTIL utility, evaluated +# at the start of the day of the current task. This will be used below to +# find hours since the start of this day. +sec_since_ref_task=$(${DATE_UTIL} --date "${yyyymmdd_task} 0 hours" +%s) +# +#----------------------------------------------------------------------- +# +# Get the list of all the times in the current day at which to retrieve +# obs. This is an array with elements having format "YYYYMMDDHH". +# +#----------------------------------------------------------------------- +# +array_name="OBS_RETRIEVE_TIMES_${OBTYPE}_${yyyymmdd_task}" +eval obs_retrieve_times_crnt_day=\( \${${array_name}[@]} \) +# +#----------------------------------------------------------------------- +# # Get the cycle date and time in YYYYMMDDHH format. # #----------------------------------------------------------------------- @@ -92,7 +118,7 @@ FIELDNAME_IN_MET_FILEDIR_NAMES="" set_vx_params \ obtype="${OBTYPE}" \ - field="$VAR" \ + field_group="${FIELD_GROUP}" \ accum_hh="${ACCUM_HH}" \ outvarname_grid_or_point="grid_or_point" \ outvarname_fieldname_in_obs_input="FIELDNAME_IN_OBS_INPUT" \ @@ -110,29 +136,75 @@ set_vx_params \ vx_output_basedir=$( eval echo "${VX_OUTPUT_BASEDIR}" ) OBS_INPUT_DIR="${OBS_DIR}" -OBS_INPUT_FN_TEMPLATE=$( eval echo ${OBS_NDAS_ADPSFCorADPUPA_FN_TEMPLATE} ) +OBS_INPUT_FN_TEMPLATE=$( eval echo ${OBS_NDAS_FN_TEMPLATES[1]} ) OUTPUT_BASE="${vx_output_basedir}" OUTPUT_DIR="${OUTPUT_BASE}/metprd/${MetplusToolName}_obs" -OUTPUT_FN_TEMPLATE=$( eval echo ${OBS_NDAS_ADPSFCorADPUPA_FN_TEMPLATE_PB2NC_OUTPUT} ) +OUTPUT_FN_TEMPLATE=$( eval echo ${OBS_NDAS_SFCandUPA_FN_TEMPLATE_PB2NC_OUTPUT} ) STAGING_DIR="${OUTPUT_BASE}/stage/${MetplusToolName}_obs" # #----------------------------------------------------------------------- # -# Set the array of forecast hours for which to run the MET/METplus tool. +# Set the array of lead hours (relative to the date associated with this +# task) for which to run the MET/METplus tool. # #----------------------------------------------------------------------- # -set_vx_fhr_list \ - cdate="${CDATE}" \ - fcst_len_hrs="${FCST_LEN_HRS}" \ - field="$VAR" \ - accum_hh="${ACCUM_HH}" \ - base_dir="${OBS_INPUT_DIR}" \ - fn_template="${OBS_INPUT_FN_TEMPLATE}" \ - check_accum_contrib_files="FALSE" \ - num_missing_files_max="${NUM_MISSING_OBS_FILES_MAX}" \ - outvarname_fhr_list="FHR_LIST" +LEADHR_LIST="" +num_missing_files=0 +for yyyymmddhh in ${obs_retrieve_times_crnt_day[@]}; do + yyyymmdd=$(echo ${yyyymmddhh} | cut -c1-8) + hh=$(echo ${yyyymmddhh} | cut -c9-10) + + # Set the full path to the final processed obs file (fp) we want to + # create.
+ sec_since_ref=$(${DATE_UTIL} --date "${yyyymmdd} ${hh} hours" +%s) + lhr=$(( (sec_since_ref - sec_since_ref_task)/3600 )) + + fp=$( python3 $USHdir/eval_metplus_timestr_tmpl.py \ + --init_time="${yyyymmdd_task}00" \ + --lhr="${lhr}" \ + --fn_template="${OBS_DIR}/${OBS_NDAS_FN_TEMPLATES[1]}") || \ + print_err_msg_exit "Call to eval_metplus_timestr_tmpl.py failed with return code: $?" + + if [[ -f "${fp}" ]]; then + print_info_msg " +Found ${OBTYPE} obs file corresponding to observation retrieval time (yyyymmddhh): + yyyymmddhh = \"${yyyymmddhh}\" + fp = \"${fp}\" +" + hh_noZero=$((10#${hh})) + LEADHR_LIST="${LEADHR_LIST},${hh_noZero}" + else + num_missing_files=$((num_missing_files+1)) + print_info_msg " +${OBTYPE} obs file corresponding to observation retrieval time (yyyymmddhh) +does not exist on disk: + yyyymmddhh = \"${yyyymmddhh}\" + fp = \"${fp}\" +Removing this time from the list of times to be processed by ${MetplusToolName}. +" + fi +done + +# If the number of missing files is greater than the maximum allowed +# (specified by NUM_MISSING_OBS_FILES_MAX), print out an error message and +# exit. +if [ "${num_missing_files}" -gt "${NUM_MISSING_OBS_FILES_MAX}" ]; then + print_err_msg_exit "\ +The number of missing ${OBTYPE} obs files (num_missing_files) is greater +than the maximum allowed number (NUM_MISSING_OBS_FILES_MAX): + num_missing_files = ${num_missing_files} + NUM_MISSING_OBS_FILES_MAX = ${NUM_MISSING_OBS_FILES_MAX}" +fi + +# Remove leading comma from LEADHR_LIST. +LEADHR_LIST=$( echo "${LEADHR_LIST}" | $SED "s/^,//g" ) +print_info_msg "$VERBOSE" "\ +Final (i.e. after filtering for missing obs files) set of lead hours +(saved in a scalar string variable) is: + LEADHR_LIST = \"${LEADHR_LIST}\" +" # #----------------------------------------------------------------------- # @@ -140,7 +212,7 @@ set_vx_fhr_list \ # #----------------------------------------------------------------------- # -mkdir_vrfy -p "${OUTPUT_DIR}" +mkdir -p "${OUTPUT_DIR}" # #----------------------------------------------------------------------- # @@ -166,15 +238,15 @@ export LOGDIR # #----------------------------------------------------------------------- # -# Do not run METplus if there isn't at least one valid forecast hour for -# which to run it. +# Do not run METplus if there isn't at least one lead hour for which to +# run it. # #----------------------------------------------------------------------- # -if [ -z "${FHR_LIST}" ]; then +if [ -z "${LEADHR_LIST}" ]; then print_err_msg_exit "\ -The list of forecast hours for which to run METplus is empty: - FHR_LIST = [${FHR_LIST}]" +The list of lead hours for which to run METplus is empty: + LEADHR_LIST = [${LEADHR_LIST}]" fi # #----------------------------------------------------------------------- @@ -205,8 +277,8 @@ metplus_config_tmpl_fn="${MetplusToolName}_obs" # information, but we still include that info in the file name so that # the behavior in the two modes is as similar as possible. # -metplus_config_fn="${metplus_config_tmpl_fn}_${CDATE}" -metplus_log_fn="${metplus_config_fn}" +metplus_config_fn="${metplus_config_tmpl_fn}_NDAS_${CDATE}" +metplus_log_fn="${metplus_config_fn}" # # Add prefixes and suffixes (extensions) to the base file names. # @@ -238,10 +310,10 @@ settings="\ 'METPLUS_TOOL_NAME': '${METPLUS_TOOL_NAME}' 'metplus_verbosity_level': '${METPLUS_VERBOSITY_LEVEL}' # -# Date and forecast hour information. +# Date and lead hour information.
# 'cdate': '$CDATE' - 'fhr_list': '${FHR_LIST}' + 'leadhr_list': '${LEADHR_LIST}' # # Input and output directory/file information. # @@ -272,20 +344,17 @@ settings="\ 'obtype': '${OBTYPE}' 'accum_hh': '${ACCUM_HH:-}' 'accum_no_pad': '${ACCUM_NO_PAD:-}' - 'field_thresholds': '${FIELD_THRESHOLDS:-}' " # Render the template to create a METplus configuration file tmpfile=$( $READLINK -f "$(mktemp ./met_plus_settings.XXXXXX.yaml)") -cat > $tmpfile << EOF -$settings -EOF - +printf "%s" "$settings" > "$tmpfile" uw template render \ -i ${metplus_config_tmpl_fp} \ -o ${metplus_config_fp} \ --verbose \ - --values-file "${tmpfile}" + --values-file "${tmpfile}" \ + --search-path "/" err=$? rm $tmpfile @@ -299,7 +368,6 @@ $settings" print_err_msg_exit "${message_txt}" fi fi - # #----------------------------------------------------------------------- # @@ -319,6 +387,16 @@ METplus configuration file used is: # #----------------------------------------------------------------------- # +# Create flag file that indicates completion of task. This is needed by +# the workflow. +# +#----------------------------------------------------------------------- +# +mkdir -p ${WFLOW_FLAG_FILES_DIR} +touch "${WFLOW_FLAG_FILES_DIR}/run_met_pb2nc_obs_ndas_${PDY}_complete.txt" +# +#----------------------------------------------------------------------- +# # Print message indicating successful completion of script. # #----------------------------------------------------------------------- diff --git a/scripts/exregional_run_met_pcpcombine.sh b/scripts/exregional_run_met_pcpcombine.sh index 7eabe02901..9ff0ee5ada 100755 --- a/scripts/exregional_run_met_pcpcombine.sh +++ b/scripts/exregional_run_met_pcpcombine.sh @@ -8,7 +8,11 @@ #----------------------------------------------------------------------- # . $USHdir/source_util_funcs.sh -source_config_for_task "task_run_met_pcpcombine|task_run_post" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow nco global verification cpl_aqm_parm \ + constants fixed_files grid_params \ + task_run_post ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # @@ -18,7 +22,6 @@ source_config_for_task "task_run_met_pcpcombine|task_run_post" ${GLOBAL_VAR_DEFN # . $USHdir/get_metplus_tool_name.sh . $USHdir/set_vx_params.sh -. 
$USHdir/set_vx_fhr_list.sh # #----------------------------------------------------------------------- # @@ -95,7 +98,7 @@ FIELDNAME_IN_MET_FILEDIR_NAMES="" set_vx_params \ obtype="${OBTYPE}" \ - field="$VAR" \ + field_group="${FIELD_GROUP}" \ accum_hh="${ACCUM_HH}" \ outvarname_grid_or_point="grid_or_point" \ outvarname_fieldname_in_obs_input="FIELDNAME_IN_OBS_INPUT" \ @@ -120,9 +123,9 @@ set_vx_params \ #----------------------------------------------------------------------- # time_lag="0" -if [ "${obs_or_fcst}" = "fcst" ]; then +if [ "${FCST_OR_OBS}" = "FCST" ]; then i="0" - if [ "${DO_ENSEMBLE}" = "TRUE" ]; then + if [ $(boolify "${DO_ENSEMBLE}") = "TRUE" ]; then i=$( bc -l <<< "${ENSMEM_INDX}-1" ) fi time_lag=$( bc -l <<< "${ENS_TIME_LAG_HRS[$i]}*${SECS_PER_HOUR}" ) @@ -137,13 +140,13 @@ fi # vx_fcst_input_basedir=$( eval echo "${VX_FCST_INPUT_BASEDIR}" ) vx_output_basedir=$( eval echo "${VX_OUTPUT_BASEDIR}" ) -if [ "${obs_or_fcst}" = "fcst" ]; then +if [ "${FCST_OR_OBS}" = "FCST" ]; then ensmem_indx=$(printf "%0${VX_NDIGITS_ENSMEM_NAMES}d" $(( 10#${ENSMEM_INDX}))) ensmem_name="mem${ensmem_indx}" - if [ "${RUN_ENVIR}" = "nco" ]; then slash_cdate_or_null="" slash_ensmem_subdir_or_null="" + slash_obs_or_null="" else slash_cdate_or_null="/${CDATE}" # @@ -158,12 +161,19 @@ if [ "${obs_or_fcst}" = "fcst" ]; then # or, better, just remove this variale and code "/${ensmem_name}" where # slash_ensmem_subdir_or_null currently appears below. # - if [ "${DO_ENSEMBLE}" = "TRUE" ]; then + if [ $(boolify "${DO_ENSEMBLE}") = "TRUE" ]; then slash_ensmem_subdir_or_null="/${ensmem_name}" else slash_ensmem_subdir_or_null="" fi fi +elif [ "${FCST_OR_OBS}" = "OBS" ]; then + slash_cdate_or_null="/${CDATE}" + if [ $(boolify "${DO_ENSEMBLE}") = "TRUE" ]; then + slash_obs_or_null="/obs" + else + slash_obs_or_null="" + fi fi OBS_INPUT_DIR="" @@ -171,54 +181,91 @@ OBS_INPUT_FN_TEMPLATE="" FCST_INPUT_DIR="" FCST_INPUT_FN_TEMPLATE="" -if [ "${obs_or_fcst}" = "obs" ]; then - - OBS_INPUT_DIR="${OBS_DIR}" - OBS_INPUT_FN_TEMPLATE=$( eval echo ${OBS_CCPA_APCP_FN_TEMPLATE} ) - - OUTPUT_BASE="${vx_output_basedir}" - OUTPUT_DIR="${OUTPUT_BASE}/metprd/${MetplusToolName}_obs" - OUTPUT_FN_TEMPLATE=$( eval echo ${OBS_CCPA_APCP_FN_TEMPLATE_PCPCOMBINE_OUTPUT} ) - STAGING_DIR="${OUTPUT_BASE}/stage/${FIELDNAME_IN_MET_FILEDIR_NAMES}" - -elif [ "${obs_or_fcst}" = "fcst" ]; then +if [ "${FCST_OR_OBS}" = "FCST" ]; then FCST_INPUT_DIR="${vx_fcst_input_basedir}" FCST_INPUT_FN_TEMPLATE=$( eval echo ${FCST_SUBDIR_TEMPLATE:+${FCST_SUBDIR_TEMPLATE}/}${FCST_FN_TEMPLATE} ) - OUTPUT_BASE="${vx_output_basedir}${slash_cdate_or_null}/${slash_ensmem_subdir_or_null}" + OUTPUT_BASE="${vx_output_basedir}${slash_cdate_or_null}${slash_ensmem_subdir_or_null}" OUTPUT_DIR="${OUTPUT_BASE}/metprd/${MetplusToolName}_fcst" OUTPUT_FN_TEMPLATE=$( eval echo ${FCST_FN_TEMPLATE_PCPCOMBINE_OUTPUT} ) STAGING_DIR="${OUTPUT_BASE}/stage/${FIELDNAME_IN_MET_FILEDIR_NAMES}" +elif [ "${FCST_OR_OBS}" = "OBS" ]; then + + OBS_INPUT_DIR="${OBS_DIR}" + fn_template=$(eval echo \${OBS_${OBTYPE}_FN_TEMPLATES[1]}) + OBS_INPUT_FN_TEMPLATE=$( eval echo ${fn_template} ) + + OUTPUT_BASE="${vx_output_basedir}${slash_cdate_or_null}${slash_obs_or_null}" + OUTPUT_DIR="${OUTPUT_BASE}/metprd/${MetplusToolName}_obs" + fn_template=$(eval echo \${OBS_${OBTYPE}_${FIELD_GROUP}_FN_TEMPLATE_PCPCOMBINE_OUTPUT}) + OUTPUT_FN_TEMPLATE=$( eval echo ${fn_template} ) + STAGING_DIR="${OUTPUT_BASE}/stage/${FIELDNAME_IN_MET_FILEDIR_NAMES}" + fi # 
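The fn_template assignments in the OBS branch above rely on a two-step eval pattern: the first eval resolves a variable whose name is assembled from OBTYPE and FIELD_GROUP, and the second expands any variable references embedded in that variable's value. A self-contained sketch with invented values:

OBTYPE="CCPA"; FIELD_GROUP="APCP"; ACCUM_HH="06"
# Hypothetical template variable of the kind selected at run time:
OBS_CCPA_APCP_FN_TEMPLATE_PCPCOMBINE_OUTPUT='ccpa.t{valid?fmt=%H}z.${ACCUM_HH}h.nc'
# Step 1: indirect expansion picks the variable named by OBTYPE/FIELD_GROUP.
fn_template=$(eval echo \${OBS_${OBTYPE}_${FIELD_GROUP}_FN_TEMPLATE_PCPCOMBINE_OUTPUT})
# fn_template now holds the raw template; ${ACCUM_HH} inside it is still unexpanded.
# Step 2: expand the variable references embedded in the value itself.
OUTPUT_FN_TEMPLATE=$( eval echo ${fn_template} )
echo "${OUTPUT_FN_TEMPLATE}"   # ccpa.t{valid?fmt=%H}z.06h.nc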
#----------------------------------------------------------------------- # -# Set the array of forecast hours for which to run the MET/METplus tool. +# Set the array of lead hours for which to run the MET/METplus tool. # #----------------------------------------------------------------------- # -if [ "${obs_or_fcst}" = "obs" ]; then - base_dir="${OBS_INPUT_DIR}" - fn_template="${OBS_INPUT_FN_TEMPLATE}" - num_missing_files_max="${NUM_MISSING_OBS_FILES_MAX}" -elif [ "${obs_or_fcst}" = "fcst" ]; then +vx_intvl="$((10#${ACCUM_HH}))" +VX_LEADHR_LIST=$( python3 $USHdir/set_leadhrs.py \ + --lhr_min="${vx_intvl}" \ + --lhr_max="${FCST_LEN_HRS}" \ + --lhr_intvl="${vx_intvl}" \ + --skip_check_files ) || \ + print_err_msg_exit "Call to set_leadhrs.py failed with return code: $?" +# +#----------------------------------------------------------------------- +# +# Check for the presence of files (either from observations or forecasts) +# needed to create the required accumulation given by ACCUM_HH. +# +#----------------------------------------------------------------------- +# +if [ "${FCST_OR_OBS}" = "FCST" ]; then base_dir="${FCST_INPUT_DIR}" fn_template="${FCST_INPUT_FN_TEMPLATE}" - num_missing_files_max="${NUM_MISSING_FCST_FILES_MAX}" + subintvl="${VX_FCST_OUTPUT_INTVL_HRS}" +elif [ "${FCST_OR_OBS}" = "OBS" ]; then + base_dir="${OBS_INPUT_DIR}" + fn_template="${OBS_INPUT_FN_TEMPLATE}" + subintvl="${OBS_AVAIL_INTVL_HRS}" fi +num_missing_files_max="0" +input_accum_hh=$(printf "%02d" ${subintvl}) +# +# Convert the list of hours at which the PcpCombine tool will be run to +# an array. This represents the hours at which each accumulation period +# ends. Then use it to check for the presence of all the files required +# to build each accumulation from its sub-accumulations (e.g. the 6-hour +# accumulation ending at lead hour 12, built from hourly files, requires +# the files for lead hours 7 through 12). +# +subintvl_end_hrs=($( echo ${VX_LEADHR_LIST} | $SED "s/,/ /g" )) +for hr_end in ${subintvl_end_hrs[@]}; do + hr_start=$((hr_end - vx_intvl + subintvl)) + print_info_msg " +Checking for the presence of files that will contribute to the ${vx_intvl}-hour +accumulation ending at lead hour ${hr_end} (relative to ${CDATE})... +" + python3 $USHdir/set_leadhrs.py \ + --date_init="${CDATE}" \ + --lhr_min="${hr_start}" \ + --lhr_max="${hr_end}" \ + --lhr_intvl="${subintvl}" \ + --base_dir="${base_dir}" \ + --fn_template="${fn_template}" \ + --num_missing_files_max="${num_missing_files_max}" \ + --time_lag="${time_lag%.*}" || \ + print_err_msg_exit "Call to set_leadhrs.py failed with return code: $?" +done -set_vx_fhr_list \ - cdate="${CDATE}" \ - fcst_len_hrs="${FCST_LEN_HRS}" \ - field="$VAR" \ - accum_hh="${ACCUM_HH}" \ - base_dir="${base_dir}" \ - fn_template="${fn_template}" \ - check_accum_contrib_files="TRUE" \ - num_missing_files_max="${num_missing_files_max}" \ - outvarname_fhr_list="FHR_LIST" +print_info_msg " +${MetplusToolName} will be run for the following lead hours (relative to ${CDATE}): + VX_LEADHR_LIST = ${VX_LEADHR_LIST} +" # #----------------------------------------------------------------------- # @@ -226,7 +273,7 @@ set_vx_fhr_list \ # #----------------------------------------------------------------------- # -mkdir_vrfy -p "${OUTPUT_DIR}" +mkdir -p "${OUTPUT_DIR}" # #----------------------------------------------------------------------- # @@ -252,15 +299,15 @@ export LOGDIR # #----------------------------------------------------------------------- # -# Do not run METplus if there isn't at least one valid forecast hour for -# which to run it. +# Do not run METplus if there isn't at least one lead hour for which to +# run it.
# #----------------------------------------------------------------------- # -if [ -z "${FHR_LIST}" ]; then +if [ -z "${VX_LEADHR_LIST}" ]; then print_err_msg_exit "\ -The list of forecast hours for which to run METplus is empty: - FHR_LIST = [${FHR_LIST}]" +The list of lead hours for which to run METplus is empty: + VX_LEADHR_LIST = [${VX_LEADHR_LIST}]" fi # #----------------------------------------------------------------------- @@ -273,23 +320,18 @@ fi # # First, set the base file names. # -metplus_config_tmpl_fn="${MetplusToolName}_${obs_or_fcst}" -metplus_config_fn="${metplus_config_tmpl_fn}_${FIELDNAME_IN_MET_FILEDIR_NAMES}${ENSMEM_INDX:+_${ensmem_name}}" -metplus_log_fn="${metplus_config_fn}_$CDATE" -# -# If operating on observation files, append the cycle date to the name -# of the configuration file because in this case, the output files from -# METplus are not placed under cycle directories (so another method is -# necessary to associate the configuration file with the cycle for which -# it is used). -# -if [ "${obs_or_fcst}" = "obs" ]; then - metplus_config_fn="${metplus_log_fn}" +metplus_config_tmpl_fn="${MetplusToolName}" +if [ "${FCST_OR_OBS}" = "FCST" ]; then + suffix="${ENSMEM_INDX:+_${ensmem_name}}" +elif [ "${FCST_OR_OBS}" = "OBS" ]; then + suffix="_${OBTYPE}" fi +metplus_config_fn="${metplus_config_tmpl_fn}_$(echo_lowercase ${FCST_OR_OBS})_${FIELDNAME_IN_MET_FILEDIR_NAMES}${suffix}" +metplus_log_fn="${metplus_config_fn}_$CDATE" # # Add prefixes and suffixes (extensions) to the base file names. # -metplus_config_tmpl_fn="${metplus_config_tmpl_fn}_${field}.conf" +metplus_config_tmpl_fn="${metplus_config_tmpl_fn}.conf" metplus_config_fn="${metplus_config_fn}.conf" metplus_log_fn="metplus.log.${metplus_log_fn}" # @@ -320,16 +362,14 @@ settings="\ # Date and forecast hour information. # 'cdate': '$CDATE' - 'fhr_list': '${FHR_LIST}' + 'vx_leadhr_list': '${VX_LEADHR_LIST}' # # Input and output directory/file information. # 'metplus_config_fn': '${metplus_config_fn:-}' 'metplus_log_fn': '${metplus_log_fn:-}' - 'obs_input_dir': '${OBS_INPUT_DIR:-}' - 'obs_input_fn_template': '${OBS_INPUT_FN_TEMPLATE:-}' - 'fcst_input_dir': '${FCST_INPUT_DIR:-}' - 'fcst_input_fn_template': '${FCST_INPUT_FN_TEMPLATE:-}' + 'input_dir': '${FCST_INPUT_DIR:-${OBS_INPUT_DIR}}' + 'input_fn_template': '${FCST_INPUT_FN_TEMPLATE:-${OBS_INPUT_FN_TEMPLATE}}' 'output_base': '${OUTPUT_BASE}' 'output_dir': '${OUTPUT_DIR}' 'output_fn_template': '${OUTPUT_FN_TEMPLATE:-}' @@ -349,21 +389,25 @@ settings="\ 'fieldname_in_met_output': '${FIELDNAME_IN_MET_OUTPUT}' 'fieldname_in_met_filedir_names': '${FIELDNAME_IN_MET_FILEDIR_NAMES}' 'obtype': '${OBTYPE}' - 'accum_hh': '${ACCUM_HH:-}' + 'FCST_OR_OBS': '${FCST_OR_OBS}' + 'input_accum_hh': '${input_accum_hh}' + 'output_accum_hh': '${ACCUM_HH:-}' 'accum_no_pad': '${ACCUM_NO_PAD:-}' - 'field_thresholds': '${FIELD_THRESHOLDS:-}' + 'metplus_templates_dir': '${METPLUS_CONF:-}' + 'input_field_group': '${FIELD_GROUP:-}' + 'input_level_fcst': '${FCST_LEVEL:-}' + 'input_thresh_fcst': '${FCST_THRESH:-}' " + # Render the template to create a METplus configuration file tmpfile=$( $READLINK -f "$(mktemp ./met_plus_settings.XXXXXX.yaml)") -cat > $tmpfile << EOF -$settings -EOF - +printf "%s" "$settings" > "$tmpfile" uw template render \ -i ${metplus_config_tmpl_fp} \ -o ${metplus_config_fp} \ --verbose \ - --values-file "${tmpfile}" + --values-file "${tmpfile}" \ + --search-path "/" err=$? 
rm $tmpfile diff --git a/scripts/exregional_run_post.sh b/scripts/exregional_run_post.sh index 60f87c3eaf..3f0ca93df9 100755 --- a/scripts/exregional_run_post.sh +++ b/scripts/exregional_run_post.sh @@ -1,5 +1,62 @@ #!/usr/bin/env bash +# +#----------------------------------------------------------------------- +# +# The ex-script that runs UPP. +# +# Run-time environment variables: +# +# CDATE +# COMOUT +# DATA_FHR +# DBNROOT +# ENSMEM_INDX +# GLOBAL_VAR_DEFNS_FP +# NET +# PDY +# REDIRECT_OUT_ERR +# SENDDBN +# +# Experiment variables +# +# user: +# EXECdir +# MACHINE +# PARMdir +# RUN_ENVIR +# USHdir +# +# platform: +# PRE_TASK_CMDS +# RUN_CMD_POST +# +# workflow: +# VERBOSE +# +# task_run_fcst: +# DT_ATMOS +# +# task_run_post: +# CUSTOM_POST_CONFIG_FP +# KMP_AFFINITY_RUN_POST +# OMP_NUM_THREADS_RUN_POST +# OMP_STACKSIZE_RUN_POST +# NUMX +# POST_OUTPUT_DOMAIN_NAME +# SUB_HOURLY_POST +# USE_CUSTOM_POST_CONFIG_FILE +# +# global: +# CRTM_DIR +# USE_CRTM +# +# cpl_aqm_parm: +# CPL_AQM +# +#----------------------------------------------------------------------- +# + # #----------------------------------------------------------------------- # @@ -8,7 +65,10 @@ #----------------------------------------------------------------------- # . $USHdir/source_util_funcs.sh -source_config_for_task "task_run_post" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow global cpl_aqm_parm \ + task_run_fcst task_run_post ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # @@ -80,9 +140,9 @@ fi # #----------------------------------------------------------------------- # -rm_vrfy -f fort.* -cp_vrfy ${PARMdir}/upp/nam_micro_lookup.dat ./eta_micro_lookup.dat -if [ ${USE_CUSTOM_POST_CONFIG_FILE} = "TRUE" ]; then +rm -f fort.* +cp ${PARMdir}/upp/nam_micro_lookup.dat ./eta_micro_lookup.dat +if [ $(boolify ${USE_CUSTOM_POST_CONFIG_FILE}) = "TRUE" ]; then post_config_fp="${CUSTOM_POST_CONFIG_FP}" print_info_msg " ==================================================================== @@ -92,7 +152,7 @@ to the temporary work directory (DATA_FHR): DATA_FHR = \"${DATA_FHR}\" ====================================================================" else - if [ "${CPL_AQM}" = "TRUE" ]; then + if [ $(boolify "${CPL_AQM}") = "TRUE" ]; then post_config_fp="${PARMdir}/upp/postxconfig-NT-AQM.txt" else post_config_fp="${PARMdir}/upp/postxconfig-NT-fv3lam.txt" @@ -105,18 +165,18 @@ temporary work directory (DATA_FHR): DATA_FHR = \"${DATA_FHR}\" ====================================================================" fi -cp_vrfy ${post_config_fp} ./postxconfig-NT.txt -cp_vrfy ${PARMdir}/upp/params_grib2_tbl_new . -if [ ${USE_CRTM} = "TRUE" ]; then - cp_vrfy ${CRTM_DIR}/Nalli.IRwater.EmisCoeff.bin ./ - cp_vrfy ${CRTM_DIR}/FAST*.bin ./ - cp_vrfy ${CRTM_DIR}/NPOESS.IRland.EmisCoeff.bin ./ - cp_vrfy ${CRTM_DIR}/NPOESS.IRsnow.EmisCoeff.bin ./ - cp_vrfy ${CRTM_DIR}/NPOESS.IRice.EmisCoeff.bin ./ - cp_vrfy ${CRTM_DIR}/AerosolCoeff.bin ./ - cp_vrfy ${CRTM_DIR}/CloudCoeff.bin ./ - cp_vrfy ${CRTM_DIR}/*.SpcCoeff.bin ./ - cp_vrfy ${CRTM_DIR}/*.TauCoeff.bin ./ +cp ${post_config_fp} ./postxconfig-NT.txt +cp ${PARMdir}/upp/params_grib2_tbl_new . 
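A pattern repeated throughout this and the following scripts: bare string comparisons such as [ ${USE_CRTM} = "TRUE" ] are wrapped in $(boolify ...). boolify itself comes from the SRW bash utilities pulled in by source_util_funcs.sh; the stand-in below is only a sketch of the normalization it presumably performs, so that mixed-case config values like True or yes compare cleanly:

# Illustrative stand-in only, not the real SRW implementation.
boolify_sketch() {
  case "$(echo "$1" | tr '[:upper:]' '[:lower:]')" in
    true|yes) echo "TRUE" ;;
    false|no) echo "FALSE" ;;
    *)        echo "FALSE" ;;  # the real boolify may instead flag an error
  esac
}
USE_CRTM="True"
if [ "$(boolify_sketch "${USE_CRTM}")" = "TRUE" ]; then
  echo "CRTM fix files would be copied"
fi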
+if [ $(boolify ${USE_CRTM}) = "TRUE" ]; then + cp ${CRTM_DIR}/Nalli.IRwater.EmisCoeff.bin ./ + cp ${CRTM_DIR}/FAST*.bin ./ + cp ${CRTM_DIR}/NPOESS.IRland.EmisCoeff.bin ./ + cp ${CRTM_DIR}/NPOESS.IRsnow.EmisCoeff.bin ./ + cp ${CRTM_DIR}/NPOESS.IRice.EmisCoeff.bin ./ + cp ${CRTM_DIR}/AerosolCoeff.bin ./ + cp ${CRTM_DIR}/CloudCoeff.bin ./ + cp ${CRTM_DIR}/*.SpcCoeff.bin ./ + cp ${CRTM_DIR}/*.TauCoeff.bin ./ print_info_msg " ==================================================================== Copying the external CRTM fix files from CRTM_DIR to the temporary @@ -155,7 +215,7 @@ hh=${cyc} # must be set to a null string. # mnts_secs_str="" -if [ "${SUB_HOURLY_POST}" = "TRUE" ]; then +if [ $(boolify "${SUB_HOURLY_POST}") = "TRUE" ]; then if [ ${fhr}${fmn} = "00000" ]; then mnts_secs_str=":"$( $DATE_UTIL --utc --date "${yyyymmdd} ${hh} UTC + ${DT_ATMOS} seconds" "+%M:%S" ) else @@ -185,7 +245,7 @@ post_mn=${post_time:10:2} # # Create the input namelist file to the post-processor executable. # -if [ "${CPL_AQM}" = "TRUE" ]; then +if [ $(boolify "${CPL_AQM}") = "TRUE" ]; then post_itag_add="aqf_on=.true.," else post_itag_add="" @@ -270,10 +330,10 @@ post_renamed_fn_suffix="f${fhr}${post_mn_or_null}.${POST_OUTPUT_DOMAIN_NAME}.gri # generates (i.e. "...prslev..." and "...natlev..." files) and move, # rename, and create symlinks to them. # -cd_vrfy "${COMOUT}" +cd "${COMOUT}" basetime=$( $DATE_UTIL --date "$yyyymmdd $hh" +%y%j%H%M ) symlink_suffix="${dot_ensmem}.${basetime}f${fhr}${post_mn}" -if [ "${CPL_AQM}" = "TRUE" ]; then +if [ $(boolify "${CPL_AQM}") = "TRUE" ]; then fids=( "cmaq" ) else fids=( "prslev" "natlev" ) @@ -282,19 +342,17 @@ for fid in "${fids[@]}"; do FID=$(echo_uppercase $fid) post_orig_fn="${FID}.${post_fn_suffix}" post_renamed_fn="${NET}.${cycle}${dot_ensmem}.${fid}.${post_renamed_fn_suffix}" - mv_vrfy ${DATA_FHR}/${post_orig_fn} ${post_renamed_fn} + mv ${DATA_FHR}/${post_orig_fn} ${post_renamed_fn} if [ $RUN_ENVIR != "nco" ]; then - create_symlink_to_file target="${post_renamed_fn}" \ - symlink="${FID}${symlink_suffix}" \ - relative="TRUE" + create_symlink_to_file ${post_renamed_fn} ${FID}${symlink_suffix} TRUE fi # DBN alert - if [ $SENDDBN = "TRUE" ]; then + if [ "$SENDDBN" = "TRUE" ]; then $DBNROOT/bin/dbn_alert MODEL rrfs_post ${job} ${COMOUT}/${post_renamed_fn} fi done -rm_vrfy -rf ${DATA_FHR} +rm -rf ${DATA_FHR} # #----------------------------------------------------------------------- diff --git a/scripts/exregional_run_prdgen.sh b/scripts/exregional_run_prdgen.sh index 8fc72dff1c..5baa779821 100755 --- a/scripts/exregional_run_prdgen.sh +++ b/scripts/exregional_run_prdgen.sh @@ -8,7 +8,11 @@ #----------------------------------------------------------------------- # . 
$USHdir/source_util_funcs.sh -source_config_for_task "task_run_prdgen|task_run_post" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow nco global verification cpl_aqm_parm \ + constants fixed_files grid_params \ + task_run_post task_run_prdgen ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # @@ -166,7 +170,7 @@ net4=$(echo ${NET:0:4} | tr '[:upper:]' '[:lower:]') for leveltype in prslev natlev ififip testbed do if [ -f ${COMOUT}/${NET}.${cycle}${dot_ensmem}.${leveltype}.f${fhr}.${POST_OUTPUT_DOMAIN_NAME}.grib2 ]; then - ln_vrfy -sf --relative ${COMOUT}/${NET}.${cycle}${dot_ensmem}.${leveltype}.f${fhr}.${POST_OUTPUT_DOMAIN_NAME}.grib2 ${COMOUT}/${net4}.${cycle}.${leveltype}.f${fhr}.${gridname}grib2 + ln -sf --relative ${COMOUT}/${NET}.${cycle}${dot_ensmem}.${leveltype}.f${fhr}.${POST_OUTPUT_DOMAIN_NAME}.grib2 ${COMOUT}/${net4}.${cycle}.${leveltype}.f${fhr}.${gridname}grib2 wgrib2 ${COMOUT}/${net4}.${cycle}.${leveltype}.f${fhr}.${gridname}grib2 -s > ${COMOUT}/${net4}.${cycle}.${leveltype}.f${fhr}.${gridname}grib2.idx fi done @@ -175,7 +179,7 @@ done # Remap to additional output grids if requested #----------------------------------------------- -if [ ${DO_PARALLEL_PRDGEN} == "TRUE" ]; then +if [ $(boolify ${DO_PARALLEL_PRDGEN}) = "TRUE" ]; then # # parallel run wgrib2 for product generation # @@ -184,7 +188,7 @@ if [ ${PREDEF_GRID_NAME} = "RRFS_NA_3km" ]; then DATA=$COMOUT DATAprdgen=$DATA/prdgen_${fhr} -mkdir_vrfy $DATAprdgen +mkdir $DATAprdgen wgrib2 ${COMOUT}/${NET}.${cycle}.prslev.f${fhr}.grib2 >& $DATAprdgen/prslevf${fhr}.txt @@ -223,7 +227,7 @@ for domain in ${domains[@]} do for task in $(seq ${tasks[count]}) do - mkdir_vrfy -p $DATAprdgen/prdgen_${domain}_${task} + mkdir -p $DATAprdgen/prdgen_${domain}_${task} echo "$SCRIPTSdir/exregional_run_prdgen_subpiece.sh $fhr $cyc $task $domain ${DATAprdgen} ${COMOUT} &" >> $DATAprdgen/poescript_${fhr} done count=$count+1 @@ -269,7 +273,7 @@ else # if [ ${#ADDNL_OUTPUT_GRIDS[@]} -gt 0 ]; then - cd_vrfy ${COMOUT} + cd ${COMOUT} grid_specs_130="lambert:265:25.000000 233.862000:451:13545.000000 16.281000:337:13545.000000" grid_specs_200="lambert:253:50.000000 285.720000:108:16232.000000 16.201000:94:16232.000000" @@ -289,7 +293,7 @@ if [ ${#ADDNL_OUTPUT_GRIDS[@]} -gt 0 ]; then eval grid_specs=\$grid_specs_${grid} subdir=${COMOUT}/${grid}_grid - mkdir_vrfy -p ${subdir}/${fhr} + mkdir -p ${subdir}/${fhr} bg_remap=${subdir}/${NET}.${cycle}${dot_ensmem}.${leveltype}.f${fhr}.${POST_OUTPUT_DOMAIN_NAME}.grib2 # Interpolate fields to new grid @@ -317,11 +321,11 @@ if [ ${#ADDNL_OUTPUT_GRIDS[@]} -gt 0 ]; then rm -f ${subdir}/${fhr}/tmp_${grid}.grib2 # Save to com directory - mkdir_vrfy -p ${COMOUT}/${grid}_grid - cp_vrfy ${bg_remap} ${COMOUT}/${grid}_grid/${NET}.${cycle}${dot_ensmem}.${leveltype}.f${fhr}.${POST_OUTPUT_DOMAIN_NAME}.grib2 + mkdir -p ${COMOUT}/${grid}_grid + cp ${bg_remap} ${COMOUT}/${grid}_grid/${NET}.${cycle}${dot_ensmem}.${leveltype}.f${fhr}.${POST_OUTPUT_DOMAIN_NAME}.grib2 if [[ -f ${COMOUT}/${grid}_grid/${NET}.${cycle}${dot_ensmem}.${leveltype}.f${fhr}.${POST_OUTPUT_DOMAIN_NAME}.grib2 ]]; then - ln_vrfy -fs --relative ${COMOUT}/${grid}_grid/${NET}.${cycle}${dot_ensmem}.${leveltype}.f${fhr}.${POST_OUTPUT_DOMAIN_NAME}.grib2 ${COMOUT}/${net4}.${cycle}.${leveltype}.f${fhr}.${gridname}grib2 + ln -fs --relative ${COMOUT}/${grid}_grid/${NET}.${cycle}${dot_ensmem}.${leveltype}.f${fhr}.${POST_OUTPUT_DOMAIN_NAME}.grib2 
${COMOUT}/${net4}.${cycle}.${leveltype}.f${fhr}.${gridname}grib2 wgrib2 ${COMOUT}/${net4}.${cycle}.${leveltype}.f${fhr}.${gridname}grib2 -s > ${COMOUT}/${net4}.${cycle}.${leveltype}.f${fhr}.${gridname}grib2.idx fi @@ -331,7 +335,7 @@ fi fi # block for parallel or series wgrib2 runs. -rm_vrfy -rf ${DATA_FHR} +rm -rf ${DATA_FHR} # #----------------------------------------------------------------------- # diff --git a/scripts/exsrw_aqm_ics.sh b/scripts/exsrw_aqm_ics.sh index efd833b092..4fd040e597 100755 --- a/scripts/exsrw_aqm_ics.sh +++ b/scripts/exsrw_aqm_ics.sh @@ -8,7 +8,10 @@ #----------------------------------------------------------------------- # . ${USHsrw}/source_util_funcs.sh -source_config_for_task "task_aqm_ics" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow nco global verification cpl_aqm_parm \ + constants fixed_files grid_params ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # diff --git a/scripts/exsrw_aqm_lbcs.sh b/scripts/exsrw_aqm_lbcs.sh index 93dc119ec2..7b3058ef34 100755 --- a/scripts/exsrw_aqm_lbcs.sh +++ b/scripts/exsrw_aqm_lbcs.sh @@ -8,7 +8,11 @@ #----------------------------------------------------------------------- # . ${USHsrw}/source_util_funcs.sh -source_config_for_task "task_get_extrn_lbcs|task_make_orog|task_make_lbcs|cpl_aqm_parm|task_aqm_lbcs" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow nco global verification cpl_aqm_parm \ + constants fixed_files grid_params \ + task_get_extrn_lbcs task_make_lbcs task_make_orog ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # @@ -100,7 +104,7 @@ for hr in 0 ${LBC_SPEC_FCST_HRS[@]}; do cp -p "${DATA_SHARE}/${aqm_lbcs_fn}" ${DATA} done -if [ "${DO_AQM_CHEM_LBCS}" = "TRUE" ]; then +if [ $(boolify "${DO_AQM_CHEM_LBCS}") = "TRUE" ]; then ext_lbcs_file="${AQM_LBCS_FILES}" chem_lbcs_fn=${ext_lbcs_file///${MM}} chem_lbcs_fp="${FIXaqm}/chemlbc/${chem_lbcs_fn}" @@ -141,7 +145,7 @@ fi # #----------------------------------------------------------------------- # -if [ "${DO_AQM_GEFS_LBCS}" = "TRUE" ]; then +if [ $(boolify "${DO_AQM_GEFS_LBCS}") = "TRUE" ]; then AQM_GEFS_FILE_CYC=${AQM_GEFS_FILE_CYC:-"${HH}"} AQM_GEFS_FILE_CYC=$( printf "%02d" "${AQM_GEFS_FILE_CYC}" ) @@ -153,7 +157,7 @@ if [ "${DO_AQM_GEFS_LBCS}" = "TRUE" ]; then fi aqm_mofile_fn="${AQM_GEFS_FILE_PREFIX}.t${AQM_GEFS_FILE_CYC}z.atmf" - if [ "${DO_REAL_TIME}" = "TRUE" ]; then + if [ $(boolify "${DO_REAL_TIME}") = "TRUE" ]; then aqm_mofile_fp="${COMINgefs}/gefs.${YYYYMMDD}/${AQM_GEFS_FILE_CYC}/chem/sfcsig/${aqm_mofile_fn}" else aqm_mofile_fp="${COMINgefs}/${YYYYMMDD}/${AQM_GEFS_FILE_CYC}/${aqm_mofile_fn}" diff --git a/scripts/exsrw_bias_correction_o3.sh b/scripts/exsrw_bias_correction_o3.sh index 1ef4012528..343e7e6f2b 100755 --- a/scripts/exsrw_bias_correction_o3.sh +++ b/scripts/exsrw_bias_correction_o3.sh @@ -8,7 +8,11 @@ #----------------------------------------------------------------------- # . 
${USHsrw}/source_util_funcs.sh -source_config_for_task "cpl_aqm_parm|task_bias_correction_o3" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow nco global verification cpl_aqm_parm \ + constants fixed_files grid_params \ + task_bias_correction_o3 ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # @@ -199,7 +203,7 @@ POST_STEP cp ${DATA}/out/ozone/${yyyy}/*nc ${DATA}/data/bcdata.${yyyymm}/interpolated/ozone/${yyyy} -if [ "${DO_AQM_SAVE_AIRNOW_HIST}" = "TRUE" ]; then +if [ $(boolify "${DO_AQM_SAVE_AIRNOW_HIST}") = "TRUE" ]; then mkdir -p ${COMOUTbicor}/bcdata.${yyyymm}/interpolated/ozone/${yyyy} cp ${DATA}/out/ozone/${yyyy}/*nc ${COMOUTbicor}/bcdata.${yyyymm}/interpolated/ozone/${yyyy} diff --git a/scripts/exsrw_bias_correction_pm25.sh b/scripts/exsrw_bias_correction_pm25.sh index ae1a2d6f65..70cf512589 100755 --- a/scripts/exsrw_bias_correction_pm25.sh +++ b/scripts/exsrw_bias_correction_pm25.sh @@ -8,7 +8,11 @@ #----------------------------------------------------------------------- # . ${USHsrw}/source_util_funcs.sh -source_config_for_task "cpl_aqm_parm|task_bias_correction_pm25" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow nco global verification cpl_aqm_parm \ + constants fixed_files grid_params \ + task_bias_correction_pm25 ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # @@ -198,7 +202,7 @@ POST_STEP cp ${DATA}/out/pm25/${yyyy}/*nc ${DATA}/data/bcdata.${yyyymm}/interpolated/pm25/${yyyy} -if [ "${DO_AQM_SAVE_AIRNOW_HIST}" = "TRUE" ]; then +if [ $(boolify "${DO_AQM_SAVE_AIRNOW_HIST}") = "TRUE" ]; then mkdir -p ${COMOUTbicor}/bcdata.${yyyymm}/interpolated/pm25/${yyyy} cp ${DATA}/out/pm25/${yyyy}/*nc ${COMOUTbicor}/bcdata.${yyyymm}/interpolated/pm25/${yyyy} fi diff --git a/scripts/exsrw_fire_emission.sh b/scripts/exsrw_fire_emission.sh index cb44c99d8d..3ae78422f5 100755 --- a/scripts/exsrw_fire_emission.sh +++ b/scripts/exsrw_fire_emission.sh @@ -8,7 +8,10 @@ #----------------------------------------------------------------------- # . ${USHsrw}/source_util_funcs.sh -source_config_for_task "cpl_aqm_parm|task_fire_emission" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow nco global verification cpl_aqm_parm \ + constants fixed_files grid_params ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # diff --git a/scripts/exsrw_nexus_emission.sh b/scripts/exsrw_nexus_emission.sh index a5769a6483..0fa8c48754 100755 --- a/scripts/exsrw_nexus_emission.sh +++ b/scripts/exsrw_nexus_emission.sh @@ -8,7 +8,11 @@ #----------------------------------------------------------------------- # . ${USHsrw}/source_util_funcs.sh -source_config_for_task "cpl_aqm_parm|task_nexus_emission" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow nco global verification cpl_aqm_parm \ + constants fixed_files grid_params \ + task_nexus_emission ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # diff --git a/scripts/exsrw_nexus_gfs_sfc.sh b/scripts/exsrw_nexus_gfs_sfc.sh index 103842d46f..cadc27b89c 100755 --- a/scripts/exsrw_nexus_gfs_sfc.sh +++ b/scripts/exsrw_nexus_gfs_sfc.sh @@ -8,7 +8,10 @@ #----------------------------------------------------------------------- # . 
${USHsrw}/source_util_funcs.sh -source_config_for_task "cpl_aqm_parm|task_nexus_gfs_sfc" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow nco global verification cpl_aqm_parm \ + constants fixed_files grid_params ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # @@ -73,7 +76,7 @@ fcst_len_hrs_offset=$(( FCST_LEN_HRS + TIME_OFFSET_HRS )) GFS_SFC_TAR_DIR="${NEXUS_GFS_SFC_ARCHV_DIR}/rh${YYYY}/${YYYYMM}/${YYYYMMDD}" GFS_SFC_TAR_SUB_DIR="gfs.${YYYYMMDD}/${HH}/atmos" -if [ "${DO_REAL_TIME}" = "TRUE" ]; then +if [ $(boolify "${DO_REAL_TIME}") = "TRUE" ]; then GFS_SFC_LOCAL_DIR="${COMINgfs}/${GFS_SFC_TAR_SUB_DIR}" else GFS_SFC_LOCAL_DIR="${NEXUS_GFS_SFC_DIR}/${GFS_SFC_TAR_SUB_DIR}" diff --git a/scripts/exsrw_nexus_post_split.sh b/scripts/exsrw_nexus_post_split.sh index 517893b5e5..151e0a2ea5 100755 --- a/scripts/exsrw_nexus_post_split.sh +++ b/scripts/exsrw_nexus_post_split.sh @@ -8,7 +8,10 @@ #----------------------------------------------------------------------- # . ${USHsrw}/source_util_funcs.sh -source_config_for_task "cpl_aqm_parm|task_nexus_post_split" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow nco global verification cpl_aqm_parm \ + constants fixed_files grid_params ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # diff --git a/scripts/exsrw_point_source.sh b/scripts/exsrw_point_source.sh index 7acbc946f7..4cd693506c 100755 --- a/scripts/exsrw_point_source.sh +++ b/scripts/exsrw_point_source.sh @@ -8,7 +8,11 @@ #----------------------------------------------------------------------- # . ${USHsrw}/source_util_funcs.sh -source_config_for_task "task_run_fcst|cpl_aqm_parm|task_point_source" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow nco global verification cpl_aqm_parm \ + constants fixed_files grid_params \ + task_point_source task_run_fcst ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # diff --git a/scripts/exsrw_post_stat_o3.sh b/scripts/exsrw_post_stat_o3.sh index 6fa1db7f8f..dfcdd24ffa 100755 --- a/scripts/exsrw_post_stat_o3.sh +++ b/scripts/exsrw_post_stat_o3.sh @@ -8,7 +8,11 @@ #----------------------------------------------------------------------- # . ${USHsrw}/source_util_funcs.sh -source_config_for_task "cpl_aqm_parm|task_run_post|task_post_stat_o3" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow nco global verification cpl_aqm_parm \ + constants fixed_files grid_params \ + task_run_post ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # diff --git a/scripts/exsrw_post_stat_pm25.sh b/scripts/exsrw_post_stat_pm25.sh index ea7c1717c3..bdbf1fcbc5 100755 --- a/scripts/exsrw_post_stat_pm25.sh +++ b/scripts/exsrw_post_stat_pm25.sh @@ -8,7 +8,11 @@ #----------------------------------------------------------------------- # . 
${USHsrw}/source_util_funcs.sh -source_config_for_task "cpl_aqm_parm|task_run_post|task_post_stat_pm25" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow global verification cpl_aqm_parm \ + constants fixed_files grid_params \ + task_run_post ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # diff --git a/scripts/exsrw_pre_post_stat.sh b/scripts/exsrw_pre_post_stat.sh index dfb4c2cf9e..f6ec6a9a7d 100755 --- a/scripts/exsrw_pre_post_stat.sh +++ b/scripts/exsrw_pre_post_stat.sh @@ -8,7 +8,11 @@ #----------------------------------------------------------------------- # . ${USHsrw}/source_util_funcs.sh -source_config_for_task "task_pre_post|task_run_post" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow global verification cpl_aqm_parm \ + constants fixed_files grid_params \ + task_run_post ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # diff --git a/sorc/build_settings_template.yaml b/sorc/build_settings_template.yaml new file mode 100644 index 0000000000..792ee540c4 --- /dev/null +++ b/sorc/build_settings_template.yaml @@ -0,0 +1,23 @@ +# UFS Short Range Weather App Configuration Summary +#=================================================== + +# General +#--------- +SRW_Version: @SRWA_VERSION@ +SRW_hash: @GIT_HASH@ +Machine: @machine@ +Configured_On: @CONFIG_DATE@ +Host_System: @host_cpu@-@host_vendor@-@host_os@ +Build_Directory: @abs_top_builddir@ +Install_Prefix: @prefix@ +Application: @application@ +# Compiling Options +#------------------- +C_Compiler: @CC_VERSION@ +CFLAGS: @CFLAGS@ +CPPFLAGS: @CPPFLAGS@ +LDFLAGS: @LDFLAGS@ +Shared_Library: @enable_shared@ +Static_Library: @enable_static@ +Extra_libraries: @LIBS@ + diff --git a/ufs_srweather_app_meta.h.in b/sorc/ufs_srweather_app_meta.h.in similarity index 100% rename from ufs_srweather_app_meta.h.in rename to sorc/ufs_srweather_app_meta.h.in diff --git a/tests/WE2E/WE2E_summary.py b/tests/WE2E/WE2E_summary.py index de478a0f38..147380eb4e 100755 --- a/tests/WE2E/WE2E_summary.py +++ b/tests/WE2E/WE2E_summary.py @@ -6,7 +6,13 @@ sys.path.append("../../ush") -from python_utils import load_config_file +try: + from python_utils import load_config_file +except ModuleNotFoundError: + print("\n\nERROR: Could not load python utilities.") + print('Note that this script can only be run in the SRW App from the directory:') + print("ufs-srweather-app/tests/WE2E\n\n") + raise from check_python_version import check_python_version @@ -14,8 +20,13 @@ def setup_logging(debug: bool = False) -> None: """ - Sets up logging, printing high-priority (INFO and higher) messages to screen, and printing all - messages with detailed timing and routine info in the specified text file. + Sets up logging to print high-priority (INFO and higher) messages to the console and to print all + messages with detailed timing and routine info to the specified text file.
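A note on the ex-script refactor in the hunks above: every scripts/exsrw_*.sh now sources the experiment configuration one YAML section at a time through source_yaml, and routes boolean switches through boolify before comparing against "TRUE". Both helpers are the app's own, pulled in via ush/source_util_funcs.sh; the sketch below only illustrates the pattern, with a stand-in boolify whose exact behavior is an assumption:

#!/usr/bin/env bash
# Illustration only -- mirrors the pattern used in the refactored ex-scripts.
# Stand-in for the app's boolify helper (assumed behavior): normalize common
# truthy/falsy spellings to the canonical "TRUE"/"FALSE" strings.
boolify() {
  case "$(echo "$1" | tr '[:upper:]' '[:lower:]')" in
    true|yes|1)  echo "TRUE" ;;
    false|no|0)  echo "FALSE" ;;
    *)           echo "$1" ;;
  esac
}

DO_AQM_SAVE_AIRNOW_HIST="true"   # lowercase, as it may appear in YAML
if [ "$(boolify "${DO_AQM_SAVE_AIRNOW_HIST}")" = "TRUE" ]; then
  echo "AirNow history would be archived"
fi

The point of the wrapper is robustness: a bare string test like [ "${DO_REAL_TIME}" = "TRUE" ] silently fails for true or yes, both of which YAML allows.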
+ + Args: + debug (bool): Set to True to print more verbose output to the console + Returns: + None """ logging.getLogger().setLevel(logging.DEBUG) @@ -63,9 +74,13 @@ def setup_logging(debug: bool = False) -> None: else: raise ValueError(f'Bad arguments; run {__file__} -h for more information') - # Calculate core hours and update yaml - expts_dict = calculate_core_hours(expts_dict) - write_monitor_file(yaml_file,expts_dict) + if expts_dict: + # Calculate core hours and update yaml + expts_dict = calculate_core_hours(expts_dict) + write_monitor_file(yaml_file,expts_dict) + + #Call function to print summary + print_WE2E_summary(expts_dict, args.debug) + else: + logging.error(f'No experiments found in provided directory {args.expt_dir}') - #Call function to print summary - print_WE2E_summary(expts_dict, args.debug) diff --git a/tests/WE2E/machine_suites/comprehensive b/tests/WE2E/machine_suites/comprehensive index 3af6ae0db4..8c42aa4599 100644 --- a/tests/WE2E/machine_suites/comprehensive +++ b/tests/WE2E/machine_suites/comprehensive @@ -2,6 +2,11 @@ 2020_CAPE 2019_hurricane_barry 2019_halloween_storm +2019_hurricane_lorenzo +2019_memorial_day_heat_wave +2020_denver_radiation_inversion +2020_easter_storm +2020_jan_cold_blast community custom_ESGgrid custom_ESGgrid_Central_Asia_3km @@ -52,6 +57,7 @@ grid_RRFS_CONUScompact_13km_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta grid_RRFS_CONUScompact_25km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16 grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_HRRR_suite_HRRR grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta +grid_RRFS_CONUScompact_25km_ics_RRFS_lbcs_RRFS_suite_RRFS_v1beta grid_RRFS_CONUScompact_3km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16 grid_RRFS_CONUScompact_3km_ics_HRRR_lbcs_RAP_suite_HRRR grid_RRFS_CONUScompact_3km_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta @@ -69,3 +75,15 @@ MET_verification_only_vx pregen_grid_orog_sfc_climo specify_EXTRN_MDL_SYSBASEDIR_ICS_LBCS specify_template_filenames +vx-det_long-fcst_custom-vx-config_aiml-fourcastnet +vx-det_long-fcst_custom-vx-config_aiml-graphcast +vx-det_long-fcst_custom-vx-config_aiml-panguweather +vx-det_long-fcst_custom-vx-config_gfs +vx-det_long-fcst_winter-wx_SRW-staged +vx-det_multicyc_fcst-overlap_ncep-hrrr +vx-det_multicyc_first-obs-00z_ncep-hrrr +vx-det_multicyc_last-obs-00z_ncep-hrrr +vx-det_multicyc_long-fcst-no-overlap_nssl-mpas +vx-det_multicyc_long-fcst-overlap_nssl-mpas +vx-det_multicyc_no-00z-obs_nssl-mpas +vx-det_multicyc_no-fcst-overlap_ncep-hrrr diff --git a/tests/WE2E/machine_suites/comprehensive.derecho b/tests/WE2E/machine_suites/comprehensive.derecho index 9ce8d067ac..b88ec31bba 100644 --- a/tests/WE2E/machine_suites/comprehensive.derecho +++ b/tests/WE2E/machine_suites/comprehensive.derecho @@ -1,12 +1,17 @@ -2020_CAD -2020_CAPE -2019_hurricane_barry -2019_halloween_storm +#2020_CAD +#2020_CAPE +#2019_hurricane_barry +#2019_halloween_storm +#2019_hurricane_lorenzo +#2019_memorial_day_heat_wave +#2020_denver_radiation_inversion +#2020_easter_storm +#2020_jan_cold_blast community custom_ESGgrid -#custom_ESGgrid_Central_Asia_3km +custom_ESGgrid_Central_Asia_3km custom_ESGgrid_IndianOcean_6km -#custom_ESGgrid_NewZealand_3km +custom_ESGgrid_NewZealand_3km custom_ESGgrid_Peru_12km custom_ESGgrid_SF_1p1km custom_GFDLgrid__GFDLgrid_USE_NUM_CELLS_IN_FILENAMES_eq_FALSE @@ -15,8 +20,8 @@ deactivate_tasks get_from_AWS_ics_GEFS_lbcs_GEFS_fmt_grib2_2022040400_ensemble_2mems get_from_NOMADS_ics_FV3GFS_lbcs_FV3GFS grid_CONUS_25km_GFDLgrid_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16 
-#grid_CONUS_3km_GFDLgrid_ics_FV3GFS_lbcs_FV3GFS_suite_RRFS_v1beta -#grid_RRFS_AK_13km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16_plot +grid_CONUS_3km_GFDLgrid_ics_FV3GFS_lbcs_FV3GFS_suite_RRFS_v1beta +grid_RRFS_AK_13km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16_plot grid_RRFS_AK_3km_ics_FV3GFS_lbcs_FV3GFS_suite_HRRR grid_RRFS_CONUS_13km_ics_FV3GFS_lbcs_FV3GFS_suite_RAP grid_RRFS_CONUS_13km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16_plot @@ -43,6 +48,7 @@ grid_RRFS_CONUScompact_13km_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta grid_RRFS_CONUScompact_25km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16 grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_HRRR_suite_HRRR grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta +grid_RRFS_CONUScompact_25km_ics_RRFS_lbcs_RRFS_suite_RRFS_v1beta grid_RRFS_CONUScompact_3km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16 grid_RRFS_CONUScompact_3km_ics_HRRR_lbcs_RAP_suite_HRRR grid_RRFS_CONUScompact_3km_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta diff --git a/tests/WE2E/machine_suites/comprehensive.cheyenne b/tests/WE2E/machine_suites/comprehensive.jet similarity index 66% rename from tests/WE2E/machine_suites/comprehensive.cheyenne rename to tests/WE2E/machine_suites/comprehensive.jet index 96792e37b0..0e15479feb 100644 --- a/tests/WE2E/machine_suites/comprehensive.cheyenne +++ b/tests/WE2E/machine_suites/comprehensive.jet @@ -1,6 +1,16 @@ +2020_CAD +2020_CAPE +2019_hurricane_barry +2019_halloween_storm +2019_hurricane_lorenzo +2019_memorial_day_heat_wave +2020_denver_radiation_inversion +2020_easter_storm +2020_jan_cold_blast community custom_ESGgrid custom_ESGgrid_Central_Asia_3km +custom_ESGgrid_Great_Lakes_snow_8km custom_ESGgrid_IndianOcean_6km custom_ESGgrid_NewZealand_3km custom_ESGgrid_Peru_12km @@ -8,6 +18,16 @@ custom_ESGgrid_SF_1p1km custom_GFDLgrid__GFDLgrid_USE_NUM_CELLS_IN_FILENAMES_eq_FALSE custom_GFDLgrid deactivate_tasks +#get_from_AWS_ics_GEFS_lbcs_GEFS_fmt_grib2_2022040400_ensemble_2mems +get_from_HPSS_ics_FV3GFS_lbcs_FV3GFS_fmt_grib2_2019061200 +get_from_HPSS_ics_FV3GFS_lbcs_FV3GFS_fmt_nemsio_2019061200 +get_from_HPSS_ics_FV3GFS_lbcs_FV3GFS_fmt_nemsio_2021032018 +get_from_HPSS_ics_FV3GFS_lbcs_FV3GFS_fmt_netcdf_2022060112_48h +#get_from_HPSS_ics_GDAS_lbcs_GDAS_fmt_netcdf_2022040400_ensemble_2mems +get_from_HPSS_ics_GSMGFS_lbcs_GSMGFS +get_from_HPSS_ics_HRRR_lbcs_RAP +get_from_HPSS_ics_RAP_lbcs_RAP +get_from_NOMADS_ics_FV3GFS_lbcs_FV3GFS grid_CONUS_25km_GFDLgrid_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16 grid_CONUS_3km_GFDLgrid_ics_FV3GFS_lbcs_FV3GFS_suite_RRFS_v1beta grid_RRFS_AK_13km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16_plot @@ -26,17 +46,18 @@ grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_RAP_suite_RAP grid_RRFS_CONUS_25km_ics_GSMGFS_lbcs_GSMGFS_suite_GFS_v15p2 grid_RRFS_CONUS_25km_ics_NAM_lbcs_NAM_suite_GFS_v16 grid_RRFS_CONUS_25km_ics_NAM_lbcs_NAM_suite_RRFS_v1beta -grid_RRFS_CONUS_3km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v15p2 -grid_RRFS_CONUS_3km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v15_thompson_mynn_lam3km -grid_RRFS_CONUS_3km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16 -grid_RRFS_CONUS_3km_ics_FV3GFS_lbcs_FV3GFS_suite_HRRR -grid_RRFS_CONUS_3km_ics_FV3GFS_lbcs_FV3GFS_suite_RRFS_v1beta +#grid_RRFS_CONUS_3km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v15p2 +#grid_RRFS_CONUS_3km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v15_thompson_mynn_lam3km +#grid_RRFS_CONUS_3km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16 +#grid_RRFS_CONUS_3km_ics_FV3GFS_lbcs_FV3GFS_suite_HRRR +#grid_RRFS_CONUS_3km_ics_FV3GFS_lbcs_FV3GFS_suite_RRFS_v1beta grid_RRFS_CONUScompact_13km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16 
grid_RRFS_CONUScompact_13km_ics_HRRR_lbcs_RAP_suite_HRRR grid_RRFS_CONUScompact_13km_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta grid_RRFS_CONUScompact_25km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16 grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_HRRR_suite_HRRR grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta +grid_RRFS_CONUScompact_25km_ics_RRFS_lbcs_RRFS_suite_RRFS_v1beta grid_RRFS_CONUScompact_3km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16 grid_RRFS_CONUScompact_3km_ics_HRRR_lbcs_RAP_suite_HRRR grid_RRFS_CONUScompact_3km_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta @@ -46,7 +67,10 @@ grid_SUBCONUS_Ind_3km_ics_HRRR_lbcs_HRRR_suite_HRRR grid_SUBCONUS_Ind_3km_ics_HRRR_lbcs_RAP_suite_WoFS_v0 grid_SUBCONUS_Ind_3km_ics_NAM_lbcs_NAM_suite_GFS_v16 grid_SUBCONUS_Ind_3km_ics_RAP_lbcs_RAP_suite_RRFS_v1beta_plot +long_fcst MET_ensemble_verification_only_vx +MET_ensemble_verification_only_vx_time_lag +MET_ensemble_verification_winter_wx MET_verification_only_vx pregen_grid_orog_sfc_climo specify_EXTRN_MDL_SYSBASEDIR_ICS_LBCS diff --git a/tests/WE2E/machine_suites/comprehensive.noaacloud b/tests/WE2E/machine_suites/comprehensive.noaacloud index 23c0aa8456..c9bb96ae64 100644 --- a/tests/WE2E/machine_suites/comprehensive.noaacloud +++ b/tests/WE2E/machine_suites/comprehensive.noaacloud @@ -37,6 +37,7 @@ grid_RRFS_CONUScompact_13km_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta grid_RRFS_CONUScompact_25km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16 grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_HRRR_suite_HRRR grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta +grid_RRFS_CONUScompact_25km_ics_RRFS_lbcs_RRFS_suite_RRFS_v1beta grid_RRFS_CONUScompact_3km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16 grid_RRFS_CONUScompact_3km_ics_HRRR_lbcs_RAP_suite_HRRR grid_RRFS_CONUScompact_3km_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta @@ -56,6 +57,11 @@ specify_template_filenames 2020_CAPE 2019_hurricane_barry 2019_halloween_storm +2019_hurricane_lorenzo +2019_memorial_day_heat_wave +2020_denver_radiation_inversion +2020_easter_storm +2020_jan_cold_blast get_from_AWS_ics_GEFS_lbcs_GEFS_fmt_grib2_2022040400_ensemble_2mems get_from_NOMADS_ics_FV3GFS_lbcs_FV3GFS long_fcst diff --git a/tests/WE2E/machine_suites/comprehensive.orion b/tests/WE2E/machine_suites/comprehensive.orion index 739b4fff8e..5930843582 100644 --- a/tests/WE2E/machine_suites/comprehensive.orion +++ b/tests/WE2E/machine_suites/comprehensive.orion @@ -2,6 +2,11 @@ 2020_CAPE 2019_hurricane_barry 2019_halloween_storm +2019_hurricane_lorenzo +2019_memorial_day_heat_wave +2020_denver_radiation_inversion +2020_easter_storm +2020_jan_cold_blast community custom_ESGgrid custom_ESGgrid_Central_Asia_3km @@ -43,6 +48,7 @@ grid_RRFS_CONUScompact_13km_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta grid_RRFS_CONUScompact_25km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16 grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_HRRR_suite_HRRR grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta +grid_RRFS_CONUScompact_25km_ics_RRFS_lbcs_RRFS_suite_RRFS_v1beta grid_RRFS_CONUScompact_3km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16 grid_RRFS_CONUScompact_3km_ics_HRRR_lbcs_RAP_suite_HRRR grid_RRFS_CONUScompact_3km_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta diff --git a/tests/WE2E/machine_suites/coverage.cheyenne b/tests/WE2E/machine_suites/coverage.cheyenne deleted file mode 100644 index 8f3c3ec78c..0000000000 --- a/tests/WE2E/machine_suites/coverage.cheyenne +++ /dev/null @@ -1,8 +0,0 @@ -custom_ESGgrid_IndianOcean_6km -grid_RRFS_CONUS_13km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16_plot -grid_RRFS_CONUS_25km_ics_NAM_lbcs_NAM_suite_GFS_v16 
-grid_RRFS_CONUScompact_13km_ics_HRRR_lbcs_RAP_suite_HRRR -grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta -grid_SUBCONUS_Ind_3km_ics_HRRR_lbcs_HRRR_suite_HRRR -pregen_grid_orog_sfc_climo -specify_template_filenames diff --git a/tests/WE2E/machine_suites/coverage.derecho b/tests/WE2E/machine_suites/coverage.derecho index c2a770672e..a10c865447 100644 --- a/tests/WE2E/machine_suites/coverage.derecho +++ b/tests/WE2E/machine_suites/coverage.derecho @@ -1,9 +1,11 @@ custom_ESGgrid_IndianOcean_6km grid_RRFS_CONUS_13km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16_plot +grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v15p2 grid_RRFS_CONUS_25km_ics_NAM_lbcs_NAM_suite_GFS_v16 +grid_RRFS_CONUS_3km_ics_FV3GFS_lbcs_FV3GFS_suite_RRFS_v1beta grid_RRFS_CONUScompact_13km_ics_HRRR_lbcs_RAP_suite_HRRR +grid_RRFS_CONUScompact_25km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16 grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta grid_SUBCONUS_Ind_3km_ics_HRRR_lbcs_HRRR_suite_HRRR pregen_grid_orog_sfc_climo specify_template_filenames -2019_hurricane_barry diff --git a/tests/WE2E/machine_suites/coverage.gaea b/tests/WE2E/machine_suites/coverage.gaea index e6aba6ea3d..970fdf4086 100644 --- a/tests/WE2E/machine_suites/coverage.gaea +++ b/tests/WE2E/machine_suites/coverage.gaea @@ -8,3 +8,4 @@ grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_HRRR_suite_HRRR grid_RRFS_CONUScompact_3km_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta grid_SUBCONUS_Ind_3km_ics_RAP_lbcs_RAP_suite_RRFS_v1beta_plot 2020_CAPE +2020_easter_storm diff --git a/tests/WE2E/machine_suites/coverage.hera.gnu.com b/tests/WE2E/machine_suites/coverage.hera.gnu.com index 4c802781f9..e820e6327e 100644 --- a/tests/WE2E/machine_suites/coverage.hera.gnu.com +++ b/tests/WE2E/machine_suites/coverage.hera.gnu.com @@ -8,3 +8,10 @@ long_fcst MET_verification_only_vx MET_ensemble_verification_only_vx_time_lag 2019_halloween_storm +2020_jan_cold_blast +vx-det_long-fcst_custom-vx-config_aiml-fourcastnet +vx-det_long-fcst_custom-vx-config_aiml-panguweather +vx-det_long-fcst_custom-vx-config_gfs +vx-det_long-fcst_winter-wx_SRW-staged +vx-det_multicyc_fcst-overlap_ncep-hrrr +vx-det_multicyc_last-obs-00z_ncep-hrrr diff --git a/tests/WE2E/machine_suites/coverage.hera.intel.nco b/tests/WE2E/machine_suites/coverage.hera.intel.nco index d5ab0d6fe8..82442a6835 100644 --- a/tests/WE2E/machine_suites/coverage.hera.intel.nco +++ b/tests/WE2E/machine_suites/coverage.hera.intel.nco @@ -1,8 +1,8 @@ +2019_memorial_day_heat_wave custom_ESGgrid_Peru_12km get_from_HPSS_ics_FV3GFS_lbcs_FV3GFS_fmt_grib2_2019061200 get_from_HPSS_ics_GDAS_lbcs_GDAS_fmt_netcdf_2022040400_ensemble_2mems get_from_HPSS_ics_HRRR_lbcs_RAP -grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v15p2 grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16_plot grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_RAP_suite_RAP grid_RRFS_CONUS_25km_ics_GSMGFS_lbcs_GSMGFS_suite_GFS_v15p2 @@ -10,3 +10,9 @@ grid_RRFS_CONUS_3km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v15p2 grid_RRFS_CONUS_3km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16 grid_RRFS_CONUScompact_3km_ics_HRRR_lbcs_RAP_suite_HRRR pregen_grid_orog_sfc_climo +vx-det_long-fcst_custom-vx-config_aiml-graphcast +vx-det_multicyc_long-fcst-overlap_nssl-mpas +vx-det_multicyc_long-fcst-no-overlap_nssl-mpas +vx-det_multicyc_first-obs-00z_ncep-hrrr +vx-det_multicyc_no-00z-obs_nssl-mpas +vx-det_multicyc_no-fcst-overlap_ncep-hrrr diff --git a/tests/WE2E/machine_suites/coverage.hercules b/tests/WE2E/machine_suites/coverage.hercules index 273de3108e..668a26e685 100644 --- 
a/tests/WE2E/machine_suites/coverage.hercules +++ b/tests/WE2E/machine_suites/coverage.hercules @@ -4,8 +4,9 @@ grid_RRFS_CONUS_13km_ics_FV3GFS_lbcs_FV3GFS_suite_RRFS_v1beta grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v17_p8_plot grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_suite_HRRR grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_suite_RAP -grid_RRFS_CONUScompact_25km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16 grid_RRFS_NA_13km_ics_FV3GFS_lbcs_FV3GFS_suite_RAP grid_SUBCONUS_Ind_3km_ics_NAM_lbcs_NAM_suite_GFS_v16 MET_verification_only_vx specify_EXTRN_MDL_SYSBASEDIR_ICS_LBCS +2019_hurricane_lorenzo +2020_denver_radiation_inversion diff --git a/tests/WE2E/machine_suites/coverage.jet b/tests/WE2E/machine_suites/coverage.jet index 53308090b1..8c79a0b700 100644 --- a/tests/WE2E/machine_suites/coverage.jet +++ b/tests/WE2E/machine_suites/coverage.jet @@ -1,3 +1,4 @@ +2019_hurricane_barry community custom_ESGgrid custom_ESGgrid_Great_Lakes_snow_8km @@ -8,4 +9,3 @@ get_from_HPSS_ics_RAP_lbcs_RAP grid_RRFS_AK_3km_ics_FV3GFS_lbcs_FV3GFS_suite_HRRR grid_RRFS_CONUS_13km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16_plot grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v15p2 -grid_RRFS_CONUS_3km_ics_FV3GFS_lbcs_FV3GFS_suite_RRFS_v1beta diff --git a/tests/WE2E/machine_suites/coverage.orion b/tests/WE2E/machine_suites/coverage.orion index c698648b10..5cb4441437 100644 --- a/tests/WE2E/machine_suites/coverage.orion +++ b/tests/WE2E/machine_suites/coverage.orion @@ -5,6 +5,7 @@ grid_CONUS_3km_GFDLgrid_ics_FV3GFS_lbcs_FV3GFS_suite_RRFS_v1beta grid_RRFS_AK_13km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16_plot grid_RRFS_CONUS_25km_ics_NAM_lbcs_NAM_suite_RRFS_v1beta grid_RRFS_CONUS_3km_ics_FV3GFS_lbcs_FV3GFS_suite_HRRR +grid_RRFS_CONUScompact_25km_ics_RRFS_lbcs_RRFS_suite_RRFS_v1beta grid_RRFS_CONUScompact_13km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16 grid_RRFS_CONUScompact_3km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16 grid_SUBCONUS_Ind_3km_ics_FV3GFS_lbcs_FV3GFS_suite_WoFS_v0 diff --git a/tests/WE2E/monitor_jobs.py b/tests/WE2E/monitor_jobs.py index 93db4fb1a5..db7f1a1a8e 100755 --- a/tests/WE2E/monitor_jobs.py +++ b/tests/WE2E/monitor_jobs.py @@ -18,20 +18,17 @@ def monitor_jobs(expts_dict: dict, monitor_file: str = '', procs: int = 1, mode: str = 'continuous', debug: bool = False) -> str: - """Function to monitor and run jobs for the specified experiment using Rocoto + """Monitors and runs jobs for the specified experiment using Rocoto Args: - expts_dict (dict): A dictionary containing the information needed to run - one or more experiments. See example file monitor_jobs.yaml - monitor_file (str): [optional] - mode (str): [optional] Mode of job monitoring - continuous (default): monitor jobs continuously until complete - advance: increment jobs once, then quit + expts_dict (dict): A dictionary containing the information needed to run one or more experiments. See example file ``monitor_jobs.yaml``. + monitor_file (str): [optional] Name of the file used to monitor experiment results. Default is ``monitor_jobs.yaml``. + procs (int): [optional] The number of parallel processes to run + mode (str): [optional] Mode of job monitoring. Options: (1) ``'continuous'`` (default): monitor jobs continuously until complete or (2) ``'advance'``: increment jobs once, then quit. 
debug (bool): [optional] Enable extra output for debugging Returns: - str: The name of the file used for job monitoring (when script is finished, this - contains results/summary) + monitor_file: The name of the file used for job monitoring (when script is finished, this contains results/summary) """ monitor_start = datetime.now() # Write monitor_file, which will contain information on each monitored experiment @@ -143,6 +140,12 @@ def setup_logging(logfile: str = "log.run_WE2E_tests", debug: bool = False) -> N """ Sets up logging, printing high-priority (INFO and higher) messages to screen, and printing all messages with detailed timing and routine info in the specified text file. + + Args: + logfile (str): Name of log file for WE2E tests (default: ``log.run_WE2E_tests``) + debug (bool): Set to True to enable extra output for debugging + Returns: + None """ logging.getLogger().setLevel(logging.DEBUG) @@ -187,7 +190,7 @@ def setup_logging(logfile: str = "log.run_WE2E_tests", debug: bool = False) -> N 'advance: will only advance each experiment one step') parser.add_argument('-d', '--debug', action='store_true', help='Script will be run in debug mode with more verbose output. ' + - 'WARNING: increased verbosity may run very slow on some platforms') + 'WARNING: increased verbosity may run very slowly on some platforms') args = parser.parse_args() diff --git a/tests/WE2E/print_test_info.py b/tests/WE2E/print_test_info.py index f2301bb690..a46142aa0c 100755 --- a/tests/WE2E/print_test_info.py +++ b/tests/WE2E/print_test_info.py @@ -1,5 +1,7 @@ #!/usr/bin/env python3 +"""Script for parsing all test files in the ``test_configs`` directory and printing a pipe-delimited summary file of the details of each test.""" + import argparse import sys diff --git a/tests/WE2E/run_WE2E_tests.py b/tests/WE2E/run_WE2E_tests.py index 5d4bd81105..fc0a3e3268 100755 --- a/tests/WE2E/run_WE2E_tests.py +++ b/tests/WE2E/run_WE2E_tests.py @@ -1,5 +1,6 @@ #!/usr/bin/env python3 # pylint: disable=logging-fstring-interpolation + import os import sys import glob @@ -22,11 +23,11 @@ from utils import print_test_info def run_we2e_tests(homedir, args) -> None: - """Function to run the WE2E tests selected by the user + """Runs the Workflow End-to-End (WE2E) tests selected by the user Args: - homedir (str): The full path of the top-level app directory - args (obj): The argparse.Namespace object containing command-line arguments + homedir (str): The full path to the top-level application directory + args (argparse.Namespace): Command-line arguments Returns: None @@ -141,7 +142,6 @@ def run_we2e_tests(homedir, args) -> None: pretty_list = "\n".join(str(x) for x in tests_to_run) logging.info(f'Will run {len(tests_to_run)} tests:\n{pretty_list}') - config_default_file = os.path.join(ushdir,'config_defaults.yaml') logging.debug(f"Loading config defaults file {config_default_file}") config_defaults = load_config_file(config_default_file) @@ -202,13 +202,11 @@ def run_we2e_tests(homedir, args) -> None: # obs. 
If so, and if the config file does not explicitly set the observation locations, # fill these in with defaults from the machine files obs_vars = ['CCPA_OBS_DIR','MRMS_OBS_DIR','NDAS_OBS_DIR','NOHRSC_OBS_DIR'] - if 'platform' not in test_cfg: - test_cfg['platform'] = {} for obvar in obs_vars: mach_path = machine_defaults['platform'].get('TEST_'+obvar) - if not test_cfg['platform'].get(obvar) and mach_path: + if not test_cfg['verification'].get(obvar) and mach_path: logging.debug(f'Setting {obvar} = {mach_path} from machine file') - test_cfg['platform'][obvar] = mach_path + test_cfg['verification'][obvar] = mach_path if args.compiler == "gnu": # 2D decomposition doesn't work with GNU compilers. Deactivate 2D decomposition for GNU @@ -229,7 +227,7 @@ def run_we2e_tests(homedir, args) -> None: if args.quiet: console_handler = logging.getLogger().handlers[1] console_handler.setLevel(logging.WARNING) - expt_dir = generate_FV3LAM_wflow(ushdir,logfile=f"{ushdir}/log.generate_FV3LAM_wflow", + expt_dir = generate_FV3LAM_wflow(ushdir,"config.yaml",logfile=f"{ushdir}/log.generate_FV3LAM_wflow", debug=args.debug) if args.quiet: if args.debug: @@ -274,13 +272,13 @@ def run_we2e_tests(homedir, args) -> None: def check_tests(tests: list) -> list: """ - Function for checking that all tests in a provided list of tests are valid + Checks that all tests in a provided list of tests are valid Args: tests (list): List of potentially valid test names Returns: - list: List of config files corresponding to test names + tests_to_run: List of configuration files corresponding to test names """ testfiles = glob.glob('test_configs/**/config*.yaml', recursive=True) @@ -309,7 +307,7 @@ def check_tests(tests: list) -> list: if not match: raise Exception(f"Could not find test {test}") tests_to_run.append(match) - # Because some test files are symlinks to other tests, check that we don't + # Because some test files are symlinked to other tests, check that we don't # include the same test twice for testfile in tests_to_run.copy(): if os.path.islink(testfile): @@ -328,13 +326,13 @@ def check_tests(tests: list) -> list: def check_test(test: str) -> str: """ - Function for checking that a string corresponds to a valid test name + Checks that a string corresponds to a valid test name Args: - test (str) : String of potential test name + test (str): Potential test name Returns: - str: File name of test config file (empty string if no test file found) + config: Name of the test configuration file (empty string if no test file is found) """ # potential test files testfiles = glob.glob('test_configs/**/config*.yaml', recursive=True) @@ -350,17 +348,17 @@ def check_test(test: str) -> str: def check_task_get_extrn_bcs(cfg: dict, mach: dict, dflt: dict, ics_or_lbcs: str = "") -> dict: """ - Function for checking and updating various settings in task_get_extrn_ics or - task_get_extrn_lbcs section of test config yaml + Checks and updates various settings in the ``task_get_extrn_ics`` or + ``task_get_extrn_lbcs`` section of the test's configuration YAML file Args: - cfg (dict): Dictionary loaded from test config file - mach (dict): Dictionary loaded from machine settings file - dflt (dict): Dictionary loaded from default config file - ics_or_lbcs (bool): Perform checks for ICs task or LBCs task + cfg (dict): Contents loaded from test configuration file + mach (dict): Contents loaded from machine settings file + dflt (dict): Contents loaded from default configuration file (``config_defaults.yaml``) + ics_or_lbcs (str): Perform checks for 
either the ICs task or the LBCs task. Valid values: ``"ics"`` | ``"lbcs"`` Returns: - dict: Updated dictionary for task_get_extrn_[ics|lbcs] section of test config + cfg_bcs: Updated dictionary for ``task_get_extrn_[ics|lbcs]`` section of test configuration file """ if ics_or_lbcs not in ["lbcs", "ics"]: @@ -434,8 +432,14 @@ def check_task_get_extrn_bcs(cfg: dict, mach: dict, dflt: dict, ics_or_lbcs: str def setup_logging(logfile: str = "log.run_WE2E_tests", debug: bool = False) -> None: """ - Sets up logging, printing high-priority (INFO and higher) messages to screen, and printing all - messages with detailed timing and routine info in the specified text file. + Sets up logging, prints high-priority (INFO and higher) messages to screen, and prints all + messages with detailed timing and routine info to the specified text file. + + Args: + logfile (str): Name of the test logging file (default: ``log.run_WE2E_tests``) + debug (bool): Set to True for more detailed output/information + Returns: + None """ logging.getLogger().setLevel(logging.DEBUG) diff --git a/tests/WE2E/setup_WE2E_tests.sh b/tests/WE2E/setup_WE2E_tests.sh index 309c755966..8fa0977af7 100755 --- a/tests/WE2E/setup_WE2E_tests.sh +++ b/tests/WE2E/setup_WE2E_tests.sh @@ -80,6 +80,7 @@ export HOME=$homedir source ../../ush/load_modules_wflow.sh ${machine} # Run the E2E Workflow tests +[[ ${tests} = none ]] && echo "none" || \ ./run_WE2E_tests.py \ --machine=${machine} \ --account=${account} \ diff --git a/tests/WE2E/test_configs/aqm/config.aqm_grid_AQM_NA13km_suite_GFS_v16.yaml b/tests/WE2E/test_configs/aqm/config.aqm_grid_AQM_NA13km_suite_GFS_v16.yaml index 1587fadcc1..a9548f8b5f 100644 --- a/tests/WE2E/test_configs/aqm/config.aqm_grid_AQM_NA13km_suite_GFS_v16.yaml +++ b/tests/WE2E/test_configs/aqm/config.aqm_grid_AQM_NA13km_suite_GFS_v16.yaml @@ -21,7 +21,7 @@ rocoto: task_aqm_ics_ext: metatask_run_ensemble: task_run_fcst_mem#mem#: - walltime: 01:00:00 + walltime: 01:20:00 task_get_extrn_ics: EXTRN_MDL_NAME_ICS: FV3GFS FV3GFS_FILE_FMT_ICS: netcdf @@ -34,6 +34,7 @@ task_get_extrn_lbcs: EXTRN_MDL_LBCS_OFFSET_HRS: 0 USE_USER_STAGED_EXTRN_FILES: true task_run_fcst: + OMP_NUM_THREADS_RUN_FCST: 1 DT_ATMOS: 180 LAYOUT_X: 50 LAYOUT_Y: 34 diff --git a/tests/WE2E/test_configs/custom_grids/config.custom_ESGgrid_Great_Lakes_snow_8km.yaml b/tests/WE2E/test_configs/custom_grids/config.custom_ESGgrid_Great_Lakes_snow_8km.yaml index d773c632e2..ffacb0a8cb 100644 --- a/tests/WE2E/test_configs/custom_grids/config.custom_ESGgrid_Great_Lakes_snow_8km.yaml +++ b/tests/WE2E/test_configs/custom_grids/config.custom_ESGgrid_Great_Lakes_snow_8km.yaml @@ -55,10 +55,9 @@ task_run_fcst: task_run_post: POST_OUTPUT_DOMAIN_NAME: custom_ESGgrid_Michigan_Ontario verification: - VX_FCST_MODEL_NAME: Michigan_Ontario_snow_8km - VX_FIELDS: [ "APCP", "REFC", "RETOP", "ADPSFC", "ADPUPA", "ASNOW" ] -platform: CCPA_OBS_DIR: '{{ workflow.EXPTDIR }}/CCPA_obs' MRMS_OBS_DIR: '{{ workflow.EXPTDIR }}/MRMS_obs' NDAS_OBS_DIR: '{{ workflow.EXPTDIR }}/NDAS_obs' NOHRSC_OBS_DIR: '{{ workflow.EXPTDIR }}/NOHRSC_obs' + VX_FCST_MODEL_NAME: Michigan_Ontario_snow_8km + VX_FIELD_GROUPS: [ "APCP", "REFC", "RETOP", "SFC", "UPA", "ASNOW" ] diff --git a/tests/WE2E/test_configs/custom_grids/config.custom_ESGgrid_SF_1p1km.yaml b/tests/WE2E/test_configs/custom_grids/config.custom_ESGgrid_SF_1p1km.yaml index 6d9e2e0d6d..867b4675a0 100644 --- a/tests/WE2E/test_configs/custom_grids/config.custom_ESGgrid_SF_1p1km.yaml +++ b/tests/WE2E/test_configs/custom_grids/config.custom_ESGgrid_SF_1p1km.yaml 
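Several tests in this diff also get their forecast walltime relaxed; the SF 1.1-km hunk just below raises it from 01:00:00 to 02:30:00. The override sits under a test's rocoto: block, keyed by the same metatask/task names the workflow uses. A minimal sketch of the pattern (the config file name is hypothetical; the key path is the one used throughout this diff):

cat <<'EOF' >> config.my_test.yaml
rocoto:
  tasks:
    metatask_run_ensemble:
      task_run_fcst_mem#mem#:
        walltime: 02:30:00
EOF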
@@ -57,7 +57,7 @@ rocoto: tasks: metatask_run_ensemble: task_run_fcst_mem#mem#: - walltime: 01:00:00 + walltime: 02:30:00 task_make_ics_mem#mem#: nnodes: 16 ppn: 12 diff --git a/tests/WE2E/test_configs/fire/config.UFS_FIRE_multifire_one-way-coupled.yaml b/tests/WE2E/test_configs/fire/config.UFS_FIRE_multifire_one-way-coupled.yaml new file mode 100644 index 0000000000..6bd217914f --- /dev/null +++ b/tests/WE2E/test_configs/fire/config.UFS_FIRE_multifire_one-way-coupled.yaml @@ -0,0 +1,76 @@ +metadata: + description: >- + Tests the UFS_FIRE capability for multiple fire ignitions with one-way (ATM-->FIRE) coupling +user: + RUN_ENVIR: community +workflow: + CCPP_PHYS_SUITE: FV3_HRRR + PREDEF_GRID_NAME: SUBCONUS_CO_3km + DATE_FIRST_CYCL: '2020081318' + DATE_LAST_CYCL: '2020081318' + FCST_LEN_HRS: 3 # 27 hours of LBCs staged on Derecho + PREEXISTING_DIR_METHOD: rename +task_get_extrn_ics: + USE_USER_STAGED_EXTRN_FILES: true + EXTRN_MDL_NAME_ICS: HRRR + FV3GFS_FILE_FMT_ICS: grib2 + EXTRN_MDL_FILES_ICS: + - '{fyyyymmdd}.hrrr.t{fhh}z.wrfprsf00.grib2' +task_get_extrn_lbcs: + USE_USER_STAGED_EXTRN_FILES: true + EXTRN_MDL_FILES_LBCS: + - '{fyyyymmdd}.hrrr.t{fhh}z.wrfprsf00.grib2' + EXTRN_MDL_NAME_LBCS: HRRR + EXTRN_MDL_LBCS_OFFSET_HRS: 0 + LBC_SPEC_INTVL_HRS: 3 + FV3GFS_FILE_FMT_LBCS: grib2 +task_run_fcst: + QUILTING: true + PRINT_ESMF: True + OMP_NUM_THREADS_RUN_FCST: 1 +task_plot_allvars: + COMOUT_REF: "" +fire: + UFS_FIRE: True + FIRE_NUM_TASKS: 1 + FIRE_INPUT_DIR: '{{ platform.WE2E_TEST_DATA }}/UFS_FIRE/{{ workflow.DATE_FIRST_CYCL }}' + DT_FIRE: 0.5 + OUTPUT_DT_FIRE: 300 + FIRE_NUM_IGNITIONS: 2 + FIRE_IGNITION_ROS: + - 0.05 + - 0.1 + FIRE_IGNITION_START_LAT: + - 40.609 + - 40.666 + FIRE_IGNITION_START_LON: + - -105.879 + - -105.95 + FIRE_IGNITION_END_LAT: + - 40.609 + - 40.678 + FIRE_IGNITION_END_LON: + - -105.879 + - -105.94 + FIRE_IGNITION_RADIUS: + - 250 + - 100 + FIRE_IGNITION_START_TIME: + - 6480 + - 3600 + FIRE_IGNITION_END_TIME: + - 7000 + - 3720 + FIRE_WIND_HEIGHT: 5.0 + FIRE_PRINT_MSG: 0 + FIRE_ATM_FEEDBACK: 0.0 + FIRE_VISCOSITY: 0.4 + FIRE_UPWINDING: 9 + FIRE_LSM_ZCOUPLING: False + FIRE_LSM_ZCOUPLING_REF: 60.0 +rocoto: + tasks: + metatask_run_ensemble: + task_run_fcst_mem#mem#: + walltime: 01:00:00 + diff --git a/tests/WE2E/test_configs/fire/config.UFS_FIRE_one-way-coupled.yaml b/tests/WE2E/test_configs/fire/config.UFS_FIRE_one-way-coupled.yaml new file mode 100644 index 0000000000..756dac8135 --- /dev/null +++ b/tests/WE2E/test_configs/fire/config.UFS_FIRE_one-way-coupled.yaml @@ -0,0 +1,60 @@ +metadata: + description: >- + Tests the UFS_FIRE capability for a single fire with one-way (ATM-->FIRE) coupling +user: + RUN_ENVIR: community +workflow: + CCPP_PHYS_SUITE: FV3_HRRR + PREDEF_GRID_NAME: SUBCONUS_CO_3km + DATE_FIRST_CYCL: '2020081318' + DATE_LAST_CYCL: '2020081318' + FCST_LEN_HRS: 3 # 27 hours of LBCs staged on Derecho + PREEXISTING_DIR_METHOD: rename +task_get_extrn_ics: + USE_USER_STAGED_EXTRN_FILES: true + EXTRN_MDL_NAME_ICS: HRRR + FV3GFS_FILE_FMT_ICS: grib2 + EXTRN_MDL_FILES_ICS: + - '{fyyyymmdd}.hrrr.t{fhh}z.wrfprsf00.grib2' +task_get_extrn_lbcs: + USE_USER_STAGED_EXTRN_FILES: true + EXTRN_MDL_FILES_LBCS: + - '{fyyyymmdd}.hrrr.t{fhh}z.wrfprsf00.grib2' + EXTRN_MDL_NAME_LBCS: HRRR + EXTRN_MDL_LBCS_OFFSET_HRS: 0 + LBC_SPEC_INTVL_HRS: 3 + FV3GFS_FILE_FMT_LBCS: grib2 +task_run_fcst: + QUILTING: true + PRINT_ESMF: True + OMP_NUM_THREADS_RUN_FCST: 1 +task_plot_allvars: + COMOUT_REF: "" +fire: + UFS_FIRE: True + FIRE_NUM_TASKS: 1 + FIRE_INPUT_DIR: '{{ platform.WE2E_TEST_DATA 
}}/UFS_FIRE/{{ workflow.DATE_FIRST_CYCL }}' + DT_FIRE: 0.5 + OUTPUT_DT_FIRE: 300 + FIRE_NUM_IGNITIONS: 1 + FIRE_IGNITION_ROS: 0.05 + FIRE_IGNITION_START_LAT: 40.609 + FIRE_IGNITION_START_LON: -105.879 + FIRE_IGNITION_END_LAT: 40.609 + FIRE_IGNITION_END_LON: -105.879 + FIRE_IGNITION_RADIUS: 250 + FIRE_IGNITION_START_TIME: 6480 + FIRE_IGNITION_END_TIME: 7000 + FIRE_WIND_HEIGHT: 5.0 + FIRE_PRINT_MSG: 0 + FIRE_ATM_FEEDBACK: 0.0 + FIRE_VISCOSITY: 0.4 + FIRE_UPWINDING: 9 + FIRE_LSM_ZCOUPLING: False + FIRE_LSM_ZCOUPLING_REF: 60.0 +rocoto: + tasks: + metatask_run_ensemble: + task_run_fcst_mem#mem#: + walltime: 01:00:00 + diff --git a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_CONUS_3km_GFDLgrid_ics_FV3GFS_lbcs_FV3GFS_suite_RRFS_v1beta.yaml b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_CONUS_3km_GFDLgrid_ics_FV3GFS_lbcs_FV3GFS_suite_RRFS_v1beta.yaml index de456cea73..f4c40bf722 100644 --- a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_CONUS_3km_GFDLgrid_ics_FV3GFS_lbcs_FV3GFS_suite_RRFS_v1beta.yaml +++ b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_CONUS_3km_GFDLgrid_ics_FV3GFS_lbcs_FV3GFS_suite_RRFS_v1beta.yaml @@ -20,5 +20,3 @@ task_get_extrn_lbcs: EXTRN_MDL_NAME_LBCS: FV3GFS LBC_SPEC_INTVL_HRS: 3 USE_USER_STAGED_EXTRN_FILES: true -task_run_fcst: - OMP_NUM_THREADS_RUN_FCST: 3 diff --git a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_AK_3km_ics_FV3GFS_lbcs_FV3GFS_suite_HRRR.yaml b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_AK_3km_ics_FV3GFS_lbcs_FV3GFS_suite_HRRR.yaml index 4a340185f3..6d4dbc3b33 100644 --- a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_AK_3km_ics_FV3GFS_lbcs_FV3GFS_suite_HRRR.yaml +++ b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_AK_3km_ics_FV3GFS_lbcs_FV3GFS_suite_HRRR.yaml @@ -19,5 +19,3 @@ task_get_extrn_lbcs: EXTRN_MDL_NAME_LBCS: FV3GFS LBC_SPEC_INTVL_HRS: 3 USE_USER_STAGED_EXTRN_FILES: true -task_run_fcst: - OMP_NUM_THREADS_RUN_FCST: 3 diff --git a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUS_25km_ics_NAM_lbcs_NAM_suite_GFS_v16.yaml b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUS_25km_ics_NAM_lbcs_NAM_suite_GFS_v16.yaml index 933042c82f..8e93259539 100644 --- a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUS_25km_ics_NAM_lbcs_NAM_suite_GFS_v16.yaml +++ b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUS_25km_ics_NAM_lbcs_NAM_suite_GFS_v16.yaml @@ -3,8 +3,8 @@ metadata: This test is to ensure that the workflow running in community mode completes successfully on the RRFS_CONUS_25km grid using the GFS_v16 physics suite with ICs and LBCs derived from the NAM. - This test also runs with two ensemble members, and ensures the MET - ensemble-specific tasks run successfully. + This test also runs with two ensemble members, runs plotting tasks for each + ensemble member, and ensures the MET ensemble-specific tasks run successfully.
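The description above now mentions per-member plotting, and the hunk that follows wires it in by adding parm/wflow/plot.yaml to the test's Jinja-templated taskgroups list. As a general pattern, a test opts in to plotting with a fragment like this (hypothetical config name; an empty COMOUT_REF is assumed here to mean plots are generated without a reference experiment to difference against):

cat <<'EOF' >> config.my_test.yaml
rocoto:
  tasks:
    taskgroups: '{{ ["parm/wflow/prep.yaml", "parm/wflow/coldstart.yaml", "parm/wflow/post.yaml", "parm/wflow/plot.yaml"]|include }}'
task_plot_allvars:
  COMOUT_REF: ""
EOF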
user: RUN_ENVIR: community workflow: @@ -16,7 +16,7 @@ workflow: PREEXISTING_DIR_METHOD: rename rocoto: tasks: - taskgroups: '{{ ["parm/wflow/prep.yaml", "parm/wflow/coldstart.yaml", "parm/wflow/post.yaml", "parm/wflow/verify_pre.yaml", "parm/wflow/verify_det.yaml", "parm/wflow/verify_ens.yaml", "parm/wflow/test.yaml"]|include }}' + taskgroups: '{{ ["parm/wflow/prep.yaml", "parm/wflow/coldstart.yaml", "parm/wflow/post.yaml", "parm/wflow/plot.yaml", "parm/wflow/verify_pre.yaml", "parm/wflow/verify_det.yaml", "parm/wflow/verify_ens.yaml", "parm/wflow/test.yaml"]|include }}' metatask_run_ensemble: task_run_fcst_mem#mem#: walltime: 01:00:00 diff --git a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUScompact_25km_ics_RRFS_lbcs_RRFS_suite_RRFS_v1beta.yaml b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUScompact_25km_ics_RRFS_lbcs_RRFS_suite_RRFS_v1beta.yaml new file mode 100644 index 0000000000..908b79dc43 --- /dev/null +++ b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUScompact_25km_ics_RRFS_lbcs_RRFS_suite_RRFS_v1beta.yaml @@ -0,0 +1,29 @@ +metadata: + description: |- + This test is to ensure that the workflow running in community mode + completes successfully on the RRFS_CONUScompact_25km grid using the RRFS_v1beta + physics suite. It uses RRFS forecasts mapped onto a 3-km regular grid (rrfs*.conus.grib2) for + ICs and LBCs. This test uses the old v1 sfc_data, not the v2 fractional grid sfc_data. +user: + RUN_ENVIR: community +workflow: + CCPP_PHYS_SUITE: FV3_RRFS_v1beta + PREDEF_GRID_NAME: RRFS_CONUScompact_25km + DATE_FIRST_CYCL: '2024060517' + DATE_LAST_CYCL: '2024060517' + FCST_LEN_HRS: 3 + PREEXISTING_DIR_METHOD: rename +task_get_extrn_ics: + EXTRN_MDL_NAME_ICS: RRFS + FV3GFS_FILE_FMT_ICS: grib2 + USE_USER_STAGED_EXTRN_FILES: true +task_get_extrn_lbcs: + EXTRN_MDL_NAME_LBCS: RRFS + LBC_SPEC_INTVL_HRS: 1 + FV3GFS_FILE_FMT_LBCS: grib2 + USE_USER_STAGED_EXTRN_FILES: true +task_plot_allvars: + COMOUT_REF: "" +rocoto: + tasks: + taskgroups: '{{ ["parm/wflow/prep.yaml", "parm/wflow/coldstart.yaml", "parm/wflow/post.yaml", "parm/wflow/plot.yaml"]|include }}' diff --git a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUScompact_3km_ics_HRRR_lbcs_RAP_suite_HRRR.yaml b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUScompact_3km_ics_HRRR_lbcs_RAP_suite_HRRR.yaml index 35be12a1ee..b00a24ae84 100644 --- a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUScompact_3km_ics_HRRR_lbcs_RAP_suite_HRRR.yaml +++ b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUScompact_3km_ics_HRRR_lbcs_RAP_suite_HRRR.yaml @@ -23,5 +23,3 @@ task_get_extrn_lbcs: USE_USER_STAGED_EXTRN_FILES: true EXTRN_MDL_FILES_LBCS: - '{yy}{jjj}{hh}00{fcst_hr:02d}00' -task_run_fcst: - OMP_NUM_THREADS_RUN_FCST: 3 diff --git a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUScompact_3km_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta.yaml b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUScompact_3km_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta.yaml index 1265fa8e0c..44dfec5e75 100644 --- a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUScompact_3km_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta.yaml +++ b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUScompact_3km_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta.yaml @@ -24,5 +24,3 @@
task_get_extrn_lbcs: USE_USER_STAGED_EXTRN_FILES: true EXTRN_MDL_FILES_LBCS: - '{yy}{jjj}{hh}00{fcst_hr:02d}00' -task_run_fcst: - OMP_NUM_THREADS_RUN_FCST: 3 diff --git a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_SUBCONUS_Ind_3km_ics_FV3GFS_lbcs_FV3GFS_suite_WoFS_v0.yaml b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_SUBCONUS_Ind_3km_ics_FV3GFS_lbcs_FV3GFS_suite_WoFS_v0.yaml index 120a38291e..0d850b0147 100644 --- a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_SUBCONUS_Ind_3km_ics_FV3GFS_lbcs_FV3GFS_suite_WoFS_v0.yaml +++ b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_SUBCONUS_Ind_3km_ics_FV3GFS_lbcs_FV3GFS_suite_WoFS_v0.yaml @@ -19,7 +19,7 @@ rocoto: taskgroups: '{{ ["parm/wflow/prep.yaml", "parm/wflow/coldstart.yaml", "parm/wflow/post.yaml", "parm/wflow/verify_pre.yaml", "parm/wflow/verify_det.yaml", "parm/wflow/test.yaml"]|include }}' metatask_run_ensemble: task_run_fcst_mem#mem#: - walltime: 01:00:00 + walltime: 02:00:00 task_get_extrn_ics: EXTRN_MDL_NAME_ICS: FV3GFS FV3GFS_FILE_FMT_ICS: grib2 diff --git a/tests/WE2E/test_configs/ufs_case_studies/config.2019_hurricane_lorenzo.yaml b/tests/WE2E/test_configs/ufs_case_studies/config.2019_hurricane_lorenzo.yaml new file mode 100644 index 0000000000..557607d810 --- /dev/null +++ b/tests/WE2E/test_configs/ufs_case_studies/config.2019_hurricane_lorenzo.yaml @@ -0,0 +1,38 @@ +metadata: + description: |- + This test is to ensure that the workflow running in community mode + completes successfully on the RRFS_CONUS_13km grid using the FV3_GFS_v16 + physics suite with ICs and LBCs derived from the UFS-CASE-STUDIES platforms + for 2019 Hurricane Lorenzo. + # NOTE: FCST_LEN_HRS is set to 6 because it can take a very long time to + # download these files, which can delay the WE2E testing process. + # To capture the event, extend the FCST_LEN_HRS from 6 to 90. +user: + RUN_ENVIR: community +platform: + EXTRN_MDL_DATA_STORES: aws +workflow: + CCPP_PHYS_SUITE: FV3_GFS_v16 + PREDEF_GRID_NAME: RRFS_CONUS_13km + DATE_FIRST_CYCL: '2019092512' + DATE_LAST_CYCL: '2019092512' + FCST_LEN_HRS: 6 + PREEXISTING_DIR_METHOD: rename +task_get_extrn_ics: + EXTRN_MDL_NAME_ICS: UFS-CASE-STUDY + FV3GFS_FILE_FMT_ICS: nemsio +task_get_extrn_lbcs: + EXTRN_MDL_NAME_LBCS: UFS-CASE-STUDY + LBC_SPEC_INTVL_HRS: 3 + FV3GFS_FILE_FMT_LBCS: nemsio +rocoto: + tasks: + task_get_extrn_ics: + walltime: 06:00:00 + task_get_extrn_lbcs: + walltime: 06:00:00 + metatask_run_ensemble: + task_make_lbcs_mem#mem#: + walltime: 06:00:00 + task_run_fcst_mem#mem#: + walltime: 06:00:00 diff --git a/tests/WE2E/test_configs/ufs_case_studies/config.2019_memorial_day_heat_wave.yaml b/tests/WE2E/test_configs/ufs_case_studies/config.2019_memorial_day_heat_wave.yaml new file mode 100644 index 0000000000..fcba9c7924 --- /dev/null +++ b/tests/WE2E/test_configs/ufs_case_studies/config.2019_memorial_day_heat_wave.yaml @@ -0,0 +1,36 @@ +metadata: + description: |- + This test is to ensure that the workflow running in community mode + completes successfully on the RRFS_CONUS_13km grid using the FV3_GFS_v16 + physics suite with ICs and LBCs derived from the UFS-CASE-STUDIES platforms + for 2019 Memorial Day Heat Wave. + # NOTE: FCST_LEN_HRS is set to 6 because it can take a very long time to + # download these files, which can delay the WE2E testing process. + # To capture the event, extend the FCST_LEN_HRS from 6 to 24. 
+user: + RUN_ENVIR: community +platform: + EXTRN_MDL_DATA_STORES: aws +workflow: + CCPP_PHYS_SUITE: FV3_GFS_v16 + PREDEF_GRID_NAME: RRFS_CONUS_13km + DATE_FIRST_CYCL: '2019052300' + DATE_LAST_CYCL: '2019052300' + FCST_LEN_HRS: 6 + PREEXISTING_DIR_METHOD: rename +task_get_extrn_ics: + EXTRN_MDL_NAME_ICS: UFS-CASE-STUDY + FV3GFS_FILE_FMT_ICS: nemsio +task_get_extrn_lbcs: + EXTRN_MDL_NAME_LBCS: UFS-CASE-STUDY + LBC_SPEC_INTVL_HRS: 6 + FV3GFS_FILE_FMT_LBCS: nemsio +rocoto: + tasks: + task_get_extrn_ics: + walltime: 06:00:00 + task_get_extrn_lbcs: + walltime: 06:00:00 + metatask_run_ensemble: + task_make_lbcs_mem#mem#: + walltime: 06:00:00 diff --git a/tests/WE2E/test_configs/ufs_case_studies/config.2020_denver_radiation_inversion.yaml b/tests/WE2E/test_configs/ufs_case_studies/config.2020_denver_radiation_inversion.yaml new file mode 100644 index 0000000000..8bf5ece9ee --- /dev/null +++ b/tests/WE2E/test_configs/ufs_case_studies/config.2020_denver_radiation_inversion.yaml @@ -0,0 +1,38 @@ +metadata: + description: |- + This test is to ensure that the workflow running in community mode + completes successfully on the RRFS_CONUS_13km grid using the FV3_GFS_v16 + physics suite with ICs and LBCs derived from the UFS-CASE-STUDIES platforms + for 2020 Denver Radiation Inversion. + # NOTE: FCST_LEN_HRS is set to 6 because it can take a very long time to + # download these files, which can delay the WE2E testing process. + # To capture the event, extend the FCST_LEN_HRS from 6 to 90. +user: + RUN_ENVIR: community +platform: + EXTRN_MDL_DATA_STORES: aws +workflow: + CCPP_PHYS_SUITE: FV3_GFS_v16 + PREDEF_GRID_NAME: RRFS_CONUS_13km + DATE_FIRST_CYCL: '2020042912' + DATE_LAST_CYCL: '2020042912' + FCST_LEN_HRS: 6 + PREEXISTING_DIR_METHOD: rename +task_get_extrn_ics: + EXTRN_MDL_NAME_ICS: UFS-CASE-STUDY + FV3GFS_FILE_FMT_ICS: nemsio +task_get_extrn_lbcs: + EXTRN_MDL_NAME_LBCS: UFS-CASE-STUDY + LBC_SPEC_INTVL_HRS: 3 + FV3GFS_FILE_FMT_LBCS: nemsio +rocoto: + tasks: + task_get_extrn_ics: + walltime: 06:00:00 + task_get_extrn_lbcs: + walltime: 06:00:00 + metatask_run_ensemble: + task_make_lbcs_mem#mem#: + walltime: 06:00:00 + task_run_fcst_mem#mem#: + walltime: 06:00:00 diff --git a/tests/WE2E/test_configs/ufs_case_studies/config.2020_easter_storm.yaml b/tests/WE2E/test_configs/ufs_case_studies/config.2020_easter_storm.yaml new file mode 100644 index 0000000000..3c619c06bb --- /dev/null +++ b/tests/WE2E/test_configs/ufs_case_studies/config.2020_easter_storm.yaml @@ -0,0 +1,38 @@ +metadata: + description: |- + This test is to ensure that the workflow running in community mode + completes successfully on the RRFS_CONUS_13km grid using the FV3_GFS_v16 + physics suite with ICs and LBCs derived from the UFS-CASE-STUDIES platforms + for 2020 Easter Sunday Storm. + # NOTE: FCST_LEN_HRS is set to 6 because it can take a very long time to + # download these files, which can delay the WE2E testing process. + # To capture the event, extend the FCST_LEN_HRS from 6 to 90. 
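All five new ufs_case_studies configs share this shape, including the NOTE that FCST_LEN_HRS is held at 6 so that slow data downloads do not stall the WE2E queue. To actually capture an event, the comments say to extend the forecast length (90 h for this case); a sketch of doing that on a local copy (file names are hypothetical, and the sed pattern assumes the two-space indentation used in these configs):

cd tests/WE2E/test_configs/ufs_case_studies
cp config.2020_denver_radiation_inversion.yaml config.my_denver_full_event.yaml
# Swap the 6-h smoke-test length for the full 90-h event window.
sed -i 's/^  FCST_LEN_HRS: 6$/  FCST_LEN_HRS: 90/' config.my_denver_full_event.yaml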
+user: + RUN_ENVIR: community +platform: + EXTRN_MDL_DATA_STORES: aws +workflow: + CCPP_PHYS_SUITE: FV3_GFS_v16 + PREDEF_GRID_NAME: RRFS_CONUS_13km + DATE_FIRST_CYCL: '2020040912' + DATE_LAST_CYCL: '2020040912' + FCST_LEN_HRS: 6 + PREEXISTING_DIR_METHOD: rename +task_get_extrn_ics: + EXTRN_MDL_NAME_ICS: UFS-CASE-STUDY + FV3GFS_FILE_FMT_ICS: nemsio +task_get_extrn_lbcs: + EXTRN_MDL_NAME_LBCS: UFS-CASE-STUDY + LBC_SPEC_INTVL_HRS: 3 + FV3GFS_FILE_FMT_LBCS: nemsio +rocoto: + tasks: + task_get_extrn_ics: + walltime: 06:00:00 + task_get_extrn_lbcs: + walltime: 06:00:00 + metatask_run_ensemble: + task_make_lbcs_mem#mem#: + walltime: 06:00:00 + task_run_fcst_mem#mem#: + walltime: 06:00:00 diff --git a/tests/WE2E/test_configs/ufs_case_studies/config.2020_jan_cold_blast.yaml b/tests/WE2E/test_configs/ufs_case_studies/config.2020_jan_cold_blast.yaml new file mode 100644 index 0000000000..6121228cb8 --- /dev/null +++ b/tests/WE2E/test_configs/ufs_case_studies/config.2020_jan_cold_blast.yaml @@ -0,0 +1,38 @@ +metadata: + description: |- + This test is to ensure that the workflow running in community mode + completes successfully on the RRFS_CONUS_13km grid using the FV3_GFS_v16 + physics suite with ICs and LBCs derived from the UFS-CASE-STUDIES platforms + for 2020 January Cold Blast. + # NOTE: FCST_LEN_HRS is set to 6 because it can take a very long time to + # download these files, which can delay the WE2E testing process. + # To capture the event, extend the FCST_LEN_HRS from 6 to 90. +user: + RUN_ENVIR: community +platform: + EXTRN_MDL_DATA_STORES: aws +workflow: + CCPP_PHYS_SUITE: FV3_GFS_v16 + PREDEF_GRID_NAME: RRFS_CONUS_13km + DATE_FIRST_CYCL: '2020011812' + DATE_LAST_CYCL: '2020011812' + FCST_LEN_HRS: 6 + PREEXISTING_DIR_METHOD: rename +task_get_extrn_ics: + EXTRN_MDL_NAME_ICS: UFS-CASE-STUDY + FV3GFS_FILE_FMT_ICS: nemsio +task_get_extrn_lbcs: + EXTRN_MDL_NAME_LBCS: UFS-CASE-STUDY + LBC_SPEC_INTVL_HRS: 3 + FV3GFS_FILE_FMT_LBCS: nemsio +rocoto: + tasks: + task_get_extrn_ics: + walltime: 06:00:00 + task_get_extrn_lbcs: + walltime: 06:00:00 + metatask_run_ensemble: + task_make_lbcs_mem#mem#: + walltime: 06:00:00 + task_run_fcst_mem#mem#: + walltime: 06:00:00 diff --git a/tests/WE2E/test_configs/verification/config.MET_ensemble_verification_only_vx.yaml b/tests/WE2E/test_configs/verification/config.MET_ensemble_verification_only_vx.yaml index 812e805645..80b2e3099f 100644 --- a/tests/WE2E/test_configs/verification/config.MET_ensemble_verification_only_vx.yaml +++ b/tests/WE2E/test_configs/verification/config.MET_ensemble_verification_only_vx.yaml @@ -8,9 +8,6 @@ metadata: user: RUN_ENVIR: community -nco: - NET_default: rrfs - workflow: PREDEF_GRID_NAME: RRFS_CONUS_25km DATE_FIRST_CYCL: '2019061500' @@ -18,6 +15,9 @@ workflow: FCST_LEN_HRS: 6 PREEXISTING_DIR_METHOD: rename +nco: + NET_default: rrfs + rocoto: tasks: taskgroups: '{{ ["parm/wflow/verify_pre.yaml", "parm/wflow/verify_det.yaml", "parm/wflow/verify_ens.yaml"]|include }}' diff --git a/tests/WE2E/test_configs/verification/config.MET_ensemble_verification_only_vx_time_lag.yaml b/tests/WE2E/test_configs/verification/config.MET_ensemble_verification_only_vx_time_lag.yaml index f7d82cb8cd..f26ae7db21 100644 --- a/tests/WE2E/test_configs/verification/config.MET_ensemble_verification_only_vx_time_lag.yaml +++ b/tests/WE2E/test_configs/verification/config.MET_ensemble_verification_only_vx_time_lag.yaml @@ -16,24 +16,26 @@ workflow: FCST_LEN_HRS: 6 PREEXISTING_DIR_METHOD: rename +nco: + NET_default: 'RRFSE_CONUS' + rocoto: tasks: taskgroups: '{{ 
["parm/wflow/verify_pre.yaml", "parm/wflow/verify_det.yaml", "parm/wflow/verify_ens.yaml"]|include }}' -nco: - NET_default: 'RRFSE_CONUS' - global: DO_ENSEMBLE: true NUM_ENS_MEMBERS: 2 ENS_TIME_LAG_HRS: '[ 0, 12 ]' -platform: + +verification: + # If the following is commented out, then the obs files staged on each + # platform will be (found and) used. CCPA_OBS_DIR: '{{ workflow.EXPTDIR }}/obs_data/ccpa/proc' MRMS_OBS_DIR: '{{ workflow.EXPTDIR }}/obs_data/mrms/proc' NDAS_OBS_DIR: '{{ workflow.EXPTDIR }}/obs_data/ndas/proc' - -verification: + # VX_FCST_MODEL_NAME: FV3_GFS_v15p2_CONUS_25km VX_FCST_INPUT_BASEDIR: '{{ platform.get("TEST_VX_FCST_INPUT_BASEDIR") }}' VX_NDIGITS_ENSMEM_NAMES: 1 diff --git a/tests/WE2E/test_configs/verification/config.MET_ensemble_verification_winter_wx.yaml b/tests/WE2E/test_configs/verification/config.MET_ensemble_verification_winter_wx.yaml index 85a515f293..7f761117bb 100644 --- a/tests/WE2E/test_configs/verification/config.MET_ensemble_verification_winter_wx.yaml +++ b/tests/WE2E/test_configs/verification/config.MET_ensemble_verification_winter_wx.yaml @@ -31,4 +31,8 @@ global: DO_ENSEMBLE: true NUM_ENS_MEMBERS: 10 verification: - VX_FIELDS: [ "APCP", "ASNOW", "REFC", "RETOP", "ADPSFC", "ADPUPA" ] + VX_FIELD_GROUPS: [ "APCP", "ASNOW", "REFC", "RETOP", "SFC", "UPA" ] + OBS_NOHRSC_FN_TEMPLATES: [ 'ASNOW', + '{%- set data_intvl_hrs = "%02d" % NOHRSC_OBS_AVAIL_INTVL_HRS %} + {{- "{valid?fmt=%Y%m%d}/sfav2_CONUS_" ~ data_intvl_hrs ~ "h_{valid?fmt=%Y%m%d%H}_grid184.grb2" }}' ] + diff --git a/tests/WE2E/test_configs/verification/config.vx-det_long-fcst_custom-vx-config_aiml-fourcastnet.yaml b/tests/WE2E/test_configs/verification/config.vx-det_long-fcst_custom-vx-config_aiml-fourcastnet.yaml new file mode 100644 index 0000000000..f4d71ceeb8 --- /dev/null +++ b/tests/WE2E/test_configs/verification/config.vx-det_long-fcst_custom-vx-config_aiml-fourcastnet.yaml @@ -0,0 +1,63 @@ +metadata: + description: |- + SRW App configuration file to test deterministic verification of the + FourCastNet (fcnv2) global AI model. Note that this test uses a custom + verification configuration file (as opposed to the default one in the + SRW) because the AI model output often does not include many of the + fields that exist in physics-based models. + +user: + RUN_ENVIR: community + +platform: + EXTRN_MDL_DATA_STORES: hpss + +workflow: + PREEXISTING_DIR_METHOD: rename + PREDEF_GRID_NAME: RRFS_CONUS_25km + DATE_FIRST_CYCL: '2024073000' + DATE_LAST_CYCL: '2024073000' + FCST_LEN_HRS: 240 + +rocoto: + tasks: + taskgroups: '{{ ["parm/wflow/verify_pre.yaml", "parm/wflow/verify_det.yaml"]|include }}' + +task_get_extrn_lbcs: + LBC_SPEC_INTVL_HRS: 3 + +task_run_post: + POST_OUTPUT_DOMAIN_NAME: 'global' + +verification: + METPLUS_VERBOSITY_LEVEL: 5 + # Base directories in which to look for obs. If these do not exist and/or + # do not contain the required obs, create and populate them. + # Note that when performing WE2E tests, the default behavior is not to + # get obs files from HPSS but to use staged obs files. This is done by + # setting these variables to the (platform-specific) locations of these + # staged files. To force the WE2E testing system to get the obs from + # HPSS, here we reset these variables to their default values in the SRW + # workflow configuration file config_defaults.yaml. 
+ CCPA_OBS_DIR: "{{ workflow.EXPTDIR }}/obs_data/ccpa" + NOHRSC_OBS_DIR: "{{ workflow.EXPTDIR }}/obs_data/nohrsc" + MRMS_OBS_DIR: "{{ workflow.EXPTDIR }}/obs_data/mrms" + NDAS_OBS_DIR: "{{ platform.TEST_GDAS_OBS_DIR }}" + # Do not remove raw obs files to be able to verify that only the necessary + # raw files are fetched from HPSS (if it's necessary to fetch obs files). + REMOVE_RAW_OBS_CCPA: false + REMOVE_RAW_OBS_MRMS: false + REMOVE_RAW_OBS_NDAS: false + REMOVE_RAW_OBS_NOHRSC: false + # + OBS_NDAS_FN_TEMPLATES: [ 'SFCandUPA', '{valid?fmt=%Y%m%d}/gdas.t{valid?fmt=%H}z.prepbufr.nr' ] + # + VX_FCST_MODEL_NAME: 'fcnv2' + VX_CONFIG_DET_FN: 'vx_configs/vx_config_det.obs_gdas.model_aiml.yaml' + VX_FCST_INPUT_BASEDIR: '{{- platform.TEST_EXTRN_MDL_SOURCE_BASEDIR }}' + FCST_SUBDIR_TEMPLATE: '{{- verification.VX_FCST_MODEL_NAME }}.{init?fmt=%Y%m%d}/{init?fmt=%H}' + FCST_FN_TEMPLATE: '{{- verification.VX_FCST_MODEL_NAME }}.t00z.prslev.f{lead?fmt=%HHH}.grb2' + FCST_FN_TEMPLATE_PCPCOMBINE_OUTPUT: '{{- verification.VX_FCST_MODEL_NAME }}.t00z.prslev.f{lead?fmt=%HHH}_a${ACCUM_HH}h.nc' + # + VX_FCST_OUTPUT_INTVL_HRS: 6 + VX_FIELD_GROUPS: [ "SFC" ] diff --git a/tests/WE2E/test_configs/verification/config.vx-det_long-fcst_custom-vx-config_aiml-graphcast.yaml b/tests/WE2E/test_configs/verification/config.vx-det_long-fcst_custom-vx-config_aiml-graphcast.yaml new file mode 100644 index 0000000000..caa917be41 --- /dev/null +++ b/tests/WE2E/test_configs/verification/config.vx-det_long-fcst_custom-vx-config_aiml-graphcast.yaml @@ -0,0 +1,63 @@ +metadata: + description: |- + SRW App configuration file to test deterministic verification of the + GraphCast (gc) global AI model. Note that this test uses a custom + verification configuration file (as opposed to the default one in the + SRW) because the AI model output often does not include many of the + fields that exist in physics-based models. + +user: + RUN_ENVIR: community + +platform: + EXTRN_MDL_DATA_STORES: hpss + +workflow: + PREEXISTING_DIR_METHOD: rename + PREDEF_GRID_NAME: RRFS_CONUS_25km + DATE_FIRST_CYCL: '2024073000' + DATE_LAST_CYCL: '2024073000' + FCST_LEN_HRS: 240 + +rocoto: + tasks: + taskgroups: '{{ ["parm/wflow/verify_pre.yaml", "parm/wflow/verify_det.yaml"]|include }}' + +task_get_extrn_lbcs: + LBC_SPEC_INTVL_HRS: 3 + +task_run_post: + POST_OUTPUT_DOMAIN_NAME: 'global' + +verification: + METPLUS_VERBOSITY_LEVEL: 5 + # Base directories in which to look for obs. If these do not exist and/or + # do not contain the required obs, create and populate them. + # Note that when performing WE2E tests, the default behavior is not to + # get obs files from HPSS but to use staged obs files. This is done by + # setting these variables to the (platform-specific) locations of these + # staged files. To force the WE2E testing system to get the obs from + # HPSS, here we reset these variables to their default values in the SRW + # workflow configuration file config_defaults.yaml. + CCPA_OBS_DIR: "{{ workflow.EXPTDIR }}/obs_data/ccpa" + NOHRSC_OBS_DIR: "{{ workflow.EXPTDIR }}/obs_data/nohrsc" + MRMS_OBS_DIR: "{{ workflow.EXPTDIR }}/obs_data/mrms" + NDAS_OBS_DIR: "{{ platform.TEST_GDAS_OBS_DIR }}" + # Do not remove raw obs files to be able to verify that only the necessary + # raw files are fetched from HPSS (if it's necessary to fetch obs files). 
+ REMOVE_RAW_OBS_CCPA: false + REMOVE_RAW_OBS_MRMS: false + REMOVE_RAW_OBS_NDAS: false + REMOVE_RAW_OBS_NOHRSC: false + # + OBS_NDAS_FN_TEMPLATES: [ 'SFCandUPA', '{valid?fmt=%Y%m%d}/gdas.t{valid?fmt=%H}z.prepbufr.nr' ] + # + VX_FCST_MODEL_NAME: 'gc' + VX_CONFIG_DET_FN: 'vx_configs/vx_config_det.obs_gdas.model_aiml.yaml' + VX_FCST_INPUT_BASEDIR: '{{- platform.TEST_EXTRN_MDL_SOURCE_BASEDIR }}' + FCST_SUBDIR_TEMPLATE: '{{- verification.VX_FCST_MODEL_NAME }}.{init?fmt=%Y%m%d}/{init?fmt=%H}' + FCST_FN_TEMPLATE: '{{- verification.VX_FCST_MODEL_NAME }}.t00z.prslev.f{lead?fmt=%HHH}' + FCST_FN_TEMPLATE_PCPCOMBINE_OUTPUT: '{{- verification.VX_FCST_MODEL_NAME }}.t00z.prslev.f{lead?fmt=%HHH}_a${ACCUM_HH}h.nc' + # + VX_FCST_OUTPUT_INTVL_HRS: 6 + VX_FIELD_GROUPS: [ "SFC" ] diff --git a/tests/WE2E/test_configs/verification/config.vx-det_long-fcst_custom-vx-config_aiml-panguweather.yaml b/tests/WE2E/test_configs/verification/config.vx-det_long-fcst_custom-vx-config_aiml-panguweather.yaml new file mode 100644 index 0000000000..cf1fd79ad3 --- /dev/null +++ b/tests/WE2E/test_configs/verification/config.vx-det_long-fcst_custom-vx-config_aiml-panguweather.yaml @@ -0,0 +1,63 @@ +metadata: + description: |- + SRW App configuration file to test deterministic verification of the + Pangu-Weather (pw) global AI model. Note that this test uses a custom + verification configuration file (as opposed to the default one in the + SRW) because the AI model output often does not include many of the + fields that exist in physics-based models. + +user: + RUN_ENVIR: community + +platform: + EXTRN_MDL_DATA_STORES: hpss + +workflow: + PREEXISTING_DIR_METHOD: rename + PREDEF_GRID_NAME: RRFS_CONUS_25km + DATE_FIRST_CYCL: '2024073000' + DATE_LAST_CYCL: '2024073000' + FCST_LEN_HRS: 240 + +rocoto: + tasks: + taskgroups: '{{ ["parm/wflow/verify_pre.yaml", "parm/wflow/verify_det.yaml"]|include }}' + +task_get_extrn_lbcs: + LBC_SPEC_INTVL_HRS: 3 + +task_run_post: + POST_OUTPUT_DOMAIN_NAME: 'global' + +verification: + METPLUS_VERBOSITY_LEVEL: 5 + # Base directories in which to look for obs. If these do not exist and/or + # do not contain the required obs, create and populate them. + # Note that when performing WE2E tests, the default behavior is not to + # get obs files from HPSS but to use staged obs files. This is done by + # setting these variables to the (platform-specific) locations of these + # staged files. To force the WE2E testing system to get the obs from + # HPSS, here we reset these variables to their default values in the SRW + # workflow configuration file config_defaults.yaml. + CCPA_OBS_DIR: "{{ workflow.EXPTDIR }}/obs_data/ccpa" + NOHRSC_OBS_DIR: "{{ workflow.EXPTDIR }}/obs_data/nohrsc" + MRMS_OBS_DIR: "{{ workflow.EXPTDIR }}/obs_data/mrms" + NDAS_OBS_DIR: "{{ platform.TEST_GDAS_OBS_DIR }}" + # Do not remove raw obs files to be able to verify that only the necessary + # raw files are fetched from HPSS (if it's necessary to fetch obs files). 
+ REMOVE_RAW_OBS_CCPA: false + REMOVE_RAW_OBS_MRMS: false + REMOVE_RAW_OBS_NDAS: false + REMOVE_RAW_OBS_NOHRSC: false + # + OBS_NDAS_FN_TEMPLATES: [ 'SFCandUPA', '{valid?fmt=%Y%m%d}/gdas.t{valid?fmt=%H}z.prepbufr.nr' ] + # + VX_FCST_MODEL_NAME: 'pw' + VX_CONFIG_DET_FN: 'vx_configs/vx_config_det.obs_gdas.model_aiml.yaml' + VX_FCST_INPUT_BASEDIR: '{{- platform.TEST_EXTRN_MDL_SOURCE_BASEDIR }}' + FCST_SUBDIR_TEMPLATE: '{{- verification.VX_FCST_MODEL_NAME }}.{init?fmt=%Y%m%d}/{init?fmt=%H}' + FCST_FN_TEMPLATE: '{{- verification.VX_FCST_MODEL_NAME }}.t00z.prslev.f{lead?fmt=%HHH}.grb2' + FCST_FN_TEMPLATE_PCPCOMBINE_OUTPUT: '{{- verification.VX_FCST_MODEL_NAME }}.t00z.prslev.f{lead?fmt=%HHH}_a${ACCUM_HH}h.nc' + # + VX_FCST_OUTPUT_INTVL_HRS: 6 + VX_FIELD_GROUPS: [ "SFC" ] diff --git a/tests/WE2E/test_configs/verification/config.vx-det_long-fcst_custom-vx-config_gfs.yaml b/tests/WE2E/test_configs/verification/config.vx-det_long-fcst_custom-vx-config_gfs.yaml new file mode 100644 index 0000000000..5ea940f055 --- /dev/null +++ b/tests/WE2E/test_configs/verification/config.vx-det_long-fcst_custom-vx-config_gfs.yaml @@ -0,0 +1,66 @@ +metadata: + description: |- + SRW App configuration file to test deterministic verification of the + Global Forecast System (GFS) model in a way that is comparable to vx + for several AI models [GraphCast (gc), FourCastNet (fcnv2), and Pangu- + Weather (pw)]. The idea is for this test to serve as a baseline to + which the AI vx can be compared. Thus, this test uses a custom vx + verification configuration file (as opposed to the default one in the + SRW) because the AI model output often does not include many of the + fields that exist in physics-based models. + +user: + RUN_ENVIR: community + +platform: + EXTRN_MDL_DATA_STORES: hpss + +workflow: + PREEXISTING_DIR_METHOD: rename + PREDEF_GRID_NAME: RRFS_CONUS_25km + DATE_FIRST_CYCL: '2024073000' + DATE_LAST_CYCL: '2024073000' + FCST_LEN_HRS: 240 + +rocoto: + tasks: + taskgroups: '{{ ["parm/wflow/verify_pre.yaml", "parm/wflow/verify_det.yaml"]|include }}' + +task_get_extrn_lbcs: + LBC_SPEC_INTVL_HRS: 3 + +task_run_post: + POST_OUTPUT_DOMAIN_NAME: 'global' + +verification: + METPLUS_VERBOSITY_LEVEL: 5 + # Base directories in which to look for obs. If these do not exist and/or + # do not contain the required obs, create and populate them. + # Note that when performing WE2E tests, the default behavior is not to + # get obs files from HPSS but to use staged obs files. This is done by + # setting these variables to the (platform-specific) locations of these + # staged files. To force the WE2E testing system to get the obs from + # HPSS, here we reset these variables to their default values in the SRW + # workflow configuration file config_defaults.yaml. + CCPA_OBS_DIR: "{{ workflow.EXPTDIR }}/obs_data/ccpa" + NOHRSC_OBS_DIR: "{{ workflow.EXPTDIR }}/obs_data/nohrsc" + MRMS_OBS_DIR: "{{ workflow.EXPTDIR }}/obs_data/mrms" + NDAS_OBS_DIR: "{{ platform.TEST_GDAS_OBS_DIR }}" + # Do not remove raw obs files to be able to verify that only the necessary + # raw files are fetched from HPSS (if it's necessary to fetch obs files). 
+ REMOVE_RAW_OBS_CCPA: false + REMOVE_RAW_OBS_MRMS: false + REMOVE_RAW_OBS_NDAS: false + REMOVE_RAW_OBS_NOHRSC: false + # + OBS_NDAS_FN_TEMPLATES: [ 'SFCandUPA', '{valid?fmt=%Y%m%d}/gdas.t{valid?fmt=%H}z.prepbufr.nr' ] + # + VX_FCST_MODEL_NAME: 'gfs' + VX_CONFIG_DET_FN: 'vx_configs/vx_config_det.obs_gdas.model_gfs.yaml' + VX_FCST_INPUT_BASEDIR: '{{- platform.TEST_EXTRN_MDL_SOURCE_BASEDIR }}' + FCST_SUBDIR_TEMPLATE: '{{- verification.VX_FCST_MODEL_NAME }}.{init?fmt=%Y%m%d}/{init?fmt=%H}/atmos' + FCST_FN_TEMPLATE: '{{- verification.VX_FCST_MODEL_NAME }}.t00z.pgrb2.0p25.f{lead?fmt=%HHH}' + FCST_FN_TEMPLATE_PCPCOMBINE_OUTPUT: '{{- verification.VX_FCST_MODEL_NAME }}.t00z.pgrb2.0p25.f{lead?fmt=%HHH}_a${ACCUM_HH}h.nc' + # + VX_FCST_OUTPUT_INTVL_HRS: 6 + VX_FIELD_GROUPS: [ "SFC" ] diff --git a/tests/WE2E/test_configs/verification/config.vx-det_long-fcst_winter-wx_SRW-staged.yaml b/tests/WE2E/test_configs/verification/config.vx-det_long-fcst_winter-wx_SRW-staged.yaml new file mode 100644 index 0000000000..2a9fe731a0 --- /dev/null +++ b/tests/WE2E/test_configs/verification/config.vx-det_long-fcst_winter-wx_SRW-staged.yaml @@ -0,0 +1,62 @@ +metadata: + description: |- + SRW App configuration file to first pull CCPA, NOHRSC, MRMS, and NDAS + observations from HPSS for a single cycle with a relatively long + (36-hour) forecast and then perform deterministic verification, including + first performing vx preprocessing with METplus tools such as PcpCombine + and Pb2Nc. + + The staged forecast data are from the SRW itself. + + This test uses a winter case to ensure that ASNOW is verified correctly + for both 6-hour and 24-hour accumulations. + +user: + RUN_ENVIR: community + +platform: + EXTRN_MDL_DATA_STORES: hpss + +workflow: + PREEXISTING_DIR_METHOD: rename + # This is required in the experiment generation step, although it shouldn't + # be necessary since a forecast is not being run. + PREDEF_GRID_NAME: RRFS_CONUS_25km + INCR_CYCL_FREQ: 24 + DATE_FIRST_CYCL: '2023021700' + DATE_LAST_CYCL: '2023021700' + FCST_LEN_HRS: 36 + +rocoto: + tasks: + taskgroups: '{{ ["parm/wflow/verify_pre.yaml", "parm/wflow/verify_det.yaml"]|include }}' + +task_get_extrn_lbcs: + LBC_SPEC_INTVL_HRS: 3 + +task_run_post: + POST_OUTPUT_DOMAIN_NAME: 'custom_ESGgrid_Michigan_Ontario' + +verification: + # Base directories in which to look for obs. If these do not exist and/or + # do not contain the required obs, create and populate them. + # Note that when performing WE2E tests, the default behavior is not to + # get obs files from HPSS but to use staged obs files. This is done by + # setting these variables to the (platform-specific) locations of these + # staged files. To force the WE2E testing system to get the obs from + # HPSS, here we reset these variables to their default values in the SRW + # workflow configuration file config_defaults.yaml. + CCPA_OBS_DIR: "{{ workflow.EXPTDIR }}/obs_data/ccpa" + NOHRSC_OBS_DIR: "{{ workflow.EXPTDIR }}/obs_data/nohrsc" + MRMS_OBS_DIR: "{{ workflow.EXPTDIR }}/obs_data/mrms" + NDAS_OBS_DIR: "{{ workflow.EXPTDIR }}/obs_data/ndas" + # Do not remove raw obs files to be able to verify that only the necessary + # raw files are fetched from HPSS.
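Regarding the 6-hour and 24-hour ASNOW accumulations that the winter-wx test above exercises: PcpCombine builds the longer accumulation by summing consecutive shorter ones. The numbers below are fabricated purely to illustrate the arithmetic:

# Hypothetical 6-h snowfall accumulations valid at t-18h, t-12h, t-6h, and t
asnow_06h = [1.2, 0.8, 2.5, 0.5]
# PcpCombine's 24-h accumulation valid at time t is simply their sum
asnow_24h = sum(asnow_06h)
print(asnow_24h)  # 5.0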
+ REMOVE_RAW_OBS_CCPA: false + REMOVE_RAW_OBS_MRMS: false + REMOVE_RAW_OBS_NDAS: false + REMOVE_RAW_OBS_NOHRSC: false + # + VX_FCST_MODEL_NAME: 'Michigan_Ontario_snow_8km' + VX_FIELD_GROUPS: [ "APCP", "ASNOW", "REFC", "RETOP", "SFC", "UPA" ] + VX_FCST_INPUT_BASEDIR: '{{- "/".join([platform.TEST_VX_FCST_INPUT_BASEDIR, "..", "custom_ESGgrid_Michigan_Ontario_8km"]) }}' diff --git a/tests/WE2E/test_configs/verification/config.vx-det_multicyc_fcst-overlap_ncep-hrrr.yaml b/tests/WE2E/test_configs/verification/config.vx-det_multicyc_fcst-overlap_ncep-hrrr.yaml new file mode 100644 index 0000000000..0be883f1e8 --- /dev/null +++ b/tests/WE2E/test_configs/verification/config.vx-det_multicyc_fcst-overlap_ncep-hrrr.yaml @@ -0,0 +1,61 @@ +metadata: + description: |- + SRW App configuration file to first pull CCPA, MRMS, and NDAS observations + from HPSS for multiple cycles and then perform deterministic verification + for all cycles, including first performing vx preprocessing with METplus + tools such as PcpCombine and Pb2Nc. + + The staged forecast data are from NCEP's operational version of the HRRR. + + This test is for the scenario in which there are multiple, short (i.e. + shorter than 24hr), overlapping forecasts in a day. + +user: + RUN_ENVIR: community + +platform: + EXTRN_MDL_DATA_STORES: hpss + +workflow: + PREEXISTING_DIR_METHOD: rename + PREDEF_GRID_NAME: RRFS_CONUS_25km + INCR_CYCL_FREQ: 7 + DATE_FIRST_CYCL: '2024042902' + DATE_LAST_CYCL: '2024043006' + FCST_LEN_HRS: 9 + +rocoto: + tasks: + taskgroups: '{{ ["parm/wflow/verify_pre.yaml", "parm/wflow/verify_det.yaml"]|include }}' + +task_get_extrn_lbcs: + LBC_SPEC_INTVL_HRS: 3 + +task_run_post: + POST_OUTPUT_DOMAIN_NAME: 'hrrr_ncep' + +verification: + METPLUS_VERBOSITY_LEVEL: 5 + # Base directories in which to look for obs. If these do not exist and/or + # do not contain the required obs, create and populate them. + # Note that when performing WE2E tests, the default behavior is not to + # get obs files from HPSS but to use staged obs files. This is done by + # setting these variables to the (platform-specific) locations of these + # staged files. To force the WE2E testing system to get the obs from + # HPSS, here we reset these variables to their default values in the SRW + # workflow configuration file config_defaults.yaml. + CCPA_OBS_DIR: "{{ workflow.EXPTDIR }}/obs_data/ccpa" + NOHRSC_OBS_DIR: "{{ workflow.EXPTDIR }}/obs_data/nohrsc" + MRMS_OBS_DIR: "{{ workflow.EXPTDIR }}/obs_data/mrms" + NDAS_OBS_DIR: "{{ workflow.EXPTDIR }}/obs_data/ndas" + # Do not remove raw obs files to be able to verify that only the necessary + # raw files are fetched from HPSS. 
+ REMOVE_RAW_OBS_CCPA: false + REMOVE_RAW_OBS_MRMS: false + REMOVE_RAW_OBS_NDAS: false + REMOVE_RAW_OBS_NOHRSC: false + # + VX_FCST_MODEL_NAME: 'hrrr_ncep' + VX_FCST_INPUT_BASEDIR: '{{- "/".join([platform.TEST_EXTRN_MDL_SOURCE_BASEDIR, verification.VX_FCST_MODEL_NAME]) }}' + FCST_FN_TEMPLATE: '${POST_OUTPUT_DOMAIN_NAME}_{init?fmt=%Y%m%d%H?shift=-${time_lag}}f{lead?fmt=%HHH?shift=${time_lag}}.grib2' + FCST_FN_TEMPLATE_PCPCOMBINE_OUTPUT: '${POST_OUTPUT_DOMAIN_NAME}_{init?fmt=%Y%m%d%H?shift=-${time_lag}}f{lead?fmt=%HHH?shift=${time_lag}}_${FIELD_GROUP}_a${ACCUM_HH}h.nc' diff --git a/tests/WE2E/test_configs/verification/config.vx-det_multicyc_first-obs-00z_ncep-hrrr.yaml b/tests/WE2E/test_configs/verification/config.vx-det_multicyc_first-obs-00z_ncep-hrrr.yaml new file mode 100644 index 0000000000..80654ec42d --- /dev/null +++ b/tests/WE2E/test_configs/verification/config.vx-det_multicyc_first-obs-00z_ncep-hrrr.yaml @@ -0,0 +1,61 @@ +metadata: + description: |- + SRW App configuration file to first pull CCPA, MRMS, and NDAS observations + from HPSS for multiple cycles and then perform deterministic verification + for all cycles, including first performing vx preprocessing with METplus + tools such as PcpCombine and Pb2Nc. + + The staged forecast data are from NCEP's operational version of the HRRR. + + This test is for the scenario in which the first obs needed is at 00z. It + tests the special treatment needed for obtaining CCPA and NDAS obs at 00z. + +user: + RUN_ENVIR: community + +platform: + EXTRN_MDL_DATA_STORES: hpss + +workflow: + PREEXISTING_DIR_METHOD: rename + PREDEF_GRID_NAME: RRFS_CONUS_25km + INCR_CYCL_FREQ: 24 + DATE_FIRST_CYCL: '2024042900' + DATE_LAST_CYCL: '2024043000' + FCST_LEN_HRS: 3 + +rocoto: + tasks: + taskgroups: '{{ ["parm/wflow/verify_pre.yaml", "parm/wflow/verify_det.yaml"]|include }}' + +task_get_extrn_lbcs: + LBC_SPEC_INTVL_HRS: 3 + +task_run_post: + POST_OUTPUT_DOMAIN_NAME: 'hrrr_ncep' + +verification: + METPLUS_VERBOSITY_LEVEL: 5 + # Base directories in which to look for obs. If these do not exist and/or + # do not contain the required obs, create and populate them. + # Note that when performing WE2E tests, the default behavior is not to + # get obs files from HPSS but to use staged obs files. This is done by + # setting these variables to the (platform-specific) locations of these + # staged files. To force the WE2E testing system to get the obs from + # HPSS, here we reset these variables to their default values in the SRW + # workflow configuration file config_defaults.yaml. + CCPA_OBS_DIR: "{{ workflow.EXPTDIR }}/obs_data/ccpa" + NOHRSC_OBS_DIR: "{{ workflow.EXPTDIR }}/obs_data/nohrsc" + MRMS_OBS_DIR: "{{ workflow.EXPTDIR }}/obs_data/mrms" + NDAS_OBS_DIR: "{{ workflow.EXPTDIR }}/obs_data/ndas" + # Do not remove raw obs files to be able to verify that only the necessary + # raw files are fetched from HPSS. 
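The ?shift= modifiers in the FCST_FN_TEMPLATE used by these hrrr_ncep tests handle time-lagged members: the init time is shifted back by time_lag while the lead is shifted forward by the same amount, so the valid time (init + lead) is unchanged. A sketch of the idea, with a hypothetical expander and time_lag expressed in hours for readability (the workflow supplies it in whatever units METplus expects):

from datetime import datetime, timedelta

def lagged_name(init, lead_hrs, time_lag_hrs, domain="hrrr_ncep"):
    # init?fmt=%Y%m%d%H?shift=-time_lag and lead?fmt=%HHH?shift=+time_lag
    shifted_init = init - timedelta(hours=time_lag_hrs)
    shifted_lead = lead_hrs + time_lag_hrs
    return f"{domain}_{shifted_init:%Y%m%d%H}f{shifted_lead:03d}.grib2"

print(lagged_name(datetime(2024, 4, 29, 2), lead_hrs=3, time_lag_hrs=0))
# hrrr_ncep_2024042902f003.grib2 (no lag: names match the raw HRRR files)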
+ REMOVE_RAW_OBS_CCPA: false + REMOVE_RAW_OBS_MRMS: false + REMOVE_RAW_OBS_NDAS: false + REMOVE_RAW_OBS_NOHRSC: false + # + VX_FCST_MODEL_NAME: 'hrrr_ncep' + VX_FCST_INPUT_BASEDIR: '{{- "/".join([platform.TEST_EXTRN_MDL_SOURCE_BASEDIR, verification.VX_FCST_MODEL_NAME]) }}' + FCST_FN_TEMPLATE: '${POST_OUTPUT_DOMAIN_NAME}_{init?fmt=%Y%m%d%H?shift=-${time_lag}}f{lead?fmt=%HHH?shift=${time_lag}}.grib2' + FCST_FN_TEMPLATE_PCPCOMBINE_OUTPUT: '${POST_OUTPUT_DOMAIN_NAME}_{init?fmt=%Y%m%d%H?shift=-${time_lag}}f{lead?fmt=%HHH?shift=${time_lag}}_${FIELD_GROUP}_a${ACCUM_HH}h.nc' diff --git a/tests/WE2E/test_configs/verification/config.vx-det_multicyc_last-obs-00z_ncep-hrrr.yaml b/tests/WE2E/test_configs/verification/config.vx-det_multicyc_last-obs-00z_ncep-hrrr.yaml new file mode 100644 index 0000000000..18508af72e --- /dev/null +++ b/tests/WE2E/test_configs/verification/config.vx-det_multicyc_last-obs-00z_ncep-hrrr.yaml @@ -0,0 +1,61 @@ +metadata: + description: |- + SRW App configuration file to first pull CCPA, MRMS, and NDAS observations + from HPSS for multiple cycles and then perform deterministic verification + for all cycles, including first performing vx preprocessing with METplus + tools such as PcpCombine and Pb2Nc. + + The staged forecast data are from NCEP's operational version of the HRRR. + + This test is for the scenario in which the last obs needed is at 00z. It + tests the special treatment needed for obtaining CCPA and NDAS obs at 00z. + +user: + RUN_ENVIR: community + +platform: + EXTRN_MDL_DATA_STORES: hpss + +workflow: + PREEXISTING_DIR_METHOD: rename + PREDEF_GRID_NAME: RRFS_CONUS_25km + INCR_CYCL_FREQ: 24 + DATE_FIRST_CYCL: '2024042921' + DATE_LAST_CYCL: '2024043021' + FCST_LEN_HRS: 3 + +rocoto: + tasks: + taskgroups: '{{ ["parm/wflow/verify_pre.yaml", "parm/wflow/verify_det.yaml"]|include }}' + +task_get_extrn_lbcs: + LBC_SPEC_INTVL_HRS: 3 + +task_run_post: + POST_OUTPUT_DOMAIN_NAME: 'hrrr_ncep' + +verification: + METPLUS_VERBOSITY_LEVEL: 5 + # Base directories in which to look for obs. If these do not exist and/or + # do not contain the required obs, create and populate them. + # Note that when performing WE2E tests, the default behavior is not to + # get obs files from HPSS but to use staged obs files. This is done by + # setting these variables to the (platform-specific) locations of these + # staged files. To force the WE2E testing system to get the obs from + # HPSS, here we reset these variables to their default values in the SRW + # workflow configuration file config_defaults.yaml. + CCPA_OBS_DIR: "{{ workflow.EXPTDIR }}/obs_data/ccpa" + NOHRSC_OBS_DIR: "{{ workflow.EXPTDIR }}/obs_data/nohrsc" + MRMS_OBS_DIR: "{{ workflow.EXPTDIR }}/obs_data/mrms" + NDAS_OBS_DIR: "{{ workflow.EXPTDIR }}/obs_data/ndas" + # Do not remove raw obs files to be able to verify that only the necessary + # raw files are fetched from HPSS. 
+ REMOVE_RAW_OBS_CCPA: false + REMOVE_RAW_OBS_MRMS: false + REMOVE_RAW_OBS_NDAS: false + REMOVE_RAW_OBS_NOHRSC: false + # + VX_FCST_MODEL_NAME: 'hrrr_ncep' + VX_FCST_INPUT_BASEDIR: '{{- "/".join([platform.TEST_EXTRN_MDL_SOURCE_BASEDIR, verification.VX_FCST_MODEL_NAME]) }}' + FCST_FN_TEMPLATE: '${POST_OUTPUT_DOMAIN_NAME}_{init?fmt=%Y%m%d%H?shift=-${time_lag}}f{lead?fmt=%HHH?shift=${time_lag}}.grib2' + FCST_FN_TEMPLATE_PCPCOMBINE_OUTPUT: '${POST_OUTPUT_DOMAIN_NAME}_{init?fmt=%Y%m%d%H?shift=-${time_lag}}f{lead?fmt=%HHH?shift=${time_lag}}_${FIELD_GROUP}_a${ACCUM_HH}h.nc' diff --git a/tests/WE2E/test_configs/verification/config.vx-det_multicyc_long-fcst-no-overlap_nssl-mpas.yaml b/tests/WE2E/test_configs/verification/config.vx-det_multicyc_long-fcst-no-overlap_nssl-mpas.yaml new file mode 100644 index 0000000000..2745c580e3 --- /dev/null +++ b/tests/WE2E/test_configs/verification/config.vx-det_multicyc_long-fcst-no-overlap_nssl-mpas.yaml @@ -0,0 +1,63 @@ +metadata: + description: |- + SRW App configuration file to first pull CCPA, MRMS, and NDAS observations + from HPSS for multiple cycles and then perform deterministic verification + for all cycles, including first performing vx preprocessing with METplus + tools such as PcpCombine and Pb2Nc. + + The staged forecast data are from one of NSSL's MPAS prototypes submitted + to the 2024 HWT Spring Forecast Experiment. + + This test is for the scenario in which there are multiple, long (i.e. + longer than 24hr) NON-overlapping forecasts with multi-day gaps between + the end of one forecast and the start of the next. + +user: + RUN_ENVIR: community + +platform: + EXTRN_MDL_DATA_STORES: hpss + +workflow: + PREEXISTING_DIR_METHOD: rename + PREDEF_GRID_NAME: RRFS_CONUS_25km + INCR_CYCL_FREQ: 96 + DATE_FIRST_CYCL: '2024042912' + DATE_LAST_CYCL: '2024051112' + FCST_LEN_HRS: 48 + +rocoto: + tasks: + taskgroups: '{{ ["parm/wflow/verify_pre.yaml", "parm/wflow/verify_det.yaml"]|include }}' + +task_get_extrn_lbcs: + LBC_SPEC_INTVL_HRS: 3 + +task_run_post: + POST_OUTPUT_DOMAIN_NAME: 'mpashn4nssl' + +verification: + METPLUS_VERBOSITY_LEVEL: 5 + # Base directories in which to look for obs. If these do not exist and/or + # do not contain the required obs, create and populate them. + # Note that when performing WE2E tests, the default behavior is not to + # get obs files from HPSS but to use staged obs files. This is done by + # setting these variables to the (platform-specific) locations of these + # staged files. To force the WE2E testing system to get the obs from + # HPSS, here we reset these variables to their default values in the SRW + # workflow configuration file config_defaults.yaml. + CCPA_OBS_DIR: "{{ workflow.EXPTDIR }}/obs_data/ccpa" + NOHRSC_OBS_DIR: "{{ workflow.EXPTDIR }}/obs_data/nohrsc" + MRMS_OBS_DIR: "{{ workflow.EXPTDIR }}/obs_data/mrms" + NDAS_OBS_DIR: "{{ workflow.EXPTDIR }}/obs_data/ndas" + # Do not remove raw obs files to be able to verify that only the necessary + # raw files are fetched from HPSS. 
+ REMOVE_RAW_OBS_CCPA: false + REMOVE_RAW_OBS_MRMS: false + REMOVE_RAW_OBS_NDAS: false + REMOVE_RAW_OBS_NOHRSC: false + # + VX_FCST_MODEL_NAME: 'NSSL-MPAS-HN' + VX_FCST_INPUT_BASEDIR: '{{- "/".join([platform.TEST_EXTRN_MDL_SOURCE_BASEDIR, verification.VX_FCST_MODEL_NAME]) }}' + FCST_FN_TEMPLATE: '${POST_OUTPUT_DOMAIN_NAME}_{init?fmt=%Y%m%d%H?shift=-${time_lag}}f{lead?fmt=%H?shift=${time_lag}}.grib2' + FCST_FN_TEMPLATE_PCPCOMBINE_OUTPUT: '${POST_OUTPUT_DOMAIN_NAME}_{init?fmt=%Y%m%d%H?shift=-${time_lag}}f{lead?fmt=%HHH?shift=${time_lag}}_${FIELD_GROUP}_a${ACCUM_HH}h.nc' diff --git a/tests/WE2E/test_configs/verification/config.vx-det_multicyc_long-fcst-overlap_nssl-mpas.yaml b/tests/WE2E/test_configs/verification/config.vx-det_multicyc_long-fcst-overlap_nssl-mpas.yaml new file mode 100644 index 0000000000..fbd67884a5 --- /dev/null +++ b/tests/WE2E/test_configs/verification/config.vx-det_multicyc_long-fcst-overlap_nssl-mpas.yaml @@ -0,0 +1,62 @@ +metadata: + description: |- + SRW App configuration file to first pull CCPA, MRMS, and NDAS observations + from HPSS for multiple cycles and then perform deterministic verification + for all cycles, including first performing vx preprocessing with METplus + tools such as PcpCombine and Pb2Nc. + + The staged forecast data are from one of NSSL's MPAS prototypes submitted + to the 2024 HWT Spring Forecast Experiment. + + This test is for the scenario in which there are multiple, long (i.e. + longer than 24hr) overlapping forecasts. + +user: + RUN_ENVIR: community + +platform: + EXTRN_MDL_DATA_STORES: hpss + +workflow: + PREEXISTING_DIR_METHOD: rename + PREDEF_GRID_NAME: RRFS_CONUS_25km + INCR_CYCL_FREQ: 24 + DATE_FIRST_CYCL: '2024042912' + DATE_LAST_CYCL: '2024050212' + FCST_LEN_HRS: 48 + +rocoto: + tasks: + taskgroups: '{{ ["parm/wflow/verify_pre.yaml", "parm/wflow/verify_det.yaml"]|include }}' + +task_get_extrn_lbcs: + LBC_SPEC_INTVL_HRS: 3 + +task_run_post: + POST_OUTPUT_DOMAIN_NAME: 'mpashn4nssl' + +verification: + METPLUS_VERBOSITY_LEVEL: 5 + # Base directories in which to look for obs. If these do not exist and/or + # do not contain the required obs, create and populate them. + # Note that when performing WE2E tests, the default behavior is not to + # get obs files from HPSS but to use staged obs files. This is done by + # setting these variables to the (platform-specific) locations of these + # staged files. To force the WE2E testing system to get the obs from + # HPSS, here we reset these variables to their default values in the SRW + # workflow configuration file config_defaults.yaml. + CCPA_OBS_DIR: "{{ workflow.EXPTDIR }}/obs_data/ccpa" + NOHRSC_OBS_DIR: "{{ workflow.EXPTDIR }}/obs_data/nohrsc" + MRMS_OBS_DIR: "{{ workflow.EXPTDIR }}/obs_data/mrms" + NDAS_OBS_DIR: "{{ workflow.EXPTDIR }}/obs_data/ndas" + # Do not remove raw obs files to be able to verify that only the necessary + # raw files are fetched from HPSS. 
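The DATE_FIRST_CYCL, DATE_LAST_CYCL, and INCR_CYCL_FREQ settings in the long-fcst-overlap test just above define the cycle set for this multi-cycle vx run. A few lines of Python reproduce the expected list; this mirrors what set_cycle_dates() (whose unit tests are updated later in this diff) computes:

from datetime import datetime, timedelta

first = datetime(2024, 4, 29, 12)   # DATE_FIRST_CYCL
last = datetime(2024, 5, 2, 12)     # DATE_LAST_CYCL
intvl = timedelta(hours=24)         # INCR_CYCL_FREQ
cycles, t = [], first
while t <= last:
    cycles.append(t.strftime("%Y%m%d%H"))
    t += intvl
print(cycles)  # ['2024042912', '2024043012', '2024050112', '2024050212']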
+ REMOVE_RAW_OBS_CCPA: false + REMOVE_RAW_OBS_MRMS: false + REMOVE_RAW_OBS_NDAS: false + REMOVE_RAW_OBS_NOHRSC: false + # + VX_FCST_MODEL_NAME: 'NSSL-MPAS-HN' + VX_FCST_INPUT_BASEDIR: '{{- "/".join([platform.TEST_EXTRN_MDL_SOURCE_BASEDIR, verification.VX_FCST_MODEL_NAME]) }}' + FCST_FN_TEMPLATE: '${POST_OUTPUT_DOMAIN_NAME}_{init?fmt=%Y%m%d%H?shift=-${time_lag}}f{lead?fmt=%H?shift=${time_lag}}.grib2' + FCST_FN_TEMPLATE_PCPCOMBINE_OUTPUT: '${POST_OUTPUT_DOMAIN_NAME}_{init?fmt=%Y%m%d%H?shift=-${time_lag}}f{lead?fmt=%HHH?shift=${time_lag}}_${FIELD_GROUP}_a${ACCUM_HH}h.nc' diff --git a/tests/WE2E/test_configs/verification/config.vx-det_multicyc_no-00z-obs_nssl-mpas.yaml b/tests/WE2E/test_configs/verification/config.vx-det_multicyc_no-00z-obs_nssl-mpas.yaml new file mode 100644 index 0000000000..85f55c8fe4 --- /dev/null +++ b/tests/WE2E/test_configs/verification/config.vx-det_multicyc_no-00z-obs_nssl-mpas.yaml @@ -0,0 +1,63 @@ +metadata: + description: |- + SRW App configuration file to first pull CCPA, MRMS, and NDAS observations + from HPSS for multiple cycles and then perform deterministic verification + for all cycles, including first performing vx preprocessing with METplus + tools such as PcpCombine and Pb2Nc. + + The staged forecast data are from one of NSSL's MPAS prototypes submitted + to the 2024 HWT Spring Forecast Experiment. + + This test is for the scenario in which forecasts do not include 00z. It + is the simplest case of obtaining CCPA and NDAS obs because it avoids + testing the special treatment needed at 00z for these obs types. + +user: + RUN_ENVIR: community + +platform: + EXTRN_MDL_DATA_STORES: hpss + +workflow: + PREEXISTING_DIR_METHOD: rename + PREDEF_GRID_NAME: RRFS_CONUS_25km + INCR_CYCL_FREQ: 24 + DATE_FIRST_CYCL: '2024042912' + DATE_LAST_CYCL: '2024043012' + FCST_LEN_HRS: 3 + +rocoto: + tasks: + taskgroups: '{{ ["parm/wflow/verify_pre.yaml", "parm/wflow/verify_det.yaml"]|include }}' + +task_get_extrn_lbcs: + LBC_SPEC_INTVL_HRS: 3 + +task_run_post: + POST_OUTPUT_DOMAIN_NAME: 'mpashn4nssl' + +verification: + METPLUS_VERBOSITY_LEVEL: 5 + # Base directories in which to look for obs. If these do not exist and/or + # do not contain the required obs, create and populate them. + # Note that when performing WE2E tests, the default behavior is not to + # get obs files from HPSS but to use staged obs files. This is done by + # setting these variables to the (platform-specific) locations of these + # staged files. To force the WE2E testing system to get the obs from + # HPSS, here we reset these variables to their default values in the SRW + # workflow configuration file config_defaults.yaml. + CCPA_OBS_DIR: "{{ workflow.EXPTDIR }}/obs_data/ccpa" + NOHRSC_OBS_DIR: "{{ workflow.EXPTDIR }}/obs_data/nohrsc" + MRMS_OBS_DIR: "{{ workflow.EXPTDIR }}/obs_data/mrms" + NDAS_OBS_DIR: "{{ workflow.EXPTDIR }}/obs_data/ndas" + # Do not remove raw obs files to be able to verify that only the necessary + # raw files are fetched from HPSS. 
+ REMOVE_RAW_OBS_CCPA: false + REMOVE_RAW_OBS_MRMS: false + REMOVE_RAW_OBS_NDAS: false + REMOVE_RAW_OBS_NOHRSC: false + # + VX_FCST_MODEL_NAME: 'NSSL-MPAS-HN' + VX_FCST_INPUT_BASEDIR: '{{- "/".join([platform.TEST_EXTRN_MDL_SOURCE_BASEDIR, verification.VX_FCST_MODEL_NAME]) }}' + FCST_FN_TEMPLATE: '${POST_OUTPUT_DOMAIN_NAME}_{init?fmt=%Y%m%d%H?shift=-${time_lag}}f{lead?fmt=%H?shift=${time_lag}}.grib2' + FCST_FN_TEMPLATE_PCPCOMBINE_OUTPUT: '${POST_OUTPUT_DOMAIN_NAME}_{init?fmt=%Y%m%d%H?shift=-${time_lag}}f{lead?fmt=%HHH?shift=${time_lag}}_${FIELD_GROUP}_a${ACCUM_HH}h.nc' diff --git a/tests/WE2E/test_configs/verification/config.vx-det_multicyc_no-fcst-overlap_ncep-hrrr.yaml b/tests/WE2E/test_configs/verification/config.vx-det_multicyc_no-fcst-overlap_ncep-hrrr.yaml new file mode 100644 index 0000000000..c65fb74ec4 --- /dev/null +++ b/tests/WE2E/test_configs/verification/config.vx-det_multicyc_no-fcst-overlap_ncep-hrrr.yaml @@ -0,0 +1,63 @@ +metadata: + description: |- + SRW App configuration file to first pull CCPA, MRMS, and NDAS observations + from HPSS for multiple cycles and then perform deterministic verification + for all cycles, including first performing vx preprocessing with METplus + tools such as PcpCombine and Pb2Nc. + + The staged forecast data are from NCEP's operational version of the HRRR. + + This test is for the scenario in which there are multiple, short (i.e. + shorter than 24hr), NON-overlapping forecasts in a day with multi-hour + (but < 24hr) gaps between the end of one forecast and the start of the + next. + +user: + RUN_ENVIR: community + +platform: + EXTRN_MDL_DATA_STORES: hpss + +workflow: + PREEXISTING_DIR_METHOD: rename + PREDEF_GRID_NAME: RRFS_CONUS_25km + INCR_CYCL_FREQ: 11 + DATE_FIRST_CYCL: '2024042902' + DATE_LAST_CYCL: '2024043022' + FCST_LEN_HRS: 3 + +rocoto: + tasks: + taskgroups: '{{ ["parm/wflow/verify_pre.yaml", "parm/wflow/verify_det.yaml"]|include }}' + +task_get_extrn_lbcs: + LBC_SPEC_INTVL_HRS: 3 + +task_run_post: + POST_OUTPUT_DOMAIN_NAME: 'hrrr_ncep' + +verification: + METPLUS_VERBOSITY_LEVEL: 5 + # Base directories in which to look for obs. If these do not exist and/or + # do not contain the required obs, create and populate them. + # Note that when performing WE2E tests, the default behavior is not to + # get obs files from HPSS but to use staged obs files. This is done by + # setting these variables to the (platform-specific) locations of these + # staged files. To force the WE2E testing system to get the obs from + # HPSS, here we reset these variables to their default values in the SRW + # workflow configuration file config_defaults.yaml. + CCPA_OBS_DIR: "{{ workflow.EXPTDIR }}/obs_data/ccpa" + NOHRSC_OBS_DIR: "{{ workflow.EXPTDIR }}/obs_data/nohrsc" + MRMS_OBS_DIR: "{{ workflow.EXPTDIR }}/obs_data/mrms" + NDAS_OBS_DIR: "{{ workflow.EXPTDIR }}/obs_data/ndas" + # Do not remove raw obs files to be able to verify that only the necessary + # raw files are fetched from HPSS. 
+ REMOVE_RAW_OBS_CCPA: false + REMOVE_RAW_OBS_MRMS: false + REMOVE_RAW_OBS_NDAS: false + REMOVE_RAW_OBS_NOHRSC: false + # + VX_FCST_MODEL_NAME: 'hrrr_ncep' + VX_FCST_INPUT_BASEDIR: '{{- "/".join([platform.TEST_EXTRN_MDL_SOURCE_BASEDIR, verification.VX_FCST_MODEL_NAME]) }}' + FCST_FN_TEMPLATE: '${POST_OUTPUT_DOMAIN_NAME}_{init?fmt=%Y%m%d%H?shift=-${time_lag}}f{lead?fmt=%HHH?shift=${time_lag}}.grib2' + FCST_FN_TEMPLATE_PCPCOMBINE_OUTPUT: '${POST_OUTPUT_DOMAIN_NAME}_{init?fmt=%Y%m%d%H?shift=-${time_lag}}f{lead?fmt=%HHH?shift=${time_lag}}_${FIELD_GROUP}_a${ACCUM_HH}h.nc' diff --git a/tests/WE2E/utils.py b/tests/WE2E/utils.py index fb96dab004..d46d52b468 100755 --- a/tests/WE2E/utils.py +++ b/tests/WE2E/utils.py @@ -21,18 +21,18 @@ cfg_to_yaml_str, flatten_dict, load_config_file, - load_shell_config + load_yaml_config ) REPORT_WIDTH = 100 EXPT_COLUMN_WIDTH = 65 TASK_COLUMN_WIDTH = 40 def print_WE2E_summary(expts_dict: dict, debug: bool = False): - """Function that creates a summary for the specified experiment + """Creates a summary of the specified experiment Args: expts_dict (dict): A dictionary containing the information needed to run - one or more experiments. See example file WE2E_tests.yaml + one or more experiments. See example file ``WE2E_tests.yaml``. debug (bool): [optional] Enable extra output for debugging Returns: @@ -53,7 +53,7 @@ def print_WE2E_summary(expts_dict: dict, debug: bool = False): expt_details.append('') expt_details.append('-'*REPORT_WIDTH) expt_details.append(f'Detailed summary of experiment {expt}') - expt_details.append(f"in directory {expts_dict[expt]['expt_dir']}") + expt_details.append(f"in directory {os.path.abspath(expts_dict[expt]['expt_dir'])}") expt_details.append(f'{" "*TASK_COLUMN_WIDTH}| Status | Walltime | Core hours used') expt_details.append('-'*REPORT_WIDTH) @@ -105,16 +105,16 @@ def print_WE2E_summary(expts_dict: dict, debug: bool = False): for line in expt_details: f.write(f"{line}\n") -def create_expts_dict(expt_dir: str) -> dict: +def create_expts_dict(expt_dir: str): """ - Function takes in a directory, searches that directory for subdirectories containing - experiments, and creates a skeleton dictionary that can be filled out by update_expt_status() + Takes in a directory, searches that directory for subdirectories containing + experiments, and creates a skeleton dictionary that can be filled out by ``update_expt_status()`` Args: - expt_dir (str): Experiment directory + expt_dir (str): Experiment directory name Returns: - dict: Experiment dictionary + (summary_file, expts_dict): A tuple including the name of the summary file (``WE2E_tests_YYYYMMDDHHmmSS.yaml``) and the experiment dictionary """ contents = sorted(os.listdir(expt_dir)) @@ -123,6 +123,12 @@ def create_expts_dict(expt_dir: str) -> dict: # Look for FV3LAM_wflow.xml to indicate directories with experiments in them fullpath = os.path.join(expt_dir, item) if not os.path.isdir(fullpath): + # If user is providing an experiment subdir directly, print a warning + if item == "FV3LAM_wflow.xml": + msg = "WARNING: found a rocoto XML in the provided directory!\n" + msg += "This script will only look for experiments in subdirectories\n" + msg += f"of the provided directory {expt_dir}\n" + logging.warning(msg) continue xmlfile = os.path.join(expt_dir, item, 'FV3LAM_wflow.xml') if os.path.isfile(xmlfile): @@ -141,26 +147,25 @@ def create_expts_dict(expt_dir: str) -> dict: def calculate_core_hours(expts_dict: dict) -> dict: """ - Function takes in an experiment dictionary, reads the var_defns file 
for necessary information, - and calculates the core hours used by each task, updating expts_dict with this info + Takes in an experiment dictionary, reads the ``var_defns.yaml`` file for necessary information, + and calculates the core hours used by each task, updating ``expts_dict`` with this information Args: - expts_dict (dict): A dictionary containing the information needed to run - one or more experiments. See example file WE2E_tests.yaml + expts_dict (dict): The information needed to run one or more experiments. See example file ``WE2E_tests.yaml`` Returns: - dict: Experiments dictionary updated with core hours + expts_dict: Experiment dictionary updated with core hours """ for expt in expts_dict: # Read variable definitions file - vardefs_file = os.path.join(expts_dict[expt]["expt_dir"],"var_defns.sh") + vardefs_file = os.path.join(expts_dict[expt]["expt_dir"],"var_defns.yaml") if not os.path.isfile(vardefs_file): logging.warning(f"\nWARNING: For experiment {expt}, variable definitions file") logging.warning(f"{vardefs_file}\ndoes not exist!\n\nDropping experiment from summary") continue logging.debug(f'Reading variable definitions file {vardefs_file}') - vardefs = load_shell_config(vardefs_file) + vardefs = load_yaml_config(vardefs_file) vdf = flatten_dict(vardefs) cores_per_node = vdf["NCORES_PER_NODE"] for task in expts_dict[expt]: @@ -170,8 +175,8 @@ # Cycle is last 12 characters, task name is rest (minus separating underscore) taskname = task[:-13] # Handle task names that have ensemble and/or fhr info appended with regex - taskname = re.sub('_mem\d{3}', '', taskname) - taskname = re.sub('_f\d{3}', '', taskname) + taskname = re.sub(r'_mem\d{3}', '', taskname) + taskname = re.sub(r'_f\d{3}', '', taskname) nnodes_var = f'NNODES_{taskname.upper()}' if nnodes_var in vdf: nnodes = vdf[nnodes_var] @@ -188,6 +193,16 @@ def write_monitor_file(monitor_file: str, expts_dict: dict): + """Writes status of tests to file + + Args: + monitor_file (str): File name + expts_dict (dict): Experiments being monitored + Returns: + None + Raises: + KeyboardInterrupt: If a user attempts to disrupt program execution (e.g., with ``Ctrl+C``) while program is writing information to ``monitor_file``. + """ try: with open(monitor_file,"w", encoding="utf-8") as f: f.write("### WARNING ###\n") @@ -209,55 +224,29 @@ def update_expt_status(expt: dict, name: str, refresh: bool = False, debug: bool = False, submit: bool = True) -> dict: """ - This function reads the dictionary showing the location of a given experiment, runs a - `rocotorun` command to update the experiment (running new jobs and updating the status of - previously submitted ones), and reads the rocoto database file to update the status of - each job for that experiment in the experiment dictionary. - - The function then and uses a simple set of rules to combine the statuses of every task - into a useful "status" for the whole experiment, and returns the updated experiment dictionary. - - Experiment "status" levels explained: - CREATED: The experiments have been created, but the monitor script has not yet processed them. - This is immediately overwritten at the beginning of the "monitor_jobs" function, so we - should never see this status in this function. Including just for completeness sake. - SUBMITTING: All jobs are in status SUBMITTING or SUCCEEDED.
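For intuition on the bookkeeping in calculate_core_hours() above: each task's cost is roughly nodes x cores-per-node x walltime. A worked example with made-up numbers (the real values come from the NNODES_* and NCORES_PER_NODE entries read out of var_defns.yaml):

nnodes = 2                  # hypothetical NNODES_<TASK>
ncores_per_node = 24        # hypothetical NCORES_PER_NODE
walltime_hrs = 1800 / 3600  # 30 minutes of walltime
print(nnodes * ncores_per_node * walltime_hrs)  # 24.0 core hours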
This is a normal state; we will - continue to monitor this experiment. - DYING: One or more tasks have died (status "DEAD"), so this experiment has had an error. - We will continue to monitor this experiment until all tasks are either status DEAD or - status SUCCEEDED (see next entry). - DEAD: One or more tasks are at status DEAD, and the rest are either DEAD or SUCCEEDED. We - will no longer monitor this experiment. - ERROR: Could not read the rocoto database file. This will require manual intervention to - solve, so we will no longer monitor this experiment. - This status may also appear if we fail to read the rocoto database file. - RUNNING: One or more jobs are at status RUNNING, and the rest are either status QUEUED, - SUBMITTED, or SUCCEEDED. This is a normal state; we will continue to monitor this - experiment. - QUEUED: One or more jobs are at status QUEUED, and some others may be at status SUBMITTED or - SUCCEEDED. - This is a normal state; we will continue to monitor this experiment. - SUCCEEDED: All jobs are status SUCCEEDED; we will monitor for one more cycle in case there are - unsubmitted jobs remaining. - COMPLETE:All jobs are status SUCCEEDED, and we have monitored this job for an additional cycle - to ensure there are no un-submitted jobs. We will no longer monitor this experiment. + This function reads the dictionary for a given experiment, runs the ``rocotorun`` command to update the experiment (by running new jobs and updating the status of previously submitted ones), and reads the Rocoto database (``.db``) file to update the status of each job in the experiment dictionary. The function then uses a simple set of rules to combine the statuses of every task into a useful summary status for the whole experiment and returns the updated experiment dictionary. + + Experiment status levels explained: + + * **CREATED:** The experiments have been created, but the monitor script has not yet processed them. This is immediately overwritten at the beginning of the ``monitor_jobs()`` function. + * **SUBMITTING:** All jobs are in status SUBMITTING or SUCCEEDED. This is a normal state; experiment monitoring will continue. + * **DYING:** One or more tasks have died (status DEAD), so this experiment has an error. Experiment monitoring will continue until all previously submitted tasks are in either status DEAD or status SUCCEEDED (see next entry). + * **DEAD:** One or more tasks are in status DEAD, and other previously submitted jobs are either DEAD or SUCCEEDED. This experiment will no longer be monitored. + * **ERROR:** Could not read the Rocoto database (``.db``) file. This will require manual intervention to solve, so the experiment will no longer be monitored. + * **RUNNING:** One or more jobs are in status RUNNING, and other previously submitted jobs are in status QUEUED, SUBMITTED, or SUCCEEDED. This is a normal state; experiment monitoring will continue. + * **QUEUED:** One or more jobs are in status QUEUED, and some others may be in status SUBMITTED or SUCCEEDED. This is a normal state; experiment monitoring will continue. + * **SUCCEEDED:** All jobs are in status SUCCEEDED; experiment monitoring will continue for one more cycle in case there are unsubmitted jobs remaining. + * **COMPLETE:** All jobs are in status SUCCEEDED, and the experiment has been monitored for an additional cycle to ensure that there are no unsubmitted jobs. This experiment will no longer be monitored. 
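The status levels listed above combine per-task Rocoto states into one experiment-level summary. A condensed sketch of those rules, using a hypothetical helper (the authoritative logic lives in update_expt_status() itself and also accounts for the CREATED, SUCCEEDED-to-COMPLETE, and ERROR transitions):

def summarize(statuses):
    s = set(statuses)
    if "DEAD" in s:
        # DEAD once nothing else can change; DYING while other tasks still run
        return "DEAD" if s <= {"DEAD", "SUCCEEDED"} else "DYING"
    if "RUNNING" in s:
        return "RUNNING"
    if "QUEUED" in s:
        return "QUEUED"
    return "SUCCEEDED" if s == {"SUCCEEDED"} else "SUBMITTING"

print(summarize(["SUCCEEDED", "RUNNING", "QUEUED"]))  # RUNNING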
Args: - expt (dict): A dictionary containing the information for an individual experiment, as - described in the main monitor_jobs() function. + expt (dict): A dictionary containing the information for an individual experiment, as described in the main ``monitor_jobs()`` function. name (str): Name of the experiment; used for logging only - refresh (bool): If true, this flag will check an experiment status even if it is listed - as DEAD, ERROR, or COMPLETE. Used for initial checks for experiments - that may have been restarted. - debug (bool): Will capture all output from rocotorun. This will allow information such - as job cards and job submit messages to appear in the log files, but can - slow down the process drastically. - submit (bool): In addition to reading the rocoto database, script will advance the - workflow by calling rocotorun. If simply generating a report, set this - to False + refresh (bool): If True, this flag will check an experiment status even if it is listed as DEAD, ERROR, or COMPLETE. Used for initial checks for experiments that may have been restarted. + debug (bool): Will capture all output from ``rocotorun``. This will allow information such as job cards and job submit messages to appear in the log files, but turning on this option can drastically slow down the testing process. + submit (bool): In addition to reading the Rocoto database (``.db``) file, the script will advance the workflow by calling ``rocotorun``. If simply generating a report, set this to False. Returns: - dict: The updated experiment dictionary. + expt: The updated experiment dictionary """ #If we are no longer tracking this experiment, return unchanged @@ -305,8 +294,8 @@ def update_expt_status(expt: dict, name: str, refresh: bool = False, debug: bool for task in db: # For each entry from rocoto database, store that task's info under a dictionary key named - # TASKNAME_CYCLE; Cycle comes from the database in Unix Time (seconds), so convert to - # human-readable + # TASKNAME_CYCLE; cycle comes from the database in Unix Time (seconds), so convert to + # human-readable time cycle = datetime.utcfromtimestamp(task[1]).strftime('%Y%m%d%H%M') if f"{task[0]}_{cycle}" not in expt: expt[f"{task[0]}_{cycle}"] = dict() @@ -377,20 +366,18 @@ def update_expt_status_parallel(expts_dict: dict, procs: int, refresh: bool = Fa debug: bool = False) -> dict: """ This function updates an entire set of experiments in parallel, drastically speeding up - the process if given enough parallel processes. Given a dictionary of experiments, it will - pass each individual experiment dictionary to update_expt_status() to be updated, making use - of the python multiprocessing starmap functionality to achieve this in parallel + the testing if given enough parallel processes. Given a dictionary of experiments, it will + pass each individual experiment dictionary to ``update_expt_status()``, making use + of the Python multiprocessing ``starmap()`` functionality to update the experiments in parallel. Args: expts_dict (dict): A dictionary containing information for all experiments procs (int): The number of parallel processes - refresh (bool): "Refresh" flag to pass to update_expt_status() - debug (bool): Will capture all output from rocotorun. This will allow information such - as job cards and job submit messages to appear in the log files, but can - slow down the process drastically. + refresh (bool): "Refresh" flag to pass to ``update_expt_status()``. 
If True, this flag will check an experiment status even if it is listed as DEAD, ERROR, or COMPLETE. Used for initial checks for experiments that may have been restarted. + debug (bool): Will capture all output from ``rocotorun``. This will allow information such as job cards and job submit messages to appear in the log files, but can drastically slow down the testing process. Returns: - dict: The updated dictionary of experiment dictionaries + expts_dict: The updated dictionary of experiment dictionaries """ args = [] @@ -413,11 +400,12 @@ def update_expt_status_parallel(expts_dict: dict, procs: int, refresh: bool = Fa def print_test_info(txtfile: str = "WE2E_test_info.txt") -> None: - """Prints a pipe ( | ) delimited text file containing summaries of each test defined by a - config file in test_configs/* + """Prints a pipe-delimited ( ``|`` ) text file containing summaries of each test with a configuration file in ``test_configs/*`` Args: - txtfile (str): File name for test details file + txtfile (str): File name for test details file (default: ``WE2E_test_info.txt``) + Returns: + None """ testfiles = glob.glob('test_configs/**/config*.yaml', recursive=True) @@ -508,9 +496,15 @@ def print_test_info(txtfile: str = "WE2E_test_info.txt") -> None: def compare_rocotostat(expt_dict,name): - """Reads the dictionary showing the location of a given experiment, runs a `rocotostat` command + """Reads the dictionary showing the location of a given experiment, runs a ``rocotostat`` command to get the full set of tasks for the experiment, and compares the two to see if there are any unsubmitted tasks remaining. + + Args: + expt_dict (dict): A dictionary containing the information for an individual experiment + name (str): Name of the experiment + Returns: + expt_dict: A dictionary containing the information for an individual experiment """ # Call rocotostat and store output @@ -530,7 +524,7 @@ def compare_rocotostat(expt_dict,name): continue line_array = line.split() # Skip header lines - if line_array[0] == 'CYCLE': + if line_array[0] == 'CYCLE' or line_array[0] == '/apps/rocoto/1.3.3/lib/workflowmgr/launchserver.rb:40:': continue # We should now just have lines describing jobs, in the form: # line_array = ['cycle','task','jobid','status','exit status','num tries','walltime'] diff --git a/tests/test_python/test_create_diag_table_file.py b/tests/test_python/test_create_diag_table_file.py index f7559536f2..d90dcb3cd0 100644 --- a/tests/test_python/test_create_diag_table_file.py +++ b/tests/test_python/test_create_diag_table_file.py @@ -29,3 +29,4 @@ def setUp(self): set_env_var("DIAG_TABLE_TMPL_FP", diag_table_tmpl_fp) set_env_var("CRES", "C48") set_env_var("CDATE", "2021010106") + set_env_var("UFS_FIRE", False) diff --git a/tests/test_python/test_create_model_configure_file.py b/tests/test_python/test_create_model_configure_file.py index d5aea79ed8..9475028505 100644 --- a/tests/test_python/test_create_model_configure_file.py +++ b/tests/test_python/test_create_model_configure_file.py @@ -43,11 +43,9 @@ def setUp(self): set_env_var("USHdir", USHdir) set_env_var("MODEL_CONFIG_FN", MODEL_CONFIG_FN) set_env_var("MODEL_CONFIG_TMPL_FP", MODEL_CONFIG_TMPL_FP) - set_env_var("PE_MEMBER01", 24) set_env_var("FCST_LEN_HRS", 72) set_env_var("FHROT", 0) set_env_var("DT_ATMOS", 1) - set_env_var("OMP_NUM_THREADS_RUN_FCST", 1) set_env_var("RESTART_INTERVAL", 4) set_env_var("ITASKS", 1) diff --git a/tests/test_python/test_generate_FV3LAM_wflow.py b/tests/test_python/test_generate_FV3LAM_wflow.py index 
48029d21b6..aa4e038f7a 100644 --- a/tests/test_python/test_generate_FV3LAM_wflow.py +++ b/tests/test_python/test_generate_FV3LAM_wflow.py @@ -27,7 +27,7 @@ def test_generate_FV3LAM_wflow(self): # run workflows in separate process to avoid conflict between community and nco settings def run_workflow(USHdir, logfile): - p = Process(target=generate_FV3LAM_wflow, args=(USHdir, logfile)) + p = Process(target=generate_FV3LAM_wflow, args=(USHdir,"config.yaml",logfile)) p.start() p.join() exit_code = p.exitcode @@ -39,6 +39,15 @@ def run_workflow(USHdir, logfile): logfile = "log.generate_FV3LAM_wflow" sed = get_env_var("SED") + # create a build settings file if needed + EXECdir = os.path.join(USHdir, "..", "exec") + build_settings_file = os.path.join(EXECdir, "build_settings.yaml") + if not os.path.exists(build_settings_file): + os.makedirs(EXECdir, exist_ok=True) + with open(build_settings_file, 'w', encoding='utf-8') as build_settings: + build_settings.write('Machine: linux\n') + build_settings.write('Application:\n') + # community test case cp_vrfy(f"{USHdir}/config.community.yaml", f"{USHdir}/config.yaml") run_command( diff --git a/tests/test_python/test_retrieve_data.py b/tests/test_python/test_retrieve_data.py index 1d54e0904c..2c749c97ac 100644 --- a/tests/test_python/test_retrieve_data.py +++ b/tests/test_python/test_retrieve_data.py @@ -493,61 +493,3 @@ def test_ufs_lbcs_from_aws(self): # Testing that there is no failure retrieve_data.main(args) - - @unittest.skipIf(os.environ.get("CI") == "true", "Skipping HPSS tests") - def test_rap_obs_from_hpss(self): - - """Get RAP observations from hpss for a 06z time""" - - with tempfile.TemporaryDirectory(dir=self.path) as tmp_dir: - os.chdir(tmp_dir) - - # fmt: off - args = [ - '--file_set', 'obs', - '--config', self.config, - '--cycle_date', '2023032106', - '--data_stores', 'hpss', - '--data_type', 'RAP_obs', - '--output_path', tmp_dir, - '--debug', - ] - # fmt: on - - retrieve_data.main(args) - - # Verify files exist in temp dir - - path = os.path.join(tmp_dir, "*") - files_on_disk = glob.glob(path) - self.assertEqual(len(files_on_disk), 30) - - @unittest.skipIf(os.environ.get("CI") == "true", "Skipping HPSS tests") - def test_rap_e_obs_from_hpss(self): - - """Get RAP observations from hpss for a 12z time; - at 00z and 12z we expect to see additional files - with the 'rap_e' naming convention""" - - with tempfile.TemporaryDirectory(dir=self.path) as tmp_dir: - os.chdir(tmp_dir) - - # fmt: off - args = [ - '--file_set', 'obs', - '--config', self.config, - '--cycle_date', '2023032112', - '--data_stores', 'hpss', - '--data_type', 'RAP_obs', - '--output_path', tmp_dir, - '--debug', - ] - # fmt: on - - retrieve_data.main(args) - - # Verify files exist in temp dir - - path = os.path.join(tmp_dir, "*") - files_on_disk = glob.glob(path) - self.assertEqual(len(files_on_disk), 37) diff --git a/tests/test_python/test_set_cycle_dates.py b/tests/test_python/test_set_cycle_dates.py index eb76f579c6..8baae643ac 100644 --- a/tests/test_python/test_set_cycle_dates.py +++ b/tests/test_python/test_set_cycle_dates.py @@ -1,20 +1,22 @@ """ Test set_cycle_dates.py """ -from datetime import datetime +from datetime import datetime, timedelta import unittest -from set_cycle_dates import set_cycle_dates +from set_cycle_and_obs_timeinfo import set_cycle_dates class Testing(unittest.TestCase): """ Define the tests""" - def test_set_cycle_dates(self): + + def test_set_cycle_dates_string(self): """ Test that the proper list of dates are produced given the - intput data""" + 
input data and return_type left to its default value (so the + output should be a list of strings)""" cdates = set_cycle_dates( - date_start=datetime(2022, 1, 1, 6), - date_end=datetime(2022, 1, 2, 12), - incr_cycl_freq=6, + start_time_first_cycl=datetime(2022, 1, 1, 6), + start_time_last_cycl=datetime(2022, 1, 2, 12), + cycl_intvl=timedelta(hours=6), ) self.assertEqual( cdates, @@ -27,3 +29,26 @@ def test_set_cycle_dates(self): "2022010212", ], ) + + def test_set_cycle_dates_datetime(self): + + """ Test that the proper list of dates are produced given the + input data and return_type left set to "datetime" (so the output + should be a list of datetime objects)""" + cdates = set_cycle_dates( + start_time_first_cycl=datetime(2022, 1, 1, 6), + start_time_last_cycl=datetime(2022, 1, 2, 12), + cycl_intvl=timedelta(hours=6), + return_type="datetime", + ) + self.assertEqual( + cdates, + [ + datetime(2022, 1, 1, 6), + datetime(2022, 1, 1, 12), + datetime(2022, 1, 1, 18), + datetime(2022, 1, 2, 0), + datetime(2022, 1, 2, 6), + datetime(2022, 1, 2, 12), + ], + ) diff --git a/ufs_srweather_app.settings.in b/ufs_srweather_app.settings.in deleted file mode 100644 index 7958d2706f..0000000000 --- a/ufs_srweather_app.settings.in +++ /dev/null @@ -1,21 +0,0 @@ -# UFS Short Range Weather App Configuration Summary -=================================================== - -# General ---------- -SRWA Version: @SRWA_VERSION@ -Configured On: @CONFIG_DATE@ -Host System: @host_cpu@-@host_vendor@-@host_os@ -Build Directory: @abs_top_builddir@ -Install Prefix: @prefix@ - -# Compiling Options -------------------- -C Compiler: @CC_VERSION@ -CFLAGS: @CFLAGS@ -CPPFLAGS: @CPPFLAGS@ -LDFLAGS: @LDFLAGS@ -Shared Library: @enable_shared@ -Static Library: @enable_static@ -Extra libraries: @LIBS@ - diff --git a/ush/UFS_plot_domains.py b/ush/UFS_plot_domains.py index 847cd96725..827991287a 100755 --- a/ush/UFS_plot_domains.py +++ b/ush/UFS_plot_domains.py @@ -1,143 +1,39 @@ #!/usr/bin/env python +"""This script generates a plot of the ESG grid and the write component grid. + To use this script, modify the ESG grid and write component grid parameters in this script + manually to reflect the configuration of the grids you wish to plot. Then run the script. 
""" + import matplotlib.pyplot as plt from mpl_toolkits.basemap import Basemap from matplotlib.path import Path import matplotlib.patches as patches import numpy as np -#### User-defined variables - - -# Computational grid definitions -ESGgrid_LON_CTR = -153.0 -ESGgrid_LAT_CTR = 61.0 -ESGgrid_DELX = 3000.0 -ESGgrid_DELY = 3000.0 -ESGgrid_NX = 1344 -ESGgrid_NY = 1152 - -# Write component grid definitions - -WRTCMP_nx = 1340 -WRTCMP_ny = 1132 -WRTCMP_lon_lwr_left = 151.5 -WRTCMP_lat_lwr_left = 42.360 -WRTCMP_dx = ESGgrid_DELX -WRTCMP_dy = ESGgrid_DELY - -# Plot-specific definitions - -plot_res = "i" # background map resolution - -# Note: Resolution can be 'c' (crude), 'l' (low), 'i' (intermediate), 'h' (high), or 'f' (full) -# To plot maps with higher resolution than low, -# you will need to download and install the basemap-data-hires package - - -#### END User-defined variables - - -ESGgrid_width = ESGgrid_NX * ESGgrid_DELX -ESGgrid_height = ESGgrid_NY * ESGgrid_DELY - -big_grid_width = np.ceil(ESGgrid_width * 1.25) -big_grid_height = np.ceil(ESGgrid_height * 1.25) - -WRTCMP_width = WRTCMP_nx * WRTCMP_dx -WRTCMP_height = WRTCMP_ny * WRTCMP_dy - -fig = plt.figure() - -# ax1 = plt.axes -ax1 = plt.subplot2grid((1, 1), (0, 0)) - -map1 = Basemap( - projection="gnom", - resolution=plot_res, - lon_0=ESGgrid_LON_CTR, - lat_0=ESGgrid_LAT_CTR, - width=big_grid_width, - height=big_grid_height, -) - -map1.drawmapboundary(fill_color="#9999FF") -map1.fillcontinents(color="#ddaa66", lake_color="#9999FF") -map1.drawcoastlines() - -map2 = Basemap( - projection="gnom", - lon_0=ESGgrid_LON_CTR, - lat_0=ESGgrid_LAT_CTR, - width=ESGgrid_width, - height=ESGgrid_height, -) - -# map2.drawmapboundary(fill_color='#9999FF') -# map2.fillcontinents(color='#ddaa66',lake_color='#9999FF') -# map2.drawcoastlines() - - -map3 = Basemap( - llcrnrlon=WRTCMP_lon_lwr_left, - llcrnrlat=WRTCMP_lat_lwr_left, - width=WRTCMP_width, - height=WRTCMP_height, - resolution=plot_res, - projection="lcc", - lat_0=ESGgrid_LAT_CTR, - lon_0=ESGgrid_LON_CTR, -) - -# map3.drawmapboundary(fill_color='#9999FF') -# map3.fillcontinents(color='#ddaa66',lake_color='#9999FF',alpha=0.5) -# map3.drawcoastlines() - - -# Draw gnomonic compute grid rectangle: - -lbx1, lby1 = map1(*map2(map2.xmin, map2.ymin, inverse=True)) -ltx1, lty1 = map1(*map2(map2.xmin, map2.ymax, inverse=True)) -rtx1, rty1 = map1(*map2(map2.xmax, map2.ymax, inverse=True)) -rbx1, rby1 = map1(*map2(map2.xmax, map2.ymin, inverse=True)) - -verts1 = [ - (lbx1, lby1), # left, bottom - (ltx1, lty1), # left, top - (rtx1, rty1), # right, top - (rbx1, rby1), # right, bottom - (lbx1, lby1), # ignored -] - -codes2 = [ - Path.MOVETO, - Path.LINETO, - Path.LINETO, - Path.LINETO, - Path.CLOSEPOLY, -] - -path = Path(verts1, codes2) -patch = patches.PathPatch(path, facecolor="r", lw=2, alpha=0.5) -ax1.add_patch(patch) - - -# Draw lambert write grid rectangle: - # Define a function to get the lambert points in the gnomonic space - - + def get_lambert_points(gnomonic_map, lambert_map, pps): - # This function takes the lambert domain we have defined, lambert_map, as well as - # pps (the number of points to interpolate and draw for each side of the lambert "rectangle"), - # and returns an array of two lists: one a list of tuples of the 4*ppf + 4 vertices mapping the approximate shape - # of the lambert domain on the gnomonic map, the other a list of "draw" instructions to be used by - # the PathPatch function - - # pps is recommended 10 or less due to time of calculation - - # Start array with bottom left 
point, "MOVETO" instruction
+    """This function takes the lambert domain we have defined, ``lambert_map``, and ``pps``,
+    and returns a tuple of two lists: one a list of tuples of the ``4*pps + 4`` vertices mapping
+    the approximate shape of the lambert domain on the gnomonic map, the other a list of "draw"
+    instructions to be used by the PathPatch function.
+
+    The vertex list starts with the bottom left point and a "MOVETO" instruction.
+
+    Args:
+        gnomonic_map (Basemap): A map of the ESG grid
+        lambert_map (Basemap): A map of the write component grid
+        pps (int): The number of points to interpolate and draw for each side of the lambert
+             "rectangle." Setting it to 10 or less is recommended to keep the calculation
+             time reasonable.
+
+    Returns:
+        vertices, instructions: A tuple of two lists: one a list of tuples of the ``4*pps + 4``
+                                vertices mapping the approximate shape of the lambert domain
+                                on the gnomonic map, the other a list of "draw" instructions
+                                to be used by the PathPatch function.
+    """
+
     vertices = [
         gnomonic_map(*lambert_map(lambert_map.xmin, lambert_map.ymin, inverse=True))
     ]
@@ -189,15 +85,133 @@ def get_lambert_points(gnomonic_map, lambert_map, pps):
     return vertices, instructions
 
-# Call the function we just defined to generate a polygon roughly approximating the lambert "rectangle" in gnomonic space
-verts3, codes3 = get_lambert_points(map1, map3, 10)
+#### User-defined variables
+
+if __name__ == "__main__":
+
+    # Computational grid definitions
+    ESGgrid_LON_CTR = -153.0
+    ESGgrid_LAT_CTR = 61.0
+    ESGgrid_DELX = 3000.0
+    ESGgrid_DELY = 3000.0
+    ESGgrid_NX = 1344
+    ESGgrid_NY = 1152
+
+    # Write component grid definitions
+
+    WRTCMP_nx = 1340
+    WRTCMP_ny = 1132
+    WRTCMP_lon_lwr_left = 151.5
+    WRTCMP_lat_lwr_left = 42.360
+    WRTCMP_dx = ESGgrid_DELX
+    WRTCMP_dy = ESGgrid_DELY
+
+    # Plot-specific definitions
+
+    plot_res = "i" # background map resolution
+
+    # Note: Resolution can be 'c' (crude), 'l' (low), 'i' (intermediate), 'h' (high), or 'f' (full).
+    # To plot maps at a resolution higher than low ('l'),
+    # you will need to download and install the basemap-data-hires package.
+
+
+    #### END User-defined variables
+
+
+    ESGgrid_width = ESGgrid_NX * ESGgrid_DELX
+    ESGgrid_height = ESGgrid_NY * ESGgrid_DELY
+
+    big_grid_width = np.ceil(ESGgrid_width * 1.25)
+    big_grid_height = np.ceil(ESGgrid_height * 1.25)
+
+    WRTCMP_width = WRTCMP_nx * WRTCMP_dx
+    WRTCMP_height = WRTCMP_ny * WRTCMP_dy
+
+    fig = plt.figure()
+
+    # ax1 = plt.axes
+    ax1 = plt.subplot2grid((1, 1), (0, 0))
+
+    map1 = Basemap(
+        projection="gnom",
+        resolution=plot_res,
+        lon_0=ESGgrid_LON_CTR,
+        lat_0=ESGgrid_LAT_CTR,
+        width=big_grid_width,
+        height=big_grid_height,
+    )
+
+    map1.drawmapboundary(fill_color="#9999FF")
+    map1.fillcontinents(color="#ddaa66", lake_color="#9999FF")
+    map1.drawcoastlines()
+
+    map2 = Basemap(
+        projection="gnom",
+        lon_0=ESGgrid_LON_CTR,
+        lat_0=ESGgrid_LAT_CTR,
+        width=ESGgrid_width,
+        height=ESGgrid_height,
+    )
+
+    # map2.drawmapboundary(fill_color='#9999FF')
+    # map2.fillcontinents(color='#ddaa66',lake_color='#9999FF')
+    # map2.drawcoastlines()
+
+
+    map3 = Basemap(
+        llcrnrlon=WRTCMP_lon_lwr_left,
+        llcrnrlat=WRTCMP_lat_lwr_left,
+        width=WRTCMP_width,
+        height=WRTCMP_height,
+        resolution=plot_res,
+        projection="lcc",
+        lat_0=ESGgrid_LAT_CTR,
+        lon_0=ESGgrid_LON_CTR,
+    )
+
+    # map3.drawmapboundary(fill_color='#9999FF')
+    # map3.fillcontinents(color='#ddaa66',lake_color='#9999FF',alpha=0.5)
+    # map3.drawcoastlines()
+
+
+    # Draw gnomonic compute grid rectangle:
+
+    lbx1, lby1 = map1(*map2(map2.xmin, map2.ymin, inverse=True))
+    ltx1, lty1 = map1(*map2(map2.xmin, map2.ymax, inverse=True))
+    rtx1, rty1 = map1(*map2(map2.xmax, map2.ymax, inverse=True))
+    rbx1, rby1 = map1(*map2(map2.xmax, map2.ymin, inverse=True))
+
+    verts1 = [
+        (lbx1, lby1),  # left, bottom
+        (ltx1, lty1),  # left, top
+        (rtx1, rty1),  # right, top
+        (rbx1, rby1),  # right, bottom
+        (lbx1, lby1),  # ignored
+    ]
+
+    codes2 = [
+        Path.MOVETO,
+        Path.LINETO,
+        Path.LINETO,
+        Path.LINETO,
+        Path.CLOSEPOLY,
+    ]
+
+    path = Path(verts1, codes2)
+    patch = patches.PathPatch(path, facecolor="r", lw=2, alpha=0.5)
+    ax1.add_patch(patch)
+
+    # Draw lambert write grid rectangle:
+    # Call get_lambert_points() to generate a polygon roughly approximating the lambert "rectangle" in gnomonic space
+
+    verts3, codes3 = get_lambert_points(map1, map3, 10)
 
-# Now draw!
+    # Now draw!
 
-path = Path(verts3, codes3)
-patch = patches.PathPatch(path, facecolor="w", lw=2, alpha=0.5)
-ax1.add_patch(patch)
+    path = Path(verts3, codes3)
+    patch = patches.PathPatch(path, facecolor="w", lw=2, alpha=0.5)
+    ax1.add_patch(patch)
 
-plt.show()
+    plt.show()
diff --git a/ush/bash_utils/boolify.sh b/ush/bash_utils/boolify.sh
index 577b7c16eb..d6857e18e0 100644
--- a/ush/bash_utils/boolify.sh
+++ b/ush/bash_utils/boolify.sh
@@ -11,15 +11,6 @@ function boolify() {
 #
 #-----------------------------------------------------------------------
 #
-# Save current shell options (in a global array). Then set new options
-# for this script/function.
-#
-#-----------------------------------------------------------------------
-#
-  { save_shell_opts; . ${USHdir}/preamble.sh; } > /dev/null 2>&1
-#
-#-----------------------------------------------------------------------
-#
 # Get the name of this function and input.
 #
 #-----------------------------------------------------------------------
@@ -57,13 +48,4 @@ where:
     echo "FALSE"
   fi
 
-  #
-  #-----------------------------------------------------------------------
-  #
-  # Restore the shell options saved at the beginning of this script/func-
-  # tion.
- # - #----------------------------------------------------------------------- - # - { restore_shell_opts; } > /dev/null 2>&1 } diff --git a/ush/bash_utils/check_for_preexist_dir_file.sh b/ush/bash_utils/check_for_preexist_dir_file.sh index 4ca55766d2..2843222230 100644 --- a/ush/bash_utils/check_for_preexist_dir_file.sh +++ b/ush/bash_utils/check_for_preexist_dir_file.sh @@ -107,7 +107,7 @@ where the arguments are defined as follows: # "delete") - rm_vrfy -rf "${dir_or_file}" + rm -rf "${dir_or_file}" ;; # #----------------------------------------------------------------------- @@ -134,7 +134,7 @@ Specified directory or file (dir_or_file) already exists: Moving (renaming) preexisting directory or file to: old_dir_or_file = \"${old_dir_or_file}\"" - mv_vrfy "${dir_or_file}" "${old_dir_or_file}" + mv "${dir_or_file}" "${old_dir_or_file}" ;; # #----------------------------------------------------------------------- diff --git a/ush/bash_utils/check_var_valid_value.sh b/ush/bash_utils/check_var_valid_value.sh index 21288184db..5b942c1f73 100644 --- a/ush/bash_utils/check_var_valid_value.sh +++ b/ush/bash_utils/check_var_valid_value.sh @@ -96,7 +96,7 @@ where the arguments are defined as follows: var_value=${!var_name} valid_var_values_at="$valid_var_values_array_name[@]" - valid_var_values=("${!valid_var_values_at}") + valid_var_values=("${!valid_var_values_at:-}") if [ "$#" -eq 3 ]; then err_msg="$3" diff --git a/ush/bash_utils/create_symlink_to_file.sh b/ush/bash_utils/create_symlink_to_file.sh index 38099ffb8e..3f82dfd1fe 100644 --- a/ush/bash_utils/create_symlink_to_file.sh +++ b/ush/bash_utils/create_symlink_to_file.sh @@ -16,78 +16,23 @@ function create_symlink_to_file() { # #----------------------------------------------------------------------- # -# Save current shell options (in a global array). Then set new options -# for this script/function. -# -#----------------------------------------------------------------------- -# - { save_shell_opts; . ${USHdir}/preamble.sh; } > /dev/null 2>&1 -# -#----------------------------------------------------------------------- -# -# Get the full path to the file in which this script/function is located -# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in -# which the file is located (scrfunc_dir). -# -#----------------------------------------------------------------------- -# - local scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" ) - local scrfunc_fn=$( basename "${scrfunc_fp}" ) - local scrfunc_dir=$( dirname "${scrfunc_fp}" ) -# -#----------------------------------------------------------------------- -# -# Get the name of this function. -# -#----------------------------------------------------------------------- -# - local func_name="${FUNCNAME[0]}" -# -#----------------------------------------------------------------------- -# # Specify the set of valid argument names for this script/function. Then # process the arguments provided to this script/function (which should # consist of a set of name-value pairs of the form arg1="value1", etc). # #----------------------------------------------------------------------- # - local valid_args=( \ -"target" \ -"symlink" \ -"relative" \ - ) - process_args valid_args "$@" -# -#----------------------------------------------------------------------- -# -# For debugging purposes, print out values of arguments passed to this -# script. Note that these will be printed out only if VERBOSE is set to -# TRUE. 
-# -#----------------------------------------------------------------------- -# - print_input_args valid_args -# -#----------------------------------------------------------------------- -# -# Verify that the required arguments to this function have been specified. -# If not, print out an error message and exit. -# -#----------------------------------------------------------------------- -# - if [ -z "${target}" ]; then - print_err_msg_exit "\ -The argument \"target\" specifying the target of the symbolic link that -this function will create was not specified in the call to this function: - target = \"$target\"" - fi +if [[ $# -lt 2 ]]; then + print_err_msg_exit "Function create_symlink_to_file() requires at least two arguments" +fi - if [ -z "${symlink}" ]; then - print_err_msg_exit "\ -The argument \"symlink\" specifying the symbolic link that this function -will create was not specified in the call to this function: - symlink = \"$symlink\"" - fi +target=$1 +symlink=$2 +relative=${3:-TRUE} +relative=$(boolify $relative) +if [ "$relative" != "TRUE" ] && [ "$relative" != "FALSE" ]; then + print_err_msg_exit "'relative' must be set to TRUE or FALSE" +fi # #----------------------------------------------------------------------- # @@ -100,19 +45,6 @@ will create was not specified in the call to this function: # #----------------------------------------------------------------------- # -# If "relative" is not set (i.e. if it is set to a null string), reset -# it to a default value of "TRUE". Then check that it is set to a vaild -# value. -# -#----------------------------------------------------------------------- -# - relative=${relative:-"TRUE"} - - valid_vals_relative=("TRUE" "true" "YES" "yes" "FALSE" "false" "NO" "no") - check_var_valid_value "relative" "valid_vals_relative" -# -#----------------------------------------------------------------------- -# # Make sure that the target file exists and is a file. # #----------------------------------------------------------------------- @@ -140,24 +72,9 @@ not exist or is not a file: # # Create the symlink. # -# Important note: -# In the ln_vrfy command below, do not quote ${relative_flag} because if -# is quoted (either single or double quotes) but happens to be a null -# string, it will be treated as the (empty) name of (or path to) the -# target and will cause an error. -# -#----------------------------------------------------------------------- -# - ln_vrfy -sf ${relative_flag} "$target" "$symlink" -# -#----------------------------------------------------------------------- -# -# Restore the shell options saved at the beginning of this script/func- -# tion. -# #----------------------------------------------------------------------- # - { restore_shell_opts; } > /dev/null 2>&1 +ln -sf ${relative_flag} "$target" "$symlink" } diff --git a/ush/bash_utils/eval_METplus_timestr_tmpl.sh b/ush/bash_utils/eval_METplus_timestr_tmpl.sh deleted file mode 100644 index 245369509b..0000000000 --- a/ush/bash_utils/eval_METplus_timestr_tmpl.sh +++ /dev/null @@ -1,267 +0,0 @@ -# -#----------------------------------------------------------------------- -# -# This file defines a function that evaluates a METplus time-string -# template. -# -#----------------------------------------------------------------------- -# -function eval_METplus_timestr_tmpl() { -# -#----------------------------------------------------------------------- -# -# Save current shell options (in a global array). Then set new options -# for this script/function. 
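
The rewritten `create_symlink_to_file()` above takes positional arguments in place of the old name=value pairs handled by `process_args`. A minimal usage sketch (paths are illustrative; it assumes the function and its helpers `boolify` and `print_err_msg_exit` have already been sourced):

```bash
# Old style (removed): create_symlink_to_file target="..." symlink="..." relative="TRUE"
# New style: positional arguments; the third ("relative") is optional and
# defaults to TRUE, with boolify normalizing spellings such as "yes" to "TRUE".
create_symlink_to_file "${FIXam}/global_hyblev.l65.txt" \
                       "${EXPTDIR}/global_hyblev.l65.txt" \
                       TRUE
```
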
-# -#----------------------------------------------------------------------- -# - { save_shell_opts; . ${USHdir}/preamble.sh; } > /dev/null 2>&1 -# -#----------------------------------------------------------------------- -# -# Get the full path to the file in which this script/function is located -# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in -# which the file is located (scrfunc_dir). -# -#----------------------------------------------------------------------- -# - local scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" ) - local scrfunc_fn=$( basename "${scrfunc_fp}" ) - local scrfunc_dir=$( dirname "${scrfunc_fp}" ) -# -#----------------------------------------------------------------------- -# -# Get the name of this function. -# -#----------------------------------------------------------------------- -# - local func_name="${FUNCNAME[0]}" -# -#----------------------------------------------------------------------- -# -# Specify the set of valid argument names for this script/function. Then -# process the arguments provided to this script/function (which should -# consist of a set of name-value pairs of the form arg1="value1", etc). -# -#----------------------------------------------------------------------- -# - local valid_args=( \ - "init_time" \ - "fhr" \ - "METplus_timestr_tmpl" \ - "outvarname_formatted_time" \ - ) - process_args valid_args "$@" -# -#----------------------------------------------------------------------- -# -# For debugging purposes, print out values of arguments passed to this -# script. Note that these will be printed out only if VERBOSE is set to -# TRUE. -# -#----------------------------------------------------------------------- -# - print_input_args "valid_args" -# -#----------------------------------------------------------------------- -# -# Declare local variables. -# -#----------------------------------------------------------------------- -# - local fmt \ - formatted_time \ - hh_init \ - init_time_str \ - lead_hrs \ - len \ - mn_init \ - METplus_time_fmt \ - METplus_time_shift \ - METplus_time_type \ - regex_search \ - ss_init \ - valid_time_str \ - yyyymmdd_init -# -#----------------------------------------------------------------------- -# -# Run checks on input arguments. -# -#----------------------------------------------------------------------- -# - if [ -z "${METplus_timestr_tmpl}" ]; then - print_err_msg_exit "\ -The specified METplus time string template (METplus_timestr_tmpl) cannot be empty: - METplus_timestr_tmpl = \"${METplus_timestr_tmpl}\"" - fi - - len=${#init_time} - if [[ ${init_time} =~ ^[0-9]+$ ]]; then - if [ "$len" -ne 10 ] && [ "$len" -ne 12 ] && [ "$len" -ne 14 ]; then - print_err_msg_exit "\ -The specified initial time string (init_time) must contain exactly 10, -12, or 14 integers (but contains $len): - init_time = \"${init_time}\"" - fi - else - print_err_msg_exit "\ -The specified initial time string (init_time) must consist of only -integers and cannot be empty: - init_time = \"${init_time}\"" - fi - - if ! [[ $fhr =~ ^[0-9]+$ ]]; then - print_err_msg_exit "\ -The specified forecast hour (fhr) must consist of only integers and -cannot be empty: - fhr = \"${fhr}\"" - fi -# -#----------------------------------------------------------------------- -# -# Set strings for the initial and valid times that can be passed to the -# "date" utility for evaluation. 
-# -#----------------------------------------------------------------------- -# - yyyymmdd_init=${init_time:0:8} - hh_init=${init_time:8:2} - - mn_init="00" - if [ "$len" -gt "10" ]; then - mn_init=${init_time:10:2} - fi - - ss_init="00" - if [ "$len" -gt "12" ]; then - ss_init=${init_time:12:2} - fi - - init_time_str=$( printf "%s" "${yyyymmdd_init} + ${hh_init} hours + ${mn_init} minutes + ${ss_init} seconds" ) - valid_time_str=$( printf "%s" "${init_time_str} + ${fhr} hours" ) -# -#----------------------------------------------------------------------- -# -# Parse the input METplus time string template. -# -#----------------------------------------------------------------------- -# - regex_search="^\{(init|valid|lead)(\?)(fmt=)([^\?]*)(\?)?(shift=)?([^\?]*)?\}" - METplus_time_type=$( \ - printf "%s" "${METplus_timestr_tmpl}" | $SED -n -r -e "s/${regex_search}/\1/p" ) - METplus_time_fmt=$( \ - printf "%s" "${METplus_timestr_tmpl}" | $SED -n -r -e "s/${regex_search}/\4/p" ) - METplus_time_shift=$( \ - printf "%s" "${METplus_timestr_tmpl}" | $SED -n -r -e "s/${regex_search}/\7/p" ) -# -#----------------------------------------------------------------------- -# -# Get strings for the time format and time shift that can be passed to -# the "date" utility or the "printf" command. -# -#----------------------------------------------------------------------- -# - case "${METplus_time_fmt}" in - "%Y%m%d%H"|"%Y%m%d"|"%H%M%S"|"%H") - fmt="${METplus_time_fmt}" - ;; - "%HHH") -# -# Print format assumes that the argument to printf (i.e. the number to -# print out) may be a float. If we instead assume an integer and use -# "%03d" as the format, the printf function below will fail if the argument -# happens to be a float. The "%03.0f" format will work for both a float -# and an integer argument (and will truncate the float and print out a -# 3-digit integer). -# - fmt="%03.0f" - ;; - *) - print_err_msg_exit "\ -Unsupported METplus time format: - METplus_time_fmt = \"${METplus_time_fmt}\" -METplus time string template passed to this function is: - METplus_timestr_tmpl = \"${METplus_timestr_tmpl}\"" - ;; - esac -# -# Calculate the time shift as an integer in units of seconds. -# - time_shift_str=$(( $(printf "%.0f" "${METplus_time_shift}") + 0 ))" seconds" -# -#----------------------------------------------------------------------- -# -# Set the formatted time string. -# -#----------------------------------------------------------------------- -# - case "${METplus_time_type}" in - "init") - formatted_time=$( ${DATE_UTIL} --date="${init_time_str} + ${time_shift_str}" +"${fmt}" ) - ;; - "valid") - formatted_time=$( ${DATE_UTIL} --date="${valid_time_str} + ${time_shift_str}" +"${fmt}" ) - ;; - "lead") - lead_secs=$(( $( ${DATE_UTIL} --date="${valid_time_str} + ${time_shift_str}" +"%s" ) \ - - $( ${DATE_UTIL} --date="${init_time_str}" +"%s" ) )) - lead_hrs=$( bc -l <<< "${lead_secs}/${SECS_PER_HOUR}" ) -# -# Check to make sure lead_hrs is an integer. -# - lead_hrs_trunc=$( bc <<< "${lead_secs}/${SECS_PER_HOUR}" ) - lead_hrs_rem=$( bc -l <<< "${lead_hrs} - ${lead_hrs_trunc}" ) - if [ "${lead_hrs_rem}" != "0" ]; then - print_err_msg_exit "\ -The lead in hours (lead_hrs) must be an integer but isn't: - lead_hrs = ${lead_hrs} -The lead in seconds (lead_secs) is: - lead_secs = ${lead_secs} -The remainder (lead_hrs_rem) after dividing the lead_secs by SECS_PER_HOUR -= ${SECS_PER_HOUR} is: - lead_hrs_rem = ${lead_hrs_rem}" - fi -# -# Get the lead in the proper format. 
-# - formatted_time=$( printf "${fmt}" "${lead_hrs}" ) - ;; - *) - print_err_msg_exit "\ -Unsupported METplus time type: - METplus_time_type = \"${METplus_time_type}\" -METplus time string template passed to this function is: - METplus_timestr_tmpl = \"${METplus_timestr_tmpl}\"" - ;; - esac - - if [ -z "${formatted_time}" ]; then - print_err_msg_exit "\ -The specified METplus time string template (METplus_timestr_tmpl) could -not be evaluated for the given initial time (init_time) and forecast -hour (fhr): - METplus_timestr_tmpl = \"${METplus_timestr_tmpl}\" - init_time = \"${init_time}\" - fhr = \"${fhr}\"" - fi -# -#----------------------------------------------------------------------- -# -# Set output variables. -# -#----------------------------------------------------------------------- -# - if [ ! -z "${outvarname_formatted_time}" ]; then - printf -v ${outvarname_formatted_time} "%s" "${formatted_time}" - fi -# -#----------------------------------------------------------------------- -# -# Restore the shell options saved at the beginning of this script/function. -# -#----------------------------------------------------------------------- -# - { restore_shell_opts; } > /dev/null 2>&1 - -} diff --git a/ush/bash_utils/filesys_cmds_vrfy.sh b/ush/bash_utils/filesys_cmds_vrfy.sh deleted file mode 100644 index b355d293ad..0000000000 --- a/ush/bash_utils/filesys_cmds_vrfy.sh +++ /dev/null @@ -1,280 +0,0 @@ -# -#----------------------------------------------------------------------- -# -# This is a generic function that executes the specified command (e.g. -# "cp", "mv", etc) with the specified options/arguments and then verifies -# that the command executed without errors. The first argument to this -# function is the command to execute while the remaining ones are the -# options/arguments to be passed to that command. -# -#----------------------------------------------------------------------- -# -function filesys_cmd_vrfy() { -# -#----------------------------------------------------------------------- -# -# Save current shell options (in a global array). Then set new options -# for this script/function. -# -#----------------------------------------------------------------------- -# - { save_shell_opts; . ${USHdir}/preamble.sh; } > /dev/null 2>&1 -# -#----------------------------------------------------------------------- -# -# Get the full path to the file in which this script/function is located -# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in -# which the file is located (scrfunc_dir). -# -#----------------------------------------------------------------------- -# - local scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" ) - local scrfunc_fn=$( basename "${scrfunc_fp}" ) - local scrfunc_dir=$( dirname "${scrfunc_fp}" ) -# -#----------------------------------------------------------------------- -# -# Get the name of this function. -# -#----------------------------------------------------------------------- -# - local func_name="${FUNCNAME[0]}" -# -#----------------------------------------------------------------------- -# -# Get information about the script or function that calls this function. -# Note that caller_name will be set as follows: -# -# 1) If the caller is a function, caller_name will be set to the name of -# that function. -# 2) If the caller is a sourced script, caller_name will be set to -# "script". Note that a sourced script cannot be the top level -# script since by defintion, it is sourced by another script or -# function. 
-# 3) If the caller is the top-level script, caller_name will be set to -# "main". -# -# Thus, if caller_name is set to "script" or "main", the caller is a -# script, and if it is set to anything else, the caller is a function. -# -# Below, the index into FUNCNAME and BASH_SOURCE is 2 (not 1 as is usually -# the case) because this function is called by functions such as cp_vrfy, -# mv_vrfy, rm_vrfy, ln_vrfy, mkdir_vrfy, and cd_vrfy, but these are just -# wrappers, and in the error and informational messages, we are really -# interested in the scripts/functions that in turn call these wrappers. -# -#----------------------------------------------------------------------- -# - local caller_name="main" - local caller_fp="" - if [ -z "${BASH_SOURCE[2]-x}" ]; then - caller_fp=$( $READLINK -f "${BASH_SOURCE[2]}" ) - local caller_fn=$( basename "${caller_fp}" ) - local caller_dir=$( dirname "${caller_fp}" ) - caller_name="${FUNCNAME[2]}" - fi -# -#----------------------------------------------------------------------- -# -# Declare local variables that are used later below. -# -#----------------------------------------------------------------------- -# - local cmd \ - output \ - exit_code \ - double_space \ - script_or_function -# -#----------------------------------------------------------------------- -# -# Check that at least one argument is supplied. -# -#----------------------------------------------------------------------- -# - if [ "$#" -lt 1 ]; then - - print_err_msg_exit " -Incorrect number of arguments specified: - - Function name: \"${func_name}\" - Number of arguments specified: $# - -Usage: - - ${func_name} cmd [args_to_cmd] - -where \"cmd\" is the name of the command to execute and \"args_to_cmd\" -are zero or more options and arguments to pass to that command. -" - - fi -# -#----------------------------------------------------------------------- -# -# The first argument to this function is the command to execute while -# the remaining ones are the arguments to that command. Extract the -# command and save it in the variable "cmd". Then shift the argument -# list so that $@ contains the arguments to the command but not the -# name of the command itself. -# -#----------------------------------------------------------------------- -# - cmd="$1" - shift -# -#----------------------------------------------------------------------- -# -# Pass the arguments to the command and execute it, saving the outputs -# to stdout and stderr in the variable "output". Also, save the exit -# code from the execution. -# -#----------------------------------------------------------------------- -# - local output=$( "$cmd" "$@" 2>&1 ) - local exit_code=$? -# -#----------------------------------------------------------------------- -# -# If output is not empty, it will be printed to stdout below either as -# an error message or an informational message. In either case, format -# it by adding a double space to the beginning of each line. -# -#----------------------------------------------------------------------- -# - if [ -n "$output" ]; then - local double_space=" " - output="${double_space}${output}" - output=${output/$'\n'/$'\n'${double_space}} - fi -# -#----------------------------------------------------------------------- -# -# If the exit code from the execution of cmd above is nonzero, print out -# an error message and exit. 
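
With these `*_vrfy` wrappers deleted, call sites invoke the underlying commands directly, as in the check_for_preexist_dir_file.sh hunks above. A sketch of the replacement pattern (hypothetical path; error handling now falls to the calling script's shell options rather than the wrapper):

```bash
dir_or_file="/tmp/expt_dir.old"   # hypothetical example path
# Before: rm_vrfy -rf "${dir_or_file}"   (wrapper captured output and exit code)
# After: the plain command; a nonzero exit status propagates to the caller.
rm -rf "${dir_or_file}"
```
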
-# -#----------------------------------------------------------------------- -# - if [ "${caller_name}" = "main" ] || \ - [ "${caller_name}" = "script" ]; then - local script_or_function="the script" - else - local script_or_function="function \"${caller_name}\"" - fi - - if [ ${exit_code} -ne 0 ]; then - - print_err_msg_exit "\ -Call to function \"${cmd}_vrfy\" failed. This function was called from -${script_or_function} in file: - - \"${caller_fp}\" - -Error message from \"${cmd}_vrfy\" function's \"$cmd\" operation: -$output" - - fi -# -#----------------------------------------------------------------------- -# -# If the exit code from the execution of cmd above is zero, continue. -# -# First, check if cmd is set to "cd". If so, the execution of cmd above -# in a separate subshell [which is what happens when using the $("$cmd") -# construct above] will change directory in that subshell but not in the -# current shell. Thus, rerun the "cd" command in the current shell. -# -#----------------------------------------------------------------------- -# - if [ "$cmd" = "cd" ]; then - "$cmd" "$@" 2>&1 > /dev/null - fi -# -#----------------------------------------------------------------------- -# -# If output is not empty, print out whatever message it contains (e.g. -# it might contain a warning or other informational message). -# -#----------------------------------------------------------------------- -# - if [ -n "$output" ]; then - - print_info_msg " -\"${cmd}_vrfy\" operation returned with a message. This command was -issued from ${script_or_function} in file: - - \"${caller_fp}\" - -Message from \"${cmd}_vrfy\" function's \"$cmd\" operation: -$output" - - fi -# -#----------------------------------------------------------------------- -# -# Restore the shell options saved at the beginning of this script/func- -# tion. -# -#----------------------------------------------------------------------- -# - { restore_shell_opts; } > /dev/null 2>&1 - -} - - -# -#----------------------------------------------------------------------- -# -# The following are functions are counterparts of common filesystem -# commands "with verification", i.e. they execute a filesystem command -# (such as "cp" and "mv") and then verify that the execution was successful. -# -# These functions are called using the "filesys_cmd_vrfy" function defined -# above. In each of these functions, we: -# -# 1) Save current shell options (in a global array) and then set new -# options for this script/function. -# 2) Call the generic function "filesys_cmd_vrfy" with the command of -# interest (e.g. "cp") as the first argument and the arguments passed -# in as the rest. -# 3) Restore the shell options saved at the beginning of the function. -# -#----------------------------------------------------------------------- -# - -function cp_vrfy() { - { save_shell_opts; . ${USHdir}/preamble.sh; } > /dev/null 2>&1 - filesys_cmd_vrfy "cp" "$@" - { restore_shell_opts; } > /dev/null 2>&1 -} - -function mv_vrfy() { - { save_shell_opts; . ${USHdir}/preamble.sh; } > /dev/null 2>&1 - filesys_cmd_vrfy "mv" "$@" - { restore_shell_opts; } > /dev/null 2>&1 -} - -function rm_vrfy() { - { save_shell_opts; . ${USHdir}/preamble.sh; } > /dev/null 2>&1 - filesys_cmd_vrfy "rm" "$@" - { restore_shell_opts; } > /dev/null 2>&1 -} - -function ln_vrfy() { - { save_shell_opts; . ${USHdir}/preamble.sh; } > /dev/null 2>&1 - filesys_cmd_vrfy "$LN_UTIL" "$@" - { restore_shell_opts; } > /dev/null 2>&1 -} - -function mkdir_vrfy() { - { save_shell_opts; . 
${USHdir}/preamble.sh; } > /dev/null 2>&1 - filesys_cmd_vrfy "mkdir" "$@" - { restore_shell_opts; } > /dev/null 2>&1 -} - -function cd_vrfy() { - { save_shell_opts; . ${USHdir}/preamble.sh; } > /dev/null 2>&1 - filesys_cmd_vrfy "cd" "$@" - { restore_shell_opts; } > /dev/null 2>&1 -} - diff --git a/ush/bash_utils/print_msg.sh b/ush/bash_utils/print_msg.sh index 28a70d1431..8b032f9698 100644 --- a/ush/bash_utils/print_msg.sh +++ b/ush/bash_utils/print_msg.sh @@ -68,7 +68,7 @@ function print_info_msg() { elif [ "$#" -eq 2 ]; then - verbose="$1" + verbose=$(boolify "$1") info_msg="$2" # #----------------------------------------------------------------------- diff --git a/ush/bash_utils/source_config.sh b/ush/bash_utils/source_config.sh deleted file mode 100644 index df5a79a0df..0000000000 --- a/ush/bash_utils/source_config.sh +++ /dev/null @@ -1,53 +0,0 @@ -# -#----------------------------------------------------------------------- -# This file defines function that sources a config file (yaml/json etc) -# into the calling shell script -#----------------------------------------------------------------------- -# - -function config_to_str() { - $USHdir/config_utils.py -o $1 -c $2 "${@:3}" -} - -# -#----------------------------------------------------------------------- -# Define functions for different file formats -#----------------------------------------------------------------------- -# -function config_to_shell_str() { - config_to_str shell "$@" -} -function config_to_ini_str() { - config_to_str ini "$@" -} -function config_to_yaml_str() { - config_to_str yaml "$@" -} -function config_to_json_str() { - config_to_str json "$@" -} -function config_to_xml_str() { - config_to_str xml "$@" -} - -# -#----------------------------------------------------------------------- -# Source contents of a config file to shell script -#----------------------------------------------------------------------- -# -function source_config() { - - source <( config_to_shell_str "$@" ) - -} -# -#----------------------------------------------------------------------- -# Source partial contents of a config file to shell script. 
-# Only those variables needed by the task are sourced -#----------------------------------------------------------------------- -# -function source_config_for_task() { - - source <( config_to_shell_str "${@:2}" -k "(^(?!task_)|$1).*" ) - -} diff --git a/ush/bash_utils/source_yaml.sh b/ush/bash_utils/source_yaml.sh new file mode 100644 index 0000000000..669408416e --- /dev/null +++ b/ush/bash_utils/source_yaml.sh @@ -0,0 +1,36 @@ + + +function source_yaml () { + + local func_name="${FUNCNAME[0]}" + + if [ "$#" -lt 1 ] ; then + print_err_msg_exit " +Incorrect number of arguments specified: + + Function name: ${func_name} + Number of args specified: $# + +Usage: + + ${func_name} yaml_file [section] + + yaml_file: path to the YAML file to source + section: optional subsection of yaml +" + fi + local section + yaml_file=$1 + section=$2 + + while read -r line ; do + + + # A regex to match list representations + line=$(echo "$line" | sed -E "s/='\[(.*)\]'/=(\1)/") + line=${line//,/} + line=${line//\"/} + line=${line/None/} + source <( echo "${line}" ) + done < <(uw config realize -i "${yaml_file}" --output-format sh --key-path $section) +} diff --git a/ush/calculate_cost.py b/ush/calculate_cost.py index b5e952b252..5fd22bb5b9 100755 --- a/ush/calculate_cost.py +++ b/ush/calculate_cost.py @@ -15,6 +15,18 @@ def calculate_cost(config_fn): + """Calculates the cost of running an experiment based on its configuration file details + + Args: + config_fn (str): Name of a configuration file containing experiment parameters + + Returns: + cost (list): Cost array containing information related to experiment parameters + (e.g., time step and grid) + + Raises: + ValueError: If ``GRID_GEN_METHOD`` is set to an invalid value + """ ushdir = os.path.dirname(os.path.abspath(__file__)) cfg_u = load_config_file(config_fn) diff --git a/ush/check_python_version.py b/ush/check_python_version.py index afc3dac62f..8c61cba865 100755 --- a/ush/check_python_version.py +++ b/ush/check_python_version.py @@ -7,8 +7,13 @@ def check_python_version(): - """Check if python version >= 3.6 and presence of some - non-standard packages currently jinja2, yaml, f90nml""" + """Checks for python version >= 3.6 and for presence of some + non-standard packages (currently ``jinja2``, ``yaml``, ``f90nml``) + + Raises: + ImportError: If checked packages are missing. 
+ Exception: If Python version is less than 3.6 + """ # Check for non-standard python packages try: diff --git a/ush/config.aqm.yaml b/ush/config.aqm.yaml index 2718eafbbf..77512ec7a2 100644 --- a/ush/config.aqm.yaml +++ b/ush/config.aqm.yaml @@ -1,8 +1,8 @@ metadata: - description: config for Online-CMAQ, AQM_NA_13km, warm-start + description: config for SRW-AQM, AQM_NA_13km, warm-start user: RUN_ENVIR: community - MACHINE: hera + MACHINE: [hera/orion/hercules/derecho] ACCOUNT: [account name] workflow: USE_CRON_TO_RELAUNCH: true @@ -22,7 +22,9 @@ workflow: FIELD_TABLE_TMPL_FN: field_table_aqm.FV3_GFS_v16 DO_REAL_TIME: false COLDSTART: false # set to true for cold start - WARMSTART_CYCLE_DIR: '/scratch2/NAGAPE/epic/SRW-AQM_DATA/aqm_data/restart/2023111000' + WARMSTART_CYCLE_DIR: '/scratch2/NAGAPE/epic/SRW-AQM_DATA/aqm_data/restart/2023111000' # for hera +# WARMSTART_CYCLE_DIR: '/work/noaa/epic/SRW-AQM_DATA/aqm_data/restart/2023111000' # for orion/hercules +# WARMSTART_CYCLE_DIR: '/glade/work/chanhooj/SRW-AQM_DATA/aqm_data/restart/2023111000' # for derecho nco: envir_default: test_aqm_warmstart NET_default: aqm @@ -33,7 +35,7 @@ rocoto: # task_aqm_ics_ext: # uncomment this in case of COLDSTART: true metatask_run_ensemble: task_run_fcst_mem#mem#: - walltime: 01:00:00 + walltime: 01:20:00 task_get_extrn_ics: EXTRN_MDL_NAME_ICS: FV3GFS FV3GFS_FILE_FMT_ICS: netcdf @@ -44,6 +46,7 @@ task_get_extrn_lbcs: FV3GFS_FILE_FMT_LBCS: netcdf EXTRN_MDL_LBCS_OFFSET_HRS: 0 task_run_fcst: + OMP_NUM_THREADS_RUN_FCST: 1 DT_ATMOS: 180 LAYOUT_X: 50 LAYOUT_Y: 34 diff --git a/ush/config.community.yaml b/ush/config.community.yaml index 417b9edb91..1ce7fc0108 100644 --- a/ush/config.community.yaml +++ b/ush/config.community.yaml @@ -5,10 +5,6 @@ user: RUN_ENVIR: community MACHINE: hera ACCOUNT: an_account -platform: - CCPA_OBS_DIR: "" - MRMS_OBS_DIR: "" - NDAS_OBS_DIR: "" workflow: USE_CRON_TO_RELAUNCH: false EXPT_SUBDIR: test_community @@ -34,8 +30,6 @@ task_plot_allvars: global: DO_ENSEMBLE: false NUM_ENS_MEMBERS: 2 -verification: - VX_FCST_MODEL_NAME: FV3_GFS_v16_CONUS_25km rocoto: tasks: metatask_run_ensemble: diff --git a/ush/config.fire.yaml b/ush/config.fire.yaml new file mode 100644 index 0000000000..fc986f6a6a --- /dev/null +++ b/ush/config.fire.yaml @@ -0,0 +1,67 @@ +metadata: + description: >- + Sample fire config +user: + RUN_ENVIR: community + MACHINE: derecho + ACCOUNT: ACCOUNT_NUMBER +workflow: + USE_CRON_TO_RELAUNCH: true + EXPT_SUBDIR: fire_test_case + CCPP_PHYS_SUITE: FV3_HRRR + PREDEF_GRID_NAME: SUBCONUS_CO_3km + DATE_FIRST_CYCL: '2020081318' + DATE_LAST_CYCL: '2020081318' + FCST_LEN_HRS: 6 # 27 hours of LBCs staged on Derecho + PREEXISTING_DIR_METHOD: rename + VERBOSE: true + COMPILER: intel +task_get_extrn_ics: + USE_USER_STAGED_EXTRN_FILES: true + EXTRN_MDL_SOURCE_BASEDIR_ICS: '{{ fire.FIRE_INPUT_DIR }}/HRRR' + EXTRN_MDL_NAME_ICS: HRRR + FV3GFS_FILE_FMT_ICS: grib2 + EXTRN_MDL_FILES_ICS: + - '{fyyyymmdd}.hrrr.t{fhh}z.wrfprsf00.grib2' +task_get_extrn_lbcs: + USE_USER_STAGED_EXTRN_FILES: true + EXTRN_MDL_SOURCE_BASEDIR_LBCS: '{{ fire.FIRE_INPUT_DIR }}/HRRR' + EXTRN_MDL_FILES_LBCS: + - '{fyyyymmdd}.hrrr.t{fhh}z.wrfprsf00.grib2' + EXTRN_MDL_NAME_LBCS: HRRR + EXTRN_MDL_LBCS_OFFSET_HRS: 0 + LBC_SPEC_INTVL_HRS: 3 + FV3GFS_FILE_FMT_LBCS: grib2 +task_run_fcst: + QUILTING: true + PRINT_ESMF: True +task_plot_allvars: + COMOUT_REF: "" +fire: + UFS_FIRE: True + FIRE_NUM_TASKS: 1 + FIRE_INPUT_DIR: /glade/derecho/scratch/kavulich/FIRE/2024/test_data + DT_FIRE: 0.5 + OUTPUT_DT_FIRE: 300 + FIRE_NUM_IGNITIONS: 1 + 
FIRE_IGNITION_ROS: 0.05 + FIRE_IGNITION_START_LAT: 40.609 + FIRE_IGNITION_START_LON: -105.879 + FIRE_IGNITION_END_LAT: 40.609 + FIRE_IGNITION_END_LON: -105.879 + FIRE_IGNITION_RADIUS: 250 + FIRE_IGNITION_START_TIME: 6480 + FIRE_IGNITION_END_TIME: 7000 + FIRE_WIND_HEIGHT: 5.0 + FIRE_PRINT_MSG: 0 + FIRE_ATM_FEEDBACK: 0.0 + FIRE_VISCOSITY: 0.4 + FIRE_UPWINDING: 9 + FIRE_LSM_ZCOUPLING: False + FIRE_LSM_ZCOUPLING_REF: 60.0 +rocoto: + tasks: + metatask_run_ensemble: + task_run_fcst_mem#mem#: + walltime: 01:00:00 + diff --git a/ush/config_defaults.yaml b/ush/config_defaults.yaml index 6a403754cb..f155307a6b 100644 --- a/ush/config_defaults.yaml +++ b/ush/config_defaults.yaml @@ -5,33 +5,22 @@ metadata: description: >- Default configuration for an experiment. The valid values for most of the parameters are specified in valid_param_vals.yaml -#---------------------------- -# USER config parameters -#---------------------------- +### +# USER-related configuration parameters +# user: + + ### + # (Default: ``"nco"``) + # + # This variable determines the workflow mode. The user can choose between two options: "nco" and "community". The "nco" mode uses a directory structure that mimics what is used in operations at NOAA/NCEP Central Operations (NCO) and at the NOAA/NCEP/Environmental Modeling Center (EMC), which works with NCO on pre-implementation testing. Specifics of the conventions used in "nco" mode can be found in the following :nco:`WCOSS Implementation Standards <>` document: # - #----------------------------------------------------------------------- - # - # Set the RUN_ENVIR variable that is listed and described in the WCOSS - # Implementation Standards document: - # - # NCEP Central Operations - # WCOSS Implementation Standards - # April 19, 2022 - # Version 11.0.0 - # - # RUN_ENVIR is described in this document as follows: - # - # Set to "nco" if running in NCO's production environment. Used to - # distinguish between organizations. - # - # Valid values are "nco" and "community". Here, we use it to generate - # and run the experiment either in NCO mode (if RUN_ENVIR is set to "nco") - # or in community mode (if RUN_ENVIR is set to "community"). This has - # implications on the experiment variables that need to be set and the - # the directory structure used. + # | NCEP Central Operations + # | WCOSS Implementation Standards + # | January 19, 2022 + # | Version 11.0.0 # - #----------------------------------------------------------------------- + # Setting ``RUN_ENVIR`` to "community" is recommended in most cases for users who are not running in NCO's production environment. Valid values: ``"nco"`` | ``"community"`` # RUN_ENVIR: "nco" # @@ -245,8 +234,8 @@ platform: # RUN_CMD_SERIAL: # The run command for some serial jobs # - # RUN_CMD_AQM: - # The run command for some AQM tasks. + # RUN_CMD_NEXUS: + # The run command for the AQM NEXUS tasks. # # RUN_CMD_AQMLBC: # The run command for the AQM_LBCS task. @@ -258,9 +247,8 @@ platform: RUN_CMD_FCST: "" RUN_CMD_POST: "" RUN_CMD_PRDGEN: "" - RUN_CMD_AQM: "" + RUN_CMD_NEXUS: "" RUN_CMD_AQMLBC: "" - # #----------------------------------------------------------------------- # @@ -277,54 +265,6 @@ platform: # #----------------------------------------------------------------------- # - # Set METplus parameters. Definitions: - # - # CCPA_OBS_DIR: - # User-specified location of the directory where CCPA hourly - # precipitation files used by METplus are located (or, if - # retrieved by the workflow, where they will be placed). 
See comments - # in file scripts/exregional_get_verif_obs.sh for more details about - # files and directory structure, as well as important caveats about - # errors in the metadata and file names. - # NOTE: Do not set this to the same path as other *_OBS_DIR variables; - # otherwise unexpected results and data loss may occur. - # - # NOHRSC_OBS_DIR: - # User-specified location of top-level directory where NOHRSC 6- and - # 24-hour snowfall accumulation files used by METplus are located (or, - # if retrieved by the workflow, where they will be placed). See comments - # in file scripts/exregional_get_verif_obs.sh for more details about - # files and directory structure - # NOTE: Do not set this to the same path as other *_OBS_DIR variables; - # otherwise unexpected results and data loss may occur. - # - # MRMS_OBS_DIR: - # User-specified location of the directory where MRMS composite - # reflectivity and echo top files used by METplus are located (or, if - # retrieved by the workflow, where they will be placed). See comments - # in the scripts/exregional_get_verif_obs.sh for more details about - # files and directory structure. - # NOTE: Do not set this to the same path as other *_OBS_DIR variables; - # otherwise unexpected results and data loss may occur. - # - # NDAS_OBS_DIR: - # User-specified location of top-level directory where NDAS prepbufr - # files used by METplus are located (or, if retrieved by the workflow, - # where they will be placed). See comments in file - # scripts/exregional_get_verif_obs.sh for more details about files - # and directory structure. - # NOTE: Do not set this to the same path as other *_OBS_DIR variables; - # otherwise unexpected results and data loss may occur. - # - #----------------------------------------------------------------------- - # - CCPA_OBS_DIR: "{{ workflow.EXPTDIR }}/obs_data/ccpa/proc" - NOHRSC_OBS_DIR: "{{ workflow.EXPTDIR }}/obs_data/nohrsc/proc" - MRMS_OBS_DIR: "{{ workflow.EXPTDIR }}/obs_data/mrms/proc" - NDAS_OBS_DIR: "{{ workflow.EXPTDIR }}/obs_data/ndas/proc" - # - #----------------------------------------------------------------------- - # # DOMAIN_PREGEN_BASEDIR: # The base directory containing pregenerated grid, orography, and surface # climatology files. This is an alternative for setting GRID_DIR, @@ -339,7 +279,7 @@ platform: # that will point to a subdirectory (having the name of the grid being # used) under this directory. This variable should be set to a null # string in this file, but it can be specified in the user-specified - # workflow configuration file (EXPT_CONFIG_FN). + # workflow configuration file # #----------------------------------------------------------------------- # @@ -421,6 +361,12 @@ platform: # FIXshp: # System directory where the graphics shapefiles are located. # + # FIXaqm: + # System directory where AQM data files are located + # + # FIXemis: + # System directory where AQM emission data files are located. 
+ # # FIXcrtm: # System directory where CRTM fixed files are located # @@ -435,6 +381,8 @@ platform: FIXorg: "" FIXsfc: "" FIXshp: "" + FIXaqm: "" + FIXemis: "" FIXcrtm: "" FIXcrtmupp: "" # @@ -461,7 +409,7 @@ workflow: # #----------------------------------------------------------------------- # - WORKFLOW_ID: !nowtimestamp '' + WORKFLOW_ID: "" # #----------------------------------------------------------------------- # @@ -522,12 +470,17 @@ workflow: # default will point to: # # EXPTDIR: "${EXPT_BASEDIR}/${EXPT_SUBDIR}" + # + # WFLOW_FLAG_FILES_DIR: + # Directory in which flag files marking completion of various workflow + # tasks can be placed. #----------------------------------------------------------------------- # EXPT_BASEDIR: '' # This will be set in setup.py prior to extend_yaml() being called EXPT_SUBDIR: 'experiment' EXEC_SUBDIR: "exec" EXPTDIR: '{{ [workflow.EXPT_BASEDIR, workflow.EXPT_SUBDIR]|path_join }}' + WFLOW_FLAG_FILES_DIR: '{{ [workflow.EXPTDIR, "wflow_flag_files"]|path_join }}' # #----------------------------------------------------------------------- # @@ -546,9 +499,6 @@ workflow: # # Set file names. Definitions: # - # EXPT_CONFIG_FN: - # Name of the user-specified configuration file for the forecast experiment. - # # CONSTANTS_FN: # Name of the file containing definitions of various mathematical, physical, # and SRW App constants. @@ -622,9 +572,10 @@ workflow: # AQM_RC_TMPL_FN: # Template file name of resource file for NOAA Air Quality Model (AQM) # + # FIRE_NML_FN: + # Name of namelist file for UFS_FIRE capability. #----------------------------------------------------------------------- # - EXPT_CONFIG_FN: "config.yaml" CONSTANTS_FN: "constants.yaml" RGNL_GRID_NML_FN: "regional_grid.nml" @@ -644,6 +595,7 @@ workflow: UFS_CONFIG_FN: "ufs.configure" AQM_RC_FN: "aqm.rc" AQM_RC_TMPL_FN: "aqm.rc" + FIRE_NML_FN: "namelist.fire" # #----------------------------------------------------------------------- @@ -686,6 +638,7 @@ workflow: MODEL_CONFIG_TMPL_FP: '{{ [user.PARMdir, MODEL_CONFIG_FN]|path_join }}' UFS_CONFIG_TMPL_FP: '{{ [user.PARMdir, UFS_CONFIG_FN]|path_join }}' AQM_RC_TMPL_FP: '{{ [user.PARMdir, AQM_RC_TMPL_FN]|path_join }}' + FIRE_NML_BASE_FP: '{{ [user.PARMdir, FIRE_NML_FN]|path_join }}' # #----------------------------------------------------------------------- @@ -711,13 +664,11 @@ workflow: # script creates and that defines the workflow for the experiment. # # GLOBAL_VAR_DEFNS_FN: - # Name of file (a shell script) containing the definitions of the primary - # experiment variables (parameters) defined in this default configuration - # script and in the user-specified configuration as well as secondary - # experiment variables generated by the experiment generation script. - # This file is sourced by many scripts (e.g. the J-job scripts corresponding - # to each workflow task) in order to make all the experiment variables - # available in those scripts. + # Name of the experiment configuration file. It contains the primary + # experiment variables defined in this default configuration script and in the + # user-specified configuration as well as secondary experiment variables + # generated by the experiment generation script. This file is the primary + # source of information used in the scripts at run time. 
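
Because the variable definitions file is now YAML (GLOBAL_VAR_DEFNS_FN is set to var_defns.yaml below), run-time scripts load it with the new `source_yaml` utility rather than sourcing a shell file. A minimal sketch (the "workflow" key path is illustrative; it assumes the command is run from an ufs-srweather-app checkout and that the `uw` CLI is installed):

```bash
# Load one section of the experiment's variable definitions into this shell.
. ush/bash_utils/source_yaml.sh
source_yaml "${EXPTDIR}/var_defns.yaml" workflow
# Variables from the "workflow" section should now be set, e.g.:
echo "EXPT_SUBDIR is now: ${EXPT_SUBDIR}"
```
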
# # ROCOTO_YAML_FN: # Name of the YAML file containing the YAML workflow definition from @@ -762,10 +713,11 @@ workflow: UFS_CONFIG_FP: '{{ [EXPTDIR, UFS_CONFIG_FN]|path_join }}' FV3_NML_FP: '{{ [EXPTDIR, FV3_NML_FN]|path_join }}' FV3_NML_STOCH_FP: '{{ [EXPTDIR, [FV3_NML_FN, "_stoch"]|join ]|path_join }}' + FIRE_NML_FP: '{{ [EXPTDIR, FIRE_NML_FN]|path_join }}' FCST_MODEL: "ufs-weather-model" WFLOW_XML_FN: "FV3LAM_wflow.xml" - GLOBAL_VAR_DEFNS_FN: "var_defns.sh" + GLOBAL_VAR_DEFNS_FN: "var_defns.yaml" ROCOTO_YAML_FN: "rocoto_defns.yaml" EXTRN_MDL_VAR_DEFNS_FN: "extrn_mdl_var_defns" WFLOW_LAUNCH_SCRIPT_FN: "launch_FV3LAM_wflow.sh" @@ -929,11 +881,11 @@ workflow: # they are also set to predefined values for the specified grid. # # * If PREDEF_GRID_NAME is set to an empty string, it implies the user - # will provide the native grid parameters in the user-specified - # experiment configuration file (EXPT_CONFIG_FN). In this case, the + # is providing the native grid parameters in the user-specified + # experiment configuration file. In this case, the # grid generation method GRID_GEN_METHOD, the native grid parameters, # the write-component grid parameters, the forecast model's - # main time step DT_ATMOS, and the computational + # main time step DT_ATMOS, and the computational # parameters (LAYOUT_X, LAYOUT_Y, and BLOCKSIZE) must be set in that # configuration file; otherwise, the values of all of these parameters # in this default experiment configuration file will be used. @@ -1635,9 +1587,16 @@ task_make_ics: # FV3-LAM grid. This file will be copied later to a new location and name # changed to fvcom.nc # + # LEVP: + # Number of vertical levels in the atmosphere. In order to change this + # number, the user will additionally need to create a vertical coordinate + # distribution file; this process is described in the Users Guide section + # "Changing the Number of Vertical Levels". This value should be the same + # in both make_ics and make_lbcs. + # # VCOORD_FILE: - # Full path to the file used to set the vertical coordinate in FV3. - # This file should be the same in both make_ics and make_lbcs. + # Full path to the vertical coordinate distribution file. This file should + # be the same in both make_ics and make_lbcs. # #------------------------------------------------------------------------ # @@ -1645,6 +1604,7 @@ task_make_ics: FVCOM_WCSTART: "cold" FVCOM_DIR: "" FVCOM_FILE: "fvcom.nc" + LEVP: 65 VCOORD_FILE: "{{ workflow.FIXam }}/global_hyblev.l65.txt" #---------------------------- @@ -1670,6 +1630,7 @@ task_make_lbcs: KMP_AFFINITY_MAKE_LBCS: "scatter" OMP_NUM_THREADS_MAKE_LBCS: 1 OMP_STACKSIZE_MAKE_LBCS: "1024m" + LEVP: 65 VCOORD_FILE: "{{ workflow.FIXam }}/global_hyblev.l65.txt" #---------------------------- @@ -1701,7 +1662,7 @@ task_run_fcst: #----------------------------------------------------------------------- # # KMP_AFFINITY_*: - # From Intel: "The Intel® runtime library has the ability to bind OpenMP + # From Intel: "The Intel runtime library has the ability to bind OpenMP # threads to physical processing units. The interface is controlled using # the KMP_AFFINITY environment variable. 
Depending on the system (machine) # topology, application, and operating system, thread affinity can have a @@ -1728,7 +1689,7 @@ task_run_fcst: #----------------------------------------------------------------------- # KMP_AFFINITY_RUN_FCST: "scatter" - OMP_NUM_THREADS_RUN_FCST: 2 # atmos_nthreads in model_configure + OMP_NUM_THREADS_RUN_FCST: 2 # ATM_omp_num_threads in ufs.configure OMP_STACKSIZE_RUN_FCST: "1024m" # #----------------------------------------------------------------------- @@ -1790,7 +1751,7 @@ task_run_fcst: # # 1) If the experiment is using a predefined grid, then if the user # sets the parameter in the user-specified experiment configuration - # file (EXPT_CONFIG_FN), that value will be used in the forecast(s). + # file, that value will be used in the forecast(s). # Otherwise, the default value of the parameter for that predefined # grid will be used. # @@ -1924,7 +1885,7 @@ task_run_fcst: QUILTING: true PRINT_ESMF: false - PE_MEMBER01: '{{ LAYOUT_Y * LAYOUT_X + WRTCMP_write_groups * WRTCMP_write_tasks_per_group if QUILTING else LAYOUT_Y * LAYOUT_X}}' + PE_MEMBER01: '{{ OMP_NUM_THREADS_RUN_FCST * (LAYOUT_Y * LAYOUT_X + WRTCMP_write_groups * WRTCMP_write_tasks_per_group) + fire.FIRE_NUM_TASKS if QUILTING else OMP_NUM_THREADS_RUN_FCST * (LAYOUT_Y * LAYOUT_X) + fire.FIRE_NUM_TASKS }}' WRTCMP_write_groups: "" WRTCMP_write_tasks_per_group: "" @@ -2385,149 +2346,351 @@ global: PRINT_DIFF_PGR: false #---------------------------- -# verification (vx) parameters +# Verification (VX) parameters #----------------------------- verification: # - # METPLUS_VERBOSITY_LEVEL: - # Logging verbosity level used by METplus verification tools. 0 to 5, - # with 0 quiet and 5 loud. - # - METPLUS_VERBOSITY_LEVEL: 2 + # General VX Parameters + # ------------------------------- # - # Templates for CCPA, MRMS, and NDAS observation files. - # - # OBS_CCPA_APCP_FN_TEMPLATE: - # File name template for CCPA accumulated precipitation (APCP) observations. - # This template is used by the workflow tasks that call the METplus PcpCombine - # tool on CCPA obs to find the input observation files containing 1-hour - # APCP and then generate NetCDF files containing either 1-hour or greater - # than 1-hour APCP. - # - # OBS_NOHRSC_ASNOW_FN_TEMPLATE: - # File name template for NOHRSC snow observations. - # - # OBS_MRMS_REFC_FN_TEMPLATE: - # File name template for MRMS reflectivity observations. - # - # OBS_MRMS_RETOP_FN_TEMPLATE: - # File name template for MRMS echo top observations. - # - # OBS_NDAS_ADPSFCorADPUPA_FN_TEMPLATE: - # File name template for NDAS surface and upper air observations. - # This template is used by the workflow tasks that call the METplus Pb2nc - # tool on NDAS obs to find the input observation files containing ADP - # surface (ADPSFC) or ADP upper air (ADPUPA) fields and then generate - # NetCDF versions of these files. + # VX_FIELD_GROUPS: + # The groups of fields (some of which may consist of only a single field) + # on which to run verification. 
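
As a quick check of the updated PE_MEMBER01 expression above (from task_run_fcst), a worked example with illustrative, non-default values; the real value is computed by Jinja in config_defaults.yaml using the same formula:

```bash
# Illustrative values only (not SRW defaults).
OMP_NUM_THREADS_RUN_FCST=2
LAYOUT_X=5; LAYOUT_Y=2
WRTCMP_write_groups=1; WRTCMP_write_tasks_per_group=2
FIRE_NUM_TASKS=0
QUILTING=TRUE
if [ "$QUILTING" = "TRUE" ]; then
  PE_MEMBER01=$(( OMP_NUM_THREADS_RUN_FCST * (LAYOUT_Y * LAYOUT_X + WRTCMP_write_groups * WRTCMP_write_tasks_per_group) + FIRE_NUM_TASKS ))
else
  PE_MEMBER01=$(( OMP_NUM_THREADS_RUN_FCST * LAYOUT_Y * LAYOUT_X + FIRE_NUM_TASKS ))
fi
echo "PE_MEMBER01 = ${PE_MEMBER01}"   # 2 * (10 + 2) + 0 = 24
```
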
   #
-  OBS_CCPA_APCP_FN_TEMPLATE: '{valid?fmt=%Y%m%d}/ccpa.t{valid?fmt=%H}z.01h.hrap.conus.gb2'
-  OBS_NOHRSC_ASNOW_FN_TEMPLATE: '{valid?fmt=%Y%m%d}/sfav2_CONUS_${ACCUM_HH}h_{valid?fmt=%Y%m%d%H}_grid184.grb2'
-  OBS_MRMS_REFC_FN_TEMPLATE: '{valid?fmt=%Y%m%d}/MergedReflectivityQCComposite_00.50_{valid?fmt=%Y%m%d}-{valid?fmt=%H%M%S}.grib2'
-  OBS_MRMS_RETOP_FN_TEMPLATE: '{valid?fmt=%Y%m%d}/EchoTop_18_00.50_{valid?fmt=%Y%m%d}-{valid?fmt=%H%M%S}.grib2'
-  OBS_NDAS_ADPSFCorADPUPA_FN_TEMPLATE: 'prepbufr.ndas.{valid?fmt=%Y%m%d%H}'
-  #
-  # OBS_CCPA_APCP_FN_TEMPLATE_PCPCOMBINE_OUTPUT:
-  # Template used to specify the names of the output NetCDF observation
-  # files generated by the worfklow verification tasks that call the METplus
-  # PcpCombine tool on CCPA observations.  (These files will contain obs
-  # APCP, both for 1 hour and for > 1 hour accumulation periods, in NetCDF
-  # format.)
-  #
-  # OBS_NDAS_ADPSFCorADPUPA_FN_TEMPLATE_PB2NC_OUTPUT:
-  # Template used to specify the names of the output NetCDF observation
-  # files generated by the worfklow verification tasks that call the
-  # METplus Pb2nc tool on NDAS observations.  (These files will contain
-  # obs ADPSFC or ADPUPA fields in NetCDF format.)
-  #
-  OBS_CCPA_APCP_FN_TEMPLATE_PCPCOMBINE_OUTPUT: '${OBS_CCPA_APCP_FN_TEMPLATE}_a${ACCUM_HH}h.nc'
-  OBS_NDAS_ADPSFCorADPUPA_FN_TEMPLATE_PB2NC_OUTPUT: '${OBS_NDAS_ADPSFCorADPUPA_FN_TEMPLATE}.nc'
-  #
-  # VX_FCST_MODEL_NAME:
-  # String that specifies a descriptive name for the model being verified.
-  # This is used in forming the names of the verification output files as
-  # well as in the contents of those files.
-  #
-  # VX_FIELDS:
-  # The fields or groups of fields on which to run verification.  Because
-  # accumulated snow (ASNOW) is often not of interest in non-winter cases
-  # and because observation files for ASNOW are not available on NOAA
-  # HPSS for retrospective cases before March 2020, by default ASNOW is
-  # not included VX_FIELDS, but it may be added to this list in order to
+  # Since accumulated snowfall (ASNOW) is often not of interest in non-winter
+  # cases and because observation files for ASNOW are not available on NOAA
+  # HPSS for retrospective cases before March 2020, by default ASNOW is not
+  # included in VX_FIELD_GROUPS, but it may be added to this list in order to
   # include the verification tasks for ASNOW in the workflow.
   #
+  VX_FIELD_GROUPS: [ "APCP", "REFC", "RETOP", "SFC", "UPA" ]
+  #
   # VX_APCP_ACCUMS_HRS:
-  # The 2-digit accumulation periods (in units of hours) to consider for
-  # APCP (accumulated precipitation).  If VX_FIELDS contains "APCP", then
-  # VX_APCP_ACCUMS_HRS must contain at least one element.  If not,
+  # The accumulation intervals (in hours) to include in the verification of
+  # accumulated precipitation (APCP).  If VX_FIELD_GROUPS contains "APCP",
+  # then VX_APCP_ACCUMS_HRS must contain at least one element.  Otherwise,
   # VX_APCP_ACCUMS_HRS will be ignored.
   #
+  VX_APCP_ACCUMS_HRS: [ 1, 3, 6, 24 ]
+  #
   # VX_ASNOW_ACCUMS_HRS:
-  # The 2-digit accumulation periods (in units of hours) to consider for
-  # ASNOW (accumulated snowfall).  If VX_FIELDS contains "ASNOW", then
-  # VX_ASNOW_ACCUMS_HRS must contain at least one element.  If not,
+  # The accumulation intervals (in hours) to include in the verification of
+  # accumulated snowfall (ASNOW).  If VX_FIELD_GROUPS contains "ASNOW",
+  # then VX_ASNOW_ACCUMS_HRS must contain at least one element.  Otherwise,
+  # VX_ASNOW_ACCUMS_HRS will be ignored.
# - VX_FCST_MODEL_NAME: '{{ nco.NET_default }}.{{ task_run_post.POST_OUTPUT_DOMAIN_NAME }}' - VX_FIELDS: [ "APCP", "REFC", "RETOP", "ADPSFC", "ADPUPA" ] - VX_APCP_ACCUMS_HRS: [ 1, 3, 6, 24 ] VX_ASNOW_ACCUMS_HRS: [ 6, 24 ] # - # VX_FCST_INPUT_BASEDIR: - # Template for top-level directory containing forecast (but not obs) - # files that will be used as input into METplus for verification. + # VX_CONFIG_[DET|ENS]_FN: + # Names of configuration files for deterministic and ensemble verification + # that specify the field groups, field names, levels, and (if applicable) + # thresholds for which to run verification. These are relative to the + # directory METPLUS_CONF in which the METplus config templates are + # located. They may include leading relative paths before the file + # names, e.g. "some_dir/another_dir/vx_config_det.yaml". + # + VX_CONFIG_DET_FN: 'vx_configs/vx_config_det.yaml' + VX_CONFIG_ENS_FN: 'vx_configs/vx_config_ens.yaml' # # VX_OUTPUT_BASEDIR: - # Template for top-level directory in which METplus will place its - # output. + # Template for base (i.e. top-level) directory in which METplus will place + # its output. # - VX_FCST_INPUT_BASEDIR: '{% if user.RUN_ENVIR == "nco" %}$COMOUT/../..{% else %}{{ workflow.EXPTDIR }}{% endif %}' VX_OUTPUT_BASEDIR: '{% if user.RUN_ENVIR == "nco" %}$COMOUT/metout{% else %}{{ workflow.EXPTDIR }}{% endif %}' # - # Number of digits in the ensemble member names. This is a configurable - # variable to allow users to change its value (e.g. to go from "mem004" - # to "mem04") when using staged forecast files that do not use the same - # number of digits as the SRW App. + # METplus-Specific Parameters + # ------------------------------- # - VX_NDIGITS_ENSMEM_NAMES: 3 + # METPLUS_VERBOSITY_LEVEL: + # Logging verbosity level used by METplus verification tools. 0 to 5, + # with 0 quiet and 5 loudest. + # + METPLUS_VERBOSITY_LEVEL: 2 # - # File name and path templates used in the verification tasks. + # VX Parameters for Observations + # ------------------------------- # - # FCST_SUBDIR_TEMPLATE: - # Template for the subdirectory containing forecast files that are - # inputs to the verification tasks. - # - # FCST_FN_TEMPLATE: - # Template for the names of the forecast files that are inputs to the - # verification tasks. + # Note: + # The observation types that the SRW App can currently retrieve (if + # necessary) and use in verification are: + # + # * CCPA (Climatology-Calibrated Precipitation Analysis) + # * NOHRSC (National Operational Hydrologic Remote Sensing Center) + # * MRMS (Multi-Radar Multi-Sensor) + # * NDAS (NAM Data Assimilation System) + # + # The script ush/get_obs.py contains further details on the files and + # directory structure of each obs type. + # + + # + # [CCPA|NOHRSC|MRMS|NDAS]_OBS_AVAIL_INTVL_HRS: + # Time interval (in hours) at which various types of obs are available on + # NOAA's HPSS. + # + # Note that MRMS files are in fact available every few minutes, but here + # we set the obs availability interval to 1 hour because currently that + # is the shortest output interval for forecasts, i.e. the forecasts cannot + # (yet) support sub-hourly output. + # + CCPA_OBS_AVAIL_INTVL_HRS: 1 + NOHRSC_OBS_AVAIL_INTVL_HRS: 6 + MRMS_OBS_AVAIL_INTVL_HRS: 1 + NDAS_OBS_AVAIL_INTVL_HRS: 1 + # + # [CCPA|NOHRSC|MRMS|NDAS]_OBS_DIR: + # Base directory in which CCPA, NOHRSC, MRMS, or NDAS obs files needed by + # the verification tasks are located. If the files do not exist, they + # will be retrieved and placed under this directory. 
+ # + # Note that: + # + # * If the obs files need to be retrieved (e.g. from NOAA's HPSS), because + # they are not already staged on disk, then the user must have write + # permission to this directory. Otherwise, the "get_obs" workflow tasks + # that attempt to create these files will fail. + # + # * CCPA obs contain errors in the metadata for a certain range of dates + # that need to be corrected during obs retrieval. This is described + # in more detail in the script ush/get_obs.py. + # + CCPA_OBS_DIR: "{{ workflow.EXPTDIR }}/obs_data/ccpa" + NOHRSC_OBS_DIR: "{{ workflow.EXPTDIR }}/obs_data/nohrsc" + MRMS_OBS_DIR: "{{ workflow.EXPTDIR }}/obs_data/mrms" + NDAS_OBS_DIR: "{{ workflow.EXPTDIR }}/obs_data/ndas" + # + # OBS_[CCPA|NOHRSC|MRMS|NDAS]_FN_TEMPLATES: + # File name templates for various obs types. These are meant to be used + # in METplus configuration files and thus contain METplus time formatting + # strings. Each of these variables is a python list containing pairs of + # values. The first element of each pair specifies the verification field + # group(s) for which the file name template will be needed, and the second + # element is the file name template itself, which may include a leading + # relative directory. (Here, by "verification field group", we mean a + # group of fields that is verified together in the workflow; see the + # description of the variable VX_FIELD_GROUPS.) For example, for CCPA + # obs, the variable name is OBS_CCPA_FN_TEMPLATES. From the default value + # of this variable given below, we see that if CCPA_OBS_AVAIL_INTVL_HRS + # is set to 1 (i.e. the CCPA obs are assumed to be available every hour) + # and the valid time is 2024042903, then the obs file (including a relative + # path) to look for and, if necessary, create is + # + # 20240429/ccpa.t03z.01h.hrap.conus.gb2 + # + # This file will be used in the verification of fields under the APCP + # field group (which consist of accumulated precipitation for the + # accumulation intervals specified in VX_APCP_ACCUMS_HRS). + # + # Notes: + # + # * The file name templates are relative to the base directories given in + # the variables + # + # [CCPA|NOHRSC|MRMS|NDAS]_OBS_DIR + # + # defined above. Thus, the template for the full path to the obs files + # is given, e.g. for CCPA obs, by + # + # {CCPA_OBS_DIR}/{OBS_CCPA_FN_TEMPLATES[1]} + # + # where the [1] indicates the second element of the list OBS_CCPA_FN_TEMPLATES. + # + # * The file name templates may represent file names only, or they may + # include leading relative directories. + # + # * The default values of these variables for the CCPA, NOHRSC, and NDAS + # obs types contain only one pair of values (because these obs types + # contain only one set of files that we use in the verification) while + # the default value for the MRMS obs type contains two pairs of values, + # one for the set of files that contains composite reflectivity data + # and another for the set that contains echo top data. This is simply + # because the MRMS obs type does not group these two fields together in + # one set of files as does, for example, the NDAS obs type. + # + # * Each file name template must contain full information about the year, + # month, day, and hour by including METplus time formatting strings for + # this information. Some of this information (e.g. the year, month, + # and day) may be in the relative directory portion of the template and + # the rest (e.g.
the hour) in the file name, or there may be no relative + # directory portion and all of this information may be in the file name, + # but all four pieces of timing information must be present somewhere in + # each template as METplus time formatting strings. If not, obs files + # created by the "get_obs" tasks for different days might overwrite each + # other. + # + # * The workflow generation scripts create a "get_obs" task for each obs + # type that is needed in the verification and for each day on which that + # obs type is needed for at least some hours. That "get_obs" task first + # checks whether all the necessary obs files for that day already exist + # at the locations specified by the full path template(s) (which are + # obtained by combining the base directories [CCPA|NOHRSC|MRMS|NDAS]_OBS_DIR + # with the file name template(s)). If for a given day one or more of + # these obs files do not exist on disk, the "get_obs" task will retrieve + # "raw" versions of these files from a data store (e.g. NOAA's HPSS) + # and will place them in a temporary "raw" directory. It will then + # move or copy these raw files to the locations specified by the full + # path template(s). + # + # * The raw obs files, i.e. the obs files as they are named and arranged + # in the data stores and retrieved and placed in the raw directories, + # may be arranged differently and/or have names that are different from + # the ones specified in the file name templates. If so, they are renamed + # while being moved or copied from the raw directories to the locations + # specified by the full path template(s). (The lists of templates for + # searching for and retrieving files from the data stores are different + # from the METplus templates described here; the former are given in + # the data retrieval configuration file at parm/data_locations.yml.) + # + # * When the ex-scripts for the various vx tasks are converted from bash + # to python scripts, these variables should be converted from python + # lists to python dictionaries, where the first element of each pair + # becomes the key and the second becomes the value. This currently + # cannot be done due to limitations in the workflow on converting + # python dictionaries to bash variables. + # + OBS_CCPA_FN_TEMPLATES: [ 'APCP', + '{%- set obs_avail_intvl_hrs = "%02d" % CCPA_OBS_AVAIL_INTVL_HRS %} + {{- "{valid?fmt=%Y%m%d}/ccpa.t{valid?fmt=%H}z." ~ obs_avail_intvl_hrs ~ "h.hrap.conus.gb2" }}' ] + OBS_NOHRSC_FN_TEMPLATES: [ 'ASNOW', + '{%- set obs_avail_intvl_hrs = "%d" % NOHRSC_OBS_AVAIL_INTVL_HRS %} + {{- "sfav2_CONUS_" ~ obs_avail_intvl_hrs ~ "h_{valid?fmt=%Y%m%d%H}_grid184.grb2" }}' ] + OBS_MRMS_FN_TEMPLATES: [ 'REFC', '{valid?fmt=%Y%m%d}/MergedReflectivityQCComposite_00.50_{valid?fmt=%Y%m%d}-{valid?fmt=%H%M%S}.grib2', + 'RETOP', '{valid?fmt=%Y%m%d}/EchoTop_18_00.50_{valid?fmt=%Y%m%d}-{valid?fmt=%H%M%S}.grib2' ] + OBS_NDAS_FN_TEMPLATES: [ 'SFCandUPA', 'prepbufr.ndas.{valid?fmt=%Y%m%d%H}' ] + # + # REMOVE_RAW_OBS_[CCPA|NOHRSC|MRMS|NDAS]: + # Flag specifying whether to remove the "raw" observation directories + # after retrieving the specified type of obs (CCPA, NOHRSC, MRMS, or + # NDAS) from a data store (e.g. NOAA's HPSS). The raw directories + # are the ones in which the observation files are placed immediately + # after pulling them from the data store but before performing any + # processing on them such as renaming the files and/or reorganizing + # their directory structure.
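As a minimal sketch of how the METplus file name templates above resolve for a particular valid time, the snippet below applies the same METplus routine that the new ush/eval_metplus_timestr_tmpl.py script (later in this diff) relies on to the CCPA template; it assumes METPLUS_ROOT is importable, as that script requires:

    # Illustrative only: resolve the CCPA template for valid time 2024042903.
    from datetime import datetime
    from metplus.util import string_template_substitution as sts

    tmpl = "{valid?fmt=%Y%m%d}/ccpa.t{valid?fmt=%H}z.01h.hrap.conus.gb2"
    valid = datetime.strptime("2024042903", "%Y%m%d%H")
    print(sts.do_string_sub(tmpl=tmpl, valid=valid))
    # -> 20240429/ccpa.t03z.01h.hrap.conus.gb2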
+ # + REMOVE_RAW_OBS_CCPA: True + REMOVE_RAW_OBS_NOHRSC: True + REMOVE_RAW_OBS_MRMS: True + REMOVE_RAW_OBS_NDAS: True # - # FCST_FN_TEMPLATE_PCPCOMBINE_OUTPUT: - # Template used to specify the names of the output NetCDF forecast files - # generated by the worfklow verification tasks that call the METplus - # PcpCombine tool on forecasts. (These files will contain forecast APCP, - # both for 1 hour and for > 1 hour accumulation periods, in NetCDF - # format.) + # OBS_CCPA_APCP_FN_TEMPLATE_PCPCOMBINE_OUTPUT: + # METplus template for the names of the NetCDF files generated by the + # workflow verification tasks that call METplus's PcpCombine tool on + # CCPA observations. These files will contain observed accumulated + # precipitation in NetCDF format for various accumulation intervals. + # + OBS_CCPA_APCP_FN_TEMPLATE_PCPCOMBINE_OUTPUT: '{%- set obs_avail_intvl_hrs = "%02d" % CCPA_OBS_AVAIL_INTVL_HRS %} + {{- "ccpa.t{valid?fmt=%H}z." ~ obs_avail_intvl_hrs ~ "h.hrap.conus.gb2_a${ACCUM_HH}h.nc" }}' + # + # OBS_NOHRSC_ASNOW_FN_TEMPLATE_PCPCOMBINE_OUTPUT: + # METplus template for the names of the NetCDF files generated by the + # workflow verification tasks that call METplus's PcpCombine tool on + # NOHRSC observations. These files will contain observed accumulated + # snowfall for various accumulation intervals. + # + OBS_NOHRSC_ASNOW_FN_TEMPLATE_PCPCOMBINE_OUTPUT: '{%- set obs_avail_intvl_hrs = "%d" % NOHRSC_OBS_AVAIL_INTVL_HRS %} + {{- "sfav2_CONUS_" ~ obs_avail_intvl_hrs ~ "h_{valid?fmt=%Y%m%d%H}_grid184.grb2_a${ACCUM_HH}h.nc" }}' + # + # OBS_NDAS_SFCandUPA_FN_TEMPLATE_PB2NC_OUTPUT: + # METplus template for the names of the NetCDF files generated by the + # workflow verification tasks that call METplus's Pb2nc tool on the + # prepbufr files in NDAS observations. These files will contain the + # observed surface (SFC) and upper-air (UPA) fields in NetCDF format + # (instead of NDAS's native prepbufr format). # - FCST_SUBDIR_TEMPLATE: '{% if user.RUN_ENVIR == "nco" %}${NET_default}.{init?fmt=%Y%m%d?shift=-${time_lag}}/{init?fmt=%H?shift=-${time_lag}}{% else %}{init?fmt=%Y%m%d%H?shift=-${time_lag}}{% if global.DO_ENSEMBLE %}/${ensmem_name}{% endif %}/postprd{% endif %}' - FCST_FN_TEMPLATE: '${NET_default}.t{init?fmt=%H?shift=-${time_lag}}z{% if user.RUN_ENVIR == "nco" and global.DO_ENSEMBLE %}.${ensmem_name}{% endif %}.prslev.f{lead?fmt=%HHH?shift=${time_lag}}.${POST_OUTPUT_DOMAIN_NAME}.grib2' - FCST_FN_TEMPLATE_PCPCOMBINE_OUTPUT: '${NET_default}.t{init?fmt=%H}z{% if user.RUN_ENVIR == "nco" and global.DO_ENSEMBLE %}.${ensmem_name}{% endif %}.prslev.f{lead?fmt=%HHH}.${POST_OUTPUT_DOMAIN_NAME}_${VAR}_a${ACCUM_HH}h.nc' + OBS_NDAS_SFCandUPA_FN_TEMPLATE_PB2NC_OUTPUT: '${OBS_NDAS_FN_TEMPLATES[1]}.nc' # + # NUM_MISSING_OBS_FILES_MAX: # For verification tasks that need observational data, this specifies # the maximum number of observation files that may be missing. If more # than this number are missing, the verification task will error out. - # - # Note that this is a crude way of checking that there are enough obs to - # conduct verification since this number should probably depend on the + # This is a crude way of checking that there are enough obs to conduct + # verification (crude because this number should probably depend on the # field being verified, the time interval between observations, the - # length of the forecast, etc. An alternative may be to specify the - # maximum allowed fraction of obs files that can be missing (i.e.
the - # number missing divided by the number that are expected to exist). + # length of the forecast, etc; an alternative may be to specify the + # maximum allowed fraction of obs files that can be missing). # NUM_MISSING_OBS_FILES_MAX: 2 # + # VX Parameters for Forecasts + # ---------------------------- + # + # VX_FCST_MODEL_NAME: + # String that specifies a descriptive name for the model being verified. + # This is used in forming the names of the verification output files and + # is also included in the contents of those files. + # + VX_FCST_MODEL_NAME: '{{ nco.NET_default }}.{{ task_run_post.POST_OUTPUT_DOMAIN_NAME }}' + # + # VX_FCST_OUTPUT_INTVL_HRS: + # The forecast output interval (in hours) to assume for verification + # purposes. + # Note: + # If/when a variable is created in this configuration file that specifies + # the forecast output interval for native SRW forecasts, it should be + # used as the default value of this variable. + # + VX_FCST_OUTPUT_INTVL_HRS: 1 + # + # VX_FCST_INPUT_BASEDIR: + # METplus template for the name of the base (i.e. top-level) directory + # containing the forecast files to use as inputs to the verification + # tasks. + # + VX_FCST_INPUT_BASEDIR: '{% if user.RUN_ENVIR == "nco" %}$COMOUT/../..{% else %}{{ workflow.EXPTDIR }}{% endif %}' + # + # FCST_SUBDIR_TEMPLATE: + # METplus template for the name of the subdirectory containing forecast + # files to use as inputs to the verification tasks. + # + FCST_SUBDIR_TEMPLATE: '{%- if user.RUN_ENVIR == "nco" %} + {{- "${NET_default}.{init?fmt=%Y%m%d?shift=-${time_lag}}/{init?fmt=%H?shift=-${time_lag}}" }} + {%- else %} + {{- "{init?fmt=%Y%m%d%H?shift=-${time_lag}}" }} + {%- if global.DO_ENSEMBLE %} + {{- "/${ensmem_name}" }} + {%- endif %} + {{- "/postprd" }} + {%- endif %}' + # + # FCST_FN_TEMPLATE: + # METplus template for the names of the forecast files to use as inputs + # to the verification tasks. + # + FCST_FN_TEMPLATE: '{{- "${NET_default}.t{init?fmt=%H?shift=-${time_lag}}z" }} + {%- if user.RUN_ENVIR == "nco" and global.DO_ENSEMBLE %} + {{- ".${ensmem_name}" }} + {%- endif %} + {{- ".prslev.f{lead?fmt=%HHH?shift=${time_lag}}.${POST_OUTPUT_DOMAIN_NAME}.grib2" }}' + # + # FCST_FN_TEMPLATE_PCPCOMBINE_OUTPUT: + # METplus template for the names of the NetCDF files generated by the + # worfklow verification tasks that call METplus's PcpCombine tool on + # forecast output. These files will contain forecast accumulated + # precipitation in NetCDF format for various accumulation intervals. + # + FCST_FN_TEMPLATE_PCPCOMBINE_OUTPUT: '{{- "${NET_default}.t{init?fmt=%H}z" }} + {%- if user.RUN_ENVIR == "nco" and global.DO_ENSEMBLE %} + {{- ".${ensmem_name}" }} + {%- endif %} + {{- ".prslev.f{lead?fmt=%HHH}.${POST_OUTPUT_DOMAIN_NAME}_${FIELD_GROUP}_a${ACCUM_HH}h.nc" }}' + # + # VX_NDIGITS_ENSMEM_NAMES: + # Number of digits to assume/use in the forecast ensemble member identifier + # string used in directory and file names and other instances in which the + # ensemble member needs to be identified. For example, if this is set to + # 3, the identifier for ensemble member 4 will be "mem004", while if it's + # set to 2, the identifier will be "mem04". This is useful when verifying + # staged forecast files from a forecasting model/system other than the + # SRW that uses a different number of digits in the ensemble member + # identifier string. 
+ # + VX_NDIGITS_ENSMEM_NAMES: 3 + # + # NUM_MISSING_FCST_FILES_MAX: # For verification tasks that need forecast data, this specifies the # maximum number of post-processed forecast files that may be missing. - # If more than this number are missing, the verification task will not - # be run. + # If more than this number are missing, the verification task will exit + # with an error. # NUM_MISSING_FCST_FILES_MAX: 0 @@ -2672,6 +2835,124 @@ cpl_aqm_parm: NEXUS_GFS_SFC_DIR: "" NEXUS_GFS_SFC_ARCHV_DIR: "/NCEPPROD/hpssprod/runhistory" +#---------------------------- +# UFS FIRE config parameters +#----------------------------- +fire: + # + #----------------------------------------------------------------------- + # + # UFS_FIRE: + # Switch for turning on fire simulation + # + # FIRE_INPUT_DIR + # Directory where fire input file (geo_em.d01.nc) can be found + # + # DT_FIRE + # The fire behavior component’s integration timestep + # + # OUTPUT_DT_FIRE + # The fire behavior component’s output timestep + # + # FIRE_NUM_TASKS + # Number of MPI tasks assigned to the FIRE_BEHAVIOR component. + # + + UFS_FIRE: False + FIRE_INPUT_DIR: "" + DT_FIRE: 0.5 + OUTPUT_DT_FIRE: 300 + FIRE_NUM_TASKS: 0 + + + # The following options control namelist values in the Community Fire + # Behavior Model. See the users guide for more information. + # ---------------------------------------------------------------------- + # + # FIRE_WIND_HEIGHT + # Height to interpolate winds to for calculating fire spread rate + # + # FIRE_PRINT_MSG + # Debug print level for the weather model fire core. Levels greater than 0 will print extra + # messages to the log file at run time. + # 0: no extra prints + # 1: Extra prints + # 2: More extra prints + # 3: Even more extra prints + # + # FIRE_ATM_FEEDBACK + # Multiplier for heat fluxes. Use 1.0 for normal two-way coupling. Use 0.0 for one-way coupling. + # Intermediate values will vary the amount of forcing provided from the fire to the dynamical core + # + # FIRE_VISCOSITY + # Artificial viscosity in level set method. Maximum 1, required for FIRE_UPWINDING=0 + # + # FIRE_UPWINDING + # Upwinding scheme used for calculating the normal spread of the fire front + # 0 = Central Difference + # 1 = Standard + # 2 = Godunov + # 3 = ENO1 + # 4 = Sethian + # 5 = 2nd-order Sethian + # 6 = WENO3 + # 7 = WENO5 + # 8 = Hybrid WENO3/ENO1 + # 9 = Hybrid WENO5/ENO1 + # + # FIRE_LSM_ZCOUPLING + # When true, uses FIRE_LSM_ZCOUPLING_REF instead of FIRE_WIND_HEIGHT as a reference height + # to calculate the logarithmic surface layer wind profile + # + # FIRE_LSM_ZCOUPLING_REF + # Reference height from which the velocity at FIRE_WIND_HEIGHT is calculated using a logarithmic profile + # + # FIRE_NUM_IGNITIONS + # Number of fire ignitions.
+ # + # NOTE: If FIRE_NUM_IGNITIONS > 1, the following variables should be lists with one entry for each ignition + # FIRE_IGNITION_ROS + # + # + # FIRE_IGNITION_START_LAT + # + # + # FIRE_IGNITION_START_LON + # + # + # FIRE_IGNITION_END_LAT + # + # + # FIRE_IGNITION_END_LON + # + # + # FIRE_IGNITION_RADIUS + # + # + # FIRE_IGNITION_START_TIME + # + # + # FIRE_IGNITION_END_TIME + # + + FIRE_WIND_HEIGHT: 5.0 + FIRE_PRINT_MSG: 0 + FIRE_ATM_FEEDBACK: 0.0 + FIRE_VISCOSITY: 0.4 + FIRE_UPWINDING: 9 + FIRE_LSM_ZCOUPLING: False + FIRE_LSM_ZCOUPLING_REF: 60.0 + FIRE_NUM_IGNITIONS: 1 + FIRE_IGNITION_ROS: 0.05 + FIRE_IGNITION_START_LAT: 40.609 + FIRE_IGNITION_START_LON: -105.879 + FIRE_IGNITION_END_LAT: 40.609 + FIRE_IGNITION_END_LON: -105.879 + FIRE_IGNITION_RADIUS: 250 + FIRE_IGNITION_START_TIME: 6480 + FIRE_IGNITION_END_TIME: 7000 + + rocoto: attrs: "" cycledefs: "" diff --git a/ush/config_utils.py b/ush/config_utils.py index 84949c7db4..7e5f2d77e4 100755 --- a/ush/config_utils.py +++ b/ush/config_utils.py @@ -2,9 +2,11 @@ """ Interface to configuration file management utilities. -To see what it can do: +To see what it can do, run: - ./config_utils --help +.. code-block:: console + + ./config_utils --help """ diff --git a/ush/create_aqm_rc_file.py b/ush/create_aqm_rc_file.py index 726e8eb0f3..c459b6a587 100644 --- a/ush/create_aqm_rc_file.py +++ b/ush/create_aqm_rc_file.py @@ -6,88 +6,94 @@ import argparse import os import sys -import tempfile -from subprocess import STDOUT, CalledProcessError, check_output from textwrap import dedent +from uwtools.api.template import render from python_utils import ( cfg_to_yaml_str, flatten_dict, import_vars, - load_shell_config, + load_yaml_config, print_info_msg, print_input_args, str_to_type, ) + def create_aqm_rc_file(cdate, run_dir, init_concentrations): - """ Creates an aqm.rc file in the specified run directory + """Creates an ``aqm.rc`` file in the specified run directory Args: - cdate: cycle date - run_dir: run directory - init_concentrations + cdate (datetime.datetime): Cycle date + run_dir (str): Run directory + init_concentrations (bool): Flag to reset initial AQM concentrations (tracer values) to + zero. Returns: - Boolean + True """ print_input_args(locals()) - #import all environment variables + # import all environment variables import_vars() - #pylint: disable=undefined-variable + # pylint: disable=undefined-variable # - #----------------------------------------------------------------------- + # ----------------------------------------------------------------------- # # Create the aqm.rc file in the specified run directory. # - #----------------------------------------------------------------------- + # ----------------------------------------------------------------------- # - print_info_msg(f''' + print_info_msg( + f''' Creating the aqm.rc file (\"{AQM_RC_FN}\") in the specified run directory (run_dir): - run_dir = \"{run_dir}\"''', verbose=VERBOSE) + run_dir = \"{run_dir}\"''', + verbose=VERBOSE, + ) # # Set output file path # - aqm_rc_fp=os.path.join(run_dir, AQM_RC_FN) + aqm_rc_fp = os.path.join(run_dir, AQM_RC_FN) # # Extract from cdate the starting year, month, and day of the forecast. # - yyyymmdd=cdate.strftime('%Y%m%d') - mm=f"{cdate.month:02d}" # pylint: disable=invalid-name - hh=f"{cdate.hour:02d}" # pylint: disable=invalid-name + yyyymmdd = cdate.strftime("%Y%m%d") + mm = f"{cdate.month:02d}" # pylint: disable=invalid-name + hh = f"{cdate.hour:02d}" # pylint: disable=invalid-name # # Set parameters in the aqm.rc file.
# - aqm_rc_bio_file_fp=os.path.join(FIXaqm,"bio", AQM_BIO_FILE) + aqm_rc_bio_file_fp = os.path.join(FIXaqm, "bio", AQM_BIO_FILE) # Fire config - aqm_rc_fire_file_fp=os.path.join( - COMIN, - f"{AQM_FIRE_FILE_PREFIX}_{yyyymmdd}_t{hh}z{AQM_FIRE_FILE_SUFFIX}" - ) + aqm_rc_fire_file_fp = os.path.join( + COMIN, f"{AQM_FIRE_FILE_PREFIX}_{yyyymmdd}_t{hh}z{AQM_FIRE_FILE_SUFFIX}" + ) # Dust config - aqm_rc_dust_file_fp=os.path.join( - FIXaqm,"dust", - f"{AQM_DUST_FILE_PREFIX}_{PREDEF_GRID_NAME}{AQM_DUST_FILE_SUFFIX}", - ) + aqm_rc_dust_file_fp = os.path.join( + FIXaqm, + "dust", + f"{AQM_DUST_FILE_PREFIX}_{PREDEF_GRID_NAME}{AQM_DUST_FILE_SUFFIX}", + ) # Canopy config - aqm_rc_canopy_file_fp=os.path.join( - FIXaqm,"canopy",PREDEF_GRID_NAME, + aqm_rc_canopy_file_fp = os.path.join( + FIXaqm, + "canopy", + PREDEF_GRID_NAME, f"{AQM_CANOPY_FILE_PREFIX}.{mm}{AQM_CANOPY_FILE_SUFFIX}", - ) + ) # - #----------------------------------------------------------------------- + # ----------------------------------------------------------------------- # # Create a multiline variable that consists of a yaml-compliant string # specifying the values that the jinja variables in the template # AQM_RC_TMPL_FN file should be set to. # - #----------------------------------------------------------------------- + # ----------------------------------------------------------------------- # settings = { "do_aqm_dust": DO_AQM_DUST, @@ -102,7 +108,7 @@ def create_aqm_rc_file(cdate, run_dir, init_concentrations): "aqm_rc_dust_file_fp": aqm_rc_dust_file_fp, "aqm_rc_canopy_file_fp": aqm_rc_canopy_file_fp, "aqm_rc_product_fn": AQM_RC_PRODUCT_FN, - "aqm_rc_product_frequency": AQM_RC_PRODUCT_FREQUENCY + "aqm_rc_product_frequency": AQM_RC_PRODUCT_FREQUENCY, } settings_str = cfg_to_yaml_str(settings) @@ -117,74 +123,57 @@ def create_aqm_rc_file(cdate, run_dir, init_concentrations): verbose=VERBOSE, ) # - #----------------------------------------------------------------------- + # ----------------------------------------------------------------------- # # Call a python script to generate the experiment's actual AQM_RC_FN # file from the template file. 
# - #----------------------------------------------------------------------- - # - with tempfile.NamedTemporaryFile( - dir="./", - mode="w+t", - prefix="aqm_rc_settings", - suffix=".yaml") as tmpfile: - tmpfile.write(settings_str) - tmpfile.seek(0) - cmd = " ".join(["uw template render", - "-i", - AQM_RC_TMPL_FP, - "-o", - aqm_rc_fp, - "-v", - "--values-file", - tmpfile.name, - ] - ) - indent = " " - output = "" - try: - output = check_output(cmd, encoding="utf=8", shell=True, - stderr=STDOUT, text=True) - except CalledProcessError as e: - output = e.output - print(f"Failed with status: {e.returncode}") - sys.exit(1) - finally: - print("Output:") - for line in output.split("\n"): - print(f"{indent * 2}{line}") + # ----------------------------------------------------------------------- + # + render( + input_file=AQM_RC_TMPL_FP, + output_file=aqm_rc_fp, + values_src=settings, + ) return True -def parse_args(argv): - """ Parse command line arguments""" + +def _parse_args(argv): + """Parses command line arguments""" parser = argparse.ArgumentParser(description="Creates aqm.rc file.") - parser.add_argument("-r", "--run-dir", - dest="run_dir", - required=True, - help="Run directory.") + parser.add_argument("-r", "--run-dir", dest="run_dir", required=True, help="Run directory.") - parser.add_argument("-c", "--cdate", - dest="cdate", - required=True, - help="Date string in YYYYMMDD format.") + parser.add_argument( + "-c", + "--cdate", + dest="cdate", + required=True, + help="Date string in YYYYMMDD format.", + ) - parser.add_argument("-i", "--init_concentrations", - dest="init_concentrations", - required=True, - help="Flag for initial concentrations.") + parser.add_argument( + "-i", + "--init_concentrations", + dest="init_concentrations", + required=True, + help="Flag for initial concentrations.", + ) - parser.add_argument("-p", "--path-to-defns", - dest="path_to_defns", - required=True, - help="Path to var_defns file.") + parser.add_argument( + "-p", + "--path-to-defns", + dest="path_to_defns", + required=True, + help="Path to var_defns file.", + ) return parser.parse_args(argv) + if __name__ == "__main__": - args = parse_args(sys.argv[1:]) - cfg = load_shell_config(args.path_to_defns) + args = _parse_args(sys.argv[1:]) + cfg = load_yaml_config(args.path_to_defns) cfg = flatten_dict(cfg) import_vars(dictionary=cfg) create_aqm_rc_file( diff --git a/ush/create_diag_table_file.py b/ush/create_diag_table_file.py index 40f5e0deee..9ae7530b3a 100644 --- a/ush/create_diag_table_file.py +++ b/ush/create_diag_table_file.py @@ -1,33 +1,31 @@ #!/usr/bin/env python3 """ -Function to create a diag_table file for the FV3 model using a -template. 
+Creates a ``diag_table`` file for the FV3 model using a template """ import argparse import os import sys -import tempfile -from subprocess import STDOUT, CalledProcessError, check_output from textwrap import dedent +from uwtools.api.template import render from python_utils import ( cfg_to_yaml_str, flatten_dict, import_vars, - load_shell_config, + load_yaml_config, print_info_msg, print_input_args, ) def create_diag_table_file(run_dir): - """Creates a diagnostic table file for each cycle to be run + """Creates an FV3 diagnostic table (``diag_table``) file for each cycle to be run Args: - run_dir: run directory + run_dir (str): Run directory Returns: - Boolean + True """ print_input_args(locals()) @@ -60,7 +58,10 @@ def create_diag_table_file(run_dir): verbose=VERBOSE, ) - settings = {"starttime": CDATE, "cres": CRES} + settings = {"starttime": CDATE, "cres": CRES, "additional_entries": ""} + if UFS_FIRE: + settings["additional_entries"] = \ + '"gfs_phys","fsmoke","fsmoke","fv3_history","all",.false.,"none",2' settings_str = cfg_to_yaml_str(settings) print_info_msg( @@ -74,37 +75,16 @@ def create_diag_table_file(run_dir): verbose=VERBOSE, ) - with tempfile.NamedTemporaryFile(dir="./", - mode="w+t", - prefix="aqm_rc_settings", - suffix=".yaml") as tmpfile: - tmpfile.write(settings_str) - tmpfile.seek(0) - cmd = " ".join(["uw template render", - "-i", DIAG_TABLE_TMPL_FP, - "-o", diag_table_fp, - "-v", - "--values-file", tmpfile.name, - ] + render( + input_file = DIAG_TABLE_TMPL_FP, + output_file = diag_table_fp, + values_src = settings, ) - indent = " " - output = "" - try: - output = check_output(cmd, encoding="utf=8", shell=True, - stderr=STDOUT, text=True) - except CalledProcessError as e: - output = e.output - print(f"Failed with status: {e.returncode}") - sys.exit(1) - finally: - print("Output:") - for line in output.split("\n"): - print(f"{indent * 2}{line}") return True -def parse_args(argv): - """Parse command line arguments""" +def _parse_args(argv): + """Parses command line arguments""" parser = argparse.ArgumentParser(description="Creates diagnostic table file.") parser.add_argument( @@ -123,8 +103,8 @@ def parse_args(argv): if __name__ == "__main__": - args = parse_args(sys.argv[1:]) - cfg = load_shell_config(args.path_to_defns) + args = _parse_args(sys.argv[1:]) + cfg = load_yaml_config(args.path_to_defns) cfg = flatten_dict(cfg) import_vars(dictionary=cfg) create_diag_table_file(args.run_dir) diff --git a/ush/create_model_configure_file.py b/ush/create_model_configure_file.py index cd10ac404e..e0e420c71b 100644 --- a/ush/create_model_configure_file.py +++ b/ush/create_model_configure_file.py @@ -1,20 +1,18 @@ #!/usr/bin/env python3 """ -Create a model_configure file for the FV3 forecast model from a -template. +Creates a ``model_configure`` file for the FV3 forecast model from a template. 
""" import argparse import os import sys -import tempfile from textwrap import dedent -from subprocess import STDOUT, CalledProcessError, check_output +from uwtools.api.template import render from python_utils import ( cfg_to_yaml_str, flatten_dict, import_vars, - load_shell_config, + load_yaml_config, lowercase, print_info_msg, print_input_args, @@ -25,19 +23,20 @@ def create_model_configure_file( cdate, fcst_len_hrs, fhrot, run_dir, sub_hourly_post, dt_subhourly_post_mnts, dt_atmos ): #pylint: disable=too-many-arguments - """Creates a model configuration file in the specified - run directory + """Creates a model configuration file in the specified run directory Args: - cdate: cycle date - fcst_len_hrs: forecast length in hours - fhrot: forecast hour at restart - run_dir: run directory - sub_hourly_post - dt_subhourly_post_mnts - dt_atmos + cdate (int): Cycle date in ``YYYYMMDD`` format + fcst_len_hrs (int): Forecast length in hours + fhrot (int): Forecast hour at restart + run_dir (str): Run directory + sub_hourly_post (bool): Sets subhourly post to either ``True`` or ``False`` + dt_subhourly_post_mnts (int): Subhourly forecast model output and post-processing + frequency in minutes + dt_atmos (int): Atmospheric forecast model's main timestep in seconds + Returns: - Boolean + True """ print_input_args(locals()) @@ -71,7 +70,6 @@ def create_model_configure_file( # ----------------------------------------------------------------------- # settings = { - "PE_MEMBER01": PE_MEMBER01, "start_year": cdate.year, "start_month": cdate.month, "start_day": cdate.day, @@ -79,7 +77,6 @@ def create_model_configure_file( "nhours_fcst": fcst_len_hrs, "fhrot": fhrot, "dt_atmos": DT_ATMOS, - "atmos_nthreads": OMP_NUM_THREADS_RUN_FCST, "restart_interval": RESTART_INTERVAL, "itasks": ITASKS, "write_dopost": f".{lowercase(str(WRITE_DOPOST))}.", @@ -88,7 +85,7 @@ def create_model_configure_file( } # # If the write-component is to be used, then specify a set of computational - # parameters and a set of grid parameters. The latter depends on the type + # parameters and a set of grid parameters. The latter depends on the type # (coordinate system) of the grid that the write-component will be using. # if QUILTING: @@ -162,22 +159,22 @@ def create_model_configure_file( ) # # If sub_hourly_post is set to "TRUE", then the forecast model must be - # directed to generate output files on a sub-hourly interval. Do this + # directed to generate output files on a sub-hourly interval. Do this # by specifying the output interval in the model configuration file # (MODEL_CONFIG_FN) in units of number of forecat model time steps (nsout). # nsout is calculated using the user-specified output time interval # dt_subhourly_post_mnts (in units of minutes) and the forecast model's - # main time step dt_atmos (in units of seconds). Note that nsout is + # main time step dt_atmos (in units of seconds). Note that nsout is # guaranteed to be an integer because the experiment generation scripts # require that dt_subhourly_post_mnts (after conversion to seconds) be - # evenly divisible by dt_atmos. Also, in this case, the variable output_fh + # evenly divisible by dt_atmos. Also, in this case, the variable output_fh # [which specifies the output interval in hours; # see the jinja model_config template file] is set to 0, although this - # doesn't matter because any positive of nsout will override output_fh. + # doesn't matter because any positive value of nsout will override output_fh. 
# # If sub_hourly_post is set to "FALSE", then the workflow is hard-coded # (in the jinja model_config template file) to direct the forecast model - # to output files every hour. This is done by setting (1) output_fh to 1 + # to output files every hour. This is done by setting (1) output_fh to 1 # here, and (2) nsout to -1 here which turns off output by time step interval. # # Note that the approach used here of separating how hourly and subhourly @@ -220,36 +217,15 @@ def create_model_configure_file( # model_config_fp = os.path.join(run_dir, MODEL_CONFIG_FN) - with tempfile.NamedTemporaryFile(dir="./", - mode="w+t", - suffix=".yaml", - prefix="model_config_settings.") as tmpfile: - tmpfile.write(settings_str) - tmpfile.seek(0) - cmd = " ".join(["uw template render", - "-i", MODEL_CONFIG_TMPL_FP, - "-o", model_config_fp, - "-v", - "--values-file", tmpfile.name, - ] + render( + input_file = MODEL_CONFIG_TMPL_FP, + output_file = model_config_fp, + values_src = settings ) - indent = " " - output = "" - try: - output = check_output(cmd, encoding="utf=8", shell=True, - stderr=STDOUT, text=True) - except CalledProcessError as e: - output = e.output - print(f"Failed with status: {e.returncode}") - sys.exit(1) - finally: - print("Output:") - for line in output.split("\n"): - print(f"{indent * 2}{line}") return True -def parse_args(argv): +def _parse_args(argv): """Parse command line arguments""" parser = argparse.ArgumentParser(description="Creates model configuration file.") @@ -317,8 +293,8 @@ def parse_args(argv): if __name__ == "__main__": - args = parse_args(sys.argv[1:]) - cfg = load_shell_config(args.path_to_defns) + args = _parse_args(sys.argv[1:]) + cfg = load_yaml_config(args.path_to_defns) cfg = flatten_dict(cfg) import_vars(dictionary=cfg) create_model_configure_file( diff --git a/ush/create_ufs_configure_file.py b/ush/create_ufs_configure_file.py index 03de3e24c7..5bec56fb62 100644 --- a/ush/create_ufs_configure_file.py +++ b/ush/create_ufs_configure_file.py @@ -1,43 +1,76 @@ #!/usr/bin/env python3 """ -Function to create a UFS configuration file for the FV3 forecast -model(s) from a template. +Creates a UFS configuration file for the FV3 forecast model(s) from a template. 
""" import argparse import os import sys -import tempfile -from subprocess import STDOUT, CalledProcessError, check_output from textwrap import dedent +from uwtools.api.template import render from python_utils import ( - cfg_to_yaml_str, flatten_dict, - import_vars, - load_shell_config, + load_yaml_config, print_info_msg, - print_input_args, ) -def create_ufs_configure_file(run_dir): - """ Creates a ufs configuration file in the specified - run directory +def create_ufs_configure_file(run_dir,cfg): + """ Creates a UFS configuration file in the specified run directory Args: run_dir: run directory + cfg: dictionary of config settings Returns: - Boolean + True """ - print_input_args(locals()) - - #import all environment variables - import_vars() - # pylint: disable=undefined-variable + # Set necessary variables for each coupled configuration + + atm_end = str(int(cfg["PE_MEMBER01"]) - int(cfg["FIRE_NUM_TASKS"]) -1) + aqm_end = str(int(cfg["LAYOUT_X"]) * int(cfg["LAYOUT_Y"]) - 1) + fire_start = str(int(cfg["PE_MEMBER01"]) - int(cfg["FIRE_NUM_TASKS"])) + fire_end = str(int(cfg["PE_MEMBER01"]) - 1) + + atm_petlist_bounds = f'0 {atm_end}' + if cfg["CPL_AQM"]: + earth_component_list = 'ATM AQM' + aqm_petlist_bounds = f'0 {aqm_end}' + atm_omp_num_threads_line = '' + atm_diag_line = '' + runseq = [ f" @{cfg['DT_ATMOS']}\n", + " ATM phase1\n", + " ATM -> AQM\n", + " AQM\n", + " AQM -> ATM\n", + " ATM phase2\n", + " @" ] + elif cfg["UFS_FIRE"]: + earth_component_list = 'ATM FIRE' + atm_omp_num_threads_line = \ + f"\nATM_omp_num_threads: {cfg['OMP_NUM_THREADS_RUN_FCST']}" + atm_diag_line = '' + fire_petlist_bounds = f'{fire_start} {fire_end}' + runseq = [ f" @{cfg['DT_ATMOS']}\n", + " ATM -> FIRE\n", + " FIRE -> ATM :remapmethod=conserve\n", + " ATM\n", + " FIRE\n", + " @" ] + else: + earth_component_list = 'ATM' + atm_omp_num_threads_line = \ + f"\nATM_omp_num_threads: {cfg['OMP_NUM_THREADS_RUN_FCST']}" + atm_diag_line = ' Diagnostic = 0' + runseq = [ " ATM" ] + + if cfg["PRINT_ESMF"]: + logkindflag = 'ESMF_LOGKIND_MULTI' + else: + logkindflag = 'ESMF_LOGKIND_MULTI_ON_ERROR' # #----------------------------------------------------------------------- # @@ -46,13 +79,13 @@ def create_ufs_configure_file(run_dir): #----------------------------------------------------------------------- # print_info_msg(f''' - Creating a ufs.configure file (\"{UFS_CONFIG_FN}\") in the specified + Creating a ufs.configure file (\"{cfg["UFS_CONFIG_FN"]}\") in the specified run directory (run_dir): - run_dir = \"{run_dir}\"''', verbose=VERBOSE) + {run_dir=}''', verbose=cfg["VERBOSE"]) # # Set output file path # - ufs_config_fp = os.path.join(run_dir, UFS_CONFIG_FN) + ufs_config_fp = os.path.join(run_dir, cfg["UFS_CONFIG_FN"]) # #----------------------------------------------------------------------- # @@ -63,62 +96,49 @@ def create_ufs_configure_file(run_dir): #----------------------------------------------------------------------- # settings = { - "dt_atmos": DT_ATMOS, - "print_esmf": PRINT_ESMF, - "cpl_aqm": CPL_AQM + "ufs_fire": cfg["UFS_FIRE"], + "logKindFlag": logkindflag, + "EARTH_cl": earth_component_list, + "ATM_pb": atm_petlist_bounds, + "ATM_omp_num_threads_line": atm_omp_num_threads_line, + "ATM_diag_line": atm_diag_line, + "runseq": runseq, + "AQM_pb": "", + "FIRE_pb": "", + "dt_atmos": cfg["DT_ATMOS"], + "print_esmf": cfg["PRINT_ESMF"], + "cpl_aqm": cfg["CPL_AQM"] } - settings_str = cfg_to_yaml_str(settings) + if cfg["CPL_AQM"]: + settings["AQM_pb"] = aqm_petlist_bounds + if cfg["UFS_FIRE"]: + settings["FIRE_pb"] 
= fire_petlist_bounds print_info_msg( dedent( f""" - The variable \"settings\" specifying values to be used in the \"{UFS_CONFIG_FN}\" + The variable \"settings\" specifying values to be used in the \"{cfg["UFS_CONFIG_FN"]}\" file has been set as follows:\n - settings =\n\n""" - ) - + settings_str, - verbose=VERBOSE, + {settings=}\n\n""" + ), verbose=cfg["VERBOSE"] ) # #----------------------------------------------------------------------- # - # Call a python script to generate the experiment's actual UFS_CONFIG_FN - # file from the template file. + # Call the uwtools "render" function to fill in jinja expressions contained in the UFS Configure + # file template with the values from the settings variable to create ufs.configure file for this + # experiment # #----------------------------------------------------------------------- # - # Store the settings in a temporary file - with tempfile.NamedTemporaryFile(dir="./", - mode="w+t", - prefix="ufs_config_settings", - suffix=".yaml") as tmpfile: - tmpfile.write(settings_str) - tmpfile.seek(0) - - cmd = " ".join(["uw template render", - "-i", UFS_CONFIG_TMPL_FP, - "-o", ufs_config_fp, - "-v", - "--values-file", tmpfile.name, - ] + render( + input_file = cfg["UFS_CONFIG_TMPL_FP"], + output_file = ufs_config_fp, + values_src = settings, ) - - indent = " " - output = "" - try: - output = check_output(cmd, encoding="utf=8", shell=True, - stderr=STDOUT, text=True) - except CalledProcessError as e: - output = e.output - print(f"Failed with status: {e.returncode}") - sys.exit(1) - finally: - print("Output:") - for line in output.split("\n"): - print(f"{indent * 2}{line}") return True -def parse_args(argv): +def _parse_args(argv): """ Parse command line arguments""" parser = argparse.ArgumentParser( description='Creates UFS configuration file.' @@ -137,10 +157,7 @@ return parser.parse_args(argv) if __name__ == "__main__": - args = parse_args(sys.argv[1:]) - cfg = load_shell_config(args.path_to_defns) - cfg = flatten_dict(cfg) - import_vars(dictionary=cfg) - create_ufs_configure_file( - run_dir=args.run_dir, - ) + args = _parse_args(sys.argv[1:]) + conf = load_yaml_config(args.path_to_defns) + cfg = flatten_dict(conf) + create_ufs_configure_file(run_dir=args.run_dir, cfg=cfg) diff --git a/ush/eval_metplus_timestr_tmpl.py b/ush/eval_metplus_timestr_tmpl.py new file mode 100644 index 0000000000..205fee1593 --- /dev/null +++ b/ush/eval_metplus_timestr_tmpl.py @@ -0,0 +1,63 @@ +#!/usr/bin/env python3 +import argparse +import os +import sys +from datetime import datetime, timedelta +try: + sys.path.append(os.environ['METPLUS_ROOT']) +except KeyError: + print("\nERROR ERROR ERROR\n") + print("Environment variable METPLUS_ROOT must be set to use this script\n") + raise +from metplus.util import string_template_substitution as sts + +def eval_metplus_timestr_tmpl(init_time, lhr, time_lag, fn_template, verbose=False): + """ + Calls native METplus routine for evaluating filename templates + + Args: + init_time (str): Date string for initial time in YYYYMMDDHH[mmss] format, where minutes and + seconds are optional.
+ lhr (int): Lead hour (number of hours since init_time) + time_lag (int): Hours of time lag for a time-lagged ensemble member + fn_template (str): The METplus filename template for finding the files + verbose (bool): If True, print extra information about how the template is resolved + Returns: + str: The fully resolved filename based on the input parameters + """ + + if len(init_time) == 10: + initdate=datetime.strptime(init_time, '%Y%m%d%H') + elif len(init_time) == 12: + initdate=datetime.strptime(init_time, '%Y%m%d%H%M') + elif len(init_time) == 14: + initdate=datetime.strptime(init_time, '%Y%m%d%H%M%S') + else: + raise ValueError(f"Invalid {init_time=}; must be 10, 12, or 14 characters in length") + + validdate=initdate + timedelta(hours=lhr) + leadsec=lhr*3600 + # Evaluate the METplus timestring template for the current lead hour + if verbose: + print("Resolving METplus template for:") + print(f"{fn_template=}\ninit={initdate}\nvalid={validdate}\nlead={leadsec}\n{time_lag=}\n") + # Return the full path with templates resolved + return sts.do_string_sub(tmpl=fn_template,init=initdate,valid=validdate, + lead=leadsec,time_lag=time_lag) + +if __name__ == "__main__": + + parser = argparse.ArgumentParser( + description="Resolve a METplus filename template for the given initial time, lead hour, and time lag, then print the resulting filename.", + ) + parser.add_argument("-v", "--verbose", help="Verbose output", action="store_true") + parser.add_argument("-i", "--init_time", help="Initial date in YYYYMMDDHH[mmss] format", type=str, default='') + parser.add_argument("-l", "--lhr", help="Lead hour", type=int, required=True) + parser.add_argument("-tl", "--time_lag", help="Hours of time lag for a time-lagged ensemble member", type=int, default=0) + parser.add_argument("-ft", "--fn_template", help="Template for file names to search; see ??? for details on template settings", type=str, default='') + + args = parser.parse_args() + + filename = eval_metplus_timestr_tmpl(**vars(args)) + # If called from command line, we want to print the resolved filename + print(filename) diff --git a/ush/generate_FV3LAM_wflow.py b/ush/generate_FV3LAM_wflow.py index ec2b95c3f3..2b2f85c8a7 100755 --- a/ush/generate_FV3LAM_wflow.py +++ b/ush/generate_FV3LAM_wflow.py @@ -1,8 +1,8 @@ #!/usr/bin/env python3 """ -User interface to create an experiment directory consistent with the -user-defined config.yaml file. +User interface to create an experiment directory consistent with the user-defined YAML +configuration file.
""" # pylint: disable=invalid-name @@ -11,12 +11,15 @@ import logging import os import sys -from subprocess import STDOUT, CalledProcessError, check_output +from stat import S_IXUSR +from string import Template from textwrap import dedent from uwtools.api.config import get_nml_config, get_yaml_config, realize +from uwtools.api.template import render from python_utils import ( + list_to_str, log_info, import_vars, export_vars, @@ -24,7 +27,6 @@ ln_vrfy, mkdir_vrfy, mv_vrfy, - create_symlink_to_file, check_for_preexist_dir_file, cfg_to_yaml_str, find_pattern_in_str, @@ -39,13 +41,15 @@ # pylint: disable=too-many-locals,too-many-branches, too-many-statements def generate_FV3LAM_wflow( ushdir, + config: str = "config.yaml", logfile: str = "log.generate_FV3LAM_wflow", debug: bool = False) -> str: - """Function to setup a forecast experiment and create a workflow - (according to the parameters specified in the config file) + """ + Sets up a forecast experiment and creates a workflow (according to the parameters specified + in the configuration file) Args: - ushdir (str) : The full path of the ush/ directory where this script is located + ushdir (str) : The full path of the ``ush/`` directory where this script is located logfile (str) : The name of the file where logging is written debug (bool): Enable extra output for debugging Returns: @@ -68,12 +72,12 @@ def generate_FV3LAM_wflow( # The setup function reads the user configuration file and fills in # non-user-specified values from config_defaults.yaml - expt_config = setup(ushdir,debug=debug) + expt_config = setup(ushdir,user_config_fn=config,debug=debug) # # ----------------------------------------------------------------------- # - # Set the full path to the experiment's rocoto workflow xml file. This + # Set the full path to the experiment's rocoto workflow xml file. This # file will be placed at the top level of the experiment directory and # then used by rocoto to run the workflow. # @@ -90,7 +94,7 @@ def generate_FV3LAM_wflow( # Create a multiline variable that consists of a yaml-compliant string # specifying the values that the jinja variables in the template rocoto # XML should be set to. These values are set either in the user-specified - # workflow configuration file (EXPT_CONFIG_FN) or in the setup() function + # workflow configuration file (config) or in the setup() function # called above. Then call the python script that generates the XML. 
# # ----------------------------------------------------------------------- @@ -112,29 +116,11 @@ def generate_FV3LAM_wflow( # Call the python script to generate the experiment's XML file # rocoto_yaml_fp = expt_config["workflow"]["ROCOTO_YAML_FP"] - cmd = " ".join(["uw template render", - "-i", template_xml_fp, - "-o", wflow_xml_fp, - "-v", - "--values-file", rocoto_yaml_fp, - ] - ) - - indent = " " - output = "" - logfunc = logging.info - try: - output = check_output(cmd, encoding="utf=8", shell=True, - stderr=STDOUT, text=True) - except CalledProcessError as e: - logfunc = logging.error - output = e.output - logging.exception(("Failed with status: %s", e.returncode)) - raise - finally: - logfunc("Output:") - for line in output.split("\n"): - logfunc("%s%s", indent * 2, line) + render( + input_file = template_xml_fp, + output_file = wflow_xml_fp, + values_src = rocoto_yaml_fp, + ) # # ----------------------------------------------------------------------- # @@ -155,9 +141,23 @@ def generate_FV3LAM_wflow( verbose=debug, ) - create_symlink_to_file( - wflow_launch_script_fp, os.path.join(exptdir, wflow_launch_script_fn), False - ) + with open(wflow_launch_script_fp, "r", encoding='utf-8') as launch_script_file: + launch_script_content = launch_script_file.read() + + # Stage an experiment-specific launch file in the experiment directory + template = Template(launch_script_content) + + # The script needs several variables from the workflow and user sections + template_variables = {**expt_config["user"], **expt_config["workflow"], + "valid_vals_BOOLEAN": list_to_str(expt_config["constants"]["valid_vals_BOOLEAN"])} + launch_content = template.safe_substitute(template_variables) + + launch_fp = os.path.join(exptdir, wflow_launch_script_fn) + with open(launch_fp, "w", encoding='utf-8') as expt_launch_fn: + expt_launch_fn.write(launch_content) + + os.chmod(launch_fp, os.stat(launch_fp).st_mode|S_IXUSR) + # # ----------------------------------------------------------------------- # @@ -312,6 +312,9 @@ def generate_FV3LAM_wflow( npx = NX + 1 npy = NY + 1 # + # Set npz, which is just LEVP minus 1. + npz = LEVP - 1 + # # For the physics suites that use RUC LSM, set the parameter kice to 9, # Otherwise, leave it unspecified (which means it gets set to the default # value in the forecast model). @@ -380,6 +383,7 @@ def generate_FV3LAM_wflow( "npy": npy, "layout": [LAYOUT_X, LAYOUT_Y], "bc_update_interval": LBC_SPEC_INTVL_HRS, + "npz": npz, }) if CCPP_PHYS_SUITE == "FV3_GFS_v15p2": if CPL_AQM: @@ -443,8 +447,27 @@ def generate_FV3LAM_wflow( "vsvoo1:0.0", "vsvoo2:0.0", "vsvoo3:0.0", "vsvpo1:0.0", "vsvpo2:0.0", "vsvpo3:0.0", "xopn:0.0", "xylmn:0.0", "*:0.2" ] }) + + # If UFS_FIRE, activate appropriate flags and update FIELD_TABLE + if expt_config['fire'].get('UFS_FIRE'): + gfs_physics_nml_dict.update({ + "cpl_fire": True, + }) + field_table_append = """# smoke tracer for UFS_FIRE + "TRACER", "atmos_mod", "fsmoke" + "longname", "fire smoke" + "units", "kg/kg" + "profile_type", "fixed", "surface_value=0.0" /\n""" + + with open(FIELD_TABLE_FP, "a+", encoding='UTF-8') as file: + file.write(field_table_append) + settings["gfs_physics_nml"] = gfs_physics_nml_dict + # Update levp in external_ic_nml; this should be the only variable that needs changing + + settings["external_ic_nml"] = {"levp": LEVP} + # # Add to "settings" the values of those namelist variables that specify # the paths to fixed files in the FIXam directory. 
As above, these namelist @@ -657,7 +680,65 @@ def generate_FV3LAM_wflow( input_format="nml", output_file=FV3_NML_STOCH_FP, output_format="nml", - supplemental_configs=[settings], + update_config=get_nml_config(settings), + ) + # + #----------------------------------------------------------------------- + # + # Generate UFS_FIRE namelist if needed. Most variables in the &time section + # will be updated at the run_fcst step + # + #----------------------------------------------------------------------- + # + if expt_config['fire'].get('UFS_FIRE'): + logging.debug("Setting fire namelist values") + fire_nml_dict = {} + fire_nml_dict['atm'] = {} + fire_nml_dict['time'] = {} + fire_nml_dict['fire'] = {} + # Fill in &atm variables + fire_nml_dict['atm']['interval_atm'] = expt_config['task_run_fcst']['DT_ATMOS'] + fire_nml_dict['atm']['kde'] = expt_config['task_make_ics']['LEVP'] + # Fill in &fire and static &time variables + + # These settings must be handled specially below + each_ignit = ["FIRE_IGNITION_ROS", "FIRE_IGNITION_START_LAT", "FIRE_IGNITION_START_LON", + "FIRE_IGNITION_RADIUS", "FIRE_IGNITION_START_TIME", "FIRE_IGNITION_END_TIME", + "FIRE_IGNITION_END_LAT", "FIRE_IGNITION_END_LON"] + + # These settings do not get added to namelist, or are handled elsewhere + pass_settings = ["UFS_FIRE", "FIRE_INPUT_DIR", "FIRE_NUM_TASKS"] + pass_settings.extend(each_ignit) + + for setting in expt_config['fire']: + if setting in pass_settings: + pass + elif setting == "DT_FIRE": + fire_nml_dict['time']['dt'] = expt_config['fire'][setting] + elif setting == "OUTPUT_DT_FIRE": + fire_nml_dict['time']['interval_output'] = expt_config['fire'][setting] + else: + # For all other settings in config.yaml, convert to lowercase + # and enter into namelist.fire's &fire section + fire_nml_dict['fire'][setting.lower()] = expt_config['fire'][setting] + + # The variables specific to each ignition need special handling: SRW uses a list, but the + # fire model has these settings as separate namelist entries + for i in range(expt_config['fire']['FIRE_NUM_IGNITIONS']): + for setting in each_ignit: + # If not a list, convert to a 1-element list + if not isinstance(expt_config['fire'][setting], list): + expt_config['fire'][setting] = [ expt_config['fire'][setting] ] + + nmle = f"{setting.lower()}{i+1}" + fire_nml_dict['fire'][nmle] = expt_config['fire'][setting][i] + + realize( + input_config=expt_config['workflow']['FIRE_NML_BASE_FP'], + input_format="nml", + output_file=expt_config['workflow']['FIRE_NML_FP'], + output_format="nml", + update_config=get_nml_config(fire_nml_dict), ) # @@ -669,7 +750,7 @@ def generate_FV3LAM_wflow( # # ----------------------------------------------------------------------- # - cp_vrfy(os.path.join(ushdir, EXPT_CONFIG_FN), EXPTDIR) + cp_vrfy(os.path.join(ushdir, config), EXPTDIR) # # ----------------------------------------------------------------------- @@ -728,10 +809,16 @@ def generate_FV3LAM_wflow( def setup_logging(logfile: str = "log.generate_FV3LAM_wflow", debug: bool = False) -> None: """ - Sets up logging, printing high-priority (INFO and higher) messages to screen, and printing all - messages with detailed timing and routine info in the specified text file. + Sets up logging, printing high-priority (INFO and higher) messages to screen and printing all + messages with detailed timing and routine info in the specified text file. If ``debug = True``, + print all messages to both screen and log file. 
+ + Args: + logfile (str) : The name of the file where logging information is written + debug (bool): Enable extra output for debugging + Returns: + None - If debug = True, print all messages to both screen and log file. """ logging.getLogger().setLevel(logging.DEBUG) @@ -765,6 +852,8 @@ def setup_logging(logfile: str = "log.generate_FV3LAM_wflow", debug: bool = Fals description="Script for setting up a forecast and creating a workflow"\ "according to the parameters specified in the config file\n") + parser.add_argument('-c', '--config', default='config.yaml', + help='Name of experiment config file in YAML format') parser.add_argument('-d', '--debug', action='store_true', help='Script will be run in debug mode with more verbose output') pargs = parser.parse_args() @@ -775,7 +864,7 @@ def setup_logging(logfile: str = "log.generate_FV3LAM_wflow", debug: bool = Fals # Call the generate_FV3LAM_wflow function defined above to generate the # experiment/workflow. try: - expt_dir = generate_FV3LAM_wflow(USHdir, wflow_logfile, pargs.debug) + expt_dir = generate_FV3LAM_wflow(USHdir, pargs.config, wflow_logfile, pargs.debug) except: # pylint: disable=bare-except logging.exception( dedent( diff --git a/ush/get_crontab_contents.py b/ush/get_crontab_contents.py index 6b0548141c..82bb350a0e 100644 --- a/ush/get_crontab_contents.py +++ b/ush/get_crontab_contents.py @@ -15,30 +15,19 @@ def get_crontab_contents(called_from_cron, machine, debug): """ This function returns the contents of the user's cron table, as well as the command used to manipulate the cron table. Typically this latter value will be `crontab`, but on some - platforms the version or location of this may change depending on other circumstances, e.g. on - Cheyenne, this depends on whether a script that wants to call `crontab` is itself being called - from a cron job. + platforms the version or location of this may change depending on other circumstances. Args: - called_from_cron (bool): Set this to True if script is called from within a crontab + called_from_cron (bool): Set this value to ``True`` if script is called from within a + crontab machine (str) : The name of the current machine - debug (bool): True will give more verbose output + debug (bool): ``True`` will give more verbose output Returns: crontab_cmd (str) : String containing the "crontab" command for this machine crontab_contents (str) : String containing the contents of the user's cron table. """ - # - # On Cheyenne, simply typing "crontab" will launch the crontab command - # at "/glade/u/apps/ch/opt/usr/bin/crontab". This is a containerized - # version of crontab that will not work if called from scripts that are - # themselves being called as cron jobs. In that case, we must instead - # call the system version of crontab at /usr/bin/crontab. - # crontab_cmd = "crontab" - if machine == "CHEYENNE": - if called_from_cron: - crontab_cmd = "/usr/bin/crontab" print_info_msg( f""" @@ -69,8 +58,17 @@ def get_crontab_contents(called_from_cron, machine, debug): return crontab_cmd, crontab_contents -def add_crontab_line(called_from_cron, machine, crontab_line, exptdir, debug): - """Add crontab line to cron table""" +def add_crontab_line(called_from_cron, machine, crontab_line, exptdir, debug) -> None: + """Adds crontab line to cron table + + Args: + called_from_cron (bool): Set this value to ``True`` if script is called from within + a crontab. 
+ machine (str) : The name of the current machine + crontab_line (str) : Line to be added to cron table + exptdir (str) : Path to the experiment directory + debug (bool): ``True`` will give more verbose output + """ # # Make a backup copy of the user's crontab file and save it in a file. @@ -135,9 +133,17 @@ def add_crontab_line(called_from_cron, machine, crontab_line, exptdir, debug): ) -def delete_crontab_line(called_from_cron, machine, crontab_line, debug): - """Delete crontab line after job is complete i.e. either SUCCESS/FAILURE - but not IN PROGRESS status""" +def delete_crontab_line(called_from_cron, machine, crontab_line, debug) -> None: + """Deletes crontab line after job is complete i.e., either SUCCESS/FAILURE + but not IN PROGRESS status + + Args: + called_from_cron (bool): Set this value to ``True`` if script is called from within + a crontab + machine (str) : The name of the current machine + crontab_line (str) : Line to be deleted from cron table + debug (bool): ``True`` will give more verbose output + """ # # Get the full contents of the user's cron table. @@ -162,7 +168,7 @@ def delete_crontab_line(called_from_cron, machine, crontab_line, debug): crontab_contents = crontab_contents.replace(crontab_line + "\n", "") crontab_contents = crontab_contents.replace(crontab_line, "") else: - print(f"\nWARNING: line not found in crontab, nothing to remove:\n {crontab_line}\n") + print(f"\nWARNING: line not found in crontab, nothing to remove:\n{crontab_line}\n") run_command(f"""echo '{crontab_contents}' | {crontab_cmd}""") @@ -176,7 +182,7 @@ def delete_crontab_line(called_from_cron, machine, crontab_line, debug): ) -def parse_args(argv): +def _parse_args(argv): """Parse command line arguments for deleting crontab line. This is needed because it is called from shell script. If 'delete' argument is not passed, print the crontab contents @@ -228,7 +234,7 @@ def parse_args(argv): if __name__ == "__main__": - args = parse_args(sys.argv[1:]) + args = _parse_args(sys.argv[1:]) if args.remove: delete_crontab_line(args.called_from_cron,args.machine,args.line,args.debug) else: diff --git a/ush/get_mrms_files.sh b/ush/get_mrms_files.sh index b669094488..65a99cc1bd 100644 --- a/ush/get_mrms_files.sh +++ b/ush/get_mrms_files.sh @@ -54,7 +54,7 @@ function get_mrms_files () { # 10 represents a significant number of vertical levels of data if [ ${numgrib2} -ge 10 ] && [ ! 
-e filelist_mrms ]; then - cp_vrfy ${nsslfile1} ${output_path} + cp ${nsslfile1} ${output_path} ls ${output_path}/${file_matches} > ${output_path}/filelist_mrms echo "Copying mrms files for ${YYYY}${MM}${DD}-${cyc}${min}" fi diff --git a/ush/get_obs.py b/ush/get_obs.py new file mode 100644 index 0000000000..da88856575 --- /dev/null +++ b/ush/get_obs.py @@ -0,0 +1,961 @@ +#!/usr/bin/env python3 + +import os +import sys +import shutil +import argparse +import logging +from pathlib import Path +import datetime as dt +from textwrap import dedent +from pprint import pprint +from math import ceil, floor +import subprocess +import retrieve_data +from python_utils import ( + load_yaml_config, +) +from mrms_pull_topofhour import mrms_pull_topofhour +try: + sys.path.append(os.environ['METPLUS_ROOT']) +except: + print("\nERROR ERROR ERROR\n") + print("Environment variable METPLUS_ROOT must be set to use this script\n") + raise +from metplus.util import string_template_substitution as sts + + +def get_obs_arcv_hr(obtype, arcv_intvl_hrs, hod): + """ + This file defines a function that, for the given observation type, obs + archive interval, and hour of day, returns the hour (counting from hour + zero of the day) corresponding to the archive file in which the obs file + for the given hour of day is included. + + Note that for cumulative fields (like CCPA and NOHRSC, as opposed to + instantaneous ones like MRMS and NDAS), the archive files corresponding + to hour 0 of the day represent accumulations over the previous day. Thus, + here, we never return an archive hour of 0 for cumulative fields. Instead, + if the specified hour-of-day is 0, we consider that to represent the 0th + hour of the NEXT day (i.e. the 24th hour of the current day) and set the + archive hour to 24. + + Args: + obtype (str): + The observation type. + + arcv_intvl_hrs (int): + Time interval (in hours) between archive files. For example, if the obs + files are bundled into 6-hourly archives, then this will be set to 6. This + must be between 1 and 24 and must divide evenly into 24. + + hod (int): + The hour of the day. This must be between 0 and 23. For cumulative fields + (CCPA and NOHRSC), hour 0 is treated as that of the next day, i.e. as the + 24th hour of the current day. + + Returns: + arcv_hr (int): + The hour since the start of day corresponding to the archive file containing + the obs file for the given hour of day. + """ + + valid_obtypes = ['CCPA', 'NOHRSC', 'MRMS', 'NDAS'] + if obtype not in valid_obtypes: + msg = dedent(f""" + The specified observation type is not supported: + {obtype = } + Valid observation types are: + {valid_obtypes} + """) + logging.error(msg) + raise ValueError(msg) + + # Ensure that the archive interval divides evenly into 24 hours. + remainder = 24 % arcv_intvl_hrs + if remainder != 0: + msg = dedent(f""" + The archive interval for obs of type {obtype} must divide evenly into 24 + but doesn't: + {arcv_intvl_hrs = } + 24 % arcv_intvl_hrs = {remainder} + """) + logging.error(msg) + raise ValueError(msg) + + if (hod < 0) or (hod > 23): + msg = dedent(f""" + The specified hour-of-day must be between 0 and 23, inclusive, but isn't: + {hod = } + """) + logging.error(msg) + raise ValueError(msg) + + # Set the archive hour. This depends on the obs type because each obs + # type can organize its observation files into archives in a different + # way, e.g. 
a cumulative obs type may put the obs files for hours 1 + # through 6 of the day in the archive labeled with hour 6 while an + # instantaneous obs type may put the obs files for hours 0 through 5 of + # the day in the archive labeled with hour 6. + if obtype in ['CCPA']: + if hod == 0: + arcv_hr = 24 + else: + arcv_hr = ceil(hod/arcv_intvl_hrs)*arcv_intvl_hrs + elif obtype in ['NOHRSC']: + if hod == 0: + arcv_hr = 24 + else: + arcv_hr = floor(hod/arcv_intvl_hrs)*arcv_intvl_hrs + elif obtype in ['MRMS']: + arcv_hr = (floor(hod/arcv_intvl_hrs))*arcv_intvl_hrs + elif obtype in ['NDAS']: + arcv_hr = (floor(hod/arcv_intvl_hrs) + 1)*arcv_intvl_hrs + + return arcv_hr + + +def get_obs(config, obtype, yyyymmdd_task): + """ + This script checks for the existence of obs files of the specified type + at the locations specified by variables in the SRW App's configuration + file. If one or more of these files do not exist, it retrieves them from + a data store (using the ``retrieve_data.py`` script and as specified by the + configuration file ``parm/data_locations.yml`` for that script) and places + them in the locations specified by the App's configuration variables, + renaming them if necessary. + + Args: + config (dict): + The final configuration dictionary (obtained from ``var_defns.yaml``). + + obtype (str): + The observation type. + + yyyymmdd_task (datetime.datetime): + The date for which obs may be needed. + + Returns: + True (bool): + If all goes well. + + + Detailed Description: + + In this script, the main (outer) loop to obtain obs files is over a + sequence of archive hours, where each archive hour in the sequence + represents one archive (tar) file in the data store, and archive hours + are with respect to hour 0 of the day. The number of archive hours in + this sequence depends on how the obs files are arranged into archives + for the given obs type. For example, if the obs files for a given day + are arranged into four archives, then the archive interval is 6 hours, + and in order to get all the obs files for that day, the loop must + iterate over a sequence of 4 hours, either [0, 6, 12, 18] or [6, 12, + 18, 24] (which of these it will be depends on how the obs files are + arranged into the archives). + + Below, we give a description of archive layout for each obs type and + give the archive hours to loop over for the case in which we need to + obtain all available obs for the current day. + + + CCPA (Climatology-Calibrated Precipitation Analysis) precipitation accumulation obs + ----------------------------------------------------------------------------------- + For CCPA, the archive interval is 6 hours, i.e. the obs files are bundled + into 6-hourly archives. The archives are organized such that each one + contains 6 files, so that the obs availability interval is + + .. math:: + + \\begin{align*} + \\qquad \\text{obs_avail_intvl_hrs} + & = (\\text{24 hrs})/[(\\text{4 archives}) \\times (\\text{6 files/archive})] \\hspace{50in} \\\\ + & = \\text{1 hr/file} + \\end{align*} + + i.e. there is one obs file for each hour of the day containing the + accumulation over that one hour. The archive corresponding to hour 0 + of the current day contains 6 files representing accumulations during + the 6 hours of the previous day. 
The archive corresponding to hour 6 + of the current day contains 6 files for the accumulations during the + first 6 hours of the current day, and the archives corresponding to + hours 12 and 18 of the current day each contain 6 files for accumulations + during hours 6-12 and 12-18, respectively, of the current day. Thus, + to obtain all the one-hour accumulations for the current day, we must + extract all the obs files from the three archives corresponding to hours + 6, 12, and 18 of the current day and from the archive corresponding to + hour 0 of the next day. This corresponds to an archive hour sequence + of [6, 12, 18, 24]. Thus, in the simplest case in which the observation + retrieval times include all hours of the current task's day at which + obs files are available and none of the obs files for this day already + exist on disk, this sequence will be [6, 12, 18, 24]. In other cases, + the sequence we loop over will be a subset of [6, 12, 18, 24]. + + Note that CCPA files for 1-hour accumulation have incorrect metadata in + the files under the "00" directory (i.e. for hours-of-day 19 to 00 of + the next day) from 20180718 to 20210504. This script corrects these + errors if getting CCPA obs at these times. + + + NOHRSC (National Operational Hydrologic Remote Sensing Center) snow accumulation observations + --------------------------------------------------------------------------------------------- + For NOHRSC, the archive interval is 24 hours, i.e. the obs files are + bundled into 24-hourly archives. The archives are organized such that + each one contains 4 files, so that the obs availability interval is + + .. math:: + + \\begin{align*} + \\qquad \\text{obs_avail_intvl_hrs} + & = (\\text{24 hrs})/[(\\text{1 archive}) \\times (\\text{4 files/archive})] \\hspace{50in} \\\\ + & = \\text{6 hr/file} + \\end{align*} + + i.e. there is one obs file for each 6-hour interval of the day containing + the accumulation over those 6 hours. The 4 obs files within each archive + correspond to hours 0, 6, 12, and 18 of the current day. The obs file + for hour 0 contains accumulations during the last 6 hours of the previous + day, while those for hours 6, 12, and 18 contain accumulations for the + first, second, and third 6-hour chunks of the current day. Thus, to + obtain all the 6-hour accumulations for the current day, we must extract + from the archive for the current day the obs files for hours 6, 12, and + 18 and from the archive for the next day the obs file for hour 0. This + corresponds to an archive hour sequence of [0, 24]. Thus, in the simplest + case in which the observation retrieval times include all hours of the + current task's day at which obs files are available and none of the obs + files for this day already exist on disk, this sequence will be [0, 24]. + In other cases, the sequence we loop over will be a subset of [0, 24]. + + + MRMS (Multi-Radar Multi-Sensor) radar observations + -------------------------------------------------- + For MRMS, the archive interval is 24 hours, i.e. the obs files are + bundled into 24-hourly archives. The archives are organized such that + each contains gzipped grib2 files for that day that are usually only a + few minutes apart. However, since the forecasts cannot (yet) perform + sub-hourly output, we filter this data in time by using only those obs + files that are closest to each hour of the day for which obs are needed. + This effectively sets the obs availability interval for MRMS to one + hour, i.e. + + .. 
math::
+
+        \\begin{align*}
+            \\qquad \\text{obs_avail_intvl_hrs}
+            & = \\text{1 hr/file} \\hspace{50in} \\\\
+        \\end{align*}
+
+    i.e. there is one obs file for each hour of the day containing values
+    at that hour (but only after filtering in time; also see notes for
+    ``MRMS_OBS_AVAIL_INTVL_HRS`` in ``config_defaults.yaml``).  Thus, to
+    obtain the obs at all hours of the day, we only need to extract files
+    from one archive.  Hence, in the simplest case in which the observation
+    retrieval times include all hours of the current task's day at which obs
+    files are available and none of the obs files for this day already exist
+    on disk, the sequence of archive hours over which we loop will be just
+    [0].  Note that:
+
+    * For cases in which MRMS data are not needed for all hours of the day,
+      we still need to retrieve and extract from this single daily archive.
+      Thus, the archive hour sequence we loop over will always be just [0]
+      for MRMS obs.
+
+    * Because MRMS obs are split into two sets of archives -- one for
+      composite reflectivity (REFC) and another for echo top (RETOP) --
+      on any given day (and with an archive hour of 0) we actually retrieve
+      and extract two different archive files (one per field).
+
+
+    NDAS (NAM Data Assimilation System) conventional observations
+    -------------------------------------------------------------
+    For NDAS, the archive interval is 6 hours, i.e. the obs files are
+    bundled into 6-hourly archives.  The archives are organized such that
+    each one contains 7 files (not 6).  The archive associated with
+    time yyyymmddhh_arcv contains the hourly files at
+
+    | yyyymmddhh_arcv - 6 hours
+    | yyyymmddhh_arcv - 5 hours
+    | ...
+    | yyyymmddhh_arcv - 2 hours
+    | yyyymmddhh_arcv - 1 hour
+    | yyyymmddhh_arcv - 0 hours
+
+    These are known as the tm06, tm05, ..., tm02, tm01, and tm00 files,
+    respectively.  Thus, the tm06 file from the current archive, say the
+    one associated with time yyyymmddhh_arcv, has the same valid time as
+    the tm00 file from the previous archive, i.e. the one associated with
+    time (yyyymmddhh_arcv - 6 hours).  It turns out that the tm06 file from
+    the current archive contains more/better observations than the tm00
+    file from the previous archive.  Thus, for a given archive time
+    yyyymmddhh_arcv, we use 6 of the 7 files at tm06, ..., tm01 but not
+    the one at tm00, effectively resulting in 6 files per archive for NDAS
+    obs.  The obs availability interval is then
+
+    .. math::
+
+        \\begin{align*}
+            \\qquad \\text{obs_avail_intvl_hrs}
+            & = (\\text{24 hrs})/[(\\text{4 archives}) \\times (\\text{6 files/archive})] \\hspace{50in} \\\\
+            & = \\text{1 hr/file}
+        \\end{align*}
+
+    i.e. there is one obs file for each hour of the day containing values
+    at that hour.  The archive corresponding to hour 0 of the current day
+    contains 6 files valid at hours 18 through 23 of the previous day.  The
+    archive corresponding to hour 6 of the current day contains 6 files
+    valid at hours 0 through 5 of the current day, and the archives
+    corresponding to hours 12 and 18 of the current day each contain 6
+    files valid at hours 6 through 11 and 12 through 17 of the current day.
+    Thus, to obtain all the hourly values for the current day (from hour
+    0 to hour 23), we must extract the 6 obs files (excluding the tm00
+    ones) from the three archives corresponding to hours 6, 12, and 18 of
+    the current day and the archive corresponding to hour 0 of the next
+    day.  This corresponds to the archive hour sequence [6, 12, 18, 24]
+    set below.  Thus, in the simplest case in which the observation retrieval
+    times include all hours of the current task's day at which obs files
+    are available and none of the obs files for this day already exist on
+    disk, this sequence will be [6, 12, 18, 24].  In other cases, the
+    sequence we loop over will be a subset of [6, 12, 18, 24].
+    """
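As a quick sanity check of the archive-hour rules described above, a minimal sketch (illustrative only; it assumes ush/ is on sys.path so that get_obs_arcv_hr can be imported):

    from get_obs import get_obs_arcv_hr

    # CCPA: 6-hourly archives; hour-of-day h maps to ceil(h/6)*6,
    # with hour 0 treated as hour 24 of the current day.
    assert get_obs_arcv_hr('CCPA', 6, 0) == 24
    assert get_obs_arcv_hr('CCPA', 6, 7) == 12

    # NOHRSC: daily archives; nonzero hours map to archive hour 0,
    # while hour 0 again maps to 24 (cumulative obs).
    assert get_obs_arcv_hr('NOHRSC', 24, 0) == 24
    assert get_obs_arcv_hr('NOHRSC', 24, 18) == 0

    # MRMS: daily archives; every hour maps to archive hour 0.
    assert get_obs_arcv_hr('MRMS', 24, 13) == 0

    # NDAS: the archive at hour a holds files valid at a-6 .. a-1,
    # so hour-of-day h maps to (floor(h/6) + 1)*6.
    assert get_obs_arcv_hr('NDAS', 6, 0) == 6
    assert get_obs_arcv_hr('NDAS', 6, 23) == 24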
+
+    # Convert obtype to upper case to simplify code below.
+    obtype = obtype.upper()
+
+    # For convenience, get the verification portion of the configuration
+    # dictionary.
+    vx_config = config['verification']
+
+    # Get the time interval (in hours) at which the obs are available.
+    obs_avail_intvl_hrs = vx_config[f'{obtype}_OBS_AVAIL_INTVL_HRS']
+
+    # The obs availability interval must divide evenly into 24 hours.  Otherwise,
+    # different days would have obs available at different hours-of-day.  Make
+    # sure this is the case.
+    remainder = 24 % obs_avail_intvl_hrs
+    if remainder != 0:
+        msg = dedent(f"""
+            The obs availability interval for obs of type {obtype} must divide evenly
+            into 24 but doesn't:
+              {obs_avail_intvl_hrs = }
+              24 % obs_avail_intvl_hrs = {remainder}
+            """)
+        logging.error(msg)
+        raise ValueError(msg)
+
+    # For convenience, convert the obs availability interval to a datetime
+    # object.
+    obs_avail_intvl = dt.timedelta(hours=obs_avail_intvl_hrs)
+
+    # Get the base directory for the observations.
+    obs_dir = vx_config[f'{obtype}_OBS_DIR']
+
+    # Get from the verification configuration dictionary the list of METplus
+    # file name template(s) corresponding to the obs type.
+    obs_fn_templates = vx_config[f'OBS_{obtype}_FN_TEMPLATES']
+
+    # Note that the list obs_fn_templates consists of pairs of elements such
+    # that the first element of the pair represents the verification field
+    # group(s) for which an obs file name template will be needed and the
+    # second element is the template itself.  For convenience, convert this
+    # information to a dictionary in which the field groups are the keys and
+    # the templates are the values.
+    #
+    # Note:
+    # Once the ex-scripts for the vx tasks are converted from bash to python,
+    # the lists in the SRW App's configuration file containing the METplus
+    # obs file name template(s) (from which the variable obs_fn_templates
+    # was obtained above) can be converted to python dictionaries.  Then the
+    # list-to-dictionary conversion step here will no longer be needed.
+    obs_fn_templates_by_fg = dict()
+    for i in range(0, len(obs_fn_templates), 2):
+        obs_fn_templates_by_fg[obs_fn_templates[i]] = obs_fn_templates[i+1]
+
+    # For convenience, get the list of verification field groups for which
+    # the various obs file templates will be used.
+    field_groups_in_obs = obs_fn_templates_by_fg.keys()
+    #
+    #-----------------------------------------------------------------------
+    #
+    # Set variables that are only needed for some obs types.
+    #
+    #-----------------------------------------------------------------------
+    #
+
+    # For cumulative obs, set the accumulation period to use when getting obs
+    # files.  This is simply a properly formatted version of the obs availability
+    # interval.
+    accum_obs_formatted = None
+    if obtype == 'CCPA':
+        accum_obs_formatted = f'{obs_avail_intvl_hrs:02d}'
+    elif obtype == 'NOHRSC':
+        accum_obs_formatted = f'{obs_avail_intvl_hrs:d}'
+
+    # For MRMS obs, set field-dependent parameters needed in forming grib2
+    # file names.
+ mrms_fields_in_obs_filenames = [] + mrms_levels_in_obs_filenames = [] + if obtype == 'MRMS': + for fg in field_groups_in_obs: + if fg == 'REFC': + mrms_fields_in_obs_filenames.append('MergedReflectivityQCComposite') + mrms_levels_in_obs_filenames.append('00.50') + elif fg == 'RETOP': + mrms_fields_in_obs_filenames.append('EchoTop') + mrms_levels_in_obs_filenames.append('18_00.50') + else: + msg = dedent(f""" + Field and level names have not been specified for this {obtype} field + group: + {obtype = } + {fg = } + """) + logging.error(msg) + raise ValueError(msg) + + # CCPA files for 1-hour accumulation have incorrect metadata in the files + # under the "00" directory from 20180718 to 20210504. Set these starting + # and ending dates as datetime objects for later use. + ccpa_bad_metadata_start = dt.datetime.strptime('20180718', '%Y%m%d') + ccpa_bad_metadata_end = dt.datetime.strptime('20210504', '%Y%m%d') + + # + #----------------------------------------------------------------------- + # + # Form a string list of all the times in the current day (each in the + # format "YYYYMMDDHH") at which to retrieve obs. + # + #----------------------------------------------------------------------- + # + yyyymmdd_task_str = dt.datetime.strftime(yyyymmdd_task, '%Y%m%d') + obs_retrieve_times_crnt_day_str = vx_config[f'OBS_RETRIEVE_TIMES_{obtype}_{yyyymmdd_task_str}'] + obs_retrieve_times_crnt_day \ + = [dt.datetime.strptime(yyyymmddhh_str, '%Y%m%d%H') for yyyymmddhh_str in obs_retrieve_times_crnt_day_str] + # + #----------------------------------------------------------------------- + # + # Obs files will be obtained by extracting them from the relevant n-hourly + # archives, where n is the archive interval in hours (denoted below by the + # variable arcv_intvl_hrs). Thus, we must first obtain the sequence of + # hours (since hour 0 of the task day) corresponding to the archive files + # from which we must extract obs files. We refer to this as the sequence + # of archive hours. + # + # To generate this sequence, we first set the archive interval and then + # set the starting and ending archive hour values. + # + # + #----------------------------------------------------------------------- + # + if obtype == 'CCPA': + arcv_intvl_hrs = 6 + elif obtype == 'NOHRSC': + arcv_intvl_hrs = 24 + elif obtype == 'MRMS': + arcv_intvl_hrs = 24 + elif obtype == 'NDAS': + arcv_intvl_hrs = 6 + arcv_intvl = dt.timedelta(hours=arcv_intvl_hrs) + + # Number of obs files within each archive. + num_obs_files_per_arcv = int(arcv_intvl/obs_avail_intvl) + + # Initial guess for starting archive hour. This is set to the archive + # hour containing obs at the first obs retrieval time of the day. + arcv_hr_start = get_obs_arcv_hr(obtype, arcv_intvl_hrs, obs_retrieve_times_crnt_day[0].hour) + + # Ending archive hour. This is set to the archive hour containing obs at + # the last obs retrieval time of the day. + arcv_hr_end = get_obs_arcv_hr(obtype, arcv_intvl_hrs, obs_retrieve_times_crnt_day[-1].hour) + + # Set other variables needed below when evaluating the METplus template for + # the full path to the processed observation files. + ushdir = config['user']['USHdir'] + + # Create dictionary containing the paths to all the processed obs files + # that should exist once this script successfully completes. In this + # dictionary, the keys are the field groups, and the values are lists of + # paths. 
Here, by "paths to processed files" we mean the paths after any + # renaming and rearrangement of files that this script may do to the "raw" + # files, i.e. the files as they are named and arranged within the archive + # (tar) files on HPSS. + all_fp_proc_dict = {} + for fg, fn_proc_tmpl in obs_fn_templates_by_fg.items(): + fp_proc_tmpl = os.path.join(obs_dir, fn_proc_tmpl) + all_fp_proc_dict[fg] = [] + for yyyymmddhh in obs_retrieve_times_crnt_day: + # Set the lead time, a timedelta object from the beginning of the + # day at which the file is valid. + leadtime = yyyymmddhh - yyyymmdd_task + # Call METplus subroutine to evaluate the template for the full path to + # the file containing METplus timestrings at the current time. + fn = sts.do_string_sub(tmpl=fp_proc_tmpl,init=yyyymmdd_task,valid=yyyymmddhh, + lead=leadtime.total_seconds()) + all_fp_proc_dict[fg].append(fn) + + # Check whether any obs files already exist on disk in their processed + # (i.e. final) locations. If so, adjust the starting archive hour. In + # the process, keep a count of the number of obs files that already exist + # on disk. + num_existing_files = 0 + do_break = False + for fg in field_groups_in_obs: + for yyyymmddhh, fp_proc in zip(obs_retrieve_times_crnt_day, all_fp_proc_dict[fg]): + # Check whether the processed file already exists. + if os.path.isfile(fp_proc): + num_existing_files += 1 + msg = dedent(f""" + File already exists on disk: + {fp_proc = } + """) + logging.debug(msg) + else: + arcv_hr_start = get_obs_arcv_hr(obtype, arcv_intvl_hrs, yyyymmddhh.hour) + msg = dedent(f""" + File does not exist on disk: + {fp_proc = } + Setting the hour (since hour 0 of the current task day) of the first + archive to retrieve to: + {arcv_hr_start = } + """) + logging.info(msg) + do_break = True + break + if do_break: break + + # If the number of obs files that already exist on disk is equal to the + # number of obs files needed, then there is no need to retrieve any files. + # The number of obs files needed (i.e. that need to be staged) is equal + # to the number of times in the current day that obs are needed times the + # number of sets of files that the current obs type contains. + num_files_needed = len(obs_retrieve_times_crnt_day)*len(obs_fn_templates_by_fg) + if num_existing_files == num_files_needed: + + msg = dedent(f""" + All obs files needed for the current day (yyyymmdd_task) already exist + on disk: + {yyyymmdd_task = } + Thus, there is no need to retrieve any files. + """) + logging.info(msg) + return True + + # If the number of obs files that already exist on disk is not equal to + # the number of obs files needed, then we will need to retrieve files. + # In this case, set the sequence of hours corresponding to the archives + # from which files will be retrieved. + arcv_hrs = [hr for hr in range(arcv_hr_start, arcv_hr_end+arcv_intvl_hrs, arcv_intvl_hrs)] + msg = dedent(f""" + At least some obs files needed for the current day (yyyymmdd_task) + do not exist on disk: + {yyyymmdd_task = } + The number of obs files needed for the current day is: + {num_files_needed = } + The number of obs files that already exist on disk is: + {num_existing_files = } + Will retrieve remaining files by looping over archives corresponding to + the following hours (since hour 0 of the current day): + {arcv_hrs = } + """) + logging.info(msg) + # + #----------------------------------------------------------------------- + # + # At this point, at least some obs files for the current day need to be + # retrieved. 
Thus, loop over the relevant archives that contain obs for
+    # the day given by yyyymmdd_task and retrieve files as needed.
+    #
+    # Note that the NOHRSC data on HPSS are archived by day, with the archive
+    # for a given day containing 6-hour as well as 24-hour grib2 files.  As
+    # described above, the four 6-hour files are for accumulated snowfall at
+    # hour 0 of the current day (which represents accumulation over the last
+    # 6 hours of the previous day) as well as hours 6, 12, and 18, while the
+    # two 24-hour files are at hour 0 (which represents accumulation over all
+    # 24 hours of the previous day) and 12 (which represents accumulation over
+    # the last 12 hours of the previous day plus the first 12 hours of the
+    # current day).  Here, we will only obtain the 6-hour files.  In other
+    # workflow tasks, the values in these 6-hour files will be added as
+    # necessary to obtain accumulations over longer periods (e.g. 24 hours).
+    # Since the four 6-hour files are in one archive and are relatively small
+    # (on the order of kilobytes), we get them all with a single call to the
+    # retrieve_data.py script.
+    #
+    #-----------------------------------------------------------------------
+    #
+
+    # Whether to remove raw observations after processed directories have
+    # been created from them.
+    remove_raw_obs = vx_config[f'REMOVE_RAW_OBS_{obtype}']
+
+    # Base directory that will contain the archive subdirectories in which
+    # the files extracted from each archive (tar) file will be placed.  We
+    # refer to this as the "raw" base directory because it contains files
+    # as they are found in the archives before any processing by this script.
+    basedir_raw = os.path.join(obs_dir, 'raw_' + yyyymmdd_task_str)
+
+    for arcv_hr in arcv_hrs:
+
+        msg = dedent(f"""
+            Processing archive hour {arcv_hr} ...
+            """)
+        logging.info(msg)
+
+        # Calculate the time information for the current archive.
+        yyyymmddhh_arcv = yyyymmdd_task + dt.timedelta(hours=arcv_hr)
+        yyyymmddhh_arcv_str = dt.datetime.strftime(yyyymmddhh_arcv, '%Y%m%d%H')
+        yyyymmdd_arcv_str = dt.datetime.strftime(yyyymmddhh_arcv, '%Y%m%d')
+
+        # Set the subdirectory under the raw base directory that will contain the
+        # files retrieved from the current archive.  We refer to this as the "raw"
+        # archive subdirectory because it will contain the files as they are in
+        # the archive before any processing by this script.  Later below, this
+        # will be combined with the raw base directory (whose name depends on the
+        # year, month, and day of the current obs day) to obtain the full path to
+        # the raw archive directory (arcv_dir_raw).
+        #
+        # Notes on each obs type:
+        #
+        # CCPA:
+        # The raw subdirectory name must include the year, month, day, and hour
+        # in order to avoid get_obs tasks for different days clobbering each
+        # others' obs files.
+        #
+        # NOHRSC:
+        # The hour-of-day of the archive is irrelevant because there is only one
+        # archive per day, so we don't include it in the raw archive subdirectory's
+        # name.  However, we still need a subdirectory that contains the year,
+        # month, and day information of the archive because in the simplest case
+        # of having to get the NOHRSC obs for all hours of the current obs day,
+        # we need to extract obs files from two archives -- one for the current
+        # day (which includes the files for accumulations over hours 0-6, 6-12,
+        # and 12-18 of the current day) and another for the next day (which
+        # includes the file for accumulations over hours 18-24 of the current
+        # day).  To distinguish between the raw obs files from these two archives,
+        # we create an archive-time dependent raw subdirectory for each possible
+        # archive.
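        # As a concrete illustration (dates hypothetical): for obs day 20230615,
        # the resulting raw directories would be
        #
        #   CCPA:   raw_20230615/2023061506, .../2023061512, .../2023061518,
        #           and .../2023061600
        #   NOHRSC: raw_20230615/20230615 and raw_20230615/20230616
        #   MRMS:   raw_20230615 (no archive subdirectory)
        #   NDAS:   same pattern as CCPA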
+        #
+        # MRMS:
+        # There is only one archive per day, and it contains all the raw obs
+        # files needed to generate processed obs files for the current day.
+        # Since we will only ever need this one archive for a given day,
+        # for simplicity we simply do not create a raw archive subdirectory.
+        #
+        # NDAS:
+        # Same as for CCPA.
+        if obtype == 'CCPA':
+            arcv_subdir_raw = yyyymmddhh_arcv_str
+        elif obtype == 'NOHRSC':
+            arcv_subdir_raw = yyyymmdd_arcv_str
+        elif obtype == 'MRMS':
+            arcv_subdir_raw = ''
+        elif obtype == 'NDAS':
+            arcv_subdir_raw = yyyymmddhh_arcv_str
+
+        # Combine the raw archive base directory with the raw archive subdirectory
+        # name to obtain the full path to the raw archive directory.
+        arcv_dir_raw = os.path.join(basedir_raw, arcv_subdir_raw)
+
+        # Check whether any of the obs retrieval times for the day associated with
+        # this task fall in the time interval spanned by the current archive.  If
+        # so, set the flag (do_retrieve) to retrieve the files in the current
+        # archive.
+        if obtype == 'CCPA':
+            arcv_contents_start = yyyymmddhh_arcv - (num_obs_files_per_arcv - 1)*obs_avail_intvl
+            arcv_contents_end = yyyymmddhh_arcv
+        elif obtype == 'NOHRSC':
+            arcv_contents_start = yyyymmddhh_arcv
+            arcv_contents_end = yyyymmddhh_arcv + (num_obs_files_per_arcv - 1)*obs_avail_intvl
+        elif obtype == 'MRMS':
+            arcv_contents_start = yyyymmddhh_arcv
+            arcv_contents_end = yyyymmddhh_arcv + (num_obs_files_per_arcv - 1)*obs_avail_intvl
+        elif obtype == 'NDAS':
+            arcv_contents_start = yyyymmddhh_arcv - num_obs_files_per_arcv*obs_avail_intvl
+            arcv_contents_end = yyyymmddhh_arcv - obs_avail_intvl
+
+        do_retrieve = False
+        for obs_retrieve_time in obs_retrieve_times_crnt_day:
+            if (obs_retrieve_time >= arcv_contents_start) and \
+               (obs_retrieve_time <= arcv_contents_end):
+                do_retrieve = True
+                break
+
+        if not do_retrieve:
+            msg = dedent(f"""
+                None of the current day's observation retrieval times (possibly including
+                hour 0 of the next day if considering a cumulative obs type) fall in the
+                range spanned by the current {arcv_intvl_hrs}-hourly archive file.  The
+                bounds of the data in the current archive are:
+                  {arcv_contents_start = }
+                  {arcv_contents_end = }
+                The times at which obs need to be retrieved are:
+                  {obs_retrieve_times_crnt_day = }
+                """)
+            logging.info(msg)
+
+        else:
+
+            # Make sure the raw archive directory exists because it is used below as
+            # the output directory of the retrieve_data.py script (so if this directory
+            # doesn't already exist, that script will fail).  Creating this directory
+            # also ensures that the raw base directory (basedir_raw) exists before we
+            # change location to it below.
+            Path(arcv_dir_raw).mkdir(parents=True, exist_ok=True)
+
+            # The retrieve_data.py script first extracts the contents of the archive
+            # file into the directory it was called from and then moves them to the
+            # specified output location (via the --output_path option).  Note that
+            # the relative paths of obs files within archives associated with different
+            # days may be the same.  Thus, if files with the same archive-relative
+            # paths are being simultaneously extracted from multiple archive files
+            # (by multiple get_obs tasks), they will likely clobber each other if the
+            # extraction is being carried out into the same location on disk.
To avoid
+            # this, we first change location to the raw base directory (whose name is
+            # obs-day dependent) and then call the retrieve_data.py script.
+            os.chdir(basedir_raw)
+
+            # Pull obs from HPSS.  This will get all the obs files in the current
+            # archive and place them in the raw archive directory.
+            #
+            # Note that for the specific case of NDAS obs, this will get all 7 obs
+            # files in the current archive, although we will make use of only 6 of
+            # these (we will not use the tm00 file).
+            parmdir = config['user']['PARMdir']
+            args = ['--debug', \
+                    '--file_set', 'obs', \
+                    '--config', os.path.join(parmdir, 'data_locations.yml'), \
+                    '--cycle_date', yyyymmddhh_arcv_str, \
+                    '--data_stores', 'hpss', \
+                    '--data_type', obtype + '_obs', \
+                    '--output_path', arcv_dir_raw, \
+                    '--summary_file', 'retrieve_data.log']
+            retrieve_data.main(args)
+
+            # Get the list of times corresponding to the obs files in the current
+            # archive.  This is a list of datetime objects.
+            if obtype == 'CCPA':
+                obs_times_in_arcv = [yyyymmddhh_arcv - i*obs_avail_intvl for i in range(0,num_obs_files_per_arcv)]
+            elif obtype == 'NOHRSC':
+                obs_times_in_arcv = [yyyymmddhh_arcv + i*obs_avail_intvl for i in range(0,num_obs_files_per_arcv)]
+            elif obtype == 'MRMS':
+                obs_times_in_arcv = [yyyymmddhh_arcv + i*obs_avail_intvl for i in range(0,num_obs_files_per_arcv)]
+            elif obtype == 'NDAS':
+                obs_times_in_arcv = [yyyymmddhh_arcv - (i+1)*obs_avail_intvl for i in range(0,num_obs_files_per_arcv)]
+            obs_times_in_arcv.sort()
+
+            # Loop over the raw obs files extracted from the current archive and
+            # generate from them the processed obs files.
+            #
+            # Notes on each obs type:
+            #
+            # CCPA:
+            # For most dates, generating the processed obs files consists of simply
+            # copying or moving the files from the raw archive directory to the processed
+            # directory, possibly renaming them in the process.  However, for dates
+            # between 20180718 and 20210504 and hours-of-day 19 through the end of the
+            # day (i.e. hour 0 of the next day), it involves using wgrib2 to correct an
+            # error in the metadata of the raw file and writing the corrected data
+            # to a new grib2 file in the processed location.
+            #
+            # NOHRSC:
+            # Generating the processed obs files consists of simply copying or moving
+            # the files from the raw archive directory to the processed directory,
+            # possibly renaming them in the process.
+            #
+            # MRMS:
+            # The MRMS obs are in fact available every few minutes, but the smallest
+            # value we allow the obs availability interval to be set to is 1 hour
+            # because the forecasts cannot (yet) perform sub-hourly output (also see
+            # notes for MRMS_OBS_AVAIL_INTVL_HRS in config_defaults.yaml).  For this
+            # reason, MRMS obs require an extra processing step on the raw files (before
+            # creating the processed files).  In this step, at each obs retrieval time
+            # we first generate an intermediate grib2 file by selecting, from the set
+            # of all raw (and gzipped) grib2 files for the current day (the latter
+            # usually being only a few minutes apart), the file that is nearest in
+            # time to the obs retrieval time.  After selecting this gzipped grib2
+            # file, we unzip it and place it in a temporary subdirectory under the
+            # raw base directory.  Only after this step do we then generate the
+            # processed file by moving this intermediate file to the processed
+            # directory, possibly renaming it in the process.
+            #
+            # NDAS:
+            # Generating the processed obs files consists of simply copying or moving
+            # the files from the raw archive directory to the processed directory,
+            # possibly renaming them in the process.  Note that for a given NDAS
+            # archive, its tm06 file contains more/better observations than the tm00
+            # file in the previous archive (their valid times being equivalent), so
+            # we always use the tm06 files.
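For concreteness, a short sketch (dates, hours, and accumulations hypothetical) of the raw file names the loop below reconstructs for each obs type, using the same hard-coded templates as the code that follows:

    import datetime as dt

    yyyymmddhh_arcv = dt.datetime(2023, 6, 15, 6)   # archive time (06z)
    yyyymmddhh = dt.datetime(2023, 6, 15, 3)        # obs time (03z)

    # CCPA (1-h accumulation): ccpa.t03z.01h.hrap.conus.gb2
    fn_ccpa = f"ccpa.t{yyyymmddhh.hour:02d}z.01h.hrap.conus.gb2"

    # NOHRSC (6-h accumulation; files exist only at 00/06/12/18z):
    # sfav2_CONUS_6h_2023061506_grid184.grb2
    fn_nohrsc = f"sfav2_CONUS_6h_{yyyymmddhh_arcv:%Y%m%d%H}_grid184.grb2"

    # MRMS (top-of-hour composite reflectivity after time filtering):
    # topofhour/MergedReflectivityQCComposite_00.50_20230615-030000.grib2
    fn_mrms = ("topofhour/MergedReflectivityQCComposite_00.50_"
               f"{yyyymmddhh:%Y%m%d}-{yyyymmddhh.hour:02d}0000.grib2")

    # NDAS: tmNN counts hours back from the archive time, so the 03z
    # prepbufr file in the 06z archive is nam.t06z.prepbufr.tm03.nr
    hrs_ago = int((yyyymmddhh_arcv - yyyymmddhh).seconds / 3600)
    fn_ndas = f"nam.t{yyyymmddhh_arcv:%H}z.prepbufr.tm{hrs_ago:02d}.nr"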
+            for yyyymmddhh in obs_times_in_arcv:
+
+                # Create the processed obs file from the raw one (by moving, copying, or
+                # otherwise) only if the time of the current file in the current archive
+                # also exists in the list of obs retrieval times for the current day.  We
+                # need to check this because it is possible that some of the obs retrieval
+                # times come before the range of times spanned by the current archive while
+                # the others come after, but none fall within that range.  This can happen
+                # because the set of archive hours over which we are looping were constructed
+                # above without considering whether there are obs retrieve time gaps that
+                # make it unnecessary to retrieve some of the archives between the first
+                # and last ones that must be retrieved.
+                if yyyymmddhh in obs_retrieve_times_crnt_day:
+
+                    for i, fg in enumerate(field_groups_in_obs):
+
+                        # For MRMS obs, first select from the set of raw files for the current day
+                        # those that are nearest in time to the current hour.  Unzip these in a
+                        # temporary subdirectory under the raw base directory.
+                        #
+                        # Note that the function we call to do this (mrms_pull_topofhour) assumes
+                        # a certain file naming convention.  That convention must match the names
+                        # of the files that the retrieve_data.py script called above ends up
+                        # retrieving.  The list of possible templates for these names is given
+                        # in parm/data_locations.yml, but which of those is actually used is not
+                        # known until retrieve_data.py completes.  Thus, that information needs
+                        # to be passed back by retrieve_data.py and then passed to mrms_pull_topofhour.
+                        # For now, we hard-code the file name here.
+                        if obtype == 'MRMS':
+                            yyyymmddhh_str = dt.datetime.strftime(yyyymmddhh, '%Y%m%d%H')
+                            mrms_pull_topofhour(valid_time=yyyymmddhh_str,
+                                                source=basedir_raw,
+                                                outdir=os.path.join(basedir_raw, 'topofhour'),
+                                                product=mrms_fields_in_obs_filenames[i],
+                                                add_vdate_subdir=False)
+
+                        # The raw file name needs to be the same as what the retrieve_data.py
+                        # script called above ends up retrieving.  The list of possible templates
+                        # for this name is given in parm/data_locations.yml, but which of those
+                        # is actually used is not known until retrieve_data.py completes.  Thus,
+                        # that information needs to be passed back by the script and used here.
+                        # For now, we hard-code the file name here.
+                        if obtype == 'CCPA':
+                            hr = yyyymmddhh.hour
+                            fn_raw = 'ccpa.t' + f'{hr:02d}' + 'z.' + accum_obs_formatted + 'h.hrap.conus.gb2'
+                        elif obtype == 'NOHRSC':
+                            yyyymmddhh_str = dt.datetime.strftime(yyyymmddhh, '%Y%m%d%H')
+                            fn_raw = 'sfav2_CONUS_' + accum_obs_formatted + 'h_' + yyyymmddhh_str + '_grid184.grb2'
+                        elif obtype == 'MRMS':
+                            hr = yyyymmddhh.hour
+                            fn_raw = mrms_fields_in_obs_filenames[i] + '_' + mrms_levels_in_obs_filenames[i] \
+                                   + '_' + yyyymmdd_task_str + '-' + f'{hr:02d}' + '0000.grib2'
+                            fn_raw = os.path.join('topofhour', fn_raw)
+                        elif obtype == 'NDAS':
+                            time_ago = yyyymmddhh_arcv - yyyymmddhh
+                            hrs_ago = int(time_ago.seconds/3600)
+                            hh_arcv_str = dt.datetime.strftime(yyyymmddhh_arcv, '%H')
+                            fn_raw = 'nam.t' + hh_arcv_str + 'z.prepbufr.tm' + f'{hrs_ago:02d}' + '.nr'
+                        fp_raw = os.path.join(arcv_dir_raw, fn_raw)
+
+                        # Get the full path to the final processed obs file (fp_proc) we want to
+                        # create.
+                        indx = obs_retrieve_times_crnt_day.index(yyyymmddhh)
+                        fp_proc = all_fp_proc_dict[fg][indx]
+
+                        # Make sure the directory in which the processed file will be created exists.
+                        dir_proc = os.path.dirname(fp_proc)
+                        Path(dir_proc).mkdir(parents=True, exist_ok=True)
+
+                        msg = dedent(f"""
+                            Creating the processed obs file
+                              {fp_proc}
+                            from the raw file
+                              {fp_raw}
+                            ...
+                            """)
+                        logging.debug(msg)
+
+                        yyyymmdd = yyyymmddhh.replace(hour=0, minute=0, second=0)
+                        # CCPA files for 1-hour accumulation have incorrect metadata in the files
+                        # under the "00" directory from 20180718 to 20210504.  After the data is
+                        # pulled, reorganize into correct yyyymmdd structure.
+                        if (obtype == 'CCPA') and \
+                           ((yyyymmdd >= ccpa_bad_metadata_start) and (yyyymmdd <= ccpa_bad_metadata_end)) and \
+                           (((hr >= 19) and (hr <= 23)) or (hr == 0)):
+                            cmd = ' '.join(['wgrib2', fp_raw, '-set_date -24hr -grib', fp_proc, '-s'])
+                            result = subprocess.run(cmd, shell=True, capture_output=True, text=True)
+                        elif remove_raw_obs:
+                            shutil.move(fp_raw, fp_proc)
+                        else:
+                            shutil.copy(fp_raw, fp_proc)
+    #
+    #-----------------------------------------------------------------------
+    #
+    # Clean up raw obs directories.
+    #
+    #-----------------------------------------------------------------------
+    #
+    if remove_raw_obs:
+        logging.info("Removing raw obs directories ...")
+        shutil.rmtree(basedir_raw)
+
+    return True
+
+
+
+def parse_args(argv):
+    """Parse command line arguments."""
+    parser = argparse.ArgumentParser(
+        description="Get observations."
+    )
+
+    parser.add_argument(
+        "--obtype",
+        type=str,
+        required=True,
+        choices=['CCPA', 'NOHRSC', 'MRMS', 'NDAS'],
+        help="Observation type.",
+    )
+
+    parser.add_argument(
+        "--obs_day",
+        type=lambda d: dt.datetime.strptime(d, '%Y%m%d'),
+        required=True,
+        help="Date of observation day, in the form 'YYYYMMDD'.",
+    )
+
+    parser.add_argument(
+        "--var_defns_path",
+        type=str,
+        required=True,
+        help="Path to variable definitions file.",
+    )
+
+    choices_log_level = [pair for lvl in list(logging._nameToLevel.keys())
+                              for pair in (str.lower(lvl), str.upper(lvl))]
+    parser.add_argument(
+        "--log_level",
+        type=str,
+        required=False,
+        default='info',
+        choices=choices_log_level,
+        help=dedent(f"""
+            Logging level to use with the 'logging' module.
+            """))
+
+    parser.add_argument(
+        "--log_fp",
+        type=str,
+        required=False,
+        default='',
+        help=dedent(f"""
+            Name of or path (absolute or relative) to log file.  If not specified,
+            the output goes to screen.
+            """))
+
+    return parser.parse_args(argv)
+
+
+if __name__ == "__main__":
+    args = parse_args(sys.argv[1:])
+
+    # Set up logging.
+ # If the name/path of a log file has been specified in the command line + # arguments, place the logging output in it (existing log files of the + # same name are overwritten). Otherwise, direct the output to the screen. + log_level = str.upper(args.log_level) + msg_format = "[%(levelname)s:%(name)s: %(filename)s, line %(lineno)s: %(funcName)s()] %(message)s" + if args.log_fp: + logging.basicConfig(level=log_level, format=msg_format, filename=args.log_fp, filemode='w') + else: + logging.basicConfig(level=log_level, format=msg_format) + + cfg = load_yaml_config(args.var_defns_path) + get_obs(cfg, args.obtype, args.obs_day) + + diff --git a/ush/job_preamble.sh b/ush/job_preamble.sh index e9c3683c40..ecfb94fb50 100644 --- a/ush/job_preamble.sh +++ b/ush/job_preamble.sh @@ -1,5 +1,7 @@ #!/bin/bash +set +u + # #----------------------------------------------------------------------- # @@ -67,13 +69,13 @@ export COMOUTwmo="${COMOUTwmo:-${COMOUT}/wmo}" # #----------------------------------------------------------------------- # -if [ ${subcyc} -ne 0 ]; then +if [ ${subcyc:-0} -ne 0 ]; then export cycle="t${cyc}${subcyc}z" else export cycle="t${cyc}z" fi -if [ "${RUN_ENVIR}" = "nco" ] && [ "${DO_ENSEMBLE}" = "TRUE" ] && [ ! -z $ENSMEM_INDX ]; then +if [ "${RUN_ENVIR}" = "nco" ] && [ $(boolify "${DO_ENSEMBLE}") = "TRUE" ] && [ ! -z $ENSMEM_INDX ]; then export dot_ensmem=".mem${ENSMEM_INDX}" else export dot_ensmem= @@ -88,7 +90,7 @@ fi export DATA= if [ "${RUN_ENVIR}" = "nco" ]; then export DATA=${DATAROOT}/${jobid} - mkdir_vrfy -p $DATA + mkdir -p $DATA cd $DATA fi # @@ -174,10 +176,10 @@ export -f POST_STEP # if [ "${RUN_ENVIR}" = "nco" ] && [ "${WORKFLOW_MANAGER}" != "ecflow" ]; then __EXPTLOG=${EXPTDIR}/log - mkdir_vrfy -p ${__EXPTLOG} + mkdir -p ${__EXPTLOG} for i in ${LOGDIR}/*.${WORKFLOW_ID}.log; do __LOGB=$(basename $i .${WORKFLOW_ID}.log) - ln_vrfy -sf $i ${__EXPTLOG}/${__LOGB}.log + ln -sf $i ${__EXPTLOG}/${__LOGB}.log done fi # @@ -215,4 +217,3 @@ In directory: \"${scrfunc_dir}\" ========================================================================" } - diff --git a/ush/launch_FV3LAM_wflow.sh b/ush/launch_FV3LAM_wflow.sh old mode 100755 new mode 100644 index cfbedac9cf..7a4a16e4b5 --- a/ush/launch_FV3LAM_wflow.sh +++ b/ush/launch_FV3LAM_wflow.sh @@ -34,43 +34,10 @@ scrfunc_dir=$( dirname "${scrfunc_fp}" ) # #----------------------------------------------------------------------- # -# Get the experiment directory. We assume that there is a symlink to -# this script in the experiment directory, and this script is called via -# that symlink. Thus, finding the directory in which the symlink is -# located will give us the experiment directory. We find this by first -# obtaining the directory portion (i.e. the portion without the name of -# this script) of the command that was used to called this script (i.e. -# "$0") and then use the "readlink -f" command to obtain the corresponding -# absolute path. 
This will work for all four of the following ways in -# which the symlink in the experiment directory pointing to this script -# may be called: -# -# 1) Call this script from the experiment directory: -# > cd /path/to/experiment/directory -# > launch_FV3LAM_wflow.sh -# -# 2) Call this script from the experiment directory but using "./" before -# the script name: -# > cd /path/to/experiment/directory -# > ./launch_FV3LAM_wflow.sh -# -# 3) Call this script from any directory using the absolute path to the -# symlink in the experiment directory: -# > /path/to/experiment/directory/launch_FV3LAM_wflow.sh -# -# 4) Call this script from a directory that is several levels up from the -# experiment directory (but not necessarily at the root directory): -# > cd /path/to -# > experiment/directory/launch_FV3LAM_wflow.sh -# -# Note that given just a file name, e.g. the name of this script without -# any path before it, the "dirname" command will return a ".", e.g. in -# bash, -# -# > exptdir=$( dirname "launch_FV3LAM_wflow.sh" ) -# > echo $exptdir -# -# will print out ".". +# This script will be configured for a specific experiment when +# generate_FV3LAM_wflow.py. That process fills in what is necessary so +# this configured script in the experiment directory will need no +# additional information at run time. # #----------------------------------------------------------------------- # @@ -94,7 +61,12 @@ fi # #----------------------------------------------------------------------- # -. $exptdir/var_defns.sh + +# These variables are assumed to exist in the global environment by the +# bash_utils, which is a Very Bad (TM) thing. +export USHdir=$USHdir +export valid_vals_BOOLEAN=${valid_vals_BOOLEAN} + . $USHdir/source_util_funcs.sh # #----------------------------------------------------------------------- @@ -166,7 +138,7 @@ wflow_status="IN PROGRESS" # #----------------------------------------------------------------------- # -cd_vrfy "$exptdir" +cd "$exptdir" # #----------------------------------------------------------------------- # @@ -369,7 +341,7 @@ by expt_name has completed with the following workflow status (wflow_status): # Thus, there is no need to try to relaunch it. We also append a message # to the completion message above to indicate this. # - if [ "${USE_CRON_TO_RELAUNCH}" = "TRUE" ]; then + if [ $(boolify "${USE_CRON_TO_RELAUNCH}") = "TRUE" ]; then msg="${msg}\ Thus, there is no need to relaunch the workflow via a cron job. Removing @@ -381,9 +353,9 @@ script for this experiment: # Remove CRONTAB_LINE from cron table # if [ "${called_from_cron}" = "TRUE" ]; then - python3 $USHdir/get_crontab_contents.py --remove -m=${machine} -l="${CRONTAB_LINE}" -c -d + python3 $USHdir/get_crontab_contents.py --remove -m=${machine} -l='${CRONTAB_LINE}' -c -d else - python3 $USHdir/get_crontab_contents.py --remove -m=${machine} -l="${CRONTAB_LINE}" -d + python3 $USHdir/get_crontab_contents.py --remove -m=${machine} -l='${CRONTAB_LINE}' -d fi fi # diff --git a/ush/link_fix.py b/ush/link_fix.py index fdd9a65f28..1e4a7c6254 100755 --- a/ush/link_fix.py +++ b/ush/link_fix.py @@ -18,7 +18,7 @@ cd_vrfy, mkdir_vrfy, find_pattern_in_str, - load_shell_config, + load_yaml_config, ) @@ -35,25 +35,24 @@ def link_fix( sfc_climo_fields, **kwargs, ): - """This file defines a function that links fix files to the target - directory for a given SRW experiment. Only links files for one group - at a time. + """Links fix files to the target directory for a given SRW experiment. 
+ It only links files for one group at a time. Args: - cfg_d: dictionary of settings - file_group: could be on of ["grid", "orog", "sfc_climo"] - source_dir: the path to directory where the file_group fix files - are linked from - target_dir: the directory where the fix files should be linked to - dot_or_uscore: str containing either a dot or an underscore - nhw: grid parameter setting - constants: dict containing the constants used by SRW - run_task: boolean value indicating whether the task is to be run - in the experiment - climo_fields: list of fields needed for climo + cfg_d (dict): Dictionary of configuration settings + file_group (str) : Choice of [``"grid"``, ``"orog"``, ``"sfc_climo"``] + source_dir (str) : Path to directory that the ``file_group`` fix files are linked from + target_dir (str) : Directory that the fix files should be linked to + dot_or_uscore (str) : Either a dot (``.``) or an underscore (``_``) + nhw (int) : Wide halo width (grid parameter setting: N=number of cells, + H=halo, W=wide halo) + constants (dict): Dictionary containing the constants used by the SRW App + run_task (bool): Whether the task is to be run in the experiment + climo_fields (list): List of fields needed for surface climatology (see + ``fixed_files_mapping.yaml`` for details) Returns: - a string: resolution + res (str): File/grid resolution """ print_input_args(locals()) @@ -99,9 +98,9 @@ def link_fix( # 1) "C*.mosaic.halo${NHW}.nc" # This mosaic file for the wide-halo grid (i.e. the grid with a ${NHW}- # cell-wide halo) is needed as an input to the orography filtering - # executable in the orography generation task. The filtering code + # executable in the orography generation task. The filtering code # extracts from this mosaic file the name of the file containing the - # grid on which it will generate filtered topography. Note that the + # grid on which it will generate filtered topography. Note that the # orography generation and filtering are both performed on the wide- # halo grid. The filtered orography file on the wide-halo grid is then # shaved down to obtain the filtered orography files with ${NH3}- and @@ -256,7 +255,7 @@ def link_fix( if not res: print_err_msg_exit( f""" - The resolution could not be extracted from the current file's name. The + The resolution could not be extracted from the current file's name. The full path to the file (fp) is: fp = '{fp}' This may be because fp contains the * globbing character, which would @@ -376,8 +375,8 @@ def link_fix( return res -def parse_args(argv): - """Parse command line arguments""" +def _parse_args(argv): + """Parses command line arguments""" parser = argparse.ArgumentParser( description="Creates symbolic links to FIX directories." ) @@ -402,8 +401,8 @@ def parse_args(argv): if __name__ == "__main__": - args = parse_args(sys.argv[1:]) - cfg = load_shell_config(args.path_to_defns) + args = _parse_args(sys.argv[1:]) + cfg = load_yaml_config(args.path_to_defns) link_fix( verbose=cfg["workflow"]["VERBOSE"], file_group=args.file_group, diff --git a/ush/load_modules_run_task.sh b/ush/load_modules_run_task.sh index 89f3addf41..5ede278bfd 100755 --- a/ush/load_modules_run_task.sh +++ b/ush/load_modules_run_task.sh @@ -3,33 +3,43 @@ # #----------------------------------------------------------------------- # -# Source necessary files. +# This script loads the appropriate modules for a given task in an +# experiment. 
# -#----------------------------------------------------------------------- +# It requires the following global environment variables: # -. ${GLOBAL_VAR_DEFNS_FP} -. $USHdir/source_util_funcs.sh +# GLOBAL_VAR_DEFNS_FP # -#----------------------------------------------------------------------- +# And uses these variables from the GLOBAL_VAR_DEFNS_FP file # -# Save current shell options (in a global array). Then set new options -# for this script/function. +# platform: +# BUILD_MOD_FN +# RUN_VER_FN +# +# workflow: +# VERBOSE # #----------------------------------------------------------------------- # -{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1 + +# Get the location of this file -- it's the USHdir +scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) +scrfunc_fn=$( basename "${scrfunc_fp}" ) +USHdir=$( dirname "${scrfunc_fp}" ) +HOMEdir=$( dirname $USHdir ) + +source $USHdir/source_util_funcs.sh + # #----------------------------------------------------------------------- # -# Get the full path to the file in which this script/function is located -# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in -# which the file is located (scrfunc_dir). +# Save current shell options (in a global array). Then set new options +# for this script/function. # #----------------------------------------------------------------------- # -scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" ) -scrfunc_fn=$( basename "${scrfunc_fp}" ) -scrfunc_dir=$( dirname "${scrfunc_fp}" ) +{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1 + # #----------------------------------------------------------------------- # @@ -37,7 +47,7 @@ scrfunc_dir=$( dirname "${scrfunc_fp}" ) # #----------------------------------------------------------------------- # -if [ "$#" -ne 2 ]; then +if [ "$#" -ne 3 ]; then print_err_msg_exit " Incorrect number of arguments specified: @@ -46,15 +56,17 @@ Incorrect number of arguments specified: Usage: - ${scrfunc_fn} task_name jjob_fp + ${scrfunc_fn} machine task_name jjob_fp where the arguments are defined as follows: + machine: The name of the supported platform + task_name: The name of the rocoto task for which this script will load modules and launch the J-job. - jjob_fp + jjob_fp: The full path to the J-job script corresponding to task_name. This script will launch this J-job using the \"exec\" command (which will first terminate this script and then launch the j-job; see man page of @@ -65,12 +77,13 @@ fi # #----------------------------------------------------------------------- # -# Get the task name and the name of the J-job script. +# Save arguments # #----------------------------------------------------------------------- # -task_name="$1" -jjob_fp="$2" +machine=$(echo_lowercase $1) +task_name="$2" +jjob_fp="$3" # #----------------------------------------------------------------------- # @@ -99,12 +112,38 @@ set -u #----------------------------------------------------------------------- # default_modules_dir="$HOMEdir/modulefiles" -machine=$(echo_lowercase $MACHINE) -if [ "${WORKFLOW_MANAGER}" != "ecflow" ]; then +test ! $(module is-loaded ecflow > /dev/null 2>&1) && ecflow_loaded=false + +if [ "$ecflow_loaded" = "false" ] ; then source "${HOMEdir}/etc/lmod-setup.sh" ${machine} fi module use "${default_modules_dir}" +# Load workflow environment + +if [ -f ${default_modules_dir}/python_srw.lua ] ; then + module load python_srw || print_err_msg_exit "\ + Loading SRW common python module failed. 
Expected python_srw.lua + in the modules directory here: + modules_dir = \"${default_modules_dir}\"" +fi + +# Modules that use conda and need an environment activated will set the +# SRW_ENV variable to the name of the environment to be activated. That +# must be done within the script, and not inside the module. Do that +# now. +if [ -n "${SRW_ENV:-}" ] ; then + set +u + conda deactivate + conda activate ${SRW_ENV} + set -u +fi + +# Source the necessary blocks of the experiment config YAML +for sect in platform workflow ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done + if [ "${machine}" != "wcoss2" ]; then module load "${BUILD_MOD_FN}" || print_err_msg_exit "\ Loading of platform- and compiler-specific module file (BUILD_MOD_FN) @@ -116,26 +155,15 @@ fi # #----------------------------------------------------------------------- # -# Set the directory (modules_dir) in which the module files for the va- -# rious workflow tasks are located. Also, set the name of the module -# file for the specified task. -# -# A module file is a file whose first line is the "magic cookie" string -# '#%Module'. It is interpreted by the "module load ..." command. It -# sets environment variables (including prepending/appending to paths) -# and loads modules. -# -# The UFS SRW App repository contains module files for the -# workflow tasks in the template rocoto XML file for the FV3-LAM work- -# flow that need modules not loaded in the BUILD_MOD_FN above. +# Set the directory for the modulefiles included with SRW and the +# specific module for the requested task. # # The full path to a module file for a given task is # # $HOMEdir/modulefiles/$machine/${task_name}.local # -# where HOMEdir is the base directory of the workflow, machine is the -# name of the machine that we're running on (in lowercase), and task_- -# name is the name of the current task (an input to this script). +# where HOMEdir is the SRW clone, machine is the name of the platform +# being used, and task_name is the current task to run. # #----------------------------------------------------------------------- # @@ -154,10 +182,10 @@ Loading modules for task \"${task_name}\" ..." module use "${modules_dir}" || print_err_msg_exit "\ Call to \"module use\" command failed." -# source version file (run) only if it is specified in versions directory -VERSION_FILE="${HOMEdir}/versions/${RUN_VER_FN}" -if [ -f ${VERSION_FILE} ]; then - . ${VERSION_FILE} +# source version file only if it exists in the versions directory +version_file="${HOMEdir}/versions/${RUN_VER_FN}" +if [ -f ${version_file} ]; then + source ${version_file} fi # # Load the .local module file if available for the given task @@ -170,20 +198,11 @@ specified task (task_name) failed: task_name = \"${task_name}\" modulefile_local = \"${modulefile_local}\" modules_dir = \"${modules_dir}\"" -elif [ -f ${default_modules_dir}/python_srw.lua ] ; then - module load python_srw || print_err_msg_exit "\ - Loading SRW common python module failed. Expected python_srw.lua - in the modules directory here: - modules_dir = \"${default_modules_dir}\"" fi - module list -# Modules that use conda and need an environment activated will set the -# SRW_ENV variable to the name of the environment to be activated. That -# must be done within the script, and not inside the module. Do that -# now. - +# Reactivate the workflow environment to ensure the correct Python +# environment is available first in the environment. 
if [ -n "${SRW_ENV:-}" ] ; then set +u conda deactivate @@ -204,11 +223,7 @@ Launching J-job (jjob_fp) for task \"${task_name}\" ... jjob_fp = \"${jjob_fp}\" " -if [ "${WORKFLOW_MANAGER}" = "ecflow" ]; then - /bin/bash "${jjob_fp}" -else - exec "${jjob_fp}" -fi +source "${jjob_fp}" # #----------------------------------------------------------------------- diff --git a/ush/machine/cheyenne.yaml b/ush/machine/cheyenne.yaml deleted file mode 100644 index a91c52e170..0000000000 --- a/ush/machine/cheyenne.yaml +++ /dev/null @@ -1,35 +0,0 @@ -platform: - WORKFLOW_MANAGER: rocoto - NCORES_PER_NODE: 36 - SCHED: pbspro - TEST_CCPA_OBS_DIR: /glade/work/epicufsrt/contrib/UFS_SRW_data/develop/obs_data/ccpa/proc - TEST_MRMS_OBS_DIR: /glade/work/epicufsrt/contrib/UFS_SRW_data/develop/obs_data/mrms/proc - TEST_NDAS_OBS_DIR: /glade/work/epicufsrt/contrib/UFS_SRW_data/develop/obs_data/ndas/proc - TEST_NOHRSC_OBS_DIR: /glade/work/epicufsrt/contrib/UFS_SRW_data/develop/obs_data/nohrsc/proc - DOMAIN_PREGEN_BASEDIR: /glade/work/epicufsrt/contrib/UFS_SRW_data/develop/FV3LAM_pregen - QUEUE_DEFAULT: regular - QUEUE_FCST: regular - QUEUE_HPSS: regular - RUN_CMD_FCST: mpirun -np ${PE_MEMBER01} - RUN_CMD_POST: mpirun -np $nprocs - RUN_CMD_PRDGEN: mpirun -np $nprocs - RUN_CMD_SERIAL: time - RUN_CMD_UTILS: mpirun -np $nprocs - RUN_CMD_NEXUS: mpirun -np $nprocs - RUN_CMD_AQMLBC: mpirun -np ${NUMTS} - PRE_TASK_CMDS: '{ ulimit -s unlimited; ulimit -a; }' - TEST_EXTRN_MDL_SOURCE_BASEDIR: /glade/work/epicufsrt/contrib/UFS_SRW_data/develop/input_model_data - TEST_AQM_INPUT_BASEDIR: /glade/work/epicufsrt/contrib/UFS_SRW_data/develop/aqm_data - TEST_PREGEN_BASEDIR: /glade/work/epicufsrt/contrib/UFS_SRW_data/develop/FV3LAM_pregen - TEST_ALT_EXTRN_MDL_SYSBASEDIR_ICS: /glade/work/epicufsrt/contrib/UFS_SRW_data/develop/dummy_FV3GFS_sys_dir - TEST_ALT_EXTRN_MDL_SYSBASEDIR_LBCS: /glade/work/epicufsrt/contrib/UFS_SRW_data/develop/dummy_FV3GFS_sys_dir - TEST_VX_FCST_INPUT_BASEDIR: '{{ "/glade/work/epicufsrt/contrib/UFS_SRW_data/develop/output_data/fcst_" }}{{ "ens" if (global.NUM_ENS_MEMBERS > 0) else "det" }}{{ "/{{workflow.PREDEF_GRID_NAME}}" }}{% raw %}{% endraw %}' - FIXaer: /glade/work/epicufsrt/contrib/UFS_SRW_data/develop/fix/fix_aer - FIXgsm: /glade/work/epicufsrt/contrib/UFS_SRW_data/develop/fix/fix_am - FIXlut: /glade/work/epicufsrt/contrib/UFS_SRW_data/develop/fix/fix_lut - FIXorg: /glade/work/epicufsrt/contrib/UFS_SRW_data/develop/fix/fix_orog - FIXsfc: /glade/work/epicufsrt/contrib/UFS_SRW_data/develop/fix/fix_sfc_climo - FIXshp: /glade/work/epicufsrt/contrib/UFS_SRW_data/develop/NaturalEarth -data: - ics_lbcs: - FV3GFS: /glade/p/ral/jntp/UFS_CAM/COMGFS/gfs.${yyyymmdd}/${hh} diff --git a/ush/machine/derecho.yaml b/ush/machine/derecho.yaml index 511ccc2784..d8a3e8f4d4 100644 --- a/ush/machine/derecho.yaml +++ b/ush/machine/derecho.yaml @@ -2,6 +2,7 @@ platform: WORKFLOW_MANAGER: rocoto NCORES_PER_NODE: 128 SCHED: pbspro + WE2E_TEST_DATA: /glade/work/epicufsrt/contrib/UFS_SRW_data/develop TEST_CCPA_OBS_DIR: /glade/work/epicufsrt/contrib/UFS_SRW_data/develop/obs_data/ccpa/proc TEST_MRMS_OBS_DIR: /glade/work/epicufsrt/contrib/UFS_SRW_data/develop/obs_data/mrms/proc TEST_NDAS_OBS_DIR: /glade/work/epicufsrt/contrib/UFS_SRW_data/develop/obs_data/ndas/proc @@ -15,8 +16,8 @@ platform: RUN_CMD_PRDGEN: mpiexec -n $nprocs RUN_CMD_SERIAL: time RUN_CMD_UTILS: mpiexec -n $nprocs - RUN_CMD_NEXUS: mpiexec -n $nprocs - RUN_CMD_AQMLBC: mpiexec -n ${NUMTS} + RUN_CMD_NEXUS: /opt/cray/pe/pals/1.2.11/bin/mpiexec -n $nprocs + RUN_CMD_AQMLBC: 
/opt/cray/pe/pals/1.2.11/bin/mpiexec -n ${numts} PRE_TASK_CMDS: '{ ulimit -s unlimited; ulimit -a; }' TEST_EXTRN_MDL_SOURCE_BASEDIR: /glade/work/epicufsrt/contrib/UFS_SRW_data/develop/input_model_data TEST_AQM_INPUT_BASEDIR: /glade/work/epicufsrt/contrib/UFS_SRW_data/develop/aqm_data @@ -31,6 +32,8 @@ platform: FIXorg: /glade/work/epicufsrt/contrib/UFS_SRW_data/develop/fix/fix_orog FIXsfc: /glade/work/epicufsrt/contrib/UFS_SRW_data/develop/fix/fix_sfc_climo FIXshp: /glade/work/epicufsrt/contrib/UFS_SRW_data/develop/NaturalEarth + FIXaqm: /glade/work/chanhooj/SRW-AQM_DATA/fix_aqm + FIXemis: /glade/work/chanhooj/SRW-AQM_DATA/fix_emis EXTRN_MDL_DATA_STORES: aws data: ics_lbcs: @@ -42,3 +45,7 @@ data: HRRR: /glade/work/epicufsrt/contrib/UFS_SRW_data/develop/input_model_data/HRRR/${yyyymmdd}${hh} RAP: /glade/work/epicufsrt/contrib/UFS_SRW_data/develop/input_model_data/RAP/${yyyymmdd}${hh} GSMGFS: /glade/work/epicufsrt/contrib/UFS_SRW_data/develop/input_model_data/GSMGFS/${yyyymmdd}${hh} +cpl_aqm_parm: + COMINfire_default: /glade/work/chanhooj/SRW-AQM_DATA/aqm_data/RAVE_fire + COMINgefs_default: /glade/work/chanhooj/SRW-AQM_DATA/aqm_data/GEFS_DATA + NEXUS_GFS_SFC_DIR: /glade/work/chanhooj/SRW-AQM_DATA/aqm_data/GFS_SFC_DATA diff --git a/ush/machine/gaea.yaml b/ush/machine/gaea.yaml index 1ec2ded2ef..91e248e57b 100644 --- a/ush/machine/gaea.yaml +++ b/ush/machine/gaea.yaml @@ -2,6 +2,7 @@ platform: WORKFLOW_MANAGER: rocoto NCORES_PER_NODE: 128 SCHED: slurm + WE2E_TEST_DATA: /gpfs/f5/epic/world-shared/UFS_SRW_data/develop TEST_CCPA_OBS_DIR: /gpfs/f5/epic/world-shared/UFS_SRW_data/develop/obs_data/ccpa/proc TEST_MRMS_OBS_DIR: /gpfs/f5/epic/world-shared/UFS_SRW_data/develop/obs_data/mrms/proc TEST_NDAS_OBS_DIR: /gpfs/f5/epic/world-shared/UFS_SRW_data/develop/obs_data/ndas/proc @@ -12,7 +13,7 @@ platform: QUEUE_HPSS: normal REMOVE_MEMORY: True PARTITION_HPSS: eslogin_c5 - RUN_CMD_FCST: srun --export=ALL -n ${PE_MEMBER01} + RUN_CMD_FCST: srun --export=ALL RUN_CMD_POST: srun --export=ALL -n $nprocs RUN_CMD_PRDGEN: srun --export=ALL -n $nprocs RUN_CMD_SERIAL: time @@ -47,9 +48,9 @@ rocoto: tasks: metatask_run_ensemble: task_run_fcst_mem#mem#: - cores: '{{ task_run_fcst.PE_MEMBER01 // 1 }}' + cores: native: '--cpus-per-task {{ task_run_fcst.OMP_NUM_THREADS_RUN_FCST|int }} --exclusive {{ platform.SCHED_NATIVE_CMD }}' - nodes: + nodes: '{{ task_run_fcst.NNODES_RUN_FCST // 1 }}:ppn={{ task_run_fcst.PPN_RUN_FCST // 1 }}' nnodes: nodesize: ppn: diff --git a/ush/machine/hera.yaml b/ush/machine/hera.yaml index 4d836af317..5644814e1d 100644 --- a/ush/machine/hera.yaml +++ b/ush/machine/hera.yaml @@ -2,10 +2,12 @@ platform: WORKFLOW_MANAGER: rocoto NCORES_PER_NODE: 40 SCHED: slurm + WE2E_TEST_DATA: /scratch1/NCEPDEV/nems/role.epic/UFS_SRW_data/develop TEST_CCPA_OBS_DIR: /scratch1/NCEPDEV/nems/role.epic/UFS_SRW_data/develop/obs_data/ccpa/proc + TEST_NOHRSC_OBS_DIR: /scratch1/NCEPDEV/nems/role.epic/UFS_SRW_data/develop/obs_data/nohrsc/proc TEST_MRMS_OBS_DIR: /scratch1/NCEPDEV/nems/role.epic/UFS_SRW_data/develop/obs_data/mrms/proc TEST_NDAS_OBS_DIR: /scratch1/NCEPDEV/nems/role.epic/UFS_SRW_data/develop/obs_data/ndas/proc - TEST_NOHRSC_OBS_DIR: /scratch1/NCEPDEV/nems/role.epic/UFS_SRW_data/develop/obs_data/nohrsc/proc + TEST_GDAS_OBS_DIR: /scratch1/NCEPDEV/nems/role.epic/UFS_SRW_data/develop/obs_data/gdas DOMAIN_PREGEN_BASEDIR: /scratch1/NCEPDEV/nems/role.epic/UFS_SRW_data/develop/FV3LAM_pregen PARTITION_DEFAULT: hera QUEUE_DEFAULT: batch @@ -20,14 +22,15 @@ platform: RUN_CMD_UTILS: srun --export=ALL RUN_CMD_NEXUS: 
srun -n ${nprocs} --export=ALL RUN_CMD_AQMLBC: srun --export=ALL -n ${numts} - SCHED_NATIVE_CMD: --export=NONE - SCHED_NATIVE_CMD_HPSS: -n 1 --export=NONE + SCHED_NATIVE_CMD: "--export=NONE" + SCHED_NATIVE_CMD_HPSS: "-n 1 --export=NONE" PRE_TASK_CMDS: '{ ulimit -s unlimited; ulimit -a; }' TEST_EXTRN_MDL_SOURCE_BASEDIR: /scratch1/NCEPDEV/nems/role.epic/UFS_SRW_data/develop/input_model_data TEST_AQM_INPUT_BASEDIR: /scratch1/NCEPDEV/nems/role.epic/UFS_SRW_data/develop/aqm_data TEST_PREGEN_BASEDIR: /scratch1/NCEPDEV/nems/role.epic/UFS_SRW_data/develop/FV3LAM_pregen TEST_ALT_EXTRN_MDL_SYSBASEDIR_ICS: /scratch1/NCEPDEV/nems/role.epic/UFS_SRW_data/develop/dummy_FV3GFS_sys_dir TEST_ALT_EXTRN_MDL_SYSBASEDIR_LBCS: /scratch1/NCEPDEV/nems/role.epic/UFS_SRW_data/develop/dummy_FV3GFS_sys_dir + #TEST_VX_FCST_INPUT_BASEDIR: '{{ "/scratch2/BMC/fv3lam/Gerard.Ketefian/forecasts_for_SRW_WE2E_tests.final/fcst_" }}{{ "ens" if (global.NUM_ENS_MEMBERS > 0) else "det" }}{{ "/{{workflow.PREDEF_GRID_NAME}}" }}{% raw %}{% endraw %}' TEST_VX_FCST_INPUT_BASEDIR: '{{ "/scratch1/NCEPDEV/nems/role.epic/UFS_SRW_data/develop/output_data/fcst_" }}{{ "ens" if (global.NUM_ENS_MEMBERS > 0) else "det" }}{{ "/{{workflow.PREDEF_GRID_NAME}}" }}{% raw %}{% endraw %}' FIXaer: /scratch1/NCEPDEV/nems/role.epic/UFS_SRW_data/develop/fix/fix_aer FIXgsm: /scratch1/NCEPDEV/nems/role.epic/UFS_SRW_data/develop/fix/fix_am @@ -48,9 +51,9 @@ rocoto: tasks: metatask_run_ensemble: task_run_fcst_mem#mem#: - cores: '{{ task_run_fcst.PE_MEMBER01 // 1 }}' + cores: native: '--cpus-per-task {{ task_run_fcst.OMP_NUM_THREADS_RUN_FCST|int }} --exclusive {{ platform.SCHED_NATIVE_CMD }}' - nodes: + nodes: '{{ task_run_fcst.NNODES_RUN_FCST // 1 }}:ppn={{ task_run_fcst.PPN_RUN_FCST // 1 }}' nnodes: nodesize: ppn: diff --git a/ush/machine/hercules.yaml b/ush/machine/hercules.yaml index e300cf3d6d..eddf307091 100644 --- a/ush/machine/hercules.yaml +++ b/ush/machine/hercules.yaml @@ -2,6 +2,7 @@ platform: WORKFLOW_MANAGER: rocoto NCORES_PER_NODE: 80 SCHED: slurm + WE2E_TEST_DATA: /work/noaa/epic/role-epic/contrib/UFS_SRW_data/develop TEST_CCPA_OBS_DIR: /work/noaa/epic/role-epic/contrib/UFS_SRW_data/develop/obs_data/ccpa/proc TEST_MRMS_OBS_DIR: /work/noaa/epic/role-epic/contrib/UFS_SRW_data/develop/obs_data/mrms/proc TEST_NDAS_OBS_DIR: /work/noaa/epic/role-epic/contrib/UFS_SRW_data/develop/obs_data/ndas/proc @@ -13,13 +14,13 @@ platform: QUEUE_FCST: batch PARTITION_HPSS: service QUEUE_HPSS: batch - RUN_CMD_FCST: srun --export=ALL -n ${PE_MEMBER01} + RUN_CMD_FCST: srun --export=ALL RUN_CMD_POST: srun --export=ALL RUN_CMD_PRDGEN: srun --export=ALL RUN_CMD_SERIAL: time RUN_CMD_UTILS: srun --export=ALL -n $nprocs RUN_CMD_NEXUS: srun --export=ALL - RUN_CMD_AQMLBC: srun --export=ALL -n ${NUMTS} + RUN_CMD_AQMLBC: srun --export=ALL -n ${numts} SCHED_NATIVE_CMD: --export=NONE PRE_TASK_CMDS: '{ ulimit -s unlimited; ulimit -a; }' TEST_EXTRN_MDL_SOURCE_BASEDIR: /work/noaa/epic/role-epic/contrib/UFS_SRW_data/develop/input_model_data @@ -33,6 +34,8 @@ platform: FIXorg: /work/noaa/epic/role-epic/contrib/UFS_SRW_data/develop/fix/fix_orog FIXsfc: /work/noaa/epic/role-epic/contrib/UFS_SRW_data/develop/fix/fix_sfc_climo FIXshp: /work/noaa/epic/role-epic/contrib/UFS_SRW_data/develop/NaturalEarth + FIXaqm: /work/noaa/epic/SRW-AQM_DATA/fix_aqm + FIXemis: /work/noaa/epic/SRW-AQM_DATA/fix_emis EXTRN_MDL_DATA_STORES: aws data: ics_lbcs: @@ -44,3 +47,19 @@ data: HRRR: /work/noaa/epic/role-epic/contrib/UFS_SRW_data/develop/input_model_data/HRRR/${yyyymmdd}${hh} RAP: 
/work/noaa/epic/role-epic/contrib/UFS_SRW_data/develop/input_model_data/RAP/${yyyymmdd}${hh} GSMGFS: /work/noaa/epic/role-epic/contrib/UFS_SRW_data/develop/input_model_data/GSMGFS/${yyyymmdd}${hh} + +cpl_aqm_parm: + COMINfire_default: /work/noaa/epic/SRW-AQM_DATA/aqm_data/RAVE_fire + COMINgefs_default: /work/noaa/epic/SRW-AQM_DATA/aqm_data/GEFS_DATA + NEXUS_GFS_SFC_DIR: /work/noaa/epic/SRW-AQM_DATA/aqm_data/GFS_SFC_DATA + +rocoto: + tasks: + metatask_run_ensemble: + task_run_fcst_mem#mem#: + cores: + native: '--cpus-per-task {{ task_run_fcst.OMP_NUM_THREADS_RUN_FCST|int }} --exclusive {{ platform.SCHED_NATIVE_CMD }}' + nodes: '{{ task_run_fcst.NNODES_RUN_FCST // 1 }}:ppn={{ task_run_fcst.PPN_RUN_FCST // 1 }}' + nnodes: + nodesize: + ppn: diff --git a/ush/machine/jet.yaml b/ush/machine/jet.yaml index 93d375ee02..b14a4ab9ff 100644 --- a/ush/machine/jet.yaml +++ b/ush/machine/jet.yaml @@ -2,12 +2,13 @@ platform: WORKFLOW_MANAGER: rocoto NCORES_PER_NODE: 24 SCHED: slurm - TEST_CCPA_OBS_DIR: /mnt/lfs4/HFIP/hfv3gfs/role.epic/UFS_SRW_data/develop/obs_data/ccpa/proc - TEST_MRMS_OBS_DIR: /mnt/lfs4/HFIP/hfv3gfs/role.epic/UFS_SRW_data/develop/obs_data/mrms/proc - TEST_NDAS_OBS_DIR: /mnt/lfs4/HFIP/hfv3gfs/role.epic/UFS_SRW_data/develop/obs_data/ndas/proc - TEST_NOHRSC_OBS_DIR: /mnt/lfs4/HFIP/hfv3gfs/role.epic/UFS_SRW_data/develop/obs_data/nohrsc/proc - DOMAIN_PREGEN_BASEDIR: /mnt/lfs4/HFIP/hfv3gfs/role.epic/UFS_SRW_data/develop/FV3LAM_pregen - PARTITION_DEFAULT: sjet,vjet,kjet,xjet + WE2E_TEST_DATA: /mnt/lfs5/HFIP/hfv3gfs/role.epic/UFS_SRW_data/develop + TEST_CCPA_OBS_DIR: /mnt/lfs5/HFIP/hfv3gfs/role.epic/UFS_SRW_data/develop/obs_data/ccpa/proc + TEST_MRMS_OBS_DIR: /mnt/lfs5/HFIP/hfv3gfs/role.epic/UFS_SRW_data/develop/obs_data/mrms/proc + TEST_NDAS_OBS_DIR: /mnt/lfs5/HFIP/hfv3gfs/role.epic/UFS_SRW_data/develop/obs_data/ndas/proc + TEST_NOHRSC_OBS_DIR: /mnt/lfs5/HFIP/hfv3gfs/role.epic/UFS_SRW_data/develop/obs_data/nohrsc/proc + DOMAIN_PREGEN_BASEDIR: /mnt/lfs5/HFIP/hfv3gfs/role.epic/UFS_SRW_data/develop/FV3LAM_pregen + PARTITION_DEFAULT: vjet,kjet,xjet QUEUE_DEFAULT: batch PARTITION_FCST: xjet QUEUE_FCST: batch @@ -21,17 +22,17 @@ platform: SCHED_NATIVE_CMD: --export=NONE SCHED_NATIVE_CMD_HPSS: -n 1 --export=NONE PRE_TASK_CMDS: '{ ulimit -s unlimited; ulimit -a; }' - TEST_EXTRN_MDL_SOURCE_BASEDIR: /mnt/lfs4/HFIP/hfv3gfs/role.epic/UFS_SRW_data/develop/input_model_data - TEST_PREGEN_BASEDIR: /mnt/lfs4/HFIP/hfv3gfs/role.epic/UFS_SRW_data/develop/FV3LAM_pregen - TEST_ALT_EXTRN_MDL_SYSBASEDIR_ICS: /mnt/lfs4/HFIP/hfv3gfs/role.epic/UFS_SRW_data/develop/dummy_FV3GFS_sys_dir - TEST_ALT_EXTRN_MDL_SYSBASEDIR_LBCS: /mnt/lfs4/HFIP/hfv3gfs/role.epic/UFS_SRW_data/develop/dummy_FV3GFS_sys_dir - TEST_VX_FCST_INPUT_BASEDIR: '{{ "/mnt/lfs4/HFIP/hfv3gfs/role.epic/UFS_SRW_data/develop/output_data/fcst_" }}{{ "ens" if (global.NUM_ENS_MEMBERS > 0) else "det" }}{{ "/{{workflow.PREDEF_GRID_NAME}}" }}{% raw %}{% endraw %}' - FIXaer: /mnt/lfs4/HFIP/hfv3gfs/role.epic/UFS_SRW_data/develop/fix/fix_aer - FIXgsm: /mnt/lfs4/HFIP/hfv3gfs/role.epic/UFS_SRW_data/develop/fix/fix_am - FIXlut: /mnt/lfs4/HFIP/hfv3gfs/role.epic/UFS_SRW_data/develop/fix/fix_lut - FIXorg: /mnt/lfs4/HFIP/hfv3gfs/role.epic/UFS_SRW_data/develop/fix/fix_orog - FIXsfc: /mnt/lfs4/HFIP/hfv3gfs/role.epic/UFS_SRW_data/develop/fix/fix_sfc_climo - FIXshp: /mnt/lfs4/HFIP/hfv3gfs/role.epic/UFS_SRW_data/develop/NaturalEarth + TEST_EXTRN_MDL_SOURCE_BASEDIR: /mnt/lfs5/HFIP/hfv3gfs/role.epic/UFS_SRW_data/develop/input_model_data + TEST_PREGEN_BASEDIR: 
/mnt/lfs5/HFIP/hfv3gfs/role.epic/UFS_SRW_data/develop/FV3LAM_pregen + TEST_ALT_EXTRN_MDL_SYSBASEDIR_ICS: /mnt/lfs5/HFIP/hfv3gfs/role.epic/UFS_SRW_data/develop/dummy_FV3GFS_sys_dir + TEST_ALT_EXTRN_MDL_SYSBASEDIR_LBCS: /mnt/lfs5/HFIP/hfv3gfs/role.epic/UFS_SRW_data/develop/dummy_FV3GFS_sys_dir + TEST_VX_FCST_INPUT_BASEDIR: '{{ "/mnt/lfs5/HFIP/hfv3gfs/role.epic/UFS_SRW_data/develop/output_data/fcst_" }}{{ "ens" if (global.NUM_ENS_MEMBERS > 0) else "det" }}{{ "/{{workflow.PREDEF_GRID_NAME}}" }}{% raw %}{% endraw %}' + FIXaer: /mnt/lfs5/HFIP/hfv3gfs/role.epic/UFS_SRW_data/develop/fix/fix_aer + FIXgsm: /mnt/lfs5/HFIP/hfv3gfs/role.epic/UFS_SRW_data/develop/fix/fix_am + FIXlut: /mnt/lfs5/HFIP/hfv3gfs/role.epic/UFS_SRW_data/develop/fix/fix_lut + FIXorg: /mnt/lfs5/HFIP/hfv3gfs/role.epic/UFS_SRW_data/develop/fix/fix_orog + FIXsfc: /mnt/lfs5/HFIP/hfv3gfs/role.epic/UFS_SRW_data/develop/fix/fix_sfc_climo + FIXshp: /mnt/lfs5/HFIP/hfv3gfs/role.epic/UFS_SRW_data/develop/NaturalEarth EXTRN_MDL_DATA_STORES: hpss aws nomads data: ics_lbcs: @@ -49,9 +50,9 @@ rocoto: tasks: metatask_run_ensemble: task_run_fcst_mem#mem#: - cores: '{{ task_run_fcst.PE_MEMBER01 // 1 }}' + cores: native: '--cpus-per-task {{ task_run_fcst.OMP_NUM_THREADS_RUN_FCST|int }} --exclusive {{ platform.SCHED_NATIVE_CMD }}' - nodes: + nodes: '{{ task_run_fcst.NNODES_RUN_FCST // 1 }}:ppn={{ task_run_fcst.PPN_RUN_FCST // 1 }}' nnodes: nodesize: ppn: diff --git a/ush/machine/noaacloud.yaml b/ush/machine/noaacloud.yaml index 50de28e751..2b27c0c139 100644 --- a/ush/machine/noaacloud.yaml +++ b/ush/machine/noaacloud.yaml @@ -2,6 +2,7 @@ platform: WORKFLOW_MANAGER: rocoto NCORES_PER_NODE: '{{ 44 if (user.ACCOUNT == "cz-epic") else 36 if (user.ACCOUNT == "ca-epic") else 28 }}' SCHED: slurm + WE2E_TEST_DATA: /contrib/EPIC/UFS_SRW_data/develop TEST_CCPA_OBS_DIR: /contrib/EPIC/UFS_SRW_data/develop/obs_data/ccpa/proc TEST_MRMS_OBS_DIR: /contrib/EPIC/UFS_SRW_data/develop/obs_data/mrms/proc TEST_NDAS_OBS_DIR: /contrib/EPIC/UFS_SRW_data/develop/obs_data/ndas/proc diff --git a/ush/machine/odin.yaml b/ush/machine/odin.yaml index 8cd1627da1..6bf51d707b 100644 --- a/ush/machine/odin.yaml +++ b/ush/machine/odin.yaml @@ -2,6 +2,7 @@ platform: WORKFLOW_MANAGER: rocoto NCORES_PER_NODE: 24 SCHED: slurm + WE2E_TEST_DATA: /scratch/ywang/UFS_SRW_App/develop DOMAIN_PREGEN_BASEDIR: /scratch/ywang/UFS_SRW_App/develop/FV3LAM_pregen PARTITION_DEFAULT: workq QUEUE_DEFAULT: workq diff --git a/ush/machine/orion.yaml b/ush/machine/orion.yaml index 3f74905c8f..285eb34ee2 100644 --- a/ush/machine/orion.yaml +++ b/ush/machine/orion.yaml @@ -2,6 +2,7 @@ platform: WORKFLOW_MANAGER: rocoto NCORES_PER_NODE: 40 SCHED: slurm + WE2E_TEST_DATA: /work/noaa/epic/role-epic/contrib/UFS_SRW_data/develop TEST_CCPA_OBS_DIR: /work/noaa/epic/role-epic/contrib/UFS_SRW_data/develop/obs_data/ccpa/proc TEST_MRMS_OBS_DIR: /work/noaa/epic/role-epic/contrib/UFS_SRW_data/develop/obs_data/mrms/proc TEST_NDAS_OBS_DIR: /work/noaa/epic/role-epic/contrib/UFS_SRW_data/develop/obs_data/ndas/proc @@ -13,13 +14,13 @@ platform: QUEUE_FCST: batch PARTITION_HPSS: service QUEUE_HPSS: batch - RUN_CMD_FCST: srun --export=ALL -n ${PE_MEMBER01} + RUN_CMD_FCST: srun --export=ALL RUN_CMD_POST: srun --export=ALL RUN_CMD_PRDGEN: srun --export=ALL RUN_CMD_SERIAL: time RUN_CMD_UTILS: srun --export=ALL RUN_CMD_NEXUS: srun --export=ALL - RUN_CMD_AQMLBC: srun --export=ALL -n ${NUMTS} + RUN_CMD_AQMLBC: srun --export=ALL -n ${numts} SCHED_NATIVE_CMD: --export=NONE PRE_TASK_CMDS: '{ ulimit -s unlimited; ulimit -a; }' 
TEST_EXTRN_MDL_SOURCE_BASEDIR: /work/noaa/epic/role-epic/contrib/UFS_SRW_data/develop/input_model_data @@ -32,6 +33,8 @@ platform: FIXorg: /work/noaa/epic/role-epic/contrib/UFS_SRW_data/develop/fix/fix_orog FIXsfc: /work/noaa/epic/role-epic/contrib/UFS_SRW_data/develop/fix/fix_sfc_climo FIXshp: /work/noaa/epic/role-epic/contrib/UFS_SRW_data/develop/NaturalEarth + FIXaqm: /work/noaa/epic/SRW-AQM_DATA/fix_aqm + FIXemis: /work/noaa/epic/SRW-AQM_DATA/fix_emis EXTRN_MDL_DATA_STORES: aws nomads data: ics_lbcs: @@ -43,3 +46,19 @@ data: HRRR: /work/noaa/epic/role-epic/contrib/UFS_SRW_data/develop/input_model_data/HRRR/${yyyymmdd}${hh} RAP: /work/noaa/epic/role-epic/contrib/UFS_SRW_data/develop/input_model_data/RAP/${yyyymmdd}${hh} GSMGFS: /work/noaa/epic/role-epic/contrib/UFS_SRW_data/develop/input_model_data/GSMGFS/${yyyymmdd}${hh} + +cpl_aqm_parm: + COMINfire_default: /work/noaa/epic/SRW-AQM_DATA/aqm_data/RAVE_fire + COMINgefs_default: /work/noaa/epic/SRW-AQM_DATA/aqm_data/GEFS_DATA + NEXUS_GFS_SFC_DIR: /work/noaa/epic/SRW-AQM_DATA/aqm_data/GFS_SFC_DATA + +rocoto: + tasks: + metatask_run_ensemble: + task_run_fcst_mem#mem#: + cores: + native: '--cpus-per-task {{ task_run_fcst.OMP_NUM_THREADS_RUN_FCST|int }} --exclusive {{ platform.SCHED_NATIVE_CMD }}' + nodes: '{{ task_run_fcst.NNODES_RUN_FCST // 1 }}:ppn={{ task_run_fcst.PPN_RUN_FCST // 1 }}' + nnodes: + nodesize: + ppn: diff --git a/ush/machine/stampede.yaml b/ush/machine/stampede.yaml index 270cd3d593..cb6f1216eb 100644 --- a/ush/machine/stampede.yaml +++ b/ush/machine/stampede.yaml @@ -2,6 +2,7 @@ platform: WORKFLOW_MANAGER: rocoto NCORES_PER_NODE: 68 SCHED: slurm + WE2E_TEST_DATA: /work2/00315/tg455890/stampede2/UFS_SRW_App/develop DOMAIN_PREGEN_BASEDIR: /work2/00315/tg455890/stampede2/UFS_SRW_App/develop/FV3LAM_pregen PARTITION_DEFAULT: normal QUEUE_DEFAULT: normal diff --git a/ush/machine/wcoss2.yaml b/ush/machine/wcoss2.yaml index b8c3625dff..6adb36e6e5 100644 --- a/ush/machine/wcoss2.yaml +++ b/ush/machine/wcoss2.yaml @@ -2,6 +2,7 @@ platform: WORKFLOW_MANAGER: rocoto NCORES_PER_NODE: 128 SCHED: pbspro + WE2E_TEST_DATA: /lfs/h2/emc/lam/noscrub/UFS_SRW_App/develop TEST_CCPA_OBS_DIR: /lfs/h2/emc/lam/noscrub/UFS_SRW_App/develop/obs_data/ccpa/proc TEST_MRMS_OBS_DIR: /lfs/h2/emc/lam/noscrub/UFS_SRW_App/develop/obs_data/mrms/proc TEST_NDAS_OBS_DIR: /lfs/h2/emc/lam/noscrub/UFS_SRW_App/develop/obs_data/ndas/proc diff --git a/ush/mrms_pull_topofhour.py b/ush/mrms_pull_topofhour.py index 310c5d97f9..58d24aeff1 100644 --- a/ush/mrms_pull_topofhour.py +++ b/ush/mrms_pull_topofhour.py @@ -6,57 +6,61 @@ import shutil import gzip -def main(): +def mrms_pull_topofhour(valid_time, outdir, source, product, level=None, add_vdate_subdir=True, debug=False): + """Identifies the MRMS file closest to the valid time of the forecast. + METplus is configured to look for an MRMS composite reflectivity file + for the valid time of the forecast being verified; since MRMS composite + reflectivity files do not always exactly match the valid time, this + script is used to identify and rename the MRMS composite reflectivity + file to match the valid time of the forecast.
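+ + Args: + valid_time (str): Valid time (in string format YYYYMMDDHH) to find MRMS data for + outdir (str): Destination directory for extracted MRMS data + source (str): Source directory where zipped MRMS data is found + product (str): Name of MRMS product (``MergedReflectivityQCComposite`` or ``EchoTop``) + level (str): MRMS product level; determined from ``product`` if not provided + add_vdate_subdir (bool): Whether to add a valid-date subdirectory to the source and destination directories + debug (bool): Whether to print additional debug output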
- #Parse input arguments - parser = argparse.ArgumentParser() - parser.add_argument('-v', '--valid_time', type=str, required=True, - help='Valid time (in string format YYYYMMDDHH) to find MRMS data for') - parser.add_argument('-o', '--outdir', type=str, required=True, - help='Destination directory for extracted MRMS data; data will be placed in `dest/YYYYMMDD`') - parser.add_argument('-s', '--source', type=str, required=True, - help='Source directory where zipped MRMS data is found') - parser.add_argument('-p', '--product', type=str, required=True, choices=['MergedReflectivityQCComposite', 'EchoTop'], - help='Name of MRMS product') - parser.add_argument('-l', '--level', type=str, help='MRMS product level', - choices=['_00.50_','_18_00.50_']) - parser.add_argument('-d', '--debug', action='store_true', help='Add additional debug output') - args = parser.parse_args() + Returns: + None + + Raises: + FileNotFoundError: If no valid file was found within 15 minutes of the valid + time of the forecast + + """ # Level is determined by MRMS product; set if not provided - if args.level is None: - if args.product == "MergedReflectivityQCComposite": - args.level = "_00.50_" - elif args.product == "EchoTop": - args.level = "_18_00.50_" + if level is None: + if product == "MergedReflectivityQCComposite": + level = "_00.50_" + elif product == "EchoTop": + level = "_18_00.50_" else: raise Exception("This should never have happened") # Copy and unzip MRMS files that are closest to top of hour # Done every hour on a 20-minute lag - YYYY = int(args.valid_time[0:4]) - MM = int(args.valid_time[4:6]) - DD = int(args.valid_time[6:8]) - HH = int(args.valid_time[8:19]) + YYYY = int(valid_time[0:4]) + MM = int(valid_time[4:6]) + DD = int(valid_time[6:8]) + HH = int(valid_time[8:19]) valid = datetime.datetime(YYYY, MM, DD, HH, 0, 0) valid_str = valid.strftime("%Y%m%d") - print(f"Pulling {args.valid_time} MRMS data") + print(f"Pulling MRMS product {product} for valid time: {valid_time}") # Set up working directory - dest_dir = os.path.join(args.outdir, valid_str) + valid_str_or_empty = '' + if add_vdate_subdir: + valid_str_or_empty = valid_str + + dest_dir = os.path.join(outdir, valid_str_or_empty) if not os.path.exists(dest_dir): os.makedirs(dest_dir) # Sort list of files for each MRMS product - if args.debug: + if debug: print(f"Valid date: {valid_str}") - search_path = f"{args.source}/{valid_str}/{args.product}*.gz" + search_path = os.path.join(source, valid_str_or_empty, product + "*.gz") file_list = [f for f in glob.glob(search_path)] - if args.debug: + if debug: print(f"Files found: \n{file_list}") time_list = [file_list[x][-24:-9] for x in range(len(file_list))] int_list = [ @@ -76,12 +80,12 @@ def main(): # Check to make sure closest file is within +/- 15 mins of top of the hour difference = abs(closest_timestamp - valid) if difference.total_seconds() <= 900: - filename1 = f"{args.product}{args.level}{closest_timestamp.strftime('%Y%m%d-%H%M%S')}.grib2.gz" - filename2 = f"{args.product}{args.level}{valid.strftime('%Y%m%d-%H')}0000.grib2" - origfile = os.path.join(args.source, valid_str, filename1) + filename1 = f"{product}{level}{closest_timestamp.strftime('%Y%m%d-%H%M%S')}.grib2.gz" + filename2 = f"{product}{level}{valid.strftime('%Y%m%d-%H')}0000.grib2" + origfile = os.path.join(source, valid_str_or_empty, filename1) target = os.path.join(dest_dir, filename2) - if args.debug: + if debug: print(f"Unzipping file {origfile} to {target}") @@ -93,4 +97,23 @@ def main(): raise FileNotFoundError(f"Did not find a 
valid file within 15 minutes of {valid}") if __name__ == "__main__": - main() + #Parse input arguments + parser = argparse.ArgumentParser() + parser.add_argument('-v', '--valid_time', type=str, required=True, + help='Valid time (in string format YYYYMMDDHH) to find MRMS data for') + parser.add_argument('-o', '--outdir', type=str, required=True, + help='Destination directory for extracted MRMS data; data will be placed in `dest/YYYYMMDD`') + parser.add_argument('-s', '--source', type=str, required=True, + help='Source directory where zipped MRMS data is found') + parser.add_argument('-p', '--product', type=str, required=True, choices=['MergedReflectivityQCComposite', 'EchoTop'], + help='Name of MRMS product') + parser.add_argument('-l', '--level', type=str, help='MRMS product level', + choices=['_00.50_','_18_00.50_']) + parser.add_argument('--add_vdate_subdir', default=True, required=False, action=argparse.BooleanOptionalAction, + help='Flag to add valid-date subdirectory to source and destination directories') + parser.add_argument('-d', '--debug', action='store_true', help='Add additional debug output') + args = parser.parse_args() + + #Consistency checks + + mrms_pull_topofhour(**vars(args)) diff --git a/ush/predef_grid_params.yaml b/ush/predef_grid_params.yaml index afcbdddf7c..3dbbcb87a5 100644 --- a/ush/predef_grid_params.yaml +++ b/ush/predef_grid_params.yaml @@ -294,10 +294,77 @@ # #----------------------------------------------------------------------- # -# The RRFS Alaska domain with ~13km cells. +# A 3km grid over Colorado, created for UFS FIRE (but can be used in any experiment) # -# Note: -# This grid has not been thoroughly tested (as of 20201027). +#----------------------------------------------------------------------- +# +"SUBCONUS_CO_3km": + GRID_GEN_METHOD: "ESGgrid" + ESGgrid_LON_CTR: -105.6041 + ESGgrid_LAT_CTR: 39.01737 + ESGgrid_DELX: 3000.0 + ESGgrid_DELY: 3000.0 + ESGgrid_NX: 210 + ESGgrid_NY: 200 + ESGgrid_PAZI: 0.0 + ESGgrid_WIDE_HALO_WIDTH: 6 + DT_ATMOS: 36 + LAYOUT_X: 5 + LAYOUT_Y: 5 + BLOCKSIZE: 40 + QUILTING: + WRTCMP_write_groups: 1 + WRTCMP_write_tasks_per_group: 5 + WRTCMP_output_grid: "lambert_conformal" + WRTCMP_cen_lon: -105.6041 + WRTCMP_cen_lat: 39.01736 + WRTCMP_stdlat1: 39.01736 + WRTCMP_stdlat2: 39.01736 + WRTCMP_nx: 207 + WRTCMP_ny: 197 + WRTCMP_lon_lwr_left: -109.0989 + WRTCMP_lat_lwr_left: 36.2794 + WRTCMP_dx: 3000.0 + WRTCMP_dy: 3000.0 +# +#----------------------------------------------------------------------- +# +# A 1km grid over Colorado, created for UFS FIRE (but can be used in any experiment) +# +#----------------------------------------------------------------------- +# +"SUBCONUS_CO_1km": + GRID_GEN_METHOD: "ESGgrid" + ESGgrid_LON_CTR: -105.6041 + ESGgrid_LAT_CTR: 39.01737 + ESGgrid_DELX: 1000.0 + ESGgrid_DELY: 1000.0 + ESGgrid_NX: 630 + ESGgrid_NY: 600 + ESGgrid_PAZI: 0.0 + ESGgrid_WIDE_HALO_WIDTH: 6 + DT_ATMOS: 6 + LAYOUT_X: 9 + LAYOUT_Y: 6 + BLOCKSIZE: 32 + QUILTING: + WRTCMP_write_groups: 1 + WRTCMP_write_tasks_per_group: 6 + WRTCMP_output_grid: "lambert_conformal" + WRTCMP_cen_lon: -105.6041 + WRTCMP_cen_lat: 39.01736 + WRTCMP_stdlat1: 39.01736 + WRTCMP_stdlat2: 39.01736 + WRTCMP_nx: 599 + WRTCMP_ny: 570 + WRTCMP_lon_lwr_left: -109.1096 + WRTCMP_lat_lwr_left: 36.2700 + WRTCMP_dx: 1000.0 + WRTCMP_dy: 1000.0 +# +#----------------------------------------------------------------------- +# +# The RRFS Alaska domain with ~13km cells.
# #----------------------------------------------------------------------- # @@ -334,9 +401,6 @@ # # The RRFS Alaska domain with ~3km cells. # -# Note: -# This grid has not been thoroughly tested (as of 20201027). -# #----------------------------------------------------------------------- # "RRFS_AK_3km": diff --git a/ush/python_utils/check_for_preexist_dir_file.py b/ush/python_utils/check_for_preexist_dir_file.py index 79666f8288..728eb260b3 100644 --- a/ush/python_utils/check_for_preexist_dir_file.py +++ b/ush/python_utils/check_for_preexist_dir_file.py @@ -9,14 +9,17 @@ def check_for_preexist_dir_file(path, method): - """Check for a preexisting directory or file and, if present, deal with it + """Checks for a preexisting directory or file and, if present, deals with it according to the specified method Args: - path: path to directory - method: could be any of [ 'delete', 'reuse', 'rename', 'quit' ] + path (str): Path to directory + method (str): Could be any of [ ``'delete'``, ``'reuse'``, ``'rename'``, ``'quit'`` ] Returns: None + Raises: + ValueError: If an invalid method for dealing with a pre-existing directory is specified + FileExistsError: If the specified directory or file already exists """ try: diff --git a/ush/python_utils/check_var_valid_value.py b/ush/python_utils/check_var_valid_value.py index 0c9bcc49c6..c862dd2260 100644 --- a/ush/python_utils/check_var_valid_value.py +++ b/ush/python_utils/check_var_valid_value.py @@ -2,13 +2,15 @@ def check_var_valid_value(var, values): - """Check if specified variable has a valid value + """Checks whether the specified variable has a valid value Args: - var: the variable - values: list of valid values + var: The variable + values (list): Valid values Returns: - True: if var has valid value, exit(1) otherwise + True: If ``var`` has a valid value; otherwise ``exit(1)`` + Raises: + ValueError: If ``var`` has an invalid value """ if not var: diff --git a/ush/python_utils/config_parser.py b/ush/python_utils/config_parser.py index ff9a0c277c..eb9efb1c27 100644 --- a/ush/python_utils/config_parser.py +++ b/ush/python_utils/config_parser.py @@ -1,8 +1,9 @@ #!/usr/bin/env python3 """ -This file provides utilities for processing different configuration file formats. +This file provides utilities for processing different configuration (config) file formats. Supported formats include: + a) YAML b) JSON c) SHELL @@ -10,7 +11,7 @@ e) XML Typical usage involves first loading the config file, then using the dictionary -returnded by load_config to make queries. +returned by ``load_config_file`` to make queries.
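+ +For example, a minimal sketch (assuming ``ush`` is on ``PYTHONPATH`` and a YAML config file named ``config.yaml`` exists): + +.. code-block:: python + + from python_utils.config_parser import load_config_file + + cfg = load_config_file("config.yaml") # returns a dictionary + print(cfg.keys())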
""" @@ -43,7 +44,14 @@ # YAML ########## def load_yaml_config(config_file): - """Safe load a yaml file""" + """ + Safe loads a YAML file + + Args: + config_file: Configuration file to parse + Returns: + cfg: A Python object containing the config file data + """ with open(config_file, "r") as f: cfg = yaml.safe_load(f) @@ -54,24 +62,34 @@ def load_yaml_config(config_file): try: class custom_dumper(yaml.Dumper): - """Custom yaml dumper to correct list indentation""" + """ + Custom YAML dumper to correct list indentation + + Args: + yaml.Dumper: A YAML Dumper object to custom format + + Returns: + A custom-formatted Dumper object + """ - def increase_indent(self, flow=False, indentless=False): - return super(custom_dumper, self).increase_indent(flow, False) + def _increase_indent(self, flow=False, indentless=False): + return super(custom_dumper, self)._increase_indent(flow, False) - def str_presenter(dumper, data): + def _str_presenter(dumper, data): if len(data.splitlines()) > 1: return dumper.represent_scalar("tag:yaml.org,2002:str", data, style="|") return dumper.represent_scalar("tag:yaml.org,2002:str", data) - yaml.add_representer(str, str_presenter) + yaml.add_representer(str, _str_presenter) except NameError: pass def cfg_to_yaml_str(cfg): - """Get contents of config file as a yaml string""" + """ + Gets contents of config file as a YAML string + """ return yaml.dump( cfg, sort_keys=False, default_flow_style=False @@ -79,16 +97,19 @@ def cfg_to_yaml_str(cfg): def cycstr(loader, node): - ''' Returns a cyclestring Element whose content corresponds to the - input node argument ''' + """ + Returns a cyclestring element whose content corresponds to the + input node argument + """ arg = loader.construct_scalar(node) return f'{arg}' def include(filepaths): - ''' Returns a dictionary that includes the contents of the referenced - YAML file(s). ''' + """ + Returns a dictionary that includes the contents of the referenced YAML file(s). + """ srw_path = pathlib.Path(__file__).resolve().parents[0].parents[0] @@ -104,15 +125,18 @@ def include(filepaths): return yaml.dump(cfg, sort_keys=False) def join_str(loader, node): - """Custom tag hangler to join strings""" + """ + Custom tag hangler to join strings + """ seq = loader.construct_sequence(node) return "".join([str(i) for i in seq]) def startstopfreq(loader, node): - - ''' Returns a Rocoto-formatted string for the contents of a cycledef - tag. Assume that the items in the node are env variables, and return - a Rocoto-formatted string''' + """ + Returns a Rocoto-formatted string for the contents of a ``cycledef`` + tag. Assumes that the items in the node are environment variables, and returns + a Rocoto-formatted string. 
+ """ args = loader.construct_sequence(node) @@ -122,7 +146,7 @@ def startstopfreq(loader, node): return f'{start}00 {stop}00 {freq}:00:00' -def nowtimestamp(loader, node): +def _nowtimestamp(loader, node): return "id_" + str(int(datetime.datetime.now().timestamp())) try: @@ -130,27 +154,31 @@ def nowtimestamp(loader, node): yaml.add_constructor("!include", include, Loader=yaml.SafeLoader) yaml.add_constructor("!join_str", join_str, Loader=yaml.SafeLoader) yaml.add_constructor("!startstopfreq", startstopfreq, Loader=yaml.SafeLoader) - yaml.add_constructor("!nowtimestamp", nowtimestamp ,Loader=yaml.SafeLoader) + yaml.add_constructor("!nowtimestamp", _nowtimestamp ,Loader=yaml.SafeLoader) except NameError: pass def path_join(arg): - """A filter for jinja2 that joins paths""" + """ + A filter for jinja2 that joins paths + """ return os.path.join(*arg) def days_ago(arg): - """A filter for jinja2 that gives us a date string for x number of - days ago""" + """ + A filter for jinja2 that gives us a date string for x number of + days ago + """ return (datetime.date.today() - datetime.timedelta(days=arg)).strftime("%Y%m%d00") def extend_yaml(yaml_dict, full_dict=None, parent=None): """ - Updates yaml_dict inplace by rendering any existing Jinja2 templates + Updates ``yaml_dict`` in place by rendering any existing Jinja2 templates that exist in a value. """ @@ -250,7 +278,9 @@ def extend_yaml(yaml_dict, full_dict=None, parent=None): # JSON ########## def load_json_config(config_file): - """Load json config file""" + """ + Loads JSON config file + """ try: with open(config_file, "r") as f: @@ -262,7 +292,9 @@ def load_json_config(config_file): def cfg_to_json_str(cfg): - """Get contents of config file as a json string""" + """ + Gets contents of config file as a JSON string + """ return json.dumps(cfg, sort_keys=False, indent=4) + "\n" @@ -271,7 +303,9 @@ def cfg_to_json_str(cfg): # SHELL ########## def load_shell_as_ini_config(file_name, return_string=1): - """Load shell config file with embedded structure in comments""" + """ + Loads shell config file with embedded structure in comments + """ # read contents and replace comments as sections with open(file_name, "r") as file: @@ -294,13 +328,13 @@ def load_shell_as_ini_config(file_name, return_string=1): def load_shell_config(config_file, return_string=0): - """Loads old style shell config files. - We source the config script in a subshell and gets the variables it sets + """Loads old-style shell config files. + We source the config script in a subshell and get the variables it sets Args: - config_file: path to config file script + config_file: Path to config file script Returns: - dictionary that should be equivalent to one obtained from parsing a yaml file. + Dictionary that should be equivalent to one obtained from parsing a YAML file. 
""" # First try to load it as a structured shell config file @@ -339,7 +373,9 @@ def load_shell_config(config_file, return_string=0): def cfg_to_shell_str(cfg, kname=None): - """Get contents of config file as shell script string""" + """ + Gets contents of config file as shell script string + """ shell_str = "" for k, v in cfg.items(): @@ -369,7 +405,9 @@ def cfg_to_shell_str(cfg, kname=None): # INI ########## def load_ini_config(config_file, return_string=0): - """Load a config file with a format similar to Microsoft's INI files""" + """ + Loads a config file with a format similar to Microsoft's INI files + """ if not os.path.exists(config_file): raise FileNotFoundError( @@ -391,7 +429,9 @@ def load_ini_config(config_file, return_string=0): def get_ini_value(config, section, key): - """Finds the value of a property in a given section""" + """ + Finds the value of a property in a given section + """ if not section in config: raise KeyError(f"Section not found: {section}") @@ -402,7 +442,9 @@ def get_ini_value(config, section, key): def cfg_to_ini_str(cfg, kname=None): - """Get contents of config file as ini string""" + """ + Gets contents of config file as INI string + """ ini_str = "" for k, v in cfg.items(): @@ -427,7 +469,9 @@ def cfg_to_ini_str(cfg, kname=None): # XML ########## def xml_to_dict(root, return_string): - """Convert an xml tree to dictionary""" + """ + Converts an XML tree to dictionary + """ cfg = {} for child in root: @@ -440,7 +484,7 @@ def xml_to_dict(root, return_string): def dict_to_xml(d, tag): - """Convert dictionary to an xml tree""" + """Converts dictionary to an XML tree""" elem = ET.Element(tag) for k, v in d.items(): @@ -456,7 +500,9 @@ def dict_to_xml(d, tag): def load_xml_config(config_file, return_string=0): - """Load xml config file""" + """ + Loads XML config file + """ tree = ET.parse(config_file) root = tree.getroot() @@ -465,7 +511,9 @@ def load_xml_config(config_file, return_string=0): def cfg_to_xml_str(cfg): - """Get contents of config file as a xml string""" + """ + Gets contents of config file as a XML string + """ root = dict_to_xml(cfg, "root") r = ET.tostring(root, encoding="unicode") @@ -479,11 +527,12 @@ def cfg_to_xml_str(cfg): # CONFIG utils ################## def flatten_dict(dictionary, keys=None): - """Flatten a recursive dictionary (e.g.yaml/json) to be one level deep + """ + Flattens a recursive dictionary (e.g.YAML/JSON) to be one level deep Args: - dictionary: the source dictionary - keys: list of keys on top level whose contents to flatten, if None all of them + dictionary: The source dictionary + keys (list): Keys on top level whose contents to flatten; if ``None``, then all of them Returns: A one-level deep dictionary for the selected set of keys """ @@ -499,11 +548,12 @@ def flatten_dict(dictionary, keys=None): def structure_dict(dict_o, dict_t): - """Structure a dictionary based on a template dictionary + """ + Structures a dictionary based on a template dictionary Args: - dict_o: dictionary to structure (flat one level structure) - dict_t: template dictionary used for structuring + dict_o: Dictionary to structure (flat one level structure) + dict_t: Template dictionary used for structuring Returns: A dictionary with contents of dict_o following structure of dict_t """ @@ -519,7 +569,7 @@ def structure_dict(dict_o, dict_t): def update_dict(dict_o, dict_t, provide_default=False): - """Update a dictionary with another + """Updates a dictionary with another Args: dict_o: flat dictionary used as source @@ -549,15 +599,16 @@ def 
update_dict(dict_o, dict_t, provide_default=False): def check_structure_dict(dict_o, dict_t): - """Check if a dictionary's structure follows a template. + """ + Checks if a dictionary's structure follows a template. The invalid entries are returned as a dictionary. - If all entries are valid, returns an empty dictionary + If all entries are valid, returns an empty dictionary. Args: - dict_o: target dictionary - dict_t: template dictionary to compare structure to + dict_o (dict): Target dictionary + dict_t (dict): Template dictionary to compare structure to Returns: - dict: Invalid key-value pairs. + dict: Invalid key-value pairs. """ inval = {} for k, v in dict_o.items(): @@ -573,10 +624,12 @@ def filter_dict(dict_o, keys_regex): - """Filter dictionary keys based on a list of keys + """ + Filters dictionary keys based on a list of keys + Args: - dict_o: the source dictionary - keys_regex: list of keys to retain (could be regex exp.) + dict_o: The source dictionary + keys_regex: Keys to retain (could be regex expression) """ keys = [] @@ -591,7 +644,12 @@ # CONFIG loader ################## def load_config_file(file_name, return_string=0): - """Load config file based on file name extension""" + """ + Loads config file based on file name extension + + Raises: + ValueError: If an unrecognized file extension is used. + """ ext = os.path.splitext(file_name)[1][1:] if ext == "sh": @@ -619,7 +677,9 @@ # CONFIG main ################## def cfg_main(): - """Main function for converting and formatting between different config file formats""" + """ + Converts and formats between different config file formats + """ parser = argparse.ArgumentParser( description="Utility for managing different config formats." diff --git a/ush/python_utils/create_symlink_to_file.py b/ush/python_utils/create_symlink_to_file.py index 363a49fa40..5d64000ec6 100644 --- a/ush/python_utils/create_symlink_to_file.py +++ b/ush/python_utils/create_symlink_to_file.py @@ -8,12 +8,12 @@ def create_symlink_to_file(target, symlink, relative=True): - """Create a symbolic link to the specified target file. + """Creates a symbolic link to the specified target file. Args: - target: target file - symlink: symbolic link to target file - relative: optional argument to specify relative symoblic link creation + target (str) : Target file + symlink (str) : Symbolic link to target file + relative (bool): Optional argument to specify relative symbolic link creation Returns: None """ diff --git a/ush/python_utils/define_macos_utilities.py b/ush/python_utils/define_macos_utilities.py index 4a73020850..f171b4c55d 100644 --- a/ush/python_utils/define_macos_utilities.py +++ b/ush/python_utils/define_macos_utilities.py @@ -8,7 +8,13 @@ def check_darwin(cmd): - """Check if darwin command exists""" + """Checks if Darwin command exists + + Args: + cmd: The command to check (e.g., ``gsed``, ``gln``) + Returns: + True if successful; otherwise prints error + """ (err, _, _) = run_command(f"command -v {cmd}") if err != 0: @@ -24,8 +30,8 @@ def check_darwin(cmd): def define_macos_utilities(): - """Set some environment variables for Darwin systems differently - The variables are: READLINK, SED, DATE_UTIL and LN_UTIL + """Sets some environment variables for Darwin systems differently. + The variables are: ``READLINK``, ``SED``, ``DATE_UTIL`` and ``LN_UTIL``.
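+ On macOS (Darwin), the BSD versions of these utilities differ from their GNU + counterparts, so the GNU versions (e.g., ``greadlink``, ``gsed``), if available, + are used instead.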
""" if os.uname()[0] == "Darwin": diff --git a/ush/python_utils/environment.py b/ush/python_utils/environment.py index c82c0a4fe9..427e190b9b 100644 --- a/ush/python_utils/environment.py +++ b/ush/python_utils/environment.py @@ -8,12 +8,12 @@ def str_to_date(s): - """Get python datetime object from string. + """Gets Python datetime object from string. Args: - s: a string + s (str): A string Returns: - datetime object or None + Datetime object or None """ v = None try: @@ -32,31 +32,30 @@ def str_to_date(s): def date_to_str(d, format="%Y%m%d%H%M"): - """Get string from python datetime object. - By default it converts to YYYYMMDDHHMM format unless - told otherwise by passing a different format + """Gets string from Python datetime object. + By default it converts to ``YYYYMMDDHHmm`` format unless told otherwise by passing a different format. Args: - d: datetime object + d (datetime.datetime): Datetime object + format (str): Format of the datetime string; default is ``"%Y%m%d%H%M"`` (see `format codes `_ for other options). Returns: - string in YYYYMMDDHHMM or shorter version of it + String in YYYYMMDDHHmm or shorter version of it """ v = d.strftime(format) return v def str_to_type(s, return_string=0): - """Check if the string contains a float, int, boolean, datetime, or just regular string. + """Checks whether the string is a float, int, boolean, datetime, or just regular string. This will be used to automatically convert environment variables to data types that are more convenient to work with. If you don't want this functionality, - pass return_string = 1 + pass ``return_string = 1``. Args: - s: a string - return_string: Set to 1 to return the string itself - Set to 2 to return the string itself only for a datetime object + s (str): A string + return_string (int): Set to ``1`` to return the string itself. Set to ``2`` to return the string itself only for a datetime object Returns: - a float, int, boolean, datetime, or the string itself when all else fails + A float, int, boolean, datetime, or the string itself when all else fails """ s = s.strip("\"'") if return_string != 1: @@ -91,13 +90,12 @@ def str_to_type(s, return_string=0): def type_to_str(v): - """Given a float/int/boolean/date or list of these types, gets a string - representing their values + """Gets a string representing the value of a given float, int, boolean, date or list of these types. Args: - v: a variable of the above types + v: A variable of the above types Returns: - a string + A string """ if isinstance(v, bool): return "TRUE" if v else "FALSE" @@ -111,11 +109,12 @@ def type_to_str(v): def list_to_str(v, oneline=False): - """Given a string or list of string, construct a string - to be used on right hand side of shell environement variables + """Given a string or list of strings, constructs a string + to be used on right hand side of shell environment variables. Args: - v: a string/number, list of strings/numbers, or null string('') + v: A string/number, list of strings/numbers, or null string(``''``) + oneline (bool): If the string is a single line (True) or multiple (False) ? Returns: A string """ @@ -134,13 +133,13 @@ def list_to_str(v, oneline=False): def str_to_list(v, return_string=0): - """Given a string, construct a string or list of strings. - Basically does the reverse operation of `list_to_string`. + """Constructs a string or list of strings based on the given string. + Basically does the reverse operation of ``list_to_str``. 
Args: - v: a string + v: A string Returns: - a string, list of strings or null string('') + A string, a list of strings, or a null string (``''``) """ if not isinstance(v, str): @@ -167,11 +166,11 @@ def str_to_list(v, return_string=0): def set_env_var(param, value): - """Set an environment variable + """Sets an environment variable. Args: - param: the variable to set - value: either a string, list of strings or None + param: The variable to set + value: A string, a list of strings, or None Returns: None """ @@ -180,12 +179,12 @@ def get_env_var(param): - """Get the value of an environement variable + """Gets the value of an environment variable Args: - param: the environement variable + param: The environment variable Returns: - Returns either a string, list of strings or None + A string, a list of strings, or None """ if not param in os.environ: @@ -195,30 +194,30 @@ def import_vars(dictionary=None, target_dict=None, env_vars=None): - """Import all (or select few) environment/dictionary variables as python global - variables of the caller module. Call this function at the beginning of a function + """Imports all (or a select few) environment/dictionary variables as Python global + variables of the caller module. Call this function at the beginning of a function that uses environment variables. - Note that for read-only environmental variables, calling this function once at the + Note that for read-only environment variables, calling this function once at the beginning should be enough. However, if the variable is modified in the module it is - called from, the variable should be explicitly tagged as `global`, and then its value - should be exported back to the environment with a call to export_vars() + called from, the variable should be explicitly tagged as ``global``, and then its value + should be exported back to the environment with a call to ``export_vars()``: + + .. code-block:: python import_vars() # import all environment variables global MY_VAR, MY_LIST_VAR MY_PATH = "/path/to/somewhere" MY_LIST_VAR.append("Hello") - export_vars() # these exports all global variables + export_vars() # this exports all global variables - There doesn't seem to an easier way of imitating the shell script doing way of things, which - assumes that everything is global unless specifically tagged local, while the opposite is true - for python. + This is because shell scripting assumes that everything is global unless specifically tagged local, while the opposite is true + for Python. Args: - dictionary: source dictionary (default=os.environ) - target_dict: target dictionary (default=caller module's globals()) - env_vars: list of selected environement/dictionary variables to import, or None, - in which case all environment/dictionary variables are imported + dictionary (dict): Source dictionary + target_dict (dict): Target dictionary + env_vars (list): List of selected environment variables to import or ``None``, in which case all environment variables are imported Returns: None """ @@ -240,15 +239,14 @@ def import_vars(dictionary=None, target_dict=None, env_vars=None): def export_vars(dictionary=None, source_dict=None, env_vars=None): - """Export all (or select few) global variables of the caller module's - to either the environement/dictionary. Call this function at the end of + """Exports all (or a select few) global variables of the caller module + to the environment/dictionary.
Call this function at the end of a function that updates environment variables. Args: - dictionary: target dictionary to set (default=os.environ) - source_dict: source dictionary (default=caller modules globals()) - env_vars: list of selected environement/dictionary variables to export, or None, - in which case all environment/dictionary variables are exported + dictionary (dict): Target dictionary to set + source_dict (dict): Source dictionary + env_vars (list): List of selected environment variables to export or ``None``, in which case all environment variables are exported Returns: None """ diff --git a/ush/python_utils/filesys_cmds_vrfy.py b/ush/python_utils/filesys_cmds_vrfy.py index ca986f9693..c33e9b311c 100644 --- a/ush/python_utils/filesys_cmds_vrfy.py +++ b/ush/python_utils/filesys_cmds_vrfy.py @@ -5,13 +5,13 @@ def cmd_vrfy(cmd, *args): - """Execute system command + """Executes system command Args: - cmd: the command - *args: its arguments + cmd (str): The command + *args: Its arguments Returns: - Exit code + ret: Exit code """ cmd += " " + " ".join([str(a) for a in args]) @@ -22,28 +22,77 @@ def cmd_vrfy(cmd, *args): def cp_vrfy(*args): + """Checks that the ``cp`` command executed successfully + + Args: + *args: Iterable object containing command with its command line arguments + Returns: + Exit code + """ return cmd_vrfy("cp", *args) def rsync_vrfy(*args): + """Checks that the ``rsync`` command executed successfully + + Args: + *args: Iterable object containing command with its command line arguments + Returns: + Exit code + """ return cmd_vrfy("rsync", *args) def mv_vrfy(*args): + """Checks that the ``mv`` command executed successfully + + Args: + *args: Iterable object containing command with its command line arguments + Returns: + Exit code + """ return cmd_vrfy("mv", *args) def rm_vrfy(*args): + """Checks that the ``rm`` command executed successfully + + Args: + *args: Iterable object containing command with its command line arguments + Returns: + Exit code + """ return cmd_vrfy("rm", *args) def ln_vrfy(*args): + """Checks that the ``ln`` command executed successfully + + Args: + *args: Iterable object containing command with its command line arguments + Returns: + Exit code + """ return cmd_vrfy("ln", *args) def mkdir_vrfy(*args): + """Checks that the ``mkdir`` command executed successfully + + Args: + *args: Iterable object containing command with its command line arguments + Returns: + Exit code + """ return cmd_vrfy("mkdir", *args) def cd_vrfy(*args): + """Changes to the specified directory + + Args: + *args: Path of the directory to change to + Returns: + None + """ return os.chdir(*args) diff --git a/ush/python_utils/fv3write_parms_lambert.py b/ush/python_utils/fv3write_parms_lambert.py index c94400710a..56c42b10b8 100755 --- a/ush/python_utils/fv3write_parms_lambert.py +++ b/ush/python_utils/fv3write_parms_lambert.py @@ -1,21 +1,33 @@ #!/usr/bin/env python -# -# To use this tool, you should source the regional workflow environment -# $> source env/wflow_xxx.env -# and activate pygraf (or any one with cartopy installation) -# $> conda activate pygraf -# + +""" +To use this tool, first source the workflow environment: + +.. code-block:: console + + $> module use /path/to/ufs-srweather-app/modulefiles + $> module load wflow_<platform> + $> conda activate srw_graphics + +Make sure to adjust the ``modulefiles`` path and ``<platform>`` to correspond to your system.
+Even though the message printed to the console will direct users to run ``conda activate srw_app``, this script requires an environment (e.g., ``srw_graphics``) that includes ``pygraf`` or ``cartopy``. The ``srw_graphics`` environment uses ``cartopy`` for plotting. If the ``srw_app`` environment is already loaded, users can simply run ``conda activate srw_graphics`` to switch environments. + +For usage instructions, run: + +.. code-block:: console + + $> python fv3write_parms_lambert.py -h + +""" import argparse import cartopy.crs as ccrs -# @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ -# -# Main function to return parameters for the FV3 write component. -# - if __name__ == "__main__": + """ + Main function to return parameters for the FV3 write component. + """ parser = argparse.ArgumentParser( description="Determine FV3 write component lat1/lon1 for Lamert Conformal map projection", diff --git a/ush/python_utils/misc.py b/ush/python_utils/misc.py index e5e320ae43..a425f6687c 100644 --- a/ush/python_utils/misc.py +++ b/ush/python_utils/misc.py @@ -4,36 +4,36 @@ def uppercase(s): - """Function to convert a given string to uppercase + """Converts a given string to uppercase Args: - s: the string - Return: - Uppercased str + s (str): The string to change to uppercase + Returns: + Uppercased string """ return s.upper() def lowercase(s): - """Function to convert a given string to lowercase + """Converts a given string to lowercase Args: - s: the string - Return: - Lowercase str + s (str): The string to change to lowercase + Returns: + Lowercased string """ return s.lower() def find_pattern_in_str(pattern, source): - """Find regex pattern in a string + """Finds a regular expression (regex) pattern in a string Args: - pattern: regex expression - source: string - Return: + pattern (str): Regex pattern + source (str): Text string to search for regex pattern + Returns: A tuple of matched groups or None """ pattern = re.compile(pattern) @@ -43,12 +43,12 @@ def find_pattern_in_str(pattern, source): def find_pattern_in_file(pattern, file_name): - """Find regex pattern in a file + """Finds a regular expression (regex) pattern in a file Args: - pattern: regex expression - file_name: name of text file - Return: + pattern (str): Regex pattern + file_name (str): Name of text file + Returns: A tuple of matched groups or None """ pattern = re.compile(pattern) diff --git a/ush/python_utils/print_input_args.py b/ush/python_utils/print_input_args.py index 6d2a6ec2f1..7e7d7cd8a6 100644 --- a/ush/python_utils/print_input_args.py +++ b/ush/python_utils/print_input_args.py @@ -12,9 +12,9 @@ def print_input_args(valid_args): """Prints function arguments for debugging purposes Args: - valid_args: dictionary of arg-value pairs + valid_args (dict): Dictionary of argument-value pairs Returns: - Number of printed arguments + num_valid_args: Number of printed arguments """ # get verbosity from environment diff --git a/ush/python_utils/print_msg.py b/ush/python_utils/print_msg.py index 078d708ba9..fc173199cb 100644 --- a/ush/python_utils/print_msg.py +++ b/ush/python_utils/print_msg.py @@ -7,14 +7,12 @@ def print_err_msg_exit(error_msg="", stack_trace=True): - """Function to print out an error message to stderr and exit. + """Prints out an error message to standard error and exits. It can optionally print the stack trace as well. 
Args: - error_msg : error message to print - stack_trace : set to True to print stack trace - Returns: - None + error_msg (str): Error message to print + stack_trace (bool): Set to ``True`` to print stack trace """ if stack_trace: traceback.print_stack(file=sys.stderr) @@ -25,15 +23,16 @@ def print_info_msg(info_msg, verbose=True): - """Function to print information message to stdout, when verbose - is set to True. It does proper "dedentation" that is needed for readability - of python code. + """ + Prints an informational message to standard output when ``verbose`` + is set to ``True``. It does proper "dedentation"/formatting that is needed for readability + of Python code. Args: - info_msg : info message to print - verbose : set to False to silence printing + info_msg (str): Info message to print + verbose (bool): Set to ``False`` to silence printing Returns: - True: if message is successfully printed + Boolean value: True if message is successfully printed; False if ``verbose`` is set to False. """ if verbose: @@ -43,13 +42,14 @@ def log_info(info_msg, verbose=True, dedent_=True): - """Function to print information message using the logging module. This function - should not be used if python logging has not been initialized. + """ + Prints information message using the logging module. This function + should not be used if Python logging has not been initialized. Args: - info_msg : info message to print - verbose : set to False to silence printing - dedent_ : set to False to disable "dedenting" (print string as-is) + info_msg (str): Info message to print + verbose (bool): Set to ``False`` to silence printing + dedent_ (bool): Set to ``False`` to disable "dedenting"/formatting and print string as-is Returns: None """ diff --git a/ush/python_utils/run_command.py b/ush/python_utils/run_command.py index 3ab0fce898..bf41ec96e2 100644 --- a/ush/python_utils/run_command.py +++ b/ush/python_utils/run_command.py @@ -4,10 +4,10 @@ def run_command(cmd): - """Run system command in a subprocess + """Runs system command in a subprocess Args: - cmd: command to execute + cmd (str): Command to execute Returns: Tuple of (exit code, std_out, std_err) """ diff --git a/ush/python_utils/xml_parser.py b/ush/python_utils/xml_parser.py index a6f5add2e5..38637892fa 100644 --- a/ush/python_utils/xml_parser.py +++ b/ush/python_utils/xml_parser.py @@ -4,26 +4,26 @@ def load_xml_file(xml_file): - """Loads xml file + """Loads XML file Args: - xml_file: path to xml file + xml_file: Path to XML file Returns: - root of the xml tree + tree: The parsed XML tree """ tree = ET.parse(xml_file) return tree def has_tag_with_value(tree, tag, value): - """Check if xml tree has a node with tag and value + """Checks if XML tree has a node with tag and value Args: - tree: the xml tree - tag: the tag - value: text of tag + tree (xml.etree.ElementTree): The XML tree + tag (str): The tag + value (str): Text of tag Returns: - Boolean + Boolean value """ for node in tree.iter(): if node.tag == tag and node.text == value: diff --git a/ush/retrieve_data.py b/ush/retrieve_data.py index 5acf9d5ce9..30f3c6aed7 100755 --- a/ush/retrieve_data.py +++ b/ush/retrieve_data.py @@ -1,28 +1,19 @@ #!/usr/bin/env python3 # pylint: disable=logging-fstring-interpolation """ -This script helps users pull data from known data streams, including -URLS and HPSS (only on supported NOAA platforms), or from user-supplied -data locations on disk.
-
-Several supported data streams are included in
-parm/data_locations.yml, which provides locations and naming
-conventions for files commonly used with the SRW App. Provide the file
-to this tool via the --config flag. Users are welcome to provide their
-own file with alternative locations and naming conventions.
-
-When using this script to pull from disk, the user is required to
-provide the path to the data location, which can include Python
-templates. The file names follow those included in the --config file by
-default, or can be user-supplied via the --file_name flag. That flag
-takes a YAML-formatted string that follows the same conventions outlined
-in the parm/data_locations.yml file for naming files.
+This script helps users pull data from known data streams, including URLs and HPSS (only on supported NOAA platforms), or from user-supplied data locations on disk.
+
+Several supported data streams are included in ``parm/data_locations.yml``, which provides locations and naming conventions for files commonly used with the SRW App. Provide the file to this tool via the ``--config`` flag. Users are welcome to provide their own file with alternative locations and naming conventions.
+
+When using this script to pull from disk, the user is required to provide the path to the data location, which can include Python templates. The file names follow those included in the ``--config`` file by default or can be user-supplied via the ``--file_name`` flag. That flag
+takes a YAML-formatted string that follows the same conventions outlined in the ``parm/data_locations.yml`` file for naming files.
 
 To see usage for this script:
+
+.. code-block:: console
+
     python retrieve_data.py -h
 
-Also see the parse_args function below.
 """
 
 import argparse
@@ -44,13 +35,22 @@
 
 
 def clean_up_output_dir(expected_subdir, local_archive, output_path, source_paths):
-    """Remove expected sub-directories and existing_archive files on
-    disk once all files have been extracted and put into the specified
-    output location."""
+    """Removes expected subdirectories and ``existing_archive`` files on disk once all files have been extracted and put into the specified output location.
+
+    Args:
+        expected_subdir (str): Expected subdirectories
+        local_archive (str): Name of the local archive file
+        output_path (str): Path to a location on disk. Path is expected to exist.
+        source_paths (list): Paths to the source files to clean up
+
+    Returns:
+        unavailable (dict): A dictionary of unavailable files
+    """
 
     unavailable = {}
     expand_source_paths = []
     logging.debug(f"Cleaning up local paths: {source_paths}")
+    logging.debug(f"Looking for these local paths under directory: {os.getcwd()}")
    for p in source_paths:
         expand_source_paths.extend(glob.glob(p.lstrip("/")))
 
@@ -82,10 +82,16 @@ def clean_up_output_dir(expected_subdir, local_archive, output_path, source_path
 
 def copy_file(source, destination, copy_cmd):
     """
-    Copy a file from a source and place it in the destination location.
-    Return a boolean value reflecting the state of the copy.
-
+    Copies a file from a source and places it in the destination location. Assumes destination exists.
+
+    Args:
+        source (str): Directory where file currently resides
+        destination (str): Directory that the file should be moved to
+        copy_cmd (str): Copy command (e.g., ``cp``, ``ln -sf``)
+
+    Returns:
+        A boolean value reflecting whether the copy was successful (True) or unsuccessful (False)
     """
 
     if not os.path.exists(source):
@@ -110,8 +116,13 @@ def copy_file(source, destination, copy_cmd):
 
 def check_file(url):
     """
-    Check that a file exists at the expected URL.
-    Return boolean value based on the response.
+    Checks that a file exists at the expected URL.
+
+    Args:
+        url (str): URL for file to be downloaded
+
+    Returns:
+        Boolean value (True if ``status_code == 200`` or False otherwise)
     """
     status_code = urllib.request.urlopen(url).getcode()
     return status_code == 200
@@ -119,14 +130,13 @@ def check_file(url):
 
 def download_file(url):
     """
-    Download a file from a url source, and place it in a target location
-    on disk.
+    Downloads a file from a URL source and places it in a target location on disk.
 
-    Arguments:
-        url          url to file to be downloaded
+    Args:
+        url (str): URL for file to be downloaded
 
-    Return:
-        boolean value reflecting state of download.
+    Returns:
+        Boolean value reflecting whether the download was successful (True) or unsuccessful (False)
     """
 
     # wget flags:
@@ -154,18 +164,24 @@ def download_file(url):
 
 def arg_list_to_range(args):
     """
-    Given an argparse list argument, return the sequence to process.
+    Returns the sequence to process, given an ``argparse`` list.
 
     The length of the list will determine what sequence items are returned:
 
-      Length = 1:   A single item is to be processed
-      Length = 2:   A sequence of start, stop with increment 1
-      Length = 3:   A sequence of start, stop, increment
-      Length > 3:   List as is
+    * Length = 1: A single item is to be processed
+    * Length = 2: A sequence of start, stop with increment 1
+    * Length = 3: A sequence of start, stop, increment
+    * Length > 3: List as is
 
-    argparse should provide a list of at least one item (nargs='+').
+    ``argparse`` should provide a list of at least one item (``nargs='+'``).
 
     Must ensure that the list contains integers.
+
+    Args:
+        args (list): An ``argparse`` list argument
+
+    Returns:
+        args: The sequence of items to process
     """
 
     args = args if isinstance(args, list) else list(args)
@@ -179,25 +195,25 @@ def arg_list_to_range(args):
 
 def fill_template(template_str, cycle_date, templates_only=False, **kwargs):
-    """Fill in the provided template string with date time information,
-    and return the resulting string.
+    """Fills in the provided template string with date time information, and returns the
+    resulting string.
 
-    Arguments:
-        template_str    a string containing Python templates
-        cycle_date      a datetime object that will be used to fill in
-                        date and time information
-        templates_only  boolean value. When True, this function will only
-                        return the templates available.
+    Args:
+        template_str (str): A string containing Python templates
+        cycle_date (datetime.datetime): A datetime object that will be used to fill in date
+                              and time information
+        templates_only (bool): When ``True``, this function will only return the templates
+                              available.
 
     Keyword Args:
-        ens_group   a number associated with a bin where ensemble
-                    members are stored in archive files
-        fcst_hr     an integer forecast hour. string formatting should
-                    be included in the template_str
-        mem         a single ensemble member. should be a positive integer value
+        ens_group (int): A number associated with a bin where ensemble members are stored in
+                         archive files.
+        fcst_hr (int): An integer forecast hour. String formatting should be included in the
+                       ``template_str``.
+        mem (int): A single ensemble member. Should be a positive integer value.
-    Return:
-        filled template string
+    Returns:
+        Filled template string
     """
 
     # Parse keyword args
@@ -206,6 +222,8 @@ def fill_template(template_str, cycle_date, templates_only=False, **kwargs):
     mem = kwargs.get("mem", "")
     # -----
 
+    # Set some variables to make the format statement more concise
+    f_date = cycle_date + dt.timedelta(hours=fcst_hr)
     cycle_hour = cycle_date.strftime("%H")
 
     # One strategy for binning data files at NCEP is to put them into 6
@@ -225,17 +243,26 @@ def fill_template(template_str, cycle_date, templates_only=False, **kwargs):
         ens_group=ens_group,
         fcst_hr=fcst_hr,
         dd=cycle_date.strftime("%d"),
+        fdd=f_date.strftime("%d"),
         hh=cycle_hour,
+        fhh=f_date.strftime("%H"),
         hh_even=hh_even,
         jjj=cycle_date.strftime("%j"),
+        fjjj=f_date.strftime("%j"),
         mem=mem,
         min=cycle_date.strftime("%M"),
         mm=cycle_date.strftime("%m"),
+        fmm=f_date.strftime("%m"),
         yy=cycle_date.strftime("%y"),
+        fyy=f_date.strftime("%y"),
         yyyy=cycle_date.strftime("%Y"),
+        fyyyy=f_date.strftime("%Y"),
         yyyymm=cycle_date.strftime("%Y%m"),
+        fyyyymm=f_date.strftime("%Y%m"),
         yyyymmdd=cycle_date.strftime("%Y%m%d"),
+        fyyyymmdd=f_date.strftime("%Y%m%d"),
         yyyymmddhh=cycle_date.strftime("%Y%m%d%H"),
+        fyyyymmddhh=f_date.strftime("%Y%m%d%H"),
     )
 
     if templates_only:
@@ -246,7 +273,13 @@ def fill_template(template_str, cycle_date, templates_only=False, **kwargs):
 
 def create_target_path(target_path):
     """
-    Append target path and create directory for ensemble members
+    Creates the target directory (e.g., for ensemble members) if it does not already exist
+
+    Args:
+        target_path (str): Target path
+
+    Returns:
+        target_path: The target path
     """
     if not os.path.exists(target_path):
         os.makedirs(target_path)
@@ -258,7 +291,18 @@ def find_archive_files(paths, file_names, cycle_date, ens_group):
 
     """Given an equal-length set of archive paths and archive file
     names, and a cycle date, check HPSS via hsi to make sure at least
     one set exists. Return a dict of the paths of the existing archive, along with
-    the item in set of paths that was found."""
+    the item in set of paths that was found.
+
+    Args:
+        paths (list): Archive paths
+        file_names (list): Archive file names
+        cycle_date (int): Cycle date (YYYYMMDDHH or YYYYMMDDHHmm format)
+        ens_group (int): A number associated with a bin where ensemble members are stored
+                         in archive files
+
+    Returns:
+        A tuple containing (existing_archives, list_item) or ("", 0)
+    """
 
     zipped_archive_file_paths = zip(paths, file_names)
 
@@ -291,19 +335,18 @@ def find_archive_files(paths, file_names, cycle_date, ens_group):
 
 def get_file_templates(cla, known_data_info, data_store, use_cla_tmpl=False):
-    """Returns the file templates requested by user input, either from
-    the command line, or from the known data information dict.
+    """Returns the file templates requested by user input, either from the command line,
+    or from the known data information dictionary.
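The new forecast-valid-time keys above (``fdd``, ``fhh``, ``fyyyymmddhh``, etc.) are ordinary ``str.format`` placeholders computed from the cycle start time shifted by the forecast hour. A minimal standalone sketch of that behavior (the helper name and example template here are illustrative only, not part of the App):

.. code-block:: python

    from datetime import datetime, timedelta

    def fill_template_sketch(template_str, cycle_date, fcst_hr=0):
        # The f* keys come from the forecast valid time: the cycle start
        # time shifted forward by the forecast hour.
        f_date = cycle_date + timedelta(hours=fcst_hr)
        return template_str.format(
            yyyymmddhh=cycle_date.strftime("%Y%m%d%H"),
            fcst_hr=fcst_hr,
            fyyyymmddhh=f_date.strftime("%Y%m%d%H"),
        )

    # A 6-hour forecast from the 2024061918 cycle is valid at 2024062000:
    print(fill_template_sketch("{yyyymmddhh}_f{fcst_hr:03d}_valid{fyyyymmddhh}",
                               datetime(2024, 6, 19, 18), fcst_hr=6))
    # -> 2024061918_f006_valid2024062000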
-    Arguments:
+    Args:
 
-        cla              command line arguments Namespace object
-        known_data_info  dict from data_locations yaml file
-        data_store       string corresponding to a key in the
-                         known_data_info dict
-        use_cla_tmpl     boolean on whether to check cla for templates
+        cla (argparse.Namespace): Command line arguments (Namespace object)
+        known_data_info (dict): Dictionary from ``data_locations.yml`` file
+        data_store (str): String corresponding to a key in the ``known_data_info`` dictionary
+        use_cla_tmpl (bool): Whether to check command line arguments for templates
 
     Returns:
-        file_templates   a list of file templates
+        file_templates: A list of file templates
     """
 
     file_templates = known_data_info.get(data_store, {}).get("file_names")
@@ -326,7 +369,7 @@ def get_file_templates(cla, known_data_info, data_store, use_cla_tmpl=False):
         file_templates = file_templates[cla.file_set]
     if not file_templates:
         msg = "No file naming convention found. They must be provided \
-            either on the command line or on in a config file."
+            either on the command line or in a config file."
         raise argparse.ArgumentTypeError(msg)
     return file_templates
 
@@ -335,29 +378,25 @@ def get_requested_files(cla, file_templates, input_locs, method="disk", **kwargs
 
     # pylint: disable=too-many-locals
 
-    """This function copies files from disk locations
-    or downloads files from a url, depending on the option specified for
-    user.
-
-    This function expects that the output directory exists and is
-    writeable.
+    """Copies files from disk locations or downloads files from a URL, depending on the option
+    specified by the user.
 
-    Arguments:
+    This function expects that the output directory exists and is writeable.
 
-        cla            Namespace object containing command line arguments
-        file_templates a list of file templates
-        input_locs     A string containing a single data location, either a url
-                       or disk path, or a list of paths/urls.
-        method         Choice of disk or download to indicate protocol for
-                       retrieval
+    Args:
+        cla (argparse.Namespace): Command line arguments (Namespace object)
+        file_templates (list): A list of file templates
+        input_locs (str or list): A string containing a single data location, either a URL or
+                              disk path, or a list of paths/URLs.
+        method (str): Choice of ``"disk"`` or ``"download"`` to indicate protocol for
+                              retrieval
 
-    Keyword args:
-        members        a list integers corresponding to the ensemble members
-        check_all      boolean flag that indicates all urls should be
-                       checked for all files
+    Keyword Args:
+        members (list): A list of integers corresponding to the ensemble members
+        check_all (bool): Flag that indicates whether all URLs should be checked for all files
 
     Returns:
-        unavailable  a list of locations/files that were unretrievable
+        unavailable (list): A list of locations/files that were unretrievable
    """
 
     members = kwargs.get("members", "")
@@ -444,14 +483,13 @@ def get_requested_files(cla, file_templates, input_locs, method="disk", **kwargs
 
 def hsi_single_file(file_path, mode="ls"):
 
-    """Call hsi as a subprocess for Python and return information about
-    whether the file_path was found.
-
-    Arguments:
-        file_path    path on HPSS
-        mode         the hsi command to run. ls is default. may also
-                     pass "get" to retrieve the file path
+    """Calls ``hsi`` as a subprocess for Python and returns information about whether the
+    ``file_path`` was found.
 
+    Args:
+        file_path (str): File path on HPSS
+        mode (str): The ``hsi`` command to run. ``ls`` is default. May also pass ``get``
+                    to retrieve the file path.
""" cmd = f"hsi {mode} {file_path}" @@ -473,17 +511,29 @@ def hpss_requested_files(cla, file_names, store_specs, members=-1, ens_group=-1) # pylint: disable=too-many-locals - """This function interacts with the "hpss" protocol in a provided - data store specs file to download a set of files requested by the - user. Depending on the type of archive file (zip or tar), it will - either pull the entire file and unzip it, or attempt to pull - individual files from a tar file. + """This function interacts with the "hpss" protocol in a provided data store specs file to + download a set of files requested by the user. Depending on the type of archive file (``zip`` + or ``tar``), it will either pull the entire file and unzip it or attempt to pull individual + files from a tar file. - It cleans up local disk after files are deemed available to remove - any empty subdirectories that may still be present. + It cleans up the local disk after files are deemed available in order to remove any empty + subdirectories that may still be present. - This function exepcts that the output directory exists and is - writable. + This function exepcts that the output directory exists and is writable. + + Args: + cla (str): Command line arguments (Namespace object) + file_names (list): List of file names + store_specs (dict): Data-store specifications (specs) file + members (list): A list of integers corresponding to the ensemble members + ens_group (int): A number associated with a bin where ensemble members are stored in + archive files + + Returns: + A Python set of unavailable files + + Raises: + Exception: If there is an error running the archive extraction command """ members = [-1] if members == -1 else members @@ -629,15 +679,28 @@ def hpss_requested_files(cla, file_names, store_specs, members=-1, ens_group=-1) def load_str(arg): - """Load a dict string safely using YAML. Return the resulting dict.""" + """Loads a dictionary string safely using YAML. + + Args: + arg (str): A string representation of a dictionary + + Returns: + A resulting dictionary + """ return yaml.load(arg, Loader=yaml.SafeLoader) def config_exists(arg): """ - Check to ensure that the provided config file exists. If it does, - load it with YAML's safe loader and return the resulting dict. + Checks to ensure that the provided config file exists. If it does, load it with YAML's safe + loader and return the resulting dictionary. + + Args: + arg (str): Path to a configuration file + + Returns: + cfg: A dictionary representation of the configuration file contents """ # Check for existence of file @@ -657,19 +720,24 @@ def pair_locs_with_files(input_locs, file_templates, check_all): contains the multiple locations and file templates for files that should be searched in those locations. - check_all indicates that all locations should be paired with all - avaiable file templates. - The different possibilities: - 1. Get one or more files from a single path/url - 2. Get multiple files from multiple corresponding - paths/urls - 3. Check all paths for all file templates until files are - found - The default will be to handle #1 and #2. #3 will be - indicated by a flag in the yaml: "check_all: True" + #. Get one or more files from a single path/URL + #. Get multiple files from multiple corresponding paths/URLs + #. Check all paths for all file templates until files are found + + The default will be to handle #1 and #2. 
+    #3 will be indicated by a flag in the YAML:
+    ``check_all: True``
 
+    Args:
+        input_locs (list): Input locations
+        file_templates (list): File templates
+        check_all (bool): Flag that indicates whether all input locations should be checked
+                          for all available file templates
+
+    Returns:
+        locs_files (list): Iterable containing multiple locations and file templates for files
+                           that should be searched in those locations
     """
 
     if not check_all:
@@ -697,7 +765,16 @@ def pair_locs_with_files(input_locs, file_templates, check_all):
 
 def path_exists(arg):
-    """Check whether the supplied path exists and is writeable"""
+    """
+    Checks whether the supplied path exists and is writable
+
+    Args:
+        arg (str): File path
+    Returns:
+        The file path, if it exists and is writable
+    Raises:
+        argparse.ArgumentTypeError: If the path does not exist or is not writable
+    """
 
     if not os.path.exists(arg):
         msg = f"{arg} does not exist!"
@@ -710,10 +787,10 @@ def path_exists(arg):
     return arg
 
 
-def setup_logging(debug=False):
+def _setup_logging(debug=False):
 
-    """Calls initialization functions for logging package, and sets the
-    user-defined level for logging in the script."""
+    """Calls initialization functions for logging package, and sets the user-defined level for
+    logging in the script."""
 
     level = logging.INFO
     if debug:
@@ -724,11 +801,11 @@ def _setup_logging(debug=False):
         logging.info("Logging level set to DEBUG")
 
 
-def write_summary_file(cla, data_store, file_templates):
+def _write_summary_file(cla, data_store, file_templates) -> None:
 
-    """Given the command line arguments and the data store from which
-    the data was retrieved, write a bash summary file that is needed by
-    the workflow elements downstream."""
+    """Given the command line arguments and the data store from which the data was retrieved,
+    write a bash summary file that is needed by the workflow elements downstream.
+    """
 
     members = cla.members if isinstance(cla.members, list) else [-1]
     for mem in members:
@@ -757,8 +834,13 @@ def write_summary_file(cla, data_store, file_templates):
 
 def to_datetime(arg):
-    """Return a datetime object give a string like YYYYMMDDHH or
-    YYYYMMDDHHmm."""
+    """Converts a string to a datetime object
+
+    Args:
+        arg (str): String like ``YYYYMMDDHH`` or ``YYYYMMDDHHmm``
+    Returns:
+        A datetime object
+    """
     if len(arg) == 10:
         fmt_str = "%Y%m%d%H"
     elif len(arg) == 12:
        fmt_str = "%Y%m%d%H%M"
@@ -771,20 +853,28 @@
 
 
 def to_lower(arg):
-    """Return a string provided by arg into all lower case."""
+    """Converts a string to lowercase
+
+    Args:
+        arg (str): Any string
+    Returns:
+        An all-lowercase string
+    """
     return arg.lower()
 
 
 def main(argv):
     # pylint: disable=too-many-branches, too-many-statements
     """
-    Uses known location information to try the known locations and file
-    paths in priority order.
+    Uses known location information to try the known locations and file paths in priority order.
+
+    Args:
+        argv (list): List of command line arguments
     """
 
     cla = parse_args(argv)
-    setup_logging(cla.debug)
+    _setup_logging(cla.debug)
     print("Running script retrieve_data.py with args:", f"\n{('-' * 80)}\n{('-' * 80)}")
     for name, val in cla.__dict__.items():
         if name not in ["config"]:
@@ -885,7 +975,7 @@ def main(argv):
 
             # All files are found. Stop looking!
             # Write a variable definitions file for the data, if requested
             if cla.summary_file and not cla.check_file:
-                write_summary_file(cla, data_store, file_templates)
+                _write_summary_file(cla, data_store, file_templates)
             break
 
         logging.debug(f"Some unavailable files: {unavailable}")
@@ -898,9 +988,14 @@
 
 
 def get_ens_groups(members):
-    """Given a list of ensemble members, return a dict with keys for
-    the ensemble group, and values are lists of ensemble members
-    requested in that group."""
+    """Gets ensemble groups with the corresponding list of ensemble members in that group.
+
+    Args:
+        members (list): List of ensemble members.
+    Returns:
+        ens_groups: A dictionary where keys are the ensemble group and values are lists of
+                    ensemble members requested in that group
+    """
 
     if members is None:
         return {-1: [-1]}
@@ -918,9 +1013,15 @@ def get_ens_groups(members):
 
 def parse_args(argv):
     """
-    Function maintains the arguments accepted by this script. Please see
-    Python's argparse documenation for more information about settings of each
-    argument.
+    Maintains the arguments accepted by this script. Please see Python's
+    `argparse <https://docs.python.org/3/library/argparse.html>`_ documentation for more
+    information about settings of each argument.
+
+    Args:
+        argv (list): Command line arguments to parse
+
+    Returns:
+        args: An argparse.Namespace object (``parser.parse_args(argv)``)
     """
 
     description = (
diff --git a/ush/run_srw_tests.py b/ush/run_srw_tests.py
index 9e77be14b8..8a3c5731b8 100755
--- a/ush/run_srw_tests.py
+++ b/ush/run_srw_tests.py
@@ -5,17 +5,19 @@
 import time
 import argparse
 
-# Python class to handle the launching of a set of SRW tests
-# The expectation is to have a "clean" experiment directory with only new experiments
-# that are ready to run (e.g. no _old* experiments left around from previous tests
-# This script takes only one parameter "-e" or "--exptdir" which points to the
-# expt_basedir specified when the run_WE2E_tests.sh is run to set up the tests.
-# The script will work sequentially through each of the test directories and
-# launch the workflow for each with a call to launch_FV3LAM_wflow.sh
-# After the initial launch, the checkTests method is called to monitor the
-# status of each test and call the launch_FV3LAM_wflow.sh script repeatedly
-# in each uncompleted workflow until all workflows are done.
 class SRWTest:
+
+    """Python class to handle the launching of a set of SRW tests.
+    The expectation is to have a "clean" experiment directory with only new experiments
+    that are ready to run (e.g., no ``_old*`` experiments left around from previous tests).
+    This script takes only one parameter (``-e`` or ``--exptdir``) which points to the
+    ``expt_basedir`` specified when the ``run_WE2E_tests.py`` script is run to set up the tests.
+    The script will work sequentially through each of the test directories and
+    launch the workflow for each with a call to ``launch_FV3LAM_wflow.sh``.
+    After the initial launch, the ``checkTests`` method is called to monitor the
+    status of each test and to call the ``launch_FV3LAM_wflow.sh`` script repeatedly
+    in each uncompleted workflow until all workflows are done."""
+
     def __init__(self, exptdir):
         self.exptdir=exptdir
         # Get a list of test directories
@@ -34,9 +36,13 @@ def __init__(self, exptdir):
         self.checkTests()
 
     def checkTests(self):
+        """Checks the status of workflows/experiments; removes any that have failed or completed,
+        and continues running the launch command for those that aren't complete.
+
+        Returns:
+            None
+        """
         while(len(self.testDirectories) > 0):
-            # Only continue running launch command for workflows that aren't complete
-            # so check for any that have failed or completed and cull them from the list
             cmdstring="grep -L 'wflow_status =' */log.launch_FV3LAM_wflow | xargs dirname"
             try:
                 status= subprocess.check_output(cmdstring,shell=True).strip().decode('utf-8')
diff --git a/ush/set_cycle_and_obs_timeinfo.py b/ush/set_cycle_and_obs_timeinfo.py
new file mode 100644
index 0000000000..7ae764ad10
--- /dev/null
+++ b/ush/set_cycle_and_obs_timeinfo.py
@@ -0,0 +1,847 @@
+#!/usr/bin/env python3
+
+from datetime import datetime, timedelta, date
+from pprint import pprint
+from textwrap import dedent
+from python_utils import print_input_args, print_err_msg_exit
+import logging
+
+
+def set_cycle_dates(start_time_first_cycl, start_time_last_cycl, cycl_intvl,
+                    return_type='string'):
+    """
+    Returns a list containing the starting times of all the cycles in the
+    experiment.
+
+    If return_type is set to 'string' (the default value), the returned list
+    contains strings in the format 'YYYYMMDDHH'.  If it is set to 'datetime',
+    the returned list contains datetime objects.
+
+    Args:
+        start_time_first_cycl (datetime.datetime):
+            Starting time of first cycle.
+
+        start_time_last_cycl (datetime.datetime):
+            Starting time of last cycle.
+
+        cycl_intvl (datetime.timedelta):
+            Time interval between cycle start times.
+
+        return_type (str):
+            Type of the returned list.  Can be 'string' or 'datetime'.
+
+    Returns:
+        all_cdates (list):
+            Either a list of strings in the format 'YYYYMMDDHH' or a list of datetime
+            objects containing the cycle starting times, where 'YYYY' is the four-
+            digit year, 'MM' is the two-digit month, 'DD' is the two-digit day-of-
+            month, and 'HH' is the two-digit hour-of-day.
+    """
+
+    print_input_args(locals())
+
+    valid_values = ['string', 'datetime']
+    if return_type not in valid_values:
+        msg = dedent(f"""
+            Invalid value for optional argument "return_type":
+                {return_type = }
+            Valid values are:
+                {valid_values = }
+            """)
+        logging.error(msg)
+        raise ValueError(msg)
+
+    # iterate over cycles
+    all_cdates = []
+    cdate = start_time_first_cycl
+    while cdate <= start_time_last_cycl:
+        all_cdates.append(cdate)
+        cdate += cycl_intvl
+
+    if return_type == 'string':
+        all_cdates = [datetime.strftime(cdate, "%Y%m%d%H") for cdate in all_cdates]
+
+    return all_cdates
+
+
+def check_temporal_consistency_cumul_fields(
+    vx_config, cycle_start_times, fcst_len, fcst_output_intvl):
+    """
+    This function reads in a subset of the parameters in the verification
+    configuration dictionary and ensures that certain temporal constraints on
+    these parameters are satisfied.  It then returns an updated version of
+    the verification configuration dictionary that satisfies these constraints.
+
+    The constraints are on the accumulation intervals associated with the
+    cumulative field groups (and the corresponding observation types) that
+    are to be verified.  The constraints on each such accumulation interval
+    are as follows:
+
+    1) The accumulation interval is less than or equal to the forecast length.
+       This ensures that the forecast(s) can accumulate the field(s) in the
+       field group over that interval.
+
+    2) The obs availability interval evenly divides the accumulation interval.
+       This ensures that the obs can be added together to obtain accumulated
+       values of the obs field, e.g.
+       the 6-hourly NOHRSC obs can be added to
+       obtain 24-hour observed snowfall accumulations.  Note that this also
+       ensures that the accumulation interval is greater than or equal to the
+       obs availability interval.
+
+    3) The forecast output interval evenly divides the accumulation interval.
+       This ensures that the forecast output can be added together to obtain
+       accumulated values of the fields in the field group.  For example, if
+       the forecast output interval is 3 hours, the resulting 3-hourly APCP
+       outputs from the forecast can be added to obtain 6-hourly forecast APCP.
+       Note that this also ensures that the accumulation interval is greater
+       than or equal to the forecast output interval.
+
+    4) The hour-of-day at which the accumulated forecast values will be
+       available are a subset of the ones at which the accumulated obs
+       values are available.  This ensures that the accumulated fields
+       from the obs and forecast are valid at the same times and thus can
+       be compared in the verification.
+
+    If for a given field-accumulation combination any of these constraints
+    is violated, that accumulation is removed from the list of accumulations
+    to verify for that field.
+
+    Args:
+        vx_config (dict):
+            The verification configuration dictionary.
+
+        cycle_start_times (list):
+            List containing the starting times of the cycles in the experiment; each
+            list element is a datetime object.
+
+        fcst_len (datetime.timedelta):
+            The length of each forecast; a timedelta object.
+
+        fcst_output_intvl (datetime.timedelta):
+            Time interval between forecast output times; a timedelta object.
+
+    Returns:
+        vx_config (dict):
+            An updated version of the verification configuration dictionary.
+
+        fcst_obs_matched_times_all_cycles_cumul (dict):
+            Dictionary containing the times (in YYYYMMDDHH string format) at
+            which various field/accumulation combinations are output and at
+            which the corresponding obs type is also available.
+    """
+
+    # Set dictionary containing all field groups that consist of cumulative
+    # fields (regardless of whether those field groups are to be verified).
+    # The keys are the observation types and the values are the corresponding
+    # field groups.
+    obtype_to_fg_dict_cumul = {"CCPA": "APCP", "NOHRSC": "ASNOW"}
+
+    # Convert from datetime.timedelta objects to integers.
+    one_hour = timedelta(hours=1)
+    fcst_len_hrs = int(fcst_len/one_hour)
+    fcst_output_intvl_hrs = int(fcst_output_intvl/one_hour)
+
+    # Initialize one of the variables that will be returned to an empty
+    # dictionary.
+    fcst_obs_matched_times_all_cycles_cumul = dict()
+
+    for obtype, fg in obtype_to_fg_dict_cumul.items():
+
+        # If the current cumulative field is not in the list of fields to be
+        # verified, just skip to the next field.
+        if fg not in vx_config["VX_FIELD_GROUPS"]:
+            continue
+
+        # Initialize a sub-dictionary in one of the dictionaries to be returned.
+        fcst_obs_matched_times_all_cycles_cumul.update({fg: {}})
+
+        #
+        # Get the availability interval of the current observation type from the
+        # verification configuration dictionary and use it to calculate the hours-
+        # of-day at which the obs will be available.
+        #
+        # Get the obs availability interval.
+        config_var_name = "".join([obtype, "_OBS_AVAIL_INTVL_HRS"])
+        obs_avail_intvl_hrs = vx_config[config_var_name]
+        # Ensure that the obs availability interval evenly divides into 24.
+        remainder = 24 % obs_avail_intvl_hrs
+        if remainder != 0:
+            msg = dedent(f"""
+                The obs availability interval for obs of type {obtype} must divide evenly
+                into 24 but doesn't:
+                    {obs_avail_intvl_hrs = }
+                    24 % obs_avail_intvl_hrs = {remainder}
+                """)
+            logging.error(msg)
+            raise ValueError(msg)
+        # Assume that the obs are available at hour 0 of the day regardless
+        # of obs type.
+        obs_avail_hr_start = 0
+        obs_avail_hr_end = obs_avail_hr_start + 24
+        # Construct list of obs availability hours-of-day.
+        obs_avail_hrs_of_day = list(range(obs_avail_hr_start, obs_avail_hr_end, obs_avail_intvl_hrs))
+        obs_avail_hrs_of_day_str = ['%02d' % int(hr) for hr in obs_avail_hrs_of_day]
+        #
+        # Get the array of accumulation intervals for the current cumulative field.
+        # Then loop over them to ensure that the constraints listed above are
+        # satisfied.  If for a given accumulation one or more of the constraints
+        # is not satisfied, remove that accumulation from the list of accumulations
+        # for the current field.
+        #
+        accum_intvls_array_name = "".join(["VX_", fg, "_ACCUMS_HRS"])
+        accum_intvls_hrs = vx_config[accum_intvls_array_name]
+        #
+        # Loop through the accumulation intervals and check the temporal constraints
+        # listed above.
+        #
+        for accum_hrs in accum_intvls_hrs.copy():
+
+            accum_hh = f"{accum_hrs:02d}"
+            # Initialize a sub-sub-dictionary in one of the dictionaries to be returned.
+            fcst_obs_matched_times_all_cycles_cumul[fg][accum_hh] = []
+            #
+            # Make sure that the accumulation interval is less than or equal to the
+            # forecast length.
+            #
+            if accum_hrs > fcst_len_hrs:
+                msg = dedent(f"""
+                    The accumulation interval (accum_hrs) for the current cumulative field
+                    group (fg) and corresponding observation type (obtype) is greater than
+                    the forecast length (fcst_len_hrs):
+                        {fg = }
+                        {obtype = }
+                        {accum_hrs = }
+                        {fcst_len_hrs = }
+                    Thus, the forecast(s) cannot accumulate the field(s) in this field group
+                    over this interval.  Will remove this accumulation interval from the list
+                    of accumulation intervals to verify for this field group/obtype.
+                    """)
+                logging.info(msg)
+                accum_intvls_hrs.remove(accum_hrs)
+            #
+            # Make sure that the accumulation interval is evenly divisible by the
+            # observation availability interval.
+            #
+            if accum_hrs in accum_intvls_hrs:
+                rem_obs = accum_hrs % obs_avail_intvl_hrs
+                if rem_obs != 0:
+                    msg = dedent(f"""
+                        The accumulation interval (accum_hrs) for the current cumulative field
+                        group (fg) and corresponding observation type (obtype) is not evenly
+                        divisible by the observation type's availability interval (obs_avail_intvl_hrs):
+                            {fg = }
+                            {obtype = }
+                            {accum_hrs = }
+                            {obs_avail_intvl_hrs = }
+                            accum_hrs % obs_avail_intvl_hrs = {rem_obs}
+                        Thus, this observation type cannot be accumulated over this interval.
+                        Will remove this accumulation interval from the list of accumulation
+                        intervals to verify for this field group/obtype.
+                        """)
+                    logging.info(msg)
+                    accum_intvls_hrs.remove(accum_hrs)
+            #
+            # Make sure that the accumulation interval is evenly divisible by the
+            # forecast output interval.
+            #
+            if accum_hrs in accum_intvls_hrs:
+                rem_fcst = accum_hrs % fcst_output_intvl_hrs
+                if rem_fcst != 0:
+                    msg = dedent(f"""
+                        The accumulation interval (accum_hrs) for the current cumulative field
+                        group (fg) and corresponding observation type (obtype) is not evenly
+                        divisible by the forecast output interval (fcst_output_intvl):
+                            {fg = }
+                            {obtype = }
+                            {accum_hrs = }
+                            {fcst_output_intvl_hrs = }
+                            accum_hrs % fcst_output_intvl_hrs = {rem_fcst}
+                        Thus, the forecast(s) cannot accumulate the field(s) in this field group
+                        over this interval.  Will remove this accumulation interval from the list
+                        of accumulation intervals to verify for this field group/obtype.
+                        """)
+                    logging.info(msg)
+                    accum_intvls_hrs.remove(accum_hrs)
+            #
+            # Make sure that the hours-of-day at which the current cumulative field
+            # will be output are a subset of the hours-of-day at which the corresponding
+            # obs type is available.
+            #
+            if accum_hrs in accum_intvls_hrs:
+
+                # Initialize sets that will contain the forecast output times of the
+                # current cumulative field over all cycles.
+                fcst_output_times_all_cycles = set()
+
+                # Calculate the forecast output times of the current cumulative field
+                # for the current cycle and include them in the set of such times
+                # over all cycles.
+                accum = timedelta(hours=accum_hrs)
+                num_fcst_output_times_per_cycle = int(fcst_len/accum)
+                for start_time_crnt_cycle in cycle_start_times:
+                    fcst_output_times_crnt_cycle \
+                    = [start_time_crnt_cycle + (i+1)*accum
+                       for i in range(0, num_fcst_output_times_per_cycle)]
+                    fcst_output_times_all_cycles \
+                    = fcst_output_times_all_cycles | set(fcst_output_times_crnt_cycle)
+
+                # Get all the hours-of-day at which the current cumulative field will be
+                # output by the forecast.
+                fcst_output_times_all_cycles = sorted(fcst_output_times_all_cycles)
+                fcst_output_times_all_cycles_str \
+                = [datetime.strftime(dt_object, "%Y%m%d%H")
+                   for dt_object in fcst_output_times_all_cycles]
+                fcst_output_hrs_of_day_str = [yyyymmddhh[8:10] for yyyymmddhh in fcst_output_times_all_cycles_str]
+                fcst_output_hrs_of_day_str.sort()
+
+                # Check that all the forecast output hours-of-day are a subset of the obs
+                # availability hours-of-day.  If not, remove the current accumulation
+                # interval from the list of intervals to verify.
+                if not set(fcst_output_hrs_of_day_str) <= set(obs_avail_hrs_of_day_str):
+                    msg = dedent(f"""
+                        The accumulation interval (accum_hrs) for the current cumulative field
+                        group (fg) is such that the forecast will output the field(s) in the
+                        field group at at least one hour-of-day at which the corresponding
+                        observation type is not available:
+                            {fg = }
+                            {obtype = }
+                            {accum_hrs = }
+                        The forecast output hours-of-day for this field group/accumulation interval
+                        combination are:
+                            {fcst_output_hrs_of_day_str = }
+                        The hours-of-day at which the obs are available are:
+                            {obs_avail_hrs_of_day_str = }
+                        Thus, at least some of the forecast output cannot be verified.  Will remove
+                        this accumulation interval from the list of accumulation intervals to verify
+                        for this field group/obtype.
+                        """)
+                    logging.info(msg)
+                    accum_intvls_hrs.remove(accum_hrs)
+                else:
+                    fcst_obs_matched_times_all_cycles_cumul[fg][accum_hh] = fcst_output_times_all_cycles_str
+        #
+        # Update the value in the experiment configuration dictionary of the list
+        # of accumulation intervals to verify for this cumulative field (since
+        # some accumulation intervals may have been removed after the checks above).
+        #
+        vx_config[accum_intvls_array_name] = accum_intvls_hrs
+        #
+        # If the updated list of accumulations for the current cumulative field
+        # is empty, remove the field from the list of fields to verify in the
+        # verification configuration dictionary.
+        #
+        if not accum_intvls_hrs:
+            vx_config["VX_FIELD_GROUPS"].remove(fg)
+            msg = dedent(f"""
+                The list of accumulation intervals (accum_intvls_hrs) for the current
+                cumulative field group to verify (fg) is empty:
+                    {fg = }
+                    {accum_intvls_hrs = }
+                Removing this field from the list of fields to verify.  The updated list
+                is:
+                    {vx_config["VX_FIELD_GROUPS"]}
+                """)
+            logging.info(msg)
+
+    return vx_config, fcst_obs_matched_times_all_cycles_cumul
+
+
+def set_fcst_output_times_and_obs_days_all_cycles(
+    cycle_start_times, fcst_len, fcst_output_intvl):
+    """
+    This function returns forecast output times and observation days (i.e.
+    days on which obs are needed because there is forecast output on those
+    days) for both instantaneous (e.g. REFC, RETOP, T2m) and cumulative (e.g.
+    APCP) fields that need to be verified.  Note that for cumulative fields,
+    the only accumulation interval considered is the forecast output interval.
+    Accumulation intervals larger than this are considered elsewhere (and
+    accumulation intervals smaller than this are not allowed).
+
+    Args:
+        cycle_start_times (list):
+            List containing the starting times of the cycles in the experiment; each
+            list element is a datetime object.
+
+        fcst_len (datetime.timedelta):
+            The length of each forecast.
+
+        fcst_output_intvl (datetime.timedelta):
+            Time interval between forecast output times.
+
+    Returns:
+        fcst_output_times_all_cycles (dict):
+            Dictionary containing a list of forecast output times over all cycles for
+            instantaneous fields and a second analogous list for cumulative fields.
+            Each element of these lists is a string of the form 'YYYYMMDDHH'.
+
+        obs_days_all_cycles (dict):
+            Dictionary containing a list of observation days (i.e. days on which
+            observations are needed to perform verification) over all cycles for
+            instantaneous fields and a second analogous list for cumulative fields.
+            Each element of these lists is a string of the form 'YYYYMMDD'.
+    """
+
+    # Get the number of forecast output times per cycle/forecast.
+    num_fcst_output_times_per_cycle = int(fcst_len/fcst_output_intvl + 1)
+
+    # Initialize dictionaries that will contain the various forecast output
+    # time and obs day information.  Note that we initialize the contents of
+    # these dictionaries as sets because that better suits the data manipulation
+    # we will need to do, but these sets will later be converted to lists.
+    fcst_output_times_all_cycles = dict()
+    fcst_output_times_all_cycles['inst'] = set()
+    fcst_output_times_all_cycles['cumul'] = set()
+    obs_days_all_cycles = dict()
+    obs_days_all_cycles['inst'] = set()
+    obs_days_all_cycles['cumul'] = set()
+
+    for start_time_crnt_cycle in cycle_start_times:
+        # Create a list of forecast output times of instantaneous fields for the
+        # current cycle.
+        fcst_output_times_crnt_cycle_inst \
+        = [start_time_crnt_cycle + i*fcst_output_intvl
+           for i in range(0,num_fcst_output_times_per_cycle)]
+        # Include the output times of instantaneous fields for the current cycle
+        # in the set of all such output times over all cycles.
+        fcst_output_times_all_cycles['inst'] \
+        = fcst_output_times_all_cycles['inst'] | set(fcst_output_times_crnt_cycle_inst)
+
+        # Create a list of instantaneous field obs days (i.e.
+        # days on which
+        # observations of instantaneous fields are needed for verification) for
+        # the current cycle.  We do this by dropping the hour-of-day from each
+        # element of the list of forecast output times and keeping only unique
+        # elements.
+        tmp = [datetime_obj.date() for datetime_obj in fcst_output_times_crnt_cycle_inst]
+        obs_days_crnt_cycl_inst = sorted(set(tmp))
+        # Include the obs days for instantaneous fields for the current cycle
+        # in the set of all such obs days over all cycles.
+        obs_days_all_cycles['inst'] = obs_days_all_cycles['inst'] | set(obs_days_crnt_cycl_inst)
+
+        # Create a list of forecast output times of cumulative fields for the
+        # current cycle.  This is simply the list of forecast output times for
+        # instantaneous fields but with the first time dropped (because nothing
+        # has yet accumulated at the starting time of the cycle).
+        fcst_output_times_crnt_cycle_cumul = fcst_output_times_crnt_cycle_inst.copy()
+        fcst_output_times_crnt_cycle_cumul.pop(0)
+        # Include the output times of cumulative fields for the current cycle in
+        # the set of all such output times over all cycles.
+        fcst_output_times_all_cycles['cumul'] \
+        = fcst_output_times_all_cycles['cumul'] | set(fcst_output_times_crnt_cycle_cumul)
+
+        # Create a list of cumulative field obs days (i.e. days on which
+        # observations of cumulative fields are needed for verification) for
+        # the current cycle.  We do this by dropping the hour-of-day from each
+        # element of the list of forecast output times and keeping only unique
+        # elements.  Note, however, that before dropping the hour-of-day from
+        # the list of forecast output times, we remove the last forecast output
+        # time if it happens to be the 0th hour of a day.  This is because in
+        # the scripts/tasks that get observations of cumulative fields, the
+        # zeroth hour of a day is considered part of the previous day (because
+        # it represents accumulation that occurred on the previous day).
+        tmp = fcst_output_times_crnt_cycle_cumul
+        last_output_time_cumul = fcst_output_times_crnt_cycle_cumul[-1]
+        if last_output_time_cumul.hour == 0:
+            tmp.pop()
+        tmp = [datetime_obj.date() for datetime_obj in tmp]
+        obs_days_crnt_cycl_cumul = sorted(set(tmp))
+        # Include the obs days for cumulative fields for the current cycle in the
+        # set of all such obs days over all cycles.
+        obs_days_all_cycles['cumul'] = obs_days_all_cycles['cumul'] | set(obs_days_crnt_cycl_cumul)
+
+    # Convert the set of output times of instantaneous fields over all cycles
+    # to a sorted list of strings of the form 'YYYYMMDDHH'.
+    fcst_output_times_all_cycles['inst'] = sorted(fcst_output_times_all_cycles['inst'])
+    fcst_output_times_all_cycles['inst'] \
+    = [datetime.strftime(fcst_output_times_all_cycles['inst'][i], "%Y%m%d%H")
+       for i in range(len(fcst_output_times_all_cycles['inst']))]
+
+    # Convert the set of obs days for instantaneous fields over all cycles
+    # to a sorted list of strings of the form 'YYYYMMDD'.
+    obs_days_all_cycles['inst'] = sorted(obs_days_all_cycles['inst'])
+    obs_days_all_cycles['inst'] \
+    = [datetime.strftime(obs_days_all_cycles['inst'][i], "%Y%m%d")
+       for i in range(len(obs_days_all_cycles['inst']))]
+
+    # Convert the set of output times of cumulative fields over all cycles to
+    # a sorted list of strings of the form 'YYYYMMDDHH'.
+    fcst_output_times_all_cycles['cumul'] = sorted(fcst_output_times_all_cycles['cumul'])
+    fcst_output_times_all_cycles['cumul'] \
+    = [datetime.strftime(fcst_output_times_all_cycles['cumul'][i], "%Y%m%d%H")
+       for i in range(len(fcst_output_times_all_cycles['cumul']))]
+
+    # Convert the set of obs days for cumulative fields over all cycles to a
+    # sorted list of strings of the form 'YYYYMMDD'.
+    obs_days_all_cycles['cumul'] = sorted(obs_days_all_cycles['cumul'])
+    obs_days_all_cycles['cumul'] \
+    = [datetime.strftime(obs_days_all_cycles['cumul'][i], "%Y%m%d")
+       for i in range(len(obs_days_all_cycles['cumul']))]
+
+    return fcst_output_times_all_cycles, obs_days_all_cycles
+
+
+def set_rocoto_cycledefs_for_obs_days(obs_days_all_cycles):
+    """
+    Given a list of days on which observations are needed (because there is
+    forecast output on those days), this function generates a list of ROCOTO-
+    style cycledef strings that together span the days (over all cycles of an
+    SRW App experiment) on which obs are needed.  The input list of days must
+    be increasing in time, but the days do not have to be consecutive, i.e.
+    there may be gaps between days that are greater than one day.
+
+    Each cycledef string in the output list represents a set of consecutive
+    days in the input list (when used inside a <cycledef> tag in a ROCOTO
+    XML).  Thus, when the cycledef strings in the output list are all
+    assigned to the same cycledef group in a ROCOTO XML, that group will
+    represent all the days on which observations are needed.  This allows
+    the ROCOTO workflow to define a single set of non-consecutive days on
+    which obs are needed and define tasks (e.g. get_obs) only for those
+    days, thereby avoiding the redundant creation of these tasks for any
+    in-between days on which obs are not needed.
+
+    Args:
+        obs_days_all_cycles (list):
+            A list of strings of the form 'YYYYMMDD', with each string representing
+            a day on which observations are needed.  Note that the list must be sorted,
+            i.e. the days must be increasing in time, but there may be gaps between
+            days.
+
+    Returns:
+        cycledefs_all_obs_days (list):
+            A list of strings, with each string being a ROCOTO-style cycledef of the
+            form
+
+                '{yyyymmdd_start}0000 {yyyymmdd_end}0000 24:00:00'
+
+            where {yyyymmdd_start} is the starting day of the first cycle in the
+            cycledef and {yyyymmdd_end} is the starting day of the last cycle (note
+            that the minutes and hours in these cycledef strings are always set to
+            '00').  For example, an element of the output list may be:
+
+                '202404290000 202405010000 24:00:00'
+    """
+
+    # To enable arithmetic with dates, convert input string list of observation
+    # days (i.e. days on which observations are needed) over all cycles to a
+    # list of datetime objects.
+    tmp = [datetime.strptime(yyyymmdd, "%Y%m%d") for yyyymmdd in obs_days_all_cycles]
+
+    # Initialize the variable that in the loop below contains the date of
+    # the previous day.  This is just the first element of the list of
+    # datetime objects constructed above.  Then use it to initialize the
+    # list (consec_obs_days_lists) that will contain lists of consecutive
+    # observation days.  Thus, after its construction is complete, each
+    # element of consec_obs_days_lists will itself be a list containing
+    # datetime objects that represent consecutive days (i.e. are guaranteed
+    # to be 24 hours apart).
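The consecutive-day grouping implemented below can be sketched in isolation as follows (a standalone illustration; ``cycledefs_sketch`` and its example dates are hypothetical, not part of the App):

.. code-block:: python

    from datetime import datetime, timedelta

    def cycledefs_sketch(obs_days):
        # Group sorted 'YYYYMMDD' day strings into runs of consecutive days,
        # then emit one ROCOTO-style cycledef string per run.
        days = [datetime.strptime(d, "%Y%m%d") for d in obs_days]
        runs = [[days[0]]]
        for day in days[1:]:
            if day == runs[-1][-1] + timedelta(days=1):
                runs[-1].append(day)
            else:
                runs.append([day])
        return [f"{r[0]:%Y%m%d%H%M} {r[-1]:%Y%m%d%H%M} 24:00:00" for r in runs]

    # 29-30 April are consecutive; 2 May starts a new cycledef:
    print(cycledefs_sketch(["20240429", "20240430", "20240502"]))
    # -> ['202404290000 202404300000 24:00:00', '202405020000 202405020000 24:00:00']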
+    day_prev = tmp[0]
+    consec_obs_days_lists = list()
+    consec_obs_days_lists.append([day_prev])
+
+    # Remove the first element of the list of obs days since it has already
+    # been used to initialize consec_obs_days_lists.
+    tmp.pop(0)
+
+    # Loop over the remaining list of obs days and construct the list of
+    # lists of consecutive obs days.
+    one_day = timedelta(days=1)
+    for day_crnt in tmp:
+        # If the current obs day comes 24 hours after the previous obs day, i.e.
+        # if it is the next day of the previous obs day, append it to the last
+        # existing list in consec_obs_days_lists.
+        if day_crnt == day_prev + one_day:
+            consec_obs_days_lists[-1].append(day_crnt)
+        # If the current obs day is NOT the next day of the previous obs day,
+        # append a new element to consec_obs_days_lists and initialize it as a
+        # list containing a single element -- the current obs day.
+        else:
+            consec_obs_days_lists.append([day_crnt])
+        # Update the value of the previous day in preparation for the next
+        # iteration of the loop.
+        day_prev = day_crnt
+
+    # Use the list of lists of consecutive obs days to construct a list of
+    # ROCOTO-style cycledef strings that each represent a set of consecutive
+    # obs days when included in a <cycledef> tag in a ROCOTO XML.  Each
+    # string in this new list corresponds to a series of consecutive days on
+    # which observations are needed (where by "consecutive" we mean no days
+    # are skipped), and there is at least a one-day gap between each such
+    # series.  These cycledefs together represent all the days (i.e. over all
+    # cycles of the experiment) on which observations are needed.
+    cycledefs_all_obs_days = list()
+    for consec_obs_days_list in consec_obs_days_lists:
+        cycledef_start = consec_obs_days_list[0].strftime('%Y%m%d%H%M')
+        cycledef_end = consec_obs_days_list[-1].strftime('%Y%m%d%H%M')
+        cycledefs_all_obs_days.append(' '.join([cycledef_start, cycledef_end, '24:00:00']))
+
+    return cycledefs_all_obs_days
+
+
+def get_obs_retrieve_times_by_day(
+    vx_config, cycle_start_times, fcst_len,
+    fcst_output_times_all_cycles, obs_days_all_cycles):
+    """
+    This function generates a dictionary of dictionaries that, for each
+    combination of obs type needed and each obs day, contains a string list
+    of the times at which that type of observation is needed on that day.
+    The elements of each list are formatted as 'YYYYMMDDHH'.
+
+    Args:
+        vx_config (dict):
+            The verification configuration dictionary.
+
+        cycle_start_times (list):
+            List containing the starting times of the cycles in the experiment; each
+            list element is a datetime object.
+
+        fcst_len (datetime.timedelta):
+            The length of each forecast.
+
+        fcst_output_times_all_cycles (dict):
+            Dictionary containing a list of forecast output times over all cycles for
+            instantaneous fields and a second analogous list for cumulative fields.
+            Each element of these lists is a string of the form 'YYYYMMDDHH'.
+
+        obs_days_all_cycles (dict):
+            Dictionary containing a list of observation days (i.e. days on which
+            observations are needed to perform verification) over all cycles for
+            instantaneous fields and a second analogous list for cumulative fields.
+            Each element of these lists is a string of the form 'YYYYMMDD'.
+
+    Returns:
+        obs_retrieve_times_by_day (dict):
+            Dictionary of dictionaries containing times at which each type of obs is
+            needed on each obs day.
+    """
+
+    # Convert string contents of input dictionaries to datetime objects.
+    for time_type in ['cumul', 'inst']:
+        fcst_output_times_all_cycles[time_type] \
+        = [datetime.strptime(fcst_output_times_all_cycles[time_type][i], "%Y%m%d%H")
+           for i in range(len(fcst_output_times_all_cycles[time_type]))]
+        obs_days_all_cycles[time_type] \
+        = [datetime.strptime(obs_days_all_cycles[time_type][i], "%Y%m%d")
+           for i in range(len(obs_days_all_cycles[time_type]))]
+
+    # Get list of field groups to be verified.
+    vx_field_groups = vx_config['VX_FIELD_GROUPS']
+
+    # Define a list of dictionaries containing information about all the obs
+    # types that can possibly be used for verification in the SRW App.  Each
+    # dictionary in the list contains the name of the obs type, the temporal
+    # nature of that obs type (i.e. whether the obs type contains cumulative
+    # or instantaneous fields), and a list of the field groups that the obs
+    # type may be used to verify.
+    all_obs_info \
+    = [{'obtype': 'CCPA',   'time_type': 'cumul', 'field_groups': ['APCP']},
+       {'obtype': 'NOHRSC', 'time_type': 'cumul', 'field_groups': ['ASNOW']},
+       {'obtype': 'MRMS',   'time_type': 'inst',  'field_groups': ['REFC', 'RETOP']},
+       {'obtype': 'NDAS',   'time_type': 'inst',  'field_groups': ['SFC', 'UPA']}
+      ]
+
+    # Create new list that has the same form as the list of dictionaries
+    # defined above but contains only those obs types that have at least one
+    # field group that appears in the list of field groups to verify.  Note
+    # that for those obs types that are retained in the list, the field groups
+    # that will not be verified are discarded.
+    obs_info = []
+    for obs_dict in all_obs_info.copy():
+        obtype = obs_dict['obtype']
+        field_groups = obs_dict['field_groups']
+        field_groups = [field for field in field_groups if field in vx_field_groups]
+        obs_dict = obs_dict.copy()
+        obs_dict['field_groups'] = field_groups
+        if field_groups: obs_info.append(obs_dict)
+
+    # For convenience, define timedelta object representing a single day.
+    one_day = timedelta(days=1)
+
+    # Generate a dictionary (of dictionaries) that, for each obs type to be
+    # used in the vx and for each day for which there is forecast output,
+    # will contain the times at which verification will be performed, i.e.
+    # the times at which the forecast output will be compared to observations.
+    # We refer to these times as the vx comparison times.
+    vx_compare_times_by_day = dict()
+    for obs_dict in obs_info:
+
+        obtype = obs_dict['obtype']
+        obs_time_type = obs_dict['time_type']
+
+        fcst_output_times_all_cycles_crnt_ttype = fcst_output_times_all_cycles[obs_time_type]
+        obs_days_all_cycles_crnt_ttype = obs_days_all_cycles[obs_time_type]
+
+        vx_compare_times_by_day[obtype] = dict()
+
+        # Get the availability interval for the current observation type from the
+        # verification configuration dictionary.  Then make sure it divides evenly
+        # into 24.
+        config_var_name = "".join([obtype, "_OBS_AVAIL_INTVL_HRS"])
+        obs_avail_intvl_hrs = vx_config[config_var_name]
+        remainder = 24 % obs_avail_intvl_hrs
+        if remainder != 0:
+            msg = dedent(f"""
+                The obs availability interval for obs of type {obtype} must divide evenly
+                into 24 but doesn't:
+                    obs_avail_intvl_hrs = {obs_avail_intvl_hrs}
+                    24 % obs_avail_intvl_hrs = {remainder}
+                """)
+            logging.error(msg)
+            raise Exception(msg)
+        obs_avail_intvl = timedelta(hours=obs_avail_intvl_hrs)
+        num_obs_avail_times_per_day = int(24/obs_avail_intvl_hrs)
+
+        # Loop over all obs days over all cycles (for the current obs type).
+        # For each such day, get the list of forecast output times and the list of
+        # obs availability times.  Finally, set the times (on that day) that verification
+        # will be performed to the intersection of these two lists.
+        for obs_day in obs_days_all_cycles_crnt_ttype:
+
+            next_day = obs_day + one_day
+            if obs_time_type == "cumul":
+                fcst_output_times_crnt_day \
+                = [time for time in fcst_output_times_all_cycles_crnt_ttype if obs_day < time <= next_day]
+            elif obs_time_type == "inst":
+                fcst_output_times_crnt_day \
+                = [time for time in fcst_output_times_all_cycles_crnt_ttype if obs_day <= time < next_day]
+            fcst_output_times_crnt_day = [datetime.strftime(time, "%Y%m%d%H") for time in fcst_output_times_crnt_day]
+
+            if obs_time_type == "cumul":
+                obs_avail_times_crnt_day \
+                = [obs_day + (i+1)*obs_avail_intvl for i in range(0,num_obs_avail_times_per_day)]
+            elif obs_time_type == "inst":
+                obs_avail_times_crnt_day \
+                = [obs_day + i*obs_avail_intvl for i in range(0,num_obs_avail_times_per_day)]
+            obs_avail_times_crnt_day = [datetime.strftime(time, "%Y%m%d%H") for time in obs_avail_times_crnt_day]
+
+            vx_compare_times_crnt_day = list(set(fcst_output_times_crnt_day) & set(obs_avail_times_crnt_day))
+            vx_compare_times_crnt_day.sort()
+
+            obs_day_str = datetime.strftime(obs_day, "%Y%m%d")
+            vx_compare_times_by_day[obtype][obs_day_str] = vx_compare_times_crnt_day
+
+    # For each obs type to be used in the vx and for each day for which there
+    # is forecast output, calculate the times at which obs need to be retrieved.
+    # For instantaneous fields, the obs retrieval times are the same as the
+    # times at which vx will be performed.  For cumulative fields, each field
+    # value needs to be constructed by adding values from previous times.  For
+    # example, if we're verifying 6-hourly precipitation and the obs availability
+    # interval for precip obs (CCPA) is 1 hour, then the 6-hourly values must
+    # be built by adding the 1-hour values.  Thus, this requires obs at every
+    # hour, not just every 6 hours.
+    #
+    # First, initialize the dictionary (of dictionaries) that will contain the
+    # obs retrieval times (for all obs types and each day for which there is
+    # forecast output), and set the values for instantaneous obs to the vx
+    # comparison times calculated above.
+    obs_retrieve_times_by_day = dict()
+    for obs_dict in obs_info:
+        obtype = obs_dict['obtype']
+        obs_time_type = obs_dict['time_type']
+        if obs_time_type == 'inst':
+            obs_retrieve_times_by_day[obtype] = vx_compare_times_by_day[obtype]
+
+    # Next, calculate the obs retrieval times for cumulative fields.  We want
+    # these times grouped into days because the get_obs workflow tasks that
+    # will use this information are day-based (i.e. each task will get obs
+    # for a single day).  However, it is easier to first calculate these
+    # times as a single group over all cycles.  We do this next.
+    obs_retrieve_times_all_cycles = dict()
+    for obs_dict in obs_info:
+
+        obtype = obs_dict['obtype']
+        obs_time_type = obs_dict['time_type']
+        field_groups = obs_dict['field_groups']
+
+        # Consider only cumulative fields.
+        if obs_time_type != 'cumul':
+            continue
+
+        # Initialize the set that will contain the obs retrieval times over all
+        # cycles.
+        obs_retrieve_times_all_cycles[obtype] = set()
+
+        # Get the availability interval for the current observation type from the
+        # verification configuration dictionary.
+ config_var_name = "".join([obtype, "_OBS_AVAIL_INTVL_HRS"]) + obs_avail_intvl_hrs = vx_config[config_var_name] + obs_avail_intvl = timedelta(hours=obs_avail_intvl_hrs) + + # Consider all field groups to be verified for the current obs type. + for fg in field_groups: + + # Get the list of accumulation intervals for the current cumulative obs + # type and field group combination. + accum_intvls_array_name = "".join(["VX_", fg, "_ACCUMS_HRS"]) + accum_intvls_hrs = vx_config[accum_intvls_array_name] + + for cycle_start_time in cycle_start_times: + + # Loop through the accumulation intervals for this obs type and field + # group combination. + for accum_intvl_hrs in accum_intvls_hrs: + accum_intvl = timedelta(hours=accum_intvl_hrs) + # Get the number of accumulation intervals that fits in the duration of + # the forecast. Note that the accumulation interval doesn't necessarily + # have to evenly divide the forecast duration; we simply drop any fractional + # accumulation intervals by rounding down to the nearest integer. + num_accum_intvls_in_fcst = int(fcst_len/accum_intvl) + # Calculate the times at which the current cumulative obs field will be + # compared to the forecast field(s) in the corresponding cumulative field + # group (for the current accumulation interval). + vx_compare_times_crnt_cycl = [cycle_start_time + (i+1)*accum_intvl + for i in range(0,num_accum_intvls_in_fcst)] + # For each such comparison time, get the times at which obs are needed + # to form that accumulation. For example, if the current accumulation + # interval is 6 hours and the obs are available every hour, then the + # times at which obs are needed will be the comparison time as well as + # the five hours preceding it. Then put all such times over all vx + # comparison times within all cycles into a single array of times (which + # is stored in the dictionary obs_retrieve_times_all_cycles). + for vx_compare_time in vx_compare_times_crnt_cycl: + remainder = accum_intvl_hrs % obs_avail_intvl_hrs + if remainder != 0: + msg = dedent(f""" + The obs availability interval for obs of type {obtype} must divide evenly + into the current accumulation interval (accum_intvl) but doesn't: + accum_intvl_hrs = {accum_intvl_hrs} + obs_avail_intvl_hrs = {obs_avail_intvl_hrs} + accum_intvl_hrs % obs_avail_intvl_hrs = {remainder} + """) + logging.error(msg) + raise Exception(msg) + num_obs_avail_times_in_accum_intvl = int(accum_intvl/obs_avail_intvl) + obs_retrieve_times_crnt_accum_intvl \ + = [vx_compare_time - i*obs_avail_intvl \ + for i in range(0,num_obs_avail_times_in_accum_intvl)] + obs_retrieve_times_all_cycles[obtype] \ + = obs_retrieve_times_all_cycles[obtype] | set(obs_retrieve_times_crnt_accum_intvl) + + # Convert the final set of obs retrieval times for the current obs type + # to a sorted list. Note that the sorted() function will convert a set + # to a sorted list (a set itself cannot be sorted). + obs_retrieve_times_all_cycles[obtype] = sorted(obs_retrieve_times_all_cycles[obtype]) + + # Now that the obs retrieval times for cumulative fields have been obtained + # but grouped by cycle start date, regroup them by day and save results + # in obs_retrieve_times_by_day. + for obs_dict in obs_info: + + obtype = obs_dict['obtype'] + obs_time_type = obs_dict['time_type'] + + # Consider only cumulative obs/fields. + if obs_time_type != 'cumul': + continue + + # Initialize variables before looping over obs days.
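For a concrete sense of the retrieval-time arithmetic in the accumulation loop above, here is a minimal standalone sketch (hypothetical values, not taken from the patch): with hourly obs availability, as for CCPA, and a 6-hour accumulation ending at 06Z, the obs needed are the comparison time plus the five preceding hours.

from datetime import datetime, timedelta

# Hypothetical values: hourly obs availability, 6-hour accumulation.
obs_avail_intvl = timedelta(hours=1)
accum_intvl = timedelta(hours=6)
vx_compare_time = datetime(2024, 7, 1, 6)  # accumulation ends at 06Z

# Same arithmetic as obs_retrieve_times_crnt_accum_intvl above.
num_obs = int(accum_intvl/obs_avail_intvl)
retrieve_times = sorted(vx_compare_time - i*obs_avail_intvl for i in range(num_obs))
print([datetime.strftime(t, "%Y%m%d%H") for t in retrieve_times])
# ['2024070101', '2024070102', '2024070103', '2024070104', '2024070105', '2024070106']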
+ obs_retrieve_times_by_day[obtype] = dict() + obs_days_all_cycles_crnt_ttype = obs_days_all_cycles[obs_time_type] + obs_retrieve_times_all_cycles_crnt_obtype = obs_retrieve_times_all_cycles[obtype] + + for obs_day in obs_days_all_cycles_crnt_ttype: + next_day = obs_day + one_day + obs_retrieve_times_crnt_day \ + = [time for time in obs_retrieve_times_all_cycles_crnt_obtype if obs_day < time <= next_day] + obs_retrieve_times_crnt_day = [datetime.strftime(time, "%Y%m%d%H") for time in obs_retrieve_times_crnt_day] + obs_day_str = datetime.strftime(obs_day, "%Y%m%d") + obs_retrieve_times_by_day[obtype][obs_day_str] = obs_retrieve_times_crnt_day + + return obs_retrieve_times_by_day diff --git a/ush/set_cycle_dates.py b/ush/set_cycle_dates.py deleted file mode 100644 index 0c63a87e49..0000000000 --- a/ush/set_cycle_dates.py +++ /dev/null @@ -1,34 +0,0 @@ -#!/usr/bin/env python3 - -from datetime import datetime, timedelta, date - -from python_utils import print_input_args, print_err_msg_exit - - -def set_cycle_dates(date_start, date_end, incr_cycl_freq): - """This file defines a function that, given the start and end dates - as date time objects, and a cycling frequency, returns an array of - cycle date-hours whose elements have the form YYYYMMDDHH. Here, - YYYY is a four-digit year, MM is a two- digit month, DD is a - two-digit day of the month, and HH is a two-digit hour of the day. - - Args: - date_start: start date, datetime object - date_end: end date, datetime object - incr_cycl_freq: cycle frequency increment in hours, an int - Returns: - A list of dates in a format YYYYMMDDHH - """ - - print_input_args(locals()) - - freq_delta = timedelta(hours=incr_cycl_freq) - - # iterate over cycles - all_cdates = [] - cdate = date_start - while cdate <= date_end: - cyc = datetime.strftime(cdate, "%Y%m%d%H") - all_cdates.append(cyc) - cdate += freq_delta - return all_cdates diff --git a/ush/set_fv3nml_ens_stoch_seeds.py b/ush/set_fv3nml_ens_stoch_seeds.py index 3459fa8707..9160dec0a2 100644 --- a/ush/set_fv3nml_ens_stoch_seeds.py +++ b/ush/set_fv3nml_ens_stoch_seeds.py @@ -10,12 +10,12 @@ import sys from textwrap import dedent -from uwtools.api.config import realize +from uwtools.api.config import get_nml_config, realize from python_utils import ( cfg_to_yaml_str, import_vars, - load_shell_config, + load_yaml_config, print_input_args, print_info_msg, ) @@ -23,19 +23,17 @@ def set_fv3nml_ens_stoch_seeds(cdate, expt_config): """ - This function, for an ensemble-enabled experiment - (i.e. for an experiment for which the workflow configuration variable - DO_ENSEMBLE has been set to "TRUE"), creates new namelist files with - unique stochastic "seed" parameters, using a base namelist file in the - ${EXPTDIR} directory as a template. These new namelist files are stored - within each member directory housed within each cycle directory. Files - of any two ensemble members differ only in their stochastic "seed" - parameter values. These namelist files are generated when this file is - called as part of the TN_RUN_FCST task. + Creates new namelist files with unique stochastic "seed" parameters for an ensemble-enabled + experiment (i.e., where ``DO_ENSEMBLE: True``), using a base namelist file in ``${EXPTDIR}`` + as a template. These new namelist files are stored within each member directory housed within + each cycle directory. Files of any two ensemble members differ only in their stochastic "seed" + parameter values. 
These namelist files are generated when this file is called as part of the + ``run_fcst`` task. Args: - cdate the cycle - expt_config the in-memory dict representing the experiment configuration + cdate (datetime.datetime): The cycle date + expt_config (dict): The in-memory dictionary representing the experiment + configuration file Returns: None """ @@ -112,10 +110,10 @@ def set_fv3nml_ens_stoch_seeds(cdate, expt_config): input_format="nml", output_file=fv3_nml_ensmem_fp, output_format="nml", - supplemental_configs=[settings], + update_config=get_nml_config(settings), ) -def parse_args(argv): +def _parse_args(argv): """Parse command line arguments""" parser = argparse.ArgumentParser( description="Creates stochastic seeds for an ensemble experiment." @@ -141,6 +139,6 @@ def parse_args(argv): if __name__ == "__main__": - args = parse_args(sys.argv[1:]) - cfg = load_shell_config(args.path_to_defns) + args = _parse_args(sys.argv[1:]) + cfg = load_yaml_config(args.path_to_defns) set_fv3nml_ens_stoch_seeds(args.cdate, cfg) diff --git a/ush/set_fv3nml_sfc_climo_filenames.py b/ush/set_fv3nml_sfc_climo_filenames.py index 417aa0b5ee..7b18399b00 100644 --- a/ush/set_fv3nml_sfc_climo_filenames.py +++ b/ush/set_fv3nml_sfc_climo_filenames.py @@ -10,14 +10,14 @@ import sys from textwrap import dedent -from uwtools.api.config import get_yaml_config, realize +from uwtools.api.config import get_nml_config, get_yaml_config, realize from python_utils import ( cfg_to_yaml_str, check_var_valid_value, flatten_dict, import_vars, - load_shell_config, + load_yaml_config, print_info_msg, ) @@ -38,15 +38,16 @@ def set_fv3nml_sfc_climo_filenames(config, debug=False): """ - This function sets the values of the variables in - the forecast model's namelist file that specify the paths to the surface - climatology files on the FV3LAM native grid (which are either pregenerated - or created by the TN_MAKE_SFC_CLIMO task). Note that the workflow + Sets the values of the variables in the forecast model's namelist file that specify the paths + to the surface climatology files on the FV3LAM native grid (which are either pregenerated + or created by the ``make_sfc_climo`` task). Note that the workflow generation scripts create symlinks to these surface climatology files - in the FIXlam directory, and the values in the namelist file that get + in the ``FIXlam`` directory, and the values in the namelist file that get set by this function are relative or full paths to these links. 
Args: + config (dict): Section of configuration file specifying surface climatology fields + (as a flattened dictionary) debug (bool): Enable extra output for debugging Returns: None @@ -105,10 +106,10 @@ def set_fv3nml_sfc_climo_filenames(config, debug=False): input_format="nml", output_file=FV3_NML_FP, output_format="nml", - supplemental_configs=[settings], + update_config=get_nml_config(settings), ) -def parse_args(argv): +def _parse_args(argv): """Parse command line arguments""" parser = argparse.ArgumentParser(description="Set surface climatology fields.") @@ -126,7 +127,7 @@ def parse_args(argv): if __name__ == "__main__": - args = parse_args(sys.argv[1:]) - cfg = load_shell_config(args.path_to_defns) + args = _parse_args(sys.argv[1:]) + cfg = load_yaml_config(args.path_to_defns) cfg = flatten_dict(cfg) set_fv3nml_sfc_climo_filenames(cfg, args.debug) diff --git a/ush/set_gridparams_ESGgrid.py b/ush/set_gridparams_ESGgrid.py index abafd24ae4..4b070e9e50 100644 --- a/ush/set_gridparams_ESGgrid.py +++ b/ush/set_gridparams_ESGgrid.py @@ -15,21 +15,26 @@ def set_gridparams_ESGgrid( lon_ctr, lat_ctr, nx, ny, halo_width, delx, dely, pazi, constants ): - """Sets the parameters for a grid that is to be generated using the "ESGgrid" - grid generation method (i.e. GRID_GEN_METHOD set to "ESGgrid"). + """ + Sets the parameters for a grid that is to be generated using the "ESGgrid" + grid generation method (i.e., when ``GRID_GEN_METHOD: "ESGgrid"``). Args: - lon_ctr - lat_ctr - nx - ny - halo_width - delx - dely - pazi - constants: dictionary of SRW constants + lon_ctr (float): The longitude of the center of the grid (in degrees). + lat_ctr (float): The latitude of the center of the grid (in degrees). + nx (int): The number of cells in the zonal direction on the regional grid. + ny (int): The number of cells in the meridional direction on the regional grid. + halo_width (int): The width (in number of grid cells) of the wide :term:`halo` to add + around the regional grid before shaving the halo down to the width(s) + expected by the forecast model. For predefined grids, this value is + set in ``setup.py`` based on the ``ESGgrid_WIDE_HALO_WIDTH`` value in + ``predef_grid_params.yaml``. + delx (float): The cell size in the zonal direction of the regional grid (in meters). + dely (float): The cell size in the meridional direction of the regional grid (in meters). + pazi (float): The rotational parameter for the “ESGgrid” (in degrees). + constants (dict): Dictionary of SRW constants Returns: - Tuple of inputs, and 4 outputs (see return statement) + Dictionary of inputs and 4 outputs (see return statement in code for details) """ print_input_args(locals()) @@ -43,12 +48,12 @@ def set_gridparams_ESGgrid( # # For a ESGgrid-type grid, the orography filtering is performed by pass- # ing to the orography filtering the parameters for an "equivalent" glo- - # bal uniform cubed-sphere grid. These are the parameters that a global + # bal uniform cubed-sphere grid. These are the parameters that a global # uniform cubed-sphere grid needs to have in order to have a nominal # grid cell size equal to that of the (average) cell size on the region- - # al grid. These globally-equivalent parameters include a resolution + # al grid. These globally-equivalent parameters include a resolution # (in units of number of cells in each of the two horizontal directions) - # and a stretch factor. The equivalent resolution is calculated in the + # and a stretch factor. 
The equivalent resolution is calculated in the # script that generates the grid, and the stretch factor needs to be set # to 1 because we are considering an equivalent globally UNIFORM grid. # However, it turns out that with a non-symmetric regional grid (one in @@ -56,7 +61,7 @@ def set_gridparams_ESGgrid( # cause the orography filtering program is designed for a global cubed- # sphere grid and thus assumes that nx and ny for a given tile are equal # when stretch_factor is exactly equal to 1. - # ^^-- Why is this? Seems like symmetry btwn x and y should still hold when the stretch factor is not equal to 1. + # ^^-- Why is this? Seems like symmetry between x and y should still hold when the stretch factor is not equal to 1. # It turns out that the program will work if we set stretch_factor to a # value that is not exactly 1. This is what we do below. # diff --git a/ush/set_gridparams_GFDLgrid.py b/ush/set_gridparams_GFDLgrid.py index 64dda5b951..f200bb6f83 100644 --- a/ush/set_gridparams_GFDLgrid.py +++ b/ush/set_gridparams_GFDLgrid.py @@ -13,7 +13,7 @@ ) -def prime_factors(n): +def _prime_factors(n): i = 2 factors = [] while i * i <= n: @@ -41,24 +41,35 @@ def set_gridparams_GFDLgrid( nh4, run_envir, ): - """Sets the parameters for a grid that is to be generated using the "GFDLgrid" - grid generation method (i.e. GRID_GEN_METHOD set to "ESGgrid"). + """Sets the parameters for a grid that is to be generated using the legacy "GFDLgrid" + grid generation method (i.e., when ``GRID_GEN_METHOD: "ESGgrid"``). Args: - lon_of_t6_ctr - lat_of_t6_ctr - res_of_t6g - stretch_factor - refine_ratio_t6g_to_t7g - istart_of_t7_on_t6g - iend_of_t7_on_t6g - jstart_of_t7_on_t6g - jend_of_t7_on_t6g - verbose - nh4 - run_envir + lon_of_t6_ctr (float): Longitude of the center of tile 6 (in degrees). + lat_of_t6_ctr (float): Latitude of the center of tile 6 (in degrees). + res_of_t6g (int): Number of grid cells in either of the two horizontal + directions (x and y) on each of the six tiles of the + parent global cubed-sphere grid (e.g., 48, 96, 192, 384, + 768, 1152, 3072). + stretch_factor (float): Stretching factor used in the Schmidt transformation + applied to the parent cubed-sphere grid. Setting the + Schmidt stretching factor to a value greater than 1 + shrinks tile 6, while setting it to a value less than 1 + (but still greater than 0) expands it. + refine_ratio_t6g_to_t7g (int): Cell refinement ratio for the regional grid. It refers to + the number of cells in either the x or y direction on the + regional grid (tile 7) that abut one cell on its parent + tile (tile 6). + istart_of_t7_on_t6g (int): i-index on tile 6 at which the regional grid (tile 7) starts. + iend_of_t7_on_t6g (int): i-index on tile 6 at which the regional grid (tile 7) ends. + jstart_of_t7_on_t6g (int): j-index on tile 6 at which the regional grid (tile 7) starts. + jend_of_t7_on_t6g (int): j-index on tile 6 at which the regional grid (tile 7) ends. + verbose (bool): Flag to print out additional informational messages + nh4 (int): The width (in number of cells) of the 4-cell-wide halo on + tile 7, i.e. NH4 = 4. + run_envir (str): Workflow mode (*community* or *nco*) Returns: - Tuple of inputs and outputs (see return statement) + Dictionary of inputs and outputs (see return statement in code for more detail) """ print_input_args(locals()) @@ -67,7 +78,7 @@ def set_gridparams_GFDLgrid( # ----------------------------------------------------------------------- # # To simplify the grid setup, we require that tile 7 be centered on tile - # 6. 
Note that this is not really a restriction because tile 6 can al- + # ways be moved so that it is centered on tile 7 [the location of tile 6 # doesn't really matter because for a regional setup, the forecast model # will only run on tile 7 (not on tiles 1-6)]. @@ -369,8 +380,8 @@ def set_gridparams_GFDLgrid( nx_of_t6_on_t6sg = 2 * nx_of_t6_on_t6g ny_of_t6_on_t6sg = 2 * ny_of_t6_on_t6g - prime_factors_nx_of_t7_on_t7g = prime_factors(nx_of_t7_on_t7g) - prime_factors_ny_of_t7_on_t7g = prime_factors(ny_of_t7_on_t7g) + prime_factors_nx_of_t7_on_t7g = _prime_factors(nx_of_t7_on_t7g) + prime_factors_ny_of_t7_on_t7g = _prime_factors(ny_of_t7_on_t7g) logging.debug( f""" diff --git a/ush/set_leadhrs.py b/ush/set_leadhrs.py new file mode 100644 index 0000000000..3256297af2 --- /dev/null +++ b/ush/set_leadhrs.py @@ -0,0 +1,94 @@ +#!/usr/bin/env python3 +import argparse +import os +from eval_metplus_timestr_tmpl import eval_metplus_timestr_tmpl + +def set_leadhrs(date_init, lhr_min, lhr_max, lhr_intvl, base_dir, time_lag, fn_template, num_missing_files_max, + skip_check_files=False, verbose=False): + """ + Creates a list of lead hours based on the provided range and interval, + checks for the existence of corresponding files, and returns a list + of lead hours for which files exist. If too many files are missing, it fails with an exception. + + Args: + date_init (str): Date string for initial time in YYYYMMDDHH[mmss] format, where + minutes and seconds are optional. + lhr_min (int): Minimum lead hour to check + lhr_max (int): Maximum lead hour to check + lhr_intvl (int): Interval between lead hours + base_dir (str): Base directory for forecast/observation file + time_lag (int): Hours of time lag for a time-lagged ensemble member + fn_template (str): The METplus filename template for finding the files + verbose (bool): By default this script only outputs the list of forecast hours + (for easier parsing from bash contexts). Set the verbose flag + to True for additional debugging output.
+ num_missing_files_max (int): If more than this number of files are missing, raise an exception + skip_check_files (bool): If true, return the list of forecast hours, skipping the file check + Returns: + A list of forecast hours where files were found + """ + + # Step 1: Generate lead hours without filtering for missing files + lhrs_list = list(range(lhr_min, lhr_max + 1, lhr_intvl)) + if verbose: + print(f"Initial set of lead hours (relative to {date_init}): {lhrs_list}") + + if skip_check_files: + return lhrs_list + + # Step 2: Loop through lead hours and check for corresponding file existence + final_list = [] + num_missing_files = 0 + for lhr in lhrs_list: + + # Evaluate the METplus timestring template for the current lead hour + fn = eval_metplus_timestr_tmpl(date_init, lhr, time_lag, fn_template, verbose=False) + + # Get the full path and check if the file exists + fp = os.path.join(base_dir, fn) + if os.path.isfile(fp): + if verbose: + print(f"Found file for lead hour {lhr} (relative to {date_init}): {fp}") + final_list.append(lhr) + else: + num_missing_files += 1 + + if verbose: + print(f"File for lead hour {lhr} (relative to {date_init}) is MISSING: {fp}") + + if verbose: + print(f"Final set of lead hours relative to {date_init}: {final_list}") + + # Step 3: Check if the number of missing files exceeds the maximum allowed + if num_missing_files > num_missing_files_max: + raise Exception(f"Number of missing files ({num_missing_files}) exceeds maximum allowed ({num_missing_files_max}).") + + return final_list + +if __name__ == "__main__": + + parser = argparse.ArgumentParser( + description="Print a list of forecast hours in bash-readable comma-separated format such that there is a corresponding file (can be observations or forecast files) for each list entry.", + ) + parser.add_argument("-v", "--verbose", help="Verbose output", action="store_true") + parser.add_argument("-d", "--date_init", help="Initial date in YYYYMMDDHH[mmss] format", type=str, default='') + parser.add_argument("-min", "--lhr_min", help="Minimum lead hour to check", type=int, required=True) + parser.add_argument("-max", "--lhr_max", help="Maximum lead hour to check", type=int, required=True) + parser.add_argument("-int", "--lhr_intvl", help="Interval between lead hours", type=int, required=True) + parser.add_argument("-tl", "--time_lag", help="Hours of time lag for a time-lagged ensemble member", type=int, default=0) + parser.add_argument("-bd", "--base_dir", help="Base directory for forecast/observation file", type=str, default='') + parser.add_argument("-ft", "--fn_template", help="Template for file names to search; see ???
for details on template settings", type=str, default='') + parser.add_argument("-n", "--num_missing_files_max", type=int, default=5, + help="Number of missing files to tolerate; if more than this number of files cannot be found, raise an exception") + parser.add_argument("-s", "--skip_check_files", action="store_true", + help="Flag to skip file check and just return the list of lead hours") + + args = parser.parse_args() + + # Consistency checks + if not args.skip_check_files and not args.date_init: + raise argparse.ArgumentTypeError('--date_init must be specified unless --skip_check_files is specified') + + leadhr_list = set_leadhrs(**vars(args)) + # If called from command line, we want to print a bash-parsable list + print(', '.join(str(x) for x in leadhr_list)) diff --git a/ush/set_predef_grid_params.py b/ush/set_predef_grid_params.py index 5d3bd0ffed..820e3fc659 100644 --- a/ush/set_predef_grid_params.py +++ b/ush/set_predef_grid_params.py @@ -13,11 +13,13 @@ def set_predef_grid_params(USHdir, grid_name, quilting): """Sets grid parameters for the specified predefined grid Args: - USHdir: path to the SRW ush directory - grid_name str specifying the predefined grid name. - quilting: bool whether quilting should be used for output + USHdir (str) : Path to the SRW App ``ush`` directory + grid_name (str) : String specifying the predefined grid name + quilting (bool): Whether quilting should be used for output Returns: - Dictionary of grid parameters + params_dict: A dictionary of grid parameters + Raises: + KeyError: If a selected predefined grid is not found in ``predef_grid_params.yaml``. """ params_dict = load_config_file(os.path.join(USHdir, "predef_grid_params.yaml")) diff --git a/ush/set_vx_fhr_list.sh b/ush/set_vx_fhr_list.sh deleted file mode 100644 index 8a1c9735a5..0000000000 --- a/ush/set_vx_fhr_list.sh +++ /dev/null @@ -1,295 +0,0 @@ -# -#----------------------------------------------------------------------- -# -# This file defines a function that generates a list of forecast hours -# such that for each hour there exist a corresponding obs file. It does -# this by first generating a generic sequence of forecast hours and then -# removing from that sequence any hour for which there is no obs file. -# -#----------------------------------------------------------------------- -# -function set_vx_fhr_list() { -# -#----------------------------------------------------------------------- -# -# Save current shell options (in a global array). Then set new options -# for this script/function. -# -#----------------------------------------------------------------------- -# - { save_shell_opts; set -u +x; } > /dev/null 2>&1 -# -#----------------------------------------------------------------------- -# -# Get the full path to the file in which this script/function is located -# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in -# which the file is located (scrfunc_dir). -# -#----------------------------------------------------------------------- -# - local scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" ) - local scrfunc_fn=$( basename "${scrfunc_fp}" ) - local scrfunc_dir=$( dirname "${scrfunc_fp}" ) -# -#----------------------------------------------------------------------- -# -# Get the name of this function. -# -#----------------------------------------------------------------------- -# - local func_name="${FUNCNAME[0]}" -# -#----------------------------------------------------------------------- -# -# Specify the set of valid argument names for this script/function.
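A usage sketch for set_leadhrs() as defined above (hypothetical argument values): with skip_check_files=True the function skips the disk check entirely and simply expands the requested range, so no forecast or obs files need to exist.

from set_leadhrs import set_leadhrs

# Expand lead hours 0..12 every 3 hours without checking for files on disk.
lhrs = set_leadhrs(date_init="2024070100", lhr_min=0, lhr_max=12, lhr_intvl=3,
                   base_dir="", time_lag=0, fn_template="",
                   num_missing_files_max=0, skip_check_files=True)
print(', '.join(str(x) for x in lhrs))  # 0, 3, 6, 9, 12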
Then -# process the arguments provided to this script/function (which should -# consist of a set of name-value pairs of the form arg1="value1", etc). -# -#----------------------------------------------------------------------- -# - local valid_args=( \ - "cdate" \ - "fcst_len_hrs" \ - "field" \ - "accum_hh" \ - "base_dir" \ - "fn_template" \ - "check_accum_contrib_files" \ - "num_missing_files_max" \ - "outvarname_fhr_list" \ - ) - process_args valid_args "$@" -# -#----------------------------------------------------------------------- -# -# For debugging purposes, print out values of arguments passed to this -# script. Note that these will be printed out only if VERBOSE is set to -# TRUE. -# -#----------------------------------------------------------------------- -# - print_input_args valid_args -# -#----------------------------------------------------------------------- -# -# Declare local variables. -# -#----------------------------------------------------------------------- -# - local crnt_tmpl \ - crnt_tmpl_esc \ - fhr \ - fhr_array \ - fhr_int \ - fhr_list \ - fhr_min \ - fhr_max \ - fn \ - fp \ - i \ - num_fcst_hrs \ - num_missing_files \ - regex_search_tmpl \ - remainder \ - skip_this_fhr -# -#----------------------------------------------------------------------- -# -# Create array containing set of forecast hours for which we will check -# for the existence of corresponding observation or forecast file. -# -#----------------------------------------------------------------------- -# - case "${field}" in - "APCP") - fhr_min="${accum_hh}" - fhr_int="${accum_hh}" - ;; - "ASNOW") - if [ "${accum_hh}" = "24" ]; then - fhr_min="24" - fhr_int="12" - else - fhr_min="${accum_hh}" - fhr_int="${accum_hh}" - fi - ;; - "REFC") - fhr_min="00" - fhr_int="01" - ;; - "RETOP") - fhr_min="00" - fhr_int="01" - ;; - "ADPSFC") - fhr_min="00" - fhr_int="01" - ;; - "ADPUPA") - fhr_min="00" - fhr_int="06" - ;; - *) - print_err_msg_exit "\ -A method for setting verification parameters has not been specified for -this field (field): - field = \"${field}\"" - ;; - esac - fhr_max="${fcst_len_hrs}" - - fhr_array=($( seq ${fhr_min} ${fhr_int} ${fhr_max} )) - print_info_msg "$VERBOSE" "\ -Initial (i.e. before filtering for missing files) set of forecast hours -is: - fhr_array = ( $( printf "\"%s\" " "${fhr_array[@]}" )) -" -# -#----------------------------------------------------------------------- -# -# Loop through all forecast hours. For each one for which a corresponding -# file exists, add the forecast hour to fhr_list. fhr_list will be a -# scalar containing a comma-separated list of forecast hours for which -# corresponding files exist. Also, use the variable num_missing_files -# to keep track of the number of files that are missing. -# -#----------------------------------------------------------------------- -# - fhr_list="" - num_missing_files="0" - num_fcst_hrs=${#fhr_array[@]} - for (( i=0; i<${num_fcst_hrs}; i++ )); do - - fhr_orig="${fhr_array[$i]}" - - if [ "${check_accum_contrib_files}" = "TRUE" ]; then - fhr=$(( ${fhr_orig} - ${accum_hh} + 1 )) - num_back_hrs=${accum_hh} - else - fhr=${fhr_orig} - num_back_hrs=1 - fi - - skip_this_fhr="FALSE" - for (( j=0; j<${num_back_hrs}; j++ )); do -# -# Use the provided template to set the name of/relative path to the file -# Note that the while-loop below is over all METplus time string templates -# of the form {...} in the template fn_template; it continues until all -# such templates have been evaluated to actual time strings. 
-# - fn="${fn_template}" - regex_search_tmpl="(.*)(\{.*\})(.*)" - crnt_tmpl=$( printf "%s" "${fn_template}" | \ - $SED -n -r -e "s|${regex_search_tmpl}|\2|p" ) - remainder=$( printf "%s" "${fn_template}" | \ - $SED -n -r -e "s|${regex_search_tmpl}|\1\3|p" ) - while [ ! -z "${crnt_tmpl}" ]; do - - eval_METplus_timestr_tmpl \ - init_time="$cdate" \ - fhr="$fhr" \ - METplus_timestr_tmpl="${crnt_tmpl}" \ - outvarname_formatted_time="actual_value" -# -# Replace METplus time templates in fn with actual times. Note that -# when using sed, we need to escape various characters (question mark, -# closing and opening curly braces, etc) in the METplus template in -# order for the sed command below to work properly. -# - crnt_tmpl_esc=$( echo "${crnt_tmpl}" | \ - $SED -r -e "s/\?/\\\?/g" -e "s/\{/\\\{/g" -e "s/\}/\\\}/g" ) - fn=$( echo "${fn}" | \ - $SED -n -r "s|(.*)(${crnt_tmpl_esc})(.*)|\1${actual_value}\3|p" ) -# -# Set up values for the next iteration of the while-loop. -# - crnt_tmpl=$( printf "%s" "${remainder}" | \ - $SED -n -r -e "s|${regex_search_tmpl}|\2|p" ) - remainder=$( printf "%s" "${remainder}" | \ - $SED -n -r -e "s|${regex_search_tmpl}|\1\3|p" ) - - done -# -# Get the full path to the file and check if it exists. -# - fp="${base_dir}/${fn}" - - if [ -f "${fp}" ]; then - print_info_msg "\ -Found file (fp) for the current forecast hour (fhr; relative to the cycle -date cdate): - fhr = \"$fhr\" - cdate = \"$cdate\" - fp = \"${fp}\" -" - else - skip_this_fhr="TRUE" - num_missing_files=$(( ${num_missing_files} + 1 )) - print_info_msg "\ -The file (fp) for the current forecast hour (fhr; relative to the cycle -date cdate) is missing: - fhr = \"$fhr\" - cdate = \"$cdate\" - fp = \"${fp}\" -Excluding the current forecast hour from the list of hours passed to the -METplus configuration file. -" - break - fi - - fhr=$(( $fhr + 1 )) - - done - - if [ "${skip_this_fhr}" != "TRUE" ]; then - fhr_list="${fhr_list},${fhr_orig}" - fi - - done -# -# Remove leading comma from fhr_list. -# - fhr_list=$( echo "${fhr_list}" | $SED "s/^,//g" ) - print_info_msg "$VERBOSE" "\ -Final (i.e. after filtering for missing files) set of foreast hours is -(written as a single string): - fhr_list = \"${fhr_list}\" -" -# -#----------------------------------------------------------------------- -# -# If the number of missing files is greater than the maximum allowed -# (specified by num_missing_files_max), print out an error message and -# exit. -# -#----------------------------------------------------------------------- -# - if [ "${num_missing_files}" -gt "${num_missing_files_max}" ]; then - print_err_msg_exit "\ -The number of missing files (num_missing_files) is greater than the -maximum allowed number (num_missing_files_max): - num_missing_files = ${num_missing_files} - num_missing_files_max = ${num_missing_files_max}" - fi -# -#----------------------------------------------------------------------- -# -# Set output variables. -# -#----------------------------------------------------------------------- -# - if [ ! -z "${outvarname_fhr_list}" ]; then - printf -v ${outvarname_fhr_list} "%s" "${fhr_list}" - fi -# -#----------------------------------------------------------------------- -# -# Restore the shell options saved at the beginning of this script/function. 
-# -#----------------------------------------------------------------------- -# - { restore_shell_opts; } > /dev/null 2>&1 - -} diff --git a/ush/set_vx_params.sh b/ush/set_vx_params.sh index 9b67e36d22..993e45ac67 100644 --- a/ush/set_vx_params.sh +++ b/ush/set_vx_params.sh @@ -3,8 +3,10 @@ # # This file defines a function that sets various parameters needed when # performing verification. The way these parameters are set depends on -# the field being verified and, if the field is accumulated precipitation, -# the accumulation period (both of which are inputs to this function). +# the field group being verified and, if the field group consists of a +# set of cumulative fields (e.g. accumulated precipitation or accumulated +# snowfall), the accumulation interval (both of which are inputs to this +# function). # # As of 20220928, the verification tasks in the SRW App workflow use the # MET/METplus software (MET = Model Evaluation Tools) developed at the @@ -53,7 +55,7 @@ function set_vx_params() { # local valid_args=( \ "obtype" \ - "field" \ + "field_group" \ "accum_hh" \ "outvarname_grid_or_point" \ "outvarname_fieldname_in_obs_input" \ @@ -91,10 +93,14 @@ function set_vx_params() { # #----------------------------------------------------------------------- # - if [[ ! "${accum_hh}" =~ ^[0-9]{2}$ ]]; then - print_err_msg_exit "\ -The accumulation (accum_hh) must be a 2-digit integer: + if [ "${obtype}" = "CCPA" ] || [ "${obtype}" = "NOHRSC" ]; then + if [[ ! "${accum_hh}" =~ ^[0-9]{2}$ ]]; then + print_err_msg_exit "\ +For the given observation type (obtype), the accumulation (accum_hh) must +be a 2-digit integer: + obtype = \"${obtype}\" accum_hh = \"${accum_hh}\"" + fi fi # #----------------------------------------------------------------------- @@ -103,15 +109,17 @@ The accumulation (accum_hh) must be a 2-digit integer: # # grid_or_point: # String that is set to either "grid" or "point" depending on whether -# the field in consideration has obs that are gridded or point-based. +# obs type containing the field group is gridded or point-based. # # fieldname_in_obs_input: -# String used to search for the field in the input observation files -# read in by MET. +# If the field group represents a single field, this is the string used +# to search for that field in the input observation files read in by MET. +# If not, this is set to a null string. # # fieldname_in_fcst_input: -# String used to search for the field in the input forecast files read -# in by MET. +# If the field group represents a single field, this is the string used +# to search for that field in the input forecast files read in by MET. +# If not, this is set to a null string. 
# # fieldname_in_MET_output: # String that will be used in naming arrays defined in MET output files @@ -135,21 +143,21 @@ The accumulation (accum_hh) must be a 2-digit integer: "CCPA") _grid_or_point_="grid" - case "${field}" in + case "${field_group}" in "APCP") - fieldname_in_obs_input="${field}" - fieldname_in_fcst_input="${field}" - fieldname_in_MET_output="${field}" - fieldname_in_MET_filedir_names="${field}${accum_hh}h" + fieldname_in_obs_input="${field_group}" + fieldname_in_fcst_input="${field_group}" + fieldname_in_MET_output="${field_group}" + fieldname_in_MET_filedir_names="${field_group}${accum_hh}h" ;; *) print_err_msg_exit "\ A method for setting verification parameters has not been specified for -this observation type (obtype) and field (field) combination: +this observation type (obtype) and field group (field_group) combination: obtype = \"${obtype}\" - field = \"${field}\"" + field_group = \"${field_group}\"" ;; esac @@ -158,21 +166,21 @@ this observation type (obtype) and field (field) combination: "NOHRSC") _grid_or_point_="grid" - case "${field}" in + case "${field_group}" in "ASNOW") - fieldname_in_obs_input="${field}" - fieldname_in_fcst_input="${field}" - fieldname_in_MET_output="${field}" - fieldname_in_MET_filedir_names="${field}${accum_hh}h" + fieldname_in_obs_input="${field_group}" + fieldname_in_fcst_input="${field_group}" + fieldname_in_MET_output="${field_group}" + fieldname_in_MET_filedir_names="${field_group}${accum_hh}h" ;; *) print_err_msg_exit "\ A method for setting verification parameters has not been specified for -this observation type (obtype) and field (field) combination: +this observation type (obtype) and field group (field_group) combination: obtype = \"${obtype}\" - field = \"${field}\"" + field_group = \"${field_group}\"" ;; esac @@ -181,28 +189,28 @@ this observation type (obtype) and field (field) combination: "MRMS") _grid_or_point_="grid" - case "${field}" in + case "${field_group}" in "REFC") fieldname_in_obs_input="MergedReflectivityQCComposite" - fieldname_in_fcst_input="${field}" - fieldname_in_MET_output="${field}" - fieldname_in_MET_filedir_names="${field}" + fieldname_in_fcst_input="${field_group}" + fieldname_in_MET_output="${field_group}" + fieldname_in_MET_filedir_names="${field_group}" ;; "RETOP") fieldname_in_obs_input="EchoTop18" - fieldname_in_fcst_input="${field}" - fieldname_in_MET_output="${field}" - fieldname_in_MET_filedir_names="${field}" + fieldname_in_fcst_input="${field_group}" + fieldname_in_MET_output="${field_group}" + fieldname_in_MET_filedir_names="${field_group}" ;; *) print_err_msg_exit "\ A method for setting verification parameters has not been specified for -this observation type (obtype) and field (field) combination: +this observation type (obtype) and field group (field_group) combination: obtype = \"${obtype}\" - field = \"${field}\"" + field_group = \"${field_group}\"" ;; esac @@ -211,28 +219,28 @@ this observation type (obtype) and field (field) combination: "NDAS") _grid_or_point_="point" - case "${field}" in + case "${field_group}" in - "ADPSFC") + "SFC") fieldname_in_obs_input="" fieldname_in_fcst_input="" - fieldname_in_MET_output="${field}" - fieldname_in_MET_filedir_names="${field}" + fieldname_in_MET_output="ADP${field_group}" + fieldname_in_MET_filedir_names="ADP${field_group}" ;; - "ADPUPA") + "UPA") fieldname_in_obs_input="" fieldname_in_fcst_input="" - fieldname_in_MET_output="${field}" - fieldname_in_MET_filedir_names="${field}" + fieldname_in_MET_output="ADP${field_group}" + 
fieldname_in_MET_filedir_names="ADP${field_group}" ;; *) print_err_msg_exit "\ A method for setting verification parameters has not been specified for -this observation type (obtype) and field (field) combination: +this observation type (obtype) and field group (field_group) combination: obtype = \"${obtype}\" - field = \"${field}\"" + field_group = \"${field_group}\"" ;; esac diff --git a/ush/setup.py b/ush/setup.py index 0511653fa2..d5ba107a04 100644 --- a/ush/setup.py +++ b/ush/setup.py @@ -10,10 +10,13 @@ from textwrap import dedent import yaml +from uwtools.api.config import get_yaml_config +from pprint import pprint from python_utils import ( log_info, cd_vrfy, + date_to_str, mkdir_vrfy, rm_vrfy, check_var_valid_value, @@ -37,23 +40,37 @@ load_xml_file, ) -from set_cycle_dates import set_cycle_dates +from set_cycle_and_obs_timeinfo import \ + set_cycle_dates, set_fcst_output_times_and_obs_days_all_cycles, \ + set_rocoto_cycledefs_for_obs_days, \ + check_temporal_consistency_cumul_fields, \ + get_obs_retrieve_times_by_day from set_predef_grid_params import set_predef_grid_params from set_gridparams_ESGgrid import set_gridparams_ESGgrid from set_gridparams_GFDLgrid import set_gridparams_GFDLgrid from link_fix import link_fix def load_config_for_setup(ushdir, default_config, user_config): - """Load in the default, machine, and user configuration files into - Python dictionaries. Return the combined experiment dictionary. + """Updates a Python dictionary in place with experiment configuration settings from the + default, machine, and user configuration files. Args: - ushdir (str): Path to the ush directory for SRW - default_config (str): Path to the default config YAML - user_config (str): Path to the user-provided config YAML + ushdir (str): Path to the ``ush`` directory for the SRW App + default_config (str): Path to ``config_defaults.yaml`` + user_config (str): Path to the user-provided config YAML (usually named + ``config.yaml``) Returns: - Python dict of configuration settings from YAML files. + cfg_d (dict): Experiment configuration dictionary based on default, + machine, and user config files + do_vx (bool): Flag specifying whether workflow will run vx tasks + + Raises: + FileNotFoundError: If the user-provided configuration file or the machine file does not + exist. + Exception: If (1) the user-provided configuration file cannot be loaded or (2) it contains + invalid sections/keys or (3) it does not contain mandatory information or (4) + an invalid datetime format is used. """ # Load the default config. @@ -155,11 +172,13 @@ def load_config_for_setup(ushdir, default_config, user_config): if taskgroups: cfg_wflow['rocoto']['tasks']['taskgroups'] = taskgroups + # Save string specifying final workflow taskgroups for use later on. + taskgroups = cfg_wflow['rocoto']['tasks']['taskgroups'] + # Extend yaml here on just the rocoto section to include the # appropriate groups of tasks extend_yaml(cfg_wflow) - # Put the entries expanded under taskgroups in tasks rocoto_tasks = cfg_wflow["rocoto"]["tasks"] cfg_wflow["rocoto"]["tasks"] = yaml.load(rocoto_tasks.pop("taskgroups"),Loader=yaml.SafeLoader) @@ -168,8 +187,8 @@ def load_config_for_setup(ushdir, default_config, user_config): # the "null" settings are removed, i.e., tasks turned off. 
update_dict(cfg_u.get('rocoto', {}), cfg_wflow["rocoto"]) - def add_jobname(tasks): - """ Add the jobname entry for all the tasks in the workflow """ + def _add_jobname(tasks): + """ Adds the jobname entry for all the tasks in the workflow """ if not isinstance(tasks, dict): return @@ -182,11 +201,11 @@ def add_jobname(tasks): task_settings.get("attrs", {}).get("name") or \ task.split("_", maxsplit=1)[1] elif task_type == "metatask": - add_jobname(task_settings) + _add_jobname(task_settings) # Add jobname entry to each remaining task - add_jobname(cfg_wflow["rocoto"]["tasks"]) + _add_jobname(cfg_wflow["rocoto"]["tasks"]) # Update default config with the constants, the machine config, and # then the user_config @@ -226,7 +245,57 @@ def add_jobname(tasks): except: pass cfg_d["workflow"]["EXPT_BASEDIR"] = os.path.abspath(expt_basedir) - + # + # ----------------------------------------------------------------------- + # + # If the workflow includes at least one verification task, ensure that + # the configuration parameters associated with cumulative fields (e.g. + # APCP) in the verification section of the experiment dictionary are + # temporally consistent, e.g. that accumulation intervals are less than + # or equal to the forecast length. Update the verification section of + # the dictionary to remove inconsistencies. + # + # ----------------------------------------------------------------------- + # + # List containing the names of all workflow config files for vx (regardless + # of whether they're included in the workflow). + vx_taskgroup_fns = ['verify_pre.yaml', 'verify_det.yaml', 'verify_ens.yaml'] + # Flag that specifies whether the workflow will be running any vx tasks. + do_vx = any([fn for fn in vx_taskgroup_fns if fn in taskgroups]) + + # Initialize variable containing the vx configuration. This may be + # modified within the if-statement below. + vx_config = cfg_d["verification"] + + if do_vx: + workflow_config = cfg_d["workflow"] + + date_first_cycl = workflow_config.get("DATE_FIRST_CYCL") + date_last_cycl = workflow_config.get("DATE_LAST_CYCL") + incr_cycl_freq = int(workflow_config.get("INCR_CYCL_FREQ")) + fcst_len_hrs = workflow_config.get("FCST_LEN_HRS") + vx_fcst_output_intvl_hrs = vx_config.get("VX_FCST_OUTPUT_INTVL_HRS") + + # Convert various times and time intervals from integers or strings to + # datetime or timedelta objects. + date_first_cycl_dt = datetime.datetime.strptime(date_first_cycl, "%Y%m%d%H") + date_last_cycl_dt = datetime.datetime.strptime(date_last_cycl, "%Y%m%d%H") + cycl_intvl_dt = datetime.timedelta(hours=incr_cycl_freq) + fcst_len_dt = datetime.timedelta(hours=fcst_len_hrs) + vx_fcst_output_intvl_dt = datetime.timedelta(hours=vx_fcst_output_intvl_hrs) + + # Generate a list containing the starting times of the cycles. + cycle_start_times \ + = set_cycle_dates(date_first_cycl_dt, date_last_cycl_dt, cycl_intvl_dt, + return_type='datetime') + + # Call function that runs the consistency checks on the vx parameters.
+ vx_config, fcst_obs_matched_times_all_cycles_cumul \ + = check_temporal_consistency_cumul_fields( + vx_config, cycle_start_times, fcst_len_dt, vx_fcst_output_intvl_dt) + + + cfg_d['verification'] = vx_config extend_yaml(cfg_d) # Do any conversions of data types @@ -253,7 +322,7 @@ def add_jobname(tasks): Mandatory variable "{val}" not found in: user config file {user_config} OR - machine file {machine_file} + machine file {machine_file} """ ) ) @@ -265,33 +334,38 @@ def add_jobname(tasks): raise Exception( dedent( f""" - Date variable {val}={cfg_d['workflow'][val]} is not in a valid date format. + Date variable {val}={cfg_d['workflow'][val]} is not in a valid date format. - For examples of valid formats, see the Users' Guide. - """ + For examples of valid formats, see the Users' Guide. + """ ) ) - return cfg_d + return cfg_d, do_vx def set_srw_paths(ushdir, expt_config): """ - Generate a dictionary of directories that describe the SRW - structure, i.e., where SRW is installed, and the paths to - external repositories managed via the manage_externals tool. + Generates a dictionary of directories that describe the SRW App + structure, i.e., where the SRW App is installed and the paths to + external repositories managed via the ``manage_externals`` tool. - Other paths for SRW are set as defaults in config_defaults.yaml + Other paths for the SRW App are set as defaults in ``config_defaults.yaml``. Args: - ushdir: (str) path to the system location of the ush/ directory - under the SRW clone - expt_config: (dict) contains the configuration settings for the - user-defined experiment + ushdir (str) : Path to the system location of the ``ush`` directory under the + SRW App clone + expt_config (dict): Contains the configuration settings for the user-defined experiment Returns: - dictionary of config settings and system paths as keys/values + Dictionary of configuration settings and system paths as keys/values + + Raises: + KeyError: If the external repository required is not listed in the externals + configuration file (e.g., ``Externals.cfg``) + FileNotFoundError: If the ``ufs-weather-model`` code containing the FV3 source code has + not been cloned properly """ # HOMEdir is the location of the SRW clone, one directory above ush/ @@ -340,24 +414,36 @@ def set_srw_paths(ushdir, expt_config): def setup(USHdir, user_config_fn="config.yaml", debug: bool = False): - """Function that validates user-provided configuration, and derives - a secondary set of parameters needed to configure a Rocoto-based SRW - workflow. The derived parameters use a set of required user-defined - parameters defined by either config_defaults.yaml, a user-provided - configuration file (config.yaml), or a YAML machine file. + """Validates user-provided configuration settings and derives + a secondary set of parameters needed to configure a Rocoto-based SRW App + workflow. The secondary parameters are derived from a set of required + parameters defined in ``config_defaults.yaml``, a user-provided + configuration file (e.g., ``config.yaml``), or a YAML machine file. A set of global variable definitions is saved to the experiment directory as a bash configure file that is sourced by scripts at run time. 
Args: - USHdir (str): The full path of the ush/ directory where - this script is located - user_config_fn (str): The name of a user-provided config YAML - debug (bool): Enable extra output for debugging + USHdir (str): The full path of the ``ush/`` directory where this script + (``setup.py``) is located + user_config_fn (str): The name of a user-provided configuration YAML (usually + ``config.yaml``) + debug (bool): Enable extra output for debugging Returns: - None + None + + Raises: + ValueError: If checked configuration values are invalid (e.g., forecast length, + ``EXPTDIR`` path) + FileExistsError: If ``EXPTDIR`` already exists, and ``PREEXISTING_DIR_METHOD`` is not + set to a compatible handling method + FileNotFoundError: If the path to a particular file does not exist or if the file itself + does not exist at the expected path + TypeError: If ``USE_CUSTOM_POST_CONFIG_FILE`` or ``USE_CRTM`` are set to true but no + corresponding custom configuration file or CRTM fix file directory is set + KeyError: If an invalid value is provided (i.e., for ``GRID_GEN_METHOD``) """ logger = logging.getLogger(__name__) @@ -374,7 +460,19 @@ def setup(USHdir, user_config_fn="config.yaml", debug: bool = False): # user config files. default_config_fp = os.path.join(USHdir, "config_defaults.yaml") user_config_fp = os.path.join(USHdir, user_config_fn) - expt_config = load_config_for_setup(USHdir, default_config_fp, user_config_fp) + expt_config, do_vx = load_config_for_setup(USHdir, default_config_fp, user_config_fp) + + # Load build settings as a dictionary; will be used later to make sure the build is consistent with the user settings + build_config_fp = os.path.join(expt_config["user"].get("EXECdir"), "build_settings.yaml") + build_config = load_config_file(build_config_fp) + logger.debug(f"Read build configuration from {build_config_fp}\n{build_config}") + + # Fail if build machine and config machine are inconsistent + if build_config["Machine"].upper() != expt_config["user"]["MACHINE"]: + logger.critical("ERROR: Machine in build settings file != machine specified in config file") + logger.critical(f"build machine: {build_config['Machine']}") + logger.critical(f"config machine: {expt_config['user']['MACHINE']}") + raise ValueError("Check config settings for correct value for 'machine'") # Set up some paths relative to the SRW clone expt_config["user"].update(set_srw_paths(USHdir, expt_config)) @@ -448,7 +546,7 @@ def setup(USHdir, user_config_fn="config.yaml", debug: bool = False): f""" EXPTDIR ({exptdir}) already exists, and PREEXISTING_DIR_METHOD = {preexisting_dir_method} - To ignore this error, delete the directory, or set + To ignore this error, delete the directory, or set PREEXISTING_DIR_METHOD = delete, or PREEXISTING_DIR_METHOD = rename in your config file. 
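Several call sites in this file pass a timedelta interval and a return_type argument to set_cycle_dates(), which now lives in set_cycle_and_obs_timeinfo.py (not shown in this diff). A hedged sketch of its assumed behavior, inferred from those call sites and from the deleted ush/set_cycle_dates.py above:

from datetime import datetime, timedelta

def set_cycle_dates_sketch(date_start, date_end, cycl_intvl, return_type="string"):
    # Walk from the first to the last cycle start time in steps of cycl_intvl,
    # returning datetime objects or "YYYYMMDDHH" strings depending on return_type.
    all_cdates, cdate = [], date_start
    while cdate <= date_end:
        all_cdates.append(cdate if return_type == "datetime"
                          else datetime.strftime(cdate, "%Y%m%d%H"))
        cdate += cycl_intvl
    return all_cdates

# Four 12-hourly cycle start times: ['2024070100', '2024070112', '2024070200', '2024070212']
print(set_cycle_dates_sketch(datetime(2024, 7, 1, 0), datetime(2024, 7, 2, 12),
                             timedelta(hours=12)))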
@@ -511,7 +609,7 @@ def setup(USHdir, user_config_fn="config.yaml", debug: bool = False): ) ) - def remove_tag(tasks, tag): + def _remove_tag(tasks, tag): """ Remove the tag for all the tasks in the workflow """ if not isinstance(tasks, dict): @@ -521,83 +619,236 @@ def remove_tag(tasks, tag): if task_type == "task": task_settings.pop(tag, None) elif task_type == "metatask": - remove_tag(task_settings, tag) + _remove_tag(task_settings, tag) # Remove all memory tags for platforms that do not support them remove_memory = expt_config["platform"].get("REMOVE_MEMORY") if remove_memory: - remove_tag(rocoto_tasks, "memory") + _remove_tag(rocoto_tasks, "memory") for part in ['PARTITION_HPSS', 'PARTITION_DEFAULT', 'PARTITION_FCST']: partition = expt_config["platform"].get(part) if not partition: - remove_tag(rocoto_tasks, 'partition') + _remove_tag(rocoto_tasks, 'partition') # When not running subhourly post, remove those tasks, if they exist if not expt_config.get("task_run_post", {}).get("SUB_HOURLY_POST"): post_meta = rocoto_tasks.get("metatask_run_ens_post", {}) post_meta.pop("metatask_run_sub_hourly_post", None) post_meta.pop("metatask_sub_hourly_last_hour_post", None) + + + date_first_cycl = workflow_config.get("DATE_FIRST_CYCL") + date_last_cycl = workflow_config.get("DATE_LAST_CYCL") + incr_cycl_freq = int(workflow_config.get("INCR_CYCL_FREQ")) + cycl_intvl_dt = datetime.timedelta(hours=incr_cycl_freq) # # ----------------------------------------------------------------------- # - # Remove all verification [meta]tasks for which no fields are specified. + # If running vx tasks, check and possibly reset values in expt_config + # and rocoto_config. # # ----------------------------------------------------------------------- # - vx_fields_all = {} - vx_metatasks_all = {} - - vx_fields_all["CCPA"] = ["APCP"] - vx_metatasks_all["CCPA"] = ["metatask_PcpCombine_obs", - "metatask_PcpCombine_fcst_APCP_all_accums_all_mems", - "metatask_GridStat_CCPA_all_accums_all_mems", - "metatask_GenEnsProd_EnsembleStat_CCPA", - "metatask_GridStat_CCPA_ensmeanprob_all_accums"] - - vx_fields_all["NOHRSC"] = ["ASNOW"] - vx_metatasks_all["NOHRSC"] = ["task_get_obs_nohrsc", - "metatask_PcpCombine_fcst_ASNOW_all_accums_all_mems", - "metatask_GridStat_NOHRSC_all_accums_all_mems", - "metatask_GenEnsProd_EnsembleStat_NOHRSC", - "metatask_GridStat_NOHRSC_ensmeanprob_all_accums"] - - vx_fields_all["MRMS"] = ["REFC", "RETOP"] - vx_metatasks_all["MRMS"] = ["metatask_GridStat_MRMS_all_mems", - "metatask_GenEnsProd_EnsembleStat_MRMS", - "metatask_GridStat_MRMS_ensprob"] - - vx_fields_all["NDAS"] = ["ADPSFC", "ADPUPA"] - vx_metatasks_all["NDAS"] = ["task_run_MET_Pb2nc_obs", - "metatask_PointStat_NDAS_all_mems", - "metatask_GenEnsProd_EnsembleStat_NDAS", - "metatask_PointStat_NDAS_ensmeanprob"] - - # Get the vx fields specified in the experiment configuration. - vx_fields_config = expt_config["verification"]["VX_FIELDS"] - - # If there are no vx fields specified, remove those tasks that are necessary - # for all observation types. - if not vx_fields_config: - metatask = "metatask_check_post_output_all_mems" - rocoto_config['tasks'].pop(metatask) - - # If for a given obstype no fields are specified, remove all vx metatasks - # for that obstype. 
- for obstype in vx_fields_all: - vx_fields_obstype = [field for field in vx_fields_config if field in vx_fields_all[obstype]] - if not vx_fields_obstype: - for metatask in vx_metatasks_all[obstype]: - if metatask in rocoto_config['tasks']: - logging.info(dedent( - f""" - Removing verification [meta]task - "{metatask}" - from workflow since no fields belonging to observation type "{obstype}" - are specified for verification.""" - )) - rocoto_config['tasks'].pop(metatask) - + if do_vx: + # + # ----------------------------------------------------------------------- + # + # Set some variables needed for running checks on and creating new + # (derived) configuration variables for the verification. + # + # ----------------------------------------------------------------------- + # + vx_config = expt_config["verification"] + + fcst_len_hrs = workflow_config.get("FCST_LEN_HRS") + vx_fcst_output_intvl_hrs = vx_config.get("VX_FCST_OUTPUT_INTVL_HRS") + + # To enable arithmetic with dates and times, convert various time + # intervals from integer to datetime.timedelta objects. + fcst_len_dt = datetime.timedelta(hours=fcst_len_hrs) + vx_fcst_output_intvl_dt = datetime.timedelta(hours=vx_fcst_output_intvl_hrs) + # + # ----------------------------------------------------------------------- + # + # Generate a list containing the starting times of the cycles. This will + # be needed in checking that the hours-of-day of the forecast output match + # those of the observations. + # + # ----------------------------------------------------------------------- + # + cycle_start_times \ + = set_cycle_dates(date_first_cycl, date_last_cycl, cycl_intvl_dt, + return_type='datetime') + # + # ----------------------------------------------------------------------- + # + # Generate a list of forecast output times and a list of obs days (i.e. + # days on which observations are needed to perform verification because + # there is forecast output on those days) over all cycles, both for + # instantaneous fields (e.g. T2m, REFC, RETOP) and for cumulative ones + # (e.g. APCP). Then add these lists to the dictionary containing workflow + # configuration variables. These will be needed in generating the ROCOTO + # XML. + # + # ----------------------------------------------------------------------- + # + fcst_output_times_all_cycles, obs_days_all_cycles, \ + = set_fcst_output_times_and_obs_days_all_cycles( + cycle_start_times, fcst_len_dt, vx_fcst_output_intvl_dt) + + workflow_config['OBS_DAYS_ALL_CYCLES_INST'] = obs_days_all_cycles['inst'] + workflow_config['OBS_DAYS_ALL_CYCLES_CUMUL'] = obs_days_all_cycles['cumul'] + # + # ----------------------------------------------------------------------- + # + # Generate lists of ROCOTO cycledef strings corresponding to the obs days + # for instantaneous fields and those for cumulative ones. Then save the + # lists of cycledefs in the dictionary containing values needed to + # construct the ROCOTO XML.
+ # + # ----------------------------------------------------------------------- + # + cycledefs_obs_days_inst = set_rocoto_cycledefs_for_obs_days(obs_days_all_cycles['inst']) + cycledefs_obs_days_cumul = set_rocoto_cycledefs_for_obs_days(obs_days_all_cycles['cumul']) + + rocoto_config['cycledefs']['cycledefs_obs_days_inst'] = cycledefs_obs_days_inst + rocoto_config['cycledefs']['cycledefs_obs_days_cumul'] = cycledefs_obs_days_cumul + # + # ----------------------------------------------------------------------- + # + # Generate dictionary of dictionaries that, for each combination of obs + # type needed and obs day, contains a string list of the times at which + # that type of observation is needed on that day. The elements of each + # list are formatted as 'YYYYMMDDHH'. This information is used by the + # day-based get_obs tasks in the workflow to get obs only at those times + # at which they are needed (as opposed to for the whole day). + # + # ----------------------------------------------------------------------- + # + obs_retrieve_times_by_day \ + = get_obs_retrieve_times_by_day( + vx_config, cycle_start_times, fcst_len_dt, + fcst_output_times_all_cycles, obs_days_all_cycles) + + for obtype, obs_days_dict in obs_retrieve_times_by_day.items(): + for obs_day, obs_retrieve_times in obs_days_dict.items(): + array_name = '_'.join(["OBS_RETRIEVE_TIMES", obtype, obs_day]) + vx_config[array_name] = obs_retrieve_times + expt_config["verification"] = vx_config + # + # ----------------------------------------------------------------------- + # + # Remove all verification (meta)tasks for which no fields are specified. + # + # ----------------------------------------------------------------------- + # + vx_field_groups_all_by_obtype = {} + vx_metatasks_all_by_obtype = {} + + vx_field_groups_all_by_obtype["CCPA"] = ["APCP"] + vx_metatasks_all_by_obtype["CCPA"] \ + = ["task_get_obs_ccpa", + "metatask_PcpCombine_APCP_all_accums_obs_CCPA", + "metatask_PcpCombine_APCP_all_accums_all_mems", + "metatask_GridStat_APCP_all_accums_all_mems", + "metatask_GenEnsProd_EnsembleStat_APCP_all_accums", + "metatask_GridStat_APCP_all_accums_ensmeanprob"] + + vx_field_groups_all_by_obtype["NOHRSC"] = ["ASNOW"] + vx_metatasks_all_by_obtype["NOHRSC"] \ + = ["task_get_obs_nohrsc", + "metatask_PcpCombine_ASNOW_all_accums_obs_NOHRSC", + "metatask_PcpCombine_ASNOW_all_accums_all_mems", + "metatask_GridStat_ASNOW_all_accums_all_mems", + "metatask_GenEnsProd_EnsembleStat_ASNOW_all_accums", + "metatask_GridStat_ASNOW_all_accums_ensmeanprob"] + + vx_field_groups_all_by_obtype["MRMS"] = ["REFC", "RETOP"] + vx_metatasks_all_by_obtype["MRMS"] \ + = ["task_get_obs_mrms", + "metatask_GridStat_REFC_RETOP_all_mems", + "metatask_GenEnsProd_EnsembleStat_REFC_RETOP", + "metatask_GridStat_REFC_RETOP_ensprob"] + + vx_field_groups_all_by_obtype["NDAS"] = ["SFC", "UPA"] + vx_metatasks_all_by_obtype["NDAS"] \ + = ["task_get_obs_ndas", + "task_run_MET_Pb2nc_obs_NDAS", + "metatask_PointStat_SFC_UPA_all_mems", + "metatask_GenEnsProd_EnsembleStat_SFC_UPA", + "metatask_PointStat_SFC_UPA_ensmeanprob"] + + # If there are no field groups specified for verification, remove those + # tasks that are common to all observation types. + vx_field_groups = vx_config["VX_FIELD_GROUPS"] + if not vx_field_groups: + metatask = "metatask_check_post_output_all_mems" + rocoto_config['tasks'].pop(metatask) + + # If for a given obs type none of its field groups are specified for + # verification, remove all vx metatasks for that obs type. 
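To illustrate the pruning performed by the loop below (hypothetical VX_FIELD_GROUPS selection, not taken from the patch): an obs type keeps its vx metatasks only if at least one of its field groups is requested.

# With APCP and REFC requested, NOHRSC (ASNOW) and NDAS (SFC/UPA) have no
# overlap, so all of their vx metatasks would be removed from the workflow.
vx_field_groups = ["APCP", "REFC"]
vx_field_groups_all_by_obtype = {"CCPA": ["APCP"], "NOHRSC": ["ASNOW"],
                                 "MRMS": ["REFC", "RETOP"], "NDAS": ["SFC", "UPA"]}
for obtype, field_groups in vx_field_groups_all_by_obtype.items():
    overlap = sorted(set(vx_field_groups) & set(field_groups))
    print(obtype, overlap)
# CCPA ['APCP'], NOHRSC [], MRMS ['REFC'], NDAS [] -> NOHRSC and NDAS pruned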
+        #
+        # -----------------------------------------------------------------------
+        #
+        # Remove all verification (meta)tasks for which no field groups are specified.
+        #
+        # -----------------------------------------------------------------------
+        #
+        vx_field_groups_all_by_obtype = {}
+        vx_metatasks_all_by_obtype = {}
+
+        vx_field_groups_all_by_obtype["CCPA"] = ["APCP"]
+        vx_metatasks_all_by_obtype["CCPA"] \
+        = ["task_get_obs_ccpa",
+           "metatask_PcpCombine_APCP_all_accums_obs_CCPA",
+           "metatask_PcpCombine_APCP_all_accums_all_mems",
+           "metatask_GridStat_APCP_all_accums_all_mems",
+           "metatask_GenEnsProd_EnsembleStat_APCP_all_accums",
+           "metatask_GridStat_APCP_all_accums_ensmeanprob"]
+
+        vx_field_groups_all_by_obtype["NOHRSC"] = ["ASNOW"]
+        vx_metatasks_all_by_obtype["NOHRSC"] \
+        = ["task_get_obs_nohrsc",
+           "metatask_PcpCombine_ASNOW_all_accums_obs_NOHRSC",
+           "metatask_PcpCombine_ASNOW_all_accums_all_mems",
+           "metatask_GridStat_ASNOW_all_accums_all_mems",
+           "metatask_GenEnsProd_EnsembleStat_ASNOW_all_accums",
+           "metatask_GridStat_ASNOW_all_accums_ensmeanprob"]
+
+        vx_field_groups_all_by_obtype["MRMS"] = ["REFC", "RETOP"]
+        vx_metatasks_all_by_obtype["MRMS"] \
+        = ["task_get_obs_mrms",
+           "metatask_GridStat_REFC_RETOP_all_mems",
+           "metatask_GenEnsProd_EnsembleStat_REFC_RETOP",
+           "metatask_GridStat_REFC_RETOP_ensprob"]
+
+        vx_field_groups_all_by_obtype["NDAS"] = ["SFC", "UPA"]
+        vx_metatasks_all_by_obtype["NDAS"] \
+        = ["task_get_obs_ndas",
+           "task_run_MET_Pb2nc_obs_NDAS",
+           "metatask_PointStat_SFC_UPA_all_mems",
+           "metatask_GenEnsProd_EnsembleStat_SFC_UPA",
+           "metatask_PointStat_SFC_UPA_ensmeanprob"]
+
+        # If there are no field groups specified for verification, remove those
+        # tasks that are common to all observation types.
+        vx_field_groups = vx_config["VX_FIELD_GROUPS"]
+        if not vx_field_groups:
+            metatask = "metatask_check_post_output_all_mems"
+            rocoto_config['tasks'].pop(metatask)
+
+        # If for a given obs type none of its field groups are specified for
+        # verification, remove all vx metatasks for that obs type.
+        for obtype in vx_field_groups_all_by_obtype:
+            vx_field_groups_crnt_obtype = list(set(vx_field_groups) & set(vx_field_groups_all_by_obtype[obtype]))
+            if not vx_field_groups_crnt_obtype:
+                for metatask in vx_metatasks_all_by_obtype[obtype]:
+                    if metatask in rocoto_config['tasks']:
+                        logging.info(dedent(
+                            f"""
+                            Removing verification (meta)task
+                                "{metatask}"
+                            from workflow since no field groups from observation type "{obtype}" are
+                            specified for verification."""
+                        ))
+                        rocoto_config['tasks'].pop(metatask)
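Note: the pruning above is a per-obs-type set intersection: when the user's VX_FIELD_GROUPS shares no members with an obs type's field groups, every vx (meta)task tied to that obs type is dropped. A self-contained sketch with abbreviated, made-up task tables:

    # Abbreviated stand-ins for the real field-group/metatask tables.
    field_groups_by_obtype = {"CCPA": ["APCP"], "MRMS": ["REFC", "RETOP"]}
    metatasks_by_obtype = {"CCPA": ["task_get_obs_ccpa"], "MRMS": ["task_get_obs_mrms"]}
    tasks = {"task_get_obs_ccpa": {}, "task_get_obs_mrms": {}}

    vx_field_groups = ["REFC"]  # user asks for reflectivity vx only
    for obtype, groups in field_groups_by_obtype.items():
        if not set(vx_field_groups) & set(groups):
            for metatask in metatasks_by_obtype[obtype]:
                tasks.pop(metatask, None)

    print(tasks)  # {'task_get_obs_mrms': {}} -- only the MRMS get_obs task survives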
+        #
+        # -----------------------------------------------------------------------
+        #
+        # If there are at least some field groups to verify, then make sure that
+        # the base directories in which retrieved obs files will be placed are
+        # distinct for the different obs types.
+        #
+        # -----------------------------------------------------------------------
+        #
+        if vx_field_groups:
+            obtypes_all = ['CCPA', 'NOHRSC', 'MRMS', 'NDAS']
+            obs_basedir_var_names = [f'{obtype}_OBS_DIR' for obtype in obtypes_all]
+            obs_basedirs_dict = {key: vx_config[key] for key in obs_basedir_var_names}
+            obs_basedirs_orig = list(obs_basedirs_dict.values())
+            obs_basedirs_uniq = list(set(obs_basedirs_orig))
+            if len(obs_basedirs_orig) != len(obs_basedirs_uniq):
+                msg1 = dedent(f"""
+                    The base directories for the obs files must be distinct, but at least two
+                    are identical:""")
+                msg2 = ''
+                for obs_basedir_var_name, obs_dir in obs_basedirs_dict.items():
+                    msg2 = msg2 + dedent(f"""
+                        {obs_basedir_var_name} = {obs_dir}""")
+                msg3 = dedent(f"""
+                    Modify these in the SRW App's user configuration file to make them distinct
+                    and rerun.
+                    """)
+                msg = msg1 + ' '.join(msg2.splitlines(True)) + msg3
+                logging.error(msg)
+                raise ValueError(msg)
+        #
+        # -----------------------------------------------------------------------
+        #
+        # The "cycled_from_second" cycledef in the default workflow configuration
+        # file (default_workflow.yaml) requires the starting date of the second
+        # cycle. That is difficult to calculate in the yaml file itself because
+        # there are currently no utilities to perform arithmetic with dates.
+        # Thus, we calculate it here and save it as a variable in the workflow
+        # configuration dictionary. Note that correct functioning of the default
+        # workflow yaml file also requires that DATE_[FIRST|SECOND|LAST]_CYCL all
+        # be strings, not datetime objects. We perform those conversions here.
+        #
+        # -----------------------------------------------------------------------
+        #
+        date_second_cycl = date_first_cycl + cycl_intvl_dt
+        workflow_config['DATE_FIRST_CYCL'] = datetime.datetime.strftime(date_first_cycl, "%Y%m%d%H")
+        workflow_config['DATE_SECOND_CYCL'] = datetime.datetime.strftime(date_second_cycl, "%Y%m%d%H")
+        workflow_config['DATE_LAST_CYCL'] = datetime.datetime.strftime(date_last_cycl, "%Y%m%d%H")
 #
 # -----------------------------------------------------------------------
 #
@@ -605,7 +856,7 @@ def remove_tag(tasks, tag):
 #
 # -----------------------------------------------------------------------
 #
-    def get_location(xcs, fmt, expt_cfg):
+    def _get_location(xcs, fmt, expt_cfg):
         ics_lbcs = expt_cfg.get("data", {}).get("ics_lbcs")
         if ics_lbcs is not None:
             v = ics_lbcs.get(xcs)
@@ -618,7 +869,7 @@ def get_location(xcs, fmt, expt_cfg):
 
     # Get the paths to any platform-supported data streams
    get_extrn_ics = expt_config.get("task_get_extrn_ics", {})
-    extrn_mdl_sysbasedir_ics = get_location(
+    extrn_mdl_sysbasedir_ics = _get_location(
         get_extrn_ics.get("EXTRN_MDL_NAME_ICS"),
         get_extrn_ics.get("FV3GFS_FILE_FMT_ICS"),
         expt_config,
@@ -626,7 +877,7 @@ def get_location(xcs, fmt, expt_cfg):
     get_extrn_ics["EXTRN_MDL_SYSBASEDIR_ICS"] = extrn_mdl_sysbasedir_ics
 
     get_extrn_lbcs = expt_config.get("task_get_extrn_lbcs", {})
-    extrn_mdl_sysbasedir_lbcs = get_location(
+    extrn_mdl_sysbasedir_lbcs = _get_location(
         get_extrn_lbcs.get("EXTRN_MDL_NAME_LBCS"),
         get_extrn_lbcs.get("FV3GFS_FILE_FMT_LBCS"),
         expt_config,
@@ -663,8 +914,7 @@ def get_location(xcs, fmt, expt_cfg):
     )
 
-    # Make sure the vertical coordinate file for both make_lbcs and
-    # make_ics is the same.
+    # Make sure the vertical coordinate file and LEVP for both make_lbcs and make_ics are the same.
     if ics_vcoord := expt_config.get("task_make_ics", {}).get("VCOORD_FILE") != \
             (lbcs_vcoord := expt_config.get("task_make_lbcs", {}).get("VCOORD_FILE")):
         raise ValueError(
@@ -680,6 +930,20 @@ def get_location(xcs, fmt, expt_cfg):
             VCOORD_FILE: {lbcs_vcoord}
             """
         )
+    if (ics_levp := expt_config.get("task_make_ics", {}).get("LEVP")) != \
+            (lbcs_levp := expt_config.get("task_make_lbcs", {}).get("LEVP")):
+        raise ValueError(
+            f"""
+            The number of vertical levels LEVP must be set to the same value for both the
+            make_ics and make_lbcs tasks. They are currently set to:
+
+            make_ics:
+              LEVP: {ics_levp}
+
+            make_lbcs:
+              LEVP: {lbcs_levp}
+            """
+        )
 
 #
 # -----------------------------------------------------------------------
@@ -749,11 +1013,6 @@ def get_location(xcs, fmt, expt_cfg):
 
     run_envir = expt_config["user"].get("RUN_ENVIR", "")
 
-    fcst_len_hrs = workflow_config.get("FCST_LEN_HRS")
-    date_first_cycl = workflow_config.get("DATE_FIRST_CYCL")
-    date_last_cycl = workflow_config.get("DATE_LAST_CYCL")
-    incr_cycl_freq = int(workflow_config.get("INCR_CYCL_FREQ"))
-
     # set varying forecast lengths only when fcst_len_hrs=-1
     if fcst_len_hrs == -1:
         fcst_len_cycl = workflow_config.get("FCST_LEN_CYCL")
@@ -765,12 +1024,12 @@ def get_location(xcs, fmt, expt_cfg):
 
             num_cycles = len(set_cycle_dates(
                 date_first_cycl,
                 date_last_cycl,
-                incr_cycl_freq))
+                cycl_intvl_dt))
 
             if num_cycles != len(fcst_len_cycl):
                 logger.error(f"""
                     The number of entries in FCST_LEN_CYCL does
                     not divide evenly into a 24 hour day or the number of cycles
-                    in your experiment!
+                    in your experiment! 
FCST_LEN_CYCL = {fcst_len_cycl} """ ) @@ -1151,7 +1410,7 @@ def get_location(xcs, fmt, expt_cfg): post_output_domain_name = lowercase(post_output_domain_name) # Write updated value of POST_OUTPUT_DOMAIN_NAME back to dictionary - post_config["POST_OUTPUT_DOMAIN_NAME"] = post_output_domain_name + post_config["POST_OUTPUT_DOMAIN_NAME"] = post_output_domain_name # # ----------------------------------------------------------------------- @@ -1285,7 +1544,7 @@ def get_location(xcs, fmt, expt_cfg): # the same resolution input. # - def dict_find(user_dict, substring): + def _dict_find(user_dict, substring): if not isinstance(user_dict, dict): return False @@ -1294,14 +1553,14 @@ def dict_find(user_dict, substring): if substring in key: return True if isinstance(value, dict): - if dict_find(value, substring): + if _dict_find(value, substring): return True return False - run_make_ics = dict_find(rocoto_tasks, "task_make_ics") - run_make_lbcs = dict_find(rocoto_tasks, "task_make_lbcs") - run_run_fcst = dict_find(rocoto_tasks, "task_run_fcst") + run_make_ics = _dict_find(rocoto_tasks, "task_make_ics") + run_make_lbcs = _dict_find(rocoto_tasks, "task_make_lbcs") + run_run_fcst = _dict_find(rocoto_tasks, "task_run_fcst") run_any_coldstart_task = run_make_ics or \ run_make_lbcs or \ run_run_fcst @@ -1440,13 +1699,13 @@ def dict_find(user_dict, substring): if workflow_config["SDF_USES_THOMPSON_MP"]: - logging.debug(f'Selected CCPP suite ({workflow_config["CCPP_PHYS_SUITE"]}) uses Thompson MP') - logging.debug(f'Setting up links for additional fix files') + logger.debug(f'Selected CCPP suite ({workflow_config["CCPP_PHYS_SUITE"]}) uses Thompson MP') + logger.debug(f'Setting up links for additional fix files') # If the model ICs or BCs are not from RAP or HRRR, they will not contain aerosol # climatology data needed by the Thompson scheme, so we need to provide a separate file - if (get_extrn_ics["EXTRN_MDL_NAME_ICS"] not in ["HRRR", "RAP"] or - get_extrn_lbcs["EXTRN_MDL_NAME_LBCS"] not in ["HRRR", "RAP"]): + if (get_extrn_ics["EXTRN_MDL_NAME_ICS"] not in ["HRRR", "RRFS", "RAP"] or + get_extrn_lbcs["EXTRN_MDL_NAME_LBCS"] not in ["HRRR", "RRFS", "RAP"]): fixed_files["THOMPSON_FIX_FILES"].append(workflow_config["THOMPSON_MP_CLIMO_FN"]) # Add thompson-specific fix files to CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING and @@ -1457,14 +1716,66 @@ def dict_find(user_dict, substring): for fix_file in fixed_files["THOMPSON_FIX_FILES"]: fixed_files["CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING"].append(f"{fix_file} | {fix_file}") - logging.debug(f'New fix file list:\n{fixed_files["FIXgsm_FILES_TO_COPY_TO_FIXam"]=}') - logging.debug(f'New fix file mapping:\n{fixed_files["CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING"]=}') + logger.debug(f'New fix file list:\n{fixed_files["FIXgsm_FILES_TO_COPY_TO_FIXam"]=}') + logger.debug(f'New fix file mapping:\n{fixed_files["CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING"]=}') + # ----------------------------------------------------------------------- + # + # Check that UFS FIRE settings are correct and consistent + # + # ----------------------------------------------------------------------- + fire_conf = expt_config["fire"] + if fire_conf["UFS_FIRE"]: + if build_config["Application"]!="ATMF": + raise Exception("UFS_FIRE == True but UFS SRW has not been built for fire coupling; see users guide for details") + fire_input_file=os.path.join(fire_conf["FIRE_INPUT_DIR"],"geo_em.d01.nc") + if not os.path.isfile(fire_input_file): + raise FileNotFoundError( + dedent( + f""" + The fire input file (geo_em.d01.nc) does not 
exist in the specified directory:
+                {fire_conf["FIRE_INPUT_DIR"]}
+                Check that the specified path is correct, and that the file exists and is readable
+                """
+            )
+        )
+        if fire_conf["FIRE_NUM_TASKS"] < 1:
+            raise ValueError("FIRE_NUM_TASKS must be > 0 if UFS_FIRE is True")
+        elif fire_conf["FIRE_NUM_TASKS"] > 1:
+            raise ValueError("FIRE_NUM_TASKS > 1 not yet supported")
+
+        if fire_conf["FIRE_NUM_IGNITIONS"] > 5:
+            raise ValueError("Only 5 or fewer fire ignitions are supported")
+
+        if fire_conf["FIRE_NUM_IGNITIONS"] > 1:
+            # These settings all need to be lists for multiple fire ignitions
+            each_fire = ["FIRE_IGNITION_ROS", "FIRE_IGNITION_START_LAT", "FIRE_IGNITION_START_LON",
+                         "FIRE_IGNITION_END_LAT", "FIRE_IGNITION_END_LON", "FIRE_IGNITION_RADIUS",
+                         "FIRE_IGNITION_START_TIME", "FIRE_IGNITION_END_TIME"]
+            for setting in each_fire:
+                if not isinstance(fire_conf[setting], list):
+                    logger.critical(f"{fire_conf['FIRE_NUM_IGNITIONS']=}")
+                    logger.critical(f"{fire_conf[setting]=}")
+                    raise ValueError(f"For FIRE_NUM_IGNITIONS > 1, {setting} must be a list")
+                if len(fire_conf[setting]) != fire_conf["FIRE_NUM_IGNITIONS"]:
+                    logger.critical(f"{fire_conf['FIRE_NUM_IGNITIONS']=}")
+                    logger.critical(f"{fire_conf[setting]=}")
+                    raise ValueError(f"For FIRE_NUM_IGNITIONS > 1, {setting} must be a list of length FIRE_NUM_IGNITIONS")
+
+        if fire_conf["FIRE_ATM_FEEDBACK"] > 0.0:
+            raise ValueError("FIRE_ATM_FEEDBACK > 0 (two-way coupling) not supported in UFS yet")
+
+        if fire_conf["FIRE_UPWINDING"] == 0 and fire_conf["FIRE_VISCOSITY"] == 0.0:
+            raise ValueError("FIRE_VISCOSITY must be > 0.0 if FIRE_UPWINDING == 0")
+    else:
+        if fire_conf["FIRE_NUM_TASKS"] > 0:
+            logger.warning("UFS_FIRE is not enabled; setting FIRE_NUM_TASKS = 0")
+            fire_conf["FIRE_NUM_TASKS"] = 0
+
 #
 # -----------------------------------------------------------------------
 #
-# Generate var_defns.sh file in the EXPTDIR. This file contains all
+# Generate var_defns.yaml file in the EXPTDIR. This file contains all
 # the user-specified settings from expt_config.
 #
 # -----------------------------------------------------------------------
@@ -1499,10 +1810,13 @@ def dict_find(user_dict, substring):
         yaml.Dumper.ignore_aliases = lambda *args : True
         yaml.dump(expt_config.get("rocoto"), f, sort_keys=False)
 
-    var_defns_cfg = copy.deepcopy(expt_config)
+    var_defns_cfg = get_yaml_config(config=expt_config)
     del var_defns_cfg["rocoto"]
-    with open(global_var_defns_fp, "a") as f:
-        f.write(cfg_to_shell_str(var_defns_cfg))
+
+    # Fixup a couple of data types:
+    for dates in ("DATE_FIRST_CYCL", "DATE_LAST_CYCL"):
+        var_defns_cfg["workflow"][dates] = date_to_str(var_defns_cfg["workflow"][dates])
+    var_defns_cfg.dump(global_var_defns_fp)
+
 #
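Note: the date fixup exists because dumping raw datetime objects to YAML would not round-trip as the 'YYYYMMDDHH' strings the downstream shell tooling expects. A minimal sketch of the idea using plain PyYAML (the App itself routes through uwtools' get_yaml_config object and a date_to_str helper):

    import datetime
    import yaml

    # Illustrative config; in the App this is the full experiment dictionary.
    var_defns = {
        "workflow": {
            "DATE_FIRST_CYCL": datetime.datetime(2024, 7, 1, 0),
            "DATE_LAST_CYCL": datetime.datetime(2024, 7, 2, 12),
        }
    }

    # Convert the datetimes to compact strings before dumping.
    for key in ("DATE_FIRST_CYCL", "DATE_LAST_CYCL"):
        var_defns["workflow"][key] = var_defns["workflow"][key].strftime("%Y%m%d%H")

    print(yaml.safe_dump(var_defns, sort_keys=False))
    # workflow:
    #   DATE_FIRST_CYCL: '2024070100'
    #   DATE_LAST_CYCL: '2024070212'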
@@ -1542,10 +1856,14 @@ def dict_find(user_dict, substring):
     return expt_config
 
 def clean_rocoto_dict(rocotodict):
-    """Removes any invalid entries from rocotodict. Examples of invalid entries are:
+    """Removes any invalid entries from ``rocotodict``. Examples of invalid entries are:
 
     1. A task dictionary containing no "command" key
-    2. A metatask dictionary containing no task dictionaries"""
+    2. A metatask dictionary containing no task dictionaries
+
+    Args:
+        rocotodict (dict): A dictionary containing Rocoto workflow settings
+    """
 
     # Loop 1: search for tasks with no command key, iterating over metatasks
     for key in list(rocotodict.keys()):
diff --git a/ush/source_util_funcs.sh b/ush/source_util_funcs.sh
index fa097de34d..266975e97d 100644
--- a/ush/source_util_funcs.sh
+++ b/ush/source_util_funcs.sh
@@ -115,16 +115,6 @@ function source_util_funcs() {
 #
 #-----------------------------------------------------------------------
 #
-# Source the file containing functions that execute filesystem commands
-# (e.g. "cp", "mv") with verification (i.e. verifying that the commands
-# completed successfully).
-#
-#-----------------------------------------------------------------------
-#
-  . ${bashutils_dir}/filesys_cmds_vrfy.sh
-#
-#-----------------------------------------------------------------------
-#
 # Source the file containing the function that searches an array for a
 # specified string.
 #
@@ -224,21 +214,11 @@ function source_util_funcs() {
 #
 #-----------------------------------------------------------------------
 #
-# Source the file containing the function that evaluates a METplus time
-# string template.
+# Source the file that sources YAML files as if they were bash
 #
 #-----------------------------------------------------------------------
 #
-  . ${bashutils_dir}/eval_METplus_timestr_tmpl.sh
-#
-#-----------------------------------------------------------------------
-#
-# Source the file containing the function that sources config files.
-#
-#-----------------------------------------------------------------------
-#
-  . ${bashutils_dir}/source_config.sh
-
+  . ${bashutils_dir}/source_yaml.sh
 }
 
 source_util_funcs
diff --git a/ush/update_input_nml.py b/ush/update_input_nml.py
index e975d9bc08..be35d495d4 100644
--- a/ush/update_input_nml.py
+++ b/ush/update_input_nml.py
@@ -1,7 +1,7 @@
 #!/usr/bin/env python3
 
 """
-Update the model namelist for a variety of different settings.
+Updates the model namelist for a variety of different settings.
 """
 
 import argparse
@@ -9,7 +9,7 @@ import sys
 from textwrap import dedent
 
-from uwtools.api.config import realize
+from uwtools.api.config import get_nml_config, realize
 
 from python_utils import (
     print_input_args,
@@ -20,15 +20,16 @@ VERBOSE = os.environ.get("VERBOSE", "true")
 
 def update_input_nml(namelist, restart, aqm_na_13km):
-    """Update the FV3 input.nml file in the specified run directory
+    """
+    Updates the FV3 ``input.nml`` file in the specified run directory
 
     Args:
-        namelist:    path to the namelist
-        restart:     should forecast start from restart?
-        aqm_na_13km: should the 13km AQM config be used?
+        namelist (str): Path to the namelist
+        restart (bool): Whether the forecast should start from a restart file
+        aqm_na_13km (bool): Whether the 13km AQM configuration should be used
Returns: - Boolean + None: Updates ``input.nml`` with the settings provided """ print_input_args(locals()) @@ -77,10 +78,10 @@ def update_input_nml(namelist, restart, aqm_na_13km): input_format="nml", output_file=namelist, output_format="nml", - supplemental_configs=[settings], + update_config=get_nml_config(settings), ) -def parse_args(argv): +def _parse_args(argv): """Parse command line arguments""" parser = argparse.ArgumentParser(description="Update FV3 input.nml file for restart.") @@ -107,7 +108,7 @@ def parse_args(argv): if __name__ == "__main__": - args = parse_args(sys.argv[1:]) + args = _parse_args(sys.argv[1:]) update_input_nml( namelist=args.namelist, restart=args.restart, diff --git a/ush/valid_param_vals.yaml b/ush/valid_param_vals.yaml index 3530b51ae9..a8a568605e 100644 --- a/ush/valid_param_vals.yaml +++ b/ush/valid_param_vals.yaml @@ -24,7 +24,9 @@ valid_vals_PREDEF_GRID_NAME: [ "RRFS_NA_13km", "RRFS_NA_3km", "SUBCONUS_Ind_3km", -"WoFS_3km" +"WoFS_3km", +"SUBCONUS_CO_3km", +"SUBCONUS_CO_1km" ] valid_vals_CCPP_PHYS_SUITE: [ "FV3_GFS_v15p2", @@ -37,8 +39,8 @@ valid_vals_CCPP_PHYS_SUITE: [ "FV3_RAP" ] valid_vals_GFDLgrid_NUM_CELLS: [48, 96, 192, 384, 768, 1152, 3072] -valid_vals_EXTRN_MDL_NAME_ICS: ["GSMGFS", "FV3GFS", "UFS-CASE-STUDY", "GEFS", "GDAS", "RAP", "HRRR", "NAM"] -valid_vals_EXTRN_MDL_NAME_LBCS: ["GSMGFS", "FV3GFS", "UFS-CASE-STUDY", "GEFS", "GDAS", "RAP", "HRRR", "NAM"] +valid_vals_EXTRN_MDL_NAME_ICS: ["GSMGFS", "FV3GFS", "UFS-CASE-STUDY", "GEFS", "GDAS", "RAP", "HRRR", "RRFS", "NAM"] +valid_vals_EXTRN_MDL_NAME_LBCS: ["GSMGFS", "FV3GFS", "UFS-CASE-STUDY", "GEFS", "GDAS", "RAP", "HRRR", "RRFS", "NAM"] valid_vals_USE_USER_STAGED_EXTRN_FILES: [True, False] valid_vals_FV3GFS_FILE_FMT_ICS: ["nemsio", "grib2", "netcdf"] valid_vals_FV3GFS_FILE_FMT_LBCS: ["nemsio", "grib2", "netcdf"] @@ -76,6 +78,6 @@ valid_vals_DO_AQM_CHEM_LBCS: [True, False] valid_vals_DO_AQM_GEFS_LBCS: [True, False] valid_vals_DO_AQM_SAVE_AIRNOW_HIST: [True, False] valid_vals_COLDSTART: [True, False] -valid_vals_VX_FIELDS: [ "APCP", "ASNOW", "REFC", "RETOP", "ADPSFC", "ADPUPA" ] +valid_vals_VX_FIELD_GROUPS: [ "APCP", "ASNOW", "REFC", "RETOP", "SFC", "UPA" ] valid_vals_VX_APCP_ACCUMS_HRS: [ 1, 3, 6, 24 ] -valid_vals_VX_ASNOW_ACCUMS_HRS: [ 6, 24 ] +valid_vals_VX_ASNOW_ACCUMS_HRS: [ 6, 12, 18, 24 ] diff --git a/ush/wrappers/run_fcst.sh b/ush/wrappers/run_fcst.sh index 7450de7cc5..c875cb16c0 100755 --- a/ush/wrappers/run_fcst.sh +++ b/ush/wrappers/run_fcst.sh @@ -1,7 +1,10 @@ #!/usr/bin/env bash -export GLOBAL_VAR_DEFNS_FP="${EXPTDIR}/var_defns.sh" +export GLOBAL_VAR_DEFNS_FP="${EXPTDIR}/var_defns.yaml" +. $USHdir/source_util_funcs.sh +for sect in workflow ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done set -xa -source ${GLOBAL_VAR_DEFNS_FP} export CDATE=${DATE_FIRST_CYCL} export CYCLE_DIR=${EXPTDIR}/${CDATE} export cyc=${DATE_FIRST_CYCL:8:2} diff --git a/ush/wrappers/run_get_ics.sh b/ush/wrappers/run_get_ics.sh index 0ee521a67d..494eab6850 100755 --- a/ush/wrappers/run_get_ics.sh +++ b/ush/wrappers/run_get_ics.sh @@ -1,7 +1,10 @@ #!/usr/bin/env bash -export GLOBAL_VAR_DEFNS_FP="${EXPTDIR}/var_defns.sh" +export GLOBAL_VAR_DEFNS_FP="${EXPTDIR}/var_defns.yaml" +. 
$USHdir/source_util_funcs.sh +for sect in workflow task_get_extrn_ics ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done set -xa -source ${GLOBAL_VAR_DEFNS_FP} export CDATE=${DATE_FIRST_CYCL} export CYCLE_DIR=${EXPTDIR}/${CDATE} export cyc=${DATE_FIRST_CYCL:8:2} diff --git a/ush/wrappers/run_get_lbcs.sh b/ush/wrappers/run_get_lbcs.sh index 543ab6e47d..ec6fa23892 100755 --- a/ush/wrappers/run_get_lbcs.sh +++ b/ush/wrappers/run_get_lbcs.sh @@ -1,7 +1,10 @@ #!/usr/bin/env bash -export GLOBAL_VAR_DEFNS_FP="${EXPTDIR}/var_defns.sh" +export GLOBAL_VAR_DEFNS_FP="${EXPTDIR}/var_defns.yaml" +. $USHdir/source_util_funcs.sh +for sect in workflow task_get_extrn_lbcs ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done set -xa -source ${GLOBAL_VAR_DEFNS_FP} export CDATE=${DATE_FIRST_CYCL} export CYCLE_DIR=${EXPTDIR}/${CDATE} export cyc=${DATE_FIRST_CYCL:8:2} diff --git a/ush/wrappers/run_make_grid.sh b/ush/wrappers/run_make_grid.sh index 2d55beaf94..f7a6f8aeed 100755 --- a/ush/wrappers/run_make_grid.sh +++ b/ush/wrappers/run_make_grid.sh @@ -1,7 +1,10 @@ #!/usr/bin/env bash -export GLOBAL_VAR_DEFNS_FP="${EXPTDIR}/var_defns.sh" +export GLOBAL_VAR_DEFNS_FP="${EXPTDIR}/var_defns.yaml" +. $USHdir/source_util_funcs.sh +for sect in workflow ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done set -xa -source ${GLOBAL_VAR_DEFNS_FP} export CDATE=${DATE_FIRST_CYCL} export CYCLE_DIR=${EXPTDIR}/${CDATE} diff --git a/ush/wrappers/run_make_ics.sh b/ush/wrappers/run_make_ics.sh index 5c629722fc..adcdc16180 100755 --- a/ush/wrappers/run_make_ics.sh +++ b/ush/wrappers/run_make_ics.sh @@ -1,7 +1,10 @@ #!/usr/bin/env bash -export GLOBAL_VAR_DEFNS_FP="${EXPTDIR}/var_defns.sh" +export GLOBAL_VAR_DEFNS_FP="${EXPTDIR}/var_defns.yaml" +. $USHdir/source_util_funcs.sh +for sect in workflow ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done set -xa -source ${GLOBAL_VAR_DEFNS_FP} export CDATE=${DATE_FIRST_CYCL} export CYCLE_DIR=${EXPTDIR}/${CDATE} export cyc=${DATE_FIRST_CYCL:8:2} diff --git a/ush/wrappers/run_make_lbcs.sh b/ush/wrappers/run_make_lbcs.sh index 27c94c127f..f9fe35d9da 100755 --- a/ush/wrappers/run_make_lbcs.sh +++ b/ush/wrappers/run_make_lbcs.sh @@ -1,7 +1,10 @@ #!/usr/bin/env bash -export GLOBAL_VAR_DEFNS_FP="${EXPTDIR}/var_defns.sh" +export GLOBAL_VAR_DEFNS_FP="${EXPTDIR}/var_defns.yaml" +. $USHdir/source_util_funcs.sh +for sect in workflow ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done set -xa -source ${GLOBAL_VAR_DEFNS_FP} export CDATE=${DATE_FIRST_CYCL} export CYCLE_DIR=${EXPTDIR}/${CDATE} export cyc=${DATE_FIRST_CYCL:8:2} diff --git a/ush/wrappers/run_make_orog.sh b/ush/wrappers/run_make_orog.sh index 5f02ff9599..ebc5259ec1 100755 --- a/ush/wrappers/run_make_orog.sh +++ b/ush/wrappers/run_make_orog.sh @@ -1,7 +1,10 @@ #!/usr/bin/env bash -export GLOBAL_VAR_DEFNS_FP="${EXPTDIR}/var_defns.sh" +export GLOBAL_VAR_DEFNS_FP="${EXPTDIR}/var_defns.yaml" +. $USHdir/source_util_funcs.sh +for sect in workflow ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done set -xa -source ${GLOBAL_VAR_DEFNS_FP} export CDATE=${DATE_FIRST_CYCL} export CYCLE_DIR=${EXPTDIR}/${CDATE} diff --git a/ush/wrappers/run_make_sfc_climo.sh b/ush/wrappers/run_make_sfc_climo.sh index fab33f75d6..8024f529fc 100755 --- a/ush/wrappers/run_make_sfc_climo.sh +++ b/ush/wrappers/run_make_sfc_climo.sh @@ -1,7 +1,10 @@ #!/usr/bin/env bash -export GLOBAL_VAR_DEFNS_FP="${EXPTDIR}/var_defns.sh" +export GLOBAL_VAR_DEFNS_FP="${EXPTDIR}/var_defns.yaml" +. 
$USHdir/source_util_funcs.sh +for sect in workflow ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done set -xa -source ${GLOBAL_VAR_DEFNS_FP} export CDATE=${DATE_FIRST_CYCL} export CYCLE_DIR=${EXPTDIR}/${CDATE} diff --git a/ush/wrappers/run_post.sh b/ush/wrappers/run_post.sh index 46ef104365..ca060acb1f 100755 --- a/ush/wrappers/run_post.sh +++ b/ush/wrappers/run_post.sh @@ -1,7 +1,10 @@ #!/usr/bin/env bash -export GLOBAL_VAR_DEFNS_FP="${EXPTDIR}/var_defns.sh" +export GLOBAL_VAR_DEFNS_FP="${EXPTDIR}/var_defns.yaml" +. $USHdir/source_util_funcs.sh +for sect in workflow ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done set -xa -source ${GLOBAL_VAR_DEFNS_FP} export CDATE=${DATE_FIRST_CYCL} export CYCLE_DIR=${EXPTDIR}/${CDATE} export cyc=${DATE_FIRST_CYCL:8:2}
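Note: all of the wrapper scripts now share one pattern: source only the YAML sections a task needs, exporting each key as a shell variable. The helper itself is the bash function provided by ${bashutils_dir}/source_yaml.sh; the Python sketch below (the file name and the quoting are illustrative, not the real implementation) only shows the section-to-exports mapping such a helper performs:

    import yaml

    def yaml_section_to_exports(path, section):
        """Render one top-level YAML section as 'export KEY=VALUE' lines."""
        with open(path) as f:
            cfg = yaml.safe_load(f) or {}
        # Naive quoting; a real implementation must shell-escape values.
        return "\n".join(f'export {k}="{v}"' for k, v in cfg.get(section, {}).items())

    # A bash wrapper could then do, e.g.:
    #   eval "$(python export_section.py "${GLOBAL_VAR_DEFNS_FP}" workflow)"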