From fc2c5ea7e3ddde5312370922b3ab51a8d33c22c5 Mon Sep 17 00:00:00 2001
From: AntonMFernando-NOAA <167725623+AntonMFernando-NOAA@users.noreply.github.com>
Date: Wed, 30 Oct 2024 07:15:37 -0400
Subject: [PATCH 01/14] Disabling hyper-threading (#2965)

Hera, Hercules, and Orion (and possibly Jet) enable hyper-threading by default. This should be disabled explicitly by adding the `sbatch/srun` flag `--hint=nomultithread`.

Resolves #2863
---
 env/HERA.env     | 4 ++--
 env/HERCULES.env | 4 ++--
 env/ORION.env    | 4 ++--
 3 files changed, 6 insertions(+), 6 deletions(-)

diff --git a/env/HERA.env b/env/HERA.env
index 09743967b5..80cd7cddaf 100755
--- a/env/HERA.env
+++ b/env/HERA.env
@@ -9,7 +9,7 @@ fi

 step=$1

-export launcher="srun -l --export=ALL"
+export launcher="srun -l --export=ALL --hint=nomultithread"
 export mpmd_opt="--multi-prog --output=mpmd.%j.%t.out"

 #export POSTAMBLE_CMD='report-mem'
@@ -50,7 +50,7 @@ if [[ "${step}" = "prep" ]] || [[ "${step}" = "prepbufr" ]]; then
     export POE="NO"
     export BACK="NO"
     export sys_tp="HERA"
-    export launcher_PREP="srun"
+    export launcher_PREP="srun --hint=nomultithread"

 elif [[ "${step}" = "prepsnowobs" ]]; then

diff --git a/env/HERCULES.env b/env/HERCULES.env
index 9ec112c699..bed1d11281 100755
--- a/env/HERCULES.env
+++ b/env/HERCULES.env
@@ -9,7 +9,7 @@ fi

 step=$1

-export launcher="srun -l --export=ALL"
+export launcher="srun -l --export=ALL --hint=nomultithread"
 export mpmd_opt="--multi-prog --output=mpmd.%j.%t.out"

 # Configure MPI environment
@@ -48,7 +48,7 @@ case ${step} in
     export POE="NO"
     export BACK=${BACK:-"YES"}
     export sys_tp="HERCULES"
-    export launcher_PREP="srun"
+    export launcher_PREP="srun --hint=nomultithread"
   ;;
   "prepsnowobs")

diff --git a/env/ORION.env b/env/ORION.env
index 3b8053d060..45fd607aa5 100755
--- a/env/ORION.env
+++ b/env/ORION.env
@@ -9,7 +9,7 @@ fi

 step=$1

-export launcher="srun -l --export=ALL"
+export launcher="srun -l --export=ALL --hint=nomultithread"
 export mpmd_opt="--multi-prog --output=mpmd.%j.%t.out"

 # Configure MPI environment
@@ -45,7 +45,7 @@ if [[ "${step}" = "prep" ]] || [[ "${step}" = "prepbufr" ]]; then
     export POE="NO"
     export BACK=${BACK:-"YES"}
     export sys_tp="ORION"
-    export launcher_PREP="srun"
+    export launcher_PREP="srun --hint=nomultithread"

 elif [[ "${step}" = "prepsnowobs" ]]; then

From 5bb3f867e1f7a530986b9f291b7ec9d4d0cb1387 Mon Sep 17 00:00:00 2001
From: Walter Kolczynski - NOAA
Date: Wed, 30 Oct 2024 10:58:18 -0400
Subject: [PATCH 02/14] Fix wave restarts and GEFS FHOUT/FHMAX (#3009)

# Description

Fixes some issues that were preventing wave restarts from operating correctly.

First, the wave restart files were not being correctly linked from `$DATA` to `$DATArestart`. The files are placed in the root of `$DATA` instead of in `${DATA}/WAVE_RESTART`, so now links for the individual files are created.

Second, the incorrect filenames were being searched for and copied as part of a rerun. Filenames were geared towards multigrid waves, which use the grid names, but the single-grid configuration just uses `ww3`. Since multigrid waves are deprecated in the workflow and will soon be removed (#2637), these were updated to support only the single-grid option.

These fixes allow forecast segments (and emergency restarts) to work correctly when waves are on.

Additionally, the `FHMAX` (and `FHOUT`) for perturbed GEFS members were being overwritten by `config.efcs` due to remnant code from when it was copied from the EnKF version. This interfered with the segment setting for those GEFS members.
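For reference, a minimal sketch of the per-segment bounds logic this relies on, mirroring the `config.fcst` hunks below (assuming `FCST_SEGMENTS` and `FCST_SEGMENT` are provided by the workflow, e.g. `FCST_SEGMENTS="0, 48, 120"`):

```bash
# Parse the comma-separated segment boundaries into an array, e.g. (0 48 120)
IFS=', ' read -ra segments <<< "${FCST_SEGMENTS}"
# Segment N spans boundary N to boundary N+1
export FHMIN=${segments[${FCST_SEGMENT}]}
export FHMAX=${segments[${FCST_SEGMENT}+1]}
# Cap the high-frequency and wave maxima so they never exceed this segment's FHMAX
export FHMAX_HF=$(( FHMAX_HF_GFS > FHMAX ? FHMAX : FHMAX_HF_GFS ))
export FHMAX_WAV=$(( FHMAX_WAV > FHMAX ? FHMAX : FHMAX_WAV ))
```

With `FCST_SEGMENT=1` in that example, this yields `FHMIN=48` and `FHMAX=120`; the unconditional `FHMAX`/`FHOUT` resets removed from `config.efcs` below are what previously clobbered these values for perturbed members.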
Resolves #3001

# Type of change

- [x] Bug fix (fixes something broken)
- [ ] New feature (adds functionality)
- [ ] Maintenance (code refactor, clean-up, new CI test, etc.)

# Change characteristics

- Is this a breaking change (a change in existing functionality)? NO
- Does this change require a documentation update? NO
- Does this change require an update to any of the following submodules? NO

# How has this been tested?

- S2SW forecast-only test with segments on Hercules

# Checklist

- [x] Any dependent changes have been merged and published
- [x] My code follows the style guidelines of this project
- [x] I have performed a self-review of my own code
- [x] I have commented my code, particularly in hard-to-understand areas
- [x] I have documented my code, including function, input, and output descriptions
- [x] My changes generate no new warnings
- [x] New and existing tests pass with my changes
- [x] This change is covered by an existing CI test or a new one has been added
- [x] I have made corresponding changes to the system documentation if necessary

---------

Co-authored-by: Kate.Friedman
Co-authored-by: David Huber <69919478+DavidHuber-NOAA@users.noreply.github.com>
---
 parm/config/gefs/config.efcs | 14 +-----------
 parm/config/gefs/config.fcst |  3 ++-
 parm/config/gfs/config.fcst  |  3 ++-
 parm/stage/wave.yaml.j2      |  6 +++++-
 ush/forecast_det.sh          |  9 +++-----
 ush/forecast_postdet.sh      | 42 ++++++++++++++++++++----------------
 ush/forecast_predet.sh       |  6 +++---
 7 files changed, 40 insertions(+), 43 deletions(-)

diff --git a/parm/config/gefs/config.efcs b/parm/config/gefs/config.efcs
index 9bd55afa54..0086121450 100644
--- a/parm/config/gefs/config.efcs
+++ b/parm/config/gefs/config.efcs
@@ -26,18 +26,6 @@ source "${EXPDIR}/config.ufs" ${string}
 # Get task specific resources
 source "${EXPDIR}/config.resources" efcs

-# nggps_diag_nml
-export FHOUT=${FHOUT_ENKF:-3}
-if [[ "${RUN}" == "enkfgfs" ]]; then
-   export FHOUT=${FHOUT_ENKF_GFS:-${FHOUT}}
-fi
-
-# model_configure
-export FHMAX=${FHMAX_ENKF:-9}
-if [[ "${RUN}" == "enkfgfs" ]]; then
-   export FHMAX=${FHMAX_ENKF_GFS:-${FHMAX}}
-fi
-
 # Stochastic physics parameters (only for ensemble forecasts)
 export DO_SKEB="YES"
 export SKEB="0.8,-999,-999,-999,-999"
@@ -74,6 +62,6 @@ if [[ "${REPLAY_ICS:-NO}" == "YES" ]]; then
 else
   export ODA_INCUPD="False"
 fi
-export restart_interval="${restart_interval_enkfgfs:-12}"
+export restart_interval="${restart_interval_gfs:-12}"

 echo "END: config.efcs"

diff --git a/parm/config/gefs/config.fcst b/parm/config/gefs/config.fcst
index efdedb24f4..b2a9c10afe 100644
--- a/parm/config/gefs/config.fcst
+++ b/parm/config/gefs/config.fcst
@@ -35,7 +35,8 @@ IFS=', ' read -ra segments <<< "${FCST_SEGMENTS}"
 # Determine MIN and MAX based on the forecast segment
 export FHMIN=${segments[${FCST_SEGMENT}]}
 export FHMAX=${segments[${FCST_SEGMENT}+1]}
-# Cap other FHMAX variables at FHMAX for the segment
+# Cap other FH variables at FHMAX for the segment
+export FHMIN_WAV=$(( FHMIN > FHMIN_WAV ? FHMIN : FHMIN_WAV ))
 export FHMAX_HF=$(( FHMAX_HF_GFS > FHMAX ? FHMAX : FHMAX_HF_GFS ))
 export FHMAX_WAV=$(( FHMAX_WAV > FHMAX ? FHMAX : FHMAX_WAV ))
 # shellcheck disable=SC2153

diff --git a/parm/config/gfs/config.fcst b/parm/config/gfs/config.fcst
index da336ff73b..571e6cafb5 100644
--- a/parm/config/gfs/config.fcst
+++ b/parm/config/gfs/config.fcst
@@ -38,7 +38,8 @@ case ${RUN} in
     # Determine MIN and MAX based on the forecast segment
     export FHMIN=${segments[${FCST_SEGMENT}]}
     export FHMAX=${segments[${FCST_SEGMENT}+1]}
-    # Cap other FHMAX variables at FHMAX for the segment
+    # Cap other FH variables at FHMAX for the segment
+    export FHMIN_WAV=$(( FHMIN > FHMIN_WAV ? FHMIN : FHMIN_WAV ))
     export FHMAX_HF=$(( FHMAX_HF_GFS > FHMAX ? FHMAX : FHMAX_HF_GFS ))
     export FHMAX_WAV=$(( FHMAX_WAV > FHMAX ? FHMAX : FHMAX_WAV ))
     # shellcheck disable=SC2153

diff --git a/parm/stage/wave.yaml.j2 b/parm/stage/wave.yaml.j2
index d610430bc7..2788a24343 100644
--- a/parm/stage/wave.yaml.j2
+++ b/parm/stage/wave.yaml.j2
@@ -9,5 +9,9 @@ wave:
   {% for mem in range(first_mem, last_mem + 1) %}
   {% set imem = mem - first_mem %}
   {% set COMOUT_WAVE_RESTART_PREV_MEM = COMOUT_WAVE_RESTART_PREV_MEM_list[imem] %}
-  - ["{{ ICSDIR }}/{{ COMOUT_WAVE_RESTART_PREV_MEM | relpath(ROTDIR) }}/{{ m_prefix }}.restart.{{ waveGRD }}", "{{ COMOUT_WAVE_RESTART_PREV_MEM }}"]
+  {% if path_exists(ICSDIR ~ "/" ~ COMOUT_WAVE_RESTART_PREV_MEM | relpath(ROTDIR) ~ "/" ~ m_prefix ~ ".restart." ~ waveGRD) %}
+  - ["{{ ICSDIR }}/{{ COMOUT_WAVE_RESTART_PREV_MEM | relpath(ROTDIR) }}/{{ m_prefix }}.restart.{{ waveGRD }}", "{{ COMOUT_WAVE_RESTART_PREV_MEM }}/{{ m_prefix }}.restart.ww3"]
+  {% else %}
+  - ["{{ ICSDIR }}/{{ COMOUT_WAVE_RESTART_PREV_MEM | relpath(ROTDIR) }}/{{ m_prefix }}.restart.ww3", "{{ COMOUT_WAVE_RESTART_PREV_MEM }}"]
+  {% endif %}
   {% endfor %} # mem loop

diff --git a/ush/forecast_det.sh b/ush/forecast_det.sh
index 603447f612..72064ac7f5 100755
--- a/ush/forecast_det.sh
+++ b/ush/forecast_det.sh
@@ -93,12 +93,9 @@ UFS_det(){

   # Check for WW3 restart availability
   if [[ "${cplwav}" == ".true." ]]; then
-    local ww3_grid
-    for ww3_grid in ${waveGRD} ; do
-      if [[ ! -f "${DATArestart}/WW3_RESTART/${rdate:0:8}.${rdate:8:2}0000.restart.${ww3_grid}" ]]; then
-        ww3_rst_ok="NO"
-      fi
-    done
+    if [[ ! -f "${DATArestart}/WW3_RESTART/${rdate:0:8}.${rdate:8:2}0000.restart.ww3" ]]; then
+      ww3_rst_ok="NO"
+    fi
   fi

   # Collective check

diff --git a/ush/forecast_postdet.sh b/ush/forecast_postdet.sh
index 58755d41d9..288b251aa8 100755
--- a/ush/forecast_postdet.sh
+++ b/ush/forecast_postdet.sh
@@ -326,7 +326,7 @@ FV3_out() {
 WW3_postdet() {
   echo "SUB ${FUNCNAME[0]}: Linking input data for WW3"

-  local ww3_grid
+  local ww3_grid first_ww3_restart_out ww3_restart_file
   # Copy initial condition files:
   if [[ "${warm_start}" == ".true." ]]; then
     local restart_date restart_dir
@@ -338,29 +338,35 @@ WW3_postdet() {
       restart_dir="${COMIN_WAVE_RESTART_PREV}"
     fi
     echo "Copying WW3 restarts for 'RUN=${RUN}' at '${restart_date}' from '${restart_dir}'"
-    local ww3_restart_file
-    for ww3_grid in ${waveGRD} ; do
-      ww3_restart_file="${restart_dir}/${restart_date:0:8}.${restart_date:8:2}0000.restart.${ww3_grid}"
-      if [[ ! -f "${ww3_restart_file}" ]]; then
-        echo "WARNING: WW3 restart file '${ww3_restart_file}' not found for warm_start='${warm_start}', will start from rest!"
-        if [[ "${RERUN}" == "YES" ]]; then
-          # In the case of a RERUN, the WW3 restart file is required
-          echo "FATAL ERROR: WW3 restart file '${ww3_restart_file}' not found for RERUN='${RERUN}', ABORT!"
-          exit 1
-        fi
-      fi
-      if [[ "${waveMULTIGRID}" == ".true." ]]; then
-        ${NCP} "${ww3_restart_file}" "${DATA}/restart.${ww3_grid}" \
-          || ( echo "FATAL ERROR: Unable to copy WW3 IC, ABORT!"; exit 1 )
+    ww3_restart_file="${restart_dir}/${restart_date:0:8}.${restart_date:8:2}0000.restart.ww3"
+    if [[ -f "${ww3_restart_file}" ]]; then
+      ${NCP} "${ww3_restart_file}" "${DATA}/restart.ww3" \
+        || ( echo "FATAL ERROR: Unable to copy WW3 IC, ABORT!"; exit 1 )
+    else
+      if [[ "${RERUN}" == "YES" ]]; then
+        # In the case of a RERUN, the WW3 restart file is required
+        echo "FATAL ERROR: WW3 restart file '${ww3_restart_file}' not found for RERUN='${RERUN}', ABORT!"
+        exit 1
       else
-        ${NCP} "${ww3_restart_file}" "${DATA}/restart.ww3" \
-          || ( echo "FATAL ERROR: Unable to copy WW3 IC, ABORT!"; exit 1 )
+        echo "WARNING: WW3 restart file '${ww3_restart_file}' not found for warm_start='${warm_start}', will start from rest!"
       fi
-    done
+    fi
+
+    first_ww3_restart_out=$(date --utc -d "${restart_date:0:8} ${restart_date:8:2} + ${restart_interval} hours" +%Y%m%d%H)
   else # cold start
     echo "WW3 will start from rest!"
+    first_ww3_restart_out="${model_start_date_current_cycle}"
   fi # [[ "${warm_start}" == ".true." ]]

+  # Link restart files
+  local ww3_restart_file
+  # Use restart_date if it was determined above, otherwise use initialization date
+  for (( vdate = first_ww3_restart_out; vdate <= forecast_end_cycle;
+         vdate = $(date --utc -d "${vdate:0:8} ${vdate:8:2} + ${restart_interval} hours" +%Y%m%d%H) )); do
+    ww3_restart_file="${vdate:0:8}.${vdate:8:2}0000.restart.ww3"
+    ${NLN} "${DATArestart}/WW3_RESTART/${ww3_restart_file}" "${ww3_restart_file}"
+  done
+
   # Link output files
   local wavprfx="${RUN}wave${WAV_MEMBER:-}"
   if [[ "${waveMULTIGRID}" == ".true." ]]; then

diff --git a/ush/forecast_predet.sh b/ush/forecast_predet.sh
index 5aa9dc9ac7..d359a86622 100755
--- a/ush/forecast_predet.sh
+++ b/ush/forecast_predet.sh
@@ -556,10 +556,10 @@ WW3_predet(){
   echo "SUB ${FUNCNAME[0]}: WW3 before run type determination"

   if [[ ! -d "${COMOUT_WAVE_HISTORY}" ]]; then mkdir -p "${COMOUT_WAVE_HISTORY}"; fi
-  if [[ ! -d "${COMOUT_WAVE_RESTART}" ]]; then mkdir -p "${COMOUT_WAVE_RESTART}" ; fi
+  if [[ ! -d "${COMOUT_WAVE_RESTART}" ]]; then mkdir -p "${COMOUT_WAVE_RESTART}"; fi

-  if [[ ! -d "${DATArestart}/WAVE_RESTART" ]]; then mkdir -p "${DATArestart}/WAVE_RESTART"; fi
-  ${NLN} "${DATArestart}/WAVE_RESTART" "${DATA}/restart_wave"
+  if [[ ! -d "${DATArestart}/WW3_RESTART" ]]; then mkdir -p "${DATArestart}/WW3_RESTART"; fi
+  # Wave restarts are linked in postdet to only create links for files that will be created

   # Files from wave prep and wave init jobs
   # Copy mod_def files for wave grids

From 0b3304eac1cbbe27e54978681da143820fbb11c0 Mon Sep 17 00:00:00 2001
From: AndrewEichmann-NOAA <58948505+AndrewEichmann-NOAA@users.noreply.github.com>
Date: Wed, 30 Oct 2024 18:25:12 -0400
Subject: [PATCH 03/14] Add run and finalize methods to marine LETKF task (#2944)

Adds run and finalize methods to the marine LETKF task, an experiment yaml for gw-ci in GDASApp, and workflow additions; removes bugs found along the way; and completes the bulk of the work on the LETKF task. Conversion of fields to increments is pending.
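As a rough illustration (assembled from the env and config hunks below, not a verbatim transcript of the Python task), the new run method effectively launches gridgen and then the LETKF solver as:

```bash
# APRUN_MARINEANLLETKF is set in the env files; executables and YAMLs in config.marineanlletkf.
# "letkf.yaml" stands in for the task's generated letkf_yaml_file.
${APRUN_MARINEANLLETKF} "${GRIDGEN_EXEC}" "${GRIDGEN_YAML}"
${APRUN_MARINEANLLETKF} "${MARINE_LETKF_EXEC}" soca localensembleda letkf.yaml
```

The Python task builds the same argument lists (`['soca', 'localensembleda', _letkf_yaml_file]`) through `wxflow` `Executable` objects, as shown in `marine_letkf.py` below.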
Partially resolves NOAA-EMC/GDASApp#1091 and NOAA-EMC/GDASApp#1251

Mutual dependency with GDASApp PR NOAA-EMC/GDASApp#1287 and IC fix file issue https://github.com/NOAA-EMC/global-workflow/pull/2944#issue-2537157488

---------

Co-authored-by: Walter Kolczynski - NOAA
Co-authored-by: David Huber <69919478+DavidHuber-NOAA@users.noreply.github.com>
---
 ci/cases/pr/C48mx500_hybAOWCDA.yaml           | 26 +++++++
 env/HERA.env                                  |  6 +-
 env/ORION.env                                 |  6 +-
 env/WCOSS2.env                                |  6 +-
 jobs/JGLOBAL_MARINE_ANALYSIS_LETKF            | 25 ++++--
 .../{marineanalletkf.sh => marineanlletkf.sh} |  2 +-
 parm/config/gfs/config.com                    |  2 +
 parm/config/gfs/config.marineanalletkf        | 18 -----
 parm/config/gfs/config.marineanlletkf         | 20 +++++
 parm/config/gfs/config.resources              |  2 +-
 parm/stage/ocean_ens_perturbations.yaml.j2    |  2 +-
 ...f.py => exglobal_marine_analysis_letkf.py} |  0
 ush/forecast_postdet.sh                       | 13 ++--
 ush/python/pygfs/task/marine_bmat.py          |  2 +-
 ush/python/pygfs/task/marine_letkf.py         | 78 +++++++++++++++----
 workflow/applications/gfs_cycled.py           |  4 +-
 workflow/rocoto/gfs_tasks.py                  | 26 +++++++
 workflow/rocoto/tasks.py                      |  2 +-
 18 files changed, 177 insertions(+), 63 deletions(-)
 create mode 100644 ci/cases/pr/C48mx500_hybAOWCDA.yaml
 rename jobs/rocoto/{marineanalletkf.sh => marineanlletkf.sh} (95%)
 delete mode 100644 parm/config/gfs/config.marineanalletkf
 create mode 100644 parm/config/gfs/config.marineanlletkf
 rename scripts/{exgdas_global_marine_analysis_letkf.py => exglobal_marine_analysis_letkf.py} (100%)

diff --git a/ci/cases/pr/C48mx500_hybAOWCDA.yaml b/ci/cases/pr/C48mx500_hybAOWCDA.yaml
new file mode 100644
index 0000000000..036aa8ca60
--- /dev/null
+++ b/ci/cases/pr/C48mx500_hybAOWCDA.yaml
@@ -0,0 +1,26 @@
+experiment:
+  system: gfs
+  mode: cycled
+
+arguments:
+  pslot: {{ 'pslot' | getenv }}
+  app: S2S
+  resdetatmos: 48
+  resdetocean: 5.0
+  resensatmos: 48
+  comroot: {{ 'RUNTESTS' | getenv }}/COMROOT
+  expdir: {{ 'RUNTESTS' | getenv }}/EXPDIR
+  icsdir: {{ 'ICSDIR_ROOT' | getenv }}/C48mx500/20240610
+  idate: 2021032412
+  edate: 2021032418
+  nens: 3
+  interval: 0
+  start: warm
+  yaml: {{ HOMEgfs }}/ci/cases/yamls/soca_gfs_defaults_ci.yaml
+
+skip_ci_on_hosts:
+  - wcoss2
+  - orion
+  - hercules
+  - hera
+  - gaea

diff --git a/env/HERA.env b/env/HERA.env
index 80cd7cddaf..259461b1ac 100755
--- a/env/HERA.env
+++ b/env/HERA.env
@@ -153,10 +153,10 @@ elif [[ "${step}" = "ocnanalecen" ]]; then
   export NTHREADS_OCNANALECEN=${NTHREADSmax}
   export APRUN_OCNANALECEN="${APRUN_default} --cpus-per-task=${NTHREADS_OCNANALECEN}"

-elif [[ "${step}" = "marineanalletkf" ]]; then
+elif [[ "${step}" = "marineanlletkf" ]]; then

-  export NTHREADS_MARINEANALLETKF=${NTHREADSmax}
-  export APRUN_MARINEANALLETKF="${APRUN_default} --cpus-per-task=${NTHREADS_MARINEANALLETKF}"
+  export NTHREADS_MARINEANLLETKF=${NTHREADSmax}
+  export APRUN_MARINEANLLETKF=${APRUN_default}

 elif [[ "${step}" = "anal" ]] || [[ "${step}" = "analcalc" ]]; then

diff --git a/env/ORION.env b/env/ORION.env
index 45fd607aa5..06ae2c1a63 100755
--- a/env/ORION.env
+++ b/env/ORION.env
@@ -149,10 +149,10 @@ elif [[ "${step}" = "ocnanalecen" ]]; then
   export NTHREADS_OCNANALECEN=${NTHREADSmax}
   export APRUN_OCNANALECEN="${APRUN_default} --cpus-per-task=${NTHREADS_OCNANALECEN}"

-elif [[ "${step}" = "marineanalletkf" ]]; then
+elif [[ "${step}" = "marineanlletkf" ]]; then

-  export NTHREADS_MARINEANALLETKF=${NTHREADSmax}
-  export APRUN_MARINEANALLETKF="${APRUN_default} --cpus-per-task=${NTHREADS_MARINEANALLETKF}"
+  export NTHREADS_MARINEANLLETKF=${NTHREADSmax}
+  export APRUN_MARINEANLLETKF="${APRUN_default}"

 elif [[ "${step}" = "anal" ]] || [[ "${step}" = "analcalc" ]]; then

diff --git a/env/WCOSS2.env b/env/WCOSS2.env
index cea24fb26b..c67c16f929 100755
--- a/env/WCOSS2.env
+++ b/env/WCOSS2.env
@@ -126,10 +126,10 @@ elif [[ "${step}" = "ocnanalecen" ]]; then
   export NTHREADS_OCNANALECEN=${NTHREADSmax}
   export APRUN_OCNANALECEN="${APRUN_default} --cpus-per-task=${NTHREADS_OCNANALECEN}"

-elif [[ "${step}" = "marineanalletkf" ]]; then
+elif [[ "${step}" = "marineanlletkf" ]]; then

-  export NTHREADS_MARINEANALLETKF=${NTHREADSmax}
-  export APRUN_MARINEANALLETKF="${APRUN_default} --cpus-per-task=${NTHREADS_MARINEANALLETKF}"
+  export NTHREADS_MARINEANLLETKF=${NTHREADSmax}
+  export APRUN_MARINEANLLETKF="${APRUN_default}"

 elif [[ "${step}" = "atmanlfv3inc" ]]; then

diff --git a/jobs/JGLOBAL_MARINE_ANALYSIS_LETKF b/jobs/JGLOBAL_MARINE_ANALYSIS_LETKF
index 38dc3049f9..2a88f89eab 100755
--- a/jobs/JGLOBAL_MARINE_ANALYSIS_LETKF
+++ b/jobs/JGLOBAL_MARINE_ANALYSIS_LETKF
@@ -1,6 +1,13 @@
 #!/bin/bash
 source "${HOMEgfs}/ush/preamble.sh"
-source "${HOMEgfs}/ush/jjob_header.sh" -e "marineanalletkf" -c "base ocnanal marineanalletkf"
+
+export DATAjob="${DATAROOT}/${RUN}marineanalysis.${PDY:-}${cyc}"
+export DATA="${DATAjob}/${jobid}"
+# Create the directory to hold ensemble perturbations
+export DATAens="${DATAjob}/ensdata"
+if [[ ! -d "${DATAens}" ]]; then mkdir -p "${DATAens}"; fi
+
+source "${HOMEgfs}/ush/jjob_header.sh" -e "marineanlletkf" -c "base marineanl marineanlletkf"

 ##############################################
 # Set variables used in the script
@@ -11,12 +18,18 @@ GDATE=$(date --utc +%Y%m%d%H -d "${PDY} ${cyc} - ${assim_freq} hours")
 gPDY=${GDATE:0:8}
 gcyc=${GDATE:8:2}

+export GDUMP="gdas"
+export GDUMP_ENS="enkf${GDUMP}"
+export OPREFIX="${RUN}.t${cyc}z."

-YMD=${gPDY} HH=${gcyc} declare_from_tmpl -rx \
-    COMIN_OCEAN_HISTORY_PREV:COM_OCEAN_HISTORY_TMPL \
-    COMIN_ICE_HISTORY_PREV:COM_ICE_HISTORY_TMPL
+RUN="${GDUMP}" YMD=${gPDY} HH=${gcyc} declare_from_tmpl -rx \
+    COMIN_OCEAN_HISTORY_PREV:COM_OCEAN_HISTORY_TMPL \
+    COMIN_ICE_HISTORY_PREV:COM_ICE_HISTORY_TMPL

-YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COMIN_OBS:COM_OBS_TMPL
+YMD=${PDY} HH=${cyc} declare_from_tmpl -rx \
+    COMIN_OBS:COM_OBS_TMPL \
+    COMOUT_OCEAN_LETKF:COM_OCEAN_LETKF_TMPL \
+    COMOUT_ICE_LETKF:COM_ICE_LETKF_TMPL

 ##############################################
 # Begin JOB SPECIFIC work
@@ -25,7 +38,7 @@ YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COMIN_OBS:COM_OBS_TMPL

 ###############################################################
 # Run relevant script
-EXSCRIPT=${GDASOCNLETKFPY:-${HOMEgfs}/scripts/exgdas_global_marine_analysis_letkf.py}
+EXSCRIPT=${GDASOCNLETKFPY:-${HOMEgfs}/scripts/exglobal_marine_analysis_letkf.py}
 ${EXSCRIPT}
 status=$?
 [[ ${status} -ne 0 ]] && exit "${status}"

diff --git a/jobs/rocoto/marineanalletkf.sh b/jobs/rocoto/marineanlletkf.sh
similarity index 95%
rename from jobs/rocoto/marineanalletkf.sh
rename to jobs/rocoto/marineanlletkf.sh
index f2bfb9f70c..d4333461f3 100755
--- a/jobs/rocoto/marineanalletkf.sh
+++ b/jobs/rocoto/marineanlletkf.sh
@@ -8,7 +8,7 @@ source "${HOMEgfs}/ush/preamble.sh"
 status=$?
 [[ ${status} -ne 0 ]] && exit "${status}"

-export job="marineanalletkf"
+export job="marineanlletkf"
 export jobid="${job}.$$"

 ###############################################################

diff --git a/parm/config/gfs/config.com b/parm/config/gfs/config.com
index 61d592561d..d949edb33a 100644
--- a/parm/config/gfs/config.com
+++ b/parm/config/gfs/config.com
@@ -82,12 +82,14 @@ declare -rx COM_OCEAN_HISTORY_TMPL=${COM_BASE}'/model/ocean/history'
 declare -rx COM_OCEAN_RESTART_TMPL=${COM_BASE}'/model/ocean/restart'
 declare -rx COM_OCEAN_INPUT_TMPL=${COM_BASE}'/model/ocean/input'
 declare -rx COM_OCEAN_ANALYSIS_TMPL=${COM_BASE}'/analysis/ocean'
+declare -rx COM_OCEAN_LETKF_TMPL=${COM_BASE}'/analysis/ocean/letkf'
 declare -rx COM_OCEAN_BMATRIX_TMPL=${COM_BASE}'/bmatrix/ocean'
 declare -rx COM_OCEAN_NETCDF_TMPL=${COM_BASE}'/products/ocean/netcdf'
 declare -rx COM_OCEAN_GRIB_TMPL=${COM_BASE}'/products/ocean/grib2'
 declare -rx COM_OCEAN_GRIB_GRID_TMPL=${COM_OCEAN_GRIB_TMPL}'/${GRID}'

 declare -rx COM_ICE_ANALYSIS_TMPL=${COM_BASE}'/analysis/ice'
+declare -rx COM_ICE_LETKF_TMPL=${COM_BASE}'/analysis/ice/letkf'
 declare -rx COM_ICE_BMATRIX_TMPL=${COM_BASE}'/bmatrix/ice'
 declare -rx COM_ICE_INPUT_TMPL=${COM_BASE}'/model/ice/input'
 declare -rx COM_ICE_HISTORY_TMPL=${COM_BASE}'/model/ice/history'

diff --git a/parm/config/gfs/config.marineanalletkf b/parm/config/gfs/config.marineanalletkf
deleted file mode 100644
index fde3433a13..0000000000
--- a/parm/config/gfs/config.marineanalletkf
+++ /dev/null
@@ -1,18 +0,0 @@
-#!/bin/bash
-
-########## config.marineanalletkf ##########
-# Ocn Analysis specific
-
-echo "BEGIN: config.marineanalletkf"
-
-# Get task specific resources
-. "${EXPDIR}/config.resources" marineanalletkf
-
-export MARINE_LETKF_EXEC="${JEDI_BIN}/gdas.x"
-export MARINE_LETKF_YAML_TMPL="${PARMgfs}/gdas/soca/letkf/letkf.yaml.j2"
-export MARINE_LETKF_STAGE_YAML_TMPL="${PARMgfs}/gdas/soca/letkf/letkf_stage.yaml.j2"
-
-export GRIDGEN_EXEC="${JEDI_BIN}/gdas_soca_gridgen.x"
-export GRIDGEN_YAML="${PARMgfs}/gdas/soca/gridgen/gridgen.yaml"
-
-echo "END: config.marineanalletkf"

diff --git a/parm/config/gfs/config.marineanlletkf b/parm/config/gfs/config.marineanlletkf
new file mode 100644
index 0000000000..8b84af4eaa
--- /dev/null
+++ b/parm/config/gfs/config.marineanlletkf
@@ -0,0 +1,20 @@
+#!/bin/bash
+
+########## config.marineanlletkf ##########
+# Ocn Analysis specific
+
+echo "BEGIN: config.marineanlletkf"
+
+# Get task specific resources
+. "${EXPDIR}/config.resources" marineanlletkf
+
+export MARINE_LETKF_EXEC="${EXECgfs}/gdas.x"
+export MARINE_LETKF_YAML_TMPL="${PARMgfs}/gdas/soca/letkf/letkf.yaml.j2"
+export MARINE_LETKF_STAGE_YAML_TMPL="${PARMgfs}/gdas/soca/letkf/letkf_stage.yaml.j2"
+export MARINE_LETKF_SAVE_YAML_TMPL="${PARMgfs}/gdas/soca/letkf/letkf_save.yaml.j2"
+
+export GRIDGEN_EXEC="${EXECgfs}/gdas_soca_gridgen.x"
+export GRIDGEN_YAML="${PARMgfs}/gdas/soca/gridgen/gridgen.yaml"
+export DIST_HALO_SIZE=500000
+
+echo "END: config.marineanlletkf"

diff --git a/parm/config/gfs/config.resources b/parm/config/gfs/config.resources
index 79dbb487db..14e6f0d7fb 100644
--- a/parm/config/gfs/config.resources
+++ b/parm/config/gfs/config.resources
@@ -601,7 +601,7 @@ case ${step} in
     tasks_per_node=$(( max_tasks_per_node / threads_per_task ))
     ;;

-  "marineanalletkf")
+  "marineanlletkf")
     ntasks=16
     case ${OCNRES} in
       "025")

diff --git a/parm/stage/ocean_ens_perturbations.yaml.j2 b/parm/stage/ocean_ens_perturbations.yaml.j2
index fede3816a7..586b9f66cb 100644
--- a/parm/stage/ocean_ens_perturbations.yaml.j2
+++ b/parm/stage/ocean_ens_perturbations.yaml.j2
@@ -9,5 +9,5 @@ ocean_ens_perturbation:
   {% for mem in range(first_mem + 1, last_mem + 1) %}
   {% set imem = mem - first_mem %}
  {% set COMOUT_OCEAN_ANALYSIS_MEM = COMOUT_OCEAN_ANALYSIS_MEM_list[imem] %}
-  - ["{{ ICSDIR }}/{{ COMOUT_OCEAN_ANALYSIS_MEM | relpath(ROTDIR) }}/{{ m_prefix }}.mom6_perturbation.nc", "{{ COMOUT_OCEAN_ANALYSIS_MEM }}/mom6_increment.nc"]
+  - ["{{ ICSDIR }}/{{ COMOUT_OCEAN_ANALYSIS_MEM | relpath(ROTDIR) }}/{{ m_prefix }}.mom6_perturbation.nc", "{{ COMOUT_OCEAN_ANALYSIS_MEM }}/{{ RUN }}.t{{ current_cycle_HH }}z.ocninc.nc"]
   {% endfor %} # mem loop

diff --git a/scripts/exgdas_global_marine_analysis_letkf.py b/scripts/exglobal_marine_analysis_letkf.py
similarity index 100%
rename from scripts/exgdas_global_marine_analysis_letkf.py
rename to scripts/exglobal_marine_analysis_letkf.py

diff --git a/ush/forecast_postdet.sh b/ush/forecast_postdet.sh
index 288b251aa8..25b2e28d75 100755
--- a/ush/forecast_postdet.sh
+++ b/ush/forecast_postdet.sh
@@ -466,12 +466,13 @@ MOM6_postdet() {
     fi

     # GEFS perturbations
-    # TODO if [[ $RUN} == "gefs" ]] block maybe be needed
-    # to ensure it does not interfere with the GFS when ensemble is updated in the GFS
-    if (( MEMBER > 0 )) && [[ "${ODA_INCUPD:-False}" == "True" ]]; then
-      ${NCP} "${COMIN_OCEAN_ANALYSIS}/mom6_increment.nc" "${DATA}/INPUT/mom6_increment.nc" \
-        || ( echo "FATAL ERROR: Unable to copy ensemble MOM6 increment, ABORT!"; exit 1 )
-    fi
+    if [[ "${RUN}" == "gefs" ]]; then
+      # to ensure it does not interfere with the GFS
+      if (( MEMBER > 0 )) && [[ "${ODA_INCUPD:-False}" == "True" ]]; then
+        ${NCP} "${COMIN_OCEAN_ANALYSIS}/${RUN}.t${cyc}z.ocninc.nc" "${DATA}/INPUT/mom6_increment.nc" \
+          || ( echo "FATAL ERROR: Unable to copy ensemble MOM6 increment, ABORT!"; exit 1 )
+      fi
+    fi # if [[ "${RUN}" == "gefs" ]]; then
   fi # if [[ "${RERUN}" == "NO" ]]; then

   # Link output files

diff --git a/ush/python/pygfs/task/marine_bmat.py b/ush/python/pygfs/task/marine_bmat.py
index 93329f05ac..a4a5b4f144 100644
--- a/ush/python/pygfs/task/marine_bmat.py
+++ b/ush/python/pygfs/task/marine_bmat.py
@@ -318,7 +318,7 @@ def finalize(self: Task) -> None:
         FileHandler({'copy': diagb_list}).sync()

         # Copy the ensemble perturbation diagnostics to the ROTDIR
-        if self.task_config.DOHYBVAR == "YES" or self.task_config.NMEM_ENS > 3:
+        if self.task_config.DOHYBVAR == "YES" or self.task_config.NMEM_ENS > 2:
             window_middle_iso = self.task_config.MARINE_WINDOW_MIDDLE.strftime('%Y-%m-%dT%H:%M:%SZ')
             weight_list = []
             src = os.path.join(self.task_config.DATA, f"ocn.ens_weights.incr.{window_middle_iso}.nc")

diff --git a/ush/python/pygfs/task/marine_letkf.py b/ush/python/pygfs/task/marine_letkf.py
index 36c26d594b..54d40f8d66 100644
--- a/ush/python/pygfs/task/marine_letkf.py
+++ b/ush/python/pygfs/task/marine_letkf.py
@@ -1,11 +1,13 @@
 #!/usr/bin/env python3

 import f90nml
+import pygfs.utils.marine_da_utils as mdau
 from logging import getLogger
 import os
 from pygfs.task.analysis import Analysis
 from typing import Dict
 from wxflow import (AttrDict,
+                    Executable,
                     FileHandler,
                     logit,
                     parse_j2yaml,
@@ -41,6 +43,8 @@ def __init__(self, config: Dict) -> None:
                                  'soca',
                                  'localensembleda',
                                  _letkf_yaml_file]
+        # compute the relative path from self.task_config.DATA to self.task_config.DATAenspert
+        _enspert_relpath = os.path.relpath(self.task_config.DATAens, self.task_config.DATA)

         self.task_config.WINDOW_MIDDLE = self.task_config.current_cycle
         self.task_config.WINDOW_BEGIN = self.task_config.current_cycle - _half_assim_freq
@@ -49,6 +53,7 @@ def __init__(self, config: Dict) -> None:
         self.task_config.mom_input_nml_tmpl = os.path.join(self.task_config.DATA, 'mom_input.nml.tmpl')
         self.task_config.mom_input_nml = os.path.join(self.task_config.DATA, 'mom_input.nml')
         self.task_config.obs_dir = os.path.join(self.task_config.DATA, 'obs')
+        self.task_config.ENSPERT_RELPATH = _enspert_relpath

     @logit(logger)
     def initialize(self):
@@ -64,26 +69,50 @@ def initialize(self):
         logger.info("initialize")

         # make directories and stage ensemble background files
-        ensbkgconf = AttrDict()
-        keys = ['previous_cycle', 'current_cycle', 'DATA', 'NMEM_ENS',
-                'PARMgfs', 'ROTDIR', 'COM_OCEAN_HISTORY_TMPL', 'COM_ICE_HISTORY_TMPL']
-        for key in keys:
-            ensbkgconf[key] = self.task_config[key]
-        ensbkgconf.RUN = 'enkfgdas'
-        soca_ens_bkg_stage_list = parse_j2yaml(self.task_config.SOCA_ENS_BKG_STAGE_YAML_TMPL, ensbkgconf)
-        FileHandler(soca_ens_bkg_stage_list).sync()
         soca_fix_stage_list = parse_j2yaml(self.task_config.SOCA_FIX_YAML_TMPL, self.task_config)
         FileHandler(soca_fix_stage_list).sync()
-        letkf_stage_list = parse_j2yaml(self.task_config.MARINE_LETKF_STAGE_YAML_TMPL, self.task_config)
+
+        stageconf = AttrDict()
+        keys = ['current_cycle',
+                'previous_cycle',
+                'COM_ICE_LETKF_TMPL',
+                'COM_OCEAN_LETKF_TMPL',
+                'COM_ICE_HISTORY_TMPL',
+                'COM_OCEAN_HISTORY_TMPL',
+                'COMIN_OCEAN_HISTORY_PREV',
+                'COMIN_ICE_HISTORY_PREV',
+                'COMOUT_ICE_LETKF',
+                'COMOUT_OCEAN_LETKF',
+                'DATA',
+                'ENSPERT_RELPATH',
+                'GDUMP_ENS',
+                'NMEM_ENS',
+                'OPREFIX',
+                'PARMgfs',
+                'ROTDIR',
+                'RUN',
+                'WINDOW_BEGIN',
+                'WINDOW_MIDDLE']
+        for key in keys:
+            stageconf[key] = self.task_config[key]
+
+        # stage ensemble background files
+        soca_ens_bkg_stage_list = parse_j2yaml(self.task_config.MARINE_ENSDA_STAGE_BKG_YAML_TMPL, stageconf)
+        FileHandler(soca_ens_bkg_stage_list).sync()
+
+        # stage letkf-specific files
+        letkf_stage_list = parse_j2yaml(self.task_config.MARINE_LETKF_STAGE_YAML_TMPL, stageconf)
         FileHandler(letkf_stage_list).sync()

-        obs_list = parse_j2yaml(self.task_config.OBS_YAML, self.task_config)
+        obs_list = parse_j2yaml(self.task_config.MARINE_OBS_LIST_YAML, self.task_config)

         # get the list of observations
         obs_files = []
         for ob in obs_list['observers']:
             obs_name = ob['obs space']['name'].lower()
-            obs_filename = f"{self.task_config.RUN}.t{self.task_config.cyc}z.{obs_name}.{to_YMDH(self.task_config.current_cycle)}.nc"
+            # TODO(AFE) - this should be removed when the obs config yamls are jinjafied
+            if 'distribution' not in ob['obs space']:
+                ob['obs space']['distribution'] = {'name': 'Halo', 'halo size': self.task_config['DIST_HALO_SIZE']}
+            obs_filename = f"{self.task_config.RUN}.t{self.task_config.cyc}z.{obs_name}.{to_YMDH(self.task_config.current_cycle)}.nc4"
             obs_files.append((obs_filename, ob))

         obs_files_to_copy = []
@@ -102,12 +131,7 @@ def initialize(self):
         FileHandler({'copy': obs_files_to_copy}).sync()

         # make the letkf.yaml
-        letkfconf = AttrDict()
-        keys = ['WINDOW_BEGIN', 'WINDOW_MIDDLE', 'RUN', 'gcyc', 'NMEM_ENS']
-        for key in keys:
-            letkfconf[key] = self.task_config[key]
-        letkfconf.RUN = 'enkfgdas'
-        letkf_yaml = parse_j2yaml(self.task_config.MARINE_LETKF_YAML_TMPL, letkfconf)
+        letkf_yaml = parse_j2yaml(self.task_config.MARINE_LETKF_YAML_TMPL, stageconf)
         letkf_yaml.observations.observers = obs_to_use
         letkf_yaml.save(self.task_config.letkf_yaml_file)

@@ -133,6 +157,18 @@ def run(self):

         logger.info("run")

+        exec_cmd_gridgen = Executable(self.task_config.APRUN_MARINEANLLETKF)
+        exec_cmd_gridgen.add_default_arg(self.task_config.GRIDGEN_EXEC)
+        exec_cmd_gridgen.add_default_arg(self.task_config.GRIDGEN_YAML)
+
+        mdau.run(exec_cmd_gridgen)
+
+        exec_cmd_letkf = Executable(self.task_config.APRUN_MARINEANLLETKF)
+        for letkf_exec_arg in self.task_config.letkf_exec_args:
+            exec_cmd_letkf.add_default_arg(letkf_exec_arg)
+
+        mdau.run(exec_cmd_letkf)
+
     @logit(logger)
     def finalize(self):
         """Method finalize for ocean and sea ice LETKF task
@@ -145,3 +181,11 @@ def finalize(self):
         """

         logger.info("finalize")
+
+        letkfsaveconf = AttrDict()
+        keys = ['current_cycle', 'DATA', 'NMEM_ENS', 'WINDOW_BEGIN', 'GDUMP_ENS',
+                'PARMgfs', 'ROTDIR', 'COM_OCEAN_LETKF_TMPL', 'COM_ICE_LETKF_TMPL']
+        for key in keys:
+            letkfsaveconf[key] = self.task_config[key]
+        letkf_save_list = parse_j2yaml(self.task_config.MARINE_LETKF_SAVE_YAML_TMPL, letkfsaveconf)
+        FileHandler(letkf_save_list).sync()

diff --git a/workflow/applications/gfs_cycled.py b/workflow/applications/gfs_cycled.py
index da78166ede..f92bf95fba 100644
--- a/workflow/applications/gfs_cycled.py
+++ b/workflow/applications/gfs_cycled.py
@@ -46,7 +46,7 @@ def _get_app_configs(self):
         if self.do_jediocnvar:
             configs += ['prepoceanobs', 'marineanlinit', 'marinebmat', 'marineanlvar']
             if self.do_hybvar:
-                configs += ['ocnanalecen']
+                configs += ['marineanlletkf', 'ocnanalecen']
             configs += ['marineanlchkpt', 'marineanlfinal']
             if self.do_vrfy_oceanda:
                 configs += ['ocnanalvrfy']
@@ -148,7 +148,7 @@ def get_task_names(self):
         if self.do_jediocnvar:
             gdas_gfs_common_tasks_before_fcst += ['prepoceanobs', 'marineanlinit', 'marinebmat', 'marineanlvar']
             if self.do_hybvar:
-                gdas_gfs_common_tasks_before_fcst += ['ocnanalecen']
+                gdas_gfs_common_tasks_before_fcst += ['marineanlletkf', 'ocnanalecen']
             gdas_gfs_common_tasks_before_fcst += ['marineanlchkpt', 'marineanlfinal']
             if self.do_vrfy_oceanda:
                 gdas_gfs_common_tasks_before_fcst += ['ocnanalvrfy']

diff --git a/workflow/rocoto/gfs_tasks.py b/workflow/rocoto/gfs_tasks.py
index 82dfb9f1d4..d3bb68a6b8 100644
--- a/workflow/rocoto/gfs_tasks.py
+++ b/workflow/rocoto/gfs_tasks.py
@@ -664,6 +664,32 @@ def prepoceanobs(self):

         return task

+    def marineanlletkf(self):
+
+        deps = []
+        dep_dict = {'type': 'metatask', 'name': f'enkfgdas_fcst', 'offset': f"-{timedelta_to_HMS(self._base['interval_gdas'])}"}
+        deps.append(rocoto.add_dependency(dep_dict))
+        dep_dict = {'type': 'task', 'name': f'{self.run}_prepoceanobs'}
+        deps.append(rocoto.add_dependency(dep_dict))
+        dependencies = rocoto.create_dependency(dep_condition='and', dep=deps)
+
+        resources = self.get_resource('marineanlletkf')
+        task_name = f'{self.run}_marineanlletkf'
+        task_dict = {'task_name': task_name,
+                     'resources': resources,
+                     'dependency': dependencies,
+                     'envars': self.envars,
+                     'cycledef': self.run.replace('enkf', ''),
+                     'command': f'{self.HOMEgfs}/jobs/rocoto/marineanlletkf.sh',
+                     'job_name': f'{self.pslot}_{task_name}_@H',
+                     'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
+                     'maxtries': '&MAXTRIES;'
+                     }
+
+        task = rocoto.create_task(task_dict)
+
+        return task
+
     def marinebmat(self):

         ocean_hist_path = self._template_to_rocoto_cycstring(self._base["COM_OCEAN_HISTORY_TMPL"], {'RUN': 'gdas'})

diff --git a/workflow/rocoto/tasks.py b/workflow/rocoto/tasks.py
index 92ceea73aa..b989def13f 100644
--- a/workflow/rocoto/tasks.py
+++ b/workflow/rocoto/tasks.py
@@ -15,7 +15,7 @@ class Tasks:
                   'prep', 'anal', 'sfcanl', 'analcalc', 'analdiag', 'arch', "cleanup",
                   'prepatmiodaobs', 'atmanlinit', 'atmanlvar', 'atmanlfv3inc', 'atmanlfinal',
                   'prepoceanobs',
-                  'marineanlinit', 'marinebmat', 'marineanlvar', 'ocnanalecen', 'marineanlchkpt', 'marineanlfinal', 'ocnanalvrfy',
+                  'marineanlinit', 'marineanlletkf', 'marinebmat', 'marineanlvar', 'ocnanalecen', 'marineanlchkpt', 'marineanlfinal', 'ocnanalvrfy',
                   'earc', 'ecen', 'echgres', 'ediag', 'efcs', 'eobs', 'eomg', 'epos', 'esfc', 'eupd',
                   'atmensanlinit', 'atmensanlobs', 'atmensanlsol', 'atmensanlletkf', 'atmensanlfv3inc', 'atmensanlfinal',

From f4e380ac4c024d6778c333babbeba73601360d07 Mon Sep 17 00:00:00 2001
From: mingshichen-noaa <48537176+mingshichen-noaa@users.noreply.github.com>
Date: Wed, 30 Oct 2024 18:25:45 -0400
Subject: [PATCH 04/14] Update global jdas enkf diag job with COMIN/COMOUT for COM prefix (#2959)

NCO has requested that each COM variable specify whether it is an input or an output. This completes that process for the global jdas enkf diagnostics job.

Refs https://github.com/NOAA-EMC/global-workflow/issues/2451
---
 jobs/JGDAS_ATMOS_ANALYSIS_DIAG |  5 ++--
 jobs/JGDAS_ENKF_DIAG           | 48 ++++++++++++++++++----------------
 scripts/exglobal_diag.sh       | 10 +++----
 3 files changed, 33 insertions(+), 30 deletions(-)

diff --git a/jobs/JGDAS_ATMOS_ANALYSIS_DIAG b/jobs/JGDAS_ATMOS_ANALYSIS_DIAG
index a1e0c9f1d5..c47bd4a47b 100755
--- a/jobs/JGDAS_ATMOS_ANALYSIS_DIAG
+++ b/jobs/JGDAS_ATMOS_ANALYSIS_DIAG
@@ -27,8 +27,9 @@ export OPREFIX="${RUN/enkf}.t${cyc}z."
 export GPREFIX="${GDUMP}.t${gcyc}z."
 export APREFIX="${RUN}.t${cyc}z."

-YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COM_ATMOS_ANALYSIS
-mkdir -m 775 -p "${COM_ATMOS_ANALYSIS}"
+YMD=${PDY} HH=${cyc} declare_from_tmpl -rx \
+    COMOUT_ATMOS_ANALYSIS:COM_ATMOS_ANALYSIS_TMPL
+mkdir -m 775 -p "${COMOUT_ATMOS_ANALYSIS}"

 ###############################################################
 # Run relevant script

diff --git a/jobs/JGDAS_ENKF_DIAG b/jobs/JGDAS_ENKF_DIAG
index cc8c933cc8..3daa8bfb73 100755
--- a/jobs/JGDAS_ENKF_DIAG
+++ b/jobs/JGDAS_ENKF_DIAG
@@ -30,56 +30,58 @@ export APREFIX="${RUN}.t${cyc}z."
 export GPREFIX="${GDUMP_ENS}.t${gcyc}z."
 GPREFIX_DET="${GDUMP}.t${gcyc}z."
-RUN=${RUN/enkf} YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COM_OBS
-MEMDIR="ensstat" YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COM_ATMOS_ANALYSIS
+RUN=${RUN/enkf} YMD=${PDY} HH=${cyc} declare_from_tmpl -rx \
+    COMIN_OBS:COM_OBS_TMPL
+MEMDIR="ensstat" YMD=${PDY} HH=${cyc} declare_from_tmpl -rx \
+    COMOUT_ATMOS_ANALYSIS:COM_ATMOS_ANALYSIS_TMPL

 RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} declare_from_tmpl -rx \
-    COM_OBS_PREV:COM_OBS_TMPL \
-    COM_ATMOS_ANALYSIS_DET_PREV:COM_ATMOS_ANALYSIS_TMPL
+    COMIN_OBS_PREV:COM_OBS_TMPL \
+    COMIN_ATMOS_ANALYSIS_DET_PREV:COM_ATMOS_ANALYSIS_TMPL

 MEMDIR="ensstat" RUN=${GDUMP_ENS} YMD=${gPDY} HH=${gcyc} declare_from_tmpl -rx \
-    COM_ATMOS_HISTORY_PREV:COM_ATMOS_HISTORY_TMPL
+    COMIN_ATMOS_HISTORY_PREV:COM_ATMOS_HISTORY_TMPL

-export ATMGES_ENSMEAN="${COM_ATMOS_HISTORY_PREV}/${GPREFIX}atmf006.ensmean.nc"
+export ATMGES_ENSMEAN="${COMIN_ATMOS_HISTORY_PREV}/${GPREFIX}atmf006.ensmean.nc"
 if [ ! -f ${ATMGES_ENSMEAN} ]; then
   echo "FATAL ERROR: FILE MISSING: ATMGES_ENSMEAN = ${ATMGES_ENSMEAN}"
   exit 1
 fi

 # Link observational data
-export PREPQC="${COM_OBS}/${OPREFIX}prepbufr"
+export PREPQC="${COMIN_OBS}/${OPREFIX}prepbufr"
 if [[ ! -f ${PREPQC} ]]; then
   echo "WARNING: Global PREPBUFR FILE ${PREPQC} MISSING"
 fi
-export TCVITL="${COM_OBS}/${OPREFIX}syndata.tcvitals.tm00"
+export TCVITL="${COMIN_OBS}/${OPREFIX}syndata.tcvitals.tm00"
 if [[ ${DONST} = "YES" ]]; then
-  export NSSTBF="${COM_OBS}/${OPREFIX}nsstbufr"
+  export NSSTBF="${COMIN_OBS}/${OPREFIX}nsstbufr"
 fi
-export PREPQCPF="${COM_OBS}/${OPREFIX}prepbufr.acft_profiles"
+export PREPQCPF="${COMIN_OBS}/${OPREFIX}prepbufr.acft_profiles"

 # Guess Bias correction coefficients related to control
-export GBIAS=${COM_ATMOS_ANALYSIS_DET_PREV}/${GPREFIX_DET}abias
-export GBIASPC=${COM_ATMOS_ANALYSIS_DET_PREV}/${GPREFIX_DET}abias_pc
-export GBIASAIR=${COM_ATMOS_ANALYSIS_DET_PREV}/${GPREFIX_DET}abias_air
-export GRADSTAT=${COM_ATMOS_ANALYSIS_DET_PREV}/${GPREFIX_DET}radstat
+export GBIAS=${COMIN_ATMOS_ANALYSIS_DET_PREV}/${GPREFIX_DET}abias
+export GBIASPC=${COMIN_ATMOS_ANALYSIS_DET_PREV}/${GPREFIX_DET}abias_pc
+export GBIASAIR=${COMIN_ATMOS_ANALYSIS_DET_PREV}/${GPREFIX_DET}abias_air
+export GRADSTAT=${COMIN_ATMOS_ANALYSIS_DET_PREV}/${GPREFIX_DET}radstat

 # Bias correction coefficients related to ensemble mean
-export ABIAS="${COM_ATMOS_ANALYSIS}/${APREFIX}abias.ensmean"
-export ABIASPC="${COM_ATMOS_ANALYSIS}/${APREFIX}abias_pc.ensmean"
-export ABIASAIR="${COM_ATMOS_ANALYSIS}/${APREFIX}abias_air.ensmean"
-export ABIASe="${COM_ATMOS_ANALYSIS}/${APREFIX}abias_int.ensmean"
+export ABIAS="${COMOUT_ATMOS_ANALYSIS}/${APREFIX}abias.ensmean"
+export ABIASPC="${COMOUT_ATMOS_ANALYSIS}/${APREFIX}abias_pc.ensmean"
+export ABIASAIR="${COMOUT_ATMOS_ANALYSIS}/${APREFIX}abias_air.ensmean"
+export ABIASe="${COMOUT_ATMOS_ANALYSIS}/${APREFIX}abias_int.ensmean"

 # Diagnostics related to ensemble mean
-export GSISTAT="${COM_ATMOS_ANALYSIS}/${APREFIX}gsistat.ensmean"
-export CNVSTAT="${COM_ATMOS_ANALYSIS}/${APREFIX}cnvstat.ensmean"
-export OZNSTAT="${COM_ATMOS_ANALYSIS}/${APREFIX}oznstat.ensmean"
-export RADSTAT="${COM_ATMOS_ANALYSIS}/${APREFIX}radstat.ensmean"
+export GSISTAT="${COMOUT_ATMOS_ANALYSIS}/${APREFIX}gsistat.ensmean"
+export CNVSTAT="${COMOUT_ATMOS_ANALYSIS}/${APREFIX}cnvstat.ensmean"
+export OZNSTAT="${COMOUT_ATMOS_ANALYSIS}/${APREFIX}oznstat.ensmean"
+export RADSTAT="${COMOUT_ATMOS_ANALYSIS}/${APREFIX}radstat.ensmean"

 # Select observations based on ensemble mean
 export RUN_SELECT="YES"
 export USE_SELECT="NO"
-export SELECT_OBS="${COM_ATMOS_ANALYSIS}/${APREFIX}obsinput.ensmean"
+export SELECT_OBS="${COMOUT_ATMOS_ANALYSIS}/${APREFIX}obsinput.ensmean"

 export DIAG_SUFFIX="_ensmean"
 export DIAG_COMPRESS="NO"

diff --git a/scripts/exglobal_diag.sh b/scripts/exglobal_diag.sh
index ed9bef05df..46a6e9863c 100755
--- a/scripts/exglobal_diag.sh
+++ b/scripts/exglobal_diag.sh
@@ -49,10 +49,10 @@ SENDDBN=${SENDDBN:-"NO"}

 # Analysis files
 export APREFIX=${APREFIX:-""}
-RADSTAT=${RADSTAT:-${COM_ATMOS_ANALYSIS}/${APREFIX}radstat}
-PCPSTAT=${PCPSTAT:-${COM_ATMOS_ANALYSIS}/${APREFIX}pcpstat}
-CNVSTAT=${CNVSTAT:-${COM_ATMOS_ANALYSIS}/${APREFIX}cnvstat}
-OZNSTAT=${OZNSTAT:-${COM_ATMOS_ANALYSIS}/${APREFIX}oznstat}
+RADSTAT=${RADSTAT:-${COMOUT_ATMOS_ANALYSIS}/${APREFIX}radstat}
+PCPSTAT=${PCPSTAT:-${COMOUT_ATMOS_ANALYSIS}/${APREFIX}pcpstat}
+CNVSTAT=${CNVSTAT:-${COMOUT_ATMOS_ANALYSIS}/${APREFIX}cnvstat}
+OZNSTAT=${OZNSTAT:-${COMOUT_ATMOS_ANALYSIS}/${APREFIX}oznstat}

 # Remove stat file if file already exists
 [[ -s $RADSTAT ]] && rm -f $RADSTAT
@@ -74,7 +74,7 @@ nm=""
 if [ $CFP_MP = "YES" ]; then
   nm=0
 fi
-DIAG_DIR=${DIAG_DIR:-${COM_ATMOS_ANALYSIS}/gsidiags}
+DIAG_DIR=${DIAG_DIR:-${COMOUT_ATMOS_ANALYSIS}/gsidiags}
 REMOVE_DIAG_DIR=${REMOVE_DIAG_DIR:-"NO"}

 # Set script / GSI control parameters

From 1cc407805b48cc2f0fbcc2e65cc960d3688b6161 Mon Sep 17 00:00:00 2001
From: Walter Kolczynski - NOAA
Date: Thu, 31 Oct 2024 04:11:23 -0400
Subject: [PATCH 05/14] Make wxflow links static instead of from link_workflow (#3008)

Commits symlinks to the repo for wxflow instead of relying on link_workflow to create them. This will allow testing in the ci or workflow directory without needing to run an otherwise unnecessary link_workflow first.
---
 .gitignore            | 5 -----
 ci/scripts/wxflow     | 1 +
 sorc/link_workflow.sh | 18 ------------------
 ush/python/wxflow     | 1 +
 workflow/wxflow       | 1 +
 5 files changed, 3 insertions(+), 23 deletions(-)
 create mode 120000 ci/scripts/wxflow
 create mode 120000 ush/python/wxflow
 create mode 120000 workflow/wxflow

diff --git a/.gitignore b/.gitignore
index 8fc6d0b20b..4ec62993d3 100644
--- a/.gitignore
+++ b/.gitignore
@@ -171,11 +171,6 @@ ush/bufr2ioda_insitu*
 versions/build.ver
 versions/run.ver

-# wxflow checkout and symlinks
-ush/python/wxflow
-workflow/wxflow
-ci/scripts/wxflow
-
 # jcb checkout and symlinks
 ush/python/jcb
 workflow/jcb

diff --git a/ci/scripts/wxflow b/ci/scripts/wxflow
new file mode 120000
index 0000000000..9dbee42bc8
--- /dev/null
+++ b/ci/scripts/wxflow
@@ -0,0 +1 @@
+../../sorc/wxflow/src/wxflow
\ No newline at end of file

diff --git a/sorc/link_workflow.sh b/sorc/link_workflow.sh
index 870ddc5eba..3d81f7b7d4 100755
--- a/sorc/link_workflow.sh
+++ b/sorc/link_workflow.sh
@@ -86,15 +86,6 @@ esac
 # Source fix version file
 source "${HOMEgfs}/versions/fix.ver"

-# Link python pacakges in ush/python
-# TODO: This will be unnecessary when these are part of the virtualenv
-packages=("wxflow")
-for package in "${packages[@]}"; do
-  cd "${HOMEgfs}/ush/python" || exit 1
-  [[ -s "${package}" ]] && rm -f "${package}"
-  ${LINK} "${HOMEgfs}/sorc/${package}/src/${package}" .
-done
-
 # Link GDASapp python packages in ush/python
 packages=("jcb")
 for package in "${packages[@]}"; do
   cd "${HOMEgfs}/ush/python" || exit 1
   [[ -s "${package}" ]] && rm -f "${package}"
   ${LINK} "${HOMEgfs}/sorc/gdas.cd/sorc/${package}/src/${package}" .
 done

-# Link wxflow in workflow and ci/scripts
-# TODO: This will be unnecessary when wxflow is part of the virtualenv
-cd "${HOMEgfs}/workflow" || exit 1
-[[ -s "wxflow" ]] && rm -f wxflow
-${LINK} "${HOMEgfs}/sorc/wxflow/src/wxflow" .
-cd "${HOMEgfs}/ci/scripts" || exit 1
-[[ -s "wxflow" ]] && rm -f wxflow
-${LINK} "${HOMEgfs}/sorc/wxflow/src/wxflow" .
-
 # Link fix directories
 if [[ -n "${FIX_DIR}" ]]; then
   if [[ ! -d "${HOMEgfs}/fix" ]]; then mkdir "${HOMEgfs}/fix" || exit 1; fi

diff --git a/ush/python/wxflow b/ush/python/wxflow
new file mode 120000
index 0000000000..9dbee42bc8
--- /dev/null
+++ b/ush/python/wxflow
@@ -0,0 +1 @@
+../../sorc/wxflow/src/wxflow
\ No newline at end of file

diff --git a/workflow/wxflow b/workflow/wxflow
new file mode 120000
index 0000000000..7ea96a12bf
--- /dev/null
+++ b/workflow/wxflow
@@ -0,0 +1 @@
+../sorc/wxflow/src/wxflow
\ No newline at end of file

From 5a8a5aa13b0143c871dc466e0ed062c55c7cd573 Mon Sep 17 00:00:00 2001
From: David Huber <69919478+DavidHuber-NOAA@users.noreply.github.com>
Date: Thu, 31 Oct 2024 12:10:07 -0400
Subject: [PATCH 06/14] Fix the name of the TC tracker filenames in archive.py (#3030)

This corrects the names of the product files created by the `tracker` job when attempting to rename the experiment and push the file to the `ARCDIR` within the `arch` job.
---
 ush/python/pygfs/task/archive.py | 15 +++++----------
 1 file changed, 5 insertions(+), 10 deletions(-)

diff --git a/ush/python/pygfs/task/archive.py b/ush/python/pygfs/task/archive.py
index d138474e9a..108cd2ed27 100644
--- a/ush/python/pygfs/task/archive.py
+++ b/ush/python/pygfs/task/archive.py
@@ -88,11 +88,6 @@ def configure(self, arch_dict: Dict[str, Any]) -> (Dict[str, Any], List[Dict[str
         if not os.path.isdir(arch_dict.ROTDIR):
             raise FileNotFoundError(f"FATAL ERROR: The ROTDIR ({arch_dict.ROTDIR}) does not exist!")

-        if arch_dict.RUN in ["gdas", "gfs"]:
-
-            # Copy the cyclone track files and rename the experiments
-            Archive._rename_cyclone_expt(arch_dict)
-
         # Collect datasets that need to be archived
         # Each dataset represents one tarball
@@ -371,14 +366,14 @@ def _rename_cyclone_expt(arch_dict) -> None:

         if run == "gfs":
             in_track_file = (track_dir_in + "/avno.t" +
-                             cycle_HH + "z.cycle.trackatcfunix")
+                             cycle_HH + "z.cyclone.trackatcfunix")
             in_track_p_file = (track_dir_in + "/avnop.t" +
-                               cycle_HH + "z.cycle.trackatcfunixp")
+                               cycle_HH + "z.cyclone.trackatcfunix")
         elif run == "gdas":
             in_track_file = (track_dir_in + "/gdas.t" +
-                             cycle_HH + "z.cycle.trackatcfunix")
+                             cycle_HH + "z.cyclone.trackatcfunix")
             in_track_p_file = (track_dir_in + "/gdasp.t" +
-                               cycle_HH + "z.cycle.trackatcfunixp")
+                               cycle_HH + "z.cyclone.trackatcfunix")

         if not os.path.isfile(in_track_file):
             # Do not attempt to archive the outputs
@@ -416,7 +411,7 @@ def replace_string_from_to_file(filename_in, filename_out, search_str, replace_s
             with open("/tmp/track_file", "w") as new_file:
                 new_file.writelines(out_lines)

-            shutil.move("tmp/track_file", filename_out)
+            shutil.move("/tmp/track_file", filename_out)

         replace_string_from_to_file(in_track_file, out_track_file, "AVNO", pslot4)
         replace_string_from_to_file(in_track_p_file, out_track_p_file, "AVNO", pslot4)

From ca8cd7af51daa20636a2045feb95105dc5c3510d Mon Sep 17 00:00:00 2001
From: TerrenceMcGuinness-NOAA
Date: Thu, 31 Oct 2024 20:37:07 +0000
Subject: [PATCH 07/14] Auto provisioning of PW clusters from GitHub CI added (#3051)

# Description

This update to the GitHub-dispatched CI pipeline that executes the self-hosted GitHub Runner on Parallel Works adds the feature that starts up the virtual compute cluster automatically. We now have a complete end-to-end automated process for running CI tests in Parallel Works. Next steps would be tear-down and adding more tests to see if it scales.

It also has the update for getting a PR to load up when it originates from a forked repo.

# Type of change

- [ ] Bug fix (fixes something broken)
- [x] New feature (adds functionality)
- [ ] Maintenance (code refactor, clean-up, new CI test, etc.)

# Change characteristics

- Is this a breaking change (a change in existing functionality)? NO
- Does this change require a documentation update? YES
- Does this change require an update to any of the following submodules? NO (If YES, please add a link to any PRs that are pending.)
  - [ ] EMC verif-global
  - [ ] GDAS
  - [ ] GFS-utils
  - [ ] GSI
  - [ ] GSI-monitor
  - [ ] GSI-utils
  - [ ] UFS-utils
  - [ ] UFS-weather-model
  - [ ] wxflow

# How has this been tested?

The start-up aspect has been tested from my forked repo, but runs originating from forked repos could not be tested. The test from forked repos has to be done once the workflow pipeline is in the **develop** branch.

# Checklist

- [x] Any dependent changes have been merged and published
- [x] My code follows the style guidelines of this project
- [x] I have performed a self-review of my own code
- [x] I have commented my code, particularly in hard-to-understand areas
- [ ] I have documented my code, including function, input, and output descriptions
- [x] My changes generate no new warnings
- [x] New and existing tests pass with my changes
- [x] This change is covered by an existing CI test or a new one has been added
- [ ] Any new scripts have been added to the .github/CODEOWNERS file with owners
- [ ] I have made corresponding changes to the system documentation if necessary

---------

Co-authored-by: tmcguinness
Co-authored-by: tmcguinness
---
 .github/workflows/pw_aws_ci.yaml | 36 +++++++++++++++++++++++++++++++-
 1 file changed, 35 insertions(+), 1 deletion(-)

diff --git a/.github/workflows/pw_aws_ci.yaml b/.github/workflows/pw_aws_ci.yaml
index 245e219dd4..c59f027920 100644
--- a/.github/workflows/pw_aws_ci.yaml
+++ b/.github/workflows/pw_aws_ci.yaml
@@ -31,24 +31,57 @@ env:
   MACHINE_ID: noaacloud

 jobs:
+
+  run-start-clusters:
+    runs-on: ubuntu-latest
+    env:
+      PW_PLATFORM_HOST: noaa.parallel.works
+    steps:
+      - name: Checkout pw-cluster-automation repository
+        uses: actions/checkout@v4
+        with:
+          repository: TerrenceMcGuinness-NOAA/pw-cluster-automation
+          path: pw-cluster-automation
+          ref: pw_cluster_noaa
+
+      - name: Run startClusters
+        run: |
+          mkdir -p ~/.ssh
+          echo "${{ secrets.ID_RSA_AWS }}" > ~/.ssh/id_rsa
+          echo "${{ secrets.PW_API_KEY }}" > ~/.ssh/pw_api.key
+          chmod 700 ~/.ssh
+          chmod 600 ~/.ssh/id_rsa
+          chmod 600 ~/.ssh/pw_api.key
+          if [ "${{ github.event.inputs.os }}" == "rocky" ]; then
+            clustername="globalworkflowciplatformrocky8"
+          elif [ "${{ github.event.inputs.os }}" == "centos" ]; then
+            clustername="awsemctmcgc7i48xlargeciplatform"
+          fi
+          python3 pw-cluster-automation/startClusters.py $clustername
+
   fetch-branch:
+    needs: run-start-clusters
     runs-on: ubuntu-latest
     env:
       GH_TOKEN: ${{ secrets.GITHUBTOKEN }}
     outputs:
       branch: ${{ steps.get-branch.outputs.branch }}
+      repo: ${{ steps.get-branch.outputs.repo }}
     steps:
-      - name: Fetch branch name for PR
+      - name: Fetch branch name and repo for PR
         id: get-branch
         run: |
           pr_number=${{ github.event.inputs.pr_number }}
           repo=${{ github.repository }}
           if [ "$pr_number" -eq "0" ]; then
             branch=${{ github.event.inputs.ref }}
+            repo_url="https://github.com/${{ github.repository_owner }}/${{ github.repository }}.git"
           else
             branch=$(gh pr view $pr_number --repo $repo --json headRefName --jq '.headRefName')
+            repo_url=$(gh pr view $pr_number --repo $repo --json headRepository --jq '.headRepository.url')
           fi
           echo "::set-output name=branch::$branch"
+          echo "::set-output name=repo::$repo_url"

   checkout:
     needs: fetch-branch
@@ -64,6 +97,7 @@ jobs:
       with:
         path: ${{ github.run_id }}/HOMEgfs
         submodules: 'recursive'
+        repository: ${{ needs.fetch-branch.outputs.repo }}
         ref: ${{ needs.fetch-branch.outputs.branch }}

   build-link:

From d95630a56bf8b1ac430b33f687259cf44cc63b76 Mon Sep 17 00:00:00 2001
From: Eric Sinsky - NOAA <48259628+EricSinsky-NOAA@users.noreply.github.com>
Date: Fri, 1 Nov 2024 02:13:02 -0400
Subject: [PATCH 08/14] Add more ocean variables for post-processing in GEFS (#2995)

This PR adds an ocean variable `tob` (Sea Water Potential Temperature at Sea Floor) for post-processing in GEFS, which is a variable that has been requested for GEFSv13 and the reforecast. Also, this PR moves the atmos variable `PEVPR` from the "b" group to the "a" group of pgrb products in GEFS. This was requested by a reforecast stakeholder.

Resolves #2993
---
 parm/post/oceanice_products_gefs.yaml       | 2 +-
 parm/product/gefs.0p25.fFFF.paramlist.a.txt | 1 +
 parm/product/gefs.0p25.fFFF.paramlist.b.txt | 1 -
 sorc/gfs_utils.fd                           | 2 +-
 4 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/parm/post/oceanice_products_gefs.yaml b/parm/post/oceanice_products_gefs.yaml
index fea88df2bb..f961fab83f 100644
--- a/parm/post/oceanice_products_gefs.yaml
+++ b/parm/post/oceanice_products_gefs.yaml
@@ -33,7 +33,7 @@ ocean:
   {% elif model_grid == 'mx500' %}
   ocean_levels: [5, 15, 25, 35, 45, 55, 65, 75, 85, 95, 105, 115, 125, 135, 145, 155, 165, 175, 185, 195, 205, 215, 226, 241, 267]
   {% endif %}
-  subset: ['SSH', 'SST', 'SSS', 'speed', 'MLD_003', 'latent', 'sensible', 'SW', 'LW', 'LwLatSens', 'Heat_PmE', 'SSU', 'SSV', 'taux', 'tauy', 'temp', 'so', 'uo', 'vo']
+  subset: ['SSH', 'SST', 'SSS', 'speed', 'MLD_003', 'latent', 'sensible', 'SW', 'LW', 'LwLatSens', 'Heat_PmE', 'SSU', 'SSV', 'taux', 'tauy', 'temp', 'tob', 'so', 'uo', 'vo']
   data_in:
     copy:
       - ["{{ COM_OCEAN_HISTORY }}/{{ RUN }}.ocean.t{{ current_cycle | strftime('%H') }}z.{{ interval }}hr_avg.f{{ '%03d' % forecast_hour }}.nc", "{{ DATA }}/ocean.nc"]

diff --git a/parm/product/gefs.0p25.fFFF.paramlist.a.txt b/parm/product/gefs.0p25.fFFF.paramlist.a.txt
index 303752ac17..4bb87c32ff 100644
--- a/parm/product/gefs.0p25.fFFF.paramlist.a.txt
+++ b/parm/product/gefs.0p25.fFFF.paramlist.a.txt
@@ -19,6 +19,7 @@
 :CIN:180-0 mb above ground:
 :CIN:surface:
 :HLCY:3000-0 m above ground:
+:PEVPR:surface:
 :TCDC:entire atmosphere (considered as a single layer):
 :WEASD:surface:
 :SNOD:surface:

diff --git a/parm/product/gefs.0p25.fFFF.paramlist.b.txt b/parm/product/gefs.0p25.fFFF.paramlist.b.txt
index ccad9da4d0..5c406ce34d 100644
--- a/parm/product/gefs.0p25.fFFF.paramlist.b.txt
+++ b/parm/product/gefs.0p25.fFFF.paramlist.b.txt
@@ -151,7 +151,6 @@
 :O3MR:5 mb:
 :O3MR:70 mb:
 :O3MR:7 mb:
-:PEVPR:surface:
 :PLI:30-0 mb above ground:
 :PLPL:255-0 mb above ground:
 :POT:0.995 sigma level:

diff --git a/sorc/gfs_utils.fd b/sorc/gfs_utils.fd
index a00cc0949e..856a42076a 160000
--- a/sorc/gfs_utils.fd
+++ b/sorc/gfs_utils.fd
@@ -1 +1 @@
-Subproject commit a00cc0949e2f901e73b58d54834517743916c69a
+Subproject commit 856a42076a65256aaae9b29f4891532cb4a3fbca

From 5e867df8aaffb95f7895fa741db33e9d12c6a4dc Mon Sep 17 00:00:00 2001
From: TerrenceMcGuinness-NOAA
Date: Fri, 1 Nov 2024 18:05:43 +0000
Subject: [PATCH 09/14] GitHub CI Pipeline update for debugging forked PR support (#3056)

# Description

Fixing a bug in the GitHub CI pipeline with passing repo variables to `actions/checkout@v4` to support forked PRs. Had to debug directly from develop in the authoritative repo because we did not have a fork of a fork for the development tests.

# Type of change

- [x] Bug fix (fixes something broken)
- [ ] New feature (adds functionality)
- [ ] Maintenance (code refactor, clean-up, new CI test, etc.)

---------

Co-authored-by: Terry McGuinness
---
 .github/workflows/pw_aws_ci.yaml | 10 ++++++----
 1 file changed, 6 insertions(+), 4 deletions(-)

diff --git a/.github/workflows/pw_aws_ci.yaml b/.github/workflows/pw_aws_ci.yaml
index c59f027920..f97825c5bc 100644
--- a/.github/workflows/pw_aws_ci.yaml
+++ b/.github/workflows/pw_aws_ci.yaml
@@ -80,8 +80,10 @@ jobs:
           branch=$(gh pr view $pr_number --repo $repo --json headRefName --jq '.headRefName')
           repo_url=$(gh pr view $pr_number --repo $repo --json headRepository --jq '.headRepository.url')
         fi
-        echo "::set-output name=branch::$branch"
-        echo "::set-output name=repo::$repo_url"
+        {
+          echo "BRANCH=$branch"
+          echo "REPO=$repo_url"
+        } >> $GITHUB_OUTPUT

   checkout:
     needs: fetch-branch
@@ -97,8 +99,8 @@ jobs:
       with:
         path: ${{ github.run_id }}/HOMEgfs
         submodules: 'recursive'
-        repository: ${{ needs.fetch-branch.outputs.repo }}
-        ref: ${{ needs.fetch-branch.outputs.branch }}
+        repository: ${{ steps.git-branch.outputs.BRANCH }}
+        ref: ${{ steps.git-branch.outputs.REPO }}

   build-link:
     needs: checkout

From 19eca3f2aae6116a45f6449ab52877e2eec0f011 Mon Sep 17 00:00:00 2001
From: Kate Friedman
Date: Fri, 1 Nov 2024 14:29:08 -0400
Subject: [PATCH 10/14] Revert "GitHub CI Pipeline update for debugging forked PR support" (#3057)

Reverts NOAA-EMC/global-workflow#3056

@TerrenceMcGuinness-NOAA will open a new PR for these changes to be reviewed and approved.
---
 .github/workflows/pw_aws_ci.yaml | 10 ++++------
 1 file changed, 4 insertions(+), 6 deletions(-)

diff --git a/.github/workflows/pw_aws_ci.yaml b/.github/workflows/pw_aws_ci.yaml
index f97825c5bc..c59f027920 100644
--- a/.github/workflows/pw_aws_ci.yaml
+++ b/.github/workflows/pw_aws_ci.yaml
@@ -80,10 +80,8 @@ jobs:
           branch=$(gh pr view $pr_number --repo $repo --json headRefName --jq '.headRefName')
           repo_url=$(gh pr view $pr_number --repo $repo --json headRepository --jq '.headRepository.url')
         fi
-        {
-          echo "BRANCH=$branch"
-          echo "REPO=$repo_url"
-        } >> $GITHUB_OUTPUT
+        echo "::set-output name=branch::$branch"
+        echo "::set-output name=repo::$repo_url"

   checkout:
     needs: fetch-branch
@@ -99,8 +97,8 @@ jobs:
       with:
         path: ${{ github.run_id }}/HOMEgfs
         submodules: 'recursive'
-        repository: ${{ steps.git-branch.outputs.BRANCH }}
-        ref: ${{ steps.git-branch.outputs.REPO }}
+        repository: ${{ needs.fetch-branch.outputs.repo }}
+        ref: ${{ needs.fetch-branch.outputs.branch }}

   build-link:
     needs: checkout

From 5bde6495b9212ef6c3d4afd26999f3cb844756ad Mon Sep 17 00:00:00 2001
From: TerrenceMcGuinness-NOAA
Date: Fri, 1 Nov 2024 20:25:33 +0000
Subject: [PATCH 11/14] PW CI pipeline update5 ready for review so it can be merged and tested (#3059)

# Description

Latest updates to the CI GitHub pipeline:

- Explicitly gets the owner of the repo from PRs when coming in from forked repos (was deficient in the last iteration)
- Updated to the more current GITHUB_OUTPUT method for inter-job variable passing

# Type of change

- [x] Bug fix (fixes something broken)
- [ ] New feature (adds functionality)
- [ ] Maintenance (code refactor, clean-up, new CI test, etc.)

NOTE: Many updates were used in the PR process as the pipeline development had to occur directly in the authoritative repo on the develop branch for testing `actions/checkout@v4` when cloning from a forked repo.

# How is this tested

Once the update is made in the default develop branch, the action can be tested. We cannot test this from a forked repo because said test would require a fork of a fork.

---------

Co-authored-by: Terry McGuinness
---
 .github/workflows/pw_aws_ci.yaml | 10 +++++++---
 1 file changed, 7 insertions(+), 3 deletions(-)

diff --git a/.github/workflows/pw_aws_ci.yaml b/.github/workflows/pw_aws_ci.yaml
index c59f027920..ffee433425 100644
--- a/.github/workflows/pw_aws_ci.yaml
+++ b/.github/workflows/pw_aws_ci.yaml
@@ -78,10 +78,14 @@ jobs:
           repo_url="https://github.com/${{ github.repository_owner }}/${{ github.repository }}.git"
         else
           branch=$(gh pr view $pr_number --repo $repo --json headRefName --jq '.headRefName')
-          repo_url=$(gh pr view $pr_number --repo $repo --json headRepository --jq '.headRepository.url')
+          repo_owner=$(gh pr view $pr_number --repo $repo --json headRepositoryOwner --jq '.headRepositoryOwner.login')
+          repo_name=$(gh pr view $pr_number --repo $repo --json headRepository --jq '.headRepository.name')
+          repo_url="https://github.com/$repo_owner/$repo_name.git"
        fi
-        echo "::set-output name=branch::$branch"
-        echo "::set-output name=repo::$repo_url"
+        {
+          echo "branch=$branch"
+          echo "repo=$repo_url"
+        } >> $GITHUB_OUTPUT

   checkout:
     needs: fetch-branch

From 75c3c672597f8eda4d1873daf54ee95c5d1a2f7d Mon Sep 17 00:00:00 2001
From: TerrenceMcGuinness-NOAA
Date: Fri, 1 Nov 2024 21:00:16 +0000
Subject: [PATCH 12/14] Update workflow pipeline (#3060)

Change repo_url to the repo owner/name format for actions/checkout@v4, as part of in-place pipeline development within the authoritative repo.
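As a hypothetical standalone sketch (paraphrasing the `gh` calls introduced in the previous patch; `$pr_number` and `$repo` are assumed to already be set):

```bash
# actions/checkout expects "owner/name" in its `repository:` input, not a full URL
repo_owner=$(gh pr view "$pr_number" --repo "$repo" --json headRepositoryOwner --jq '.headRepositoryOwner.login')
repo_name=$(gh pr view "$pr_number" --repo "$repo" --json headRepository --jq '.headRepository.name')
repo="${repo_owner}/${repo_name}"   # e.g. "someuser/global-workflow" for a forked PR
```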
---
 .github/workflows/pw_aws_ci.yaml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/.github/workflows/pw_aws_ci.yaml b/.github/workflows/pw_aws_ci.yaml
index ffee433425..c28f9abbf0 100644
--- a/.github/workflows/pw_aws_ci.yaml
+++ b/.github/workflows/pw_aws_ci.yaml
@@ -75,12 +75,12 @@ jobs:
         repo=${{ github.repository }}
         if [ "$pr_number" -eq "0" ]; then
           branch=${{ github.event.inputs.ref }}
-          repo_url="https://github.com/${{ github.repository_owner }}/${{ github.repository }}.git"
+          repo="{{ github.repository_owner }}/${{ github.repository }}"
         else
           branch=$(gh pr view $pr_number --repo $repo --json headRefName --jq '.headRefName')
           repo_owner=$(gh pr view $pr_number --repo $repo --json headRepositoryOwner --jq '.headRepositoryOwner.login')
           repo_name=$(gh pr view $pr_number --repo $repo --json headRepository --jq '.headRepository.name')
-          repo_url="https://github.com/$repo_owner/$repo_name.git"
+          repo="$repo_owner/$repo_name"
         fi
         {
           echo "branch=$branch"

From c667ffaa0736c7b3b6abbccbaffe9c63a26e67c0 Mon Sep 17 00:00:00 2001
From: TerrenceMcGuinness-NOAA
Date: Fri, 1 Nov 2024 21:38:43 +0000
Subject: [PATCH 13/14] Update to action workflow pipeline in default repo for development (#3061)

# Description

Change **repo_url** to **repo** in order to pass the correct value to `actions/checkout@v4`, as part of in-place pipeline development within the authoritative repo (ergo no review necessary).

# Type of change

- [x] Bug fix (fixes something broken)
- [ ] New feature (adds functionality)
- [ ] Maintenance (code refactor, clean-up, new CI test, etc.)

Co-authored-by: Terry McGuinness
---
 .github/workflows/pw_aws_ci.yaml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/pw_aws_ci.yaml b/.github/workflows/pw_aws_ci.yaml
index c28f9abbf0..9d4a6f3c37 100644
--- a/.github/workflows/pw_aws_ci.yaml
+++ b/.github/workflows/pw_aws_ci.yaml
@@ -84,7 +84,7 @@ jobs:
         fi
         {
           echo "branch=$branch"
-          echo "repo=$repo_url"
+          echo "repo=$repo"
         } >> $GITHUB_OUTPUT

   checkout:

From 152bb45041ff7b69d14db98648b351a8f527e8d5 Mon Sep 17 00:00:00 2001
From: TerrenceMcGuinness-NOAA
Date: Fri, 1 Nov 2024 23:19:08 +0000
Subject: [PATCH 14/14] Update to action workflow pipeline in default repo for development (#3062)

# Description

Still did not have the repo value correct for when PR=0 and a branch is selected from the actions menu. This is part of in-place pipeline development within the authoritative repo (_ergo no review necessary_).

# Type of change

- [x] Bug fix (fixes something broken)
- [ ] New feature (adds functionality)
- [ ] Maintenance (code refactor, clean-up, new CI test, etc.)
---------

Co-authored-by: Terry McGuinness
---
 .github/workflows/pw_aws_ci.yaml | 5 ++---
 1 file changed, 2 insertions(+), 3 deletions(-)

diff --git a/.github/workflows/pw_aws_ci.yaml b/.github/workflows/pw_aws_ci.yaml
index 9d4a6f3c37..f398ca4baf 100644
--- a/.github/workflows/pw_aws_ci.yaml
+++ b/.github/workflows/pw_aws_ci.yaml
@@ -15,7 +15,7 @@ on:
   workflow_dispatch:
     inputs:
       pr_number:
-        description: 'Pull Request Number (use 0 for non-PR)'
+        description: 'PR Number (use 0 for non-PR)'
         required: true
         default: '0'
       os:
@@ -72,10 +72,9 @@ jobs:
         id: get-branch
         run: |
           pr_number=${{ github.event.inputs.pr_number }}
-          repo=${{ github.repository }}
           if [ "$pr_number" -eq "0" ]; then
             branch=${{ github.event.inputs.ref }}
-            repo="{{ github.repository_owner }}/${{ github.repository }}"
+            repo=${{ github.repository }}
           else
             branch=$(gh pr view $pr_number --repo $repo --json headRefName --jq '.headRefName')
             repo_owner=$(gh pr view $pr_number --repo $repo --json headRepositoryOwner --jq '.headRepositoryOwner.login')