From 61de004d4f9e9edf8a31bb173f2719b46451a36a Mon Sep 17 00:00:00 2001 From: Jessica Meixner Date: Wed, 12 Jun 2024 11:03:13 -0400 Subject: [PATCH 01/20] Update ufs-weather-model (#2663) Updates ufs-weather-model, this updates RDHPCS to the newer spack-stack allowing some temporary fixes to be reverted. * removes upp submodule * uses upp from the ufs-weather-model * restores the build and link that were hacked during the Hera Rocky 8 transition to allow for UPP submodule * Removes forecast directories in clean-up Resolves #2617 Resolves #2437 --------- Co-authored-by: Rahul Mahajan --- .gitmodules | 4 ---- scripts/exglobal_cleanup.sh | 7 +++++++ sorc/build_upp.sh | 2 +- sorc/link_workflow.sh | 9 ++++----- sorc/ufs_model.fd | 2 +- sorc/upp.fd | 1 - ush/forecast_postdet.sh | 11 +++++------ 7 files changed, 18 insertions(+), 18 deletions(-) delete mode 160000 sorc/upp.fd diff --git a/.gitmodules b/.gitmodules index ea1b5c06af..1c3b7acfa5 100644 --- a/.gitmodules +++ b/.gitmodules @@ -26,10 +26,6 @@ [submodule "sorc/gsi_monitor.fd"] path = sorc/gsi_monitor.fd url = https://github.com/NOAA-EMC/GSI-Monitor.git -[submodule "sorc/upp.fd"] - path = sorc/upp.fd - url = https://github.com/NOAA-EMC/UPP.git - ignore = dirty [submodule "sorc/jcb"] path = sorc/jcb url = https://github.com/noaa-emc/jcb diff --git a/scripts/exglobal_cleanup.sh b/scripts/exglobal_cleanup.sh index c2b748f696..7c3dfafbad 100755 --- a/scripts/exglobal_cleanup.sh +++ b/scripts/exglobal_cleanup.sh @@ -2,6 +2,13 @@ source "${USHgfs}/preamble.sh" +# Remove DATAoutput from the forecast model run +# TODO: Handle this better +DATAfcst="${DATAROOT}/${RUN}fcst.${PDY:-}${cyc}" +if [[ -d "${DATAfcst}" ]]; then rm -rf "${DATAfcst}"; fi +#DATAefcs="${DATAROOT}/${RUN}efcs???${PDY:-}${cyc}" +rm -rf "${DATAROOT}/${RUN}efcs"*"${PDY:-}${cyc}" + ############################################################### # Clean up previous cycles; various depths # PRIOR CYCLE: Leave the prior cycle alone diff --git 
a/sorc/build_upp.sh b/sorc/build_upp.sh index 1dca0035fd..e217e171db 100755 --- a/sorc/build_upp.sh +++ b/sorc/build_upp.sh @@ -26,6 +26,6 @@ if [[ ! -d "../exec" ]]; then mkdir -p ../exec fi -cd upp.fd/tests +cd ufs_model.fd/FV3/upp/tests # shellcheck disable=SC2086 BUILD_JOBS=${BUILD_JOBS:-8} ./compile_upp.sh ${_opts} diff --git a/sorc/link_workflow.sh b/sorc/link_workflow.sh index f338f2bad3..593a4694c1 100755 --- a/sorc/link_workflow.sh +++ b/sorc/link_workflow.sh @@ -370,11 +370,10 @@ fi #--link source code directories #------------------------------ cd "${HOMEgfs}/sorc" || exit 8 -# TODO: Commenting out until UPP is up-to-date with Rocky-8. -#if [[ -d ufs_model.fd ]]; then -# [[ -d upp.fd ]] && rm -rf upp.fd -# ${LINK} ufs_model.fd/FV3/upp upp.fd -#fi +if [[ -d ufs_model.fd ]]; then + [[ -d upp.fd ]] && rm -rf upp.fd + ${LINK} ufs_model.fd/FV3/upp upp.fd +fi if [[ -d gsi_enkf.fd ]]; then [[ -d gsi.fd ]] && rm -rf gsi.fd diff --git a/sorc/ufs_model.fd b/sorc/ufs_model.fd index 5bec704243..485ccdfc4a 160000 --- a/sorc/ufs_model.fd +++ b/sorc/ufs_model.fd @@ -1 +1 @@ -Subproject commit 5bec704243286421fc613838fc67a2129e96acd6 +Subproject commit 485ccdfc4a7ed6deeb02d82c2cebe51b37e892f5 diff --git a/sorc/upp.fd b/sorc/upp.fd deleted file mode 160000 index 83e83a938b..0000000000 --- a/sorc/upp.fd +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 83e83a938b5794a628d30e66a54902dabe58737d diff --git a/ush/forecast_postdet.sh b/ush/forecast_postdet.sh index 3f6ffcb8c2..aff4f5a394 100755 --- a/ush/forecast_postdet.sh +++ b/ush/forecast_postdet.sh @@ -217,12 +217,11 @@ FV3_out() { echo "SUB ${FUNCNAME[0]}: copying output data for FV3" # Copy configuration files - if [[ "${RUN}" == "gfs" || "${RUN}" == "gefs" ]]; then - ${NCP} "${DATA}/input.nml" "${COMOUT_CONF}/ufs.input.nml" - ${NCP} "${DATA}/model_configure" "${COMOUT_CONF}/ufs.model_configure" - ${NCP} "${DATA}/ufs.configure" "${COMOUT_CONF}/ufs.ufs.configure" - ${NCP} "${DATA}/diag_table" "${COMOUT_CONF}/ufs.diag_table" 
- fi + ${NCP} "${DATA}/input.nml" "${COMOUT_CONF}/ufs.input.nml" + ${NCP} "${DATA}/model_configure" "${COMOUT_CONF}/ufs.model_configure" + ${NCP} "${DATA}/ufs.configure" "${COMOUT_CONF}/ufs.ufs.configure" + ${NCP} "${DATA}/diag_table" "${COMOUT_CONF}/ufs.diag_table" + # Create an array of fv3 restart files local fv3_restart_files tile_files fv3_restart_file restart_file From 2e6f1fcde9935619352b1b26cba42ec0f4d845ed Mon Sep 17 00:00:00 2001 From: Guoqing Ge Date: Wed, 12 Jun 2024 09:06:23 -0600 Subject: [PATCH 02/20] Link both global-nest fix files and non-nest ones at the same time (#2632) This PR enables linking both global-nest fix files and non-nest ones at the same time and users can run both nesting and non-nesting experiments at the same time without worries about what fix files to be linked. Resolves #2631 --- .gitignore | 2 + parm/config/gefs/config.base | 2 + parm/config/gfs/config.base | 6 +- parm/ufs/fix/gfs/atmos.fixed_files.yaml | 42 +++++------ parm/ufs/fix/gfs/land.fixed_files.yaml | 98 ++++++++++++------------- parm/ufs/fix/gfs/ocean.fixed_files.yaml | 12 +-- scripts/exgdas_enkf_sfc.sh | 8 +- scripts/exglobal_forecast.sh | 6 +- sorc/link_workflow.sh | 19 +++-- sorc/ufs_utils.fd | 2 +- ush/forecast_predet.sh | 36 ++++----- ush/gaussian_sfcanl.sh | 16 ++-- versions/fix.nest.ver | 4 - versions/fix.ver | 2 + 14 files changed, 134 insertions(+), 121 deletions(-) delete mode 100644 versions/fix.nest.ver diff --git a/.gitignore b/.gitignore index 04193fca0a..8314f5bae9 100644 --- a/.gitignore +++ b/.gitignore @@ -35,8 +35,10 @@ fix/gsi fix/lut fix/mom6 fix/orog +fix/orog_nest fix/sfc_climo fix/ugwd +fix/ugwd_nest fix/verif fix/wave diff --git a/parm/config/gefs/config.base b/parm/config/gefs/config.base index b5eb8004b4..840f33ad60 100644 --- a/parm/config/gefs/config.base +++ b/parm/config/gefs/config.base @@ -30,6 +30,8 @@ export FIXgfs=${HOMEgfs}/fix export PARMgfs=${HOMEgfs}/parm export SCRgfs=${HOMEgfs}/scripts export USHgfs=${HOMEgfs}/ush +export 
FIXorog=${FIXgfs}/orog +export FIXugwd=${FIXgfs}/ugwd ######################################################################## diff --git a/parm/config/gfs/config.base b/parm/config/gfs/config.base index c53ac908f2..7eedec8af7 100644 --- a/parm/config/gfs/config.base +++ b/parm/config/gfs/config.base @@ -35,11 +35,9 @@ export FIXam="${FIXgfs}/am" export FIXaer="${FIXgfs}/aer" export FIXcpl="${FIXgfs}/cpl" export FIXlut="${FIXgfs}/lut" -export FIXorog="${FIXgfs}/orog" export FIXcice="${FIXgfs}/cice" export FIXmom="${FIXgfs}/mom6" export FIXreg2grb2="${FIXgfs}/reg2grb2" -export FIXugwd="${FIXgfs}/ugwd" export FIXgdas="${FIXgfs}/gdas" ######################################################################## @@ -190,8 +188,12 @@ export DO_NEST="NO" # Whether to run a global-nested domain if [[ "${DO_NEST:-NO}" == "YES" ]] ; then export ntiles=7 export NEST_OUTPUT_GRID="regional_latlon" + export FIXugwd="${FIXgfs}/ugwd_nest" + export FIXorog="${FIXgfs}/orog_nest" else export ntiles=6 + export FIXugwd="${FIXgfs}/ugwd" + export FIXorog="${FIXgfs}/orog" fi # Set operational resolution diff --git a/parm/ufs/fix/gfs/atmos.fixed_files.yaml b/parm/ufs/fix/gfs/atmos.fixed_files.yaml index 8db691b49c..374c26873e 100644 --- a/parm/ufs/fix/gfs/atmos.fixed_files.yaml +++ b/parm/ufs/fix/gfs/atmos.fixed_files.yaml @@ -1,31 +1,31 @@ copy: # Atmosphere mosaic file linked as the grid_spec file (atm only) - - [$(FIXgfs)/orog/$(CASE)/$(CASE)_mosaic.nc, $(DATA)/INPUT/grid_spec.nc] + - [$(FIXorog)/$(CASE)/$(CASE)_mosaic.nc, $(DATA)/INPUT/grid_spec.nc] # Atmosphere grid tile files - - [$(FIXgfs)/orog/$(CASE)/$(CASE)_grid.tile1.nc, $(DATA)/INPUT/] - - [$(FIXgfs)/orog/$(CASE)/$(CASE)_grid.tile2.nc, $(DATA)/INPUT/] - - [$(FIXgfs)/orog/$(CASE)/$(CASE)_grid.tile3.nc, $(DATA)/INPUT/] - - [$(FIXgfs)/orog/$(CASE)/$(CASE)_grid.tile4.nc, $(DATA)/INPUT/] - - [$(FIXgfs)/orog/$(CASE)/$(CASE)_grid.tile5.nc, $(DATA)/INPUT/] - - [$(FIXgfs)/orog/$(CASE)/$(CASE)_grid.tile6.nc, $(DATA)/INPUT/] + - 
[$(FIXorog)/$(CASE)/$(CASE)_grid.tile1.nc, $(DATA)/INPUT/] + - [$(FIXorog)/$(CASE)/$(CASE)_grid.tile2.nc, $(DATA)/INPUT/] + - [$(FIXorog)/$(CASE)/$(CASE)_grid.tile3.nc, $(DATA)/INPUT/] + - [$(FIXorog)/$(CASE)/$(CASE)_grid.tile4.nc, $(DATA)/INPUT/] + - [$(FIXorog)/$(CASE)/$(CASE)_grid.tile5.nc, $(DATA)/INPUT/] + - [$(FIXorog)/$(CASE)/$(CASE)_grid.tile6.nc, $(DATA)/INPUT/] - # oro_data_ls and oro_data_ss files from FIXgfs/ugwd - - [$(FIXgfs)/ugwd/$(CASE)/$(CASE)_oro_data_ls.tile1.nc, $(DATA)/INPUT/oro_data_ls.tile1.nc] - - [$(FIXgfs)/ugwd/$(CASE)/$(CASE)_oro_data_ls.tile2.nc, $(DATA)/INPUT/oro_data_ls.tile2.nc] - - [$(FIXgfs)/ugwd/$(CASE)/$(CASE)_oro_data_ls.tile3.nc, $(DATA)/INPUT/oro_data_ls.tile3.nc] - - [$(FIXgfs)/ugwd/$(CASE)/$(CASE)_oro_data_ls.tile4.nc, $(DATA)/INPUT/oro_data_ls.tile4.nc] - - [$(FIXgfs)/ugwd/$(CASE)/$(CASE)_oro_data_ls.tile5.nc, $(DATA)/INPUT/oro_data_ls.tile5.nc] - - [$(FIXgfs)/ugwd/$(CASE)/$(CASE)_oro_data_ls.tile6.nc, $(DATA)/INPUT/oro_data_ls.tile6.nc] - - [$(FIXgfs)/ugwd/$(CASE)/$(CASE)_oro_data_ss.tile1.nc, $(DATA)/INPUT/oro_data_ss.tile1.nc] - - [$(FIXgfs)/ugwd/$(CASE)/$(CASE)_oro_data_ss.tile2.nc, $(DATA)/INPUT/oro_data_ss.tile2.nc] - - [$(FIXgfs)/ugwd/$(CASE)/$(CASE)_oro_data_ss.tile3.nc, $(DATA)/INPUT/oro_data_ss.tile3.nc] - - [$(FIXgfs)/ugwd/$(CASE)/$(CASE)_oro_data_ss.tile4.nc, $(DATA)/INPUT/oro_data_ss.tile4.nc] - - [$(FIXgfs)/ugwd/$(CASE)/$(CASE)_oro_data_ss.tile5.nc, $(DATA)/INPUT/oro_data_ss.tile5.nc] - - [$(FIXgfs)/ugwd/$(CASE)/$(CASE)_oro_data_ss.tile6.nc, $(DATA)/INPUT/oro_data_ss.tile6.nc] + # oro_data_ls and oro_data_ss files from FIXugwd + - [$(FIXugwd)/$(CASE)/$(CASE)_oro_data_ls.tile1.nc, $(DATA)/INPUT/oro_data_ls.tile1.nc] + - [$(FIXugwd)/$(CASE)/$(CASE)_oro_data_ls.tile2.nc, $(DATA)/INPUT/oro_data_ls.tile2.nc] + - [$(FIXugwd)/$(CASE)/$(CASE)_oro_data_ls.tile3.nc, $(DATA)/INPUT/oro_data_ls.tile3.nc] + - [$(FIXugwd)/$(CASE)/$(CASE)_oro_data_ls.tile4.nc, $(DATA)/INPUT/oro_data_ls.tile4.nc] + - 
[$(FIXugwd)/$(CASE)/$(CASE)_oro_data_ls.tile5.nc, $(DATA)/INPUT/oro_data_ls.tile5.nc] + - [$(FIXugwd)/$(CASE)/$(CASE)_oro_data_ls.tile6.nc, $(DATA)/INPUT/oro_data_ls.tile6.nc] + - [$(FIXugwd)/$(CASE)/$(CASE)_oro_data_ss.tile1.nc, $(DATA)/INPUT/oro_data_ss.tile1.nc] + - [$(FIXugwd)/$(CASE)/$(CASE)_oro_data_ss.tile2.nc, $(DATA)/INPUT/oro_data_ss.tile2.nc] + - [$(FIXugwd)/$(CASE)/$(CASE)_oro_data_ss.tile3.nc, $(DATA)/INPUT/oro_data_ss.tile3.nc] + - [$(FIXugwd)/$(CASE)/$(CASE)_oro_data_ss.tile4.nc, $(DATA)/INPUT/oro_data_ss.tile4.nc] + - [$(FIXugwd)/$(CASE)/$(CASE)_oro_data_ss.tile5.nc, $(DATA)/INPUT/oro_data_ss.tile5.nc] + - [$(FIXugwd)/$(CASE)/$(CASE)_oro_data_ss.tile6.nc, $(DATA)/INPUT/oro_data_ss.tile6.nc] # GWD?? - - [$(FIXgfs)/ugwd/ugwp_limb_tau.nc, $(DATA)/ugwp_limb_tau.nc] + - [$(FIXugwd)/ugwp_limb_tau.nc, $(DATA)/ugwp_limb_tau.nc] # CO2 climatology - [$(FIXgfs)/am/co2monthlycyc.txt, $(DATA)/co2monthlycyc.txt] diff --git a/parm/ufs/fix/gfs/land.fixed_files.yaml b/parm/ufs/fix/gfs/land.fixed_files.yaml index 8e4d221dbc..bb2d060963 100644 --- a/parm/ufs/fix/gfs/land.fixed_files.yaml +++ b/parm/ufs/fix/gfs/land.fixed_files.yaml @@ -1,58 +1,58 @@ copy: - # Files from FIXgfs/orog/C??/sfc - - [$(FIXgfs)/orog/$(CASE)/sfc/$(CASE).mx$(OCNRES).facsf.tile1.nc, $(DATA)/] - - [$(FIXgfs)/orog/$(CASE)/sfc/$(CASE).mx$(OCNRES).facsf.tile2.nc, $(DATA)/] - - [$(FIXgfs)/orog/$(CASE)/sfc/$(CASE).mx$(OCNRES).facsf.tile3.nc, $(DATA)/] - - [$(FIXgfs)/orog/$(CASE)/sfc/$(CASE).mx$(OCNRES).facsf.tile4.nc, $(DATA)/] - - [$(FIXgfs)/orog/$(CASE)/sfc/$(CASE).mx$(OCNRES).facsf.tile5.nc, $(DATA)/] - - [$(FIXgfs)/orog/$(CASE)/sfc/$(CASE).mx$(OCNRES).facsf.tile6.nc, $(DATA)/] + # Files from FIXorog/C??/sfc + - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).facsf.tile1.nc, $(DATA)/] + - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).facsf.tile2.nc, $(DATA)/] + - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).facsf.tile3.nc, $(DATA)/] + - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).facsf.tile4.nc, 
$(DATA)/] + - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).facsf.tile5.nc, $(DATA)/] + - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).facsf.tile6.nc, $(DATA)/] - - [$(FIXgfs)/orog/$(CASE)/sfc/$(CASE).mx$(OCNRES).maximum_snow_albedo.tile1.nc, $(DATA)/] - - [$(FIXgfs)/orog/$(CASE)/sfc/$(CASE).mx$(OCNRES).maximum_snow_albedo.tile2.nc, $(DATA)/] - - [$(FIXgfs)/orog/$(CASE)/sfc/$(CASE).mx$(OCNRES).maximum_snow_albedo.tile3.nc, $(DATA)/] - - [$(FIXgfs)/orog/$(CASE)/sfc/$(CASE).mx$(OCNRES).maximum_snow_albedo.tile4.nc, $(DATA)/] - - [$(FIXgfs)/orog/$(CASE)/sfc/$(CASE).mx$(OCNRES).maximum_snow_albedo.tile5.nc, $(DATA)/] - - [$(FIXgfs)/orog/$(CASE)/sfc/$(CASE).mx$(OCNRES).maximum_snow_albedo.tile6.nc, $(DATA)/] + - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).maximum_snow_albedo.tile1.nc, $(DATA)/] + - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).maximum_snow_albedo.tile2.nc, $(DATA)/] + - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).maximum_snow_albedo.tile3.nc, $(DATA)/] + - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).maximum_snow_albedo.tile4.nc, $(DATA)/] + - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).maximum_snow_albedo.tile5.nc, $(DATA)/] + - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).maximum_snow_albedo.tile6.nc, $(DATA)/] - - [$(FIXgfs)/orog/$(CASE)/sfc/$(CASE).mx$(OCNRES).slope_type.tile1.nc, $(DATA)/] - - [$(FIXgfs)/orog/$(CASE)/sfc/$(CASE).mx$(OCNRES).slope_type.tile2.nc, $(DATA)/] - - [$(FIXgfs)/orog/$(CASE)/sfc/$(CASE).mx$(OCNRES).slope_type.tile3.nc, $(DATA)/] - - [$(FIXgfs)/orog/$(CASE)/sfc/$(CASE).mx$(OCNRES).slope_type.tile4.nc, $(DATA)/] - - [$(FIXgfs)/orog/$(CASE)/sfc/$(CASE).mx$(OCNRES).slope_type.tile5.nc, $(DATA)/] - - [$(FIXgfs)/orog/$(CASE)/sfc/$(CASE).mx$(OCNRES).slope_type.tile6.nc, $(DATA)/] + - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).slope_type.tile1.nc, $(DATA)/] + - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).slope_type.tile2.nc, $(DATA)/] + - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).slope_type.tile3.nc, $(DATA)/] + - 
[$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).slope_type.tile4.nc, $(DATA)/] + - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).slope_type.tile5.nc, $(DATA)/] + - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).slope_type.tile6.nc, $(DATA)/] - - [$(FIXgfs)/orog/$(CASE)/sfc/$(CASE).mx$(OCNRES).snowfree_albedo.tile1.nc, $(DATA)/] - - [$(FIXgfs)/orog/$(CASE)/sfc/$(CASE).mx$(OCNRES).snowfree_albedo.tile2.nc, $(DATA)/] - - [$(FIXgfs)/orog/$(CASE)/sfc/$(CASE).mx$(OCNRES).snowfree_albedo.tile3.nc, $(DATA)/] - - [$(FIXgfs)/orog/$(CASE)/sfc/$(CASE).mx$(OCNRES).snowfree_albedo.tile4.nc, $(DATA)/] - - [$(FIXgfs)/orog/$(CASE)/sfc/$(CASE).mx$(OCNRES).snowfree_albedo.tile5.nc, $(DATA)/] - - [$(FIXgfs)/orog/$(CASE)/sfc/$(CASE).mx$(OCNRES).snowfree_albedo.tile6.nc, $(DATA)/] + - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).snowfree_albedo.tile1.nc, $(DATA)/] + - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).snowfree_albedo.tile2.nc, $(DATA)/] + - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).snowfree_albedo.tile3.nc, $(DATA)/] + - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).snowfree_albedo.tile4.nc, $(DATA)/] + - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).snowfree_albedo.tile5.nc, $(DATA)/] + - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).snowfree_albedo.tile6.nc, $(DATA)/] - - [$(FIXgfs)/orog/$(CASE)/sfc/$(CASE).mx$(OCNRES).soil_type.tile1.nc, $(DATA)/] - - [$(FIXgfs)/orog/$(CASE)/sfc/$(CASE).mx$(OCNRES).soil_type.tile2.nc, $(DATA)/] - - [$(FIXgfs)/orog/$(CASE)/sfc/$(CASE).mx$(OCNRES).soil_type.tile3.nc, $(DATA)/] - - [$(FIXgfs)/orog/$(CASE)/sfc/$(CASE).mx$(OCNRES).soil_type.tile4.nc, $(DATA)/] - - [$(FIXgfs)/orog/$(CASE)/sfc/$(CASE).mx$(OCNRES).soil_type.tile5.nc, $(DATA)/] - - [$(FIXgfs)/orog/$(CASE)/sfc/$(CASE).mx$(OCNRES).soil_type.tile6.nc, $(DATA)/] + - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).soil_type.tile1.nc, $(DATA)/] + - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).soil_type.tile2.nc, $(DATA)/] + - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).soil_type.tile3.nc, $(DATA)/] + - 
[$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).soil_type.tile4.nc, $(DATA)/] + - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).soil_type.tile5.nc, $(DATA)/] + - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).soil_type.tile6.nc, $(DATA)/] - - [$(FIXgfs)/orog/$(CASE)/sfc/$(CASE).mx$(OCNRES).substrate_temperature.tile1.nc, $(DATA)/] - - [$(FIXgfs)/orog/$(CASE)/sfc/$(CASE).mx$(OCNRES).substrate_temperature.tile2.nc, $(DATA)/] - - [$(FIXgfs)/orog/$(CASE)/sfc/$(CASE).mx$(OCNRES).substrate_temperature.tile3.nc, $(DATA)/] - - [$(FIXgfs)/orog/$(CASE)/sfc/$(CASE).mx$(OCNRES).substrate_temperature.tile4.nc, $(DATA)/] - - [$(FIXgfs)/orog/$(CASE)/sfc/$(CASE).mx$(OCNRES).substrate_temperature.tile5.nc, $(DATA)/] - - [$(FIXgfs)/orog/$(CASE)/sfc/$(CASE).mx$(OCNRES).substrate_temperature.tile6.nc, $(DATA)/] + - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).substrate_temperature.tile1.nc, $(DATA)/] + - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).substrate_temperature.tile2.nc, $(DATA)/] + - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).substrate_temperature.tile3.nc, $(DATA)/] + - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).substrate_temperature.tile4.nc, $(DATA)/] + - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).substrate_temperature.tile5.nc, $(DATA)/] + - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).substrate_temperature.tile6.nc, $(DATA)/] - - [$(FIXgfs)/orog/$(CASE)/sfc/$(CASE).mx$(OCNRES).vegetation_greenness.tile1.nc, $(DATA)/] - - [$(FIXgfs)/orog/$(CASE)/sfc/$(CASE).mx$(OCNRES).vegetation_greenness.tile2.nc, $(DATA)/] - - [$(FIXgfs)/orog/$(CASE)/sfc/$(CASE).mx$(OCNRES).vegetation_greenness.tile3.nc, $(DATA)/] - - [$(FIXgfs)/orog/$(CASE)/sfc/$(CASE).mx$(OCNRES).vegetation_greenness.tile4.nc, $(DATA)/] - - [$(FIXgfs)/orog/$(CASE)/sfc/$(CASE).mx$(OCNRES).vegetation_greenness.tile5.nc, $(DATA)/] - - [$(FIXgfs)/orog/$(CASE)/sfc/$(CASE).mx$(OCNRES).vegetation_greenness.tile6.nc, $(DATA)/] + - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).vegetation_greenness.tile1.nc, $(DATA)/] + - 
[$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).vegetation_greenness.tile2.nc, $(DATA)/] + - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).vegetation_greenness.tile3.nc, $(DATA)/] + - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).vegetation_greenness.tile4.nc, $(DATA)/] + - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).vegetation_greenness.tile5.nc, $(DATA)/] + - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).vegetation_greenness.tile6.nc, $(DATA)/] - - [$(FIXgfs)/orog/$(CASE)/sfc/$(CASE).mx$(OCNRES).vegetation_type.tile1.nc, $(DATA)/] - - [$(FIXgfs)/orog/$(CASE)/sfc/$(CASE).mx$(OCNRES).vegetation_type.tile2.nc, $(DATA)/] - - [$(FIXgfs)/orog/$(CASE)/sfc/$(CASE).mx$(OCNRES).vegetation_type.tile3.nc, $(DATA)/] - - [$(FIXgfs)/orog/$(CASE)/sfc/$(CASE).mx$(OCNRES).vegetation_type.tile4.nc, $(DATA)/] - - [$(FIXgfs)/orog/$(CASE)/sfc/$(CASE).mx$(OCNRES).vegetation_type.tile5.nc, $(DATA)/] - - [$(FIXgfs)/orog/$(CASE)/sfc/$(CASE).mx$(OCNRES).vegetation_type.tile6.nc, $(DATA)/] + - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).vegetation_type.tile1.nc, $(DATA)/] + - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).vegetation_type.tile2.nc, $(DATA)/] + - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).vegetation_type.tile3.nc, $(DATA)/] + - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).vegetation_type.tile4.nc, $(DATA)/] + - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).vegetation_type.tile5.nc, $(DATA)/] + - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).vegetation_type.tile6.nc, $(DATA)/] diff --git a/parm/ufs/fix/gfs/ocean.fixed_files.yaml b/parm/ufs/fix/gfs/ocean.fixed_files.yaml index 4ef19bab0d..1ca8ce7a68 100644 --- a/parm/ufs/fix/gfs/ocean.fixed_files.yaml +++ b/parm/ufs/fix/gfs/ocean.fixed_files.yaml @@ -1,9 +1,9 @@ copy: # Orography data tile files - - [$(FIXgfs)/orog/$(CASE)/$(CASE).mx$(OCNRES)_oro_data.tile1.nc, $(DATA)/INPUT/oro_data.tile1.nc] - - [$(FIXgfs)/orog/$(CASE)/$(CASE).mx$(OCNRES)_oro_data.tile2.nc, $(DATA)/INPUT/oro_data.tile2.nc] - - 
[$(FIXgfs)/orog/$(CASE)/$(CASE).mx$(OCNRES)_oro_data.tile3.nc, $(DATA)/INPUT/oro_data.tile3.nc] - - [$(FIXgfs)/orog/$(CASE)/$(CASE).mx$(OCNRES)_oro_data.tile4.nc, $(DATA)/INPUT/oro_data.tile4.nc] - - [$(FIXgfs)/orog/$(CASE)/$(CASE).mx$(OCNRES)_oro_data.tile5.nc, $(DATA)/INPUT/oro_data.tile5.nc] - - [$(FIXgfs)/orog/$(CASE)/$(CASE).mx$(OCNRES)_oro_data.tile6.nc, $(DATA)/INPUT/oro_data.tile6.nc] + - [$(FIXorog)/$(CASE)/$(CASE).mx$(OCNRES)_oro_data.tile1.nc, $(DATA)/INPUT/oro_data.tile1.nc] + - [$(FIXorog)/$(CASE)/$(CASE).mx$(OCNRES)_oro_data.tile2.nc, $(DATA)/INPUT/oro_data.tile2.nc] + - [$(FIXorog)/$(CASE)/$(CASE).mx$(OCNRES)_oro_data.tile3.nc, $(DATA)/INPUT/oro_data.tile3.nc] + - [$(FIXorog)/$(CASE)/$(CASE).mx$(OCNRES)_oro_data.tile4.nc, $(DATA)/INPUT/oro_data.tile4.nc] + - [$(FIXorog)/$(CASE)/$(CASE).mx$(OCNRES)_oro_data.tile5.nc, $(DATA)/INPUT/oro_data.tile5.nc] + - [$(FIXorog)/$(CASE)/$(CASE).mx$(OCNRES)_oro_data.tile6.nc, $(DATA)/INPUT/oro_data.tile6.nc] diff --git a/scripts/exgdas_enkf_sfc.sh b/scripts/exgdas_enkf_sfc.sh index cd6a40a0fa..2720dd5d5f 100755 --- a/scripts/exgdas_enkf_sfc.sh +++ b/scripts/exgdas_enkf_sfc.sh @@ -162,8 +162,8 @@ if [ $DOIAU = "YES" ]; then "${DATA}/fnbgsi.${cmem}" ${NLN} "${COM_ATMOS_RESTART_MEM}/${bPDY}.${bcyc}0000.sfcanl_data.tile${n}.nc" \ "${DATA}/fnbgso.${cmem}" - ${NLN} "${FIXgfs}/orog/${CASE}/${CASE}_grid.tile${n}.nc" "${DATA}/fngrid.${cmem}" - ${NLN} "${FIXgfs}/orog/${CASE}/${CASE}.mx${OCNRES}_oro_data.tile${n}.nc" "${DATA}/fnorog.${cmem}" + ${NLN} "${FIXorog}/${CASE}/${CASE}_grid.tile${n}.nc" "${DATA}/fngrid.${cmem}" + ${NLN} "${FIXorog}/${CASE}/${CASE}.mx${OCNRES}_oro_data.tile${n}.nc" "${DATA}/fnorog.${cmem}" if [[ ${GSI_SOILANAL} = "YES" ]]; then FHR=6 @@ -207,8 +207,8 @@ if [ $DOSFCANL_ENKF = "YES" ]; then "${DATA}/fnbgsi.${cmem}" ${NLN} "${COM_ATMOS_RESTART_MEM}/${PDY}.${cyc}0000.sfcanl_data.tile${n}.nc" \ "${DATA}/fnbgso.${cmem}" - ${NLN} "${FIXgfs}/orog/${CASE}/${CASE}_grid.tile${n}.nc" "${DATA}/fngrid.${cmem}" - 
${NLN} "${FIXgfs}/orog/${CASE}/${CASE}.mx${OCNRES}_oro_data.tile${n}.nc" "${DATA}/fnorog.${cmem}" + ${NLN} "${FIXorog}/${CASE}/${CASE}_grid.tile${n}.nc" "${DATA}/fngrid.${cmem}" + ${NLN} "${FIXorog}/${CASE}/${CASE}.mx${OCNRES}_oro_data.tile${n}.nc" "${DATA}/fnorog.${cmem}" done diff --git a/scripts/exglobal_forecast.sh b/scripts/exglobal_forecast.sh index 590c3aba17..e2bccd43db 100755 --- a/scripts/exglobal_forecast.sh +++ b/scripts/exglobal_forecast.sh @@ -38,9 +38,9 @@ ## Restart files: ## ## Fix files: -## 1. computing grid, ${FIXgfs}/orog/$CASE/${CASE}_grid.tile${n}.nc -## 2. orography data, ${FIXgfs}/orog/$CASE/${CASE}.mx${OCNRES}_oro_data.tile${n}.nc -## 3. mosaic data, ${FIXgfs}/orog/$CASE/${CASE}_mosaic.nc +## 1. computing grid, ${FIXorog}/$CASE/${CASE}_grid.tile${n}.nc +## 2. orography data, ${FIXorog}/$CASE/${CASE}.mx${OCNRES}_oro_data.tile${n}.nc +## 3. mosaic data, ${FIXorog}/$CASE/${CASE}_mosaic.nc ## 4. Global O3 data, ${FIXgfs}/am/${O3FORC} ## 5. Global H2O data, ${FIXgfs}/am/${H2OFORC} ## 6. 
Global solar constant data, ${FIXgfs}/am/global_solarconstant_noaa_an.txt diff --git a/sorc/link_workflow.sh b/sorc/link_workflow.sh index 593a4694c1..1f0de0745c 100755 --- a/sorc/link_workflow.sh +++ b/sorc/link_workflow.sh @@ -84,10 +84,6 @@ esac # Source fix version file source "${HOMEgfs}/versions/fix.ver" -# global-nest uses different versions of orog and ugwd -if [[ "${LINK_NEST:-OFF}" == "ON" ]] ; then - source "${HOMEgfs}/versions/fix.nest.ver" -fi # Link python pacakges in ush/python # TODO: This will be unnecessary when these are part of the virtualenv @@ -134,7 +130,20 @@ do fix_ver="${dir}_ver" ${LINK_OR_COPY} "${FIX_DIR}/${dir}/${!fix_ver}" "${dir}" done - +# global-nest uses different versions of orog and ugwd +if [[ "${LINK_NEST:-OFF}" == "ON" ]] ; then + for dir in orog \ + ugwd + do + nestdir=${dir}_nest + if [[ -d "${nestdir}" ]]; then + [[ "${RUN_ENVIR}" == "nco" ]] && chmod -R 755 "${nestdir}" + rm -rf "${nestdir}" + fi + fix_ver="${dir}_nest_ver" + ${LINK_OR_COPY} "${FIX_DIR}/${dir}/${!fix_ver}" "${nestdir}" + done +fi #--------------------------------------- #--add files from external repositories diff --git a/sorc/ufs_utils.fd b/sorc/ufs_utils.fd index f42fae239d..2794d413d0 160000 --- a/sorc/ufs_utils.fd +++ b/sorc/ufs_utils.fd @@ -1 +1 @@ -Subproject commit f42fae239d0824f7b9a83c9afdc3d980894c7df8 +Subproject commit 2794d413d083b43d9ba37a15375d5c61b610d29e diff --git a/ush/forecast_predet.sh b/ush/forecast_predet.sh index 2f19b2c28e..9c0d90b1dc 100755 --- a/ush/forecast_predet.sh +++ b/ush/forecast_predet.sh @@ -346,18 +346,18 @@ FV3_predet(){ FNSNOC=${FNSNOC:-"${FIXgfs}/am/global_snoclim.1.875.grb"} FNZORC=${FNZORC:-"igbp"} FNAISC=${FNAISC:-"${FIXgfs}/am/IMS-NIC.blended.ice.monthly.clim.grb"} - FNALBC2=${FNALBC2:-"${FIXgfs}/orog/${CASE}/sfc/${CASE}.mx${OCNRES}.facsf.tileX.nc"} - FNTG3C=${FNTG3C:-"${FIXgfs}/orog/${CASE}/sfc/${CASE}.mx${OCNRES}.substrate_temperature.tileX.nc"} - 
FNVEGC=${FNVEGC:-"${FIXgfs}/orog/${CASE}/sfc/${CASE}.mx${OCNRES}.vegetation_greenness.tileX.nc"} + FNALBC2=${FNALBC2:-"${FIXorog}/${CASE}/sfc/${CASE}.mx${OCNRES}.facsf.tileX.nc"} + FNTG3C=${FNTG3C:-"${FIXorog}/${CASE}/sfc/${CASE}.mx${OCNRES}.substrate_temperature.tileX.nc"} + FNVEGC=${FNVEGC:-"${FIXorog}/${CASE}/sfc/${CASE}.mx${OCNRES}.vegetation_greenness.tileX.nc"} FNMSKH=${FNMSKH:-"${FIXgfs}/am/global_slmask.t1534.3072.1536.grb"} - FNVMNC=${FNVMNC:-"${FIXgfs}/orog/${CASE}/sfc/${CASE}.mx${OCNRES}.vegetation_greenness.tileX.nc"} - FNVMXC=${FNVMXC:-"${FIXgfs}/orog/${CASE}/sfc/${CASE}.mx${OCNRES}.vegetation_greenness.tileX.nc"} - FNSLPC=${FNSLPC:-"${FIXgfs}/orog/${CASE}/sfc/${CASE}.mx${OCNRES}.slope_type.tileX.nc"} - FNALBC=${FNALBC:-"${FIXgfs}/orog/${CASE}/sfc/${CASE}.mx${OCNRES}.snowfree_albedo.tileX.nc"} - FNVETC=${FNVETC:-"${FIXgfs}/orog/${CASE}/sfc/${CASE}.mx${OCNRES}.vegetation_type.tileX.nc"} - FNSOTC=${FNSOTC:-"${FIXgfs}/orog/${CASE}/sfc/${CASE}.mx${OCNRES}.soil_type.tileX.nc"} - FNSOCC=${FNSOCC:-"${FIXgfs}/orog/${CASE}/sfc/${CASE}.mx${OCNRES}.soil_color.tileX.nc"} - FNABSC=${FNABSC:-"${FIXgfs}/orog/${CASE}/sfc/${CASE}.mx${OCNRES}.maximum_snow_albedo.tileX.nc"} + FNVMNC=${FNVMNC:-"${FIXorog}/${CASE}/sfc/${CASE}.mx${OCNRES}.vegetation_greenness.tileX.nc"} + FNVMXC=${FNVMXC:-"${FIXorog}/${CASE}/sfc/${CASE}.mx${OCNRES}.vegetation_greenness.tileX.nc"} + FNSLPC=${FNSLPC:-"${FIXorog}/${CASE}/sfc/${CASE}.mx${OCNRES}.slope_type.tileX.nc"} + FNALBC=${FNALBC:-"${FIXorog}/${CASE}/sfc/${CASE}.mx${OCNRES}.snowfree_albedo.tileX.nc"} + FNVETC=${FNVETC:-"${FIXorog}/${CASE}/sfc/${CASE}.mx${OCNRES}.vegetation_type.tileX.nc"} + FNSOTC=${FNSOTC:-"${FIXorog}/${CASE}/sfc/${CASE}.mx${OCNRES}.soil_type.tileX.nc"} + FNSOCC=${FNSOCC:-"${FIXorog}/${CASE}/sfc/${CASE}.mx${OCNRES}.soil_color.tileX.nc"} + FNABSC=${FNABSC:-"${FIXorog}/${CASE}/sfc/${CASE}.mx${OCNRES}.maximum_snow_albedo.tileX.nc"} FNSMCC=${FNSMCC:-"${FIXgfs}/am/global_soilmgldas.statsgo.t${JCAP}.${LONB}.${LATB}.grb"} # If 
the appropriate resolution fix file is not present, use the highest resolution available (T1534) @@ -365,21 +365,21 @@ FV3_predet(){ # Grid and orography data if [[ "${cplflx}" == ".false." ]] ; then - ${NCP} "${FIXgfs}/orog/${CASE}/${CASE}_mosaic.nc" "${DATA}/INPUT/grid_spec.nc" + ${NCP} "${FIXorog}/${CASE}/${CASE}_mosaic.nc" "${DATA}/INPUT/grid_spec.nc" else - ${NCP} "${FIXgfs}/orog/${CASE}/${CASE}_mosaic.nc" "${DATA}/INPUT/${CASE}_mosaic.nc" + ${NCP} "${FIXorog}/${CASE}/${CASE}_mosaic.nc" "${DATA}/INPUT/${CASE}_mosaic.nc" fi # Files for GWD - ${NCP} "${FIXgfs}/ugwd/ugwp_limb_tau.nc" "${DATA}/ugwp_limb_tau.nc" + ${NCP} "${FIXugwd}/ugwp_limb_tau.nc" "${DATA}/ugwp_limb_tau.nc" # Files for orography, GWD tiles local tt for (( tt = 1; tt <= ntiles; tt++ )); do - ${NCP} "${FIXgfs}/orog/${CASE}/${CASE}.mx${OCNRES}_oro_data.tile${tt}.nc" "${DATA}/INPUT/oro_data.tile${tt}.nc" - ${NCP} "${FIXgfs}/orog/${CASE}/${CASE}_grid.tile${tt}.nc" "${DATA}/INPUT/${CASE}_grid.tile${tt}.nc" - ${NCP} "${FIXgfs}/ugwd/${CASE}/${CASE}_oro_data_ls.tile${tt}.nc" "${DATA}/INPUT/oro_data_ls.tile${tt}.nc" - ${NCP} "${FIXgfs}/ugwd/${CASE}/${CASE}_oro_data_ss.tile${tt}.nc" "${DATA}/INPUT/oro_data_ss.tile${tt}.nc" + ${NCP} "${FIXorog}/${CASE}/${CASE}.mx${OCNRES}_oro_data.tile${tt}.nc" "${DATA}/INPUT/oro_data.tile${tt}.nc" + ${NCP} "${FIXorog}/${CASE}/${CASE}_grid.tile${tt}.nc" "${DATA}/INPUT/${CASE}_grid.tile${tt}.nc" + ${NCP} "${FIXugwd}/${CASE}/${CASE}_oro_data_ls.tile${tt}.nc" "${DATA}/INPUT/oro_data_ls.tile${tt}.nc" + ${NCP} "${FIXugwd}/${CASE}/${CASE}_oro_data_ss.tile${tt}.nc" "${DATA}/INPUT/oro_data_ss.tile${tt}.nc" done if [[ "${DO_NEST:-NO}" == "YES" ]] ; then ${NLN} "${DATA}/INPUT/oro_data.tile7.nc" "${DATA}/INPUT/oro_data.nest02.tile7.nc" diff --git a/ush/gaussian_sfcanl.sh b/ush/gaussian_sfcanl.sh index 794dbb7f7f..4ac762824b 100755 --- a/ush/gaussian_sfcanl.sh +++ b/ush/gaussian_sfcanl.sh @@ -73,7 +73,7 @@ # # programs : $GAUSFCANLEXE # -# fixed data : 
${FIXgfs}/orog/${CASE}/${CASE}.mx${OCNRES}_oro_data.tile*.nc +# fixed data : ${FIXorog}/${CASE}/${CASE}.mx${OCNRES}_oro_data.tile*.nc # ${FIXWGTS} # ${FIXgfs}/am/global_hyblev.l65.txt # @@ -111,7 +111,7 @@ LATB_SFC=${LATB_SFC:-$LATB_CASE} DONST=${DONST:-"NO"} LEVS=${LEVS:-64} LEVSP1=$(($LEVS+1)) -FIXWGTS=${FIXWGTS:-${FIXgfs}/orog/${CASE}/fv3_SCRIP_${CASE}_GRIDSPEC_lon${LONB_SFC}_lat${LATB_SFC}.gaussian.neareststod.nc} +FIXWGTS=${FIXWGTS:-${FIXorog}/${CASE}/fv3_SCRIP_${CASE}_GRIDSPEC_lon${LONB_SFC}_lat${LATB_SFC}.gaussian.neareststod.nc} DATA=${DATA:-$(pwd)} # Filenames. @@ -161,12 +161,12 @@ ${NLN} "${COM_ATMOS_RESTART}/${PDY}.${cyc}0000.sfcanl_data.tile5.nc" "./anal.til ${NLN} "${COM_ATMOS_RESTART}/${PDY}.${cyc}0000.sfcanl_data.tile6.nc" "./anal.tile6.nc" # input orography tiles -${NLN} "${FIXgfs}/orog/${CASE}/${CASE}.mx${OCNRES}_oro_data.tile1.nc" "./orog.tile1.nc" -${NLN} "${FIXgfs}/orog/${CASE}/${CASE}.mx${OCNRES}_oro_data.tile2.nc" "./orog.tile2.nc" -${NLN} "${FIXgfs}/orog/${CASE}/${CASE}.mx${OCNRES}_oro_data.tile3.nc" "./orog.tile3.nc" -${NLN} "${FIXgfs}/orog/${CASE}/${CASE}.mx${OCNRES}_oro_data.tile4.nc" "./orog.tile4.nc" -${NLN} "${FIXgfs}/orog/${CASE}/${CASE}.mx${OCNRES}_oro_data.tile5.nc" "./orog.tile5.nc" -${NLN} "${FIXgfs}/orog/${CASE}/${CASE}.mx${OCNRES}_oro_data.tile6.nc" "./orog.tile6.nc" +${NLN} "${FIXorog}/${CASE}/${CASE}.mx${OCNRES}_oro_data.tile1.nc" "./orog.tile1.nc" +${NLN} "${FIXorog}/${CASE}/${CASE}.mx${OCNRES}_oro_data.tile2.nc" "./orog.tile2.nc" +${NLN} "${FIXorog}/${CASE}/${CASE}.mx${OCNRES}_oro_data.tile3.nc" "./orog.tile3.nc" +${NLN} "${FIXorog}/${CASE}/${CASE}.mx${OCNRES}_oro_data.tile4.nc" "./orog.tile4.nc" +${NLN} "${FIXorog}/${CASE}/${CASE}.mx${OCNRES}_oro_data.tile5.nc" "./orog.tile5.nc" +${NLN} "${FIXorog}/${CASE}/${CASE}.mx${OCNRES}_oro_data.tile6.nc" "./orog.tile6.nc" ${NLN} "${SIGLEVEL}" "./vcoord.txt" diff --git a/versions/fix.nest.ver b/versions/fix.nest.ver deleted file mode 100644 index d08ea32af1..0000000000 --- 
a/versions/fix.nest.ver +++ /dev/null @@ -1,4 +0,0 @@ -#!/bin/bash -# Fix file subfolder versions -export orog_ver=global-nest.20240419 -export ugwd_ver=global-nest.20240419 diff --git a/versions/fix.ver b/versions/fix.ver index 6b5ec7b670..1d54572c0b 100644 --- a/versions/fix.ver +++ b/versions/fix.ver @@ -21,3 +21,5 @@ export sfc_climo_ver=20220805 export ugwd_ver=20231027 export verif_ver=20220805 export wave_ver=20240105 +export orog_nest_ver=global-nest.20240419 +export ugwd_nest_ver=global-nest.20240419 From 5b2a3d449a0835cec2663aabb06f1c47a3faf84e Mon Sep 17 00:00:00 2001 From: Walter Kolczynski - NOAA Date: Wed, 12 Jun 2024 13:31:55 -0400 Subject: [PATCH 03/20] Add COM template for JEDI obs (#2678) Adds a COM template to define a path to store obs processed for JEDI. This will allow UFSDA to stop writing to COM_OBS, which should be read-only as it belongs to obsproc in operations. No functional change yet. --- parm/config/gfs/config.com | 2 ++ 1 file changed, 2 insertions(+) diff --git a/parm/config/gfs/config.com b/parm/config/gfs/config.com index 004ca1affb..ec867e64ba 100644 --- a/parm/config/gfs/config.com +++ b/parm/config/gfs/config.com @@ -49,6 +49,8 @@ COM_BASE='${ROTDIR}/${RUN}.${YMD}/${HH}/${MEMDIR}' declare -rx COM_TOP_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}' declare -rx COM_CONF_TMPL=${COM_BASE}'/conf' +declare -rx COM_OBS_JEDI=${COM_BASE}'/obs_jedi' + declare -rx COM_ATMOS_INPUT_TMPL=${COM_BASE}'/model_data/atmos/input' declare -rx COM_ATMOS_RESTART_TMPL=${COM_BASE}'/model_data/atmos/restart' declare -rx COM_ATMOS_ANALYSIS_TMPL=${COM_BASE}'/analysis/atmos' From 6c19a0e3fc4400e1d39288be4ee4fc244b74f699 Mon Sep 17 00:00:00 2001 From: "Henry R. Winterbottom" <49202169+HenryWinterbottom-NOAA@users.noreply.github.com> Date: Wed, 12 Jun 2024 19:25:42 -0600 Subject: [PATCH 04/20] Replace `sleep` with `wait_for_file` (#2586) This PR addresses issue #2444. 
The following is accomplished: - All `sleep` statements are replaced with `wait_for_file` for the relevant scripts beneath `scripts` and `ush`; - Indentation and shell-norms are updated where applicable. Note: The WAFS scripts are not updated as per @aerorahul direction. Resolves #2444 --------- Co-authored-by: henrywinterbottom-wxdev Co-authored-by: Walter Kolczynski - NOAA --- env/HERA.env | 1 - scripts/exgfs_atmos_awips_20km_1p0deg.sh | 22 ++++----- scripts/exgfs_atmos_postsnd.sh | 24 ++++------ scripts/exgfs_wave_nawips.sh | 28 +++-------- scripts/exgfs_wave_post_gridded_sbs.sh | 26 +++++----- scripts/exgfs_wave_prdgen_gridded.sh | 32 ++++--------- ush/gfs_bufr.sh | 26 ++++------ ush/gfs_bufr_netcdf.sh | 24 +++------- ush/wave_tar.sh | 60 ++++++++++++------------ versions/run.hera.ver | 1 + 10 files changed, 89 insertions(+), 155 deletions(-) diff --git a/env/HERA.env b/env/HERA.env index ccaaea32e7..2157e90031 100755 --- a/env/HERA.env +++ b/env/HERA.env @@ -316,7 +316,6 @@ elif [[ "${step}" = "gempak" ]]; then export NTHREADS_GEMPAK=${nth_gempak:-1} [[ ${NTHREADS_GEMPAK} -gt ${nth_max} ]] && export NTHREADS_GEMPAK=${nth_max} - elif [[ "${step}" = "fit2obs" ]]; then nth_max=$((npe_node_max / npe_node_fit2obs)) diff --git a/scripts/exgfs_atmos_awips_20km_1p0deg.sh b/scripts/exgfs_atmos_awips_20km_1p0deg.sh index 490875b2c4..4959bbd8e8 100755 --- a/scripts/exgfs_atmos_awips_20km_1p0deg.sh +++ b/scripts/exgfs_atmos_awips_20km_1p0deg.sh @@ -43,20 +43,14 @@ source "${USHgfs}/product_functions.sh" ############################################### # Wait for the availability of the pgrb file ############################################### -icnt=1 -while (( icnt < 1000 )); do - if [[ -s "${COM_ATMOS_GRIB_0p25}/${RUN}.${cycle}.pgrb2b.0p25.f${fcsthrs}.idx" ]]; then - break - fi - - sleep 10 - icnt=$((icnt + 1)) - if (( icnt >= 180 )); then - msg="FATAL ERROR: No GFS pgrb2 file after 30 min of waiting" - err_exit "${msg}" - exit 5 - fi -done +sleep_interval=10 
+max_tries=180 +idxfile="${COM_ATMOS_GRIB_0p25}/${RUN}.${cycle}.pgrb2b.0p25.f${fcsthrs}.idx" +if ! wait_for_file "${idxfile}" "${sleep_interval}" "${max_tries}"; then + msg="FATAL ERROR: No GFS pgrb2 file after waiting" + err_exit "${msg}" + exit 5 +fi ######################################## diff --git a/scripts/exgfs_atmos_postsnd.sh b/scripts/exgfs_atmos_postsnd.sh index 23c41157fe..caf5443a50 100755 --- a/scripts/exgfs_atmos_postsnd.sh +++ b/scripts/exgfs_atmos_postsnd.sh @@ -50,6 +50,8 @@ declare -x LEVS ### Loop for the hour and wait for the sigma and surface flux file: export FSTART=$STARTHOUR +sleep_interval=10 +max_tries=360 # while [ $FSTART -lt $ENDHOUR ] do @@ -69,28 +71,18 @@ export FINT=$NINT1 export MAKEBUFR=YES fi - ic=0 - while [ $ic -lt 1000 ]; do - if [[ ! -f "${COM_ATMOS_HISTORY}/${RUN}.${cycle}.atm.logf${FEND}.${logfm}" ]]; then - sleep 10 - ic=$(expr $ic + 1) - else - break - fi - - if [ $ic -ge 360 ] - then - err_exit "COULD NOT LOCATE logf$FEND file AFTER 1 HOUR" - fi - done + filename="${COM_ATMOS_HISTORY}/${RUN}.${cycle}.atm.logf${FEND}.${logfm}" + if ! 
wait_for_file "${filename}" "${sleep_interval}" "${max_tries}"; then + err_exit "FATAL ERROR: logf${FEND} not found after waiting $((sleep_interval * ( max_tries - 1) )) secs" + fi ## 1-hourly output before $NEND1, 3-hourly output after - if [ $FEND -gt $NEND1 ]; then + if [[ $((10#$FEND)) -gt $((10#$NEND1)) ]]; then export FINT=$NINT3 fi ${USHgfs}/gfs_bufr.sh - export FSTART=$FEND + export FSTART="${FEND}" done ############################################################## diff --git a/scripts/exgfs_wave_nawips.sh b/scripts/exgfs_wave_nawips.sh index 69c4e54ebb..949425cbc1 100755 --- a/scripts/exgfs_wave_nawips.sh +++ b/scripts/exgfs_wave_nawips.sh @@ -44,6 +44,7 @@ pdsext=no g2tbls=g2varswmo2.tbl NAGRIB=nagrib2 +sleep_interval=20 maxtries=15 fhcnt=${fstart} while [ ${fhcnt} -le ${FHMAX_WAV} ]; do @@ -72,28 +73,11 @@ while [ ${fhcnt} -le ${FHMAX_WAV} ]; do esac GRIBIN="${COM_WAVE_GRID}/${RUNwave}.${cycle}.${grdIDin}.f${fhr}.grib2" GRIBIN_chk=${GRIBIN}.idx - - icnt=1 - while [ ${icnt} -lt 1000 ]; do - if [ -r ${GRIBIN_chk} ] ; then - break - else - let "icnt=icnt+1" - sleep 20 - fi - if [ ${icnt} -ge ${maxtries} ]; then - msg="ABORTING after 5 minutes of waiting for ${GRIBIN}." - echo ' ' - echo '**************************** ' - echo '*** ERROR : NO GRIB FILE *** ' - echo '**************************** ' - echo ' ' - echo ${msg} - set_trace - echo "${RUNwave} ${grdID} ${fhr} prdgen ${date} ${cycle} : GRIB file missing." >> ${wavelog} - err=1;export err;${errchk} || exit ${err} - fi - done + if ! wait_for_file "${GRIBIN_chk}" "${sleep_interval}" "${maxtries}"; then + echo "FATAL ERROR: ${GRIBIN_chk} not found after waiting $((sleep_interval * ( max_tries - 1))) secs" + echo "${RUNwave} ${grdID} ${fhr} prdgen ${date} ${cycle} : GRIB file missing." 
>> "${wavelog}" + err=1;export err;"${errchk}" || exit "${err}" + fi #if [ "$grdIDin" = "global.0p25" && "$grid" = "glo_30m" ]; then if [ "${grdIDin}" = "global.0p25" ]; then diff --git a/scripts/exgfs_wave_post_gridded_sbs.sh b/scripts/exgfs_wave_post_gridded_sbs.sh index cee6d40b49..02aa8c456d 100755 --- a/scripts/exgfs_wave_post_gridded_sbs.sh +++ b/scripts/exgfs_wave_post_gridded_sbs.sh @@ -231,6 +231,7 @@ source "${USHgfs}/preamble.sh" fhr=$FHMIN_WAV fi fhrg=$fhr + sleep_interval=10 iwaitmax=120 # Maximum loop cycles for waiting until wave component output file is ready (fails after max) while [ $fhr -le $FHMAX_WAV ]; do @@ -253,26 +254,21 @@ source "${USHgfs}/preamble.sh" export GRDIDATA=${DATA}/output_$YMDHMS # Gridded data (main part, need to be run side-by-side with forecast - + if [ $fhr = $fhrg ] then - iwait=0 - for wavGRD in ${waveGRD} ; do - gfile=${COM_WAVE_HISTORY}/${WAV_MOD_TAG}.out_grd.${wavGRD}.${YMD}.${HMS} - while [ ! -s ${gfile} ]; do sleep 10; let iwait=iwait+1; done - if [ $iwait -eq $iwaitmax ]; then - echo '*************************************************** ' - echo " FATAL ERROR : NO RAW FIELD OUTPUT FILE out_grd.$grdID " - echo '*************************************************** ' - echo ' ' - set_trace + + for wavGRD in ${waveGRD}; do + gfile="${COM_WAVE_HISTORY}/${WAV_MOD_TAG}.out_grd.${wavGRD}.${YMD}.${HMS}" + if ! wait_for_file "${gfile}" "${sleep_interval}" "${iwaitmax}"; then + echo " FATAL ERROR : NO RAW FIELD OUTPUT FILE out_grd.${grdID}" echo "${WAV_MOD_TAG} post ${grdID} ${PDY} ${cycle} : field output missing." 
- err=3; export err;${errchk} - exit $err + err=3; export err; "${errchk}" + exit "${err}" fi - ${NLN} ${gfile} ./out_grd.${wavGRD} + ${NLN} "${gfile}" "./out_grd.${wavGRD}" done - + if [ "$DOGRI_WAV" = 'YES' ] then nigrd=1 diff --git a/scripts/exgfs_wave_prdgen_gridded.sh b/scripts/exgfs_wave_prdgen_gridded.sh index c51ce60acc..c896423ac1 100755 --- a/scripts/exgfs_wave_prdgen_gridded.sh +++ b/scripts/exgfs_wave_prdgen_gridded.sh @@ -96,30 +96,14 @@ grids=${grids:-ak_10m at_10m ep_10m wc_10m glo_30m} # GRIBIN="${COM_WAVE_GRID}/${RUNwave}.${cycle}.${grdID}.f${fhr}.grib2" - GRIBIN_chk=$GRIBIN.idx - - icnt=1 - while [ $icnt -lt 1000 ]; do - if [ -r $GRIBIN_chk ] ; then - break - else - echo "Waiting for input file: $GRIBIN" - let "icnt=icnt+1" - sleep 5 - fi - if [ $icnt -ge $maxtries ]; then - msg="ABNORMAL EXIT: NO GRIB FILE FOR GRID $GRIBIN" - echo ' ' - echo '**************************** ' - echo '*** ERROR : NO GRIB FILE *** ' - echo '**************************** ' - echo ' ' - echo $msg - set_trace - echo "$RUNwave $grdID ${fhr} prdgen $date $cycle : GRIB file missing." >> $wavelog - err=1;export err;${errchk} || exit ${err} - fi - done + GRIBIN_chk="${GRIBIN}.idx" + sleep_interval=5 + max_tries=1000 + if ! wait_for_file "${GRIBIN_chk}" "${sleep_interval}" "${max_tries}"; then + echo "FATAL ERROR: ${GRIBIN_chk} not found after waiting $((sleep_interval * ( max_tries - 1))) secs" + echo "$RUNwave $grdID ${fhr} prdgen $date $cycle : GRIB file missing." 
>> $wavelog + err=1;export err;${errchk} || exit ${err} + fi GRIBOUT=$RUNwave.$cycle.$grdID.f${fhr}.clipped.grib2 diff --git a/ush/gfs_bufr.sh b/ush/gfs_bufr.sh index 287365ba88..8a7d9b1091 100755 --- a/ush/gfs_bufr.sh +++ b/ush/gfs_bufr.sh @@ -51,26 +51,20 @@ cat << EOF > gfsparm / EOF +sleep_interval=10 +max_tries=1000 for (( hr = 10#${FSTART}; hr <= 10#${FEND}; hr = hr + 10#${FINT} )); do hh2=$(printf %02i "${hr}") hh3=$(printf %03i "${hr}") #--------------------------------------------------------- # Make sure all files are available: - ic=0 - while (( ic < 1000 )); do - if [[ ! -f "${COM_ATMOS_HISTORY}/${RUN}.${cycle}.atm.logf${hh3}.${logfm}" ]]; then - sleep 10 - ic=$((ic + 1)) - else - break - fi - - if (( ic >= 360 )); then - echo "FATAL: COULD NOT LOCATE logf${hh3} file AFTER 1 HOUR" - exit 2 - fi - done + filename="${COM_ATMOS_HISTORY}/${RUN}.${cycle}.atm.logf${hh3}.${logfm}" + if ! wait_for_file "${filename}" "${sleep_interval}" "${max_tries}"; then + echo "FATAL ERROR: COULD NOT LOCATE logf${hh3} file" + exit 2 + fi + #------------------------------------------------------------------ ${NLN} "${COM_ATMOS_HISTORY}/${RUN}.${cycle}.atmf${hh3}.${atmfm}" "sigf${hh2}" ${NLN} "${COM_ATMOS_HISTORY}/${RUN}.${cycle}.sfcf${hh3}.${atmfm}" "flxf${hh2}" @@ -96,11 +90,11 @@ esac ${APRUN_POSTSND} "${EXECgfs}/${pgm}" < gfsparm > "out_gfs_bufr_${FEND}" export err=$? 
-if [ $err -ne 0 ]; then +if [[ "${err}" -ne 0 ]]; then echo "GFS postsnd job error, Please check files " echo "${COM_ATMOS_HISTORY}/${RUN}.${cycle}.atmf${hh2}.${atmfm}" echo "${COM_ATMOS_HISTORY}/${RUN}.${cycle}.sfcf${hh2}.${atmfm}" err_chk fi -exit ${err} +exit "${err}" diff --git a/ush/gfs_bufr_netcdf.sh b/ush/gfs_bufr_netcdf.sh index f10ba40730..f03ff3b9af 100755 --- a/ush/gfs_bufr_netcdf.sh +++ b/ush/gfs_bufr_netcdf.sh @@ -66,6 +66,9 @@ hh=$FSTART hh1=$(echo "${hh#"${hh%??}"}") hh=$hh1 fi + +sleep_interval=10 +max_tries=360 while test $hh -le $FEND do if test $hh -lt 100 @@ -75,24 +78,11 @@ do hh2=$hh fi -#--------------------------------------------------------- -# Make sure all files are available: - ic=0 - while [ $ic -lt 1000 ] - do - if [ ! -f $COMIN/${RUN}.${cycle}.logf${hh2}.txt ] - then - sleep 10 - ic=$(expr $ic + 1) - else - break - fi + filename="${COMIN}/${RUN}.${cycle}.logf${hh2}.txt" + if ! wait_for_file "${filename}" "${sleep_interval}" "${max_tries}" ; then + err_exit "FATAL ERROR COULD NOT LOCATE logf${hh2} file" + fi - if [ $ic -ge 360 ] - then - err_exit "COULD NOT LOCATE logf${hh2} file AFTER 1 HOUR" - fi - done #------------------------------------------------------------------ ${NLN} $COMIN/${RUN}.${cycle}.atmf${hh2}.nc sigf${hh} ${NLN} $COMIN/${RUN}.${cycle}.${SFCF}f${hh2}.nc flxf${hh} diff --git a/ush/wave_tar.sh b/ush/wave_tar.sh index bb8836df2c..e01ef61f15 100755 --- a/ush/wave_tar.sh +++ b/ush/wave_tar.sh @@ -29,7 +29,7 @@ source "${USHgfs}/preamble.sh" # 0.a Basic modes of operation - cd $DATA + cd "${DATA}" echo "Making TAR FILE" alertName=$(echo $RUN|tr [a-z] [A-Z]) @@ -47,7 +47,7 @@ source "${USHgfs}/preamble.sh" # 0.b Check if type set - if [ "$#" -lt '3' ] + if [[ "$#" -lt '3' ]] then set +x echo ' ' @@ -64,9 +64,9 @@ source "${USHgfs}/preamble.sh" fi filext=$type - if [ "$type" = "ibp" ]; then filext='spec'; fi - if [ "$type" = "ibpbull" ]; then filext='bull'; fi - if [ "$type" = "ibpcbull" ]; then filext='cbull'; fi + if [[ 
"$type" = "ibp" ]]; then filext='spec'; fi + if [[ "$type" = "ibpbull" ]]; then filext='bull'; fi + if [[ "$type" = "ibpcbull" ]]; then filext='cbull'; fi rm -rf TAR_${filext}_$ID @@ -88,7 +88,7 @@ source "${USHgfs}/preamble.sh" exit 2 fi - cd ${STA_DIR}/${filext} + cd "${STA_DIR}/${filext}" # --------------------------------------------------------------------------- # # 2. Generate tar file (spectral files are compressed) @@ -98,21 +98,27 @@ source "${USHgfs}/preamble.sh" echo ' Making tar file ...' set_trace - count=0 countMAX=5 tardone='no' - - while [ "$count" -lt "$countMAX" ] && [ "$tardone" = 'no' ] + sleep_interval=10 + + while [[ "${tardone}" = "no" ]] do nf=$(ls | awk '/'$ID.*.$filext'/ {a++} END {print a}') nbm2=$(( $nb - 2 )) - if [ $nf -ge $nbm2 ] - then - tar -cf $ID.$cycle.${type}_tar ./$ID.*.$filext + if [[ "${nf}" -ge "${nbm2}" ]] + then + + tar -cf "${ID}.${cycle}.${type}_tar" ./${ID}.*.${filext} exit=$? + filename="${ID}.${cycle}.${type}_tar" + if ! wait_for_file "${filename}" "${sleep_interval}" "${countMAX}" ; then + echo "FATAL ERROR: File ${filename} not found after waiting $(( sleep_interval * (countMAX + 1) )) secs" + exit 3 + fi - if [ "$exit" != '0' ] + if [[ "${exit}" != '0' ]] then set +x echo ' ' @@ -124,21 +130,15 @@ source "${USHgfs}/preamble.sh" exit 3 fi - if [ -f "$ID.$cycle.${type}_tar" ] + if [[ -f "${ID}.${cycle}.${type}_tar" ]] then tardone='yes' fi - else - set +x - echo ' All files not found for tar. Sleeping 10 seconds and trying again ..' 
- set_trace - sleep 10 - count=$(expr $count + 1) fi done - if [ "$tardone" = 'no' ] + if [[ "${tardone}" = 'no' ]] then set +x echo ' ' @@ -150,15 +150,15 @@ source "${USHgfs}/preamble.sh" exit 3 fi - if [ "$type" = 'spec' ] + if [[ "${type}" = 'spec' ]] then - if [ -s $ID.$cycle.${type}_tar ] + if [[ -s "${ID}.${cycle}.${type}_tar" ]] then - file_name=$ID.$cycle.${type}_tar.gz - /usr/bin/gzip -c $ID.$cycle.${type}_tar > ${file_name} + file_name="${ID}.${cycle}.${type}_tar.gz" + /usr/bin/gzip -c "${ID}.${cycle}.${type}_tar" > "${file_name}" exit=$? - if [ "$exit" != '0' ] + if [[ "${exit}" != '0' ]] then set +x echo ' ' @@ -171,7 +171,7 @@ source "${USHgfs}/preamble.sh" fi fi else - file_name=$ID.$cycle.${type}_tar + file_name="${ID}.${cycle}.${type}_tar" fi # --------------------------------------------------------------------------- # @@ -186,7 +186,7 @@ source "${USHgfs}/preamble.sh" exit=$? - if [ "$exit" != '0' ] + if [[ "${exit}" != '0' ]] then set +x echo ' ' @@ -198,7 +198,7 @@ source "${USHgfs}/preamble.sh" exit 4 fi - if [ "$SENDDBN" = 'YES' ] + if [[ "${SENDDBN}" = 'YES' ]] then set +x echo ' ' @@ -212,7 +212,7 @@ source "${USHgfs}/preamble.sh" # --------------------------------------------------------------------------- # # 4. Final clean up -cd $DATA +cd "${DATA}" if [[ ${KEEPDATA:-NO} == "NO" ]]; then set -v diff --git a/versions/run.hera.ver b/versions/run.hera.ver index 6280e8e115..34f81bfe96 100644 --- a/versions/run.hera.ver +++ b/versions/run.hera.ver @@ -5,6 +5,7 @@ export spack_env=gsi-addon-dev-rocky8 export hpss_ver=hpss export ncl_ver=6.6.2 export R_ver=3.6.1 + export gempak_ver=7.17.0 export perl_ver=5.38.0 From 34155fb4767769600a1ff95f0a65e37081addc2a Mon Sep 17 00:00:00 2001 From: Neil Barton <103681022+NeilBarton-NOAA@users.noreply.github.com> Date: Thu, 13 Jun 2024 11:18:22 -0400 Subject: [PATCH 05/20] Add ability to use GEFS replay ICs (#2559) The PR allows the use of ICs from PSL's replay analysis. 
These replay ICs will be used for GEFS reforecasting and SFS. Two main changes are associated with these updates: (1) replay ICs being valid at 3Z, and (2) the use of warm starts. Resolves #1838 --------- Co-authored-by: Jessica Meixner Co-authored-by: Walter Kolczynski - NOAA Co-authored-by: Rahul Mahajan --- parm/config/gefs/config.base | 9 +- parm/config/gefs/config.efcs | 2 +- parm/config/gefs/config.fcst | 3 + parm/config/gefs/config.stage_ic | 9 ++ parm/config/gefs/config.ufs | 2 +- parm/config/gefs/yaml/defaults.yaml | 2 +- parm/config/gfs/config.base | 4 + scripts/exgfs_wave_post_pnt.sh | 5 +- scripts/exglobal_stage_ic.sh | 61 +++++---- ush/forecast_postdet.sh | 188 +++++++++++++++------------- ush/forecast_predet.sh | 47 ++++++- ush/parsing_namelists_WW3.sh | 2 +- ush/wave_outp_spec.sh | 5 +- workflow/rocoto/gefs_tasks.py | 55 ++++---- workflow/setup_expt.py | 2 + 15 files changed, 255 insertions(+), 141 deletions(-) diff --git a/parm/config/gefs/config.base b/parm/config/gefs/config.base index 840f33ad60..0bb29e31ae 100644 --- a/parm/config/gefs/config.base +++ b/parm/config/gefs/config.base @@ -294,12 +294,17 @@ export MEMDIR="mem${ENSMEM}" # initialize ocean ensemble members with perturbations # if true, only occurs for members greater than zero -export USE_OCN_PERTURB_FILES=@USE_OCN_PERTURB_FILES@ +export REPLAY_ICS=@REPLAY_ICS@ +if [[ "${REPLAY_ICS:-NO}" == "YES" ]]; then + export OFFSET_START_HOUR=$(( assim_freq / 2 )) +else + export OFFSET_START_HOUR=0 +fi export DOIAU="NO" # While we are not doing IAU, we may want to warm start w/ IAU in the future # Check if cycle is cold starting if [[ "${EXP_WARM_START}" = ".false." 
]]; then - export IAU_FHROT=0 + export IAU_FHROT=${OFFSET_START_HOUR} else if [[ "${DOIAU}" = "YES" ]]; then export IAU_FHROT=3 diff --git a/parm/config/gefs/config.efcs b/parm/config/gefs/config.efcs index 915726b974..807ed66d48 100644 --- a/parm/config/gefs/config.efcs +++ b/parm/config/gefs/config.efcs @@ -67,7 +67,7 @@ export EPBL="0.8,0.4,0.2,0.08,0.04" export EPBL_TAU="2.16E4,2.592E5,2.592E6,7.776E6,3.1536E7" export EPBL_LSCALE="500.E3,1000.E3,2000.E3,2000.E3,2000.E3" -if [[ "${USE_OCN_PERTURB_FILES:-false}" == "true" ]]; then +if [[ "${REPLAY_ICS:-NO}" == "YES" ]]; then export ODA_INCUPD="True" export ODA_TEMPINC_VAR='t_pert' export ODA_SALTINC_VAR='s_pert' diff --git a/parm/config/gefs/config.fcst b/parm/config/gefs/config.fcst index 0009f4a868..103d6f091d 100644 --- a/parm/config/gefs/config.fcst +++ b/parm/config/gefs/config.fcst @@ -256,6 +256,9 @@ else export io_layout="1,1" fi +if (( OFFSET_START_HOUR != 0 )); then + export reforecast="YES" +fi # Remember config.efcs will over-ride these values for ensemble forecasts # if these variables are re-defined there. 
# Otherwise, the ensemble forecast will inherit from config.fcst diff --git a/parm/config/gefs/config.stage_ic b/parm/config/gefs/config.stage_ic index b332ee1826..f0b5dfa609 100644 --- a/parm/config/gefs/config.stage_ic +++ b/parm/config/gefs/config.stage_ic @@ -13,12 +13,21 @@ case "${CASE}" in export CPL_ICEIC="" export CPL_OCNIC="" export CPL_WAVIC="" + export CPL_MEDIC="" + ;; + "C96") + export CPL_ATMIC="" + export CPL_ICEIC="" + export CPL_OCNIC="" + export CPL_WAVIC="" + export CPL_MEDIC="" ;; "C48") export CPL_ATMIC="gefs_test" export CPL_ICEIC="gefs_test" export CPL_OCNIC="gefs_test" export CPL_WAVIC="gefs_test" + export CPL_MEDIC="gefs_test" ;; *) echo "FATAL ERROR Unrecognized resolution: ${CASE}" diff --git a/parm/config/gefs/config.ufs b/parm/config/gefs/config.ufs index 9c39bf06de..8beb0652f7 100644 --- a/parm/config/gefs/config.ufs +++ b/parm/config/gefs/config.ufs @@ -324,7 +324,7 @@ if [[ "${skip_mom6}" == "false" ]]; then MOM6_RESTART_SETTING='r' MOM6_RIVER_RUNOFF='False' eps_imesh="2.5e-1" - TOPOEDITS="ufs.topo_edits_011818.nc" + TOPOEDITS="topo_edits_011818.nc" if [[ "${DO_JEDIOCNVAR:-NO}" = "YES" ]]; then MOM6_DIAG_COORD_DEF_Z_FILE="oceanda_zgrid_75L.nc" MOM6_DIAG_MISVAL="0.0" diff --git a/parm/config/gefs/yaml/defaults.yaml b/parm/config/gefs/yaml/defaults.yaml index 5c763ad29e..2341c35d05 100644 --- a/parm/config/gefs/yaml/defaults.yaml +++ b/parm/config/gefs/yaml/defaults.yaml @@ -9,5 +9,5 @@ base: DO_AWIPS: "NO" KEEPDATA: "NO" FHMAX_GFS: 120 - USE_OCN_PERTURB_FILES: "false" + REPLAY_ICS: "NO" diff --git a/parm/config/gfs/config.base b/parm/config/gfs/config.base index 7eedec8af7..6cc1b6d744 100644 --- a/parm/config/gfs/config.base +++ b/parm/config/gfs/config.base @@ -470,6 +470,10 @@ if [[ ${DO_JEDIATMVAR} = "YES" ]]; then export DO_VMINMON="NO" # GSI minimization monitoring fi +# If starting ICs that are not at cycle hour +export REPLAY_ICS="NO" +export OFFSET_START_HOUR=0 + # Number of regional collectives to create soundings for export 
NUM_SND_COLLECTIVES=${NUM_SND_COLLECTIVES:-9} diff --git a/scripts/exgfs_wave_post_pnt.sh b/scripts/exgfs_wave_post_pnt.sh index 6e456e2aec..56cb83be21 100755 --- a/scripts/exgfs_wave_post_pnt.sh +++ b/scripts/exgfs_wave_post_pnt.sh @@ -247,8 +247,9 @@ source "${USHgfs}/preamble.sh" -e "s/FORMAT/F/g" \ ww3_outp_spec.inp.tmpl > ww3_outp.inp - ${NLN} mod_def.$waveuoutpGRD mod_def.ww3 - HMS="${cyc}0000" + ${NLN} mod_def.${waveuoutpGRD} mod_def.ww3 + HH=$(date --utc -d "${PDY:0:8} ${cyc} + ${FHMIN_WAV} hours" +%H) + HMS="${HH}0000" if [[ -f "${COM_WAVE_HISTORY}/${WAV_MOD_TAG}.out_pnt.${waveuoutpGRD}.${PDY}.${HMS}" ]]; then ${NLN} "${COM_WAVE_HISTORY}/${WAV_MOD_TAG}.out_pnt.${waveuoutpGRD}.${PDY}.${HMS}" \ "./out_pnt.${waveuoutpGRD}" diff --git a/scripts/exglobal_stage_ic.sh b/scripts/exglobal_stage_ic.sh index d941fa10b4..8800292752 100755 --- a/scripts/exglobal_stage_ic.sh +++ b/scripts/exglobal_stage_ic.sh @@ -8,6 +8,9 @@ GDATE=$(date --utc -d "${PDY} ${cyc} - ${assim_freq} hours" +%Y%m%d%H) gPDY="${GDATE:0:8}" gcyc="${GDATE:8:2}" +RDATE=$(date --utc -d "${PDY} ${cyc} + ${OFFSET_START_HOUR} hours" +%Y%m%d%H) +DTG_PREFIX="${RDATE:0:8}.${RDATE:8:2}0000" + MEMDIR_ARRAY=() if [[ "${RUN:-}" = "gefs" ]]; then # Populate the member_dirs array based on the value of NMEM_ENS @@ -33,9 +36,10 @@ for MEMDIR in "${MEMDIR_ARRAY[@]}"; do # Stage the FV3 restarts to ROTDIR (warm start) RUN=${rCDUMP} YMD=${gPDY} HH=${gcyc} declare_from_tmpl COM_ATMOS_RESTART_PREV:COM_ATMOS_RESTART_TMPL [[ ! 
-d "${COM_ATMOS_RESTART_PREV}" ]] && mkdir -p "${COM_ATMOS_RESTART_PREV}" - for ftype in coupler.res fv_core.res.nc; do - src="${BASE_CPLIC}/${CPL_ATMIC:-}/${PDY}${cyc}/${MEMDIR}/atmos/${PDY}.${cyc}0000.${ftype}" - tgt="${COM_ATMOS_RESTART_PREV}/${PDY}.${cyc}0000.${ftype}" + prev_atmos_copy_list=(fv_core.res.nc coupler.res) + for ftype in "${prev_atmos_copy_list[@]}"; do + src="${BASE_CPLIC}/${CPL_ATMIC:-}/${PDY}${cyc}/${MEMDIR}/atmos/${DTG_PREFIX}.${ftype}" + tgt="${COM_ATMOS_RESTART_PREV}/${DTG_PREFIX}.${ftype}" ${NCP} "${src}" "${tgt}" rc=$? ((rc != 0)) && error_message "${src}" "${tgt}" "${rc}" @@ -43,11 +47,11 @@ for MEMDIR in "${MEMDIR_ARRAY[@]}"; do done for ftype in ca_data fv_core.res fv_srf_wnd.res fv_tracer.res phy_data sfc_data; do for ((tt = 1; tt <= ntiles; tt++)); do - src="${BASE_CPLIC}/${CPL_ATMIC:-}/${PDY}${cyc}/${MEMDIR}/atmos/${PDY}.${cyc}0000.${ftype}.tile${tt}.nc" + src="${BASE_CPLIC}/${CPL_ATMIC:-}/${PDY}${cyc}/${MEMDIR}/atmos/${DTG_PREFIX}.${ftype}.tile${tt}.nc" if (( tt > 6 )) ; then - tgt="${COM_ATMOS_RESTART_PREV}/${PDY}.${cyc}0000.${ftype}.nest0$((tt-5)).tile${tt}.nc" + tgt="${COM_ATMOS_RESTART_PREV}/${DTG_PREFIX}.${ftype}.nest0$((tt-5)).tile${tt}.nc" else - tgt="${COM_ATMOS_RESTART_PREV}/${PDY}.${cyc}0000.${ftype}.tile${tt}.nc" + tgt="${COM_ATMOS_RESTART_PREV}/${DTG_PREFIX}.${ftype}.tile${tt}.nc" fi ${NCP} "${src}" "${tgt}" rc=$? @@ -79,13 +83,26 @@ for MEMDIR in "${MEMDIR_ARRAY[@]}"; do fi done fi + + # Atmosphere Perturbation Files (usually used with replay ICS) + # Extra zero on MEMDIR ensure we have a number even if the string is empty + if (( 0${MEMDIR:3} > 0 )) && [[ "${REPLAY_ICS:-NO}" == "YES" ]]; then + YMD=${PDY} HH=${cyc} declare_from_tmpl COM_ATMOS_ANALYSIS:COM_ATMOS_ANALYSIS_TMPL + [[ ! 
-d "${COM_ATMOS_ANALYSIS}" ]] && mkdir -p "${COM_ATMOS_ANALYSIS}" + src="${BASE_CPLIC}/${CPL_ATMIC:-}/${PDY}${cyc}/${MEMDIR}/atmos/${DTG_PREFIX}.fv3_perturbation.nc" + tgt="${COM_ATMOS_ANALYSIS}/${RUN}.t00z.atminc.nc" + ${NCP} "${src}" "${tgt}" + rc=${?} + ((rc != 0)) && error_message "${src}" "${tgt}" "${rc}" + err=$((err + rc)) + fi # Stage ocean initial conditions to ROTDIR (warm start) if [[ "${DO_OCN:-}" = "YES" ]]; then RUN=${rCDUMP} YMD=${gPDY} HH=${gcyc} declare_from_tmpl COM_OCEAN_RESTART_PREV:COM_OCEAN_RESTART_TMPL [[ ! -d "${COM_OCEAN_RESTART_PREV}" ]] && mkdir -p "${COM_OCEAN_RESTART_PREV}" - src="${BASE_CPLIC}/${CPL_OCNIC:-}/${PDY}${cyc}/${MEMDIR}/ocean/${PDY}.${cyc}0000.MOM.res.nc" - tgt="${COM_OCEAN_RESTART_PREV}/${PDY}.${cyc}0000.MOM.res.nc" + src="${BASE_CPLIC}/${CPL_OCNIC:-}/${PDY}${cyc}/${MEMDIR}/ocean/${DTG_PREFIX}.MOM.res.nc" + tgt="${COM_OCEAN_RESTART_PREV}/${DTG_PREFIX}.MOM.res.nc" ${NCP} "${src}" "${tgt}" rc=$? ((rc != 0)) && error_message "${src}" "${tgt}" "${rc}" @@ -96,8 +113,8 @@ for MEMDIR in "${MEMDIR_ARRAY[@]}"; do ;; "025" ) for nn in $(seq 1 3); do - src="${BASE_CPLIC}/${CPL_OCNIC:-}/${PDY}${cyc}/${MEMDIR}/ocean/${PDY}.${cyc}0000.MOM.res_${nn}.nc" - tgt="${COM_OCEAN_RESTART_PREV}/${PDY}.${cyc}0000.MOM.res_${nn}.nc" + src="${BASE_CPLIC}/${CPL_OCNIC:-}/${PDY}${cyc}/${MEMDIR}/ocean/${DTG_PREFIX}.MOM.res_${nn}.nc" + tgt="${COM_OCEAN_RESTART_PREV}/${DTG_PREFIX}.MOM.res_${nn}.nc" ${NCP} "${src}" "${tgt}" rc=$? 
((rc != 0)) && error_message "${src}" "${tgt}" "${rc}" @@ -113,9 +130,11 @@ for MEMDIR in "${MEMDIR_ARRAY[@]}"; do # Ocean Perturbation Files # Extra zero on MEMDIR ensure we have a number even if the string is empty - if (( 0${MEMDIR:3} > 0 )) && [[ "${USE_OCN_PERTURB_FILES:-false}" == "true" ]]; then - src="${BASE_CPLIC}/${CPL_OCNIC:-}/${PDY}${cyc}/${MEMDIR}/ocean/${PDY}.${cyc}0000.mom6_increment.nc" - tgt="${COM_OCEAN_RESTART_PREV}/${PDY}.${cyc}0000.mom6_increment.nc" + if (( 0${MEMDIR:3} > 0 )) && [[ "${REPLAY_ICS:-NO}" == "YES" ]]; then + YMD=${PDY} HH=${cyc} declare_from_tmpl COM_OCEAN_ANALYSIS:COM_OCEAN_ANALYSIS_TMPL + [[ ! -d "${COM_OCEAN_ANALYSIS}" ]] && mkdir -p "${COM_OCEAN_ANALYSIS}" + src="${BASE_CPLIC}/${CPL_OCNIC:-}/${PDY}${cyc}/${MEMDIR}/ocean/${DTG_PREFIX}.mom6_perturbation.nc" + tgt="${COM_OCEAN_ANALYSIS}/mom6_increment.nc" ${NCP} "${src}" "${tgt}" rc=${?} ((rc != 0)) && error_message "${src}" "${tgt}" "${rc}" @@ -128,8 +147,8 @@ for MEMDIR in "${MEMDIR_ARRAY[@]}"; do # Stage the mediator restarts to ROTDIR (warm start/restart the coupled model) RUN=${rCDUMP} YMD=${gPDY} HH=${gcyc} declare_from_tmpl COM_MED_RESTART_PREV:COM_MED_RESTART_TMPL [[ ! -d "${COM_MED_RESTART_PREV}" ]] && mkdir -p "${COM_MED_RESTART_PREV}" - src="${BASE_CPLIC}/${CPL_MEDIC:-}/${PDY}${cyc}/${MEMDIR}/med/${PDY}.${cyc}0000.ufs.cpld.cpl.r.nc" - tgt="${COM_MED_RESTART_PREV}/${PDY}.${cyc}0000.ufs.cpld.cpl.r.nc" + src="${BASE_CPLIC}/${CPL_MEDIC:-}/${PDY}${cyc}/${MEMDIR}/med/${DTG_PREFIX}.ufs.cpld.cpl.r.nc" + tgt="${COM_MED_RESTART_PREV}/${DTG_PREFIX}.ufs.cpld.cpl.r.nc" if [[ -f "${src}" ]]; then ${NCP} "${src}" "${tgt}" rc=$? @@ -146,8 +165,8 @@ for MEMDIR in "${MEMDIR_ARRAY[@]}"; do if [[ "${DO_ICE:-}" = "YES" ]]; then RUN=${rCDUMP} YMD=${gPDY} HH=${gcyc} declare_from_tmpl COM_ICE_RESTART_PREV:COM_ICE_RESTART_TMPL [[ ! 
-d "${COM_ICE_RESTART_PREV}" ]] && mkdir -p "${COM_ICE_RESTART_PREV}" - src="${BASE_CPLIC}/${CPL_ICEIC:-}/${PDY}${cyc}/${MEMDIR}/ice/${PDY}.${cyc}0000.cice_model.res.nc" - tgt="${COM_ICE_RESTART_PREV}/${PDY}.${cyc}0000.cice_model.res.nc" + src="${BASE_CPLIC}/${CPL_ICEIC:-}/${PDY}${cyc}/${MEMDIR}/ice/${DTG_PREFIX}.cice_model.res.nc" + tgt="${COM_ICE_RESTART_PREV}/${DTG_PREFIX}.cice_model.res.nc" ${NCP} "${src}" "${tgt}" rc=$? ((rc != 0)) && error_message "${src}" "${tgt}" "${rc}" @@ -156,11 +175,11 @@ for MEMDIR in "${MEMDIR_ARRAY[@]}"; do # Stage the WW3 initial conditions to ROTDIR (warm start; TODO: these should be placed in $RUN.$gPDY/$gcyc) if [[ "${DO_WAVE:-}" = "YES" ]]; then - YMD=${PDY} HH=${cyc} declare_from_tmpl COM_WAVE_RESTART - [[ ! -d "${COM_WAVE_RESTART}" ]] && mkdir -p "${COM_WAVE_RESTART}" + YMD=${gPDY} HH=${gcyc} declare_from_tmpl COM_WAVE_RESTART_PREV:COM_WAVE_RESTART_TMPL + [[ ! -d "${COM_WAVE_RESTART_PREV}" ]] && mkdir -p "${COM_WAVE_RESTART_PREV}" for grdID in ${waveGRD}; do # TODO: check if this is a bash array; if so adjust - src="${BASE_CPLIC}/${CPL_WAVIC:-}/${PDY}${cyc}/${MEMDIR}/wave/${PDY}.${cyc}0000.restart.${grdID}" - tgt="${COM_WAVE_RESTART}/${PDY}.${cyc}0000.restart.${grdID}" + src="${BASE_CPLIC}/${CPL_WAVIC:-}/${PDY}${cyc}/${MEMDIR}/wave/${DTG_PREFIX}.restart.${grdID}" + tgt="${COM_WAVE_RESTART_PREV}/${DTG_PREFIX}.restart.${grdID}" ${NCP} "${src}" "${tgt}" rc=$? ((rc != 0)) && error_message "${src}" "${tgt}" "${rc}" diff --git a/ush/forecast_postdet.sh b/ush/forecast_postdet.sh index aff4f5a394..a4e10dffb9 100755 --- a/ush/forecast_postdet.sh +++ b/ush/forecast_postdet.sh @@ -8,40 +8,24 @@ FV3_postdet() { echo "warm_start = ${warm_start}" echo "RERUN = ${RERUN}" + #============================================================================ + # First copy initial conditions # cold start case if [[ "${warm_start}" == ".false." 
]]; then - # Create an array of chgres-ed FV3 files - local fv3_input_files tile_files fv3_input_file - fv3_input_files=(gfs_ctrl.nc) - tile_files=(gfs_data sfc_data) - local nn tt - for (( nn = 1; nn <= ntiles; nn++ )); do - for tt in "${tile_files[@]}"; do - fv3_input_files+=("${tt}.tile${nn}.nc") - done - done - + # Get list of FV3 cold start files + local file_list + file_list=$(FV3_coldstarts) echo "Copying FV3 cold start files for 'RUN=${RUN}' at '${current_cycle}' from '${COMIN_ATMOS_INPUT}'" - for fv3_input_file in "${fv3_input_files[@]}"; do - ${NCP} "${COMIN_ATMOS_INPUT}/${fv3_input_file}" "${DATA}/INPUT/${fv3_input_file}" \ + local fv3_file + for fv3_file in ${file_list}; do + ${NCP} "${COMIN_ATMOS_INPUT}/${fv3_file}" "${DATA}/INPUT/${fv3_file}" \ || ( echo "FATAL ERROR: Unable to copy FV3 IC, ABORT!"; exit 1 ) done # warm start case elif [[ "${warm_start}" == ".true." ]]; then - # Create an array of FV3 restart files - local fv3_restart_files tile_files fv3_restart_file restart_file - fv3_restart_files=(coupler.res fv_core.res.nc) - tile_files=(fv_core.res fv_srf_wnd.res fv_tracer.res phy_data sfc_data ca_data) - local nn tt - for (( nn = 1; nn <= ntiles; nn++ )); do - for tt in "${tile_files[@]}"; do - fv3_restart_files+=("${tt}.tile${nn}.nc") - done - done - # Determine restart date and directory containing restarts local restart_date restart_dir if [[ "${RERUN}" == "YES" ]]; then @@ -52,26 +36,18 @@ FV3_postdet() { restart_dir="${COMIN_ATMOS_RESTART_PREV}" fi + # Get list of FV3 restart files + local file_list + file_list=$(FV3_restarts) echo "Copying FV3 restarts for 'RUN=${RUN}' at '${restart_date}' from '${restart_dir}'" - for fv3_restart_file in "${fv3_restart_files[@]}"; do - restart_file="${restart_date:0:8}.${restart_date:8:2}0000.${fv3_restart_file}" - ${NCP} "${restart_dir}/${restart_file}" "${DATA}/INPUT/${fv3_restart_file}" \ + local fv3_file restart_file + for fv3_file in ${file_list}; do + 
restart_file="${restart_date:0:8}.${restart_date:8:2}0000.${fv3_file}" + ${NCP} "${restart_dir}/${restart_file}" "${DATA}/INPUT/${fv3_file}" \ || ( echo "FATAL ERROR: Unable to copy FV3 IC, ABORT!"; exit 1 ) done - if [[ "${RERUN}" == "YES" ]]; then - - local restart_fhr - restart_fhr=$(nhour "${RERUN_DATE}" "${current_cycle}") - IAU_FHROT=$((IAU_OFFSET + restart_fhr)) - if [[ "${DOIAU}" == "YES" ]]; then - IAUFHRS=-1 - IAU_DELTHRS=0 - IAU_INC_FILES="''" - fi - - else # "${RERUN}" == "NO" - + if [[ "${RERUN}" != "YES" ]]; then # Replace sfc_data with sfcanl_data restart files from current cycle (if found) local nn for (( nn = 1; nn <= ntiles; nn++ )); do @@ -84,9 +60,55 @@ FV3_postdet() { break fi done + fi # if [[ "${RERUN}" != "YES" ]]; then - # Need a coupler.res that is consistent with the model start time + fi # if [[ "${warm_start}" == ".true." ]]; then + + #============================================================================ + # Determine increment files when doing cold start + if [[ "${warm_start}" == ".false." ]]; then + + if [[ "${REPLAY_ICS:-NO}" == "YES" ]]; then + IAU_FHROT=${half_window} # Replay ICs start at the end of the assimilation window + if (( MEMBER == 0 )); then + inc_files=() + else + inc_files=("atminc.nc") + read_increment=".true." + res_latlon_dynamics="atminc.nc" + fi + local increment_file + for inc_file in "${inc_files[@]}"; do + increment_file="${COMIN_ATMOS_INPUT}/${RUN}.t${cyc}z.${inc_file}" + if [[ -f "${increment_file}" ]]; then + ${NCP} "${increment_file}" "${DATA}/INPUT/${inc_file}" + else + echo "FATAL ERROR: missing increment file '${increment_file}', ABORT!" + exit 1 + fi + done + fi + + # Determine IAU and increment files when doing warm start + elif [[ "${warm_start}" == ".true." 
]]; then + + #-------------------------------------------------------------------------- + if [[ "${RERUN}" == "YES" ]]; then + + local restart_fhr + restart_fhr=$(nhour "${RERUN_DATE}" "${current_cycle}") + IAU_FHROT=$((IAU_OFFSET + restart_fhr)) if [[ "${DOIAU}" == "YES" ]]; then + IAUFHRS=-1 + IAU_DELTHRS=0 + IAU_INC_FILES="''" + fi + + #-------------------------------------------------------------------------- + else # "${RERUN}" == "NO" + + # Need a coupler.res that is consistent with the model start time + if [[ "${DOIAU:-NO}" == "YES" ]]; then local model_start_time="${previous_cycle}" else local model_start_time="${current_cycle}" @@ -121,6 +143,15 @@ EOF inc_files=("atminc.nc") read_increment=".true." res_latlon_dynamics="atminc.nc" + if [[ "${REPLAY_ICS:-NO}" == "YES" ]]; then + IAU_FHROT=${half_window} # Replay ICs start at the end of the assimilation window + # Control member has no perturbation + if (( MEMBER == 0 )); then + inc_files=() + read_increment=".false." + res_latlon_dynamics='""' + fi + fi fi local increment_file @@ -135,17 +166,22 @@ EOF done fi # if [[ "${RERUN}" == "YES" ]]; then + #-------------------------------------------------------------------------- fi # if [[ "${warm_start}" == ".true." ]]; then + #============================================================================ + #============================================================================ # If doing IAU, change forecast hours - if [[ "${DOIAU:-}" == "YES" ]]; then + if [[ "${DOIAU:-NO}" == "YES" ]]; then FHMAX=$((FHMAX + 6)) if (( FHMAX_HF > 0 )); then FHMAX_HF=$((FHMAX_HF + 6)) fi fi + #============================================================================ + #============================================================================ # If warm starting from restart files, set the following flags if [[ "${warm_start}" == ".true." ]]; then @@ -162,8 +198,9 @@ EOF na_init=0 fi # warm_start == .true. 
+ #============================================================================ - cd "${DATA}" || exit 1 + #============================================================================ if [[ "${QUILTING}" = ".true." ]] && [[ "${OUTPUT_GRID}" = "gaussian_grid" ]]; then local FH2 FH3 for fhr in ${FV3_OUTPUT_FH}; do @@ -181,16 +218,8 @@ EOF fi fi done - else # TODO: Is this even valid anymore? - local nn - for (( nn = 1; nn <= ntiles; nn++ )); do - ${NLN} "nggps2d.tile${nn}.nc" "${COMOUT_ATMOS_HISTORY}/nggps2d.tile${nn}.nc" - ${NLN} "nggps3d.tile${nn}.nc" "${COMOUT_ATMOS_HISTORY}/nggps3d.tile${nn}.nc" - ${NLN} "grid_spec.tile${nn}.nc" "${COMOUT_ATMOS_HISTORY}/grid_spec.tile${nn}.nc" - ${NLN} "atmos_static.tile${nn}.nc" "${COMOUT_ATMOS_HISTORY}/atmos_static.tile${nn}.nc" - ${NLN} "atmos_4xdaily.tile${nn}.nc" "${COMOUT_ATMOS_HISTORY}/atmos_4xdaily.tile${nn}.nc" - done fi + #============================================================================ } FV3_nml() { @@ -223,44 +252,35 @@ FV3_out() { ${NCP} "${DATA}/diag_table" "${COMOUT_CONF}/ufs.diag_table" - # Create an array of fv3 restart files - local fv3_restart_files tile_files fv3_restart_file restart_file - fv3_restart_files=(coupler.res fv_core.res.nc) - tile_files=(fv_core.res fv_srf_wnd.res fv_tracer.res phy_data sfc_data ca_data) - local nn tt - for (( nn = 1; nn <= ntiles; nn++ )); do - for tt in "${tile_files[@]}"; do - fv3_restart_files+=("${tt}.tile${nn}.nc") - done - done - + # Determine the dates for restart files to be copied to COM + local restart_date restart_dates + restart_dates=() # Copy restarts in the assimilation window for RUN=gdas|enkfgdas|enkfgfs if [[ "${RUN}" =~ "gdas" || "${RUN}" == "enkfgfs" ]]; then - local restart_date - restart_date=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${restart_interval} hours" +%Y%m%d%H) + restart_date="${model_start_date_next_cycle}" while (( restart_date <= forecast_end_cycle )); do - echo "Copying FV3 restarts for 'RUN=${RUN}' at 
${restart_date}" - for fv3_restart_file in "${fv3_restart_files[@]}"; do - restart_file="${restart_date:0:8}.${restart_date:8:2}0000.${fv3_restart_file}" - ${NCP} "${DATArestart}/FV3_RESTART/${restart_file}" \ - "${COMOUT_ATMOS_RESTART}/${restart_file}" - done + restart_dates+=("${restart_date:0:8}.${restart_date:8:2}0000") restart_date=$(date --utc -d "${restart_date:0:8} ${restart_date:8:2} + ${restart_interval} hours" +%Y%m%d%H) done + elif [[ "${RUN}" == "gfs" || "${RUN}" == "gefs" ]]; then # Copy restarts at the end of the forecast segment for RUN=gfs|gefs + if [[ "${COPY_FINAL_RESTARTS}" == "YES" ]]; then + restart_dates+=("${forecast_end_cycle:0:8}.${forecast_end_cycle:8:2}0000") + fi fi - # Copy the final restart files at the end of the forecast segment - # The final restart written at the end of the forecast does not include the valid date - # TODO: verify the above statement since RM found that it did! - # TODO: For other components, this is only for gfs/gefs - check to see if this should also have this - if [[ "${COPY_FINAL_RESTARTS}" == "YES" ]]; then - echo "Copying FV3 restarts for 'RUN=${RUN}' at the end of the forecast segment: ${forecast_end_cycle}" - for fv3_restart_file in "${fv3_restart_files[@]}"; do - restart_file="${forecast_end_cycle:0:8}.${forecast_end_cycle:8:2}0000.${fv3_restart_file}" - ${NCP} "${DATArestart}/FV3_RESTART/${restart_file}" \ - "${COMOUT_ATMOS_RESTART}/${restart_file}" + # Get list of FV3 restart files + local file_list fv3_file + file_list=$(FV3_restarts) + + # Copy restarts for the dates collected above to COM + for restart_date in "${restart_dates[@]}"; do + echo "Copying FV3 restarts for 'RUN=${RUN}' at ${restart_date}" + for fv3_file in ${file_list}; do + ${NCP} "${DATArestart}/FV3_RESTART/${restart_date}.${fv3_file}" \ + "${COMOUT_ATMOS_RESTART}/${restart_date}.${fv3_file}" done - fi + done + echo "SUB ${FUNCNAME[0]}: Output data for FV3 copied" } @@ -362,7 +382,7 @@ WW3_out() { CPL_out() { echo "SUB ${FUNCNAME[0]}: 
Copying output data for general cpl fields" - if [[ "${esmf_profile:-}" == ".true." ]]; then + if [[ "${esmf_profile:-.false.}" == ".true." ]]; then ${NCP} "${DATA}/ESMF_Profile.summary" "${COMOUT_ATMOS_HISTORY}/ESMF_Profile.summary" fi } @@ -406,7 +426,7 @@ MOM6_postdet() { # TODO if [[ $RUN} == "gefs" ]] block maybe be needed # to ensure it does not interfere with the GFS when ensemble is updated in the GFS if (( MEMBER > 0 )) && [[ "${ODA_INCUPD:-False}" == "True" ]]; then - ${NCP} "${COMIN_OCEAN_RESTART_PREV}/${restart_date:0:8}.${restart_date:0:8}0000.mom6_increment.nc" "${DATA}/INPUT/mom6_increment.nc" \ + ${NCP} "${COMIN_OCEAN_ANALYSIS}/mom6_increment.nc" "${DATA}/INPUT/mom6_increment.nc" \ || ( echo "FATAL ERROR: Unable to copy ensemble MOM6 increment, ABORT!"; exit 1 ) fi fi # if [[ "${RERUN}" == "NO" ]]; then diff --git a/ush/forecast_predet.sh b/ush/forecast_predet.sh index 9c0d90b1dc..9183e86002 100755 --- a/ush/forecast_predet.sh +++ b/ush/forecast_predet.sh @@ -36,6 +36,38 @@ nhour(){ echo "${hours}" } +FV3_coldstarts(){ + # Function to return an comma-separated string of cold-start input files for FV3 + # Create an array of chgres-ed FV3 files + local fv3_input_files tile_files + fv3_input_files=(gfs_ctrl.nc) + tile_files=(gfs_data sfc_data) + local nn tt + for (( nn = 1; nn <= ntiles; nn++ )); do + for tt in "${tile_files[@]}"; do + fv3_input_files+=("${tt}.tile${nn}.nc") + done + done + # Create a comma separated string from array using IFS + IFS=, echo "${fv3_input_files[*]}" +} + +FV3_restarts(){ + # Function to return an comma-separated string of warm-start input files for FV3 + # Create an array of FV3 restart files + local fv3_restart_files tile_files + fv3_restart_files=(coupler.res fv_core.res.nc) + tile_files=(fv_core.res fv_srf_wnd.res fv_tracer.res phy_data sfc_data ca_data) + local nn tt + for (( nn = 1; nn <= ntiles; nn++ )); do + for tt in "${tile_files[@]}"; do + fv3_restart_files+=("${tt}.tile${nn}.nc") + done + done + # Create a 
comma separated string from array using IFS + IFS=, echo "${fv3_restart_files[*]}" +} + # shellcheck disable=SC2034 common_predet(){ echo "SUB ${FUNCNAME[0]}: Defining variables for shared through model components" @@ -54,15 +86,20 @@ common_predet(){ current_cycle_begin=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} - ${half_window} hours" +%Y%m%d%H) current_cycle_end=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${half_window} hours" +%Y%m%d%H) next_cycle_begin=$(date --utc -d "${next_cycle:0:8} ${next_cycle:8:2} - ${half_window} hours" +%Y%m%d%H) - #Define model start date for current_cycle and next_cycle as the time the forecast will start - if [[ "${DOIAU:-}" == "YES" ]]; then + forecast_end_cycle=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${FHMAX} hours" +%Y%m%d%H) + + # Define model start date for current_cycle and next_cycle as the time the forecast will start + if [[ "${DOIAU:-NO}" == "YES" ]]; then model_start_date_current_cycle="${current_cycle_begin}" model_start_date_next_cycle="${next_cycle_begin}" else - model_start_date_current_cycle=${current_cycle} + if [[ "${REPLAY_ICS:-NO}" == "YES" ]]; then + model_start_date_current_cycle=${current_cycle_end} + else + model_start_date_current_cycle=${current_cycle} + fi model_start_date_next_cycle=${next_cycle} - fi - forecast_end_cycle=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${FHMAX} hours" +%Y%m%d%H) + fi FHMIN=${FHMIN:-0} FHMAX=${FHMAX:-9} diff --git a/ush/parsing_namelists_WW3.sh b/ush/parsing_namelists_WW3.sh index a01d694710..c57a90e50a 100755 --- a/ush/parsing_namelists_WW3.sh +++ b/ush/parsing_namelists_WW3.sh @@ -18,7 +18,7 @@ WW3_namelists(){ fi # Set time stamps for model start and output # For special case when IAU is on but this is an initial half cycle - if [ $IAU_OFFSET = 0 ]; then + if [ ${IAU_OFFSET:-0} = 0 ]; then ymdh_beg=$YMDH else ymdh_beg=$($NDATE -$WAVHINDH $YMDH) diff --git a/ush/wave_outp_spec.sh b/ush/wave_outp_spec.sh 
index db9997fd54..159d0eb2cf 100755 --- a/ush/wave_outp_spec.sh +++ b/ush/wave_outp_spec.sh @@ -31,6 +31,7 @@ source "${USHgfs}/preamble.sh" workdir=$4 YMDHE=$($NDATE $FHMAX_WAV_PNT $CDATE) + model_start_date=$(${NDATE} ${OFFSET_START_HOUR} "${PDY}${cyc}") cd $workdir @@ -196,7 +197,7 @@ source "${USHgfs}/preamble.sh" if [ -f $outfile ] then - if [ "${ymdh}" = "${CDATE}" ] + if [ "${ymdh}" = "${model_start_date}" ] then if [ "$specdir" = "bull" ] then @@ -237,6 +238,6 @@ source "${USHgfs}/preamble.sh" # 3.b Clean up the rest cd .. -rm -rf ${specdir}_${bloc} +rm -rf "${specdir}_${bloc}" # End of ww3_outp_spec.sh ---------------------------------------------------- # diff --git a/workflow/rocoto/gefs_tasks.py b/workflow/rocoto/gefs_tasks.py index 99be535a55..6899a655b6 100644 --- a/workflow/rocoto/gefs_tasks.py +++ b/workflow/rocoto/gefs_tasks.py @@ -1,6 +1,7 @@ from applications.applications import AppConfig from rocoto.tasks import Tasks import rocoto.rocoto as rocoto +from datetime import datetime, timedelta class GEFSTasks(Tasks): @@ -9,49 +10,61 @@ def __init__(self, app_config: AppConfig, cdump: str) -> None: super().__init__(app_config, cdump) def stage_ic(self): - cpl_ic = self._configs['stage_ic'] - deps = [] - + dtg_prefix = "@Y@m@d.@H0000" + offset = str(self._configs['base']['OFFSET_START_HOUR']).zfill(2) + ":00:00" # Atm ICs if self.app_config.do_atm: - prefix = f"{cpl_ic['BASE_CPLIC']}/{cpl_ic['CPL_ATMIC']}/@Y@m@d@H/mem000/atmos" - for file in ['gfs_ctrl.nc'] + \ - [f'{datatype}_data.tile{tile}.nc' - for datatype in ['gfs', 'sfc'] - for tile in range(1, self.n_tiles + 1)]: - data = f"{prefix}/{file}" - dep_dict = {'type': 'data', 'data': data} + prefix = f"{cpl_ic['BASE_CPLIC']}/{cpl_ic['CPL_ATMIC']}/@Y@m@d@H/mem000/atmos/" + if self._base['EXP_WARM_START']: + for file in ['fv_core.res.nc'] + \ + [f'{datatype}.tile{tile}.nc' + for datatype in ['ca_data', 'fv_core.res', 'fv_srf_wnd.res', 'fv_tracer.res', 'phy_data', 'sfc_data'] + for tile in range(1, 
self.n_tiles + 1)]: + data = [prefix, f"{dtg_prefix}.{file}"] + dep_dict = {'type': 'data', 'data': data, 'offset': [None, offset]} + deps.append(rocoto.add_dependency(dep_dict)) + prefix = f"{cpl_ic['BASE_CPLIC']}/{cpl_ic['CPL_ATMIC']}/@Y@m@d@H/mem000/med/" + data = [prefix, f"{dtg_prefix}.ufs.cpld.cpl.r.nc"] + dep_dict = {'type': 'data', 'data': data, 'offset': [None, offset]} deps.append(rocoto.add_dependency(dep_dict)) + else: + for file in ['gfs_ctrl.nc'] + \ + [f'{datatype}_data.tile{tile}.nc' + for datatype in ['gfs', 'sfc'] + for tile in range(1, self.n_tiles + 1)]: + data = f"{prefix}/{file}" + dep_dict = {'type': 'data', 'data': data} + deps.append(rocoto.add_dependency(dep_dict)) # Ocean ICs if self.app_config.do_ocean: ocn_res = f"{self._base.get('OCNRES', '025'):03d}" - prefix = f"{cpl_ic['BASE_CPLIC']}/{cpl_ic['CPL_OCNIC']}/@Y@m@d@H/mem000/ocean" - data = f"{prefix}/@Y@m@d.@H0000.MOM.res.nc" - dep_dict = {'type': 'data', 'data': data} + prefix = f"{cpl_ic['BASE_CPLIC']}/{cpl_ic['CPL_OCNIC']}/@Y@m@d@H/mem000/ocean/" + data = [prefix, f"{dtg_prefix}.MOM.res.nc"] + dep_dict = {'type': 'data', 'data': data, 'offset': [None, offset]} deps.append(rocoto.add_dependency(dep_dict)) if ocn_res in ['025']: # 0.25 degree ocean model also has these additional restarts for res in [f'res_{res_index}' for res_index in range(1, 4)]: - data = f"{prefix}/@Y@m@d.@H0000.MOM.{res}.nc" - dep_dict = {'type': 'data', 'data': data} + data = [prefix, f"{dtg_prefix}.MOM.{res}.nc"] + dep_dict = {'type': 'data', 'data': data, 'offset': [None, offset]} deps.append(rocoto.add_dependency(dep_dict)) # Ice ICs if self.app_config.do_ice: - prefix = f"{cpl_ic['BASE_CPLIC']}/{cpl_ic['CPL_ICEIC']}/@Y@m@d@H/mem000/ice" - data = f"{prefix}/@Y@m@d.@H0000.cice_model.res.nc" - dep_dict = {'type': 'data', 'data': data} + prefix = f"{cpl_ic['BASE_CPLIC']}/{cpl_ic['CPL_ICEIC']}/@Y@m@d@H/mem000/ice/" + data = [prefix, f"{dtg_prefix}.cice_model.res.nc"] + dep_dict = {'type': 'data', 'data': data, 
'offset': [None, offset]} deps.append(rocoto.add_dependency(dep_dict)) # Wave ICs if self.app_config.do_wave: - prefix = f"{cpl_ic['BASE_CPLIC']}/{cpl_ic['CPL_WAVIC']}/@Y@m@d@H/mem000/wave" + prefix = f"{cpl_ic['BASE_CPLIC']}/{cpl_ic['CPL_WAVIC']}/@Y@m@d@H/mem000/wave/" for wave_grid in self._configs['waveinit']['waveGRD'].split(): - data = f"{prefix}/@Y@m@d.@H0000.restart.{wave_grid}" - dep_dict = {'type': 'data', 'data': data} + data = [prefix, f"{dtg_prefix}.restart.{wave_grid}"] + dep_dict = {'type': 'data', 'data': data, 'offset': [None, offset]} deps.append(rocoto.add_dependency(dep_dict)) dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) diff --git a/workflow/setup_expt.py b/workflow/setup_expt.py index 97d25dc15a..b44842b982 100755 --- a/workflow/setup_expt.py +++ b/workflow/setup_expt.py @@ -287,6 +287,8 @@ def _update_defaults(dict_in: dict) -> dict: data = AttrDict(host.info, **inputs.__dict__) data.HOMEgfs = _top yaml_path = inputs.yaml + if not os.path.exists(yaml_path): + raise IOError(f'YAML file does not exist, check path:' + yaml_path) yaml_dict = _update_defaults(AttrDict(parse_j2yaml(yaml_path, data))) # First update config.base From ebacebfbe458634b8c80af6a735d6b6d01e4e406 Mon Sep 17 00:00:00 2001 From: RussTreadon-NOAA <26926959+RussTreadon-NOAA@users.noreply.github.com> Date: Thu, 13 Jun 2024 11:20:24 -0400 Subject: [PATCH 06/20] Update gdas.cd and gsi_utils hashes (#2641) This PR updates the `sorc/gdas.cd` and `sorc/gsi_utils` hashes. The updated hashes bring in bug fixes, new UFS DA functionality, and a Gaea build for gsi_utils. 
Resolves #2640 --- parm/archive/enkf.yaml.j2 | 2 +- sorc/gdas.cd | 2 +- sorc/gsi_utils.fd | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/parm/archive/enkf.yaml.j2 b/parm/archive/enkf.yaml.j2 index e05aca2981..ed50a1749d 100644 --- a/parm/archive/enkf.yaml.j2 +++ b/parm/archive/enkf.yaml.j2 @@ -16,7 +16,7 @@ enkf: {% endfor %} {% if DO_JEDIATMENS %} - {% set steps = ["atmensanlinit", "atmensanlrun", "atmensanlfinal"] %} + {% set steps = ["atmensanlinit", "atmensanlletkf", "atmensanlfv3inc", "atmensanlfinal"] %} {% else %} {% set steps = ["eobs", "eupd"] %} {% if lobsdiag_forenkf %} diff --git a/sorc/gdas.cd b/sorc/gdas.cd index 3e50a8fdcd..2bd8ffc149 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ -Subproject commit 3e50a8fdcd07305a3464a02e20eaf4b033179167 +Subproject commit 2bd8ffc149f30c0ae0f8e1508477a8dc88c46a65 diff --git a/sorc/gsi_utils.fd b/sorc/gsi_utils.fd index d940406161..4332814529 160000 --- a/sorc/gsi_utils.fd +++ b/sorc/gsi_utils.fd @@ -1 +1 @@ -Subproject commit d9404061611553459394173c3ff33116db306326 +Subproject commit 4332814529465ab8eb58e43a38227b952ebfca49 From dc21eac6c3941d7f30803891d91d82f4cc1f8183 Mon Sep 17 00:00:00 2001 From: TerrenceMcGuinness-NOAA Date: Thu, 13 Jun 2024 11:41:14 -0400 Subject: [PATCH 07/20] Add Hercules-EMC to the Jenkins configurable parameter list (#2685) This quick-fix PR is to update the Jenkins Pipeline's configurable parameter list to include the **Hercules-EMC** node. This allows Jenkins users to restart Jobs in the controller when no updates have been made. 
--- ci/Jenkinsfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ci/Jenkinsfile b/ci/Jenkinsfile index 317cb7d820..263de10c6d 100644 --- a/ci/Jenkinsfile +++ b/ci/Jenkinsfile @@ -76,7 +76,7 @@ pipeline { Machine = machine[0].toUpperCase() + machine.substring(1) echo "Getting Common Workspace for ${Machine}" ws("${custom_workspace[machine]}/${env.CHANGE_ID}") { - properties([parameters([[$class: 'NodeParameterDefinition', allowedSlaves: ['built-in', 'Hera-EMC', 'Orion-EMC'], defaultSlaves: ['built-in'], name: '', nodeEligibility: [$class: 'AllNodeEligibility'], triggerIfResult: 'allCases']])]) + properties([parameters([[$class: 'NodeParameterDefinition', allowedSlaves: ['built-in', 'Hercules-EMC' 'Hera-EMC', 'Orion-EMC'], defaultSlaves: ['built-in'], name: '', nodeEligibility: [$class: 'AllNodeEligibility'], triggerIfResult: 'allCases']])]) HOME = "${WORKSPACE}" sh(script: "mkdir -p ${HOME}/RUNTESTS;rm -Rf ${HOME}/RUNTESTS/*") sh(script: """${GH} pr edit ${env.CHANGE_ID} --repo ${repo_url} --add-label "CI-${Machine}-Building" --remove-label "CI-${Machine}-Ready" """) From 603a4a8052a5c43ce5986f028c3fcfd5fd248ad4 Mon Sep 17 00:00:00 2001 From: TerrenceMcGuinness-NOAA Date: Thu, 13 Jun 2024 12:22:03 -0400 Subject: [PATCH 08/20] Update Jenkinsfile needed a comma --- ci/Jenkinsfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ci/Jenkinsfile b/ci/Jenkinsfile index 263de10c6d..c6aa0887c7 100644 --- a/ci/Jenkinsfile +++ b/ci/Jenkinsfile @@ -76,7 +76,7 @@ pipeline { Machine = machine[0].toUpperCase() + machine.substring(1) echo "Getting Common Workspace for ${Machine}" ws("${custom_workspace[machine]}/${env.CHANGE_ID}") { - properties([parameters([[$class: 'NodeParameterDefinition', allowedSlaves: ['built-in', 'Hercules-EMC' 'Hera-EMC', 'Orion-EMC'], defaultSlaves: ['built-in'], name: '', nodeEligibility: [$class: 'AllNodeEligibility'], triggerIfResult: 'allCases']])]) + properties([parameters([[$class: 'NodeParameterDefinition', 
allowedSlaves: ['built-in', 'Hercules-EMC', 'Hera-EMC', 'Orion-EMC'], defaultSlaves: ['built-in'], name: '', nodeEligibility: [$class: 'AllNodeEligibility'], triggerIfResult: 'allCases']])]) HOME = "${WORKSPACE}" sh(script: "mkdir -p ${HOME}/RUNTESTS;rm -Rf ${HOME}/RUNTESTS/*") sh(script: """${GH} pr edit ${env.CHANGE_ID} --repo ${repo_url} --add-label "CI-${Machine}-Building" --remove-label "CI-${Machine}-Ready" """) From 5a5fc2be7555f094a0f90fd3a3df22d071ccdfd4 Mon Sep 17 00:00:00 2001 From: Jessica Meixner Date: Fri, 14 Jun 2024 11:04:41 -0400 Subject: [PATCH 09/20] Remove ocean daily files (#2689) This PR removes the ocn_daily files that are produced by the ocean component. These files can be recreated by averaging data that exists in the 6 hour aveaged files if needed. Fixes https://github.com/NOAA-EMC/global-workflow/issues/2675 Fixes https://github.com/NOAA-EMC/global-workflow/issues/2659 (by removing them and making this obsolete) --- parm/archive/master_gfs.yaml.j2 | 1 - parm/archive/ocean_daily.yaml.j2 | 8 -------- parm/ufs/fv3/diag_table | 20 -------------------- ush/forecast_postdet.sh | 7 ------- 4 files changed, 36 deletions(-) delete mode 100644 parm/archive/ocean_daily.yaml.j2 diff --git a/parm/archive/master_gfs.yaml.j2 b/parm/archive/master_gfs.yaml.j2 index e96def7a03..67cde482a2 100644 --- a/parm/archive/master_gfs.yaml.j2 +++ b/parm/archive/master_gfs.yaml.j2 @@ -56,7 +56,6 @@ datasets: # Ocean forecasts {% filter indent(width=4) %} {% include "ocean_6hravg.yaml.j2" %} -{% include "ocean_daily.yaml.j2" %} {% include "ocean_grib2.yaml.j2" %} {% include "gfs_flux_1p00.yaml.j2" %} {% endfilter %} diff --git a/parm/archive/ocean_daily.yaml.j2 b/parm/archive/ocean_daily.yaml.j2 deleted file mode 100644 index 0f45264973..0000000000 --- a/parm/archive/ocean_daily.yaml.j2 +++ /dev/null @@ -1,8 +0,0 @@ -ocean_daily: - {% set head = "gfs.ocean.t" + cycle_HH + "z." 
%} - name: "OCEAN_DAILY" - target: "{{ ATARDIR }}/{{ cycle_YMDH }}/ocean_daily.tar" - required: - {% for fhr in range(24, FHMAX_GFS + 24, 24) %} - - "{{ COM_OCEAN_HISTORY | relpath(ROTDIR) }}/{{ head }}daily.f{{ '%03d' % fhr }}.nc" - {% endfor %} diff --git a/parm/ufs/fv3/diag_table b/parm/ufs/fv3/diag_table index 83991cb223..dad8b6fac6 100644 --- a/parm/ufs/fv3/diag_table +++ b/parm/ufs/fv3/diag_table @@ -1,7 +1,6 @@ "fv3_history", 0, "hours", 1, "hours", "time" "fv3_history2d", 0, "hours", 1, "hours", "time" "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr", @[FHOUT_OCN], "hours", 1, "hours", "time", @[FHOUT_OCN], "hours", "@[SYEAR] @[SMONTH] @[SDAY] @[CHOUR] 0 0" -"@[MOM6_OUTPUT_DIR]/ocn_daily%4yr%2mo%2dy", 1, "days", 1, "days", "time", 1, "days", "@[SYEAR] @[SMONTH] @[SDAY] @[CHOUR] 0 0" ############## # Ocean fields @@ -57,25 +56,6 @@ "ocean_model", "LwLatSens", "LwLatSens", "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2 "ocean_model", "Heat_PmE", "Heat_PmE", "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2 -# Daily fields -"ocean_model", "geolon", "geolon", "@[MOM6_OUTPUT_DIR]/ocn_daily%4yr%2mo%2dy", "all", .false., "none", 2 -"ocean_model", "geolat", "geolat", "@[MOM6_OUTPUT_DIR]/ocn_daily%4yr%2mo%2dy", "all", .false., "none", 2 -"ocean_model", "geolon_c", "geolon_c", "@[MOM6_OUTPUT_DIR]/ocn_daily%4yr%2mo%2dy", "all", .false., "none", 2 -"ocean_model", "geolat_c", "geolat_c", "@[MOM6_OUTPUT_DIR]/ocn_daily%4yr%2mo%2dy", "all", .false., "none", 2 -"ocean_model", "geolon_u", "geolon_u", "@[MOM6_OUTPUT_DIR]/ocn_daily%4yr%2mo%2dy", "all", .false., "none", 2 -"ocean_model", "geolat_u", "geolat_u", "@[MOM6_OUTPUT_DIR]/ocn_daily%4yr%2mo%2dy", "all", .false., "none", 2 -"ocean_model", "geolon_v", "geolon_v", "@[MOM6_OUTPUT_DIR]/ocn_daily%4yr%2mo%2dy", "all", .false., "none", 2 -"ocean_model", "geolat_v", "geolat_v", "@[MOM6_OUTPUT_DIR]/ocn_daily%4yr%2mo%2dy", "all", .false., "none", 2 -"ocean_model", "SST", "sst", 
"@[MOM6_OUTPUT_DIR]/ocn_daily%4yr%2mo%2dy", "all", .true., "none", 2 -"ocean_model", "latent", "latent", "@[MOM6_OUTPUT_DIR]/ocn_daily%4yr%2mo%2dy", "all", .true., "none", 2 -"ocean_model", "sensible", "sensible", "@[MOM6_OUTPUT_DIR]/ocn_daily%4yr%2mo%2dy", "all", .true., "none", 2 -"ocean_model", "SW", "SW", "@[MOM6_OUTPUT_DIR]/ocn_daily%4yr%2mo%2dy", "all", .true., "none", 2 -"ocean_model", "LW", "LW", "@[MOM6_OUTPUT_DIR]/ocn_daily%4yr%2mo%2dy", "all", .true., "none", 2 -"ocean_model", "evap", "evap", "@[MOM6_OUTPUT_DIR]/ocn_daily%4yr%2mo%2dy", "all", .true., "none", 2 -"ocean_model", "lprec", "lprec", "@[MOM6_OUTPUT_DIR]/ocn_daily%4yr%2mo%2dy", "all", .true., "none", 2 -"ocean_model", "taux", "taux", "@[MOM6_OUTPUT_DIR]/ocn_daily%4yr%2mo%2dy", "all", .true., "none", 2 -"ocean_model", "tauy", "tauy", "@[MOM6_OUTPUT_DIR]/ocn_daily%4yr%2mo%2dy", "all", .true., "none", 2 - ################### # Atmosphere fields ################### diff --git a/ush/forecast_postdet.sh b/ush/forecast_postdet.sh index a4e10dffb9..2cc34eaacd 100755 --- a/ush/forecast_postdet.sh +++ b/ush/forecast_postdet.sh @@ -455,13 +455,6 @@ MOM6_postdet() { dest_file="${RUN}.ocean.t${cyc}z.${interval}hr_avg.f${fhr3}.nc" ${NLN} "${COMOUT_OCEAN_HISTORY}/${dest_file}" "${DATA}/MOM6_OUTPUT/${source_file}" - # Daily output - if (( fhr > 0 & fhr % 24 == 0 )); then - source_file="ocn_daily_${vdate:0:4}_${vdate:4:2}_${vdate:6:2}.nc" - dest_file="${RUN}.ocean.t${cyc}z.daily.f${fhr3}.nc" - ${NLN} "${COMOUT_OCEAN_HISTORY}/${dest_file}" "${DATA}/MOM6_OUTPUT/${source_file}" - fi - last_fhr=${fhr} done From 6c93b4554e235fcb4d0004e99a4c4498d55d461b Mon Sep 17 00:00:00 2001 From: Yaping Wang <49168260+ypwang19@users.noreply.github.com> Date: Fri, 14 Jun 2024 10:18:17 -0500 Subject: [PATCH 10/20] Add observation preparation job for aerosols DA to workflow (#2624) Add a prepaeroobs job to prepare aerosol obs files for DA. 
This job does quality control of the VIIRS aerosol raw observations and convert them to ioda format. Resolves #2623 --------- Co-authored-by: ypwang19 Co-authored-by: TerrenceMcGuinness-NOAA Co-authored-by: Cory Martin Co-authored-by: David Huber <69919478+DavidHuber-NOAA@users.noreply.github.com> --- env/HERA.env | 7 + env/HERCULES.env | 6 + env/JET.env | 7 + env/ORION.env | 7 + env/S4.env | 7 + env/WCOSS2.env | 7 + jobs/JGLOBAL_PREP_OBS_AERO | 43 +++++ jobs/rocoto/prepobsaero.sh | 24 +++ parm/archive/arcdir.yaml.j2 | 5 + parm/archive/gdas.yaml.j2 | 4 + parm/archive/gfsa.yaml.j2 | 4 + parm/config/gfs/config.base | 1 + parm/config/gfs/config.prepobsaero | 17 ++ parm/config/gfs/config.resources | 10 +- scripts/exglobal_archive.py | 4 +- scripts/exglobal_prep_obs_aero.py | 23 +++ ush/python/pygfs/task/aero_prepobs.py | 236 ++++++++++++++++++++++++++ workflow/applications/applications.py | 1 + workflow/applications/gfs_cycled.py | 6 + workflow/rocoto/gfs_tasks.py | 25 +++ 20 files changed, 441 insertions(+), 3 deletions(-) create mode 100755 jobs/JGLOBAL_PREP_OBS_AERO create mode 100755 jobs/rocoto/prepobsaero.sh create mode 100644 parm/config/gfs/config.prepobsaero create mode 100755 scripts/exglobal_prep_obs_aero.py create mode 100644 ush/python/pygfs/task/aero_prepobs.py diff --git a/env/HERA.env b/env/HERA.env index 2157e90031..94bab36703 100755 --- a/env/HERA.env +++ b/env/HERA.env @@ -102,6 +102,13 @@ elif [[ "${step}" = "atmanlfv3inc" ]]; then [[ ${NTHREADS_ATMANLFV3INC} -gt ${nth_max} ]] && export NTHREADS_ATMANLFV3INC=${nth_max} export APRUN_ATMANLFV3INC="${launcher} -n ${npe_atmanlfv3inc} --cpus-per-task=${NTHREADS_ATMANLFV3INC}" +elif [[ "${step}" = "prepobsaero" ]]; then + + nth_max=$((npe_node_max / npe_node_prepobsaero)) + + export NTHREADS_PREPOBSAERO=${nth_prepobsaero:-1} + export APRUN_PREPOBSAERO="${launcher} -n ${npe_prepobsaero} --cpus-per-task=${NTHREADS_PREPOBSAERO}" + elif [[ "${step}" = "snowanl" ]]; then nth_max=$((npe_node_max / 
npe_node_snowanl)) diff --git a/env/HERCULES.env b/env/HERCULES.env index 0824ba913a..d43dedad8d 100755 --- a/env/HERCULES.env +++ b/env/HERCULES.env @@ -99,6 +99,12 @@ case ${step} in [[ ${NTHREADS_AEROANL} -gt ${nth_max} ]] && export NTHREADS_AEROANL=${nth_max} export APRUN_AEROANL="${launcher} -n ${npe_aeroanlrun} --cpus-per-task=${NTHREADS_AEROANL}" ;; + "prepobsaero") + nth_max=$((npe_node_max / npe_node_prepobsaero)) + + export NTHREADS_PREPOBSAERO=${nth_prepobsaero:-1} + export APRUN_PREPOBSAERO="${launcher} -n ${npe_prepobsaero} --cpus-per-task=${NTHREADS_PREPOBSAERO}" +;; "snowanl") nth_max=$((npe_node_max / npe_node_snowanl)) diff --git a/env/JET.env b/env/JET.env index 5bd88dc93a..668ec1c2e4 100755 --- a/env/JET.env +++ b/env/JET.env @@ -82,6 +82,13 @@ elif [[ "${step}" = "aeroanlrun" ]]; then [[ ${NTHREADS_AEROANL} -gt ${nth_max} ]] && export NTHREADS_AEROANL=${nth_max} export APRUN_AEROANL="${launcher} -n ${npe_aeroanlrun}" +elif [[ "${step}" = "prepobsaero" ]]; then + + nth_max=$((npe_node_max / npe_node_prepobsaero)) + + export NTHREADS_PREPOBSAERO=${nth_prepobsaero:-1} + export APRUN_PREPOBSAERO="${launcher} -n ${npe_prepobsaero} --cpus-per-task=${NTHREADS_PREPOBSAERO}" + elif [[ "${step}" = "snowanl" ]]; then nth_max=$((npe_node_max / npe_node_snowanl)) diff --git a/env/ORION.env b/env/ORION.env index f701e55aa2..afd1cda052 100755 --- a/env/ORION.env +++ b/env/ORION.env @@ -90,6 +90,13 @@ elif [[ "${step}" = "aeroanlrun" ]]; then [[ ${NTHREADS_AEROANL} -gt ${nth_max} ]] && export NTHREADS_AEROANL=${nth_max} export APRUN_AEROANL="${launcher} -n ${npe_aeroanlrun} --cpus-per-task=${NTHREADS_AEROANL}" +elif [[ "${step}" = "prepobsaero" ]]; then + + nth_max=$((npe_node_max / npe_node_prepobsaero)) + + export NTHREADS_PREPOBSAERO=${nth_prepobsaero:-1} + export APRUN_PREPOBSAERO="${launcher} -n ${npe_prepobsaero} --cpus-per-task=${NTHREADS_PREPOBSAERO}" + elif [[ "${step}" = "snowanl" ]]; then nth_max=$((npe_node_max / npe_node_snowanl)) diff --git 
a/env/S4.env b/env/S4.env index 9ba3a61b01..8a368bf1d6 100755 --- a/env/S4.env +++ b/env/S4.env @@ -82,6 +82,13 @@ elif [[ "${step}" = "aeroanlrun" ]]; then [[ ${NTHREADS_AEROANL} -gt ${nth_max} ]] && export NTHREADS_AEROANL=${nth_max} export APRUN_AEROANL="${launcher} -n ${npe_aeroanlrun}" +elif [[ "${step}" = "prepobsaero" ]]; then + + nth_max=$((npe_node_max / npe_node_prepobsaero)) + + export NTHREADS_PREPOBSAERO=${nth_prepobsaero:-1} + export APRUN_PREPOBSAERO="${launcher} -n ${npe_prepobsaero} --cpus-per-task=${NTHREADS_PREPOBSAERO}" + elif [[ "${step}" = "snowanl" ]]; then nth_max=$((npe_node_max / npe_node_snowanl)) diff --git a/env/WCOSS2.env b/env/WCOSS2.env index 0876e4127d..9fe9179e6b 100755 --- a/env/WCOSS2.env +++ b/env/WCOSS2.env @@ -76,6 +76,13 @@ elif [[ "${step}" = "aeroanlrun" ]]; then [[ ${NTHREADS_AEROANL} -gt ${nth_max} ]] && export NTHREADS_AEROANL=${nth_max} export APRUN_AEROANL="${launcher} -n ${npe_aeroanlrun}" +elif [[ "${step}" = "prepobsaero" ]]; then + + nth_max=$((npe_node_max / npe_node_prepaeroobs)) + + export NTHREADS_PREPOBSAERO=${nth_prepobsaero:-1} + export APRUN_PREPOBSAERO="${launcher} -n ${npe_prepobsaero} --ppn ${npe_node_prepobsaero}--cpu-bind depth --depth=${NTHREADS_PREPOBSAERO}" + elif [[ "${step}" = "snowanl" ]]; then nth_max=$((npe_node_max / npe_node_snowanl)) diff --git a/jobs/JGLOBAL_PREP_OBS_AERO b/jobs/JGLOBAL_PREP_OBS_AERO new file mode 100755 index 0000000000..7fe701898f --- /dev/null +++ b/jobs/JGLOBAL_PREP_OBS_AERO @@ -0,0 +1,43 @@ +#! 
/usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" +source "${HOMEgfs}/ush/jjob_header.sh" -e "prepobsaero" -c "base prepobsaero" + +############################################## +# Set variables used in the script +############################################## + +export COMIN_OBS="${DATA}" +YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COMOUT_OBS:COM_OBS_TMPL + +############################################## +# Begin JOB SPECIFIC work +############################################## + +############################################################### +# Run relevant script + +EXSCRIPT=${GDASPREPAEROOBSPY:-${SCRgfs}/exglobal_prep_obs_aero.py} +${EXSCRIPT} +status=$? +[[ ${status} -ne 0 ]] && exit "${status}" + + +############################################## +# End JOB SPECIFIC work +############################################## + +############################################## +# Final processing +############################################## +if [[ -e "${pgmout}" ]] ; then + cat "${pgmout}" +fi + +########################################## +# Remove the Temporary working directory +########################################## +cd "${DATAROOT}" || exit +[[ "${KEEPDATA}" = "NO" ]] && rm -rf "${DATA}" + +exit 0 diff --git a/jobs/rocoto/prepobsaero.sh b/jobs/rocoto/prepobsaero.sh new file mode 100755 index 0000000000..89da7547e8 --- /dev/null +++ b/jobs/rocoto/prepobsaero.sh @@ -0,0 +1,24 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" + +############################################################### +# Source UFSDA workflow modules +. "${HOMEgfs}/ush/load_ufsda_modules.sh" +status=$? 
+[[ ${status} -ne 0 ]] && exit "${status}" + +export job="prepobsaero" +export jobid="${job}.$$" + +############################################################### +# setup python path for workflow utilities and tasks +wxflowPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/wxflow/src" +PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${wxflowPATH}" +export PYTHONPATH + +############################################################### +# Execute the JJOB +"${HOMEgfs}/jobs/JGLOBAL_PREP_OBS_AERO" +status=$? +exit "${status}" diff --git a/parm/archive/arcdir.yaml.j2 b/parm/archive/arcdir.yaml.j2 index fab93cc2a4..1ed7422761 100644 --- a/parm/archive/arcdir.yaml.j2 +++ b/parm/archive/arcdir.yaml.j2 @@ -47,6 +47,11 @@ Deterministic: &deterministic - ["{{ COM_CHEM_ANALYSIS }}/{{ head }}aerostat", "{{ ARCDIR }}/aerostat.{{ RUN }}.{{ cycle_YMDH }}"] {% endif %} + {% if DO_PREP_OBS_AERO %} + - ["{{ COM_OBS }}/{{ head }}aeroobs", "{{ ARCDIR }}/aeroobs.{{ RUN }}.{{ cycle_YMDH }}"] + - ["{{ COM_OBS }}/{{ head }}aerorawobs", "{{ ARCDIR }}/aerorawobs.{{ RUN }}.{{ cycle_YMDH }}"] + {% endif %} + {% endif %} {% if RUN == "gfs" %} diff --git a/parm/archive/gdas.yaml.j2 b/parm/archive/gdas.yaml.j2 index fd5e074111..26540156cd 100644 --- a/parm/archive/gdas.yaml.j2 +++ b/parm/archive/gdas.yaml.j2 @@ -69,6 +69,10 @@ gdas: {% if AERO_ANL_CDUMP == "gdas" or AERO_ANL_CDUMP == "both" %} - "{{ COM_CHEM_ANALYSIS | relpath(ROTDIR) }}/{{ head }}aerostat" {% endif %} + {% if DO_PREP_OBS_AERO %} + - "{{ COM_OBS | relpath(ROTDIR) }}/{{ head }}aeroobs" + - "{{ COM_OBS | relpath(ROTDIR) }}/{{ head }}aerorawobs" + {% endif %} {% if DO_JEDISNOWDA %} - "{{ COM_SNOW_ANALYSIS | relpath(ROTDIR) }}/{{ head }}snowstat.tgz" {% endif %} diff --git a/parm/archive/gfsa.yaml.j2 b/parm/archive/gfsa.yaml.j2 index df90a1a71e..0a8e65d3ef 100644 --- a/parm/archive/gfsa.yaml.j2 +++ b/parm/archive/gfsa.yaml.j2 @@ -40,6 +40,10 @@ gfsa: {% if AERO_ANL_CDUMP == "gfs" or AERO_ANL_CDUMP == "both" %} - "{{ COM_CHEM_ANALYSIS | 
relpath(ROTDIR) }}/{{ head }}aerostat" {% endif %} + {% if DO_PREP_OBS_AERO %} + - "{{ COM_OBS | relpath(ROTDIR) }}/{{ head }}aeroobs" + - "{{ COM_OBS | relpath(ROTDIR) }}/{{ head }}aerorawobs" + {% endif %} # BUFR inputs - "{{ COM_OBS | relpath(ROTDIR) }}/{{ head }}nsstbufr" diff --git a/parm/config/gfs/config.base b/parm/config/gfs/config.base index 6cc1b6d744..fd1ffdc415 100644 --- a/parm/config/gfs/config.base +++ b/parm/config/gfs/config.base @@ -178,6 +178,7 @@ export DO_WAVE="NO" export DO_OCN="NO" export DO_ICE="NO" export DO_AERO="NO" +export DO_PREP_OBS_AERO="NO" export AERO_FCST_CDUMP="" # When to run aerosol forecast: gdas, gfs, or both export AERO_ANL_CDUMP="" # When to run aerosol analysis: gdas, gfs, or both export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both diff --git a/parm/config/gfs/config.prepobsaero b/parm/config/gfs/config.prepobsaero new file mode 100644 index 0000000000..f70138991c --- /dev/null +++ b/parm/config/gfs/config.prepobsaero @@ -0,0 +1,17 @@ +#!/bin/bash -x + +########## config.prepobsaero ########## +# Prepare and thin/superob aerosol observations + +echo "BEGIN: config.prepobsaero" + +# Get task specific resources +source "${EXPDIR}/config.resources" prepobsaero + +export OBSPROCYAML="${PARMgfs}/gdas/aero/obs/lists/gdas_aero_obsproc.yaml.j2" +export OBSPROCEXE="${EXECgfs}/gdas_obsprovider2ioda.x" +export VIIRS_DATA_DIR="/scratch2/NCEPDEV/stmp3/Yaping.Wang/VIIRS/AWS/" +export SENSORS="npp,n20" + + +echo "END: config.prepaeroobs" diff --git a/parm/config/gfs/config.resources b/parm/config/gfs/config.resources index 0972f74f9c..1d1fd2e3c1 100644 --- a/parm/config/gfs/config.resources +++ b/parm/config/gfs/config.resources @@ -13,7 +13,7 @@ if (( $# != 1 )); then echo "atmanlinit atmanlvar atmanlfv3inc atmanlfinal" echo "atmensanlinit atmensanlletkf atmensanlfv3inc atmensanlfinal" echo "snowanl" - echo "aeroanlinit aeroanlrun aeroanlfinal" + echo "prepobsaero aeroanlinit aeroanlrun aeroanlfinal" echo "anal sfcanl 
analcalc analdiag fcst echgres" echo "upp atmos_products" echo "tracker genesis genesis_fsu" @@ -289,6 +289,14 @@ case ${step} in export npe_node_snowanl=$(( npe_node_max / nth_snowanl )) ;; + "prepobsaero") + export wtime_prepobsaero="00:30:00" + export npe_prepobsaero=1 + export nth_prepobsaero=1 + export npe_node_prepobsaero=1 + export memory_prepobsaero="96GB" + ;; + "aeroanlinit") # below lines are for creating JEDI YAML case ${CASE} in diff --git a/scripts/exglobal_archive.py b/scripts/exglobal_archive.py index e38d0abf72..bcd8d522d9 100755 --- a/scripts/exglobal_archive.py +++ b/scripts/exglobal_archive.py @@ -19,8 +19,8 @@ def main(): # Pull out all the configuration keys needed to run the rest of archive steps keys = ['ATARDIR', 'current_cycle', 'FHMIN', 'FHMAX', 'FHOUT', 'RUN', 'PDY', - 'DO_VERFRAD', 'DO_VMINMON', 'DO_VERFOZN', 'DO_ICE', 'DO_AERO', 'PARMgfs', - 'DO_OCN', 'DO_WAVE', 'WRITE_DOPOST', 'PSLOT', 'HPSSARCH', 'DO_MOS', + 'DO_VERFRAD', 'DO_VMINMON', 'DO_VERFOZN', 'DO_ICE', 'DO_AERO', 'DO_PREP_OBS_AERO', + 'PARMgfs', 'DO_OCN', 'DO_WAVE', 'WRITE_DOPOST', 'PSLOT', 'HPSSARCH', 'DO_MOS', 'DO_JEDISNOWDA', 'LOCALARCH', 'REALTIME', 'ROTDIR', 'ARCH_WARMICFREQ', 'ARCH_FCSTICFREQ', 'ARCH_CYC', 'assim_freq', 'ARCDIR', 'SDATE', 'FHMIN_GFS', 'FHMAX_GFS', 'FHOUT_GFS', 'ARCH_GAUSSIAN', 'MODE', diff --git a/scripts/exglobal_prep_obs_aero.py b/scripts/exglobal_prep_obs_aero.py new file mode 100755 index 0000000000..08548e6874 --- /dev/null +++ b/scripts/exglobal_prep_obs_aero.py @@ -0,0 +1,23 @@ +#!/usr/bin/env python3 +# exglobal_prep_obs_aero.py +# This script collect available viirs +# obs files, combine and preprocess +# them. 
+import os + +from wxflow import Logger, cast_strdict_as_dtypedict +from pygfs.task.aero_prepobs import AerosolObsPrep + +# Initialize root logger +logger = Logger(level='DEBUG', colored_log=True) + + +if __name__ == '__main__': + + # Take configuration from environment and cast it as python dictionary + config = cast_strdict_as_dtypedict(os.environ) + + AeroObs = AerosolObsPrep(config) + AeroObs.initialize() + AeroObs.runConverter() + AeroObs.finalize() diff --git a/ush/python/pygfs/task/aero_prepobs.py b/ush/python/pygfs/task/aero_prepobs.py new file mode 100644 index 0000000000..f2344241a9 --- /dev/null +++ b/ush/python/pygfs/task/aero_prepobs.py @@ -0,0 +1,236 @@ +#!/usr/bin/env python3 + +import os +import glob +import gzip +import tarfile +import re +from logging import getLogger +from typing import List, Dict, Any, Union + +from wxflow import (AttrDict, FileHandler, rm_p, rmdir, + Task, add_to_datetime, to_timedelta, to_datetime, + datetime_to_YMD, + chdir, Executable, WorkflowException, + parse_j2yaml, save_as_yaml, logit) + +logger = getLogger(__name__.split('.')[-1]) + + +class AerosolObsPrep(Task): + """ + Class for preparing and managing aerosol observations + """ + def __init__(self, config: Dict[str, Any]) -> None: + super().__init__(config) + + _window_begin = add_to_datetime(self.runtime_config.current_cycle, -to_timedelta(f"{self.config['assim_freq']}H") / 2) + _window_end = add_to_datetime(self.runtime_config.current_cycle, +to_timedelta(f"{self.config['assim_freq']}H") / 2) + + local_dict = AttrDict( + { + 'window_begin': _window_begin, + 'window_end': _window_end, + 'sensors': str(self.config['SENSORS']).split(','), + 'data_dir': self.config['VIIRS_DATA_DIR'], + 'input_files': '', + 'OPREFIX': f"{self.runtime_config.RUN}.t{self.runtime_config.cyc:02d}z.", + 'APREFIX': f"{self.runtime_config.RUN}.t{self.runtime_config.cyc:02d}z." 
+ } + ) + + # task_config is everything that this task should need + self.task_config = AttrDict(**self.config, **self.runtime_config, **local_dict) + + @logit(logger) + def initialize(self) -> None: + """ + List needed raw obs files. + Copy the raw obs files to $DATA/obs. + Link over the needed executable. + Generate corresponding YAML file. + Run IODA converter. + """ + self.task_config.DATA_OBS = os.path.join(self.task_config.DATA, 'obs') + if os.path.exists(self.task_config.DATA_OBS): + rmdir(self.task_config.DATA_OBS) + FileHandler({'mkdir': [self.task_config.DATA_OBS]}).sync() + + self.task_config.prepaero_yaml = [] + for sensor in self.task_config.sensors: + raw_files = self.list_raw_files(sensor) + self.task_config.input_files = self.copy_obs(raw_files) + self.link_obsconvexe() + self.task_config.prepaero_config = self.get_obsproc_config(sensor) + + # generate converter YAML file + template = f"{self.runtime_config.CDUMP}.t{self.runtime_config['cyc']:02d}z.prepaero_viirs_{sensor}.yaml" + _prepaero_yaml = os.path.join(self.runtime_config.DATA, template) + self.task_config.prepaero_yaml.append(_prepaero_yaml) + logger.debug(f"Generate PrepAeroObs YAML file: {_prepaero_yaml}") + save_as_yaml(self.task_config.prepaero_config, _prepaero_yaml) + logger.info(f"Wrote PrepAeroObs YAML to: {_prepaero_yaml}") + + @logit(logger) + def list_raw_files(self, sensor) -> List[str]: + """ + List all files in the predefined directory that match the predefined sensor and within the time window. 
+ """ + if sensor == 'n20': + sensor = 'j01' + dir1 = os.path.join(self.task_config.data_dir, datetime_to_YMD(self.task_config.window_begin)) + dir2 = os.path.join(self.task_config.data_dir, datetime_to_YMD(self.task_config.window_end)) + + if dir1 == dir2: + files = os.listdir(dir1) + allfiles = [os.path.join(dir1, file) for file in files] + allfiles.sort() + else: + files_1 = os.listdir(dir1) + allfiles_1 = [os.path.join(dir1, file) for file in files_1] + files_2 = os.listdir(dir2) + allfiles_2 = [os.path.join(dir2, file) for file in files_2] + allfiles = sorted(allfiles_1, allfiles_2) + matching_files = [] + try: + for file in allfiles: + basename = os.path.basename(file) + pattern = r"s(\d{4})(\d{2})(\d{2})(\d{2})(\d{2})(\d{3})" + match = re.match(pattern, basename.split('_')[3]) + yyyy, mm, dd, HH, MM = match.group(1), match.group(2), match.group(3), match.group(4), match.group(5) + fstart = to_datetime(f'{yyyy}-{mm}-{dd}T{HH}:{MM}Z') + if sensor == basename.split('_')[2]: + # temporally select obs files based on time stamp in the filename. + if (fstart > self.task_config.window_begin) and (fstart < self.task_config.window_end): + matching_files.append(os.path.join(self.task_config.data_dir, file)) + logger.info("Found %d matching files.", len(matching_files)) + except FileNotFoundError: + logger.error("The specified file/directory does not exist.") + raise + return matching_files + + @logit(logger) + def copy_obs(self, inputfiles) -> Dict[str, Any]: + """ + Copy the raw obs files to $DATA/obs. 
+ """ + copylist = [] + destlist = [] + for filename in inputfiles: + basename = os.path.basename(filename) + dest = os.path.join(self.task_config.DATA_OBS, basename) + copylist.append([filename, dest]) + destlist.append(dest) + FileHandler({'copy': copylist}).sync() + + return destlist + + @logit(logger) + def get_obsproc_config(self, sensor) -> Dict[str, Any]: + """ + Compile a dictionary of obs proc configuration from OBSPROCYAML template file + Parameters + ---------- + Returns + ---------- + obsproc_config : Dict + a dictionary containing the fully rendered obs proc yaml configuration + """ + self.task_config.sensor = sensor + # generate JEDI YAML file + logger.info(f"Generate gdas_obsprovider2ioda YAML config: {self.task_config.OBSPROCYAML}") + prepaero_config = parse_j2yaml(self.task_config.OBSPROCYAML, self.task_config) + + return prepaero_config + + @logit(logger) + def link_obsconvexe(self) -> None: + """ + This method links the gdas executable to the run directory + Parameters + ---------- + Task: GDAS task + Returns + ---------- + None + """ + exe_src = self.task_config.OBSPROCEXE + + logger.info(f"Link executable {exe_src} to DATA/") + exe_dest = os.path.join(self.task_config.DATA, os.path.basename(exe_src)) + if os.path.exists(exe_dest): + rm_p(exe_dest) + os.symlink(exe_src, exe_dest) + + return + + @logit(logger) + def runConverter(self) -> None: + """ + Run the IODA converter gdas_obsprovider2ioda.x + """ + chdir(self.task_config.DATA) + exec_cmd = Executable(self.task_config.APRUN_PREPOBSAERO) + exec_name = os.path.join(self.task_config.DATA, 'gdas_obsprovider2ioda.x') + exec_cmd.add_default_arg(exec_name) + + for prepaero_yaml in self.task_config.prepaero_yaml: + try: + logger.debug(f"Executing {exec_cmd} on {prepaero_yaml}") + exec_cmd(f"{prepaero_yaml}") + except OSError: + raise OSError(f"Failed to execute {exec_cmd} on {prepaero_yaml}") + except Exception: + raise WorkflowException(f"An error occured during execution of {exec_cmd} on 
{prepaero_yaml}") + + pass + + @logit(logger) + def finalize(self) -> None: + """ + Copy the output viirs files to COMIN_OBS. + Tar and archive the output files. + Tar and archive the raw obs files. + """ + # get list of viirs files + obsfiles = glob.glob(os.path.join(self.task_config['DATA'], '*viirs*nc4')) + copylist = [] + for obsfile in obsfiles: + basename = os.path.basename(obsfile) + src = os.path.join(self.task_config['DATA'], basename) + dest = os.path.join(self.task_config.COMOUT_OBS, basename) + copylist.append([src, dest]) + FileHandler({'copy': copylist}).sync() + + # gzip the files first + for obsfile in obsfiles: + with open(obsfile, 'rb') as f_in, gzip.open(f"{obsfile}.gz", 'wb') as f_out: + f_out.writelines(f_in) + + aeroobs = os.path.join(self.task_config.COMOUT_OBS, f"{self.task_config['APREFIX']}aeroobs") + # open tar file for writing + with tarfile.open(aeroobs, "w") as archive: + for obsfile in obsfiles: + aeroobsgzip = f"{obsfile}.gz" + archive.add(aeroobsgzip, arcname=os.path.basename(aeroobsgzip)) + # get list of raw viirs L2 files + rawfiles = glob.glob(os.path.join(self.task_config.DATA_OBS, 'JRR-AOD*')) + # gzip the raw L2 files first + for rawfile in rawfiles: + with open(rawfile, 'rb') as f_in, gzip.open(f"{rawfile}.gz", 'wb') as f_out: + f_out.writelines(f_in) + + aerorawobs = os.path.join(self.task_config.COMOUT_OBS, f"{self.task_config['APREFIX']}aerorawobs") + # open tar file for writing + with tarfile.open(aerorawobs, "w") as archive: + for rawfile in rawfiles: + aerorawobsgzip = f"{rawfile}.gz" + archive.add(aerorawobsgzip, arcname=os.path.basename(aerorawobsgzip)) + copylist = [] + for prepaero_yaml in self.task_config.prepaero_yaml: + basename = os.path.basename(prepaero_yaml) + dest = os.path.join(self.task_config.COMOUT_OBS, basename) + copylist.append([prepaero_yaml, dest]) + FileHandler({'copy': copylist}).sync() + + pass diff --git a/workflow/applications/applications.py b/workflow/applications/applications.py index 
50a9a7cdd0..6a4d240fe5 100644 --- a/workflow/applications/applications.py +++ b/workflow/applications/applications.py @@ -51,6 +51,7 @@ def __init__(self, conf: Configuration) -> None: self.do_ocean = _base.get('DO_OCN', False) self.do_ice = _base.get('DO_ICE', False) self.do_aero = _base.get('DO_AERO', False) + self.do_prep_obs_aero = _base.get('DO_PREP_OBS_AERO', False) self.do_bufrsnd = _base.get('DO_BUFRSND', False) self.do_gempak = _base.get('DO_GEMPAK', False) self.do_awips = _base.get('DO_AWIPS', False) diff --git a/workflow/applications/gfs_cycled.py b/workflow/applications/gfs_cycled.py index f7f9b5b5e6..175ddb07bf 100644 --- a/workflow/applications/gfs_cycled.py +++ b/workflow/applications/gfs_cycled.py @@ -108,6 +108,8 @@ def _get_app_configs(self): if self.do_aero: configs += ['aeroanlinit', 'aeroanlrun', 'aeroanlfinal'] + if self.do_prep_obs_aero: + configs += ['prepobsaero'] if self.do_jedisnowda: configs += ['prepsnowobs', 'snowanl'] @@ -178,6 +180,8 @@ def get_task_names(self): if self.do_aero and 'gdas' in self.aero_anl_cdumps: gdas_tasks += ['aeroanlinit', 'aeroanlrun', 'aeroanlfinal'] + if self.do_prep_obs_aero: + gdas_tasks += ['prepobsaero'] gdas_tasks += ['atmanlupp', 'atmanlprod', 'fcst'] @@ -215,6 +219,8 @@ def get_task_names(self): if self.do_aero and 'gfs' in self.aero_anl_cdumps: gfs_tasks += ['aeroanlinit', 'aeroanlrun', 'aeroanlfinal'] + if self.do_prep_obs_aero: + gfs_tasks += ['prepobsaero'] gfs_tasks += ['atmanlupp', 'atmanlprod', 'fcst'] diff --git a/workflow/rocoto/gfs_tasks.py b/workflow/rocoto/gfs_tasks.py index fa218c6713..0fd468b3b4 100644 --- a/workflow/rocoto/gfs_tasks.py +++ b/workflow/rocoto/gfs_tasks.py @@ -483,10 +483,35 @@ def atmanlfinal(self): return task + def prepobsaero(self): + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}prep'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + + resources = self.get_resource('prepobsaero') + 
task_name = f'{self.cdump}prepobsaero' + task_dict = {'task_name': task_name, + 'resources': resources, + 'dependency': dependencies, + 'envars': self.envars, + 'cycledef': self.cdump.replace('enkf', ''), + 'command': f'{self.HOMEgfs}/jobs/rocoto/prepobsaero.sh', + 'job_name': f'{self.pslot}_{task_name}_@H', + 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', + 'maxtries': '&MAXTRIES;' + } + + task = rocoto.create_task(task_dict) + + return task + def aeroanlinit(self): deps = [] dep_dict = {'type': 'task', 'name': f'{self.cdump}prep'} + if self.app_config.do_prep_obs_aero: + dep_dict = {'type': 'task', 'name': f'{self.cdump}prepobsaero'} deps.append(rocoto.add_dependency(dep_dict)) dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) From 5af325a6a4e0a14d180514a418603ca79fada487 Mon Sep 17 00:00:00 2001 From: Dan Holdaway <27729500+danholdaway@users.noreply.github.com> Date: Fri, 14 Jun 2024 18:05:23 -0400 Subject: [PATCH 11/20] Update GDASapp hash to move JCB into GDASapp (#2665) This PR moves JCB into GDASapp. The PR also bumps up the hash of GDASapp to what is in `feature/move_jcb`, which at time of writing is develop plus the absorption of JCB into GDASapp. Note that I also took the changes from https://github.com/NOAA-EMC/global-workflow/pull/2641 to follow the testing @RussTreadon-NOAA has done. 
--- .gitmodules | 4 ---- sorc/gdas.cd | 2 +- sorc/jcb | 1 - sorc/link_workflow.sh | 10 +++++++++- 4 files changed, 10 insertions(+), 7 deletions(-) delete mode 160000 sorc/jcb diff --git a/.gitmodules b/.gitmodules index 1c3b7acfa5..5c9e569243 100644 --- a/.gitmodules +++ b/.gitmodules @@ -26,7 +26,3 @@ [submodule "sorc/gsi_monitor.fd"] path = sorc/gsi_monitor.fd url = https://github.com/NOAA-EMC/GSI-Monitor.git -[submodule "sorc/jcb"] - path = sorc/jcb - url = https://github.com/noaa-emc/jcb - fetchRecurseSubmodules = false diff --git a/sorc/gdas.cd b/sorc/gdas.cd index 2bd8ffc149..368c9c5db9 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ -Subproject commit 2bd8ffc149f30c0ae0f8e1508477a8dc88c46a65 +Subproject commit 368c9c5db9b5ea62e72937b6d1b0f753adb9be40 diff --git a/sorc/jcb b/sorc/jcb deleted file mode 160000 index f62b9df37f..0000000000 --- a/sorc/jcb +++ /dev/null @@ -1 +0,0 @@ -Subproject commit f62b9df37f131c9ff68b62eb6e19c1109c314cf0 diff --git a/sorc/link_workflow.sh b/sorc/link_workflow.sh index 1f0de0745c..4973ab8d7d 100755 --- a/sorc/link_workflow.sh +++ b/sorc/link_workflow.sh @@ -87,13 +87,21 @@ source "${HOMEgfs}/versions/fix.ver" # Link python pacakges in ush/python # TODO: This will be unnecessary when these are part of the virtualenv -packages=("wxflow" "jcb") +packages=("wxflow") for package in "${packages[@]}"; do cd "${HOMEgfs}/ush/python" || exit 1 [[ -s "${package}" ]] && rm -f "${package}" ${LINK} "${HOMEgfs}/sorc/${package}/src/${package}" . done +# Link GDASapp python packages in ush/python +packages=("jcb") +for package in "${packages[@]}"; do + cd "${HOMEgfs}/ush/python" || exit 1 + [[ -s "${package}" ]] && rm -f "${package}" + ${LINK} "${HOMEgfs}/sorc/gdas.cd/sorc/${package}/src/${package}" . 
+done + # Link wxflow in workflow and ci/scripts # TODO: This will be unnecessary when wxflow is part of the virtualenv cd "${HOMEgfs}/workflow" || exit 1 From 38f2df9fb0c074b1f80d3c637080be79be693161 Mon Sep 17 00:00:00 2001 From: David Huber <69919478+DavidHuber-NOAA@users.noreply.github.com> Date: Mon, 17 Jun 2024 17:12:55 +0000 Subject: [PATCH 12/20] Optimize wavepostpnt (#2657) Optimize the gfswavepostpnt, gfswavepostbndpntbll, and gfswavepostbndpnt jobs This is done by 1) reducing the number of calls to `sed`, `awk`, `grep`, and `cat` by - performing operations on all files at once instead of looping over each file - removing piped `cat` calls (e.g. `cat | sed 'something'`) - combining `sed` and `grep` calls when possible - adding logic to `awk` calls instead of handling that logic in bash 2) minimizing as much as possible the amount of data on disk that has to be read in (e.g. limiting sed to read only the line numbers it needs) --------- Co-authored-by: Walter Kolczynski - NOAA --- parm/config/gefs/config.resources | 2 +- parm/config/gefs/config.wave | 2 +- parm/config/gfs/config.resources | 2 +- parm/config/gfs/config.wave | 2 +- scripts/exgfs_wave_post_pnt.sh | 60 +++++++++++++++---------------- ush/wave_outp_spec.sh | 39 ++++++++------------ workflow/rocoto/gefs_tasks.py | 7 ++-- workflow/rocoto/gfs_tasks.py | 6 +++- 8 files changed, 58 insertions(+), 62 deletions(-) diff --git a/parm/config/gefs/config.resources b/parm/config/gefs/config.resources index 04d55ae082..b2ca5b3e51 100644 --- a/parm/config/gefs/config.resources +++ b/parm/config/gefs/config.resources @@ -255,7 +255,7 @@ case ${step} in # The wavepost*pnt* jobs are I/O heavy and do not scale well to large nodes. # Limit the number of tasks/node to 40. 
"wavepostbndpnt") - export wtime_wavepostbndpnt="01:00:00" + export wtime_wavepostbndpnt="03:00:00" export npe_wavepostbndpnt=240 export nth_wavepostbndpnt=1 export npe_node_wavepostbndpnt=$(( npe_node_max / nth_wavepostbndpnt )) diff --git a/parm/config/gefs/config.wave b/parm/config/gefs/config.wave index b61a2f6e54..bef3437adf 100644 --- a/parm/config/gefs/config.wave +++ b/parm/config/gefs/config.wave @@ -87,7 +87,7 @@ export WAVEWND_FID= # The start time reflects the number of hindcast hours prior to the cycle initial time export WAVHINDH=0 export FHMAX_WAV_IBP=180 -if (( FHMAX_WAV < FHMAX_WAV_IBP )); then export FHMAX_WAV_IBP=${FHMAX_GFS} ; fi +if (( FHMAX_WAV < FHMAX_WAV_IBP )); then export FHMAX_WAV_IBP=${FHMAX_WAV} ; fi # gridded and point output rate export DTFLD_WAV=$(( FHOUT_HF_WAV * 3600 )) diff --git a/parm/config/gfs/config.resources b/parm/config/gfs/config.resources index 1d1fd2e3c1..e5f741cf7e 100644 --- a/parm/config/gfs/config.resources +++ b/parm/config/gfs/config.resources @@ -144,7 +144,7 @@ case ${step} in # The wavepost*pnt* jobs are I/O heavy and do not scale well to large nodes. # Limit the number of tasks/node to 40. 
"wavepostbndpnt") - export wtime_wavepostbndpnt="01:00:00" + export wtime_wavepostbndpnt="03:00:00" export npe_wavepostbndpnt=240 export nth_wavepostbndpnt=1 export npe_node_wavepostbndpnt=$(( npe_node_max / nth_wavepostbndpnt )) diff --git a/parm/config/gfs/config.wave b/parm/config/gfs/config.wave index 7253ef1396..568aeb1e1c 100644 --- a/parm/config/gfs/config.wave +++ b/parm/config/gfs/config.wave @@ -102,7 +102,7 @@ if [[ "${RUN}" == "gfs" ]]; then fi export WAVHINDH=0 export FHMAX_WAV_IBP=180 -if (( FHMAX_WAV < FHMAX_WAV_IBP )); then export FHMAX_WAV_IBP=${FHMAX_GFS} ; fi +if (( FHMAX_WAV < FHMAX_WAV_IBP )); then export FHMAX_WAV_IBP=${FHMAX_WAV} ; fi # gridded and point output rate export DTFLD_WAV=$(( FHOUT_HF_WAV * 3600 )) diff --git a/scripts/exgfs_wave_post_pnt.sh b/scripts/exgfs_wave_post_pnt.sh index 56cb83be21..93bdbeaf32 100755 --- a/scripts/exgfs_wave_post_pnt.sh +++ b/scripts/exgfs_wave_post_pnt.sh @@ -290,14 +290,14 @@ source "${USHgfs}/preamble.sh" fi # Create new buoy_log.ww3 - cat buoy.loc | awk '{print $3}' | sed 's/'\''//g' > ibp_tags + awk '{print $3}' buoy.loc | sed 's/'\''//g' > ibp_tags grep -F -f ibp_tags buoy_log.ww3 > buoy_log.tmp rm -f buoy_log.dat mv buoy_log.tmp buoy_log.dat grep -F -f ibp_tags buoy_lst.loc > buoy_tmp1.loc #sed '$d' buoy_tmp1.loc > buoy_tmp2.loc - buoys=$(awk '{ print $1 }' buoy_tmp1.loc) + awk '{ print $1 }' buoy_tmp1.loc > buoy_lst.txt Nb=$(wc buoy_tmp1.loc | awk '{ print $1 }') rm -f buoy_tmp1.loc @@ -350,6 +350,8 @@ source "${USHgfs}/preamble.sh" # 1.a.2 Loop over forecast time to generate post files fhr=$FHMIN_WAV + # Generated sed-searchable paths + escaped_USHgfs="${USHgfs//\//\\\/}" while [ $fhr -le $FHMAX_WAV_PNT ]; do echo " Creating the wave point scripts at : $(date)" @@ -366,6 +368,7 @@ source "${USHgfs}/preamble.sh" # Create instances of directories for spec and gridded output export SPECDATA=${DATA}/output_$YMDHMS + escaped_SPECDATA="${SPECDATA//\//\\\/}" export BULLDATA=${DATA}/output_$YMDHMS cp 
$DATA/mod_def.${waveuoutpGRD} mod_def.${waveuoutpGRD} @@ -386,19 +389,15 @@ source "${USHgfs}/preamble.sh" if [ "$DOSPC_WAV" = 'YES' ] then export dtspec=3600. - for buoy in $buoys - do - echo "${USHgfs}/wave_outp_spec.sh $buoy $ymdh spec $SPECDATA > $SPECDATA/spec_$buoy.out 2>&1" >> tmpcmdfile.$FH3 - done + # Construct the wave_outp_spec (spec) command to run on each buoy in buoy_lst.txt + sed "s/^\(.*\)$/${escaped_USHgfs}\/wave_outp_spec.sh \1 ${ymdh} spec ${escaped_SPECDATA} > ${escaped_SPECDATA}\/spec_\1.out 2>\&1/" buoy_lst.txt >> "tmpcmdfile.${FH3}" fi if [ "$DOBLL_WAV" = 'YES' ] then export dtspec=3600. - for buoy in $buoys - do - echo "${USHgfs}/wave_outp_spec.sh $buoy $ymdh bull $SPECDATA > $SPECDATA/bull_$buoy.out 2>&1" >> tmpcmdfile.$FH3 - done + # Construct the wave_outp_spec (bull) command to run on each buoy in buoy_lst.txt + sed "s/^\(.*\)$/${escaped_USHgfs}\/wave_outp_spec.sh \1 ${ymdh} bull ${escaped_SPECDATA} > ${escaped_SPECDATA}\/bull_\1.out 2>\&1/" buoy_lst.txt >> "tmpcmdfile.${FH3}" fi split -n l/1/10 tmpcmdfile.$FH3 > cmdfile.${FH3}.01 @@ -504,27 +503,24 @@ source "${USHgfs}/preamble.sh" cd $DATA - echo "Before create cmdfile for cat bouy : $(date)" - rm -f cmdfile.bouy - touch cmdfile.bouy - chmod 744 cmdfile.bouy + echo "Before create cmdfile for cat buoy : $(date)" + rm -f cmdfile.buoy + touch cmdfile.buoy + chmod 744 cmdfile.buoy CATOUTDIR=${DATA}/pnt_cat_out + escaped_CATOUTDIR="${CATOUTDIR//\//\\\/}" mkdir -p ${CATOUTDIR} if [ "$DOSPC_WAV" = 'YES' ] then - for buoy in $buoys - do - echo "${USHgfs}/wave_outp_cat.sh $buoy $FHMAX_WAV_PNT spec > ${CATOUTDIR}/spec_cat_$buoy.out 2>&1" >> cmdfile.bouy - done + # Construct wave_outp_cat (spec) call for each buoy in buoy_lst.txt + sed "s/^\(.*\)$/${escaped_USHgfs}\/wave_outp_cat.sh \1 ${FHMAX_WAV_PNT} spec > ${escaped_CATOUTDIR}\/spec_cat_\1.out 2>\&1/" buoy_lst.txt >> cmdfile.buoy fi if [ "$DOBLL_WAV" = 'YES' ] then - for buoy in $buoys - do - echo "${USHgfs}/wave_outp_cat.sh $buoy 
$FHMAX_WAV_PNT bull > ${CATOUTDIR}/bull_cat_$buoy.out 2>&1" >> cmdfile.bouy - done + # Construct wave_outp_cat (bull) call for each buoy in buoy_lst.txt + sed "s/^\(.*\)$/${escaped_USHgfs}\/wave_outp_cat.sh \1 ${FHMAX_WAV_PNT} bull > ${escaped_CATOUTDIR}\/bull_cat_\1.out 2>\&1/" buoy_lst.txt >> cmdfile.buoy fi if [ ${CFP_MP:-"NO"} = "YES" ]; then @@ -532,18 +528,18 @@ source "${USHgfs}/preamble.sh" ifile=0 iline=1 ifirst='yes' - nlines=$( wc -l cmdfile.bouy | awk '{print $1}' ) + nlines=$( wc -l < cmdfile.buoy) while [ $iline -le $nlines ]; do - line=$( sed -n ''$iline'p' cmdfile.bouy ) + line=$( sed -n ''$iline'p' cmdfile.buoy ) if [ -z "$line" ]; then break else if [ "$ifirst" = 'yes' ]; then - echo "#!/bin/sh" > cmdfile.bouy.$nfile - echo "$nfile cmdfile.bouy.$nfile" >> cmdmprogbouy - chmod 744 cmdfile.bouy.$nfile + echo "#!/bin/sh" > cmdfile.buoy.$nfile + echo "$nfile cmdfile.buoy.$nfile" >> cmdmprogbuoy + chmod 744 cmdfile.buoy.$nfile fi - echo $line >> cmdfile.bouy.$nfile + echo $line >> cmdfile.buoy.$nfile nfile=$(( nfile + 1 )) if [ $nfile -eq $NTASKS ]; then nfile=0 @@ -554,7 +550,7 @@ source "${USHgfs}/preamble.sh" done fi - wavenproc=$(wc -l cmdfile.bouy | awk '{print $1}') + wavenproc=$(wc -l < cmdfile.buoy) wavenproc=$(echo $((${wavenproc}<${NTASKS}?${wavenproc}:${NTASKS}))) set +x @@ -567,9 +563,11 @@ source "${USHgfs}/preamble.sh" if [ "$wavenproc" -gt '1' ] then if [ ${CFP_MP:-"NO"} = "YES" ]; then - ${wavempexec} -n ${wavenproc} ${wave_mpmd} cmdmprogbouy + # shellcheck disable=SC2086 + ${wavempexec} -n "${wavenproc}" ${wave_mpmd} cmdmprogbuoy else - ${wavempexec} ${wavenproc} ${wave_mpmd} cmdfile.bouy + # shellcheck disable=SC2086 + ${wavempexec} "${wavenproc}" ${wave_mpmd} cmdfile.buoy fi exit=$? 
else diff --git a/ush/wave_outp_spec.sh b/ush/wave_outp_spec.sh index 159d0eb2cf..37accbae49 100755 --- a/ush/wave_outp_spec.sh +++ b/ush/wave_outp_spec.sh @@ -74,21 +74,7 @@ source "${USHgfs}/preamble.sh" exit 1 else buoy=$bloc - grep $buoy ${DATA}/buoy_log.ww3 > tmp_list.loc - while read line - do - buoy_name=$(echo $line | awk '{print $2}') - if [ $buoy = $buoy_name ] - then - point=$(echo $line | awk '{ print $1 }') - set +x - echo " Location ID/# : $buoy (${point})" - echo " Spectral output start time : $ymdh " - echo ' ' - set_trace - break - fi - done < tmp_list.loc + point=$(awk "{if (\$2 == \"${buoy}\"){print \$1; exit} }" "${DATA}/buoy_log.ww3") if [ -z "$point" ] then set +x @@ -98,6 +84,11 @@ source "${USHgfs}/preamble.sh" echo ' ' set_trace exit 2 + else + set +x + echo " Location ID/# : $buoy (${point})" + echo " Spectral output start time : $ymdh " + echo ' ' fi fi @@ -201,27 +192,27 @@ source "${USHgfs}/preamble.sh" then if [ "$specdir" = "bull" ] then - cat $outfile | sed -e '9,$d' >> ${STA_DIR}/${specdir}fhr/$WAV_MOD_TAG.${ymdh}.$buoy.bull - cat $coutfile | sed -e '8,$d' >> ${STA_DIR}/c${specdir}fhr/$WAV_MOD_TAG.${ymdh}.$buoy.cbull + sed '9,$d' "${outfile}" >> "${STA_DIR}/${specdir}fhr/${WAV_MOD_TAG}.${ymdh}.${buoy}.bull" + sed '8,$d' "${coutfile}" >> "${STA_DIR}/c${specdir}fhr/${WAV_MOD_TAG}.${ymdh}.${buoy}.cbull" else - cat $outfile >> ${STA_DIR}/${specdir}fhr/$WAV_MOD_TAG.${ymdh}.$buoy.spec + cat $outfile >> "${STA_DIR}/${specdir}fhr/${WAV_MOD_TAG}.${ymdh}.${buoy}.spec" fi elif [ "${ymdh}" = "${YMDHE}" ] then if [ "$specdir" = "bull" ] then - cat $outfile | sed -e '1,7d' >> ${STA_DIR}/${specdir}fhr/$WAV_MOD_TAG.${ymdh}.$buoy.bull - cat $coutfile | sed -e '1,6d' >> ${STA_DIR}/c${specdir}fhr/$WAV_MOD_TAG.${ymdh}.$buoy.cbull + sed '1,7d' "${outfile}" >> "${STA_DIR}/${specdir}fhr/${WAV_MOD_TAG}.${ymdh}.${buoy}.bull" + sed '1,6d' "${coutfile}" >> "${STA_DIR}/c${specdir}fhr/${WAV_MOD_TAG}.${ymdh}.${buoy}.cbull" else - cat $outfile | sed -n "/^${YMD} 
${HMS}$/,\$p" >> ${STA_DIR}/${specdir}fhr/$WAV_MOD_TAG.${ymdh}.$buoy.spec + sed -n "/^${YMD} ${HMS}$/,\$p" "${outfile}" >> "${STA_DIR}/${specdir}fhr/${WAV_MOD_TAG}.${ymdh}.${buoy}.spec" fi else if [ "$specdir" = "bull" ] then - cat $outfile | sed -e '1,7d' | sed -e '2,$d' >> ${STA_DIR}/${specdir}fhr/$WAV_MOD_TAG.${ymdh}.$buoy.bull - cat $coutfile | sed -e '1,6d' | sed -e '2,$d' >> ${STA_DIR}/c${specdir}fhr/$WAV_MOD_TAG.${ymdh}.$buoy.cbull + sed '8q;d' "${outfile}" >> "${STA_DIR}/${specdir}fhr/${WAV_MOD_TAG}.${ymdh}.${buoy}.bull" + sed '7q;d' "${coutfile}" >> "${STA_DIR}/c${specdir}fhr/${WAV_MOD_TAG}.${ymdh}.${buoy}.cbull" else - cat $outfile | sed -n "/^${YMD} ${HMS}$/,\$p" >> ${STA_DIR}/${specdir}fhr/$WAV_MOD_TAG.${ymdh}.$buoy.spec + sed -n "/^${YMD} ${HMS}$/,\$p" "${outfile}" >> "${STA_DIR}/${specdir}fhr/${WAV_MOD_TAG}.${ymdh}.${buoy}.spec" fi fi else diff --git a/workflow/rocoto/gefs_tasks.py b/workflow/rocoto/gefs_tasks.py index 6899a655b6..6fffc881e0 100644 --- a/workflow/rocoto/gefs_tasks.py +++ b/workflow/rocoto/gefs_tasks.py @@ -408,8 +408,11 @@ def wavepostbndpnt(self): def wavepostbndpntbll(self): deps = [] atmos_hist_path = self._template_to_rocoto_cycstring(self._base["COM_ATMOS_HISTORY_TMPL"], {'MEMDIR': 'mem#member#'}) - # Is there any reason this is 180? 
- data = f'{atmos_hist_path}/{self.cdump}.t@Hz.atm.logf180.txt' + + # The wavepostbndpntbll job runs on forecast hours up to FHMAX_WAV_IBP + last_fhr = self._configs['wave']['FHMAX_WAV_IBP'] + + data = f'{atmos_hist_path}/{self.cdump}.t@Hz.atm.logf{last_fhr:03d}.txt' dep_dict = {'type': 'data', 'data': data} deps.append(rocoto.add_dependency(dep_dict)) diff --git a/workflow/rocoto/gfs_tasks.py b/workflow/rocoto/gfs_tasks.py index 0fd468b3b4..2c74d0f854 100644 --- a/workflow/rocoto/gfs_tasks.py +++ b/workflow/rocoto/gfs_tasks.py @@ -1200,9 +1200,13 @@ def wavepostbndpnt(self): return task def wavepostbndpntbll(self): + + # The wavepostbndpntbll job runs on forecast hours up to FHMAX_WAV_IBP + last_fhr = self._configs['wave']['FHMAX_WAV_IBP'] + deps = [] atmos_hist_path = self._template_to_rocoto_cycstring(self._base["COM_ATMOS_HISTORY_TMPL"]) - data = f'{atmos_hist_path}/{self.cdump}.t@Hz.atm.logf180.txt' + data = f'{atmos_hist_path}/{self.cdump}.t@Hz.atm.logf{last_fhr:03d}.txt' dep_dict = {'type': 'data', 'data': data} deps.append(rocoto.add_dependency(dep_dict)) dependencies = rocoto.create_dependency(dep=deps) From 47b3a581c8257fa24411fb400df8bb0e1e04972a Mon Sep 17 00:00:00 2001 From: Walter Kolczynski - NOAA Date: Mon, 17 Jun 2024 22:55:38 -0400 Subject: [PATCH 13/20] Turn on high-frequency output in extended test (#2679) Turns on high-frequency (hourly) output in the extended products test to exercise that aspect of the code. This test only runs on WCOSS. Also adds the hooks to optionally turn on the metplus jobs, but that is deferred as they are not currently working correctly. 
--- ci/cases/yamls/gfs_extended_ci.yaml | 1 + parm/config/gefs/config.base | 2 +- parm/config/gefs/yaml/defaults.yaml | 3 ++- parm/config/gfs/config.base | 6 +++--- parm/config/gfs/yaml/defaults.yaml | 2 ++ 5 files changed, 9 insertions(+), 5 deletions(-) diff --git a/ci/cases/yamls/gfs_extended_ci.yaml b/ci/cases/yamls/gfs_extended_ci.yaml index f3a84c8fde..42ee612f3a 100644 --- a/ci/cases/yamls/gfs_extended_ci.yaml +++ b/ci/cases/yamls/gfs_extended_ci.yaml @@ -10,3 +10,4 @@ base: DO_NPOESS: "YES" DO_GENESIS_FSU: "NO" FHMAX_GFS: 384 + FHMAX_HF_GFS: 120 diff --git a/parm/config/gefs/config.base b/parm/config/gefs/config.base index 0bb29e31ae..e98eb41208 100644 --- a/parm/config/gefs/config.base +++ b/parm/config/gefs/config.base @@ -237,7 +237,7 @@ export FHMIN_GFS=0 export FHMIN=${FHMIN_GFS} export FHMAX_GFS=@FHMAX_GFS@ export FHOUT_GFS=6 -export FHMAX_HF_GFS=0 +export FHMAX_HF_GFS=@FHMAX_HF_GFS@ export FHOUT_HF_GFS=1 export FHOUT_OCN_GFS=6 export FHOUT_ICE_GFS=6 diff --git a/parm/config/gefs/yaml/defaults.yaml b/parm/config/gefs/yaml/defaults.yaml index 2341c35d05..f6797758da 100644 --- a/parm/config/gefs/yaml/defaults.yaml +++ b/parm/config/gefs/yaml/defaults.yaml @@ -9,5 +9,6 @@ base: DO_AWIPS: "NO" KEEPDATA: "NO" FHMAX_GFS: 120 + FHMAX_HF_GFS: 0 REPLAY_ICS: "NO" - + USE_OCN_PERTURB_FILES: "false" diff --git a/parm/config/gfs/config.base b/parm/config/gfs/config.base index fd1ffdc415..f893eaaf4e 100644 --- a/parm/config/gfs/config.base +++ b/parm/config/gfs/config.base @@ -291,8 +291,8 @@ export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: # GFS output and frequency export FHMIN_GFS=0 export FHMAX_GFS=@FHMAX_GFS@ -export FHOUT_GFS=3 # Must be 6 for S2S until #1629 is addressed; 3 for ops -export FHMAX_HF_GFS=0 +export FHOUT_GFS=3 # 3 for ops +export FHMAX_HF_GFS=@FHMAX_HF_GFS@ export FHOUT_HF_GFS=1 export FHOUT_OCN_GFS=6 export FHOUT_ICE_GFS=6 @@ -445,7 +445,7 @@ export netcdf_diag=".true." export binary_diag=".false." 
# Verification options -export DO_METP="NO" # Run METPLUS jobs - set METPLUS settings in config.metp; not supported with spack-stack +export DO_METP="@DO_METP@" # Run METPLUS jobs - set METPLUS settings in config.metp export DO_FIT2OBS="YES" # Run fit to observations package export DO_VRFY_OCEANDA="@DO_VRFY_OCEANDA@" # Run SOCA Ocean and Seaice DA verification tasks diff --git a/parm/config/gfs/yaml/defaults.yaml b/parm/config/gfs/yaml/defaults.yaml index d4a423f17e..b074b11dbf 100644 --- a/parm/config/gfs/yaml/defaults.yaml +++ b/parm/config/gfs/yaml/defaults.yaml @@ -13,7 +13,9 @@ base: DO_TRACKER: "YES" DO_GENESIS: "YES" DO_GENESIS_FSU: "NO" + DO_METP: "NO" FHMAX_GFS: 120 + FHMAX_HF_GFS: 0 DO_VRFY_OCEANDA: "NO" GSI_SOILANAL: "NO" EUPD_CYC: "gdas" From 35d4d99eaac669721add9ddcc793153e5ab3b30a Mon Sep 17 00:00:00 2001 From: "Henry R. Winterbottom" <49202169+HenryWinterbottom-NOAA@users.noreply.github.com> Date: Tue, 18 Jun 2024 08:06:53 -0600 Subject: [PATCH 14/20] Update archive job to use COMIN/COMOUT (#2668) NCO has requested that each COM variable specify whether it is an input or an output. This completes that process for the global-workflow archive task. 
Refs #2451 --------- Co-authored-by: Walter Kolczynski - NOAA Co-authored-by: Rahul Mahajan Co-authored-by: David Huber <69919478+DavidHuber-NOAA@users.noreply.github.com> --- jobs/JGDAS_ENKF_ARCHIVE | 4 +- jobs/JGLOBAL_ARCHIVE | 50 +++++++++---- parm/archive/arcdir.yaml.j2 | 52 ++++++------- parm/archive/chem.yaml.j2 | 2 +- parm/archive/enkf.yaml.j2 | 16 ++-- parm/archive/enkf_grp.yaml.j2 | 16 ++-- parm/archive/enkf_restarta_grp.yaml.j2 | 26 +++---- parm/archive/enkf_restartb_grp.yaml.j2 | 10 +-- parm/archive/gdas.yaml.j2 | 98 ++++++++++++------------- parm/archive/gdas_restarta.yaml.j2 | 34 ++++----- parm/archive/gdas_restartb.yaml.j2 | 10 +-- parm/archive/gdasice.yaml.j2 | 6 +- parm/archive/gdasice_restart.yaml.j2 | 2 +- parm/archive/gdasocean.yaml.j2 | 4 +- parm/archive/gdasocean_analysis.yaml.j2 | 26 +++---- parm/archive/gdasocean_restart.yaml.j2 | 4 +- parm/archive/gdaswave.yaml.j2 | 4 +- parm/archive/gdaswave_restart.yaml.j2 | 2 +- parm/archive/gfs_downstream.yaml.j2 | 10 +-- parm/archive/gfs_flux.yaml.j2 | 4 +- parm/archive/gfs_flux_1p00.yaml.j2 | 4 +- parm/archive/gfs_netcdfa.yaml.j2 | 12 +-- parm/archive/gfs_netcdfb.yaml.j2 | 4 +- parm/archive/gfs_pgrb2b.yaml.j2 | 16 ++-- parm/archive/gfs_restarta.yaml.j2 | 8 +- parm/archive/gfsa.yaml.j2 | 56 +++++++------- parm/archive/gfsb.yaml.j2 | 16 ++-- parm/archive/gfswave.yaml.j2 | 20 ++--- parm/archive/ice_6hravg.yaml.j2 | 4 +- parm/archive/ice_grib2.yaml.j2 | 12 +-- parm/archive/master_enkf.yaml.j2 | 26 +++---- parm/archive/ocean_6hravg.yaml.j2 | 2 +- parm/archive/ocean_grib2.yaml.j2 | 12 +-- scripts/exgdas_enkf_earc.py | 4 +- scripts/exglobal_archive.py | 4 +- ush/python/pygfs/task/archive.py | 19 ++--- 36 files changed, 312 insertions(+), 287 deletions(-) diff --git a/jobs/JGDAS_ENKF_ARCHIVE b/jobs/JGDAS_ENKF_ARCHIVE index 5ac46a73e7..7496acd8d4 100755 --- a/jobs/JGDAS_ENKF_ARCHIVE +++ b/jobs/JGDAS_ENKF_ARCHIVE @@ -11,8 +11,8 @@ export CDUMP=${RUN/enkf} YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COM_TOP 
MEMDIR="ensstat" YMD=${PDY} HH=${cyc} declare_from_tmpl -rx \ - COM_ATMOS_ANALYSIS_ENSSTAT:COM_ATMOS_ANALYSIS_TMPL \ - COM_ATMOS_HISTORY_ENSSTAT:COM_ATMOS_HISTORY_TMPL + COMIN_ATMOS_ANALYSIS_ENSSTAT:COM_ATMOS_ANALYSIS_TMPL \ + COMIN_ATMOS_HISTORY_ENSSTAT:COM_ATMOS_HISTORY_TMPL ############################################################### # Run archive script diff --git a/jobs/JGLOBAL_ARCHIVE b/jobs/JGLOBAL_ARCHIVE index cec3505000..28bd820de1 100755 --- a/jobs/JGLOBAL_ARCHIVE +++ b/jobs/JGLOBAL_ARCHIVE @@ -9,21 +9,45 @@ source "${HOMEgfs}/ush/jjob_header.sh" -e "arch" -c "base arch" ############################################## export CDUMP=${RUN/enkf} -YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COM_ATMOS_ANALYSIS COM_ATMOS_BUFR COM_ATMOS_GEMPAK \ - COM_ATMOS_GENESIS COM_ATMOS_HISTORY COM_ATMOS_INPUT COM_ATMOS_MASTER COM_ATMOS_RESTART \ - COM_ATMOS_TRACK COM_ATMOS_WMO \ - COM_CHEM_HISTORY COM_CHEM_ANALYSIS\ - COM_MED_RESTART \ - COM_SNOW_ANALYSIS \ - COM_ICE_HISTORY COM_ICE_INPUT COM_ICE_RESTART COM_ICE_GRIB \ - COM_OBS COM_TOP \ - COM_OCEAN_HISTORY COM_OCEAN_RESTART COM_OCEAN_GRIB COM_OCEAN_NETCDF \ - COM_OCEAN_ANALYSIS \ - COM_WAVE_GRID COM_WAVE_HISTORY COM_WAVE_STATION COM_WAVE_RESTART \ - COM_ATMOS_OZNMON COM_ATMOS_RADMON COM_ATMOS_MINMON COM_CONF +YMD=${PDY} HH=${cyc} declare_from_tmpl -rx \ + COMIN_ATMOS_ANALYSIS:COM_ATMOS_ANALYSIS_TMPL \ + COMIN_ATMOS_BUFR:COM_ATMOS_BUFR_TMPL \ + COMIN_ATMOS_GEMPAK:COM_ATMOS_GEMPAK_TMPL \ + COMIN_ATMOS_GENESIS:COM_ATMOS_GENESIS_TMPL \ + COMIN_ATMOS_HISTORY:COM_ATMOS_HISTORY_TMPL \ + COMIN_ATMOS_INPUT:COM_ATMOS_INPUT_TMPL \ + COMIN_ATMOS_MASTER:COM_ATMOS_MASTER_TMPL \ + COMIN_ATMOS_RESTART:COM_ATMOS_RESTART_TMPL \ + COMIN_ATMOS_TRACK:COM_ATMOS_TRACK_TMPL \ + COMIN_ATMOS_WMO:COM_ATMOS_WMO_TMPL \ + COMIN_CHEM_HISTORY:COM_CHEM_HISTORY_TMPL \ + COMIN_CHEM_ANALYSIS:COM_CHEM_ANALYSIS_TMPL \ + COMIN_MED_RESTART:COM_MED_RESTART_TMPL \ + COMIN_SNOW_ANALYSIS:COM_SNOW_ANALYSIS_TMPL \ + COMIN_ICE_HISTORY:COM_ICE_HISTORY_TMPL \ + 
COMIN_ICE_INPUT:COM_ICE_INPUT_TMPL \ + COMIN_ICE_RESTART:COM_ICE_RESTART_TMPL \ + COMIN_ICE_GRIB:COM_ICE_GRIB_TMPL \ + COMIN_OBS:COM_OBS_TMPL \ + COMIN_TOP:COM_TOP_TMPL \ + COMIN_OCEAN_HISTORY:COM_OCEAN_HISTORY_TMPL \ + COMIN_OCEAN_RESTART:COM_OCEAN_RESTART_TMPL \ + COMIN_OCEAN_GRIB:COM_OCEAN_GRIB_TMPL \ + COMIN_OCEAN_NETCDF:COM_OCEAN_NETCDF_TMPL \ + COMIN_OCEAN_ANALYSIS:COM_OCEAN_ANALYSIS_TMPL \ + COMIN_WAVE_GRID:COM_WAVE_GRID_TMPL \ + COMIN_WAVE_HISTORY:COM_WAVE_HISTORY_TMPL \ + COMIN_WAVE_STATION:COM_WAVE_STATION_TMPL \ + COMIN_WAVE_RESTART:COM_WAVE_RESTART_TMPL \ + COMIN_ATMOS_OZNMON:COM_ATMOS_OZNMON_TMPL \ + COMIN_ATMOS_RADMON:COM_ATMOS_RADMON_TMPL \ + COMIN_ATMOS_MINMON:COM_ATMOS_MINMON_TMPL \ + COMIN_CONF:COM_CONF_TMPL \ + COMOUT_ATMOS_TRACK:COM_ATMOS_TRACK_TMPL for grid in "0p25" "0p50" "1p00"; do - YMD=${PDY} HH=${cyc} GRID=${grid} declare_from_tmpl -rx "COM_ATMOS_GRIB_${grid}:COM_ATMOS_GRIB_GRID_TMPL" + YMD=${PDY} HH=${cyc} GRID=${grid} declare_from_tmpl -rx \ + "COMIN_ATMOS_GRIB_${grid}:COM_ATMOS_GRIB_GRID_TMPL" done ############################################################### diff --git a/parm/archive/arcdir.yaml.j2 b/parm/archive/arcdir.yaml.j2 index 1ed7422761..d6b1899ebc 100644 --- a/parm/archive/arcdir.yaml.j2 +++ b/parm/archive/arcdir.yaml.j2 @@ -15,15 +15,15 @@ Deterministic: &deterministic copy: # Cyclone forecasts, produced for both gdas and gfs cycles ## Only created if tracking is on and there were systems to track - {% if path_exists(COM_ATMOS_TRACK ~ "/atcfunix." ~ RUN ~ "." ~ cycle_YMDH) %} - - ["{{ COM_ATMOS_TRACK }}/atcfunix.{{ RUN }}.{{ cycle_YMDH }}", "{{ ARCDIR }}/atcfunix.{{ RUN }}.{{ cycle_YMDH }}"] - - ["{{ COM_ATMOS_TRACK }}/atcfunixp.{{ RUN }}.{{ cycle_YMDH }}", "{{ ARCDIR }}/atcfunixp.{{ RUN }}.{{ cycle_YMDH }}"] + {% if path_exists(COMIN_ATMOS_TRACK ~ "/atcfunix." ~ RUN ~ "." 
~ cycle_YMDH) %} + - ["{{ COMIN_ATMOS_TRACK }}/atcfunix.{{ RUN }}.{{ cycle_YMDH }}", "{{ ARCDIR }}/atcfunix.{{ RUN }}.{{ cycle_YMDH }}"] + - ["{{ COMIN_ATMOS_TRACK }}/atcfunixp.{{ RUN }}.{{ cycle_YMDH }}", "{{ ARCDIR }}/atcfunixp.{{ RUN }}.{{ cycle_YMDH }}"] {% endif %} # Cyclone tracking data {% for basin in ["epac", "natl"] %} - {% if path_exists(COM_ATMOS_TRACK + "/" + basin) %} - - ["{{ COM_ATMOS_TRACK }}/{{ basin }}", "{{ ARCDIR }}/{{ basin }}"] + {% if path_exists(COMIN_ATMOS_TRACK + "/" + basin) %} + - ["{{ COMIN_ATMOS_TRACK }}/{{ basin }}", "{{ ARCDIR }}/{{ basin }}"] {% endif %} {% endfor %} @@ -31,25 +31,25 @@ Deterministic: &deterministic analysis: copy: # Analysis data (if we are running in cycled mode) - - ["{{ COM_ATMOS_GRIB_1p00 }}/{{ head }}pgrb2.1p00.anl", "{{ ARCDIR }}/pgbanl.{{ RUN }}.{{ cycle_YMDH }}.grib2"] + - ["{{ COMIN_ATMOS_GRIB_1p00 }}/{{ head }}pgrb2.1p00.anl", "{{ ARCDIR }}/pgbanl.{{ RUN }}.{{ cycle_YMDH }}.grib2"] {% if DO_JEDIATMVAR %} - - ["{{ COM_ATMOS_ANALYSIS }}/{{ head }}atmstat", "{{ ARCDIR }}/atmstat.{{ RUN }}.{{ cycle_YMDH }}"] + - ["{{ COMIN_ATMOS_ANALYSIS }}/{{ head }}atmstat", "{{ ARCDIR }}/atmstat.{{ RUN }}.{{ cycle_YMDH }}"] {% else %} - - ["{{ COM_ATMOS_ANALYSIS }}/{{ head }}gsistat", "{{ ARCDIR }}/gsistat.{{ RUN }}.{{ cycle_YMDH }}"] + - ["{{ COMIN_ATMOS_ANALYSIS }}/{{ head }}gsistat", "{{ ARCDIR }}/gsistat.{{ RUN }}.{{ cycle_YMDH }}"] {% endif %} {% if DO_JEDISNOWDA %} - - ["{{ COM_SNOW_ANALYSIS }}/{{ head }}snowstat.tgz", "{{ ARCDIR }}/snowstat.{{ RUN }}.{{ cycle_YMDH }}.tgz"] + - ["{{ COMIN_SNOW_ANALYSIS }}/{{ head }}snowstat.tgz", "{{ ARCDIR }}/snowstat.{{ RUN }}.{{ cycle_YMDH }}.tgz"] {% endif %} {% if AERO_ANL_CDUMP == RUN or AERO_ANL_CDUMP == "both" %} - - ["{{ COM_CHEM_ANALYSIS }}/{{ head }}aerostat", "{{ ARCDIR }}/aerostat.{{ RUN }}.{{ cycle_YMDH }}"] + - ["{{ COMIN_CHEM_ANALYSIS }}/{{ head }}aerostat", "{{ ARCDIR }}/aerostat.{{ RUN }}.{{ cycle_YMDH }}"] {% endif %} {% if DO_PREP_OBS_AERO %} - - ["{{ COM_OBS 
}}/{{ head }}aeroobs", "{{ ARCDIR }}/aeroobs.{{ RUN }}.{{ cycle_YMDH }}"] - - ["{{ COM_OBS }}/{{ head }}aerorawobs", "{{ ARCDIR }}/aerorawobs.{{ RUN }}.{{ cycle_YMDH }}"] + - ["{{ COMIN_OBS }}/{{ head }}aeroobs", "{{ ARCDIR }}/aeroobs.{{ RUN }}.{{ cycle_YMDH }}"] + - ["{{ COMIN_OBS }}/{{ head }}aerorawobs", "{{ ARCDIR }}/aerorawobs.{{ RUN }}.{{ cycle_YMDH }}"] {% endif %} {% endif %} @@ -62,18 +62,18 @@ gfs: # GFS specific gfs: copy: {% for fhr in range(0, FHMAX_GFS + 1, FHOUT_GFS) %} - - ["{{ COM_ATMOS_GRIB_1p00 }}/{{ head }}pgrb2.1p00.f{{ '%03d' % fhr }}", "{{ ARCDIR }}/pgbf{{ '%02d' % fhr }}.{{ RUN }}.{{ cycle_YMDH }}.grib2"] + - ["{{ COMIN_ATMOS_GRIB_1p00 }}/{{ head }}pgrb2.1p00.f{{ '%03d' % fhr }}", "{{ ARCDIR }}/pgbf{{ '%02d' % fhr }}.{{ RUN }}.{{ cycle_YMDH }}.grib2"] {% endfor %} # Cyclone genesis data (only present if there are storms) - {% if path_exists(COM_ATMOS_GENESIS ~ "/storms.gfso.atcf_gen." ~ cycle_YMDH) %} - - ["{{ COM_ATMOS_GENESIS }}/storms.gfso.atcf_gen.{{ cycle_YMDH }}", "{{ ARCDIR }}/storms.gfso.atcf_gen.{{ cycle_YMDH }}"] - - ["{{ COM_ATMOS_GENESIS }}/storms.gfso.atcf_gen.altg.{{ cycle_YMDH }}", "{{ ARCDIR }}/storms.gfso.atcf_gen.altg.{{ cycle_YMDH }}"] + {% if path_exists(COMIN_ATMOS_GENESIS ~ "/storms.gfso.atcf_gen." ~ cycle_YMDH) %} + - ["{{ COMIN_ATMOS_GENESIS }}/storms.gfso.atcf_gen.{{ cycle_YMDH }}", "{{ ARCDIR }}/storms.gfso.atcf_gen.{{ cycle_YMDH }}"] + - ["{{ COMIN_ATMOS_GENESIS }}/storms.gfso.atcf_gen.altg.{{ cycle_YMDH }}", "{{ ARCDIR }}/storms.gfso.atcf_gen.altg.{{ cycle_YMDH }}"] {% endif %} - {% if path_exists(COM_ATMOS_GENESIS ~ "/trak.gfso.atcfunix." ~ cycle_YMDH) %} - - ["{{ COM_ATMOS_GENESIS }}/trak.gfso.atcfunix.{{ cycle_YMDH }}", "{{ ARCDIR }}/trak.gfso.atcfunix.{{ cycle_YMDH }}"] - - ["{{ COM_ATMOS_GENESIS }}/trak.gfso.atcfunix.altg.{{ cycle_YMDH }}", "{{ ARCDIR }}/trak.gfso.atcfunix.altg.{{ cycle_YMDH }}"] + {% if path_exists(COMIN_ATMOS_GENESIS ~ "/trak.gfso.atcfunix." 
~ cycle_YMDH) %} + - ["{{ COMIN_ATMOS_GENESIS }}/trak.gfso.atcfunix.{{ cycle_YMDH }}", "{{ ARCDIR }}/trak.gfso.atcfunix.{{ cycle_YMDH }}"] + - ["{{ COMIN_ATMOS_GENESIS }}/trak.gfso.atcfunix.altg.{{ cycle_YMDH }}", "{{ ARCDIR }}/trak.gfso.atcfunix.altg.{{ cycle_YMDH }}"] {% endif %} {% if DO_FIT2OBS %} @@ -87,8 +87,8 @@ gfs: # GFS specific {% for fhr in range(0, FHMAX_FITS + 1, 6) %} {% set sfcfile = "/" + head + "sfcf" + '%03d'|format(fhr) + ".nc" %} {% set sigfile = "/" + head + "atmf" + '%03d'|format(fhr) + ".nc" %} - - ["{{COM_ATMOS_HISTORY}}/{{ sfcfile }}", "{{ VFYARC }}/{{ RUN }}.{{ cycle_YMD }}/{{ cycle_HH }}/{{ sfcfile }}"] - - ["{{COM_ATMOS_HISTORY}}/{{ sigfile }}", "{{ VFYARC }}/{{ RUN }}.{{ cycle_YMD }}/{{ cycle_HH }}/{{ sigfile }}"] + - ["{{COMIN_ATMOS_HISTORY}}/{{ sfcfile }}", "{{ VFYARC }}/{{ RUN }}.{{ cycle_YMD }}/{{ cycle_HH }}/{{ sfcfile }}"] + - ["{{COMIN_ATMOS_HISTORY}}/{{ sigfile }}", "{{ VFYARC }}/{{ RUN }}.{{ cycle_YMD }}/{{ cycle_HH }}/{{ sigfile }}"] {% endfor %} {% endif %} @@ -101,7 +101,7 @@ gdas: # GDAS specific gdas: copy: {% for fhr in range(0, FHMAX + 1, FHOUT) %} - - ["{{ COM_ATMOS_GRIB_1p00 }}/{{ head }}pgrb2.1p00.f{{ '%03d' % fhr }}", "{{ ARCDIR }}/pgbf{{ '%02d' % fhr }}.{{ RUN }}.{{ cycle_YMDH }}.grib2"] + - ["{{ COMIN_ATMOS_GRIB_1p00 }}/{{ head }}pgrb2.1p00.f{{ '%03d' % fhr }}", "{{ ARCDIR }}/pgbf{{ '%02d' % fhr }}.{{ RUN }}.{{ cycle_YMDH }}.grib2"] {% endfor %} {% endif %} @@ -110,11 +110,11 @@ Ensemble: &ensemble # ENKFGDAS or ENKFGFS copy: # Copy ensemble analyses {% if DO_JEDIATMENS %} - - ["{{ COM_ATMOS_ANALYSIS_ENSSTAT }}/{{ head }}atmensstat", "{{ ARCDIR }}/atmensstat.{{ RUN }}.{{ cycle_YMDH }}"] - - ["{{ COM_ATMOS_ANALYSIS_ENSSTAT }}/{{ head }}atminc.ensmean.nc", "{{ ARCDIR }}/atmensstat.{{ RUN }}.{{ cycle_YMDH }}.ensmean.nc"] + - ["{{ COMIN_ATMOS_ANALYSIS_ENSSTAT }}/{{ head }}atmensstat", "{{ ARCDIR }}/atmensstat.{{ RUN }}.{{ cycle_YMDH }}"] + - ["{{ COMIN_ATMOS_ANALYSIS_ENSSTAT }}/{{ head }}atminc.ensmean.nc", "{{ ARCDIR 
}}/atmensstat.{{ RUN }}.{{ cycle_YMDH }}.ensmean.nc"] {% else %} - - ["{{ COM_ATMOS_ANALYSIS_ENSSTAT }}/{{ head }}enkfstat", "{{ ARCDIR }}/enkfstat.{{ RUN }}.{{ cycle_YMDH }}"] - - ["{{ COM_ATMOS_ANALYSIS_ENSSTAT }}/{{ head }}gsistat.ensmean", "{{ ARCDIR }}/gsistat.{{ RUN }}.{{ cycle_YMDH }}.ensmean"] + - ["{{ COMIN_ATMOS_ANALYSIS_ENSSTAT }}/{{ head }}enkfstat", "{{ ARCDIR }}/enkfstat.{{ RUN }}.{{ cycle_YMDH }}"] + - ["{{ COMIN_ATMOS_ANALYSIS_ENSSTAT }}/{{ head }}gsistat.ensmean", "{{ ARCDIR }}/gsistat.{{ RUN }}.{{ cycle_YMDH }}.ensmean"] {% endif %} {% if "enkf" in RUN %} diff --git a/parm/archive/chem.yaml.j2 b/parm/archive/chem.yaml.j2 index 7796912b1a..33c6dcef57 100644 --- a/parm/archive/chem.yaml.j2 +++ b/parm/archive/chem.yaml.j2 @@ -4,4 +4,4 @@ chem: target: "{{ ATARDIR }}/{{ cycle_YMDH }}/chem.tar" required: # TODO explicitize this set - - "{{ COM_CHEM_HISTORY | relpath(ROTDIR) }}/{{ head }}*" + - "{{ COMIN_CHEM_HISTORY | relpath(ROTDIR) }}/{{ head }}*" diff --git a/parm/archive/enkf.yaml.j2 b/parm/archive/enkf.yaml.j2 index ed50a1749d..bc5ef03cb8 100644 --- a/parm/archive/enkf.yaml.j2 +++ b/parm/archive/enkf.yaml.j2 @@ -34,10 +34,10 @@ enkf: # Ensemble mean and spread {% for fhr in range(3, fhmax + 1, 3) %} - - "{{ COM_ATMOS_HISTORY_ENSSTAT | relpath(ROTDIR) }}/{{ head }}atmf{{ '%03d' % fhr }}.ensmean.nc" - - "{{ COM_ATMOS_HISTORY_ENSSTAT | relpath(ROTDIR) }}/{{ head }}sfcf{{ '%03d' % fhr }}.ensmean.nc" + - "{{ COMIN_ATMOS_HISTORY_ENSSTAT | relpath(ROTDIR) }}/{{ head }}atmf{{ '%03d' % fhr }}.ensmean.nc" + - "{{ COMIN_ATMOS_HISTORY_ENSSTAT | relpath(ROTDIR) }}/{{ head }}sfcf{{ '%03d' % fhr }}.ensmean.nc" {% if ENKF_SPREAD %} - - "{{ COM_ATMOS_HISTORY_ENSSTAT | relpath(ROTDIR) }}/{{ head }}atmf{{ '%03d' % fhr }}.ensspread.nc" + - "{{ COMIN_ATMOS_HISTORY_ENSSTAT | relpath(ROTDIR) }}/{{ head }}atmf{{ '%03d' % fhr }}.ensspread.nc" {% endif %} {% endfor %} @@ -54,15 +54,15 @@ enkf: "atmensstat"] %} {% endif %} {% for file in da_files %} - - "{{ 
COM_ATMOS_ANALYSIS_ENSSTAT | relpath(ROTDIR) }}/{{ head }}{{ file }}" + - "{{ COMIN_ATMOS_ANALYSIS_ENSSTAT | relpath(ROTDIR) }}/{{ head }}{{ file }}" {% endfor %} # Ensemble mean analyses/increments # 6-hr analysis/increment {% if do_calc_increment %} - - "{{ COM_ATMOS_ANALYSIS_ENSSTAT | relpath(ROTDIR) }}/{{ head }}atmanl.ensmean.nc" + - "{{ COMIN_ATMOS_ANALYSIS_ENSSTAT | relpath(ROTDIR) }}/{{ head }}atmanl.ensmean.nc" {% else %} - - "{{ COM_ATMOS_ANALYSIS_ENSSTAT | relpath(ROTDIR) }}/{{ head }}atminc.ensmean.nc" + - "{{ COMIN_ATMOS_ANALYSIS_ENSSTAT | relpath(ROTDIR) }}/{{ head }}atminc.ensmean.nc" {% endif %} {% if DOIAU %} @@ -71,10 +71,10 @@ enkf: {% for fhr in iaufhrs if fhr != 6 %} {% if do_calc_increment %} # Store analyses instead of increments - - "{{ COM_ATMOS_ANALYSIS_ENSSTAT | relpath(ROTDIR) }}/{{ head }}atma{{ '%03d' % fhr }}.ensmean.nc" + - "{{ COMIN_ATMOS_ANALYSIS_ENSSTAT | relpath(ROTDIR) }}/{{ head }}atma{{ '%03d' % fhr }}.ensmean.nc" {% else %} # Store increments - - "{{ COM_ATMOS_ANALYSIS_ENSSTAT | relpath(ROTDIR) }}/{{ head }}atmi{{ '%03d' % fhr }}.ensmean.nc" + - "{{ COMIN_ATMOS_ANALYSIS_ENSSTAT | relpath(ROTDIR) }}/{{ head }}atmi{{ '%03d' % fhr }}.ensmean.nc" {% endif %} {% endfor %} diff --git a/parm/archive/enkf_grp.yaml.j2 b/parm/archive/enkf_grp.yaml.j2 index 85d5854e3b..933ca45caf 100644 --- a/parm/archive/enkf_grp.yaml.j2 +++ b/parm/archive/enkf_grp.yaml.j2 @@ -5,25 +5,25 @@ enkf_grp: {% for mem in range(first_group_mem, last_group_mem + 1) %} {% set imem = mem - first_group_mem %} # Construct member COM directories - {% set COM_ATMOS_ANALYSIS_MEM = COM_ATMOS_ANALYSIS_MEM_list[imem] %} - {% set COM_ATMOS_HISTORY_MEM = COM_ATMOS_HISTORY_MEM_list[imem] %} - {% set COM_ATMOS_RESTART_MEM = COM_ATMOS_RESTART_MEM_list[imem] %} + {% set COMIN_ATMOS_ANALYSIS_MEM = COMIN_ATMOS_ANALYSIS_MEM_list[imem] %} + {% set COMIN_ATMOS_HISTORY_MEM = COMIN_ATMOS_HISTORY_MEM_list[imem] %} + {% set COMIN_ATMOS_RESTART_MEM = COMIN_ATMOS_RESTART_MEM_list[imem] 
%} # Forecast data {% for fhr in range(3, 10, 3) %} - - "{{ COM_ATMOS_HISTORY_MEM | relpath(ROTDIR) }}/{{ head }}atmf{{ "%03d" % fhr }}.nc" + - "{{ COMIN_ATMOS_HISTORY_MEM | relpath(ROTDIR) }}/{{ head }}atmf{{ "%03d" % fhr }}.nc" {% endfor %} # Only store the 6-hour surface forecast - - "{{ COM_ATMOS_HISTORY_MEM | relpath(ROTDIR) }}/{{ head }}sfcf006.nc" + - "{{ COMIN_ATMOS_HISTORY_MEM | relpath(ROTDIR) }}/{{ head }}sfcf006.nc" # Store the individual member analysis data {% if not lobsdiag_forenkf %} - - "{{ COM_ATMOS_RESTART_MEM | relpath(ROTDIR) }}/{{ head }}gsistat" + - "{{ COMIN_ATMOS_RESTART_MEM | relpath(ROTDIR) }}/{{ head }}gsistat" {% endif %} {% if do_calc_increment %} - - "{{ COM_ATMOS_ANALYSIS_MEM | relpath(ROTDIR) }}/{{ head }}atmanl.nc" + - "{{ COMIN_ATMOS_ANALYSIS_MEM | relpath(ROTDIR) }}/{{ head }}atmanl.nc" {% else %} - - "{{ COM_ATMOS_ANALYSIS_MEM | relpath(ROTDIR) }}/{{ head }}ratminc.nc" + - "{{ COMIN_ATMOS_ANALYSIS_MEM | relpath(ROTDIR) }}/{{ head }}ratminc.nc" {% endif %} {% endfor %} # first_group_mem to last_group_mem diff --git a/parm/archive/enkf_restarta_grp.yaml.j2 b/parm/archive/enkf_restarta_grp.yaml.j2 index 8a4ea18bc1..41e03edc92 100644 --- a/parm/archive/enkf_restarta_grp.yaml.j2 +++ b/parm/archive/enkf_restarta_grp.yaml.j2 @@ -5,16 +5,16 @@ enkf_restarta_grp: {% for mem in range(first_group_mem, last_group_mem + 1) %} {% set imem = mem - first_group_mem %} # Construct the pertinent member COM directories - {% set COM_ATMOS_ANALYSIS_MEM = COM_ATMOS_ANALYSIS_MEM_list[imem] %} - {% set COM_ATMOS_HISTORY_MEM = COM_ATMOS_HISTORY_MEM_list[imem] %} - {% set COM_ATMOS_RESTART_MEM = COM_ATMOS_RESTART_MEM_list[imem] %} + {% set COMIN_ATMOS_ANALYSIS_MEM = COMIN_ATMOS_ANALYSIS_MEM_list[imem] %} + {% set COMIN_ATMOS_HISTORY_MEM = COMIN_ATMOS_HISTORY_MEM_list[imem] %} + {% set COMIN_ATMOS_RESTART_MEM = COMIN_ATMOS_RESTART_MEM_list[imem] %} # Store bias data {% if not lobsdiag_forenkf %} - - "{{ COM_ATMOS_RESTART_MEM | relpath(ROTDIR) }}/{{ head 
}}abias" - - "{{ COM_ATMOS_RESTART_MEM | relpath(ROTDIR) }}/{{ head }}abias_air" - - "{{ COM_ATMOS_RESTART_MEM | relpath(ROTDIR) }}/{{ head }}abias_int" - - "{{ COM_ATMOS_RESTART_MEM | relpath(ROTDIR) }}/{{ head }}abias_pc" + - "{{ COMIN_ATMOS_RESTART_MEM | relpath(ROTDIR) }}/{{ head }}abias" + - "{{ COMIN_ATMOS_RESTART_MEM | relpath(ROTDIR) }}/{{ head }}abias_air" + - "{{ COMIN_ATMOS_RESTART_MEM | relpath(ROTDIR) }}/{{ head }}abias_int" + - "{{ COMIN_ATMOS_RESTART_MEM | relpath(ROTDIR) }}/{{ head }}abias_pc" {% endif %} # Member surface analysis data @@ -25,29 +25,29 @@ enkf_restarta_grp: {% endif %} {% set anl_time = current_cycle | add_to_datetime(anl_delta) %} {% for itile in range(1, 7) %} - - "{{ COM_ATMOS_RESTART_MEM | relpath(ROTDIR) }}/{{ anl_time | to_YMD }}.{{ anl_time | strftime("%H") }}0000.sfcanl_data.tile{{ itile }}.nc" + - "{{ COMIN_ATMOS_RESTART_MEM | relpath(ROTDIR) }}/{{ anl_time | to_YMD }}.{{ anl_time | strftime("%H") }}0000.sfcanl_data.tile{{ itile }}.nc" {% endfor %} # Member atmospheric analysis data {% if do_calc_increment %} - - "{{ COM_ATMOS_ANALYSIS_MEM | relpath(ROTDIR) }}/{{ head }}atmanl.nc" + - "{{ COMIN_ATMOS_ANALYSIS_MEM | relpath(ROTDIR) }}/{{ head }}atmanl.nc" {% else %} - - "{{ COM_ATMOS_ANALYSIS_MEM | relpath(ROTDIR) }}/{{ head }}ratminc.nc" + - "{{ COMIN_ATMOS_ANALYSIS_MEM | relpath(ROTDIR) }}/{{ head }}ratminc.nc" {% endif %} # Member increments {% for iaufhr in iaufhrs if iaufhr != 6 %} {% set iaufhr = iaufhr %} {% if do_calc_increment %} - - "{{ COM_ATMOS_ANALYSIS_MEM | relpath(ROTDIR) }}/{{ head }}atma{{ '%03d' % iaufhr }}.nc" + - "{{ COMIN_ATMOS_ANALYSIS_MEM | relpath(ROTDIR) }}/{{ head }}atma{{ '%03d' % iaufhr }}.nc" {% else %} - - "{{ COM_ATMOS_ANALYSIS_MEM | relpath(ROTDIR) }}/{{ head }}ratmi{{ '%03d' % iaufhr }}.nc" + - "{{ COMIN_ATMOS_ANALYSIS_MEM | relpath(ROTDIR) }}/{{ head }}ratmi{{ '%03d' % iaufhr }}.nc" {% endif %} {% endfor %} # iaufhr in iaufhrs # Conventional data {% if not lobsdiag_forenkf and not 
DO_JEDIATMENS %} - - "{{ COM_ATMOS_RESTART_MEM | relpath(ROTDIR) }}/{{ head }}cnvstat" + - "{{ COMIN_ATMOS_RESTART_MEM | relpath(ROTDIR) }}/{{ head }}cnvstat" {% endif %} {% endfor %} # first_group_mem to last_group_mem diff --git a/parm/archive/enkf_restartb_grp.yaml.j2 b/parm/archive/enkf_restartb_grp.yaml.j2 index 34fde9d7ca..7cd799f0a9 100644 --- a/parm/archive/enkf_restartb_grp.yaml.j2 +++ b/parm/archive/enkf_restartb_grp.yaml.j2 @@ -4,7 +4,7 @@ enkf_restartb_grp: required: {% for mem in range(first_group_mem, last_group_mem + 1) %} {% set imem = mem - first_group_mem %} - {% set COM_ATMOS_RESTART_MEM = COM_ATMOS_RESTART_MEM_list[imem] %} + {% set COMIN_ATMOS_RESTART_MEM = COMIN_ATMOS_RESTART_MEM_list[imem] %} # Grab surface analysis data. # If IAU is on, grab the beginning of the window, otherwise grab the center. @@ -18,7 +18,7 @@ enkf_restartb_grp: {% set offset_HH = offset_dt | strftime("%H") %} {% set prefix = offset_YMD + "." + offset_HH + "0000" %} {% for itile in range(1, 7) %} - - "{{ COM_ATMOS_RESTART_MEM | relpath(ROTDIR) }}/{{ prefix }}.sfcanl_data.tile{{ itile }}.nc" + - "{{ COMIN_ATMOS_RESTART_MEM | relpath(ROTDIR) }}/{{ prefix }}.sfcanl_data.tile{{ itile }}.nc" {% endfor %} # Now get the restart files. @@ -28,10 +28,10 @@ enkf_restartb_grp: {% set r_prefix = r_dt | to_YMD + "." 
+ r_dt | strftime("%H") + "0000" %} {% for itile in range(1, 7) %} {% for datatype in ["ca_data", "fv_core.res", "fv_srf_wnd.res", "fv_tracer.res", "phy_data", "sfc_data"] %} - - "{{ COM_ATMOS_RESTART_MEM | relpath(ROTDIR) }}/{{ r_prefix }}.{{datatype}}.tile{{ itile }}.nc" + - "{{ COMIN_ATMOS_RESTART_MEM | relpath(ROTDIR) }}/{{ r_prefix }}.{{datatype}}.tile{{ itile }}.nc" {% endfor %} {% endfor %} - - "{{ COM_ATMOS_RESTART_MEM | relpath(ROTDIR) }}/{{ r_prefix }}.coupler.res" - - "{{ COM_ATMOS_RESTART_MEM | relpath(ROTDIR) }}/{{ r_prefix }}.fv_core.res.nc" + - "{{ COMIN_ATMOS_RESTART_MEM | relpath(ROTDIR) }}/{{ r_prefix }}.coupler.res" + - "{{ COMIN_ATMOS_RESTART_MEM | relpath(ROTDIR) }}/{{ r_prefix }}.fv_core.res.nc" {% endfor %} {% endfor %} diff --git a/parm/archive/gdas.yaml.j2 b/parm/archive/gdas.yaml.j2 index 26540156cd..12b89f2e61 100644 --- a/parm/archive/gdas.yaml.j2 +++ b/parm/archive/gdas.yaml.j2 @@ -35,22 +35,22 @@ gdas: {% endif %} # Analysis GRIB2 (sub-sampled) data - - "{{ COM_ATMOS_GRIB_0p25 | relpath(ROTDIR) }}/{{ head }}pgrb2.0p25.anl" - - "{{ COM_ATMOS_GRIB_0p25 | relpath(ROTDIR) }}/{{ head }}pgrb2.0p25.anl.idx" - - "{{ COM_ATMOS_GRIB_1p00 | relpath(ROTDIR) }}/{{ head }}pgrb2.1p00.anl" - - "{{ COM_ATMOS_GRIB_1p00 | relpath(ROTDIR) }}/{{ head }}pgrb2.1p00.anl.idx" + - "{{ COMIN_ATMOS_GRIB_0p25 | relpath(ROTDIR) }}/{{ head }}pgrb2.0p25.anl" + - "{{ COMIN_ATMOS_GRIB_0p25 | relpath(ROTDIR) }}/{{ head }}pgrb2.0p25.anl.idx" + - "{{ COMIN_ATMOS_GRIB_1p00 | relpath(ROTDIR) }}/{{ head }}pgrb2.1p00.anl" + - "{{ COMIN_ATMOS_GRIB_1p00 | relpath(ROTDIR) }}/{{ head }}pgrb2.1p00.anl.idx" # Analysis netCDF (raw) data - - "{{ COM_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}atmanl.nc" - - "{{ COM_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}sfcanl.nc" + - "{{ COMIN_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}atmanl.nc" + - "{{ COMIN_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}sfcanl.nc" {% if DOHYBVAR %} # Ensemble analysis residual - - "{{ 
COM_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}atmanl.ensres.nc" + - "{{ COMIN_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}atmanl.ensres.nc" {% if DOIAU %} # Ensemble IAU analysis residuals {% for fhr in iaufhrs if fhr != 6 %} - - "{{ COM_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}atma{{ '%03d' % fhr }}.ensres.nc" + - "{{ COMIN_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}atma{{ '%03d' % fhr }}.ensres.nc" {% endfor %} {% endif %} # End of ensemble analysis mean residuals @@ -58,51 +58,51 @@ gdas: # Analysis state {% if DO_JEDIATMVAR %} - - "{{ COM_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}atmvar.yaml" - - "{{ COM_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}atmstat" + - "{{ COMIN_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}atmvar.yaml" + - "{{ COMIN_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}atmstat" {% else %} - - "{{ COM_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}gsistat" - - "{{ COM_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}cnvstat" - - "{{ COM_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}oznstat" - - "{{ COM_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}radstat" + - "{{ COMIN_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}gsistat" + - "{{ COMIN_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}cnvstat" + - "{{ COMIN_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}oznstat" + - "{{ COMIN_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}radstat" {% endif %} {% if AERO_ANL_CDUMP == "gdas" or AERO_ANL_CDUMP == "both" %} - - "{{ COM_CHEM_ANALYSIS | relpath(ROTDIR) }}/{{ head }}aerostat" + - "{{ COMIN_CHEM_ANALYSIS | relpath(ROTDIR) }}/{{ head }}aerostat" {% endif %} {% if DO_PREP_OBS_AERO %} - - "{{ COM_OBS | relpath(ROTDIR) }}/{{ head }}aeroobs" - - "{{ COM_OBS | relpath(ROTDIR) }}/{{ head }}aerorawobs" + - "{{ COMIN_OBS | relpath(ROTDIR) }}/{{ head }}aeroobs" + - "{{ COMIN_OBS | relpath(ROTDIR) }}/{{ head }}aerorawobs" {% endif %} {% if DO_JEDISNOWDA %} - - "{{ COM_SNOW_ANALYSIS | relpath(ROTDIR) }}/{{ head }}snowstat.tgz" + - "{{ 
COMIN_SNOW_ANALYSIS | relpath(ROTDIR) }}/{{ head }}snowstat.tgz" {% endif %} # Ozone verification {% if DO_VERFOZN %} - - "{{ COM_ATMOS_OZNMON | relpath(ROTDIR) }}/time/bad_cnt.{{ cycle_YMDH }}" - - "{{ COM_ATMOS_OZNMON | relpath(ROTDIR) }}/time/bad_diag.{{ cycle_YMDH }}" - - "{{ COM_ATMOS_OZNMON | relpath(ROTDIR) }}/time/bad_pen.{{ cycle_YMDH }}" - - "{{ COM_ATMOS_OZNMON | relpath(ROTDIR) }}/time/stdout.time.tar.gz" - - "{{ COM_ATMOS_OZNMON | relpath(ROTDIR) }}/horiz/stdout.horiz.tar.gz" + - "{{ COMIN_ATMOS_OZNMON | relpath(ROTDIR) }}/time/bad_cnt.{{ cycle_YMDH }}" + - "{{ COMIN_ATMOS_OZNMON | relpath(ROTDIR) }}/time/bad_diag.{{ cycle_YMDH }}" + - "{{ COMIN_ATMOS_OZNMON | relpath(ROTDIR) }}/time/bad_pen.{{ cycle_YMDH }}" + - "{{ COMIN_ATMOS_OZNMON | relpath(ROTDIR) }}/time/stdout.time.tar.gz" + - "{{ COMIN_ATMOS_OZNMON | relpath(ROTDIR) }}/horiz/stdout.horiz.tar.gz" - "logs/{{ cycle_YMDH }}/{{ RUN }}verfozn.log" {% endif %} # Radiance verification {% if DO_VERFRAD %} - - "{{ COM_ATMOS_RADMON | relpath(ROTDIR) }}/radmon_angle.tar.gz" - - "{{ COM_ATMOS_RADMON | relpath(ROTDIR) }}/radmon_bcoef.tar.gz" - - "{{ COM_ATMOS_RADMON | relpath(ROTDIR) }}/radmon_bcor.tar.gz" - - "{{ COM_ATMOS_RADMON | relpath(ROTDIR) }}/radmon_time.tar.gz" + - "{{ COMIN_ATMOS_RADMON | relpath(ROTDIR) }}/radmon_angle.tar.gz" + - "{{ COMIN_ATMOS_RADMON | relpath(ROTDIR) }}/radmon_bcoef.tar.gz" + - "{{ COMIN_ATMOS_RADMON | relpath(ROTDIR) }}/radmon_bcor.tar.gz" + - "{{ COMIN_ATMOS_RADMON | relpath(ROTDIR) }}/radmon_time.tar.gz" - "logs/{{ cycle_YMDH }}/{{ RUN }}verfrad.log" {% endif %} # Minimization monitor {% if DO_VMINMON %} - - "{{ COM_ATMOS_MINMON | relpath(ROTDIR) }}/{{ cycle_YMDH }}.costs.txt" - - "{{ COM_ATMOS_MINMON | relpath(ROTDIR) }}/{{ cycle_YMDH }}.cost_terms.txt" - - "{{ COM_ATMOS_MINMON | relpath(ROTDIR) }}/{{ cycle_YMDH }}.gnorms.ieee_d" - - "{{ COM_ATMOS_MINMON | relpath(ROTDIR) }}/{{ cycle_YMDH }}.reduction.ieee_d" - - "{{ COM_ATMOS_MINMON | relpath(ROTDIR) }}/gnorm_data.txt" 
+ - "{{ COMIN_ATMOS_MINMON | relpath(ROTDIR) }}/{{ cycle_YMDH }}.costs.txt" + - "{{ COMIN_ATMOS_MINMON | relpath(ROTDIR) }}/{{ cycle_YMDH }}.cost_terms.txt" + - "{{ COMIN_ATMOS_MINMON | relpath(ROTDIR) }}/{{ cycle_YMDH }}.gnorms.ieee_d" + - "{{ COMIN_ATMOS_MINMON | relpath(ROTDIR) }}/{{ cycle_YMDH }}.reduction.ieee_d" + - "{{ COMIN_ATMOS_MINMON | relpath(ROTDIR) }}/gnorm_data.txt" - "logs/{{ cycle_YMDH }}/{{ RUN }}vminmon.log" {% endif %} {% endif %} # End of cycled data @@ -118,27 +118,27 @@ gdas: {% for fhr in range(0, FHMAX + 1, 3) %} # Forecast GRIB2 data - - "{{ COM_ATMOS_GRIB_0p25 | relpath(ROTDIR) }}/{{ head }}pgrb2.0p25.f{{ '%03d' % fhr }}" - - "{{ COM_ATMOS_GRIB_0p25 | relpath(ROTDIR) }}/{{ head }}pgrb2.0p25.f{{ '%03d' % fhr }}.idx" - - "{{ COM_ATMOS_GRIB_1p00 | relpath(ROTDIR) }}/{{ head }}pgrb2.1p00.f{{ '%03d' % fhr }}" - - "{{ COM_ATMOS_GRIB_1p00 | relpath(ROTDIR) }}/{{ head }}pgrb2.1p00.f{{ '%03d' % fhr }}.idx" + - "{{ COMIN_ATMOS_GRIB_0p25 | relpath(ROTDIR) }}/{{ head }}pgrb2.0p25.f{{ '%03d' % fhr }}" + - "{{ COMIN_ATMOS_GRIB_0p25 | relpath(ROTDIR) }}/{{ head }}pgrb2.0p25.f{{ '%03d' % fhr }}.idx" + - "{{ COMIN_ATMOS_GRIB_1p00 | relpath(ROTDIR) }}/{{ head }}pgrb2.1p00.f{{ '%03d' % fhr }}" + - "{{ COMIN_ATMOS_GRIB_1p00 | relpath(ROTDIR) }}/{{ head }}pgrb2.1p00.f{{ '%03d' % fhr }}.idx" # Forecast GRIB2 fluxes - - "{{ COM_ATMOS_MASTER | relpath(ROTDIR) }}/{{ head }}sfluxgrbf{{ '%03d' % fhr }}.grib2" - - "{{ COM_ATMOS_MASTER | relpath(ROTDIR) }}/{{ head }}sfluxgrbf{{ '%03d' % fhr }}.grib2.idx" + - "{{ COMIN_ATMOS_MASTER | relpath(ROTDIR) }}/{{ head }}sfluxgrbf{{ '%03d' % fhr }}.grib2" + - "{{ COMIN_ATMOS_MASTER | relpath(ROTDIR) }}/{{ head }}sfluxgrbf{{ '%03d' % fhr }}.grib2.idx" # FV3 log - - "{{ COM_ATMOS_HISTORY | relpath(ROTDIR) }}/{{ head }}atm.logf{{ '%03d' % fhr }}.txt" + - "{{ COMIN_ATMOS_HISTORY | relpath(ROTDIR) }}/{{ head }}atm.logf{{ '%03d' % fhr }}.txt" # Raw netCDF forecasts - - "{{ COM_ATMOS_HISTORY | relpath(ROTDIR) }}/{{ head }}atmf{{ 
'%03d' % fhr }}.nc" - - "{{ COM_ATMOS_HISTORY | relpath(ROTDIR) }}/{{ head }}sfcf{{ '%03d' % fhr }}.nc" + - "{{ COMIN_ATMOS_HISTORY | relpath(ROTDIR) }}/{{ head }}atmf{{ '%03d' % fhr }}.nc" + - "{{ COMIN_ATMOS_HISTORY | relpath(ROTDIR) }}/{{ head }}sfcf{{ '%03d' % fhr }}.nc" {% endfor %} optional: {% if MODE == "cycled" %} {% if DO_VERFRAD %} # Radiance verification (only created if there are problems) - - "{{ COM_ATMOS_RADMON | relpath(ROTDIR) }}/bad_diag.{{ cycle_YMDH }}" - - "{{ COM_ATMOS_RADMON | relpath(ROTDIR) }}/bad_pen.{{ cycle_YMDH }}" - - "{{ COM_ATMOS_RADMON | relpath(ROTDIR) }}/low_count.{{ cycle_YMDH }}" - - "{{ COM_ATMOS_RADMON | relpath(ROTDIR) }}/warning.{{ cycle_YMDH }}" + - "{{ COMIN_ATMOS_RADMON | relpath(ROTDIR) }}/bad_diag.{{ cycle_YMDH }}" + - "{{ COMIN_ATMOS_RADMON | relpath(ROTDIR) }}/bad_pen.{{ cycle_YMDH }}" + - "{{ COMIN_ATMOS_RADMON | relpath(ROTDIR) }}/low_count.{{ cycle_YMDH }}" + - "{{ COMIN_ATMOS_RADMON | relpath(ROTDIR) }}/warning.{{ cycle_YMDH }}" {% endif %} {% if DO_VERFOZN %} @@ -150,10 +150,10 @@ gdas: {% for group in [ "horiz", "time" ] %} {% if group == "horiz" %} {% set suffix = ".gz" %} {% else %} {% set suffix = "" %} {% endif %} {% for type in oznmon_types %} - - "{{ COM_ATMOS_OZNMON | relpath(ROTDIR) }}/{{ group }}/{{ type }}.anl.ctl" - - "{{ COM_ATMOS_OZNMON | relpath(ROTDIR) }}/{{ group }}/{{ type }}.anl.{{ cycle_YMDH }}.ieee_d{{ suffix }}" - - "{{ COM_ATMOS_OZNMON | relpath(ROTDIR) }}/{{ group }}/{{ type }}.ges.ctl" - - "{{ COM_ATMOS_OZNMON | relpath(ROTDIR) }}/{{ group }}/{{ type }}.ges.{{ cycle_YMDH }}.ieee_d{{ suffix }}" + - "{{ COMIN_ATMOS_OZNMON | relpath(ROTDIR) }}/{{ group }}/{{ type }}.anl.ctl" + - "{{ COMIN_ATMOS_OZNMON | relpath(ROTDIR) }}/{{ group }}/{{ type }}.anl.{{ cycle_YMDH }}.ieee_d{{ suffix }}" + - "{{ COMIN_ATMOS_OZNMON | relpath(ROTDIR) }}/{{ group }}/{{ type }}.ges.ctl" + - "{{ COMIN_ATMOS_OZNMON | relpath(ROTDIR) }}/{{ group }}/{{ type }}.ges.{{ cycle_YMDH }}.ieee_d{{ suffix }}" {% endfor %} {% 
endfor %} {% endif %} diff --git a/parm/archive/gdas_restarta.yaml.j2 b/parm/archive/gdas_restarta.yaml.j2 index af1acbdbe7..4c0522fed7 100644 --- a/parm/archive/gdas_restarta.yaml.j2 +++ b/parm/archive/gdas_restarta.yaml.j2 @@ -4,10 +4,10 @@ gdas_restarta: target: "{{ ATARDIR }}/{{ cycle_YMDH }}/gdas_restarta.tar" required: # Deterministic analysis increments - - "{{ COM_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}atminc.nc" + - "{{ COMIN_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}atminc.nc" # IAU increments {% for iaufhr in iaufhrs if iaufhr != 6 %} - - "{{ COM_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}atmi{{ "%03d" % iaufhr }}.nc" + - "{{ COMIN_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}atmi{{ "%03d" % iaufhr }}.nc" {% endfor %} # Surface analysis tiles @@ -19,36 +19,36 @@ gdas_restarta: {% set anl_timedelta = anl_offset | to_timedelta %} {% set anl_time = current_cycle | add_to_datetime(anl_timedelta) %} {% for itile in range(1,7) %} - - "{{ COM_ATMOS_RESTART | relpath(ROTDIR) }}/{{ anl_time | to_YMD }}.{{ anl_time | strftime("%H") }}0000.sfcanl_data.tile{{ itile }}.nc" + - "{{ COMIN_ATMOS_RESTART | relpath(ROTDIR) }}/{{ anl_time | to_YMD }}.{{ anl_time | strftime("%H") }}0000.sfcanl_data.tile{{ itile }}.nc" {% endfor %} # Initial biases {% if not DO_JEDIATMVAR %} - - "{{ COM_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}abias" - - "{{ COM_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}abias_air" - - "{{ COM_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}abias_pc" - - "{{ COM_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}radstat" - - "{{ COM_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}cnvstat" - - "{{ COM_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}abias_int" - - "{{ COM_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}dtfanl.nc" - - "{{ COM_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}loginc.txt" + - "{{ COMIN_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}abias" + - "{{ COMIN_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}abias_air" + - "{{ 
COMIN_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}abias_pc" + - "{{ COMIN_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}radstat" + - "{{ COMIN_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}cnvstat" + - "{{ COMIN_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}abias_int" + - "{{ COMIN_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}dtfanl.nc" + - "{{ COMIN_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}loginc.txt" {% endif %} # Snow surface data {% if DO_JEDISNOWDA %} {% for itile in range(1,7) %} # Snow analysis is 3dvar - - "{{ COM_SNOW_ANALYSIS | relpath(ROTDIR) }}/snowinc.{{ cycle_YMD }}.{{ cycle_HH }}0000.sfc_data.tile{{ itile }}.nc" - - "{{ COM_SNOW_ANALYSIS | relpath(ROTDIR) }}/{{ cycle_YMD }}.{{ cycle_HH }}0000.sfc_data.tile{{ itile }}.nc" + - "{{ COMIN_SNOW_ANALYSIS | relpath(ROTDIR) }}/snowinc.{{ cycle_YMD }}.{{ cycle_HH }}0000.sfc_data.tile{{ itile }}.nc" + - "{{ COMIN_SNOW_ANALYSIS | relpath(ROTDIR) }}/{{ cycle_YMD }}.{{ cycle_HH }}0000.sfc_data.tile{{ itile }}.nc" {% endfor %} {% endif %} # Snow configuration yaml {% if DO_JEDISNOWDA %} - - "{{ COM_CONF | relpath(ROTDIR) }}/{{ head }}letkfoi.yaml" + - "{{ COMIN_CONF | relpath(ROTDIR) }}/{{ head }}letkfoi.yaml" {% endif %} # Input BUFR files - - "{{ COM_OBS | relpath(ROTDIR) }}/{{ head }}nsstbufr" - - "{{ COM_OBS | relpath(ROTDIR) }}/{{ head }}prepbufr" - - "{{ COM_OBS | relpath(ROTDIR) }}/{{ head }}prepbufr.acft_profiles" + - "{{ COMIN_OBS | relpath(ROTDIR) }}/{{ head }}nsstbufr" + - "{{ COMIN_OBS | relpath(ROTDIR) }}/{{ head }}prepbufr" + - "{{ COMIN_OBS | relpath(ROTDIR) }}/{{ head }}prepbufr.acft_profiles" diff --git a/parm/archive/gdas_restartb.yaml.j2 b/parm/archive/gdas_restartb.yaml.j2 index c5cb29329f..0bbf517fb2 100644 --- a/parm/archive/gdas_restartb.yaml.j2 +++ b/parm/archive/gdas_restartb.yaml.j2 @@ -12,14 +12,14 @@ gdas_restartb: {% set offset_HH = offset_dt | strftime("%H") %} {% set prefix = offset_YMD + "." 
+ offset_HH + "0000" %} {% for itile in range(1, 7) %} - - "{{ COM_ATMOS_RESTART | relpath(ROTDIR) }}/{{ prefix }}.sfcanl_data.tile{{ itile }}.nc" + - "{{ COMIN_ATMOS_RESTART | relpath(ROTDIR) }}/{{ prefix }}.sfcanl_data.tile{{ itile }}.nc" {% endfor %} {% endif %} # Regardless, always grab the center surface analysis data. {% set prefix = cycle_YMD + "." + cycle_HH + "0000" %} {% for itile in range(1, 7) %} - - "{{ COM_ATMOS_RESTART | relpath(ROTDIR) }}/{{ prefix }}.sfcanl_data.tile{{ itile }}.nc" + - "{{ COMIN_ATMOS_RESTART | relpath(ROTDIR) }}/{{ prefix }}.sfcanl_data.tile{{ itile }}.nc" {% endfor %} # Now get the restart files. @@ -31,9 +31,9 @@ gdas_restartb: {% set r_prefix = r_YMD + "." + r_HH + "0000" %} {% for itile in range(1, 7) %} {% for datatype in ["ca_data", "fv_core.res", "fv_srf_wnd.res", "fv_tracer.res", "phy_data", "sfc_data"] %} - - "{{ COM_ATMOS_RESTART | relpath(ROTDIR) }}/{{ r_prefix }}.{{datatype}}.tile{{ itile }}.nc" + - "{{ COMIN_ATMOS_RESTART | relpath(ROTDIR) }}/{{ r_prefix }}.{{datatype}}.tile{{ itile }}.nc" {% endfor %} {% endfor %} - - "{{ COM_ATMOS_RESTART | relpath(ROTDIR) }}/{{ r_prefix }}.coupler.res" - - "{{ COM_ATMOS_RESTART | relpath(ROTDIR) }}/{{ r_prefix }}.fv_core.res.nc" + - "{{ COMIN_ATMOS_RESTART | relpath(ROTDIR) }}/{{ r_prefix }}.coupler.res" + - "{{ COMIN_ATMOS_RESTART | relpath(ROTDIR) }}/{{ r_prefix }}.fv_core.res.nc" {% endfor %} diff --git a/parm/archive/gdasice.yaml.j2 b/parm/archive/gdasice.yaml.j2 index 23337a8856..da02decf83 100644 --- a/parm/archive/gdasice.yaml.j2 +++ b/parm/archive/gdasice.yaml.j2 @@ -3,8 +3,8 @@ gdasice: name: "GDASICE" target: "{{ ATARDIR }}/{{ cycle_YMDH }}/gdasice.tar" required: - - "{{ COM_ICE_HISTORY | relpath(ROTDIR) }}/{{ head }}ic.nc" + - "{{ COMIN_ICE_HISTORY | relpath(ROTDIR) }}/{{ head }}ic.nc" {% for fhr in range(FHOUT_ICE, FHMAX+1, FHOUT_ICE) %} - - "{{ COM_ICE_HISTORY | relpath(ROTDIR) }}/{{ head }}inst.f{{ '%03d' % fhr }}.nc" + - "{{ COMIN_ICE_HISTORY | relpath(ROTDIR) }}/{{ 
head }}inst.f{{ '%03d' % fhr }}.nc" {% endfor %} - - '{{ COM_CONF | relpath(ROTDIR) }}/ufs.ice_in' + - '{{ COMIN_CONF | relpath(ROTDIR) }}/ufs.ice_in' diff --git a/parm/archive/gdasice_restart.yaml.j2 b/parm/archive/gdasice_restart.yaml.j2 index 39877674fb..15c8ba627d 100644 --- a/parm/archive/gdasice_restart.yaml.j2 +++ b/parm/archive/gdasice_restart.yaml.j2 @@ -4,4 +4,4 @@ gdasice_restart: target: "{{ ATARDIR }}/{{ cycle_YMDH }}/gdasice_restart.tar" required: # TODO explicitly name the restart files to archive - - '{{ COM_ICE_RESTART | relpath(ROTDIR) }}/*' + - '{{ COMIN_ICE_RESTART | relpath(ROTDIR) }}/*' diff --git a/parm/archive/gdasocean.yaml.j2 b/parm/archive/gdasocean.yaml.j2 index 4e92bba1a0..9e6ca38851 100644 --- a/parm/archive/gdasocean.yaml.j2 +++ b/parm/archive/gdasocean.yaml.j2 @@ -4,6 +4,6 @@ gdasocean: target: "{{ ATARDIR }}/{{ cycle_YMDH }}/gdasocean.tar" required: {% for fhr in range(FHMIN, FHMAX + 1, FHOUT_OCN) %} - - "{{ COM_OCEAN_HISTORY | relpath(ROTDIR) }}/{{ head }}inst.f{{ '%03d' % fhr }}.nc" + - "{{ COMIN_OCEAN_HISTORY | relpath(ROTDIR) }}/{{ head }}inst.f{{ '%03d' % fhr }}.nc" {% endfor %} - - '{{ COM_CONF | relpath(ROTDIR) }}/ufs.MOM_input' + - '{{ COMIN_CONF | relpath(ROTDIR) }}/ufs.MOM_input' diff --git a/parm/archive/gdasocean_analysis.yaml.j2 b/parm/archive/gdasocean_analysis.yaml.j2 index 2417ddf6e7..d127ee0b75 100644 --- a/parm/archive/gdasocean_analysis.yaml.j2 +++ b/parm/archive/gdasocean_analysis.yaml.j2 @@ -3,23 +3,23 @@ gdasocean_analysis: name: "GDASOCEAN_ANALYSIS" target: "{{ ATARDIR }}/{{ cycle_YMDH }}/gdasocean_analysis.tar" required: - - '{{ COM_OCEAN_ANALYSIS | relpath(ROTDIR) }}/{{ head }}ocninc.nc' + - '{{ COMIN_OCEAN_ANALYSIS | relpath(ROTDIR) }}/{{ head }}ocninc.nc' {% set ocngrid_cycle = '%02d' % (((cycle_HH | int) - 3) % 24) %} - - '{{ COM_OCEAN_ANALYSIS | relpath(ROTDIR) }}/gdas.t{{ ocngrid_cycle }}z.ocngrid.nc' + - '{{ COMIN_OCEAN_ANALYSIS | relpath(ROTDIR) }}/gdas.t{{ ocngrid_cycle }}z.ocngrid.nc' {% for domain 
in ["ocn", "ice"] %} - - '{{ COM_OCEAN_ANALYSIS | relpath(ROTDIR) }}/{{ head }}{{domain}}.bkgerr_stddev.nc' - - '{{ COM_OCEAN_ANALYSIS | relpath(ROTDIR) }}/{{ head }}{{domain}}.incr.nc' - - '{{ COM_OCEAN_ANALYSIS | relpath(ROTDIR) }}/{{ head }}{{domain}}ana.nc' + - '{{ COMIN_OCEAN_ANALYSIS | relpath(ROTDIR) }}/{{ head }}{{domain}}.bkgerr_stddev.nc' + - '{{ COMIN_OCEAN_ANALYSIS | relpath(ROTDIR) }}/{{ head }}{{domain}}.incr.nc' + - '{{ COMIN_OCEAN_ANALYSIS | relpath(ROTDIR) }}/{{ head }}{{domain}}ana.nc' {% if NMEM_ENS > 2 %} - - '{{ COM_OCEAN_ANALYSIS | relpath(ROTDIR) }}/{{ head }}{{domain}}.recentering_error.nc' + - '{{ COMIN_OCEAN_ANALYSIS | relpath(ROTDIR) }}/{{ head }}{{domain}}.recentering_error.nc' {% endif %} {% endfor %} {% if NMEM_ENS > 2 %} - - '{{ COM_OCEAN_ANALYSIS | relpath(ROTDIR) }}/{{ head }}ocn.ssh_steric_stddev.nc' - - '{{ COM_OCEAN_ANALYSIS | relpath(ROTDIR) }}/{{ head }}ocn.ssh_unbal_stddev.nc' - - '{{ COM_OCEAN_ANALYSIS | relpath(ROTDIR) }}/{{ head }}ocn.ssh_total_stddev.nc' - - '{{ COM_OCEAN_ANALYSIS | relpath(ROTDIR) }}/{{ head }}ocn.steric_explained_variance.nc' + - '{{ COMIN_OCEAN_ANALYSIS | relpath(ROTDIR) }}/{{ head }}ocn.ssh_steric_stddev.nc' + - '{{ COMIN_OCEAN_ANALYSIS | relpath(ROTDIR) }}/{{ head }}ocn.ssh_unbal_stddev.nc' + - '{{ COMIN_OCEAN_ANALYSIS | relpath(ROTDIR) }}/{{ head }}ocn.ssh_total_stddev.nc' + - '{{ COMIN_OCEAN_ANALYSIS | relpath(ROTDIR) }}/{{ head }}ocn.steric_explained_variance.nc' {% endif %} - - '{{ COM_OCEAN_ANALYSIS | relpath(ROTDIR) }}/{{ head }}ocn.*.stats.csv' - - '{{ COM_OCEAN_ANALYSIS | relpath(ROTDIR) }}/diags/*.nc4' - - '{{ COM_OCEAN_ANALYSIS | relpath(ROTDIR) }}/yaml/*.yaml' + - '{{ COMIN_OCEAN_ANALYSIS | relpath(ROTDIR) }}/{{ head }}ocn.*.stats.csv' + - '{{ COMIN_OCEAN_ANALYSIS | relpath(ROTDIR) }}/diags/*.nc4' + - '{{ COMIN_OCEAN_ANALYSIS | relpath(ROTDIR) }}/yaml/*.yaml' diff --git a/parm/archive/gdasocean_restart.yaml.j2 b/parm/archive/gdasocean_restart.yaml.j2 index 21bfc3955f..f2b6bfb875 100644 --- 
a/parm/archive/gdasocean_restart.yaml.j2 +++ b/parm/archive/gdasocean_restart.yaml.j2 @@ -4,5 +4,5 @@ gdasocean_restart: target: "{{ ATARDIR }}/{{ cycle_YMDH }}/gdasocean_restart.tar" required: # TODO explicitly name the restart files to archive - - '{{ COM_OCEAN_RESTART | relpath(ROTDIR) }}/*' - - '{{ COM_MED_RESTART | relpath(ROTDIR) }}/*' + - '{{ COMIN_OCEAN_RESTART | relpath(ROTDIR) }}/*' + - '{{ COMIN_MED_RESTART | relpath(ROTDIR) }}/*' diff --git a/parm/archive/gdaswave.yaml.j2 b/parm/archive/gdaswave.yaml.j2 index 74a5a64dbf..220770b38d 100644 --- a/parm/archive/gdaswave.yaml.j2 +++ b/parm/archive/gdaswave.yaml.j2 @@ -4,5 +4,5 @@ gdaswave: target: "{{ ATARDIR }}/{{ cycle_YMDH }}/gdaswave.tar" required: # TODO explicitly name the wave grid/station files to archive - - "{{ COM_WAVE_GRID | relpath(ROTDIR) }}/{{ head }}*" - - "{{ COM_WAVE_STATION | relpath(ROTDIR) }}/{{ head }}*" + - "{{ COMIN_WAVE_GRID | relpath(ROTDIR) }}/{{ head }}*" + - "{{ COMIN_WAVE_STATION | relpath(ROTDIR) }}/{{ head }}*" diff --git a/parm/archive/gdaswave_restart.yaml.j2 b/parm/archive/gdaswave_restart.yaml.j2 index 8387d48616..7ada504e2a 100644 --- a/parm/archive/gdaswave_restart.yaml.j2 +++ b/parm/archive/gdaswave_restart.yaml.j2 @@ -3,4 +3,4 @@ gdaswave_restart: target: "{{ ATARDIR }}/{{ cycle_YMDH }}/gdaswave_restart.tar" required: # TODO explicitly name the wave restart files to archive - - "{{ COM_WAVE_RESTART | relpath(ROTDIR) }}/*" + - "{{ COMIN_WAVE_RESTART | relpath(ROTDIR) }}/*" diff --git a/parm/archive/gfs_downstream.yaml.j2 b/parm/archive/gfs_downstream.yaml.j2 index 23c9383d28..ed5317b42c 100644 --- a/parm/archive/gfs_downstream.yaml.j2 +++ b/parm/archive/gfs_downstream.yaml.j2 @@ -3,10 +3,10 @@ gfs_downstream: name: "GFS_DOWNSTREAM" target: "{{ ATARDIR }}/{{ cycle_YMDH }}/gfs_downstream.tar" required: - - "{{ COM_ATMOS_GEMPAK | relpath(ROTDIR) }}/gfs_{{ cycle_YMDH }}.sfc" - - "{{ COM_ATMOS_GEMPAK | relpath(ROTDIR) }}/gfs_{{ cycle_YMDH }}.snd" + - "{{ COMIN_ATMOS_GEMPAK | 
relpath(ROTDIR) }}/gfs_{{ cycle_YMDH }}.sfc" + - "{{ COMIN_ATMOS_GEMPAK | relpath(ROTDIR) }}/gfs_{{ cycle_YMDH }}.snd" {% for i in range(1, NUM_SND_COLLECTIVES) %} - - "{{ COM_ATMOS_WMO | relpath(ROTDIR) }}/gfs_collective{{ i }}.postsnd_{{ cycle_HH }}" + - "{{ COMIN_ATMOS_WMO | relpath(ROTDIR) }}/gfs_collective{{ i }}.postsnd_{{ cycle_HH }}" {% endfor %} - - "{{ COM_ATMOS_BUFR | relpath(ROTDIR) }}/bufr.t{{ cycle_HH }}z" - - "{{ COM_ATMOS_BUFR | relpath(ROTDIR) }}/gfs.t{{ cycle_HH }}z.bufrsnd.tar.gz" + - "{{ COMIN_ATMOS_BUFR | relpath(ROTDIR) }}/bufr.t{{ cycle_HH }}z" + - "{{ COMIN_ATMOS_BUFR | relpath(ROTDIR) }}/gfs.t{{ cycle_HH }}z.bufrsnd.tar.gz" diff --git a/parm/archive/gfs_flux.yaml.j2 b/parm/archive/gfs_flux.yaml.j2 index 66c8221f60..46bd0624b6 100644 --- a/parm/archive/gfs_flux.yaml.j2 +++ b/parm/archive/gfs_flux.yaml.j2 @@ -4,6 +4,6 @@ gfs_flux: target: "{{ ATARDIR }}/{{ cycle_YMDH }}/gfs_flux.tar" required: {% for fhr in range(FHMIN_GFS, FHMAX_GFS + FHOUT_GFS, FHOUT_GFS) %} - - "{{ COM_ATMOS_MASTER | relpath(ROTDIR) }}/{{ head }}sfluxgrbf{{ '%03d' % fhr }}.grib2" - - "{{ COM_ATMOS_MASTER | relpath(ROTDIR) }}/{{ head }}sfluxgrbf{{ '%03d' % fhr }}.grib2.idx" + - "{{ COMIN_ATMOS_MASTER | relpath(ROTDIR) }}/{{ head }}sfluxgrbf{{ '%03d' % fhr }}.grib2" + - "{{ COMIN_ATMOS_MASTER | relpath(ROTDIR) }}/{{ head }}sfluxgrbf{{ '%03d' % fhr }}.grib2.idx" {% endfor %} diff --git a/parm/archive/gfs_flux_1p00.yaml.j2 b/parm/archive/gfs_flux_1p00.yaml.j2 index 2f5c9c8910..97fcd6e4d2 100644 --- a/parm/archive/gfs_flux_1p00.yaml.j2 +++ b/parm/archive/gfs_flux_1p00.yaml.j2 @@ -4,6 +4,6 @@ gfs_flux_1p00: target: "{{ ATARDIR }}/{{ cycle_YMDH }}/gfs_flux_1p00.tar" required: {% for fhr in range(FHMIN_GFS, FHMAX_GFS + FHOUT_GFS, FHOUT_GFS) %} - - "{{ COM_ATMOS_GRIB_1p00 | relpath(ROTDIR) }}/{{ head }}flux.1p00.f{{ '%03d' % fhr }}" - - "{{ COM_ATMOS_GRIB_1p00 | relpath(ROTDIR) }}/{{ head }}flux.1p00.f{{ '%03d' % fhr }}.idx" + - "{{ COMIN_ATMOS_GRIB_1p00 | relpath(ROTDIR) }}/{{ 
head }}flux.1p00.f{{ '%03d' % fhr }}" + - "{{ COMIN_ATMOS_GRIB_1p00 | relpath(ROTDIR) }}/{{ head }}flux.1p00.f{{ '%03d' % fhr }}.idx" {% endfor %} diff --git a/parm/archive/gfs_netcdfa.yaml.j2 b/parm/archive/gfs_netcdfa.yaml.j2 index 6bcafe1b89..8c0d4a813f 100644 --- a/parm/archive/gfs_netcdfa.yaml.j2 +++ b/parm/archive/gfs_netcdfa.yaml.j2 @@ -3,14 +3,14 @@ gfs_netcdfa: name: "GFS_NETCDFA" target: "{{ ATARDIR }}/{{ cycle_YMDH }}/gfs_netcdfa.tar" required: - - "{{ COM_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}atmanl.nc" - - "{{ COM_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}sfcanl.nc" - - "{{ COM_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}atminc.nc" + - "{{ COMIN_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}atmanl.nc" + - "{{ COMIN_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}sfcanl.nc" + - "{{ COMIN_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}atminc.nc" {% for iauhr in iaufhrs if iauhr != 6 %} - - "{{ COM_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}atmi{{ "%03d" % iauhr }}.nc" + - "{{ COMIN_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}atmi{{ "%03d" % iauhr }}.nc" {% endfor %} optional: {% if not DO_JEDIATMVAR %} - - "{{ COM_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}dtfanl.nc" - - "{{ COM_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}loginc.txt" + - "{{ COMIN_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}dtfanl.nc" + - "{{ COMIN_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}loginc.txt" {% endif %} diff --git a/parm/archive/gfs_netcdfb.yaml.j2 b/parm/archive/gfs_netcdfb.yaml.j2 index b0393d63b6..727f054715 100644 --- a/parm/archive/gfs_netcdfb.yaml.j2 +++ b/parm/archive/gfs_netcdfb.yaml.j2 @@ -4,6 +4,6 @@ gfs_netcdfb: target: "{{ ATARDIR }}/{{ cycle_YMDH }}/gfs_netcdfb.tar" required: {% for fhr in range(0, ARCH_GAUSSIAN_FHMAX + ARCH_GAUSSIAN_FHINC, ARCH_GAUSSIAN_FHINC) %} - - "{{ COM_ATMOS_HISTORY | relpath(ROTDIR) }}/{{ head }}atmf{{ '%03d' % fhr }}.nc" - - "{{ COM_ATMOS_HISTORY | relpath(ROTDIR) }}/{{ head }}sfcf{{ '%03d' % fhr }}.nc" + - "{{ 
COMIN_ATMOS_HISTORY | relpath(ROTDIR) }}/{{ head }}atmf{{ '%03d' % fhr }}.nc" + - "{{ COMIN_ATMOS_HISTORY | relpath(ROTDIR) }}/{{ head }}sfcf{{ '%03d' % fhr }}.nc" {% endfor %} diff --git a/parm/archive/gfs_pgrb2b.yaml.j2 b/parm/archive/gfs_pgrb2b.yaml.j2 index b06dd14b73..ca20d1a3d8 100644 --- a/parm/archive/gfs_pgrb2b.yaml.j2 +++ b/parm/archive/gfs_pgrb2b.yaml.j2 @@ -4,16 +4,16 @@ gfs_pgrb2b: target: "{{ ATARDIR }}/{{ cycle_YMDH }}/gfs_pgrb2b.tar" required: {% if MODE == "cycled" %} - - "{{ COM_ATMOS_GRIB_0p25 | relpath(ROTDIR) }}/{{ head }}pgrb2b.0p25.anl" - - "{{ COM_ATMOS_GRIB_0p25 | relpath(ROTDIR) }}/{{ head }}pgrb2b.0p25.anl.idx" - - "{{ COM_ATMOS_GRIB_1p00 | relpath(ROTDIR) }}/{{ head }}pgrb2b.1p00.anl" - - "{{ COM_ATMOS_GRIB_1p00 | relpath(ROTDIR) }}/{{ head }}pgrb2b.1p00.anl.idx" + - "{{ COMIN_ATMOS_GRIB_0p25 | relpath(ROTDIR) }}/{{ head }}pgrb2b.0p25.anl" + - "{{ COMIN_ATMOS_GRIB_0p25 | relpath(ROTDIR) }}/{{ head }}pgrb2b.0p25.anl.idx" + - "{{ COMIN_ATMOS_GRIB_1p00 | relpath(ROTDIR) }}/{{ head }}pgrb2b.1p00.anl" + - "{{ COMIN_ATMOS_GRIB_1p00 | relpath(ROTDIR) }}/{{ head }}pgrb2b.1p00.anl.idx" {% endif %} {% if ARCH_GAUSSIAN %} {% for fhr in range(0, FHMAX_GFS + FHOUT_GFS, FHOUT_GFS) %} - - "{{ COM_ATMOS_GRIB_0p25 | relpath(ROTDIR) }}/{{ head }}pgrb2b.0p25.f{{ '%03d' % fhr }}" - - "{{ COM_ATMOS_GRIB_0p25 | relpath(ROTDIR) }}/{{ head }}pgrb2b.0p25.f{{ '%03d' % fhr }}.idx" - - "{{ COM_ATMOS_GRIB_1p00 | relpath(ROTDIR) }}/{{ head }}pgrb2b.1p00.f{{ '%03d' % fhr }}" - - "{{ COM_ATMOS_GRIB_1p00 | relpath(ROTDIR) }}/{{ head }}pgrb2b.1p00.f{{ '%03d' % fhr }}.idx" + - "{{ COMIN_ATMOS_GRIB_0p25 | relpath(ROTDIR) }}/{{ head }}pgrb2b.0p25.f{{ '%03d' % fhr }}" + - "{{ COMIN_ATMOS_GRIB_0p25 | relpath(ROTDIR) }}/{{ head }}pgrb2b.0p25.f{{ '%03d' % fhr }}.idx" + - "{{ COMIN_ATMOS_GRIB_1p00 | relpath(ROTDIR) }}/{{ head }}pgrb2b.1p00.f{{ '%03d' % fhr }}" + - "{{ COMIN_ATMOS_GRIB_1p00 | relpath(ROTDIR) }}/{{ head }}pgrb2b.1p00.f{{ '%03d' % fhr }}.idx" {% endfor %} {% endif 
%} diff --git a/parm/archive/gfs_restarta.yaml.j2 b/parm/archive/gfs_restarta.yaml.j2 index c2ad717484..8f6a0b6c10 100644 --- a/parm/archive/gfs_restarta.yaml.j2 +++ b/parm/archive/gfs_restarta.yaml.j2 @@ -12,12 +12,12 @@ gfs_restarta: {% set anl_timedelta = anl_offset | to_timedelta %} {% set anl_time = current_cycle | add_to_datetime(anl_timedelta) %} {% for i_tile in range(1, 7) %} - - "{{ COM_ATMOS_RESTART | relpath(ROTDIR) }}/{{ anl_time | to_YMD }}.{{ anl_time | strftime("%H") }}0000.sfcanl_data.tile{{ i_tile }}.nc" + - "{{ COMIN_ATMOS_RESTART | relpath(ROTDIR) }}/{{ anl_time | to_YMD }}.{{ anl_time | strftime("%H") }}0000.sfcanl_data.tile{{ i_tile }}.nc" {% endfor %} {% elif MODE == "forecast-only" %} - - "{{ COM_ATMOS_INPUT | relpath(ROTDIR) }}/gfs_ctrl.nc" + - "{{ COMIN_ATMOS_INPUT | relpath(ROTDIR) }}/gfs_ctrl.nc" {% for i_tile in range(1, 7) %} - - "{{ COM_ATMOS_INPUT | relpath(ROTDIR) }}/gfs_data.tile{{ i_tile }}.nc" - - "{{ COM_ATMOS_INPUT | relpath(ROTDIR) }}/sfc_data.tile{{ i_tile }}.nc" + - "{{ COMIN_ATMOS_INPUT | relpath(ROTDIR) }}/gfs_data.tile{{ i_tile }}.nc" + - "{{ COMIN_ATMOS_INPUT | relpath(ROTDIR) }}/sfc_data.tile{{ i_tile }}.nc" {% endfor %} {% endif %} diff --git a/parm/archive/gfsa.yaml.j2 b/parm/archive/gfsa.yaml.j2 index 0a8e65d3ef..e76c26e60e 100644 --- a/parm/archive/gfsa.yaml.j2 +++ b/parm/archive/gfsa.yaml.j2 @@ -12,57 +12,57 @@ gfsa: {% endfor %} # UFS configuration - - "{{ COM_CONF | relpath(ROTDIR) }}/ufs.input.nml" + - "{{ COMIN_CONF | relpath(ROTDIR) }}/ufs.input.nml" {% if MODE == "cycled" %} # Analysis GRIB2 (gridded) data - - "{{ COM_ATMOS_GRIB_0p25 | relpath(ROTDIR) }}/{{ head }}pgrb2.0p25.anl" - - "{{ COM_ATMOS_GRIB_0p25 | relpath(ROTDIR) }}/{{ head }}pgrb2.0p25.anl.idx" - - "{{ COM_ATMOS_GRIB_1p00 | relpath(ROTDIR) }}/{{ head }}pgrb2.1p00.anl" - - "{{ COM_ATMOS_GRIB_1p00 | relpath(ROTDIR) }}/{{ head }}pgrb2.1p00.anl.idx" + - "{{ COMIN_ATMOS_GRIB_0p25 | relpath(ROTDIR) }}/{{ head }}pgrb2.0p25.anl" + - "{{ 
COMIN_ATMOS_GRIB_0p25 | relpath(ROTDIR) }}/{{ head }}pgrb2.0p25.anl.idx" + - "{{ COMIN_ATMOS_GRIB_1p00 | relpath(ROTDIR) }}/{{ head }}pgrb2.1p00.anl" + - "{{ COMIN_ATMOS_GRIB_1p00 | relpath(ROTDIR) }}/{{ head }}pgrb2.1p00.anl.idx" {% if DO_VMINMON %} # Minimization monitor - - "{{ COM_ATMOS_MINMON | relpath(ROTDIR) }}/{{ cycle_YMDH }}.costs.txt" - - "{{ COM_ATMOS_MINMON | relpath(ROTDIR) }}/{{ cycle_YMDH }}.cost_terms.txt" - - "{{ COM_ATMOS_MINMON | relpath(ROTDIR) }}/{{ cycle_YMDH }}.gnorms.ieee_d" - - "{{ COM_ATMOS_MINMON | relpath(ROTDIR) }}/{{ cycle_YMDH }}.reduction.ieee_d" - - "{{ COM_ATMOS_MINMON | relpath(ROTDIR) }}/gnorm_data.txt" + - "{{ COMIN_ATMOS_MINMON | relpath(ROTDIR) }}/{{ cycle_YMDH }}.costs.txt" + - "{{ COMIN_ATMOS_MINMON | relpath(ROTDIR) }}/{{ cycle_YMDH }}.cost_terms.txt" + - "{{ COMIN_ATMOS_MINMON | relpath(ROTDIR) }}/{{ cycle_YMDH }}.gnorms.ieee_d" + - "{{ COMIN_ATMOS_MINMON | relpath(ROTDIR) }}/{{ cycle_YMDH }}.reduction.ieee_d" + - "{{ COMIN_ATMOS_MINMON | relpath(ROTDIR) }}/gnorm_data.txt" {% endif %} # State data {% if DO_JEDIATMVAR %} - - "{{ COM_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}atmvar.yaml" - - "{{ COM_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}atmstat" + - "{{ COMIN_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}atmvar.yaml" + - "{{ COMIN_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}atmstat" {% else %} - - "{{ COM_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}gsistat" + - "{{ COMIN_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}gsistat" {% endif %} {% if AERO_ANL_CDUMP == "gfs" or AERO_ANL_CDUMP == "both" %} - - "{{ COM_CHEM_ANALYSIS | relpath(ROTDIR) }}/{{ head }}aerostat" + - "{{ COMIN_CHEM_ANALYSIS | relpath(ROTDIR) }}/{{ head }}aerostat" {% endif %} {% if DO_PREP_OBS_AERO %} - - "{{ COM_OBS | relpath(ROTDIR) }}/{{ head }}aeroobs" - - "{{ COM_OBS | relpath(ROTDIR) }}/{{ head }}aerorawobs" + - "{{ COMIN_OBS | relpath(ROTDIR) }}/{{ head }}aeroobs" + - "{{ COMIN_OBS | relpath(ROTDIR) }}/{{ head }}aerorawobs" {% endif %} 
# BUFR inputs - - "{{ COM_OBS | relpath(ROTDIR) }}/{{ head }}nsstbufr" - - "{{ COM_OBS | relpath(ROTDIR) }}/{{ head }}prepbufr" - - "{{ COM_OBS | relpath(ROTDIR) }}/{{ head }}prepbufr.acft_profiles" + - "{{ COMIN_OBS | relpath(ROTDIR) }}/{{ head }}nsstbufr" + - "{{ COMIN_OBS | relpath(ROTDIR) }}/{{ head }}prepbufr" + - "{{ COMIN_OBS | relpath(ROTDIR) }}/{{ head }}prepbufr.acft_profiles" {% endif %} # Full cycle # Forecast GRIB2 products {% for fhr in range(FHMIN_GFS, FHMAX_GFS + FHOUT_GFS, FHOUT_GFS) %} - - "{{ COM_ATMOS_GRIB_0p25 | relpath(ROTDIR) }}/{{ head }}pgrb2.0p25.f{{ '%03d' % fhr }}" - - "{{ COM_ATMOS_GRIB_0p25 | relpath(ROTDIR) }}/{{ head }}pgrb2.0p25.f{{ '%03d' % fhr }}.idx" - - "{{ COM_ATMOS_HISTORY | relpath(ROTDIR) }}/{{ head }}atm.logf{{ '%03d' % fhr }}.txt" + - "{{ COMIN_ATMOS_GRIB_0p25 | relpath(ROTDIR) }}/{{ head }}pgrb2.0p25.f{{ '%03d' % fhr }}" + - "{{ COMIN_ATMOS_GRIB_0p25 | relpath(ROTDIR) }}/{{ head }}pgrb2.0p25.f{{ '%03d' % fhr }}.idx" + - "{{ COMIN_ATMOS_HISTORY | relpath(ROTDIR) }}/{{ head }}atm.logf{{ '%03d' % fhr }}.txt" {% endfor %} optional: # Cyclone tracking data; only present if there's something to track. 
- - "{{ COM_ATMOS_TRACK | relpath(ROTDIR) }}/avno.t{{ cycle_HH }}z.cyclone.trackatcfunix" - - "{{ COM_ATMOS_TRACK | relpath(ROTDIR) }}/avnop.t{{ cycle_HH }}z.cyclone.trackatcfunix" - - "{{ COM_ATMOS_GENESIS | relpath(ROTDIR) }}/trak.gfso.atcfunix.{{ cycle_YMDH }}" - - "{{ COM_ATMOS_GENESIS | relpath(ROTDIR) }}/trak.gfso.atcfunix.altg.{{ cycle_YMDH }}" - - "{{ COM_ATMOS_GENESIS | relpath(ROTDIR) }}/storms.gfso.atcf_gen.{{ cycle_YMDH }}" - - "{{ COM_ATMOS_GENESIS | relpath(ROTDIR) }}/storms.gfso.atcf_gen.altg.{{ cycle_YMDH }}" + - "{{ COMIN_ATMOS_TRACK | relpath(ROTDIR) }}/avno.t{{ cycle_HH }}z.cyclone.trackatcfunix" + - "{{ COMIN_ATMOS_TRACK | relpath(ROTDIR) }}/avnop.t{{ cycle_HH }}z.cyclone.trackatcfunix" + - "{{ COMIN_ATMOS_GENESIS | relpath(ROTDIR) }}/trak.gfso.atcfunix.{{ cycle_YMDH }}" + - "{{ COMIN_ATMOS_GENESIS | relpath(ROTDIR) }}/trak.gfso.atcfunix.altg.{{ cycle_YMDH }}" + - "{{ COMIN_ATMOS_GENESIS | relpath(ROTDIR) }}/storms.gfso.atcf_gen.{{ cycle_YMDH }}" + - "{{ COMIN_ATMOS_GENESIS | relpath(ROTDIR) }}/storms.gfso.atcf_gen.altg.{{ cycle_YMDH }}" diff --git a/parm/archive/gfsb.yaml.j2 b/parm/archive/gfsb.yaml.j2 index cbb4d4fad8..e6ffa05766 100644 --- a/parm/archive/gfsb.yaml.j2 +++ b/parm/archive/gfsb.yaml.j2 @@ -5,16 +5,16 @@ gfsb: required: {% if MODE == "cycled" %} # GRIB2 (subsampled) analysis data - - "{{ COM_ATMOS_GRIB_0p50 | relpath(ROTDIR) }}/{{ head }}pgrb2.0p50.anl" - - "{{ COM_ATMOS_GRIB_0p50 | relpath(ROTDIR) }}/{{ head }}pgrb2.0p50.anl.idx" - - "{{ COM_ATMOS_GRIB_1p00 | relpath(ROTDIR) }}/{{ head }}pgrb2.1p00.anl" - - "{{ COM_ATMOS_GRIB_1p00 | relpath(ROTDIR) }}/{{ head }}pgrb2.1p00.anl.idx" + - "{{ COMIN_ATMOS_GRIB_0p50 | relpath(ROTDIR) }}/{{ head }}pgrb2.0p50.anl" + - "{{ COMIN_ATMOS_GRIB_0p50 | relpath(ROTDIR) }}/{{ head }}pgrb2.0p50.anl.idx" + - "{{ COMIN_ATMOS_GRIB_1p00 | relpath(ROTDIR) }}/{{ head }}pgrb2.1p00.anl" + - "{{ COMIN_ATMOS_GRIB_1p00 | relpath(ROTDIR) }}/{{ head }}pgrb2.1p00.anl.idx" {% endif %} # GRIB2 orecast data {% for 
fhr in range(FHMIN_GFS, FHMAX_GFS + FHOUT_GFS, FHOUT_GFS) %} - - "{{ COM_ATMOS_GRIB_0p50 | relpath(ROTDIR) }}/{{ head }}pgrb2.0p50.f{{ '%03d' % fhr }}" - - "{{ COM_ATMOS_GRIB_0p50 | relpath(ROTDIR) }}/{{ head }}pgrb2.0p50.f{{ '%03d' % fhr }}.idx" - - "{{ COM_ATMOS_GRIB_1p00 | relpath(ROTDIR) }}/{{ head }}pgrb2.1p00.f{{ '%03d' % fhr }}" - - "{{ COM_ATMOS_GRIB_1p00 | relpath(ROTDIR) }}/{{ head }}pgrb2.1p00.f{{ '%03d' % fhr }}.idx" + - "{{ COMIN_ATMOS_GRIB_0p50 | relpath(ROTDIR) }}/{{ head }}pgrb2.0p50.f{{ '%03d' % fhr }}" + - "{{ COMIN_ATMOS_GRIB_0p50 | relpath(ROTDIR) }}/{{ head }}pgrb2.0p50.f{{ '%03d' % fhr }}.idx" + - "{{ COMIN_ATMOS_GRIB_1p00 | relpath(ROTDIR) }}/{{ head }}pgrb2.1p00.f{{ '%03d' % fhr }}" + - "{{ COMIN_ATMOS_GRIB_1p00 | relpath(ROTDIR) }}/{{ head }}pgrb2.1p00.f{{ '%03d' % fhr }}.idx" {% endfor %} diff --git a/parm/archive/gfswave.yaml.j2 b/parm/archive/gfswave.yaml.j2 index 8422377b7a..6909421757 100644 --- a/parm/archive/gfswave.yaml.j2 +++ b/parm/archive/gfswave.yaml.j2 @@ -7,24 +7,24 @@ gfswave: {% for fh in range(0, FHMAX_HF_WAV + FHOUT_HF_WAV, FHOUT_HF_WAV) %} # NOTE This is as explicit as possible without major logic to parse wavepostGRD. # Matches files of the form "gfswave.tCCz...fHHH.grib2". 
- - "{{ COM_WAVE_GRID | relpath(ROTDIR) }}/{{ head }}*.*.f{{ '%03d' % fh }}.grib2" - - "{{ COM_WAVE_GRID | relpath(ROTDIR) }}/{{ head }}*.*.f{{ '%03d' % fh }}.grib2.idx" + - "{{ COMIN_WAVE_GRID | relpath(ROTDIR) }}/{{ head }}*.*.f{{ '%03d' % fh }}.grib2" + - "{{ COMIN_WAVE_GRID | relpath(ROTDIR) }}/{{ head }}*.*.f{{ '%03d' % fh }}.grib2.idx" {% endfor %} # Global wave GRIB2 forecast products {% for fh in range(FHMAX_HF_WAV + FHOUT_WAV, FHMAX_WAV_GFS + FHOUT_WAV, FHOUT_WAV) %} - - "{{ COM_WAVE_GRID | relpath(ROTDIR) }}/{{ head }}*.*.f{{ '%03d' % fh }}.grib2" - - "{{ COM_WAVE_GRID | relpath(ROTDIR) }}/{{ head }}*.*.f{{ '%03d' % fh }}.grib2.idx" + - "{{ COMIN_WAVE_GRID | relpath(ROTDIR) }}/{{ head }}*.*.f{{ '%03d' % fh }}.grib2" + - "{{ COMIN_WAVE_GRID | relpath(ROTDIR) }}/{{ head }}*.*.f{{ '%03d' % fh }}.grib2.idx" {% endfor %} # Wave bulletins - - "{{ COM_WAVE_STATION | relpath(ROTDIR) }}/{{ head }}bull_tar" - - "{{ COM_WAVE_STATION | relpath(ROTDIR) }}/{{ head }}cbull_tar" - - "{{ COM_WAVE_STATION | relpath(ROTDIR) }}/{{ head }}spec_tar.gz" + - "{{ COMIN_WAVE_STATION | relpath(ROTDIR) }}/{{ head }}bull_tar" + - "{{ COMIN_WAVE_STATION | relpath(ROTDIR) }}/{{ head }}cbull_tar" + - "{{ COMIN_WAVE_STATION | relpath(ROTDIR) }}/{{ head }}spec_tar.gz" # Wave IBP bulletins {% if DOIBP_WAV %} - - "{{ COM_WAVE_STATION | relpath(ROTDIR) }}/{{ head }}ibpbull_tar" - - "{{ COM_WAVE_STATION | relpath(ROTDIR) }}/{{ head }}ibpcbull_tar" - - "{{ COM_WAVE_STATION | relpath(ROTDIR) }}/{{ head }}ibp_tar" + - "{{ COMIN_WAVE_STATION | relpath(ROTDIR) }}/{{ head }}ibpbull_tar" + - "{{ COMIN_WAVE_STATION | relpath(ROTDIR) }}/{{ head }}ibpcbull_tar" + - "{{ COMIN_WAVE_STATION | relpath(ROTDIR) }}/{{ head }}ibp_tar" {% endif %} diff --git a/parm/archive/ice_6hravg.yaml.j2 b/parm/archive/ice_6hravg.yaml.j2 index 251e51b110..6eb64ae70d 100644 --- a/parm/archive/ice_6hravg.yaml.j2 +++ b/parm/archive/ice_6hravg.yaml.j2 @@ -3,7 +3,7 @@ ice_6hravg: name: "ICE_6HRAVG" target: "{{ ATARDIR }}/{{ 
cycle_YMDH }}/ice_6hravg.tar" required: - - "{{ COM_ICE_HISTORY | relpath(ROTDIR) }}/{{ head }}ic.nc" + - "{{ COMIN_ICE_HISTORY | relpath(ROTDIR) }}/{{ head }}ic.nc" {% for fhr in range(6, FHMAX_GFS + 6, 6) %} - - "{{ COM_ICE_HISTORY | relpath(ROTDIR) }}/{{ head }}6hr_avg.f{{ '%03d' % fhr }}.nc" + - "{{ COMIN_ICE_HISTORY | relpath(ROTDIR) }}/{{ head }}6hr_avg.f{{ '%03d' % fhr }}.nc" {% endfor %} diff --git a/parm/archive/ice_grib2.yaml.j2 b/parm/archive/ice_grib2.yaml.j2 index 9d52f174ab..04bc2f5873 100644 --- a/parm/archive/ice_grib2.yaml.j2 +++ b/parm/archive/ice_grib2.yaml.j2 @@ -7,13 +7,13 @@ ice_grib2: {% for fhr in range(FHOUT_ICE_GFS, FHMAX_GFS + FHOUT_ICE_GFS, FHOUT_ICE_GFS) %} {% set fhr3 = '%03d' % fhr %} {% if ICERES == 500 %} - - "{{ COM_ICE_GRIB | relpath(ROTDIR) }}/5p00/{{ head }}5p00.f{{ fhr3 }}.grib2" - - "{{ COM_ICE_GRIB | relpath(ROTDIR) }}/5p00/{{ head }}5p00.f{{ fhr3 }}.grib2.idx" + - "{{ COMIN_ICE_GRIB | relpath(ROTDIR) }}/5p00/{{ head }}5p00.f{{ fhr3 }}.grib2" + - "{{ COMIN_ICE_GRIB | relpath(ROTDIR) }}/5p00/{{ head }}5p00.f{{ fhr3 }}.grib2.idx" {% elif ICERES == 100 %} - - "{{ COM_ICE_GRIB | relpath(ROTDIR) }}/1p00/{{ head }}1p00.f{{ fhr3 }}.grib2" - - "{{ COM_ICE_GRIB | relpath(ROTDIR) }}/1p00/{{ head }}1p00.f{{ fhr3 }}.grib2.idx" + - "{{ COMIN_ICE_GRIB | relpath(ROTDIR) }}/1p00/{{ head }}1p00.f{{ fhr3 }}.grib2" + - "{{ COMIN_ICE_GRIB | relpath(ROTDIR) }}/1p00/{{ head }}1p00.f{{ fhr3 }}.grib2.idx" {% elif ICERES == 25 or ICERES == "025" %} - - "{{ COM_ICE_GRIB | relpath(ROTDIR) }}/0p25/{{ head }}0p25.f{{ fhr3 }}.grib2" - - "{{ COM_ICE_GRIB | relpath(ROTDIR) }}/0p25/{{ head }}0p25.f{{ fhr3 }}.grib2.idx" + - "{{ COMIN_ICE_GRIB | relpath(ROTDIR) }}/0p25/{{ head }}0p25.f{{ fhr3 }}.grib2" + - "{{ COMIN_ICE_GRIB | relpath(ROTDIR) }}/0p25/{{ head }}0p25.f{{ fhr3 }}.grib2.idx" {% endif %} {% endfor %} diff --git a/parm/archive/master_enkf.yaml.j2 b/parm/archive/master_enkf.yaml.j2 index fb6a3d30ac..2fd94597f8 100644 --- 
a/parm/archive/master_enkf.yaml.j2 +++ b/parm/archive/master_enkf.yaml.j2 @@ -37,12 +37,12 @@ datasets: # Archive individual member data # First, construct individual member directories from templates -# COM_ATMOS_ANALYSIS_MEM, COM_ATMOS_HISTORY_MEM, and COM_ATMOS_RESTART_MEM +# COMIN_ATMOS_ANALYSIS_MEM, COMIN_ATMOS_HISTORY_MEM, and COMIN_ATMOS_RESTART_MEM # Declare to-be-filled lists of member COM directories -{% set COM_ATMOS_ANALYSIS_MEM_list = [] %} -{% set COM_ATMOS_RESTART_MEM_list = [] %} -{% set COM_ATMOS_HISTORY_MEM_list = [] %} +{% set COMIN_ATMOS_ANALYSIS_MEM_list = [] %} +{% set COMIN_ATMOS_RESTART_MEM_list = [] %} +{% set COMIN_ATMOS_HISTORY_MEM_list = [] %} # Determine which ensemble members belong to this group {% set first_group_mem = (ENSGRP - 1) * NMEM_EARCGRP + 1 %} @@ -62,22 +62,22 @@ datasets: # This must be done in a namespace to overcome jinja scoping # Variables set inside of a for loop are lost at the end of the loop # unless they are part of a namespace -{% set com_ns = namespace(COM_ATMOS_ANALYSIS_MEM = COM_ATMOS_ANALYSIS_TMPL, - COM_ATMOS_HISTORY_MEM = COM_ATMOS_HISTORY_TMPL, - COM_ATMOS_RESTART_MEM = COM_ATMOS_RESTART_TMPL) %} +{% set com_ns = namespace(COMIN_ATMOS_ANALYSIS_MEM = COMIN_ATMOS_ANALYSIS_TMPL, + COMIN_ATMOS_HISTORY_MEM = COMIN_ATMOS_HISTORY_TMPL, + COMIN_ATMOS_RESTART_MEM = COMIN_ATMOS_RESTART_TMPL) %} {% for key in tmpl_dict.keys() %} {% set search_term = '${' + key + '}' %} {% set replace_term = tmpl_dict[key] %} -{% set com_ns.COM_ATMOS_ANALYSIS_MEM = com_ns.COM_ATMOS_ANALYSIS_MEM.replace(search_term, replace_term) %} -{% set com_ns.COM_ATMOS_HISTORY_MEM = com_ns.COM_ATMOS_HISTORY_MEM.replace(search_term, replace_term) %} -{% set com_ns.COM_ATMOS_RESTART_MEM = com_ns.COM_ATMOS_RESTART_MEM.replace(search_term, replace_term) %} +{% set com_ns.COMIN_ATMOS_ANALYSIS_MEM = com_ns.COMIN_ATMOS_ANALYSIS_MEM.replace(search_term, replace_term) %} +{% set com_ns.COMIN_ATMOS_HISTORY_MEM = 
com_ns.COMIN_ATMOS_HISTORY_MEM.replace(search_term, replace_term) %} +{% set com_ns.COMIN_ATMOS_RESTART_MEM = com_ns.COMIN_ATMOS_RESTART_MEM.replace(search_term, replace_term) %} {% endfor %} # Append the member COM directories -{% do COM_ATMOS_ANALYSIS_MEM_list.append(com_ns.COM_ATMOS_ANALYSIS_MEM)%} -{% do COM_ATMOS_HISTORY_MEM_list.append(com_ns.COM_ATMOS_HISTORY_MEM)%} -{% do COM_ATMOS_RESTART_MEM_list.append(com_ns.COM_ATMOS_RESTART_MEM)%} +{% do COMIN_ATMOS_ANALYSIS_MEM_list.append(com_ns.COMIN_ATMOS_ANALYSIS_MEM)%} +{% do COMIN_ATMOS_HISTORY_MEM_list.append(com_ns.COMIN_ATMOS_HISTORY_MEM)%} +{% do COMIN_ATMOS_RESTART_MEM_list.append(com_ns.COMIN_ATMOS_RESTART_MEM)%} {% endfor %} diff --git a/parm/archive/ocean_6hravg.yaml.j2 b/parm/archive/ocean_6hravg.yaml.j2 index dac3ce262a..58db08538f 100644 --- a/parm/archive/ocean_6hravg.yaml.j2 +++ b/parm/archive/ocean_6hravg.yaml.j2 @@ -4,5 +4,5 @@ ocean_6hravg: target: "{{ ATARDIR }}/{{ cycle_YMDH }}/ocean_6hravg.tar" required: {% for fhr in range(6, FHMAX_GFS + 6, 6) %} - - "{{ COM_OCEAN_HISTORY | relpath(ROTDIR) }}/{{ head }}6hr_avg.f{{ '%03d' % fhr }}.nc" + - "{{ COMIN_OCEAN_HISTORY | relpath(ROTDIR) }}/{{ head }}6hr_avg.f{{ '%03d' % fhr }}.nc" {% endfor %} diff --git a/parm/archive/ocean_grib2.yaml.j2 b/parm/archive/ocean_grib2.yaml.j2 index 784e30021d..e8f2e3170b 100644 --- a/parm/archive/ocean_grib2.yaml.j2 +++ b/parm/archive/ocean_grib2.yaml.j2 @@ -6,13 +6,13 @@ ocean_grib2: {% for fhr in range(FHOUT_OCN_GFS, FHMAX_GFS + FHOUT_OCN_GFS, FHOUT_OCN_GFS) %} {% set fhr3 = '%03d' % fhr %} {% if OCNRES == 500 %} - - "{{ COM_OCEAN_GRIB | relpath(ROTDIR) }}/5p00/{{ head }}5p00.f{{ fhr3 }}.grib2" - - "{{ COM_OCEAN_GRIB | relpath(ROTDIR) }}/5p00/{{ head }}5p00.f{{ fhr3 }}.grib2.idx" + - "{{ COMIN_OCEAN_GRIB | relpath(ROTDIR) }}/5p00/{{ head }}5p00.f{{ fhr3 }}.grib2" + - "{{ COMIN_OCEAN_GRIB | relpath(ROTDIR) }}/5p00/{{ head }}5p00.f{{ fhr3 }}.grib2.idx" {% elif OCNRES == 100 %} - - "{{ COM_OCEAN_GRIB | relpath(ROTDIR) 
}}/1p00/{{ head }}1p00.f{{ fhr3 }}.grib2" - - "{{ COM_OCEAN_GRIB | relpath(ROTDIR) }}/1p00/{{ head }}1p00.f{{ fhr3 }}.grib2.idx" + - "{{ COMIN_OCEAN_GRIB | relpath(ROTDIR) }}/1p00/{{ head }}1p00.f{{ fhr3 }}.grib2" + - "{{ COMIN_OCEAN_GRIB | relpath(ROTDIR) }}/1p00/{{ head }}1p00.f{{ fhr3 }}.grib2.idx" {% elif OCNRES == 25 or OCNRES == "025" %} - - "{{ COM_OCEAN_GRIB | relpath(ROTDIR) }}/0p25/{{ head }}0p25.f{{ fhr3 }}.grib2" - - "{{ COM_OCEAN_GRIB | relpath(ROTDIR) }}/0p25/{{ head }}0p25.f{{ fhr3 }}.grib2.idx" + - "{{ COMIN_OCEAN_GRIB | relpath(ROTDIR) }}/0p25/{{ head }}0p25.f{{ fhr3 }}.grib2" + - "{{ COMIN_OCEAN_GRIB | relpath(ROTDIR) }}/0p25/{{ head }}0p25.f{{ fhr3 }}.grib2.idx" {% endif %} {% endfor %} diff --git a/scripts/exgdas_enkf_earc.py b/scripts/exgdas_enkf_earc.py index 2febbc27f5..c0fc51eee5 100755 --- a/scripts/exgdas_enkf_earc.py +++ b/scripts/exgdas_enkf_earc.py @@ -34,9 +34,9 @@ def main(): for key in keys: archive_dict[key] = archive.task_config[key] - # Also import all COM* directory and template variables + # Also import all COMIN* directory and template variables for key in archive.task_config.keys(): - if key.startswith("COM"): + if key.startswith("COMIN"): archive_dict[key] = archive.task_config[key] cwd = os.getcwd() diff --git a/scripts/exglobal_archive.py b/scripts/exglobal_archive.py index bcd8d522d9..af396d382e 100755 --- a/scripts/exglobal_archive.py +++ b/scripts/exglobal_archive.py @@ -37,9 +37,9 @@ def main(): for key in keys: archive_dict[key] = archive.task_config[key] - # Also import all COM* directory and template variables + # Also import all COMIN* and COMOUT* directory and template variables for key in archive.task_config.keys(): - if key.startswith("COM"): + if key.startswith("COMIN_") or key.startswith("COMOUT_"): archive_dict[key] = archive.task_config[key] cwd = os.getcwd() diff --git a/ush/python/pygfs/task/archive.py b/ush/python/pygfs/task/archive.py index 66d94878e8..269aea0ccc 100644 --- 
a/ush/python/pygfs/task/archive.py +++ b/ush/python/pygfs/task/archive.py @@ -309,7 +309,7 @@ def _gen_relative_paths(self, root_path: str) -> Dict: rel_path_dict : Dict Dictionary of paths relative to root_path. Members will be named based on the dict names in self.config. For COM paths, the names will - follow COM_ --> _dir. For all other directories, the + follow COMIN_ --> _dir. For all other directories, the names will follow --> _dir. """ @@ -318,7 +318,7 @@ def _gen_relative_paths(self, root_path: str) -> Dict: if isinstance(value, str): if root_path in value: rel_path = value.replace(root_path, "") - rel_key = (key[4:] if key.startswith("COM_") else key).lower() + "_dir" + rel_key = (key[4:] if key.startswith("COMIN_") else key).lower() + "_dir" rel_path_dict[rel_key] = rel_path return rel_path_dict @@ -374,27 +374,28 @@ def _rename_cyclone_expt(arch_dict) -> None: if len(arch_dict.PSLOT) > 4: pslot4 = arch_dict.PSLOT[0:4].upper() - track_dir = arch_dict.COM_ATMOS_TRACK + track_dir_in = arch_dict.COMIN_ATMOS_TRACK + track_dir_out = arch_dict.COMOUT_ATMOS_TRACK run = arch_dict.RUN cycle_HH = strftime(arch_dict.current_cycle, "%H") if run == "gfs": - in_track_file = (track_dir + "/avno.t" + + in_track_file = (track_dir_in + "/avno.t" + cycle_HH + "z.cycle.trackatcfunix") - in_track_p_file = (track_dir + "/avnop.t" + + in_track_p_file = (track_dir_in + "/avnop.t" + cycle_HH + "z.cycle.trackatcfunixp") elif run == "gdas": - in_track_file = (track_dir + "/gdas.t" + + in_track_file = (track_dir_in + "/gdas.t" + cycle_HH + "z.cycle.trackatcfunix") - in_track_p_file = (track_dir + "/gdasp.t" + + in_track_p_file = (track_dir_in + "/gdasp.t" + cycle_HH + "z.cycle.trackatcfunixp") if not os.path.isfile(in_track_file): # Do not attempt to archive the outputs return - out_track_file = track_dir + "/atcfunix." + run + "." + to_YMDH(arch_dict.current_cycle) - out_track_p_file = track_dir + "/atcfunixp." + run + "." 
+ to_YMDH(arch_dict.current_cycle) + out_track_file = track_dir_out + "/atcfunix." + run + "." + to_YMDH(arch_dict.current_cycle) + out_track_p_file = track_dir_out + "/atcfunixp." + run + "." + to_YMDH(arch_dict.current_cycle) def replace_string_from_to_file(filename_in, filename_out, search_str, replace_str): From 3270ac3bf00c3ebc8166c70d84647ec44431fbae Mon Sep 17 00:00:00 2001 From: "Henry R. Winterbottom" <49202169+HenryWinterbottom-NOAA@users.noreply.github.com> Date: Tue, 18 Jun 2024 12:17:59 -0600 Subject: [PATCH 15/20] Hotfix for bug in template names. (#2697) This PR is a hotfix for an incorrectly named (e.g., non-existent) `COM/` template. Resolves #2696 Refs #2451 --- parm/archive/master_enkf.yaml.j2 | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/parm/archive/master_enkf.yaml.j2 b/parm/archive/master_enkf.yaml.j2 index 2fd94597f8..f663d02895 100644 --- a/parm/archive/master_enkf.yaml.j2 +++ b/parm/archive/master_enkf.yaml.j2 @@ -62,9 +62,9 @@ datasets: # This must be done in a namespace to overcome jinja scoping # Variables set inside of a for loop are lost at the end of the loop # unless they are part of a namespace -{% set com_ns = namespace(COMIN_ATMOS_ANALYSIS_MEM = COMIN_ATMOS_ANALYSIS_TMPL, - COMIN_ATMOS_HISTORY_MEM = COMIN_ATMOS_HISTORY_TMPL, - COMIN_ATMOS_RESTART_MEM = COMIN_ATMOS_RESTART_TMPL) %} +{% set com_ns = namespace(COMIN_ATMOS_ANALYSIS_MEM = COM_ATMOS_ANALYSIS_TMPL, + COMIN_ATMOS_HISTORY_MEM = COM_ATMOS_HISTORY_TMPL, + COMIN_ATMOS_RESTART_MEM = COM_ATMOS_RESTART_TMPL) %} {% for key in tmpl_dict.keys() %} {% set search_term = '${' + key + '}' %} From 0b810c888239853fedd0e4584fe62536c6aaacdf Mon Sep 17 00:00:00 2001 From: "Henry R. Winterbottom" <49202169+HenryWinterbottom-NOAA@users.noreply.github.com> Date: Tue, 18 Jun 2024 20:32:48 -0600 Subject: [PATCH 16/20] Removes misleading "No such file or directory" syntax errors from output files (#2688) This PR addresses issue #1252. 
The following is accomplished: - Prior to removing files, the existence of a file is checked prior to attempting to remove; this is performed as noted [here](https://github.com/NOAA-EMC/global-workflow/issues/1252#issue-1538627369); this PR only addresses the the `chgrp` issue. Refs #1252 --------- Co-authored-by: David Huber <69919478+DavidHuber-NOAA@users.noreply.github.com> --- scripts/exglobal_diag.sh | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/scripts/exglobal_diag.sh b/scripts/exglobal_diag.sh index 2d48053bd2..e1faa7b2ee 100755 --- a/scripts/exglobal_diag.sh +++ b/scripts/exglobal_diag.sh @@ -232,9 +232,11 @@ EOFdiag # Restrict diagnostic files containing rstprod data rlist="conv_gps conv_ps conv_pw conv_q conv_sst conv_t conv_uv saphir" for rtype in $rlist; do - set +e - ${CHGRP_CMD} *${rtype}* - ${STRICT_ON:-set -e} + for rfile in *"${rtype}"*; do + if [[ -s "${rfile}" ]]; then + ${CHGRP_CMD} "${rfile}" + fi + done done # If requested, create diagnostic file tarballs From 8993b42cb91144c0ab0501dc7841ea8d675c4701 Mon Sep 17 00:00:00 2001 From: Walter Kolczynski - NOAA Date: Wed, 19 Jun 2024 21:51:22 -0400 Subject: [PATCH 17/20] Eliminate post groups (#2667) Eliminates the post groups used for upp and products jobs so that each task only processes one forecast hour. This is more efficient and greatly simplifies downstream dependencies that depend on a specific forecast hour. 
Resolves #2666 Refs #2642 --- .gitignore | 8 +++++ jobs/rocoto/atmos_ensstat.sh | 20 +++-------- jobs/rocoto/atmos_products.sh | 22 ++++-------- jobs/rocoto/oceanice_products.sh | 18 ++++------ jobs/rocoto/upp.sh | 17 ++++----- parm/config/gefs/config.base | 2 ++ scripts/exgdas_enkf_earc.py | 2 +- workflow/rocoto/gefs_tasks.py | 9 ++--- workflow/rocoto/gfs_tasks.py | 59 ++++++++++---------------------- 9 files changed, 56 insertions(+), 101 deletions(-) diff --git a/.gitignore b/.gitignore index 8314f5bae9..0c15a50661 100644 --- a/.gitignore +++ b/.gitignore @@ -166,6 +166,14 @@ scripts/exemcsfc_global_sfc_prep.sh scripts/exgdas_global_marine_analysis_ecen.py scripts/exglobal_prep_ocean_obs.py # ush symlinks +ush/bufr2ioda_insitu_profile_argo.py +ush/bufr2ioda_insitu_profile_bathy.py +ush/bufr2ioda_insitu_profile_glider.py +ush/bufr2ioda_insitu_profile_marinemammal.py +ush/bufr2ioda_insitu_profile_tesac.py +ush/bufr2ioda_insitu_profile_xbtctd.py +ush/bufr2ioda_insitu_surface_altkob.py +ush/bufr2ioda_insitu_surface_trkob.py ush/chgres_cube.sh ush/emcsfc_ice_blend.sh ush/emcsfc_snow.sh diff --git a/jobs/rocoto/atmos_ensstat.sh b/jobs/rocoto/atmos_ensstat.sh index cbaaff9a0f..76ed7f0a72 100755 --- a/jobs/rocoto/atmos_ensstat.sh +++ b/jobs/rocoto/atmos_ensstat.sh @@ -15,21 +15,11 @@ if (( status != 0 )); then exit "${status}"; fi export job="atmos_ensstat" export jobid="${job}.$$" -############################################################### -# shellcheck disable=SC2153,SC2001 -IFS='_' read -ra fhrs <<< "${FHRLST//f}" # strip off the 'f's and convert to array +export FORECAST_HOUR=$(( 10#${FHR3} )) -#--------------------------------------------------------------- +############################################################### # Execute the JJOB -for fhr in "${fhrs[@]}"; do - # The analysis fhr is -001. Performing math on negative, leading 0 integers is tricky. - # The negative needs to be in front of "10#", so do some regex magic to make it happen. 
- fhr="10#${fhr}" - fhr=${fhr//10\#-/-10\#} - export FORECAST_HOUR=$(( fhr )) - "${HOMEgfs}/jobs/JGLOBAL_ATMOS_ENSSTAT" - status=$? - if (( status != 0 )); then exit "${status}"; fi -done +############################################################### +"${HOMEgfs}/jobs/JGLOBAL_ATMOS_ENSSTAT" -exit 0 +exit $? diff --git a/jobs/rocoto/atmos_products.sh b/jobs/rocoto/atmos_products.sh index 472f202de8..f6adbcf861 100755 --- a/jobs/rocoto/atmos_products.sh +++ b/jobs/rocoto/atmos_products.sh @@ -15,21 +15,13 @@ if (( status != 0 )); then exit "${status}"; fi export job="atmos_products" export jobid="${job}.$$" -############################################################### -# shellcheck disable=SC2153,SC2001 -IFS='_' read -ra fhrs <<< "${FHRLST//f}" # strip off the 'f's and convert to array +# Negatation needs to be before the base +fhr3_base="10#${FHR3}" +export FORECAST_HOUR=$(( ${fhr3_base/10#-/-10#} )) -#--------------------------------------------------------------- +############################################################### # Execute the JJOB -for fhr in "${fhrs[@]}"; do - # The analysis fhr is -001. Performing math on negative, leading 0 integers is tricky. - # The negative needs to be in front of "10#", so do some regex magic to make it happen. - fhr="10#${fhr}" - fhr=${fhr//10\#-/-10\#} - export FORECAST_HOUR=$(( fhr )) - "${HOMEgfs}/jobs/JGLOBAL_ATMOS_PRODUCTS" - status=$? - if (( status != 0 )); then exit "${status}"; fi -done +############################################################### +"${HOMEgfs}/jobs/JGLOBAL_ATMOS_PRODUCTS" -exit 0 +exit $? 
diff --git a/jobs/rocoto/oceanice_products.sh b/jobs/rocoto/oceanice_products.sh index 48816fb3a1..eb704fb35f 100755 --- a/jobs/rocoto/oceanice_products.sh +++ b/jobs/rocoto/oceanice_products.sh @@ -21,17 +21,11 @@ export PYTHONPATH export job="oceanice_products" export jobid="${job}.$$" -############################################################### -# shellcheck disable=SC2153,SC2001 -IFS='_' read -ra fhrs <<< "${FHRLST//f}" # strip off the 'f's and convert to array +export FORECAST_HOUR=$(( 10#${FHR3} )) -#--------------------------------------------------------------- +############################################################### # Execute the JJOB -for fhr in "${fhrs[@]}"; do - export FORECAST_HOUR=$(( 10#${fhr} )) - "${HOMEgfs}/jobs/JGLOBAL_OCEANICE_PRODUCTS" - status=$? - if (( status != 0 )); then exit "${status}"; fi -done - -exit 0 +############################################################### +"${HOMEgfs}/jobs/JGLOBAL_OCEANICE_PRODUCTS" + +exit $? diff --git a/jobs/rocoto/upp.sh b/jobs/rocoto/upp.sh index 18d5c12cea..da0180472d 100755 --- a/jobs/rocoto/upp.sh +++ b/jobs/rocoto/upp.sh @@ -44,16 +44,11 @@ export PYTHONPATH export job="upp" export jobid="${job}.$$" -############################################################### -# shellcheck disable=SC2153,SC2001 -IFS='_' read -ra fhrs <<< "${FHRLST//f}" # strip off the 'f's convert to array +export FORECAST_HOUR=$(( 10#${FHR3} )) +############################################################### # Execute the JJOB -for fhr in "${fhrs[@]}"; do - export FORECAST_HOUR=$(( 10#${fhr} )) - "${HOMEgfs}/jobs/JGLOBAL_ATMOS_UPP" - status=$? - if (( status != 0 )); then exit "${status}"; fi -done - -exit 0 +############################################################### +"${HOMEgfs}/jobs/JGLOBAL_ATMOS_UPP" + +exit $? 
diff --git a/parm/config/gefs/config.base b/parm/config/gefs/config.base index e98eb41208..16e0fefaba 100644 --- a/parm/config/gefs/config.base +++ b/parm/config/gefs/config.base @@ -256,6 +256,8 @@ export ILPOST=1 # gempak output frequency up to F120 export FHMIN_ENKF=${FHMIN_GFS} export FHMAX_ENKF=${FHMAX_GFS} export FHOUT_ENKF=${FHOUT_GFS} +export FHOUT_OCN=${FHOUT_OCN_GFS} +export FHOUT_ICE=${FHOUT_ICE_GFS} # GFS restart interval in hours export restart_interval_gfs=12 diff --git a/scripts/exgdas_enkf_earc.py b/scripts/exgdas_enkf_earc.py index c0fc51eee5..a515ec9746 100755 --- a/scripts/exgdas_enkf_earc.py +++ b/scripts/exgdas_enkf_earc.py @@ -36,7 +36,7 @@ def main(): # Also import all COMIN* directory and template variables for key in archive.task_config.keys(): - if key.startswith("COMIN"): + if key.startswith("COM"): archive_dict[key] = archive.task_config[key] cwd = os.getcwd() diff --git a/workflow/rocoto/gefs_tasks.py b/workflow/rocoto/gefs_tasks.py index 6fffc881e0..cfd8fa7093 100644 --- a/workflow/rocoto/gefs_tasks.py +++ b/workflow/rocoto/gefs_tasks.py @@ -253,7 +253,7 @@ def _atmosoceaniceprod(self, component: str): postenvars = self.envars.copy() postenvar_dict = {'ENSMEM': '#member#', 'MEMDIR': 'mem#member#', - 'FHRLST': '#fhr#', + 'FHR3': '#fhr#', 'COMPONENT': component} for key, value in postenvar_dict.items(): postenvars.append(rocoto.create_envar(name=key, value=str(value))) @@ -270,11 +270,6 @@ def _atmosoceaniceprod(self, component: str): 'maxtries': '&MAXTRIES;'} fhrs = self._get_forecast_hours('gefs', self._configs[config], component) - - # ocean/ice components do not have fhr 0 as they are averaged output - if component in ['ocean', 'ice'] and 0 in fhrs: - fhrs.remove(0) - fhr_var_dict = {'fhr': ' '.join([f"{fhr:03d}" for fhr in fhrs])} fhr_metatask_dict = {'task_name': f'{component}_prod_#member#', @@ -303,7 +298,7 @@ def atmos_ensstat(self): dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) postenvars = 
self.envars.copy() - postenvar_dict = {'FHRLST': '#fhr#'} + postenvar_dict = {'FHR3': '#fhr#'} for key, value in postenvar_dict.items(): postenvars.append(rocoto.create_envar(name=key, value=str(value))) diff --git a/workflow/rocoto/gfs_tasks.py b/workflow/rocoto/gfs_tasks.py index 2c74d0f854..60a08549b6 100644 --- a/workflow/rocoto/gfs_tasks.py +++ b/workflow/rocoto/gfs_tasks.py @@ -939,7 +939,7 @@ def _fcst_cycled(self): def atmanlupp(self): postenvars = self.envars.copy() - postenvar_dict = {'FHRLST': 'f000', + postenvar_dict = {'FHR3': '000', 'UPP_RUN': 'analysis'} for key, value in postenvar_dict.items(): postenvars.append(rocoto.create_envar(name=key, value=str(value))) @@ -975,7 +975,7 @@ def atmanlupp(self): def atmanlprod(self): postenvars = self.envars.copy() - postenvar_dict = {'FHRLST': '-f001'} + postenvar_dict = {'FHR3': '-001'} for key, value in postenvar_dict.items(): postenvars.append(rocoto.create_envar(name=key, value=str(value))) @@ -1002,24 +1002,6 @@ def atmanlprod(self): return task - @staticmethod - def _get_ufs_postproc_grps(cdump, config, component='atmos'): - - fhrs = Tasks._get_forecast_hours(cdump, config, component=component) - - nfhrs_per_grp = config.get('NFHRS_PER_GROUP', 1) - ngrps = len(fhrs) // nfhrs_per_grp if len(fhrs) % nfhrs_per_grp == 0 else len(fhrs) // nfhrs_per_grp + 1 - - fhrs = [f'f{fhr:03d}' for fhr in fhrs] - fhrs = np.array_split(fhrs, ngrps) - fhrs = [fhr.tolist() for fhr in fhrs] - - grp = ' '.join(f'_{fhr[0]}-{fhr[-1]}' if len(fhr) > 1 else f'_{fhr[0]}' for fhr in fhrs) - dep = ' '.join([fhr[-1] for fhr in fhrs]) - lst = ' '.join(['_'.join(fhr) for fhr in fhrs]) - - return grp, dep, lst - def atmupp(self): return self._upptask(upp_run='forecast', task_id='atmupp') @@ -1032,32 +1014,28 @@ def _upptask(self, upp_run="forecast", task_id="atmupp"): if upp_run not in VALID_UPP_RUN: raise KeyError(f"{upp_run} is invalid; UPP_RUN options are: {('|').join(VALID_UPP_RUN)}") - varname1, varname2, varname3 = 'grp', 'dep', 
'lst' - varval1, varval2, varval3 = self._get_ufs_postproc_grps(self.cdump, self._configs['upp']) - var_dict = {varname1: varval1, varname2: varval2, varname3: varval3} - postenvars = self.envars.copy() - postenvar_dict = {'FHRLST': '#lst#', + postenvar_dict = {'FHR3': '#fhr#', 'UPP_RUN': upp_run} for key, value in postenvar_dict.items(): postenvars.append(rocoto.create_envar(name=key, value=str(value))) atm_hist_path = self._template_to_rocoto_cycstring(self._base["COM_ATMOS_HISTORY_TMPL"]) deps = [] - data = f'{atm_hist_path}/{self.cdump}.t@Hz.atm#dep#.nc' + data = f'{atm_hist_path}/{self.cdump}.t@Hz.atmf#fhr#.nc' dep_dict = {'type': 'data', 'data': data, 'age': 120} deps.append(rocoto.add_dependency(dep_dict)) - data = f'{atm_hist_path}/{self.cdump}.t@Hz.sfc#dep#.nc' + data = f'{atm_hist_path}/{self.cdump}.t@Hz.sfcf#fhr#.nc' dep_dict = {'type': 'data', 'data': data, 'age': 120} deps.append(rocoto.add_dependency(dep_dict)) - data = f'{atm_hist_path}/{self.cdump}.t@Hz.atm.log#dep#.txt' + data = f'{atm_hist_path}/{self.cdump}.t@Hz.atm.logf#fhr#.txt' dep_dict = {'type': 'data', 'data': data, 'age': 60} deps.append(rocoto.add_dependency(dep_dict)) dependencies = rocoto.create_dependency(dep=deps, dep_condition='and') cycledef = 'gdas_half,gdas' if self.cdump in ['gdas'] else self.cdump resources = self.get_resource('upp') - task_name = f'{self.cdump}{task_id}#{varname1}#' + task_name = f'{self.cdump}{task_id}_f#fhr#' task_dict = {'task_name': task_name, 'resources': resources, 'dependency': dependencies, @@ -1069,9 +1047,12 @@ def _upptask(self, upp_run="forecast", task_id="atmupp"): 'maxtries': '&MAXTRIES;' } + fhrs = self._get_forecast_hours(self.cdump, self._configs['upp']) + fhr_var_dict = {'fhr': ' '.join([f"{fhr:03d}" for fhr in fhrs])} + metatask_dict = {'task_name': f'{self.cdump}{task_id}', 'task_dict': task_dict, - 'var_dict': var_dict + 'var_dict': fhr_var_dict } task = rocoto.create_task(metatask_dict) @@ -1091,25 +1072,21 @@ def _atmosoceaniceprod(self, 
component: str): products_dict = {'atmos': {'config': 'atmos_products', 'history_path_tmpl': 'COM_ATMOS_MASTER_TMPL', - 'history_file_tmpl': f'{self.cdump}.t@Hz.master.grb2#dep#'}, + 'history_file_tmpl': f'{self.cdump}.t@Hz.master.grb2f#fhr#'}, 'ocean': {'config': 'oceanice_products', 'history_path_tmpl': 'COM_OCEAN_HISTORY_TMPL', - 'history_file_tmpl': f'{self.cdump}.ocean.t@Hz.6hr_avg.#dep#.nc'}, + 'history_file_tmpl': f'{self.cdump}.ocean.t@Hz.6hr_avg.f#fhr#.nc'}, 'ice': {'config': 'oceanice_products', 'history_path_tmpl': 'COM_ICE_HISTORY_TMPL', - 'history_file_tmpl': f'{self.cdump}.ice.t@Hz.6hr_avg.#dep#.nc'}} + 'history_file_tmpl': f'{self.cdump}.ice.t@Hz.6hr_avg.f#fhr#.nc'}} component_dict = products_dict[component] config = component_dict['config'] history_path_tmpl = component_dict['history_path_tmpl'] history_file_tmpl = component_dict['history_file_tmpl'] - varname1, varname2, varname3 = 'grp', 'dep', 'lst' - varval1, varval2, varval3 = self._get_ufs_postproc_grps(self.cdump, self._configs[config], component=component) - var_dict = {varname1: varval1, varname2: varval2, varname3: varval3} - postenvars = self.envars.copy() - postenvar_dict = {'FHRLST': '#lst#', 'COMPONENT': component} + postenvar_dict = {'FHR3': '#fhr#', 'COMPONENT': component} for key, value in postenvar_dict.items(): postenvars.append(rocoto.create_envar(name=key, value=str(value))) @@ -1129,7 +1106,7 @@ def _atmosoceaniceprod(self, component: str): cycledef = 'gdas_half,gdas' if self.cdump in ['gdas'] else self.cdump resources = self.get_resource(component_dict['config']) - task_name = f'{self.cdump}{component}_prod#{varname1}#' + task_name = f'{self.cdump}{component}_prod_f#fhr#' task_dict = {'task_name': task_name, 'resources': resources, 'dependency': dependencies, @@ -1141,9 +1118,11 @@ def _atmosoceaniceprod(self, component: str): 'maxtries': '&MAXTRIES;' } + fhrs = self._get_forecast_hours(self.cdump, self._configs[config], component) + fhr_var_dict = {'fhr': ' 
'.join([f"{fhr:03d}" for fhr in fhrs])} metatask_dict = {'task_name': f'{self.cdump}{component}_prod', 'task_dict': task_dict, - 'var_dict': var_dict + 'var_dict': fhr_var_dict } task = rocoto.create_task(metatask_dict) From f43a86276aaef91efa28faadc71a3cf50e749efe Mon Sep 17 00:00:00 2001 From: David Huber <69919478+DavidHuber-NOAA@users.noreply.github.com> Date: Fri, 21 Jun 2024 13:44:29 -0400 Subject: [PATCH 18/20] Fix and simplify online archiving and reenable METplus jobs (#2687) This fixes the online archiving portion of the `*arch` and `*earc00` jobs, a prerequisite for running METplus. This also reenables METplus by default. The approach previously taken created `FileHandler` dictionaries at varying levels within the resulting yaml, which was not properly parsed by `exglobal_archive.py`. This approach creates a single `FileHandler` dictionary and is much less complicated overall. Resolves #2673 #2647 --- .gitignore | 2 + parm/archive/arcdir.yaml.j2 | 250 ++++++++++++++++------------- parm/archive/gdas.yaml.j2 | 123 +++++++------- parm/config/gfs/yaml/defaults.yaml | 2 +- ush/python/pygfs/task/archive.py | 24 +-- 5 files changed, 215 insertions(+), 186 deletions(-) diff --git a/.gitignore b/.gitignore index 0c15a50661..861346a494 100644 --- a/.gitignore +++ b/.gitignore @@ -184,6 +184,8 @@ ush/fv3gfs_make_grid.sh ush/fv3gfs_make_orog.sh ush/gen_bufr2ioda_json.py ush/gen_bufr2ioda_yaml.py +ush/bufr2ioda_insitu_profile*.py +ush/bufr2ioda_insitu_surface*.py ush/global_chgres.sh ush/global_chgres_driver.sh ush/global_cycle.sh diff --git a/parm/archive/arcdir.yaml.j2 b/parm/archive/arcdir.yaml.j2 index d6b1899ebc..f845e3c9cb 100644 --- a/parm/archive/arcdir.yaml.j2 +++ b/parm/archive/arcdir.yaml.j2 @@ -4,121 +4,153 @@ {% set head = RUN + ".t" + cycle_HH + "z." 
%} # Select data to store in the ARCDIR and VFYARC from deterministic runs -Base: &base # GDAS, GFS, ENKFGDAS, or ENKFGFS - common: - mkdir: - - "{{ ARCDIR }}" - -# Common files to be added to both the gfs and gdas keys below -Deterministic: &deterministic - cyclone: - copy: - # Cyclone forecasts, produced for both gdas and gfs cycles - ## Only created if tracking is on and there were systems to track - {% if path_exists(COMIN_ATMOS_TRACK ~ "/atcfunix." ~ RUN ~ "." ~ cycle_YMDH) %} - - ["{{ COMIN_ATMOS_TRACK }}/atcfunix.{{ RUN }}.{{ cycle_YMDH }}", "{{ ARCDIR }}/atcfunix.{{ RUN }}.{{ cycle_YMDH }}"] - - ["{{ COMIN_ATMOS_TRACK }}/atcfunixp.{{ RUN }}.{{ cycle_YMDH }}", "{{ ARCDIR }}/atcfunixp.{{ RUN }}.{{ cycle_YMDH }}"] - {% endif %} - - # Cyclone tracking data - {% for basin in ["epac", "natl"] %} - {% if path_exists(COMIN_ATMOS_TRACK + "/" + basin) %} - - ["{{ COMIN_ATMOS_TRACK }}/{{ basin }}", "{{ ARCDIR }}/{{ basin }}"] - {% endif %} - {% endfor %} +# This file set will contain all source-destination pairs to send to the FileHandler for copying +{% set file_set = [] %} + +# Declare the VFYARC where Fit2Obs data will be sent +{% set VFYARC = ROTDIR ~ "/vrfyarch" %} + +# Deterministic files +{% if "enkf" not in RUN %} + # Common files to be added to both the gfs and gdas keys below + {% set det_files = [] %} + # Cyclone forecasts, produced for both gdas and gfs cycles + ## Only created if tracking is on and there were systems to track + {% if path_exists(COMIN_ATMOS_TRACK ~ "/atcfunix." ~ RUN ~ "." ~ cycle_YMDH) %} + {% do det_files.append([COMIN_ATMOS_TRACK ~ "/atcfunix." ~ RUN ~ "." ~ cycle_YMDH, + ARCDIR ~"/atcfunix." ~ RUN ~ "." ~ cycle_YMDH ]) %} + {% do det_files.append([COMIN_ATMOS_TRACK ~ "/atcfunixp." ~ RUN ~ "." ~ cycle_YMDH, + ARCDIR ~ "/atcfunixp." ~ RUN ~ "." 
~ cycle_YMDH]) %} + {% endif %} - {% if MODE == "cycled" %} - analysis: - copy: - # Analysis data (if we are running in cycled mode) - - ["{{ COMIN_ATMOS_GRIB_1p00 }}/{{ head }}pgrb2.1p00.anl", "{{ ARCDIR }}/pgbanl.{{ RUN }}.{{ cycle_YMDH }}.grib2"] - - {% if DO_JEDIATMVAR %} - - ["{{ COMIN_ATMOS_ANALYSIS }}/{{ head }}atmstat", "{{ ARCDIR }}/atmstat.{{ RUN }}.{{ cycle_YMDH }}"] - {% else %} - - ["{{ COMIN_ATMOS_ANALYSIS }}/{{ head }}gsistat", "{{ ARCDIR }}/gsistat.{{ RUN }}.{{ cycle_YMDH }}"] - {% endif %} - - {% if DO_JEDISNOWDA %} - - ["{{ COMIN_SNOW_ANALYSIS }}/{{ head }}snowstat.tgz", "{{ ARCDIR }}/snowstat.{{ RUN }}.{{ cycle_YMDH }}.tgz"] - {% endif %} - - {% if AERO_ANL_CDUMP == RUN or AERO_ANL_CDUMP == "both" %} - - ["{{ COMIN_CHEM_ANALYSIS }}/{{ head }}aerostat", "{{ ARCDIR }}/aerostat.{{ RUN }}.{{ cycle_YMDH }}"] - {% endif %} - - {% if DO_PREP_OBS_AERO %} - - ["{{ COMIN_OBS }}/{{ head }}aeroobs", "{{ ARCDIR }}/aeroobs.{{ RUN }}.{{ cycle_YMDH }}"] - - ["{{ COMIN_OBS }}/{{ head }}aerorawobs", "{{ ARCDIR }}/aerorawobs.{{ RUN }}.{{ cycle_YMDH }}"] - {% endif %} + # Cyclone tracking data + {% for basin in ["epac", "natl"] %} + {% if path_exists(COMIN_ATMOS_TRACK + "/" + basin) %} + {% do det_files.append([COMIN_ATMOS_TRACK ~ "/" ~ basin, + ARCDIR ~ "/" ~ basin ]) %} + {% endif %} + {% endfor %} + + # Deterministic analysis files (generated for cycled experiments) + {% set det_anl_files = [] %} + # Analysis data (if we are running in cycled mode) + {% do det_anl_files.append([COMIN_ATMOS_GRIB_1p00 ~ "/" ~ head ~ "pgrb2.1p00.anl", + ARCDIR ~ "/pgbanl." ~ RUN ~ "." ~ cycle_YMDH ~ ".grib2"]) %} + + {% if DO_JEDIATMVAR == True %} + {% do det_anl_files.append([COMIN_ATMOS_ANALYSIS ~ "/" ~ head ~ "atmstat", + ARCDIR ~ "/atmstat." ~ RUN ~ "." ~ cycle_YMDH ]) %} + {% else %} + {% do det_anl_files.append([COMIN_ATMOS_ANALYSIS ~ "/" ~ head ~ "gsistat", + ARCDIR ~ "/gsistat." ~ RUN ~ "." 
~ cycle_YMDH ]) %} + {% endif %} + {% if DO_JEDISNOWDA == True %} + {% do det_anl_files.append([COMIN_SNOW_ANALYSIS ~ "/" ~ head ~ "snowstat.tgz", + ARCDIR ~ "/snowstat." ~ RUN ~ "." ~ cycle_YMDH ~ ".tgz"]) %} {% endif %} -{% if RUN == "gfs" %} -gfs: # GFS specific - <<: *base - <<: *deterministic - - gfs: - copy: - {% for fhr in range(0, FHMAX_GFS + 1, FHOUT_GFS) %} - - ["{{ COMIN_ATMOS_GRIB_1p00 }}/{{ head }}pgrb2.1p00.f{{ '%03d' % fhr }}", "{{ ARCDIR }}/pgbf{{ '%02d' % fhr }}.{{ RUN }}.{{ cycle_YMDH }}.grib2"] - {% endfor %} - - # Cyclone genesis data (only present if there are storms) - {% if path_exists(COMIN_ATMOS_GENESIS ~ "/storms.gfso.atcf_gen." ~ cycle_YMDH) %} - - ["{{ COMIN_ATMOS_GENESIS }}/storms.gfso.atcf_gen.{{ cycle_YMDH }}", "{{ ARCDIR }}/storms.gfso.atcf_gen.{{ cycle_YMDH }}"] - - ["{{ COMIN_ATMOS_GENESIS }}/storms.gfso.atcf_gen.altg.{{ cycle_YMDH }}", "{{ ARCDIR }}/storms.gfso.atcf_gen.altg.{{ cycle_YMDH }}"] - {% endif %} - - {% if path_exists(COMIN_ATMOS_GENESIS ~ "/trak.gfso.atcfunix." 
~ cycle_YMDH) %} - - ["{{ COMIN_ATMOS_GENESIS }}/trak.gfso.atcfunix.{{ cycle_YMDH }}", "{{ ARCDIR }}/trak.gfso.atcfunix.{{ cycle_YMDH }}"] - - ["{{ COMIN_ATMOS_GENESIS }}/trak.gfso.atcfunix.altg.{{ cycle_YMDH }}", "{{ ARCDIR }}/trak.gfso.atcfunix.altg.{{ cycle_YMDH }}"] - {% endif %} - - {% if DO_FIT2OBS %} - fit2obs: - - mkdir: - {% set VFYARC = ROTDIR + "/vrfyarch" %} - - "{{ VFYARC }}/{{ RUN }}.{{ cycle_YMD }}/{{ cycle_HH }}" - - copy: - {% for fhr in range(0, FHMAX_FITS + 1, 6) %} - {% set sfcfile = "/" + head + "sfcf" + '%03d'|format(fhr) + ".nc" %} - {% set sigfile = "/" + head + "atmf" + '%03d'|format(fhr) + ".nc" %} - - ["{{COMIN_ATMOS_HISTORY}}/{{ sfcfile }}", "{{ VFYARC }}/{{ RUN }}.{{ cycle_YMD }}/{{ cycle_HH }}/{{ sfcfile }}"] - - ["{{COMIN_ATMOS_HISTORY}}/{{ sigfile }}", "{{ VFYARC }}/{{ RUN }}.{{ cycle_YMD }}/{{ cycle_HH }}/{{ sigfile }}"] - {% endfor %} + {% if AERO_ANL_CDUMP == RUN or AERO_ANL_CDUMP == "both" %} + {% do det_anl_files.append([COMIN_CHEM_ANALYSIS ~ "/" ~ head ~ "aerostat", + ARCDIR ~ "/aerostat." ~ RUN ~ "." ~ cycle_YMDH ]) %} + {% endif %} + {% if DO_PREP_OBS_AERO == True %} + {% do det_anl_files.append([COMIN_OBS ~ "/" ~ head ~ "aeroobs", + ARCDIR ~ "/aeroobs." ~ RUN ~ "." ~ cycle_YMDH]) %} + {% do det_anl_files.append([COMIN_OBS ~ "/" ~ head ~ "aeroawobs", + ARCDIR ~ "/aeroawobs." ~ RUN ~ "." ~ cycle_YMDH]) %} {% endif %} -{% endif %} -{% if RUN == "gdas" %} -gdas: # GDAS specific - <<: *base - <<: *deterministic - gdas: - copy: - {% for fhr in range(0, FHMAX + 1, FHOUT) %} - - ["{{ COMIN_ATMOS_GRIB_1p00 }}/{{ head }}pgrb2.1p00.f{{ '%03d' % fhr }}", "{{ ARCDIR }}/pgbf{{ '%02d' % fhr }}.{{ RUN }}.{{ cycle_YMDH }}.grib2"] - {% endfor %} -{% endif %} + # GFS-specific files + {% set gfs_files = [] %} + {% for fhr in range(0, FHMAX_GFS + 1, FHOUT_GFS) %} + {% do gfs_files.append([COMIN_ATMOS_GRIB_1p00 ~ "/" ~ head ~ "pgrb2.1p00.f" ~ '%03d'|format(fhr), + ARCDIR ~ "/pgbf" ~ '%02d'|format(fhr) ~ "." ~ RUN ~ "." 
~ cycle_YMDH ~ ".grib2"]) %} + {% endfor %} + + # Cyclone genesis data (only present if there are storms) + {% if path_exists(COMIN_ATMOS_GENESIS ~ "/storms.gfso.atcf_gen." ~ cycle_YMDH) %} + {% do gfs_files.append([COMIN_ATMOS_GENESIS ~ "/storms.gfso.atcf_gen." ~ cycle_YMDH, + ARCDIR ~ "/storms.gfso.atcf_gen." ~ cycle_YMDH ]) %} + {% do gfs_files.append([COMIN_ATMOS_GENESIS ~ "/storms.gfso.atcf_gen.altg." ~ cycle_YMDH, + ARCDIR ~ "/storms.gfso.atcf_gen.altg." ~ cycle_YMDH ]) %} + {% endif %} + + {% if path_exists(COMIN_ATMOS_GENESIS ~ "/trak.gfso.atcfunix." ~ cycle_YMDH) %} + {% do gfs_files.append([COMIN_ATMOS_GENESIS ~ "/trak.gfso.atcfunix." ~ cycle_YMDH, + ARCDIR ~ "/trak.gfso.atcfunix." ~ cycle_YMDH ]) %} + {% do gfs_files.append([COMIN_ATMOS_GENESIS ~ "/trak.gfso.atcfunix.altg." ~ cycle_YMDH, + ARCDIR ~ "/trak.gfso.atcfunix.altg." ~ cycle_YMDH ]) %} + {% endif %} + + # GFS Fit2Obs data + {% set fit2obs_files = [] %} + {% for fhr in range(0, FHMAX_FITS + 1, 6) %} + {% set sfcfile = "/" + head + "sfcf" + '%03d'|format(fhr) + ".nc" %} + {% set sigfile = "/" + head + "atmf" + '%03d'|format(fhr) + ".nc" %} + {% do fit2obs_files.append([COMIN_ATMOS_HISTORY ~ "/" ~ sfcfile, + VFYARC ~ "/" ~ RUN ~ "." ~ cycle_YMD ~ "/" ~ cycle_HH ~ "/" ~ sfcfile ]) %} + {% do fit2obs_files.append([COMIN_ATMOS_HISTORY ~ "/" ~ sigfile, + VFYARC ~ "/" ~ RUN ~ "." ~ cycle_YMD ~ "/" ~ cycle_HH ~ "/" ~ sigfile ]) %} + {% endfor %} + + # GDAS-specific files + {% set gdas_files = [] %} + {% for fhr in range(0, FHMAX + 1, FHOUT) %} + {% do gdas_files.append([COMIN_ATMOS_GRIB_1p00 ~ "/" ~ head ~ "pgrb2.1p00.f" ~ '%03d'|format(fhr), + ARCDIR ~ "/pgbf" ~ '%02d'|format(fhr) ~ "." ~ RUN ~ "." 
~ cycle_YMDH ~ ".grib2"]) %} + {% endfor %} + + # Now append the necessary file pairs to file_set + # Common deterministic files + {% set file_set = file_set + det_files %} + {% if MODE == "cycled" %} + {% set file_set = file_set + det_anl_files %} + {% endif %} + + # Run-specific deterministic files + {% if RUN == "gfs" %} + {% set file_set = file_set + gfs_files %} + # Fit2Obs files + {% if DO_FIT2OBS == True %} + {% set file_set = file_set + fit2obs_files %} + {% endif %} + {% elif RUN == "gdas" %} + {% set file_set = file_set + gdas_files %} + {% endif %} + +{% else %} # End of deterministic files + + # Ensemble analysis files + {% set enkf_files = [] %} + {% if DO_JEDIATMENS == True %} + {% do enkf_files.append([COMIN_ATMOS_ANALYSIS_ENSSTAT ~ "/" ~ head ~ "atmensstat", + ARCDIR ~ "/atmensstat." ~ RUN ~ "." ~ cycle_YMDH ]) %} + {% do enkf_files.append([COMIN_ATMOS_ANALYSIS_ENSSTAT ~ "/" ~ head ~ "atminc.ensmean.nc", + ARCDIR ~ "/atmensstat." ~ RUN ~ "." ~ cycle_YMDH ~ ".ensmean.nc"]) %} + {% else %} + {% do enkf_files.append([COMIN_ATMOS_ANALYSIS_ENSSTAT ~ "/" ~ head ~ "enkfstat", + ARCDIR ~ "/enkfstat." ~ RUN ~ "." ~ cycle_YMDH ]) %} + {% do enkf_files.append([COMIN_ATMOS_ANALYSIS_ENSSTAT ~ "/" ~ head ~ "gsistat.ensmean", + ARCDIR ~ "/gsistat." ~ RUN ~ "." 
~ cycle_YMDH ~ ".ensmean"]) %} + {% endif %} + + # Construct the final file set + {% set file_set = file_set + enkf_files %} -Ensemble: &ensemble # ENKFGDAS or ENKFGFS - analysis: - copy: - # Copy ensemble analyses - {% if DO_JEDIATMENS %} - - ["{{ COMIN_ATMOS_ANALYSIS_ENSSTAT }}/{{ head }}atmensstat", "{{ ARCDIR }}/atmensstat.{{ RUN }}.{{ cycle_YMDH }}"] - - ["{{ COMIN_ATMOS_ANALYSIS_ENSSTAT }}/{{ head }}atminc.ensmean.nc", "{{ ARCDIR }}/atmensstat.{{ RUN }}.{{ cycle_YMDH }}.ensmean.nc"] - {% else %} - - ["{{ COMIN_ATMOS_ANALYSIS_ENSSTAT }}/{{ head }}enkfstat", "{{ ARCDIR }}/enkfstat.{{ RUN }}.{{ cycle_YMDH }}"] - - ["{{ COMIN_ATMOS_ANALYSIS_ENSSTAT }}/{{ head }}gsistat.ensmean", "{{ ARCDIR }}/gsistat.{{ RUN }}.{{ cycle_YMDH }}.ensmean"] - {% endif %} - -{% if "enkf" in RUN %} -{{ RUN }}: - <<: *base - <<: *ensemble {% endif %} + + +# Actually write the yaml +mkdir: + - "{{ ARCDIR }}" + + {% if DO_FIT2OBS == True %} + - "{{ VFYARC }}/{{ RUN }}.{{ cycle_YMD }}/{{ cycle_HH }}" + {% endif %} + +copy: + {% for source_dest_pair in file_set %} + - {{ source_dest_pair }} + {% endfor %} diff --git a/parm/archive/gdas.yaml.j2 b/parm/archive/gdas.yaml.j2 index 12b89f2e61..b253d27268 100644 --- a/parm/archive/gdas.yaml.j2 +++ b/parm/archive/gdas.yaml.j2 @@ -7,32 +7,32 @@ gdas: # Cycled logs - "logs/{{ cycle_YMDH }}/{{ RUN }}atmanlprod.log" - "logs/{{ cycle_YMDH }}/{{ RUN }}prep.log" - {% if DO_JEDIATMVAR %} + {% if DO_JEDIATMVAR %} - "logs/{{ cycle_YMDH }}/{{ RUN }}prepatmiodaobs.log" - "logs/{{ cycle_YMDH }}/{{ RUN }}atmanlinit.log" - "logs/{{ cycle_YMDH }}/{{ RUN }}atmanlprod.log" - "logs/{{ cycle_YMDH }}/{{ RUN }}atmanlfinal.log" - "logs/{{ cycle_YMDH }}/{{ RUN }}atmanlfv3inc.log" - "logs/{{ cycle_YMDH }}/{{ RUN }}atmanlvar.log" - {% else %} + {% else %} - "logs/{{ cycle_YMDH }}/{{ RUN }}anal.log" - "logs/{{ cycle_YMDH }}/{{ RUN }}analdiag.log" - {% endif %} + {% endif %} - "logs/{{ cycle_YMDH }}/{{ RUN }}atmanlupp.log" - {% if DO_JEDIOCNVAR %} + {% if DO_JEDIOCNVAR %} - 
"logs/{{ cycle_YMDH }}/{{ RUN }}prepoceanobs.log" - "logs/{{ cycle_YMDH }}/{{ RUN }}ocnanalprep.log" - "logs/{{ cycle_YMDH }}/{{ RUN }}ocnanalbmat.log" - "logs/{{ cycle_YMDH }}/{{ RUN }}ocnanalrun.log" - "logs/{{ cycle_YMDH }}/{{ RUN }}ocnanalpost.log" - "logs/{{ cycle_YMDH }}/{{ RUN }}ocnanalchkpt.log" - {% if DOHYBVAR %} + {% if DOHYBVAR %} - "logs/{{ cycle_YMDH }}/{{ RUN }}ocnanalecen.log" - {% endif %} - {% endif %} - {% if DO_VRFY_OCEANDA %} + {% endif %} + {% endif %} + {% if DO_VRFY_OCEANDA %} - "logs/{{ cycle_YMDH }}/{{ RUN }}ocnanalvrfy.log" - {% endif %} + {% endif %} # Analysis GRIB2 (sub-sampled) data - "{{ COMIN_ATMOS_GRIB_0p25 | relpath(ROTDIR) }}/{{ head }}pgrb2.0p25.anl" @@ -44,117 +44,120 @@ gdas: - "{{ COMIN_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}atmanl.nc" - "{{ COMIN_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}sfcanl.nc" - {% if DOHYBVAR %} + {% if DOHYBVAR %} # Ensemble analysis residual - "{{ COMIN_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}atmanl.ensres.nc" - {% if DOIAU %} + {% if DOIAU %} # Ensemble IAU analysis residuals - {% for fhr in iaufhrs if fhr != 6 %} + {% for fhr in iaufhrs if fhr != 6 %} - "{{ COMIN_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}atma{{ '%03d' % fhr }}.ensres.nc" - {% endfor %} - {% endif %} + {% endfor %} + {% endif %} # End of ensemble analysis mean residuals - {% endif %} + {% endif %} # Analysis state - {% if DO_JEDIATMVAR %} + {% if DO_JEDIATMVAR %} - "{{ COMIN_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}atmvar.yaml" - "{{ COMIN_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}atmstat" - {% else %} + {% else %} - "{{ COMIN_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}gsistat" - "{{ COMIN_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}cnvstat" - "{{ COMIN_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}oznstat" - "{{ COMIN_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}radstat" - {% endif %} - {% if AERO_ANL_CDUMP == "gdas" or AERO_ANL_CDUMP == "both" %} + {% endif %} + {% if AERO_ANL_CDUMP == "gdas" or 
AERO_ANL_CDUMP == "both" %} - "{{ COMIN_CHEM_ANALYSIS | relpath(ROTDIR) }}/{{ head }}aerostat" - {% endif %} - {% if DO_PREP_OBS_AERO %} + {% endif %} + {% if DO_PREP_OBS_AERO %} - "{{ COMIN_OBS | relpath(ROTDIR) }}/{{ head }}aeroobs" - "{{ COMIN_OBS | relpath(ROTDIR) }}/{{ head }}aerorawobs" - {% endif %} - {% if DO_JEDISNOWDA %} + {% endif %} + {% if DO_JEDISNOWDA %} - "{{ COMIN_SNOW_ANALYSIS | relpath(ROTDIR) }}/{{ head }}snowstat.tgz" - {% endif %} + {% endif %} # Ozone verification - {% if DO_VERFOZN %} + {% if DO_VERFOZN %} - "{{ COMIN_ATMOS_OZNMON | relpath(ROTDIR) }}/time/bad_cnt.{{ cycle_YMDH }}" - "{{ COMIN_ATMOS_OZNMON | relpath(ROTDIR) }}/time/bad_diag.{{ cycle_YMDH }}" - "{{ COMIN_ATMOS_OZNMON | relpath(ROTDIR) }}/time/bad_pen.{{ cycle_YMDH }}" - "{{ COMIN_ATMOS_OZNMON | relpath(ROTDIR) }}/time/stdout.time.tar.gz" - "{{ COMIN_ATMOS_OZNMON | relpath(ROTDIR) }}/horiz/stdout.horiz.tar.gz" - "logs/{{ cycle_YMDH }}/{{ RUN }}verfozn.log" - {% endif %} + {% endif %} # Radiance verification - {% if DO_VERFRAD %} + {% if DO_VERFRAD %} - "{{ COMIN_ATMOS_RADMON | relpath(ROTDIR) }}/radmon_angle.tar.gz" - "{{ COMIN_ATMOS_RADMON | relpath(ROTDIR) }}/radmon_bcoef.tar.gz" - "{{ COMIN_ATMOS_RADMON | relpath(ROTDIR) }}/radmon_bcor.tar.gz" - "{{ COMIN_ATMOS_RADMON | relpath(ROTDIR) }}/radmon_time.tar.gz" - "logs/{{ cycle_YMDH }}/{{ RUN }}verfrad.log" - {% endif %} + {% endif %} # Minimization monitor - {% if DO_VMINMON %} + {% if DO_VMINMON %} - "{{ COMIN_ATMOS_MINMON | relpath(ROTDIR) }}/{{ cycle_YMDH }}.costs.txt" - "{{ COMIN_ATMOS_MINMON | relpath(ROTDIR) }}/{{ cycle_YMDH }}.cost_terms.txt" - "{{ COMIN_ATMOS_MINMON | relpath(ROTDIR) }}/{{ cycle_YMDH }}.gnorms.ieee_d" - "{{ COMIN_ATMOS_MINMON | relpath(ROTDIR) }}/{{ cycle_YMDH }}.reduction.ieee_d" - "{{ COMIN_ATMOS_MINMON | relpath(ROTDIR) }}/gnorm_data.txt" - "logs/{{ cycle_YMDH }}/{{ RUN }}vminmon.log" - {% endif %} + {% endif %} {% endif %} # End of cycled data # Forecast and post logs - "logs/{{ cycle_YMDH }}/{{ 
RUN }}fcst.log" - # TODO explicitly name the atmos_prod log files to archive - - "logs/{{ cycle_YMDH }}/{{ RUN }}atmos_prod_f*.log" - {% if not WRITE_DOPOST %} - # TODO set the forecast hours explicitly. This will require emulating numpy.array_split - - "logs/{{ cycle_YMDH }}/{{ RUN }}atmos_upp_f*.log" - {% endif %} ## not WRITE_DOPOST {% for fhr in range(0, FHMAX + 1, 3) %} + {% set fhr3 = '%03d' % fhr %} + - "logs/{{ cycle_YMDH }}/{{ RUN }}atmos_prod_f{{ fhr3 }}.log" + {% if not WRITE_DOPOST %} + - "logs/{{ cycle_YMDH }}/{{ RUN }}atmos_upp_f{{ fhr3 }}.log" + {% endif %} ## not WRITE_DOPOST # Forecast GRIB2 data - - "{{ COMIN_ATMOS_GRIB_0p25 | relpath(ROTDIR) }}/{{ head }}pgrb2.0p25.f{{ '%03d' % fhr }}" - - "{{ COMIN_ATMOS_GRIB_0p25 | relpath(ROTDIR) }}/{{ head }}pgrb2.0p25.f{{ '%03d' % fhr }}.idx" - - "{{ COMIN_ATMOS_GRIB_1p00 | relpath(ROTDIR) }}/{{ head }}pgrb2.1p00.f{{ '%03d' % fhr }}" - - "{{ COMIN_ATMOS_GRIB_1p00 | relpath(ROTDIR) }}/{{ head }}pgrb2.1p00.f{{ '%03d' % fhr }}.idx" + - "{{ COMIN_ATMOS_GRIB_0p25 | relpath(ROTDIR) }}/{{ head }}pgrb2.0p25.f{{ fhr3 }}" + - "{{ COMIN_ATMOS_GRIB_0p25 | relpath(ROTDIR) }}/{{ head }}pgrb2.0p25.f{{ fhr3 }}.idx" + - "{{ COMIN_ATMOS_GRIB_1p00 | relpath(ROTDIR) }}/{{ head }}pgrb2.1p00.f{{ fhr3 }}" + - "{{ COMIN_ATMOS_GRIB_1p00 | relpath(ROTDIR) }}/{{ head }}pgrb2.1p00.f{{ fhr3 }}.idx" # Forecast GRIB2 fluxes - - "{{ COMIN_ATMOS_MASTER | relpath(ROTDIR) }}/{{ head }}sfluxgrbf{{ '%03d' % fhr }}.grib2" - - "{{ COMIN_ATMOS_MASTER | relpath(ROTDIR) }}/{{ head }}sfluxgrbf{{ '%03d' % fhr }}.grib2.idx" + - "{{ COMIN_ATMOS_MASTER | relpath(ROTDIR) }}/{{ head }}sfluxgrbf{{ fhr3 }}.grib2" + - "{{ COMIN_ATMOS_MASTER | relpath(ROTDIR) }}/{{ head }}sfluxgrbf{{ fhr3 }}.grib2.idx" # FV3 log - - "{{ COMIN_ATMOS_HISTORY | relpath(ROTDIR) }}/{{ head }}atm.logf{{ '%03d' % fhr }}.txt" + - "{{ COMIN_ATMOS_HISTORY | relpath(ROTDIR) }}/{{ head }}atm.logf{{ fhr3 }}.txt" # Raw netCDF forecasts - - "{{ COMIN_ATMOS_HISTORY | relpath(ROTDIR) }}/{{ 
head }}atmf{{ '%03d' % fhr }}.nc" - - "{{ COMIN_ATMOS_HISTORY | relpath(ROTDIR) }}/{{ head }}sfcf{{ '%03d' % fhr }}.nc" + - "{{ COMIN_ATMOS_HISTORY | relpath(ROTDIR) }}/{{ head }}atmf{{ fhr3 }}.nc" + - "{{ COMIN_ATMOS_HISTORY | relpath(ROTDIR) }}/{{ head }}sfcf{{ fhr3 }}.nc" {% endfor %} optional: {% if MODE == "cycled" %} - {% if DO_VERFRAD %} - # Radiance verification (only created if there are problems) + # Radiance verification (only created if there are problems) + {% if DO_VERFRAD %} - "{{ COMIN_ATMOS_RADMON | relpath(ROTDIR) }}/bad_diag.{{ cycle_YMDH }}" - "{{ COMIN_ATMOS_RADMON | relpath(ROTDIR) }}/bad_pen.{{ cycle_YMDH }}" - "{{ COMIN_ATMOS_RADMON | relpath(ROTDIR) }}/low_count.{{ cycle_YMDH }}" - "{{ COMIN_ATMOS_RADMON | relpath(ROTDIR) }}/warning.{{ cycle_YMDH }}" - {% endif %} + {% endif %} - {% if DO_VERFOZN %} - # Not all of these ozone instruments always produce data - {% set oznmon_types = [ - "gome_metop-b", "omi_aura", "ompslp_npp", "ompsnp_n20", - "ompsnp_npp", "ompstc8_n20", "ompstc8_npp", "sbuv2_n19" - ] %} - {% for group in [ "horiz", "time" ] %} - {% if group == "horiz" %} {% set suffix = ".gz" %} {% else %} {% set suffix = "" %} {% endif %} - {% for type in oznmon_types %} + {% if DO_VERFOZN %} + # Not all of these ozone instruments always produce data + {% set oznmon_types = [ + "gome_metop-b", "omi_aura", "ompslp_npp", "ompsnp_n20", + "ompsnp_npp", "ompstc8_n20", "ompstc8_npp", "sbuv2_n19" + ] %} + {% for group in [ "horiz", "time" ] %} + {% if group == "horiz" %} + {% set suffix = ".gz" %} + {% else %} + {% set suffix = "" %} + {% endif %} + {% for type in oznmon_types %} - "{{ COMIN_ATMOS_OZNMON | relpath(ROTDIR) }}/{{ group }}/{{ type }}.anl.ctl" - "{{ COMIN_ATMOS_OZNMON | relpath(ROTDIR) }}/{{ group }}/{{ type }}.anl.{{ cycle_YMDH }}.ieee_d{{ suffix }}" - "{{ COMIN_ATMOS_OZNMON | relpath(ROTDIR) }}/{{ group }}/{{ type }}.ges.ctl" - "{{ COMIN_ATMOS_OZNMON | relpath(ROTDIR) }}/{{ group }}/{{ type }}.ges.{{ cycle_YMDH }}.ieee_d{{ suffix 
}}" - {% endfor %} - {% endfor %} - {% endif %} + {% endfor %} + {% endfor %} + {% endif %} {% endif %} diff --git a/parm/config/gfs/yaml/defaults.yaml b/parm/config/gfs/yaml/defaults.yaml index b074b11dbf..2d662a9bcb 100644 --- a/parm/config/gfs/yaml/defaults.yaml +++ b/parm/config/gfs/yaml/defaults.yaml @@ -13,7 +13,7 @@ base: DO_TRACKER: "YES" DO_GENESIS: "YES" DO_GENESIS_FSU: "NO" - DO_METP: "NO" + DO_METP: "YES" FHMAX_GFS: 120 FHMAX_HF_GFS: 0 DO_VRFY_OCEANDA: "NO" diff --git a/ush/python/pygfs/task/archive.py b/ush/python/pygfs/task/archive.py index 269aea0ccc..d0722552e1 100644 --- a/ush/python/pygfs/task/archive.py +++ b/ush/python/pygfs/task/archive.py @@ -7,9 +7,9 @@ from logging import getLogger from typing import Any, Dict, List -from wxflow import (AttrDict, FileHandler, Hsi, Htar, Task, cast_strdict_as_dtypedict, +from wxflow import (AttrDict, FileHandler, Hsi, Htar, Task, chgrp, get_gid, logit, mkdir_p, parse_j2yaml, rm_p, strftime, - to_YMD, to_YMDH, Template, TemplateConstants) + to_YMDH) logger = getLogger(__name__.split('.')[-1]) @@ -348,19 +348,11 @@ def _construct_arcdir_set(arcdir_j2yaml, arch_dict) -> Dict: files need to be copied to the ARCDIR and the Fit2Obs directory. """ - # Parse the input jinja yaml template - arcdir_yaml = parse_j2yaml(arcdir_j2yaml, - arch_dict, - allow_missing=True) - - # Collect the needed FileHandler dicts and construct arcdir_set - arcdir_set = {} - for key, handler in arcdir_yaml[arch_dict.RUN].items(): - # Different RUNs can have different filesets that need to be copied. - # Each fileset is stored as a dictionary. Collect the contents of - # each (which should be 'mkdir' and/or 'copy') to produce singular - # mkdir and copy lists. - arcdir_set.update(handler) + # Get the FileHandler dictionary for creating directories and copying + # to the ARCDIR and VFYARC directories. 
+ arcdir_set = parse_j2yaml(arcdir_j2yaml, + arch_dict, + allow_missing=True) return arcdir_set @@ -421,7 +413,7 @@ def replace_string_from_to_file(filename_in, filename_out, search_str, replace_s with open(filename_in) as old_file: lines = old_file.readlines() - out_lines = [line.replace(search, replace) for line in lines] + out_lines = [line.replace(search_str, replace_str) for line in lines] with open("/tmp/track_file", "w") as new_file: new_file.writelines(out_lines) From 4e1b937b67ed220120e81925c4507f03b9b8965f Mon Sep 17 00:00:00 2001 From: David Huber <69919478+DavidHuber-NOAA@users.noreply.github.com> Date: Mon, 24 Jun 2024 10:50:52 -0400 Subject: [PATCH 19/20] Add minimum software requirements (#2712) Adds a table to HPC documentation stating the minimum support versions --- docs/source/hpc.rst | 95 +++++++++++---------------- modulefiles/module_gwsetup.gaea.lua | 2 - modulefiles/module_gwsetup.hera.lua | 2 - modulefiles/module_gwsetup.orion.lua | 1 - modulefiles/module_gwsetup.wcoss2.lua | 2 - 5 files changed, 40 insertions(+), 62 deletions(-) diff --git a/docs/source/hpc.rst b/docs/source/hpc.rst index 102455b822..643cffdef0 100644 --- a/docs/source/hpc.rst +++ b/docs/source/hpc.rst @@ -4,6 +4,46 @@ HPC Settings and Help Running the GFS configurations (or almost any global workflow configuration except the coarsest) is a resource intensive exercise. This page discusses recommended HPC environmental settings and contact information in case you need assistance from a particular HPC helpdesk. While most of the documentation is based on supported NOAA platforms, the learnings here can hopefully apply to other platforms. +==================================== +Minimum system software requirements +==================================== + +The following system software requirements are the minimum for any new or existing system and reflect the development and testing environment on which the global workflow is maintained. 
Any system that does not meet these requirements will not be supported. + ++--------------+-------------+---------------------------------------+ +| Software | Minimum | Notes | +| | supported | | +| | version(s) | | ++==============+=============+=======================================+ +| Bash | 4.4.20 | | ++--------------+-------------+---------------------------------------+ +| Python | * 3.8.6 | * 3.10.x is not supported by METplus | +| | * 3.10.13+ | verification software | +| | * 3.11.6+ | * 3.11.6 is packaged with spack-stack | +| | | * 3.9.x is untested | ++--------------+-------------+---------------------------------------+ +| Spack-Stack | 1.6.0 | * Available everywhere but WCOSS2 | ++--------------+-------------+---------------------------------------+ +| lmod | 8.3.1 | | ++--------------+-------------+---------------------------------------+ +| Slurm | 23.02.7 | * Other schedulers may be supportable | ++--------------+-------------+---------------------------------------+ +| PBSpro | 2022.1.1 | * Other schedulers may be supportable | ++--------------+-------------+---------------------------------------+ +| Git | 2.29.0 | * Some components e.g. GDASApp may | +| | | need Git-LFS for downloading test | +| | | data | ++--------------+-------------+---------------------------------------+ +| Rocoto | 1.3.5 | * 1.3.7 is required for newer | +| | | versions of Ruby (3.2+) | ++--------------+-------------+---------------------------------------+ +| Intel | 2021.5.1 | * GNU compilers are not supported | +| Compilers | | * Intel LLVM compilers are not yet | +| | | supported | +| | | * Intel 19.x is only supported on | +| | | WCOSS2 | ++--------------+-------------+---------------------------------------+ + ================================ Experiment troubleshooting help ================================ @@ -50,61 +90,6 @@ Optimizing the global workflow on S4 The S4 cluster is relatively small and so optimizations are recommended to improve cycled runtimes. 
Please contact Innocent Souopgui (innocent.souopgui@noaa.gov) if you are planning on running a cycled experiment on this system to obtain optimized configuration files. -============ -Git settings -============ - -^^^^^^ -Merges -^^^^^^ - -Use the following command to have merge commits include the one-line description of all the commits being merged (up to 200). You only need to do this once on each machine; it will be saved to your git settings:: - - git config --global merge.log 200 - -Use the ``--no-ff`` option to make sure there is always a merge commit when a fast-forward only is available. Exception: If the merge contains only a single commit, it can be applied as a fast-forward. - -For any merge with multiple commits, a short synopsis of the merge should appear between the title and the list of commit titles added by merge.log. - -^^^^^^^ -Version -^^^^^^^ - -It is advised to use Git v2+ when available. At the time of writing this documentation the default Git clients on the different machines were as noted in the table below. 
It is recommended that you check the default modules before loading recommended ones: - -+----------+----------+---------------------------------------+ -| Machine | Default | Recommended | -+----------+----------+---------------------------------------+ -| Hera | v2.18.0 | default | -+----------+----------+---------------------------------------+ -| Hercules | v2.31.1 | default | -+----------+----------+---------------------------------------+ -| Orion | v1.8.3.1 | **module load git/2.28.0** | -+----------+----------+---------------------------------------+ -| Jet | v2.18.0 | default | -+----------+----------+---------------------------------------+ -| WCOSS2 | v2.35.3 | default | -+----------+----------+---------------------------------------+ -| S4 | v1.8.3.1 | **module load git/2.30.0** | -+----------+----------+---------------------------------------+ -| AWS PW | v1.8.3.1 | default | -+----------+----------+---------------------------------------+ - -^^^^^^^^^^^^^ -Output format -^^^^^^^^^^^^^ - -For proper display of Git command output (e.g. 
git branch and git diff) type the following once per machine: - -:: - - git config --global core.pager 'less -FRX' - -For the manage_externals utility functioning:: - - Error: fatal: ssh variant 'simple' does not support setting port - Fix: git config --global ssh.variant ssh - ======================================== Stacksize on R&Ds (Hera, Orion, Hercules, Jet, S4) ======================================== diff --git a/modulefiles/module_gwsetup.gaea.lua b/modulefiles/module_gwsetup.gaea.lua index 5a8b2379a9..8b9f70e4a0 100644 --- a/modulefiles/module_gwsetup.gaea.lua +++ b/modulefiles/module_gwsetup.gaea.lua @@ -15,7 +15,5 @@ load(pathJoin("python", python_ver)) load("py-jinja2") load("py-pyyaml") load("py-numpy") -local git_ver=os.getenv("git_ver") or "2.35.2" -load(pathJoin("git", git_ver)) whatis("Description: GFS run setup environment") diff --git a/modulefiles/module_gwsetup.hera.lua b/modulefiles/module_gwsetup.hera.lua index 696f9577be..06cf566850 100644 --- a/modulefiles/module_gwsetup.hera.lua +++ b/modulefiles/module_gwsetup.hera.lua @@ -14,7 +14,5 @@ load(pathJoin("python", python_ver)) load("py-jinja2") load("py-pyyaml") load("py-numpy") -local git_ver=os.getenv("git_ver") or "2.18.0" -load(pathJoin("git", git_ver)) whatis("Description: GFS run setup environment") diff --git a/modulefiles/module_gwsetup.orion.lua b/modulefiles/module_gwsetup.orion.lua index 96ed50f7f0..37cb511659 100644 --- a/modulefiles/module_gwsetup.orion.lua +++ b/modulefiles/module_gwsetup.orion.lua @@ -5,7 +5,6 @@ Load environment to run GFS workflow ci scripts on Orion prepend_path("MODULEPATH", "/apps/modulefiles/core") load(pathJoin("contrib","0.1")) load(pathJoin("rocoto","1.3.3")) -load(pathJoin("git","2.28.0")) prepend_path("MODULEPATH", "/work/noaa/epic/role-epic/spack-stack/orion/spack-stack-1.6.0/envs/gsi-addon-env/install/modulefiles/Core") diff --git a/modulefiles/module_gwsetup.wcoss2.lua b/modulefiles/module_gwsetup.wcoss2.lua index d4e64548b0..a2440569db 100644 
--- a/modulefiles/module_gwsetup.wcoss2.lua +++ b/modulefiles/module_gwsetup.wcoss2.lua @@ -2,8 +2,6 @@ help([[ Load environment to run GFS workflow ci scripts on WCOSS2 ]]) -load(pathJoin("git","2.29.0")) - prepend_path("MODULEPATH", "/apps/ops/test/nco/modulefiles/core") load(pathJoin("rocoto","1.3.5")) From 449b60dfdcb524351c7594d8e71e6d7d352c48e8 Mon Sep 17 00:00:00 2001 From: Guillaume Vernieres Date: Mon, 24 Jun 2024 14:18:40 -0400 Subject: [PATCH 20/20] reset to develop