diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.aero b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.aero
index 32993554b4..2fae019574 100644
--- a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.aero
+++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.aero
@@ -20,6 +20,9 @@ case ${machine} in
   "WCOSS2")
     AERO_INPUTS_DIR="/lfs/h2/emc/global/noscrub/emc.global/data/gocart_emissions"
     ;;
+  "GAEA")
+    AERO_INPUTS_DIR="/gpfs/f5/epic/proj-shared/global/glopara/data/gocart_emissions"
+    ;;
   "JET")
     AERO_INPUTS_DIR="/lfs4/HFIP/hfv3gfs/glopara/data/gocart_emissions"
     ;;
@@ -30,12 +33,12 @@ case ${machine} in
 esac
 export AERO_INPUTS_DIR
-export AERO_DIAG_TABLE="${HOMEgfs}/parm/ufs/fv3/diag_table.aero"
-export AERO_FIELD_TABLE="${HOMEgfs}/parm/ufs/fv3/field_table.aero"
+export AERO_DIAG_TABLE="${PARMgfs}/ufs/fv3/diag_table.aero"
+export AERO_FIELD_TABLE="${PARMgfs}/ufs/fv3/field_table.aero"
 # Biomass burning emission dataset. Choose from: gbbepx, qfed, none
 export AERO_EMIS_FIRE="qfed"
 # Directory containing GOCART configuration files
-export AERO_CONFIG_DIR="${HOMEgfs}/parm/ufs/gocart"
+export AERO_CONFIG_DIR="${PARMgfs}/ufs/gocart"
 # Aerosol convective scavenging factors (list of string array elements)
 # Element syntax: '<tracer name>:<factor>'. Use <tracer name> = * to set default factor for all aerosol tracers
diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.aeroanl b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.aeroanl
index 634d8c55b2..ac6e80ee55 100644
--- a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.aeroanl
+++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.aeroanl
@@ -6,25 +6,26 @@ echo "BEGIN: config.aeroanl"
 export CASE_ANL=${CASE}
-export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/config/
-export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/lists/gdas_aero_prototype.yaml
+export OBS_LIST="${PARMgfs}/gdas/aero/obs/lists/gdas_aero.yaml.j2"
 export STATICB_TYPE='identity'
-export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/berror/staticb_${STATICB_TYPE}.yaml
-export FIXgdas=${HOMEgfs}/fix/gdas
-export BERROR_DATA_DIR=${FIXgdas}/bump/aero/${CASE_ANL}/
+export BERROR_YAML="${PARMgfs}/gdas/aero/berror/staticb_${STATICB_TYPE}.yaml.j2"
+export BERROR_DATA_DIR="${FIXgfs}/gdas/bump/aero/${CASE_ANL}/"
 export BERROR_DATE="20160630.000000"
+export CRTM_FIX_YAML="${PARMgfs}/gdas/aero_crtm_coeff.yaml.j2"
+export JEDI_FIX_YAML="${PARMgfs}/gdas/aero_jedi_fix.yaml.j2"
+
 export io_layout_x=1
 export io_layout_y=1
-export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x
+export JEDIEXE="${EXECgfs}/gdas.x"
 if [[ "${DOIAU}" == "YES" ]]; then
   export aero_bkg_times="3,6,9"
-  export AEROVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/variational/3dvar_fgat_gfs_aero.yaml
+  export JEDIYAML="${PARMgfs}/gdas/aero/variational/3dvar_fgat_gfs_aero.yaml.j2"
 else
   export aero_bkg_times="6"
-  export AEROVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/variational/3dvar_gfs_aero.yaml
+  export JEDIYAML="${PARMgfs}/gdas/aero/variational/3dvar_gfs_aero.yaml.j2"
 fi
 echo "END: config.aeroanl"
diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.aeroanlfinal b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.aeroanlfinal
index 230ec5205a..34e5d8f116 100644
--- a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.aeroanlfinal
+++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.aeroanlfinal
@@ -6,5 +6,5 @@ echo "BEGIN: config.aeroanlfinal"
 # Get task specific resources
-. $EXPDIR/config.resources aeroanlfinal
+source "${EXPDIR}/config.resources" aeroanlfinal
 echo "END: config.aeroanlfinal"
diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.aeroanlinit b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.aeroanlinit
index 72175b8d0c..7036d3d27b 100644
--- a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.aeroanlinit
+++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.aeroanlinit
@@ -6,5 +6,5 @@ echo "BEGIN: config.aeroanlinit"
 # Get task specific resources
-. $EXPDIR/config.resources aeroanlinit
+source "${EXPDIR}/config.resources" aeroanlinit
 echo "END: config.aeroanlinit"
diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.aeroanlrun b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.aeroanlrun
index da13df2831..012e5b79f3 100644
--- a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.aeroanlrun
+++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.aeroanlrun
@@ -6,6 +6,6 @@ echo "BEGIN: config.aeroanlrun"
 # Get task specific resources
-. $EXPDIR/config.resources aeroanlrun
+source "${EXPDIR}/config.resources" aeroanlrun
 echo "END: config.aeroanlrun"
diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.anal b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.anal
index e3a17f9c6a..27ff8742e4 100644
--- a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.anal
+++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.anal
@@ -12,20 +12,13 @@ if [[ ${DONST} = "YES" ]]; then
   . ${EXPDIR}/config.nsst
 fi
-if [[ "${CDUMP}" = "gfs" ]] ; then
+if [[ "${RUN}" == "gfs" ]] ; then
   export USE_RADSTAT="NO" # This can only be used when bias correction is non-zero.
   export GENDIAG="NO"
   export SETUP='diag_rad=.false.,diag_pcp=.false.,diag_conv=.false.,diag_ozone=.false.,write_diag(3)=.false.,niter(2)=100,'
   export DIAG_TARBALL="YES"
 fi
-export npe_gsi=${npe_anal}
-
-if [[ "${CDUMP}" == "gfs" ]] ; then
-  export npe_gsi=${npe_anal_gfs}
-  export nth_anal=${nth_anal_gfs}
-fi
-
 # Set parameters specific to L127
 if [[ ${LEVS} = "128" ]]; then
   export GRIDOPTS="nlayers(63)=1,nlayers(64)=1,"
@@ -45,51 +38,58 @@ export AMSR2BF=${AMSR2BF:-/dev/null}
 # Set default values for info files and observation error
 # NOTE: Remember to set PRVT in config.prep as OBERROR is set below
-export CONVINFO=${FIXgsi}/global_convinfo.txt
-export OZINFO=${FIXgsi}/global_ozinfo.txt
-export SATINFO=${FIXgsi}/global_satinfo.txt
-export OBERROR=${FIXgsi}/prepobs_errtable.global
-
+export CONVINFO=${FIXgfs}/gsi/global_convinfo.txt
+export OZINFO=${FIXgfs}/gsi/global_ozinfo.txt
+export SATINFO=${FIXgfs}/gsi/global_satinfo.txt
+export OBERROR=${FIXgfs}/gsi/prepobs_errtable.global
+
+if [[ ${GSI_SOILANAL} = "YES" ]]; then
+  export hofx_2m_sfcfile=".true."
+  export reducedgrid=".false." # not possible for sfc analysis, Jeff Whitaker says it's not useful anyway
+  export paranc=".false." # temporary until sfc io coded for paranc (PR being prepared by T. Gichamo)
+  export CONVINFO=${FIXgfs}/gsi/global_convinfo_2mObs.txt
+  export ANAVINFO=${FIXgfs}/gsi/global_anavinfo_soilanal.l127.txt
+fi
 # Use experimental dumps in EMC GFS v16 parallels
 if [[ ${RUN_ENVIR} == "emc" ]]; then
   # Set info files and prepobs.errtable.global for GFS v16 retrospective parallels
   if [[ "${PDY}${cyc}" -ge "2019021900" && "${PDY}${cyc}" -lt "2019110706" ]]; then
-    export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019021900
-    export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019021900
+    export CONVINFO=${FIXgfs}/gsi/gfsv16_historical/global_convinfo.txt.2019021900
+    export OBERROR=${FIXgfs}/gsi/gfsv16_historical/prepobs_errtable.global.2019021900
   fi
   # Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps
   if [[ "${PDY}${cyc}" -ge "2019110706" && "${PDY}${cyc}" -lt "2020040718" ]]; then
-    export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019110706
-    export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019110706
+    export CONVINFO=${FIXgfs}/gsi/gfsv16_historical/global_convinfo.txt.2019110706
+    export OBERROR=${FIXgfs}/gsi/gfsv16_historical/prepobs_errtable.global.2019110706
   fi
   # Assimilate 135 (T) & 235 (uv) Canadian AMDAR observations
   if [[ "${PDY}${cyc}" -ge "2020040718" && "${PDY}${cyc}" -lt "2020052612" ]]; then
-    export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020040718
-    export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718
+    export CONVINFO=${FIXgfs}/gsi/gfsv16_historical/global_convinfo.txt.2020040718
+    export OBERROR=${FIXgfs}/gsi/gfsv16_historical/prepobs_errtable.global.2020040718
   fi
   # Assimilate COSMIC-2
   if [[ "${PDY}${cyc}" -ge "2020052612" && "${PDY}${cyc}" -lt "2020082412" ]]; then
-    export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020052612
-    export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718
+    export CONVINFO=${FIXgfs}/gsi/gfsv16_historical/global_convinfo.txt.2020052612
+    export OBERROR=${FIXgfs}/gsi/gfsv16_historical/prepobs_errtable.global.2020040718
   fi
   # Assimilate HDOB
   if [[ "${PDY}${cyc}" -ge "2020082412" && "${PDY}${cyc}" -lt "2020091612" ]]; then
-    export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020082412
+    export CONVINFO=${FIXgfs}/gsi/gfsv16_historical/global_convinfo.txt.2020082412
   fi
   # Assimilate Metop-C GNSSRO
   if [[ "${PDY}${cyc}" -ge "2020091612" && "${PDY}${cyc}" -lt "2021031712" ]]; then
-    export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020091612
+    export CONVINFO=${FIXgfs}/gsi/gfsv16_historical/global_convinfo.txt.2020091612
   fi
   # Assimilate DO-2 GeoOptics
   if [[ "${PDY}${cyc}" -ge "2021031712" && "${PDY}${cyc}" -lt "2021091612" ]]; then
-    export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2021031712
+    export CONVINFO=${FIXgfs}/gsi/gfsv16_historical/global_convinfo.txt.2021031712
   fi
   # NOTE:
@@ -98,38 +98,38 @@ if [[ ${RUN_ENVIR} == "emc" ]]; then
   # needed at this time.
   # Assimilate COSMIC-2 GPS
   # if [[ "${PDY}${cyc}" -ge "2021110312" && "${PDY}${cyc}" -lt "YYYYMMDDHH" ]]; then
-  #   export CONVINFO=$FIXgsi/gfsv16_historical/global_convinfo.txt.2021110312
+  #   export CONVINFO=${FIXgfs}/gsi/gfsv16_historical/global_convinfo.txt.2021110312
   # fi
   # Turn off assimilation of OMPS during period of bad data
   if [[ "${PDY}${cyc}" -ge "2020011600" && "${PDY}${cyc}" -lt "2020011806" ]]; then
-    export OZINFO=${FIXgsi}/gfsv16_historical/global_ozinfo.txt.2020011600
+    export OZINFO=${FIXgfs}/gsi/gfsv16_historical/global_ozinfo.txt.2020011600
   fi
   # Set satinfo for start of GFS v16 parallels
   if [[ "${PDY}${cyc}" -ge "2019021900" && "${PDY}${cyc}" -lt "2019110706" ]]; then
-    export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019021900
+    export SATINFO=${FIXgfs}/gsi/gfsv16_historical/global_satinfo.txt.2019021900
   fi
   # Turn on assimilation of Metop-C AMSUA and MHS
   if [[ "${PDY}${cyc}" -ge "2019110706" && "${PDY}${cyc}" -lt "2020022012" ]]; then
-    export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019110706
+    export SATINFO=${FIXgfs}/gsi/gfsv16_historical/global_satinfo.txt.2019110706
   fi
   # Turn off assimilation of Metop-A MHS
   if [[ "${PDY}${cyc}" -ge "2020022012" && "${PDY}${cyc}" -lt "2021052118" ]]; then
-    export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2020022012
+    export SATINFO=${FIXgfs}/gsi/gfsv16_historical/global_satinfo.txt.2020022012
   fi
   # Turn off assimilation of S-NPP CrIS
   if [[ "${PDY}${cyc}" -ge "2021052118" && "${PDY}${cyc}" -lt "2021092206" ]]; then
-    export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021052118
+    export SATINFO=${FIXgfs}/gsi/gfsv16_historical/global_satinfo.txt.2021052118
   fi
   # Turn off assimilation of MetOp-A IASI
   if [[ "${PDY}${cyc}" -ge "2021092206" && "${PDY}${cyc}" -lt "2021102612" ]]; then
-    export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021092206
+    export SATINFO=${FIXgfs}/gsi/gfsv16_historical/global_satinfo.txt.2021092206
   fi
   # NOTE:
@@ -139,8 +139,14 @@ if [[ ${RUN_ENVIR} == "emc" ]]; then
   #
   # Turn off assimilation of all Metop-A MHS
   # if [[ "${PDY}${cyc}" -ge "2021110312" && "${PDY}${cyc}" -lt "YYYYMMDDHH" ]]; then
-  #   export SATINFO=$FIXgsi/gfsv16_historical/global_satinfo.txt.2021110312
+  #   export SATINFO=${FIXgfs}/gsi/gfsv16_historical/global_satinfo.txt.2021110312
   # fi
 fi
+# Flag to turn on (.true.) or off (.false.) the infrared cloud and aerosol detection software
+# for AIRS, CrIS, and IASI. Default is .false.
+export AIRS_CADS=".false."
+export CRIS_CADS=".false."
+export IASI_CADS=".false."
+
 echo "END: config.anal"
diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.analcalc b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.analcalc
index 9405114ecc..d9501503f0 100644
--- a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.analcalc
+++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.analcalc
@@ -6,10 +6,6 @@ echo "BEGIN: config.analcalc"
 # Get task specific resources
-. $EXPDIR/config.resources analcalc
-
-if [[ "$CDUMP" == "gfs" ]]; then
-  export nth_echgres=$nth_echgres_gfs
-fi
+. ${EXPDIR}/config.resources analcalc
 echo "END: config.analcalc"
diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.atmanl b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.atmanl
index 59ea7072a6..7ee57da05e 100644
--- a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.atmanl
+++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.atmanl
@@ -5,17 +5,34 @@ echo "BEGIN: config.atmanl"
-export CASE_ANL=${CASE}
-export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/
-export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/gdas_prototype_3d.yaml
-export ATMVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/variational/3dvar_dripcg.yaml
-export STATICB_TYPE="gsibec"
-export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/berror/staticb_${STATICB_TYPE}.yaml
+export JCB_BASE_YAML="${PARMgfs}/gdas/atm/jcb-base.yaml.j2"
+export JCB_ALGO_YAML=${PARMgfs}/gdas/atm/jcb-prototype_3dvar.yaml.j2
+
+export STATICB_TYPE=gsibec
+export LOCALIZATION_TYPE="bump"
 export INTERP_METHOD='barycentric'
+if [[ ${DOHYBVAR} = "YES" ]]; then
+  # shellcheck disable=SC2153
+  export CASE_ANL=${CASE_ENS}
+  export BERROR_YAML="atmosphere_background_error_hybrid_${STATICB_TYPE}_${LOCALIZATION_TYPE}"
+else
+  export CASE_ANL=${CASE}
+  export BERROR_YAML="atmosphere_background_error_static_${STATICB_TYPE}"
+fi
+
+export CRTM_FIX_YAML="${PARMgfs}/gdas/atm_crtm_coeff.yaml.j2"
+export JEDI_FIX_YAML="${PARMgfs}/gdas/atm_jedi_fix.yaml.j2"
+export VAR_BKG_STAGING_YAML="${PARMgfs}/gdas/staging/atm_var_bkg.yaml.j2"
+export BERROR_STAGING_YAML="${PARMgfs}/gdas/staging/atm_berror_${STATICB_TYPE}.yaml.j2"
+export FV3ENS_STAGING_YAML="${PARMgfs}/gdas/staging/atm_var_fv3ens.yaml.j2"
+
+export layout_x_atmanl=8
+export layout_y_atmanl=8
+
 export io_layout_x=1
 export io_layout_y=1
-export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x
+export JEDIEXE=${EXECgfs}/gdas.x
 echo "END: config.atmanl"
diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.atmanlfv3inc b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.atmanlfv3inc
new file mode 100644
index 0000000000..ab7efa3a60
--- /dev/null
+++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.atmanlfv3inc
@@ -0,0 +1,14 @@
+#! /usr/bin/env bash
+
+########## config.atmanlfv3inc ##########
+# Atm Var Analysis specific
+
+echo "BEGIN: config.atmanlfv3inc"
+
+# Get task specific resources
+. "${EXPDIR}/config.resources" atmanlfv3inc
+
+export JCB_ALGO=fv3jedi_fv3inc_variational
+export JEDIEXE=${EXECgfs}/fv3jedi_fv3inc.x
+
+echo "END: config.atmanlfv3inc"
diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.atmanlinit b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.atmanlinit
index bc95ef4962..1aec88bcc2 100644
--- a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.atmanlinit
+++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.atmanlinit
@@ -7,4 +7,5 @@ echo "BEGIN: config.atmanlinit"
 # Get task specific resources
 . "${EXPDIR}/config.resources" atmanlinit
+
 echo "END: config.atmanlinit"
diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.atmanlvar b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.atmanlvar
new file mode 100644
index 0000000000..cbc0334a08
--- /dev/null
+++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.atmanlvar
@@ -0,0 +1,11 @@
+#! /usr/bin/env bash
+
+########## config.atmanlvar ##########
+# Atm Var Analysis specific
+
+echo "BEGIN: config.atmanlvar"
+
+# Get task specific resources
+. "${EXPDIR}/config.resources" atmanlvar
+
+echo "END: config.atmanlvar"
diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.atmensanl b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.atmensanl
index 6b5da7699b..0db554c424 100644
--- a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.atmensanl
+++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.atmensanl
@@ -5,14 +5,21 @@ echo "BEGIN: config.atmensanl"
-export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/
-export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/lgetkf_prototype.yaml
-export ATMENSYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/lgetkf/lgetkf.yaml
+export JCB_BASE_YAML="${PARMgfs}/gdas/atm/jcb-base.yaml.j2"
+export JCB_ALGO_YAML=${PARMgfs}/gdas/atm/jcb-prototype_lgetkf.yaml.j2
+
 export INTERP_METHOD='barycentric'
+export CRTM_FIX_YAML="${PARMgfs}/gdas/atm_crtm_coeff.yaml.j2"
+export JEDI_FIX_YAML="${PARMgfs}/gdas/atm_jedi_fix.yaml.j2"
+export LGETKF_BKG_STAGING_YAML="${PARMgfs}/gdas/staging/atm_lgetkf_bkg.yaml.j2"
+
+export layout_x_atmensanl=8
+export layout_y_atmensanl=8
+
 export io_layout_x=1
 export io_layout_y=1
-export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x
+export JEDIEXE=${EXECgfs}/gdas.x
 echo "END: config.atmensanl"
diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.atmensanlfv3inc b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.atmensanlfv3inc
new file mode 100644
index 0000000000..2dc73f3f6e
--- /dev/null
+++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.atmensanlfv3inc
@@ -0,0 +1,14 @@
+#! /usr/bin/env bash
+
+########## config.atmensanlfv3inc ##########
+# Atm Ens Analysis specific
+
+echo "BEGIN: config.atmensanlfv3inc"
+
+# Get task specific resources
+. "${EXPDIR}/config.resources" atmensanlfv3inc
+
+export JCB_ALGO=fv3jedi_fv3inc_lgetkf
+export JEDIEXE=${EXECgfs}/fv3jedi_fv3inc.x
+
+echo "END: config.atmensanlfv3inc"
diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.atmensanlinit b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.atmensanlinit
index 34429023bb..0eee2ffa82 100644
--- a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.atmensanlinit
+++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.atmensanlinit
@@ -7,4 +7,5 @@ echo "BEGIN: config.atmensanlinit"
 # Get task specific resources
 . "${EXPDIR}/config.resources" atmensanlinit
+
 echo "END: config.atmensanlinit"
diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.atmensanlletkf b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.atmensanlletkf
new file mode 100644
index 0000000000..1fdc57ae62
--- /dev/null
+++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.atmensanlletkf
@@ -0,0 +1,11 @@
+#! /usr/bin/env bash
+
+########## config.atmensanlletkf ##########
+# Atm Ens Analysis specific
+
+echo "BEGIN: config.atmensanlletkf"
+
+# Get task specific resources
+. "${EXPDIR}/config.resources" atmensanlletkf
+
+echo "END: config.atmensanlletkf"
diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.atmos_products b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.atmos_products
index c3e861b281..e2827bc98f 100644
--- a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.atmos_products
+++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.atmos_products
@@ -12,13 +12,15 @@ echo "BEGIN: config.atmos_products"
 export NFHRS_PER_GROUP=3
 # Scripts used by this job
-export INTERP_ATMOS_MASTERSH="${HOMEgfs}/ush/interp_atmos_master.sh"
-export INTERP_ATMOS_SFLUXSH="${HOMEgfs}/ush/interp_atmos_sflux.sh"
+export INTERP_ATMOS_MASTERSH="${USHgfs}/interp_atmos_master.sh"
+export INTERP_ATMOS_SFLUXSH="${USHgfs}/interp_atmos_sflux.sh"
 if [[ "${RUN:-}" == "gdas" ]]; then
   export downset=1
   export FHOUT_PGBS=${FHOUT:-1} # Output frequency of supplemental gfs pgb file at 1.0 and 0.5 deg
   export FLXGF="NO" # Create interpolated sflux.1p00 file
+  export WGNE="NO" # WGNE products are created for first FHMAX_WGNE forecast hours
+  export FHMAX_WGNE=0
 elif [[ "${RUN:-}" == "gfs" ]]; then
   #JKHexport downset=2 ## create pgrb2b files
   export downset=1 ## JKH
@@ -27,9 +29,9 @@ elif [[ "${RUN:-}" == "gfs" ]]; then
 fi
 # paramlist files for the different forecast hours and downsets
-export paramlista="${HOMEgfs}/parm/post/global_1x1_paramlist_g2"
-export paramlista_anl="${HOMEgfs}/parm/post/global_1x1_paramlist_g2.anl"
-export paramlista_f000="${HOMEgfs}/parm/post/global_1x1_paramlist_g2.f000"
-export paramlistb="${HOMEgfs}/parm/post/global_master-catchup_parmlist_g2"
+export paramlista="${PARMgfs}/product/gfs.fFFF.paramlist.a.txt"
+export paramlista_anl="${PARMgfs}/product/gfs.anl.paramlist.a.txt"
+export paramlista_f000="${PARMgfs}/product/gfs.f000.paramlist.a.txt"
+export paramlistb="${PARMgfs}/product/gfs.fFFF.paramlist.b.txt"
 echo "END: config.atmos_products"
diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.awips b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.awips
index 3b78d4bb4b..61f0dc5652 100644
--- a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.awips
+++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.awips
@@ -8,9 +8,6 @@ echo "BEGIN: config.awips"
 # Get task specific resources
 . "${EXPDIR}/config.resources" awips
-export AWIPS20KM1P0DEGSH="${HOMEgfs}/jobs/JGFS_ATMOS_AWIPS_20KM_1P0DEG"
-export AWIPSG2SH="${HOMEgfs}/jobs/JGFS_ATMOS_AWIPS_G2"
-
 # No. of concurrent awips jobs
 export NAWIPSGRP=42
diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.base b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.base
index 0ca2c4d04e..f057f78330 100644
--- a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.base
+++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.base
@@ -17,40 +17,46 @@ export QUEUE="batch"
 export QUEUE_SERVICE="batch"
 export PARTITION_BATCH="hera"
 export PARTITION_SERVICE="service"
+export RESERVATION=""
+export CLUSTERS=""
 # Project to use in mass store:
 export HPSS_PROJECT="fim"
 # Directories relative to installation areas:
-export HOMEgfs=/scratch1/BMC/gsd-fv3/rtruns/UFS-CAMsuite_dev1_21mar24
-export PARMgfs="${HOMEgfs}/parm"
-export FIXgfs="${HOMEgfs}/fix"
-export USHgfs="${HOMEgfs}/ush"
-export UTILgfs="${HOMEgfs}/util"
+export HOMEgfs=/scratch1/BMC/gsd-fv3/rtruns/UFS-CAMsuite_dev1
 export EXECgfs="${HOMEgfs}/exec"
+export FIXgfs="${HOMEgfs}/fix"
+export PARMgfs="${HOMEgfs}/parm"
 export SCRgfs="${HOMEgfs}/scripts"
+export USHgfs="${HOMEgfs}/ush"
 export FIXam="${FIXgfs}/am"
 export FIXaer="${FIXgfs}/aer"
 export FIXcpl="${FIXgfs}/cpl"
 export FIXlut="${FIXgfs}/lut"
-export FIXorog="${FIXgfs}/orog"
 export FIXcice="${FIXgfs}/cice"
 export FIXmom="${FIXgfs}/mom6"
 export FIXreg2grb2="${FIXgfs}/reg2grb2"
-export FIXugwd="${FIXgfs}/ugwd"
+export FIXgdas="${FIXgfs}/gdas"
 ########################################################################
 # GLOBAL static environment parameters
 export PACKAGEROOT="/scratch1/NCEPDEV/global/glopara/nwpara" # TODO: set via prod_envir in Ops
-export COMROOT="/scratch1/NCEPDEV/global/glopara/com" # TODO: set via prod_envir in Ops
-export COMINsyn="${COMROOT}/gfs/prod/syndat"
+export COMROOT="/scratch1/BMC/gsd-fv3/rtruns/UFS-CAMsuite_dev1/FV3GFSrun" # TODO: set via prod_envir in Ops
+export COMINsyn="/scratch1/NCEPDEV/global/glopara/com/gfs/prod/syndat"
 export DMPDIR="/scratch1/NCEPDEV/global/glopara/dump"
 export BASE_CPLIC="/scratch1/NCEPDEV/global/glopara/data/ICSDIR/prototype_ICs"
+
+# Gempak from external models
+# Default locations are to dummy locations for testing
+export COMINecmwf=/scratch1/NCEPDEV/global/glopara/data/external_gempak/ecmwf
+export COMINnam=/scratch1/NCEPDEV/global/glopara/data/external_gempak/nam
+export COMINukmet=/scratch1/NCEPDEV/global/glopara/data/external_gempak/ukmet
+
 # USER specific paths
-export HOMEDIR="/scratch1/BMC/gsd-fv3/NCEPDEV/global/${USER}"
+export HOMEDIR="/scratch1/BMC/gsd-fv3-dev/NCEPDEV/global/${USER}"
 export STMP="${HOMEgfs}/FV3GFSrun/"
 export PTMP="${HOMEgfs}/FV3GFSrun/"
 export NOSCRUB="$HOMEDIR"
@@ -59,25 +65,18 @@ export NOSCRUB="$HOMEDIR"
 # Base directories for various builds
 export BASE_GIT="/scratch1/NCEPDEV/global/glopara/git"
 # Toggle to turn on/off GFS downstream processing.
-export DO_GOES="NO"           # GOES products
-export DO_BUFRSND="NO"        # BUFR sounding products
-export DO_GEMPAK="NO"         # GEMPAK products
-export DO_AWIPS="NO"          # AWIPS products
-export DO_NPOESS="NO"         # NPOESS products
+export DO_GOES="NO"    # GOES products
+export DO_BUFRSND="NO" # BUFR sounding products
+export DO_GEMPAK="NO"  # GEMPAK products
+export DO_AWIPS="NO"   # AWIPS products
+export DO_NPOESS="NO"  # NPOESS products
 export DO_TRACKER="NO" # Hurricane track verification ## JKH
 export DO_GENESIS="NO" # Cyclone genesis verification ## JKH
 export DO_GENESIS_FSU="NO" # Cyclone genesis verification (FSU)
-# The monitor is not yet supported on Hercules
-if [[ "${machine}" == "HERCULES" ]]; then
-  export DO_VERFOZN="NO"  # Ozone data assimilation monitoring
-  export DO_VERFRAD="NO"  # Radiance data assimilation monitoring
-  export DO_VMINMON="NO"  # GSI minimization monitoring
-else
-  export DO_VERFOZN="YES" # Ozone data assimilation monitoring
-  export DO_VERFRAD="YES" # Radiance data assimilation monitoring
-  export DO_VMINMON="YES" # GSI minimization monitoring
-fi
-export DO_MOS="NO" # GFS Model Output Statistics - Only supported on WCOSS2
+export DO_VERFOZN="YES" # Ozone data assimilation monitoring
+export DO_VERFRAD="YES" # Radiance data assimilation monitoring
+export DO_VMINMON="YES" # GSI minimization monitoring
+export DO_MOS="NO"      # GFS Model Output Statistics - Only supported on WCOSS2
 # NO for retrospective parallel; YES for real-time parallel
 # arch.sh uses REALTIME for MOS. Need to set REALTIME=YES
@@ -105,6 +104,7 @@ export NMV="/bin/mv"
 export NLN="/bin/ln -sf"
 export VERBOSE="YES"
 export KEEPDATA="NO"
+export DEBUG_POSTSCRIPT="NO" # PBS only; sets debug=true
 export CHGRP_RSTPROD="YES"
 export CHGRP_CMD="chgrp rstprod"
 export NCDUMP="${NETCDF:-${netcdf_c_ROOT:-}}/bin/ncdump"
@@ -115,34 +115,30 @@ export BASE_ENV="${HOMEgfs}/env"
 export BASE_JOB="${HOMEgfs}/jobs/rocoto"
 # EXPERIMENT specific environment parameters
-export SDATE=2024011400
-export EDATE=2024011400
+export SDATE=2024091800
+export EDATE=2024091800
 export EXP_WARM_START=".false."
 export assim_freq=6
 export PSLOT="rt_v17p8_ugwpv1_c3_mynn"
-export EXPDIR="/home/role.rtfim/UFS-CAMsuite_dev1//FV3GFSwfm/${PSLOT}"
-export ROTDIR="/home/role.rtfim/UFS-CAMsuite_dev1//FV3GFSrun/${PSLOT}"
+export EXPDIR="/scratch1/BMC/gsd-fv3/rtruns/UFS-CAMsuite_dev1/FV3GFSwfm/${PSLOT}"
+export ROTDIR="/scratch1/BMC/gsd-fv3/rtruns/UFS-CAMsuite_dev1/FV3GFSrun/${PSLOT}"
 export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work
 export DUMP_SUFFIX=""
 if [[ "${PDY}${cyc}" -ge "2019092100" && "${PDY}${cyc}" -le "2019110700" ]]; then
   export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel
 fi
-export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops
-export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead
 export ARCDIR="${NOSCRUB}/archive/${PSLOT}"
-export ATARDIR="/BMC/${HPSS_PROJECT}/2year/${PSLOT}"
+export ATARDIR="/BMC/${HPSS_PROJECT}/1year/${USER}/${machine}/scratch/${PSLOT}"
 # Commonly defined parameters in JJOBS
 export envir=${envir:-"prod"}
 export NET="gfs" # NET is defined in the job-card (ecf)
-export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy
-# TODO: determine where is RUN actually used in the workflow other than here
-# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be
-# consistent w/ EE2?
+export RUN=${RUN:-"gfs"} # RUN is defined in the job-card (ecf)
 # Get all the COM path templates
 source "${EXPDIR}/config.com"
+# shellcheck disable=SC2016
 export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'}
 export LOGSCRIPT=${LOGSCRIPT:-""}
 #export ERRSCRIPT=${ERRSCRIPT:-"err_chk"}
 #export LOGSCRIPT=${LOGSCRIPT:-"startmsg"}
@@ -159,6 +155,17 @@ export DBNROOT=${DBNROOT:-${UTILROOT:-}/fakedbn}
 # APP settings
 export APP=ATM
+shopt -s extglob
+# Adjust APP based on RUN
+case "${RUN}" in
+  enkf*) # Turn off aerosols and waves
+    APP="${APP/%+([WA])}"
+    ;;
+  *) # Keep app unchanged
+    ;;
+esac
+shopt -u extglob
+
 # Defaults:
 export DO_ATM="YES"
 export DO_COUPLED="NO"
@@ -166,9 +173,24 @@ export DO_WAVE="NO"
 export DO_OCN="NO"
 export DO_ICE="NO"
 export DO_AERO="NO"
-export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both
+export DO_PREP_OBS_AERO="NO"
+export AERO_FCST_RUN="" # When to run aerosol forecast: gdas, gfs, or both
+export AERO_ANL_RUN=""  # When to run aerosol analysis: gdas, gfs, or both
+export WAVE_RUN=""      # When to include wave suite: gdas, gfs, or both
 export DOBNDPNT_WAVE="NO"
+export DOIBP_WAV="NO" # Option to create point outputs from input boundary points
 export FRAC_GRID=".true."
+export DO_NEST="NO" # Whether to run a global-nested domain
+if [[ "${DO_NEST:-NO}" == "YES" ]] ; then
+  export ntiles=7
+  export NEST_OUTPUT_GRID="regional_latlon"
+  export FIXugwd="${FIXgfs}/ugwd_nest"
+  export FIXorog="${FIXgfs}/orog_nest"
+else
+  export ntiles=6
+  export FIXugwd="${FIXgfs}/ugwd"
+  export FIXorog="${FIXgfs}/orog"
+fi
 # Set operational resolution
 export OPS_RES="C768" # Do not change # TODO: Why is this needed and where is it used?
@@ -179,19 +201,20 @@ export CASE="C768"
 export CASE_ENS="@CASEENS@"
 export OCNRES="025"
 export ICERES="${OCNRES}"
+
 # These are the currently recommended grid-combinations
 case "${CASE}" in
   "C48")
-    export waveGRD='glo_500'
+    export waveGRD='uglo_100km'
     ;;
   "C96" | "C192")
-    export waveGRD='glo_200'
+    export waveGRD='uglo_100km'
    ;;
   "C384")
-    export waveGRD='glo_025'
+    export waveGRD='uglo_100km'
    ;;
   "C768" | "C1152")
-    export waveGRD='mx025'
+    export waveGRD='uglo_m1g16'
    ;;
   *)
     echo "FATAL ERROR: Unrecognized CASE ${CASE}, ABORT!"
@@ -204,11 +227,13 @@ case "${APP}" in
     ;;
   ATMA)
     export DO_AERO="YES"
+    export AERO_ANL_RUN="both"
+    export AERO_FCST_RUN="gdas"
     ;;
   ATMW)
     export DO_COUPLED="YES"
     export DO_WAVE="YES"
-    export WAVE_CDUMP="both"
+    export WAVE_RUN="both"
     ;;
   NG-GODAS)
     export DO_ATM="NO"
@@ -222,11 +247,13 @@ case "${APP}" in
     if [[ "${APP}" =~ A$ ]]; then
       export DO_AERO="YES"
+      export AERO_ANL_RUN="both"
+      export AERO_FCST_RUN="gdas"
     fi
     if [[ "${APP}" =~ ^S2SW ]]; then
       export DO_WAVE="YES"
-      export WAVE_CDUMP="both"
+      export WAVE_RUN="both"
     fi
     ;;
   *)
@@ -236,10 +263,10 @@ case "${APP}" in
 esac
 # Surface cycle update frequency
-if [[ "${CDUMP}" =~ "gdas" ]] ; then
+if [[ "${RUN}" =~ "gdas" ]] ; then
   export FHCYC=1
   export FTSFS=10
-elif [[ "${CDUMP}" =~ "gfs" ]] ; then
+elif [[ "${RUN}" =~ "gfs" ]] ; then
   export FHCYC=24
 fi
@@ -247,6 +274,8 @@ fi
 export FHMIN=0
 export FHMAX=9
 export FHOUT=3 # Will be changed to 1 in config.base if (DOHYBVAR set to NO and l4densvar set to false)
+export FHOUT_OCN=3
+export FHOUT_ICE=3
 # Cycle to run EnKF (set to BOTH for both gfs and gdas)
 export EUPD_CYC="gdas"
@@ -256,26 +285,40 @@ export gfs_cyc=1 # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 c
 # GFS output and frequency
 export FHMIN_GFS=0
-
-export FHMAX_GFS_00=120
-export FHMAX_GFS_06=120
-export FHMAX_GFS_12=120
-export FHMAX_GFS_18=120
-current_fhmax_var=FHMAX_GFS_${cyc}; declare -x FHMAX_GFS=${!current_fhmax_var}
-
-export FHOUT_GFS=6 # Must be 6 for S2S until #1629 is addressed; 3 for ops
+export FHMAX_GFS=120
+export FHOUT_GFS=6 # 3 for ops
 export FHMAX_HF_GFS=0
 export FHOUT_HF_GFS=1
+export FHOUT_OCN_GFS=6
+export FHOUT_ICE_GFS=6
+export FHMIN_WAV=0
+export FHOUT_WAV=3
+export FHMAX_HF_WAV=120
+export FHOUT_HF_WAV=1
+export FHMAX_WAV=${FHMAX:-9}
+export FHMAX_WAV_GFS=${FHMAX_GFS}
 if (( gfs_cyc != 0 )); then
   export STEP_GFS=$(( 24 / gfs_cyc ))
 else
   export STEP_GFS="0"
 fi
-export ILPOST=1 # gempak output frequency up to F120
+# TODO: Change gempak to use standard out variables (#2348)
+export ILPOST=${FHOUT_HF_GFS} # gempak output frequency up to F120
+if (( FHMAX_HF_GFS < 120 )); then
+  export ILPOST=${FHOUT_GFS}
+fi
+
+# Limit bounds of goes processing
+export FHMAX_GOES=180
+export FHOUT_GOES=3
+if (( FHMAX_GOES > FHMAX_GFS )); then
+  export FHMAX_GOES=${FHMAX_GFS}
+fi
 # GFS restart interval in hours
 #JKHexport restart_interval_gfs=12
-export restart_interval_gfs=-1 ## JKH
+#KYWexport restart_interval_gfs=-1 ## JKH
+export restart_interval_gfs=$FHMAX_GFS
 # NOTE: Do not set this to zero. Instead set it to $FHMAX_GFS
 # TODO: Remove this variable from config.base and reference from config.fcst
 # TODO: rework logic in config.wave and push it to parsing_namelists_WW3.sh where it is actually used
@@ -311,22 +354,27 @@ export imp_physics=8
 export DO_JEDIATMVAR="NO"
 export DO_JEDIATMENS="NO"
 export DO_JEDIOCNVAR="NO"
-export DO_JEDILANDDA="NO"
+export DO_JEDISNOWDA="NO"
 export DO_MERGENSST="NO"
 # Hybrid related
 export DOHYBVAR="@DOHYBVAR@"
-export NMEM_ENS=@NMEM_ENS@
-export NMEM_ENS_GFS=@NMEM_ENS@
+export NMEM_ENS=0
 export SMOOTH_ENKF="NO"
 export l4densvar=".true."
 export lwrite4danl=".true."
+export DO_CALC_INCREMENT="NO"
+
+# Early-cycle EnKF parameters
+export NMEM_ENS_GFS=30
+export NMEM_ENS_GFS_OFFSET=20
+export DO_CALC_INCREMENT_ENKF_GFS="NO"
 # EnKF output frequency
 if [[ ${DOHYBVAR} = "YES" ]]; then
   export FHMIN_ENKF=3
   export FHMAX_ENKF=9
-  export FHMAX_ENKF_GFS=120
+  export FHMAX_ENKF_GFS=12
   export FHOUT_ENKF_GFS=3
   if [[ ${l4densvar} = ".true." ]]; then
     export FHOUT=1
@@ -344,6 +392,9 @@ if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then
   export IAUFHRS_ENKF="6"
 fi
+# Generate post-processing ensemble spread files
+export ENKF_SPREAD="YES"
+
 # Check if cycle is cold starting, DOIAU off, or free-forecast mode
 if [[ "${MODE}" = "cycled" && "${SDATE}" = "${PDY}${cyc}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then
   export IAU_OFFSET=0
@@ -353,6 +404,24 @@ fi
 if [[ "${DOIAU_ENKF}" = "NO" ]]; then export IAUFHRS_ENKF="6"; fi
+# Determine restart intervals
+# For IAU, write restarts at beginning of window also
+if [[ "${DOIAU_ENKF:-}" == "YES" ]]; then
+  export restart_interval_enkfgdas="3"
+else
+  export restart_interval_enkfgdas="6"
+fi
+
+export restart_interval_enkfgfs=${restart_interval_enkfgdas}
+
+if [[ "${DOIAU}" == "YES" ]]; then
+  export restart_interval_gdas="3"
+else
+  export restart_interval_gdas="6"
+fi
+
+export GSI_SOILANAL=NO
+
 # turned on nsst in anal and/or fcst steps, and turn off rtgsst
 export DONST="YES"
 if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi
@@ -367,13 +436,10 @@ export MAKE_NSSTBUFR="NO"
 export MAKE_ACFTBUFR="NO"
 # Analysis increments to zero in CALCINCEXEC
-export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'"
-
-# Write analysis files for early cycle EnKF
-export DO_CALC_INCREMENT_ENKF_GFS="YES"
+export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc','rwmr_inc','snmr_inc','grle_inc'"
 # Stratospheric increments to zero
-export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'"
+export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc','rwmr_inc','snmr_inc','grle_inc'"
 export INCVARS_EFOLD="5"
 # Switch to generate netcdf or binary diagnostic files. If not specified,
@@ -383,8 +449,13 @@ export netcdf_diag=".true."
 export binary_diag=".false."
 # Verification options
-export DO_METP="NO" # Run METPLUS jobs - set METPLUS settings in config.metp; not supported with spack-stack
+export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp
 export DO_FIT2OBS="NO" # Run fit to observations package ## JKH
+export DO_VRFY_OCEANDA="NO" # Run SOCA Ocean and Seaice DA verification tasks
+
+#--online archive of netcdf files for fit2obs verification
+export FHMAX_FITS=132
+[[ "${FHMAX_FITS}" -gt "${FHMAX_GFS}" ]] && export FHMAX_FITS=${FHMAX_GFS}
 # Archiving options
 export HPSSARCH="YES" # save data to HPSS archive
@@ -397,9 +468,18 @@ export ARCH_CYC=00 # Archive data at this cycle for warm_start capabil
 export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability
 export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability
-#--online archive of nemsio files for fit2obs verification
-export FITSARC="YES"
-export FHMAX_FITS=132
-[[ "${FHMAX_FITS}" -gt "${FHMAX_GFS}" ]] && export FHMAX_FITS=${FHMAX_GFS}
+# The monitor jobs are not yet supported for JEDIATMVAR.
+if [[ ${DO_JEDIATMVAR} = "YES" ]]; then
+  export DO_VERFOZN="NO"  # Ozone data assimilation monitoring
+  export DO_VERFRAD="NO"  # Radiance data assimilation monitoring
+  export DO_VMINMON="NO"  # GSI minimization monitoring
+fi
+
+# If starting ICs that are not at cycle hour
+export REPLAY_ICS="NO"
+export OFFSET_START_HOUR=0
+
+# Number of regional collectives to create soundings for
+export NUM_SND_COLLECTIVES=${NUM_SND_COLLECTIVES:-9}
 echo "END: config.base"
diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.base.emc b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.base.emc
new file mode 100644
index 0000000000..56005199aa
--- /dev/null
+++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.base.emc
@@ -0,0 +1,483 @@
+#! /usr/bin/env bash
+
+########## config.base ##########
+# Common to all steps
+
+echo "BEGIN: config.base"
+
+# Machine environment
+export machine="@MACHINE@"
+
+# EMC parallel or NCO production
+export RUN_ENVIR="emc"
+
+# Account, queue, etc.
+export ACCOUNT="@ACCOUNT@"
+export QUEUE="@QUEUE@"
+export QUEUE_SERVICE="@QUEUE_SERVICE@"
+export PARTITION_BATCH="@PARTITION_BATCH@"
+export PARTITION_SERVICE="@PARTITION_SERVICE@"
+export RESERVATION="@RESERVATION@"
+export CLUSTERS="@CLUSTERS@"
+
+# Project to use in mass store:
+export HPSS_PROJECT="@HPSS_PROJECT@"
+
+# Directories relative to installation areas:
+export HOMEgfs=@HOMEgfs@
+export EXECgfs="${HOMEgfs}/exec"
+export FIXgfs="${HOMEgfs}/fix"
+export PARMgfs="${HOMEgfs}/parm"
+export SCRgfs="${HOMEgfs}/scripts"
+export USHgfs="${HOMEgfs}/ush"
+
+export FIXam="${FIXgfs}/am"
+export FIXaer="${FIXgfs}/aer"
+export FIXcpl="${FIXgfs}/cpl"
+export FIXlut="${FIXgfs}/lut"
+export FIXcice="${FIXgfs}/cice"
+export FIXmom="${FIXgfs}/mom6"
+export FIXreg2grb2="${FIXgfs}/reg2grb2"
+export FIXgdas="${FIXgfs}/gdas"
+
+########################################################################
+
+# GLOBAL static environment parameters
+export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops
+export COMROOT="@COMROOT@"         # TODO: set via prod_envir in Ops
+export COMINsyn="@COMINsyn@"
+export DMPDIR="@DMPDIR@"
+export BASE_CPLIC="@BASE_CPLIC@"
+
+# Gempak from external models
+# Default locations are to dummy locations for testing
+export COMINecmwf=@COMINecmwf@
+export COMINnam=@COMINnam@
+export COMINukmet=@COMINukmet@
+
+# USER specific paths
+export HOMEDIR="@HOMEDIR@"
+export STMP="@STMP@"
+export PTMP="@PTMP@"
+export NOSCRUB="@NOSCRUB@"
+
+# Base directories for various builds
+export BASE_GIT="@BASE_GIT@"
+
+# Toggle to turn on/off GFS downstream processing.
+export DO_GOES="@DO_GOES@"       # GOES products
+export DO_BUFRSND="@DO_BUFRSND@" # BUFR sounding products
+export DO_GEMPAK="@DO_GEMPAK@"   # GEMPAK products
+export DO_AWIPS="@DO_AWIPS@"     # AWIPS products
+export DO_NPOESS="@DO_NPOESS@"   # NPOESS products
+export DO_TRACKER="@DO_TRACKER@" # Hurricane track verification
+export DO_GENESIS="@DO_GENESIS@" # Cyclone genesis verification
+export DO_GENESIS_FSU="@DO_GENESIS_FSU@" # Cyclone genesis verification (FSU)
+export DO_VERFOZN="YES" # Ozone data assimilation monitoring
+export DO_VERFRAD="YES" # Radiance data assimilation monitoring
+export DO_VMINMON="YES" # GSI minimization monitoring
+export DO_MOS="NO"      # GFS Model Output Statistics - Only supported on WCOSS2
+
+# NO for retrospective parallel; YES for real-time parallel
+# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES
+# if you want MOS written to HPSS. Should update arch.sh to
+# use RUNMOS flag
+export REALTIME="YES"
+
+# Experiment mode (cycled or forecast-only)
+export MODE="@MODE@" # cycled/forecast-only
+
+####################################################
+# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE
+# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW
+# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT
+# CLEAR
+####################################################
+# Build paths relative to $HOMEgfs
+export FIXgsi="${HOMEgfs}/fix/gsi"
+export HOMEpost="${HOMEgfs}"
+export HOMEobsproc="${BASE_GIT:-}/obsproc/v${obsproc_run_ver:-1.1.2}"
+
+# CONVENIENT utility scripts and other environment parameters
+export NCP="/bin/cp -p"
+export NMV="/bin/mv"
+export NLN="/bin/ln -sf"
+export VERBOSE="YES"
+export KEEPDATA="NO"
+export DEBUG_POSTSCRIPT="NO" # PBS only; sets debug=true
+export CHGRP_RSTPROD="@CHGRP_RSTPROD@"
+export CHGRP_CMD="@CHGRP_CMD@"
+export NCDUMP="${NETCDF:-${netcdf_c_ROOT:-}}/bin/ncdump"
+export NCLEN="${HOMEgfs}/ush/getncdimlen"
+
+# Machine environment, jobs, and other utility scripts
+export BASE_ENV="${HOMEgfs}/env"
+export BASE_JOB="${HOMEgfs}/jobs/rocoto"
+
+# EXPERIMENT specific environment parameters
+export SDATE=@SDATE@
+export EDATE=@EDATE@
+export EXP_WARM_START="@EXP_WARM_START@"
+export assim_freq=6
+export PSLOT="@PSLOT@"
+export EXPDIR="@EXPDIR@/${PSLOT}"
+export ROTDIR="@COMROOT@/${PSLOT}"
+export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work
+export DUMP_SUFFIX=""
+if [[ "${PDY}${cyc}" -ge "2019092100" && "${PDY}${cyc}" -le "2019110700" ]]; then
+  export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel
+fi
+export ARCDIR="${NOSCRUB}/archive/${PSLOT}"
+export ATARDIR="@ATARDIR@"
+
+# Commonly defined parameters in JJOBS
+export envir=${envir:-"prod"}
+export NET="gfs" # NET is defined in the job-card (ecf)
+export RUN=${RUN:-"gfs"} # RUN is defined in the job-card (ecf)
+
+# Get all the COM path templates
+source "${EXPDIR}/config.com"
+
+# shellcheck disable=SC2016
+export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'}
+export LOGSCRIPT=${LOGSCRIPT:-""}
+#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"}
+#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"}
+export REDOUT="1>"
+export REDERR="2>"
+
+export SENDECF=${SENDECF:-"NO"}
+export SENDSDM=${SENDSDM:-"NO"}
+export SENDDBN_NTC=${SENDDBN_NTC:-"NO"}
+export SENDDBN=${SENDDBN:-"NO"}
+export DBNROOT=${DBNROOT:-${UTILROOT:-}/fakedbn}
+
+# APP settings
+export APP=@APP@
+
+shopt -s extglob
+# Adjust APP based on RUN
+case "${RUN}" in
+  enkf*) # Turn off aerosols and waves
+    APP="${APP/%+([WA])}"
+    ;;
+  *) # Keep app unchanged
+    ;;
+esac
+shopt -u extglob
+
+# Defaults:
+export DO_ATM="YES"
+export DO_COUPLED="NO"
+export DO_WAVE="NO"
+export DO_OCN="NO"
+export DO_ICE="NO"
+export DO_AERO="NO"
+export DO_PREP_OBS_AERO="NO"
+export AERO_FCST_RUN="" # When to run aerosol forecast: gdas, gfs, or both
+export AERO_ANL_RUN=""  # When to run aerosol analysis: gdas, gfs, or both
+export WAVE_RUN=""      # When to include wave suite: gdas, gfs, or both
+export DOBNDPNT_WAVE="NO"
+export DOIBP_WAV="NO" # Option to create point outputs from input boundary points
+export FRAC_GRID=".true."
+export DO_NEST="NO" # Whether to run a global-nested domain
+if [[ "${DO_NEST:-NO}" == "YES" ]] ; then
+  export ntiles=7
+  export NEST_OUTPUT_GRID="regional_latlon"
+  export FIXugwd="${FIXgfs}/ugwd_nest"
+  export FIXorog="${FIXgfs}/orog_nest"
+else
+  export ntiles=6
+  export FIXugwd="${FIXgfs}/ugwd"
+  export FIXorog="${FIXgfs}/orog"
+fi
+
+# Set operational resolution
+export OPS_RES="C768" # Do not change # TODO: Why is this needed and where is it used?
+
+# Resolution specific parameters
+export LEVS=128
+export CASE="@CASECTL@"
+export CASE_ENS="@CASEENS@"
+export OCNRES="@OCNRES@"
+export ICERES="${OCNRES}"
+
+# These are the currently recommended grid-combinations
+case "${CASE}" in
+  "C48")
+    export waveGRD='uglo_100km'
+    ;;
+  "C96" | "C192")
+    export waveGRD='uglo_100km'
+    ;;
+  "C384")
+    export waveGRD='uglo_100km'
+    ;;
+  "C768" | "C1152")
+    export waveGRD='uglo_m1g16'
+    ;;
+  *)
+    echo "FATAL ERROR: Unrecognized CASE ${CASE}, ABORT!"
+    exit 1
+    ;;
+esac
+
+case "${APP}" in
+  ATM)
+    ;;
+  ATMA)
+    export DO_AERO="YES"
+    export AERO_ANL_RUN="both"
+    export AERO_FCST_RUN="gdas"
+    ;;
+  ATMW)
+    export DO_COUPLED="YES"
+    export DO_WAVE="YES"
+    export WAVE_RUN="both"
+    ;;
+  NG-GODAS)
+    export DO_ATM="NO"
+    export DO_OCN="YES"
+    export DO_ICE="YES"
+    ;;
+  S2S*)
+    export DO_COUPLED="YES"
+    export DO_OCN="YES"
+    export DO_ICE="YES"
+
+    if [[ "${APP}" =~ A$ ]]; then
+      export DO_AERO="YES"
+      export AERO_ANL_RUN="both"
+      export AERO_FCST_RUN="gdas"
+    fi
+
+    if [[ "${APP}" =~ ^S2SW ]]; then
+      export DO_WAVE="YES"
+      export WAVE_RUN="both"
+    fi
+    ;;
+  *)
+    echo "Unrecognized APP: '${APP}'"
+    exit 1
+    ;;
+esac
+
+# Surface cycle update frequency
+if [[ "${RUN}" =~ "gdas" ]] ; then
+  export FHCYC=1
+  export FTSFS=10
+elif [[ "${RUN}" =~ "gfs" ]] ; then
+  export FHCYC=24
+fi
+
+# Output frequency of the forecast model (for cycling)
+export FHMIN=0
+export FHMAX=9
+export FHOUT=3 # Will be changed to 1 in config.base if (DOHYBVAR set to NO and l4densvar set to false)
+export FHOUT_OCN=3
+export FHOUT_ICE=3
+
+# Cycle to run EnKF (set to BOTH for both gfs and gdas)
+export EUPD_CYC="@EUPD_CYC@"
+
+# GFS cycle info
+export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles.
+
+# GFS output and frequency
+export FHMIN_GFS=0
+export FHMAX_GFS=@FHMAX_GFS@
+export FHOUT_GFS=3 # 3 for ops
+export FHMAX_HF_GFS=@FHMAX_HF_GFS@
+export FHOUT_HF_GFS=1
+export FHOUT_OCN_GFS=6
+export FHOUT_ICE_GFS=6
+export FHMIN_WAV=0
+export FHOUT_WAV=3
+export FHMAX_HF_WAV=120
+export FHOUT_HF_WAV=1
+export FHMAX_WAV=${FHMAX:-9}
+export FHMAX_WAV_GFS=${FHMAX_GFS}
+if (( gfs_cyc != 0 )); then
+  export STEP_GFS=$(( 24 / gfs_cyc ))
+else
+  export STEP_GFS="0"
+fi
+# TODO: Change gempak to use standard out variables (#2348)
+export ILPOST=${FHOUT_HF_GFS} # gempak output frequency up to F120
+if (( FHMAX_HF_GFS < 120 )); then
+  export ILPOST=${FHOUT_GFS}
+fi
+
+# Limit bounds of goes processing
+export FHMAX_GOES=180
+export FHOUT_GOES=3
+if (( FHMAX_GOES > FHMAX_GFS )); then
+  export FHMAX_GOES=${FHMAX_GFS}
+fi
+
+# GFS restart interval in hours
+export restart_interval_gfs=12
+# NOTE: Do not set this to zero. Instead set it to $FHMAX_GFS
+# TODO: Remove this variable from config.base and reference from config.fcst
+# TODO: rework logic in config.wave and push it to parsing_namelists_WW3.sh where it is actually used
+
+export QUILTING=".true."
+export OUTPUT_GRID="gaussian_grid"
+export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST
+export WRITE_NSFLIP=".true."
+
+# IAU related parameters
+export DOIAU="@DOIAU@" # Enable 4DIAU for control with 3 increments
+export IAUFHRS="3,6,9"
+export IAU_FHROT=${IAUFHRS%%,*}
+export IAU_DELTHRS=6
+export IAU_OFFSET=6
+export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble
+export IAUFHRS_ENKF="3,6,9"
+export IAU_DELTHRS_ENKF=6
+
+# Use Jacobians in eupd and thereby remove need to run eomg
+export lobsdiag_forenkf=".true."
+
+# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA
+#   export DO_WAVE="NO"
+#   echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE"
+# fi
+
+# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL
+export imp_physics=8
+
+# Shared parameters
+# DA engine
+export DO_JEDIATMVAR="@DO_JEDIATMVAR@"
+export DO_JEDIATMENS="@DO_JEDIATMENS@"
+export DO_JEDIOCNVAR="@DO_JEDIOCNVAR@"
+export DO_JEDISNOWDA="@DO_JEDISNOWDA@"
+export DO_MERGENSST="@DO_MERGENSST@"
+
+# Hybrid related
+export DOHYBVAR="@DOHYBVAR@"
+export NMEM_ENS=@NMEM_ENS@
+export SMOOTH_ENKF="NO"
+export l4densvar=".true."
+export lwrite4danl=".true."
+export DO_CALC_INCREMENT="NO"
+
+# Early-cycle EnKF parameters
+export NMEM_ENS_GFS=30
+export NMEM_ENS_GFS_OFFSET=20
+export DO_CALC_INCREMENT_ENKF_GFS="NO"
+
+# EnKF output frequency
+if [[ ${DOHYBVAR} = "YES" ]]; then
+  export FHMIN_ENKF=3
+  export FHMAX_ENKF=9
+  export FHMAX_ENKF_GFS=@FHMAX_ENKF_GFS@
+  export FHOUT_ENKF_GFS=3
+  if [[ ${l4densvar} = ".true." ]]; then
+    export FHOUT=1
+    export FHOUT_ENKF=1
+  else
+    export FHOUT_ENKF=3
+  fi
+fi
+
+# if 3DVAR and IAU
+if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then
+  export IAUFHRS="6"
+  export IAU_FHROT="3"
+  export IAU_FILTER_INCREMENTS=".true."
+  export IAUFHRS_ENKF="6"
+fi
+
+# Generate post-processing ensemble spread files
+export ENKF_SPREAD="YES"
+
+# Check if cycle is cold starting, DOIAU off, or free-forecast mode
+if [[ "${MODE}" = "cycled" && "${SDATE}" = "${PDY}${cyc}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then
+  export IAU_OFFSET=0
+  export IAU_FHROT=0
+  export IAUFHRS="6"
+fi
+
+if [[ "${DOIAU_ENKF}" = "NO" ]]; then export IAUFHRS_ENKF="6"; fi
+
+# Determine restart intervals
+# For IAU, write restarts at beginning of window also
+if [[ "${DOIAU_ENKF:-}" == "YES" ]]; then
+  export restart_interval_enkfgdas="3"
+else
+  export restart_interval_enkfgdas="6"
+fi
+
+export restart_interval_enkfgfs=${restart_interval_enkfgdas}
+
+if [[ "${DOIAU}" == "YES" ]]; then
+  export restart_interval_gdas="3"
+else
+  export restart_interval_gdas="6"
+fi
+
+export GSI_SOILANAL=@GSI_SOILANAL@
+
+# turned on nsst in anal and/or fcst steps, and turn off rtgsst
+export DONST="YES"
+if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi
+
+# The switch to apply SST elevation correction or not
+export nst_anl=.true.
+
+# Make the nsstbufr file on the fly or use the GDA version
+export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@"
+
+# Make the aircraft prepbufr file on the fly or use the GDA version
+export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@"
+
+# Analysis increments to zero in CALCINCEXEC
+export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc','rwmr_inc','snmr_inc','grle_inc'"
+
+# Stratospheric increments to zero
+export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc','rwmr_inc','snmr_inc','grle_inc'"
+export INCVARS_EFOLD="5"
+
+# Switch to generate netcdf or binary diagnostic files. If not specified,
+# scripts default to binary diagnostic files. Set diagnostic file
+# variables here since used in DA job
+export netcdf_diag=".true."
+export binary_diag=".false."
+
+# Verification options
+export DO_METP="@DO_METP@" # Run METPLUS jobs - set METPLUS settings in config.metp
+export DO_FIT2OBS="YES" # Run fit to observations package
+export DO_VRFY_OCEANDA="@DO_VRFY_OCEANDA@" # Run SOCA Ocean and Seaice DA verification tasks
+
+#--online archive of netcdf files for fit2obs verification
+export FHMAX_FITS=132
+[[ "${FHMAX_FITS}" -gt "${FHMAX_GFS}" ]] && export FHMAX_FITS=${FHMAX_GFS}
+
+# Archiving options
+export HPSSARCH="@HPSSARCH@" # save data to HPSS archive
+export LOCALARCH="@LOCALARCH@" # save data to local archive
+if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then
+  echo "Both HPSS and local archiving selected. Please choose one or the other."
+  exit 2
+fi
+export ARCH_CYC=00 # Archive data at this cycle for warm_start capability
+export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability
+export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability
+
+# The monitor jobs are not yet supported for JEDIATMVAR.
+if [[ ${DO_JEDIATMVAR} = "YES" ]]; then
+  export DO_VERFOZN="NO"  # Ozone data assimilation monitoring
+  export DO_VERFRAD="NO"  # Radiance data assimilation monitoring
+  export DO_VMINMON="NO"  # GSI minimization monitoring
+fi
+
+# If starting ICs that are not at cycle hour
+export REPLAY_ICS="NO"
+export OFFSET_START_HOUR=0
+
+# Number of regional collectives to create soundings for
+export NUM_SND_COLLECTIVES=${NUM_SND_COLLECTIVES:-9}
+
+echo "END: config.base"
diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.base.hera b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.base.hera
new file mode 100644
index 0000000000..f779b4ace1
--- /dev/null
+++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.base.hera
@@ -0,0 +1,485 @@
+#! /usr/bin/env bash
+
+########## config.base ##########
+# Common to all steps
+
+echo "BEGIN: config.base"
+
+# Machine environment
+export machine="@MACHINE@"
+
+# EMC parallel or NCO production
+export RUN_ENVIR="emc"
+
+# Account, queue, etc.
+export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" +export RESERVATION="@RESERVATION@" +export CLUSTERS="@CLUSTERS@" + +# Project to use in mass store: +export HPSS_PROJECT="@HPSS_PROJECT@" + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export EXECgfs="${HOMEgfs}/exec" +export FIXgfs="${HOMEgfs}/fix" +export PARMgfs="${HOMEgfs}/parm" +export SCRgfs="${HOMEgfs}/scripts" +export USHgfs="${HOMEgfs}/ush" + +export FIXam="${FIXgfs}/am" +export FIXaer="${FIXgfs}/aer" +export FIXcpl="${FIXgfs}/cpl" +export FIXlut="${FIXgfs}/lut" +export FIXcice="${FIXgfs}/cice" +export FIXmom="${FIXgfs}/mom6" +export FIXreg2grb2="${FIXgfs}/reg2grb2" +export FIXgdas="${FIXgfs}/gdas" + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" +export BASE_CPLIC="@BASE_CPLIC@" + +# Gempak from external models +# Default locations are to dummy locations for testing +export COMINecmwf=@COMINecmwf@ +export COMINnam=@COMINnam@ +export COMINukmet=@COMINukmet@ + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. +export DO_GOES="@DO_GOES@" # GOES products +export DO_BUFRSND="@DO_BUFRSND@" # BUFR sounding products +export DO_GEMPAK="@DO_GEMPAK@" # GEMPAK products +export DO_AWIPS="@DO_AWIPS@" # AWIPS products +export DO_NPOESS="@DO_NPOESS@" # NPOESS products +export DO_TRACKER="NO" # Hurricane track verification ## JKH +export DO_GENESIS="NO" # Cyclone genesis verification ## JKH +export DO_GENESIS_FSU="@DO_GENESIS_FSU@" # Cyclone genesis verification (FSU) +export DO_VERFOZN="YES" # Ozone data assimilation monitoring +export DO_VERFRAD="YES" # Radiance data assimilation monitoring +export DO_VMINMON="YES" # GSI minimization monitoring +export DO_MOS="NO" # GFS Model Output Statistics - Only supported on WCOSS2 + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. 
Should update arch.sh to +# use RUNMOS flag +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT:-}/obsproc/v${obsproc_run_ver:-1.1.2}" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export DEBUG_POSTSCRIPT="NO" # PBS only; sets debug=true +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="${NETCDF:-${netcdf_c_ROOT:-}}/bin/ncdump" +export NCLEN="${HOMEgfs}/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@COMROOT@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${PDY}${cyc}" -ge "2019092100" && "${PDY}${cyc}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-"gfs"} # RUN is defined in the job-card (ecf) + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +# shellcheck disable=SC2016 +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT:-}/fakedbn} + +# APP settings +export APP=@APP@ + +shopt -s extglob +# Adjust APP based on RUN +case "${RUN}" in + enkf*) # Turn off aerosols and waves + APP="${APP/%+([WA])}" + ;; + *) # Keep app unchanged + ;; +esac +shopt -u extglob + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export DO_PREP_OBS_AERO="NO" +export AERO_FCST_RUN="" # When to run aerosol forecast: gdas, gfs, or both +export AERO_ANL_RUN="" # When to run aerosol analysis: gdas, gfs, or both +export WAVE_RUN="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export DOIBP_WAV="NO" # Option to create point outputs from input boundary points +export FRAC_GRID=".true." 
+export DO_NEST="NO" # Whether to run a global-nested domain +if [[ "${DO_NEST:-NO}" == "YES" ]] ; then + export ntiles=7 + export NEST_OUTPUT_GRID="regional_latlon" + export FIXugwd="${FIXgfs}/ugwd_nest" + export FIXorog="${FIXgfs}/orog_nest" +else + export ntiles=6 + export FIXugwd="${FIXgfs}/ugwd" + export FIXorog="${FIXgfs}/orog" +fi + +# Set operational resolution +export OPS_RES="C768" # Do not change # TODO: Why is this needed and where is it used? + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENS="@CASEENS@" +export OCNRES="@OCNRES@" +export ICERES="${OCNRES}" + +# These are the currently recommended grid-combinations +case "${CASE}" in + "C48") + export waveGRD='uglo_100km' + ;; + "C96" | "C192") + export waveGRD='uglo_100km' + ;; + "C384") + export waveGRD='uglo_100km' + ;; + "C768" | "C1152") + export waveGRD='uglo_m1g16' + ;; + *) + echo "FATAL ERROR: Unrecognized CASE ${CASE}, ABORT!" + exit 1 + ;; +esac + +case "${APP}" in + ATM) + ;; + ATMA) + export DO_AERO="YES" + export AERO_ANL_RUN="both" + export AERO_FCST_RUN="gdas" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_RUN="both" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export AERO_ANL_RUN="both" + export AERO_FCST_RUN="gdas" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_RUN="both" + fi + ;; + *) + echo "Unrecognized APP: '${APP}'" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${RUN}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${RUN}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if (DOHYBVAR set to NO and l4densvar set to false) +export FHOUT_OCN=3 +export FHOUT_ICE=3 + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="@EUPD_CYC@" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 +export FHMAX_GFS=@FHMAX_GFS@ +export FHOUT_GFS=6 # 3 for ops +export FHMAX_HF_GFS=@FHMAX_HF_GFS@ +export FHOUT_HF_GFS=1 +export FHOUT_OCN_GFS=6 +export FHOUT_ICE_GFS=6 +export FHMIN_WAV=0 +export FHOUT_WAV=3 +export FHMAX_HF_WAV=120 +export FHOUT_HF_WAV=1 +export FHMAX_WAV=${FHMAX:-9} +export FHMAX_WAV_GFS=${FHMAX_GFS} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +# TODO: Change gempak to use standard out variables (#2348) +export ILPOST=${FHOUT_HF_GFS} # gempak output frequency up to F120 +if (( FHMAX_HF_GFS < 120 )); then + export ILPOST=${FHOUT_GFS} +fi + +# Limit bounds of goes processing +export FHMAX_GOES=180 +export FHOUT_GOES=3 +if (( FHMAX_GOES > FHMAX_GFS )); then + export FHMAX_GOES=${FHMAX_GFS} +fi + +# GFS restart interval in hours +#JKHexport restart_interval_gfs=12 +#KYWexport restart_interval_gfs=-1 ## JKH +export restart_interval_gfs=$FHMAX_GFS +# NOTE: Do not set this to zero. Instead set it to $FHMAX_GFS +# TODO: Remove this variable from config.base and reference from config.fcst +# TODO: rework logic in config.wave and push it to parsing_nameslist_WW3.sh where it is actually used + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." 
# WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="@DOIAU@" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=${IAUFHRS%%,*} +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson, 6-WSM6, 10-MG, 11-GFDL +export imp_physics=8 + +# Shared parameters +# DA engine +export DO_JEDIATMVAR="@DO_JEDIATMVAR@" +export DO_JEDIATMENS="@DO_JEDIATMENS@" +export DO_JEDIOCNVAR="@DO_JEDIOCNVAR@" +export DO_JEDISNOWDA="@DO_JEDISNOWDA@" +export DO_MERGENSST="@DO_MERGENSST@" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENS=@NMEM_ENS@ +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." +export DO_CALC_INCREMENT="NO" + +# Early-cycle EnKF parameters +export NMEM_ENS_GFS=30 +export NMEM_ENS_GFS_OFFSET=20 +export DO_CALC_INCREMENT_ENKF_GFS="NO" + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=@FHMAX_ENKF_GFS@ + export FHOUT_ENKF_GFS=3 + if [[ ${l4densvar} = ".true." ]]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Generate post-processing ensemble spread files +export ENKF_SPREAD="YES" + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${PDY}${cyc}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 + export IAUFHRS="6" +fi + +if [[ "${DOIAU_ENKF}" = "NO" ]]; then export IAUFHRS_ENKF="6"; fi + +# Determine restart intervals +# For IAU, write restarts at beginning of window also +if [[ "${DOIAU_ENKF:-}" == "YES" ]]; then + export restart_interval_enkfgdas="3" +else + export restart_interval_enkfgdas="6" +fi + +export restart_interval_enkfgfs=${restart_interval_enkfgdas} + +if [[ "${DOIAU}" == "YES" ]]; then + export restart_interval_gdas="3" +else + export restart_interval_gdas="6" +fi + +export GSI_SOILANAL=@GSI_SOILANAL@ + +# Turn on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc','rwmr_inc','snmr_inc','grle_inc'" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc','rwmr_inc','snmr_inc','grle_inc'" +export INCVARS_EFOLD="5" + +# Switch to generate netcdf or binary diagnostic files.
If not specified, +# the scripts default to binary diagnostic files. Set diagnostic file +# variables here since they are used in the DA job +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="@DO_METP@" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package ## JKH +export DO_VRFY_OCEANDA="@DO_VRFY_OCEANDA@" # Run SOCA Ocean and Seaice DA verification tasks + +#--online archive of netcdf files for fit2obs verification +export FHMAX_FITS=132 +[[ "${FHMAX_FITS}" -gt "${FHMAX_GFS}" ]] && export FHMAX_FITS=${FHMAX_GFS} + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +# The monitor jobs are not yet supported for JEDIATMVAR. +if [[ ${DO_JEDIATMVAR} = "YES" ]]; then + export DO_VERFOZN="NO" # Ozone data assimilation monitoring + export DO_VERFRAD="NO" # Radiance data assimilation monitoring + export DO_VMINMON="NO" # GSI minimization monitoring +fi + +# If starting from ICs that are not at the cycle hour +export REPLAY_ICS="NO" +export OFFSET_START_HOUR=0 + +# Number of regional collectives to create soundings for +export NUM_SND_COLLECTIVES=${NUM_SND_COLLECTIVES:-9} + +echo "END: config.base" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.cleanup b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.cleanup index 1908c91bb5..44e2690f65 100644 --- a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.cleanup +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.cleanup @@ -12,6 +12,11 @@ export CLEANUP_COM="YES" # NO=retain ROTDIR. YES default in cleanup.sh export RMOLDSTD=144 export RMOLDEND=24 +if [[ "${DO_GEMPAK}" == "YES" ]]; then + export RMOLDSTD=346 + export RMOLDEND=222 +fi + # Specify the list of files to exclude from the first stage of cleanup # Because arrays cannot be exported, list is a single string of comma- # separated values. This string is split to form an array at runtime. @@ -22,4 +27,4 @@ case ${RUN} in esac export exclude_string -echo "END: config.cleanup" \ No newline at end of file +echo "END: config.cleanup" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.com b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.com index db648b5866..222ffdae95 100644 --- a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.com +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.com @@ -5,11 +5,11 @@ echo "BEGIN: config.com" # These are just templates. All templates must use single quotations so variable # expansion does not occur when this file is sourced. Substitution happens later -# during runtime. It is recommended to use the helper function `generate_com()`, +# during runtime. It is recommended to use the helper function `declare_from_tmpl()`, # to do this substitution, which is defined in `ush/preamble.sh`.
# - # Syntax for generate_com(): - # generate_com [-rx] $var1[:$tmpl1] [$var2[:$tmpl2]] [...]] + # Syntax for declare_from_tmpl(): + # declare_from_tmpl [-rx] $var1[:$tmpl1] [$var2[:$tmpl2]] [...]] # # options: # -r: Make variable read-only (same as `declare -r`) @@ -20,14 +20,14 @@ echo "BEGIN: config.com" # # Examples: # # Current cycle and RUN -# YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS +# YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COM_ATMOS_ANALYSIS # # # Previous cycle and gdas -# RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \ +# RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} declare_from_tmpl -rx \ # COM_ATMOS_HISTORY_PREV:COM_ATMOS_HISTORY_TMPL # # # Current cycle and COM for first member -# MEMDIR='mem001' YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_HISTORY +# MEMDIR='mem001' YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COM_ATMOS_HISTORY # # @@ -49,10 +49,12 @@ COM_BASE='${ROTDIR}/${RUN}.${YMD}/${HH}/${MEMDIR}' declare -rx COM_TOP_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}' declare -rx COM_CONF_TMPL=${COM_BASE}'/conf' +declare -rx COM_OBS_JEDI=${COM_BASE}'/obs_jedi' + declare -rx COM_ATMOS_INPUT_TMPL=${COM_BASE}'/model_data/atmos/input' declare -rx COM_ATMOS_RESTART_TMPL=${COM_BASE}'/model_data/atmos/restart' declare -rx COM_ATMOS_ANALYSIS_TMPL=${COM_BASE}'/analysis/atmos' -declare -rx COM_LAND_ANALYSIS_TMPL=${COM_BASE}'/analysis/land' +declare -rx COM_SNOW_ANALYSIS_TMPL=${COM_BASE}'/analysis/snow' declare -rx COM_ATMOS_HISTORY_TMPL=${COM_BASE}'/model_data/atmos/history' declare -rx COM_ATMOS_MASTER_TMPL=${COM_BASE}'/model_data/atmos/master' declare -rx COM_ATMOS_GRIB_TMPL=${COM_BASE}'/products/atmos/grib2' @@ -80,15 +82,19 @@ declare -rx COM_OCEAN_HISTORY_TMPL=${COM_BASE}'/model_data/ocean/history' declare -rx COM_OCEAN_RESTART_TMPL=${COM_BASE}'/model_data/ocean/restart' declare -rx COM_OCEAN_INPUT_TMPL=${COM_BASE}'/model_data/ocean/input' declare -rx COM_OCEAN_ANALYSIS_TMPL=${COM_BASE}'/analysis/ocean' -declare -rx COM_OCEAN_2D_TMPL=${COM_BASE}'/products/ocean/2D' -declare -rx COM_OCEAN_3D_TMPL=${COM_BASE}'/products/ocean/3D' -declare -rx COM_OCEAN_XSECT_TMPL=${COM_BASE}'/products/ocean/xsect' +declare -rx COM_OCEAN_BMATRIX_TMPL=${COM_BASE}'/bmatrix/ocean' +declare -rx COM_OCEAN_NETCDF_TMPL=${COM_BASE}'/products/ocean/netcdf' declare -rx COM_OCEAN_GRIB_TMPL=${COM_BASE}'/products/ocean/grib2' declare -rx COM_OCEAN_GRIB_GRID_TMPL=${COM_OCEAN_GRIB_TMPL}'/${GRID}' +declare -rx COM_ICE_ANALYSIS_TMPL=${COM_BASE}'/analysis/ice' +declare -rx COM_ICE_BMATRIX_TMPL=${COM_BASE}'/bmatrix/ice' declare -rx COM_ICE_INPUT_TMPL=${COM_BASE}'/model_data/ice/input' declare -rx COM_ICE_HISTORY_TMPL=${COM_BASE}'/model_data/ice/history' declare -rx COM_ICE_RESTART_TMPL=${COM_BASE}'/model_data/ice/restart' +declare -rx COM_ICE_NETCDF_TMPL=${COM_BASE}'/products/ice/netcdf' +declare -rx COM_ICE_GRIB_TMPL=${COM_BASE}'/products/ice/grib2' +declare -rx COM_ICE_GRIB_GRID_TMPL=${COM_ICE_GRIB_TMPL}'/${GRID}' declare -rx COM_CHEM_HISTORY_TMPL=${COM_BASE}'/model_data/chem/history' declare -rx COM_CHEM_ANALYSIS_TMPL=${COM_BASE}'/analysis/chem' diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.earc b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.earc index de73a93731..00a2fa95bd 100644 --- a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.earc +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.earc @@ -8,7 +8,25 @@ echo "BEGIN: config.earc" # Get task specific resources .
$EXPDIR/config.resources earc -export NMEM_EARCGRP=10 +# Set the number of ensemble members to archive per earc job +case "${CASE_ENS}" in + "C48" | "C96") + export NMEM_EARCGRP=80 + ;; + "C192") + export NMEM_EARCGRP=20 + ;; + "C384" | "C768") + export NMEM_EARCGRP=10 + ;; + "C1152") + export NMEM_EARCGRP=4 + ;; + *) + echo "FATAL ERROR: Unknown ensemble resolution ${CASE_ENS}, ABORT!" + exit 1 + ;; +esac #--starting and ending hours of previous cycles to be removed from rotating directory export RMOLDSTD_ENKF=144 diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.efcs b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.efcs index 283ec3ab7e..1837cf0619 100644 --- a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.efcs +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.efcs @@ -5,14 +5,10 @@ echo "BEGIN: config.efcs" -# Turn off components in ensemble via _ENKF, or use setting from deterministic -export DO_AERO=${DO_AERO_ENKF:-${DO_AERO:-"NO"}} -export DO_OCN=${DO_OCN_ENKF:-${DO_OCN:-"NO"}} -export DO_ICE=${DO_ICE_ENKF:-${DO_ICE:-"NO"}} -export DO_WAVE=${DO_WAVE_ENKF:-${DO_WAVE:-"NO"}} +export CASE="${CASE_ENS}" # Source model specific information that is resolution dependent -string="--fv3 ${CASE_ENS}" +string="--fv3 ${CASE}" # Ocean/Ice/Waves ensemble configurations are identical to deterministic member [[ "${DO_OCN}" == "YES" ]] && string="${string} --mom6 ${OCNRES}" [[ "${DO_ICE}" == "YES" ]] && string="${string} --cice6 ${ICERES}" @@ -25,15 +21,23 @@ source "${EXPDIR}/config.ufs" ${string} # Get task specific resources . "${EXPDIR}/config.resources" efcs +# nggps_diag_nml +export FHOUT=${FHOUT_ENKF:-3} +if [[ ${RUN} == "enkfgfs" ]]; then + export FHOUT=${FHOUT_ENKF_GFS:-${FHOUT}} +fi + +# model_configure +export FHMIN=${FHMIN_ENKF:-3} +export FHMAX=${FHMAX_ENKF:-9} +if [[ ${RUN} == "enkfgfs" ]]; then + export FHMAX=${FHMAX_ENKF_GFS:-${FHMAX}} +fi + # Use serial I/O for ensemble (lustre?) export OUTPUT_FILETYPE_ATM="netcdf" export OUTPUT_FILETYPE_SFC="netcdf" -# Number of enkf members per fcst job -export NMEM_EFCSGRP=2 -export NMEM_EFCSGRP_GFS=1 -export RERUN_EFCSGRP="NO" - # Turn off inline UPP for EnKF forecast export WRITE_DOPOST=".false." @@ -56,17 +60,35 @@ export SPPT_LSCALE=500000. export SPPT_LOGIT=".true." export SPPT_SFCLIMIT=".true." -if [[ "${QUILTING}" = ".true." ]] && [[ "${OUTPUT_GRID}" = "gaussian_grid" ]]; then - export DIAG_TABLE="${HOMEgfs}/parm/ufs/fv3/diag_table_da" +if [[ "${QUILTING}" == ".true." ]] && [[ "${OUTPUT_GRID}" == "gaussian_grid" ]]; then + export DIAG_TABLE="${PARMgfs}/ufs/fv3/diag_table_da" else - export DIAG_TABLE="${HOMEgfs}/parm/ufs/fv3/diag_table_da_orig" + export DIAG_TABLE="${PARMgfs}/ufs/fv3/diag_table_da_orig" fi -# For IAU, write restarts at beginning of window also -if [[ "${DOIAU_ENKF:-}" = "YES" ]]; then - export restart_interval="3" -else - export restart_interval="6" +# Model config option for Ensemble +# export TYPE=nh # choices: nh, hydro +# export MONO=non-mono # choices: mono, non-mono + +# gfs_physics_nml +export FHSWR=3600. +export FHLWR=3600. +export IEMS=1 +export ISOL=2 +export ICO2=2 +export dspheat=".true." +export shal_cnv=".true." 
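The NMEM_EARCGRP case in config.earc above sets how many ensemble members each earc job archives; assuming the workflow splits the NMEM_ENS members into ceiling(NMEM_ENS / NMEM_EARCGRP) archive groups, a quick sketch of the implied job counts (the 80-member ensemble size is illustrative, not taken from this diff):

#!/usr/bin/env bash
# Illustrative only: earc groups implied by the NMEM_EARCGRP values above
# for an assumed 80-member ensemble (ceiling division via integer math).
NMEM_ENS=80
for NMEM_EARCGRP in 80 20 10 4; do
  ngroups=$(( (NMEM_ENS + NMEM_EARCGRP - 1) / NMEM_EARCGRP ))
  echo "NMEM_EARCGRP=${NMEM_EARCGRP}: ${ngroups} earc group(s) for ${NMEM_ENS} members"
done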
+export FHZER=6 + +# Set PREFIX_ATMINC to r when recentering is on +if [[ ${RECENTER_ENKF:-"YES"} == "YES" ]]; then + export PREFIX_ATMINC="r" +fi + +# Set restart interval to enable restarting forecasts after failures +export restart_interval=${restart_interval_enkfgdas:-6} +if [[ ${RUN} == "enkfgfs" ]]; then + export restart_interval=${restart_interval_enkfgfs:-12} fi echo "END: config.efcs" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.eobs b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.eobs index 21f982addc..7b7823e764 100644 --- a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.eobs +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.eobs @@ -11,12 +11,11 @@ echo "BEGIN: config.eobs" # Number of enkf members per innovation job export NMEM_EOMGGRP=8 export RERUN_EOMGGRP="YES" -export npe_gsi=$npe_eobs # GSI namelist options related to observer for EnKF export OBSINPUT_INVOBS="dmesh(1)=225.0,dmesh(2)=225.0,dmesh(3)=225.0,dmesh(4)=100.0" export OBSQC_INVOBS="tcp_width=60.0,tcp_ermin=2.0,tcp_ermax=12.0" -if [ $LEVS = "128" ]; then +if (( LEVS == 128 )); then export GRIDOPTS_INVOBS="nlayers(63)=1,nlayers(64)=1," export SETUP_INVOBS="gpstop=55,nsig_ext=56," fi diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.epos b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.epos index 8026a2ba2e..f1da929b62 100644 --- a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.epos +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.epos @@ -14,7 +14,4 @@ if [ $l4densvar = ".false." ]; then export NEPOSGRP=3 fi -# Generate ensemble spread files -export ENKF_SPREAD="YES" - echo "END: config.epos" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.esfc b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.esfc index 2bb3d48bb4..684dea4ee3 100644 --- a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.esfc +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.esfc @@ -12,8 +12,19 @@ echo "BEGIN: config.esfc" # Set DOSFCANL_ENKF=NO to prevent creation of sfcanl at # center of analysis window. -if [ $DOIAU_ENKF = "YES" ]; then +if [[ ${DOIAU_ENKF} = "YES" ]]; then export DOSFCANL_ENKF="NO" fi +# Turn off NST in JEDIATMENS +if [[ "${DO_JEDIATMENS}" == "YES" ]]; then + export DONST="NO" +fi + +# set up soil analysis +if [[ ${GSI_SOILANAL} = "YES" ]]; then + export DO_LNDINC=".true." + export LND_SOI_FILE="lnd_incr" +fi + echo "END: config.esfc" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.eupd b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.eupd index 1ac90d2b75..2ff48240ae 100644 --- a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.eupd +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.eupd @@ -8,7 +8,7 @@ echo "BEGIN: config.eupd" # Get task specific resources . $EXPDIR/config.resources eupd -export npe_enkf=$npe_eupd +export ntasks_enkf=${ntasks} # Use NAM_ENKF below for serial EnKF ##export NAM_ENKF="analpertwtnh=0.9,analpertwtsh=0.9,analpertwttr=0.9" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.fbwind b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.fbwind new file mode 100644 index 0000000000..49fdb9e7b4 --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.fbwind @@ -0,0 +1,11 @@ +#!
/usr/bin/env bash + +########## config.fbwind ########## +# GFS fbwind step specific + +echo "BEGIN: config.fbwind" + +# Get task specific resources +source "${EXPDIR}/config.resources" fbwind + +echo "END: config.fbwind" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.fcst b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.fcst index ee1077c1b3..44f137eace 100644 --- a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.fcst +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.fcst @@ -5,12 +5,21 @@ echo "BEGIN: config.fcst" -# Turn off waves if not used for this CDUMP -case ${WAVE_CDUMP} in - both | "${CDUMP/enkf}" ) ;; # Don't change +export USE_ESMF_THREADING="YES" # Toggle to use ESMF-managed threading or traditional threading in UFSWM +export COPY_FINAL_RESTARTS="NO" # Toggle to copy restarts from the end of GFS/GEFS Run (GDAS is handled separately) + +# Turn off waves if not used for this RUN +case ${WAVE_RUN} in + both | "${RUN/enkf}" ) ;; # Don't change *) DO_WAVE="NO" ;; # Turn waves off esac +# Turn off aerosols if not used for this RUN +case ${AERO_FCST_RUN} in + both | "${RUN/enkf}" ) ;; # Don't change + *) DO_AERO="NO" ;; # Turn aerosols off +esac + # Source model specific information that is resolution dependent string="--fv3 ${CASE}" [[ "${DO_OCN}" == "YES" ]] && string="${string} --mom6 ${OCNRES}" @@ -21,6 +30,26 @@ string="--fv3 ${CASE}" # shellcheck disable=SC2086 source "${EXPDIR}/config.ufs" ${string} +# Forecast length for GFS forecast +case ${RUN} in + *gfs) + # shellcheck disable=SC2153 + export FHMAX=${FHMAX_GFS} + # shellcheck disable=SC2153 + export FHOUT=${FHOUT_GFS} + export FHMAX_HF=${FHMAX_HF_GFS} + export FHOUT_HF=${FHOUT_HF_GFS} + export FHOUT_OCN=${FHOUT_OCN_GFS} + export FHOUT_ICE=${FHOUT_ICE_GFS} + ;; + *gdas) + export FHMAX_HF=0 + export FHOUT_HF=0 + ;; + *) + echo "FATAL ERROR: Unsupported RUN '${RUN}'" + exit 1 +esac # Get task specific resources source "${EXPDIR}/config.resources" fcst @@ -37,16 +66,14 @@ export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_O ####################################################################### -export FORECASTSH="${HOMEgfs}/scripts/exglobal_forecast.sh" -#export FORECASTSH="${HOMEgfs}/scripts/exglobal_forecast.py" # Temp. while this is worked on -export FCSTEXECDIR="${HOMEgfs}/exec" +export FORECASTSH="${SCRgfs}/exglobal_forecast.sh" +#export FORECASTSH="${SCRgfs}/exglobal_forecast.py" # Temp. while this is worked on export FCSTEXEC="ufs_model.x" ####################################################################### # Model configuration export TYPE="nh" export MONO="non-mono" -#JKHexport range_warn=".false." ## JKH # Use stratosphere h2o physics export h2o_phys=".true." @@ -93,36 +120,36 @@ if (( gwd_opt == 2 )); then export do_ugwp_v0_orog_only=".false." export do_ugwp_v0_nst_only=".false." export do_gsl_drag_ls_bl=".true." - export do_gsl_drag_ss=".true." + export do_gsl_drag_ss=".true." #KYW (Checked with Mike) + #export do_gsl_drag_ss=".false." export do_gsl_drag_tofd=".true." + export do_gwd_opt_psl=".false." + #export do_gwd_opt_psl=".true." #KYW (Checked with Mike) export do_ugwp_v1_orog_only=".false." + export alpha_fd=35.0 #KYW (Checked with Mike) launch_level=$(echo "${LEVS}/2.35" |bc) export launch_level - if [[ ${do_gsl_drag_ls_bl} == ".true." ]]; then - export cdmbgwd=${cdmbgwd_gsl} - fi fi # Sponge layer settings -export tau=0. -export rf_cutoff=10.
export d2_bg_k1=0.20 export d2_bg_k2=0.04 export dz_min=6 export n_sponge=42 -if (( LEVS == 128 )) && [[ "${CDUMP}" =~ "gdas" ]]; then - export tau=5.0 - export rf_cutoff=1.0e3 - export d2_bg_k1=0.20 - export d2_bg_k2=0.0 -fi -# PBL/turbulence schemes +# PBL/turbulence schemes export hybedmf=".false." +if [[ "${CCPP_SUITE}" == "FV3_global_nest"* ]]; then + export satmedmf=".false." +else + export satmedmf=".true." +fi +export isatmedmf=1 +#JKH if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_ugwpv1_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_ugwpv1_c3_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_c3_mynn" ]] ; then export satmedmf=".false." export isatmedmf=0 - export shal_cnv=".false." + export CPP export do_mynnedmf=".true." export do_mynnsfclay=".false." export icloud_bl=1 @@ -130,20 +157,21 @@ if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_ugwpv1_mynn" || "$CCPP_SUITE" == "FV3_GFS export bl_mynn_edmf=1 export bl_mynn_edmf_mom=1 export lcnorm=".true." ## JKH -else - export satmedmf=".true." - export isatmedmf=1 fi tbf="" if [[ "${satmedmf}" == ".true." ]]; then tbf="_satmedmf" ; fi -#Convection schemes +#Convection schemes ### JKH - affects field table name export progsigma=".true." tbp="" if [[ "${progsigma}" == ".true." ]]; then tbp="_progsigma" ; fi # Radiation options -export IAER=1011 ; #spectral band mapping method for aerosol optical properties +if [[ "${DO_AERO}" == "YES" ]]; then + export IAER=2011 # spectral band mapping method for aerosol optical properties +else + export IAER=1011 +fi export iovr_lw=3 ; #de-correlation length cloud overlap method (Barker, 2008) export iovr_sw=3 ; #de-correlation length cloud overlap method (Barker, 2008) export iovr=3 ; #de-correlation length cloud overlap method (Barker, 2008) @@ -161,21 +189,22 @@ export doGP_lwscat=.false. export iopt_sfc="3" export iopt_trs="2" +#JKH # Convection Options: 2-SASAS, 3-GF export progsigma=".true." if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_c3_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_ugwpv1_c3_mynn" ]] ; then export imfdeepcnv=5 export imfshalcnv=-1 ## JKH - no shallow GF -elif [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_ugwpv1_c3" ]] ; then +elif [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_ugwpv1_c3" ]] ; then export imfdeepcnv=5 - export imfshalcnv=5 -elif [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_c3" ]] ; then + export imfshalcnv=5 +elif [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_c3" ]] ; then export progsigma=.false. export imfdeepcnv=5 - export imfshalcnv=5 -elif [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf" ]] ; then + export imfshalcnv=5 +elif [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf" ]] ; then export imfdeepcnv=3 - export imfshalcnv=3 + export imfshalcnv=3 else export imfdeepcnv=2 if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_ugwpv1_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" ]] ; then @@ -184,11 +213,7 @@ else export imfshalcnv=2 fi fi - -#Convection schemes ### JKH - affects field table name -tbp="" -if [ "$progsigma" = ".true." ]; then tbp="_progsigma" ; fi - +#JKH # Microphysics configuration export dnats=0 @@ -199,12 +224,12 @@ export random_clds=".true."
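The tbf/tbp markers built above are plain string suffixes that select the field table file, as in the imp_physics case that follows; a standalone sketch of the composition for the aerosol-aware Thompson path (the PARMgfs value is a placeholder, not a real path):

#!/usr/bin/env bash
# Sketch: field table name produced by the progsigma toggle above for the
# aerosol-aware Thompson branch below.
progsigma=".true."
tbp=""
if [[ "${progsigma}" == ".true." ]]; then tbp="_progsigma"; fi
PARMgfs="/path/to/parm"   # placeholder for illustration
echo "${PARMgfs}/ufs/fv3/field_table_thompson_aero_tke${tbp}"
# -> /path/to/parm/ufs/fv3/field_table_thompson_aero_tke_progsigma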
case ${imp_physics} in 99) # ZhaoCarr export ncld=1 - export FIELD_TABLE="${HOMEgfs}/parm/ufs/fv3/field_table_zhaocarr${tbf}${tbp}" + export FIELD_TABLE="${PARMgfs}/ufs/fv3/field_table_zhaocarr${tbf}${tbp}" export nwat=2 ;; 6) # WSM6 export ncld=2 - export FIELD_TABLE="${HOMEgfs}/parm/ufs/fv3/field_table_wsm6${tbf}${tbp}" + export FIELD_TABLE="${PARMgfs}/ufs/fv3/field_table_wsm6${tbf}${tbp}" export nwat=6 ;; 8) # Thompson @@ -217,31 +242,46 @@ case ${imp_physics} in export lradar=".true." export ttendlim="-999" export sedi_semi=.true. + if [[ "${sedi_semi}" == .true. ]]; then export dt_inner=${DELTIM} ; fi export decfl=10 - - if [[ "${CCPP_SUITE}" == "FV3_GFS_v17_p8_ugwpv1_mynn" || "${CCPP_SUITE}" == "FV3_GFS_v17_p8_ugwpv1_c3_mynn" || "${CCPP_SUITE}" == "FV3_GFS_v17_p8_mynn" || "${CCPP_SUITE}" == "FV3_GFS_v17_p8_c3_mynn" || +#JKH + if [[ "${CCPP_SUITE}" == "FV3_GFS_v17_p8_ugwpv1_mynn" || "${CCPP_SUITE}" == "FV3_GFS_v17_p8_ugwpv1_c3_mynn" || "${CCPP_SUITE}" == "FV3_GFS_v17_p8_mynn" || "${CCPP_SUITE}" == "FV3_GFS_v17_p8_c3_mynn" || "${CCPP_SUITE}" == "FV3_GFS_v17_p8_thompson" ]] ; then #JKH set dt_inner to 50 if running aerosol-aware Thompson export dt_inner=50 export ltaerosol=".true." - export FIELD_TABLE="${HOMEgfs}/parm/ufs/fv3/field_table_thompson_aero_tke${tbp}" + export FIELD_TABLE="${PARMgfs}/ufs/fv3/field_table_thompson_aero_tke${tbp}" else export dt_inner=$((DELTIM/2)) if [[ "${sedi_semi}" == .true. ]]; then export dt_inner=${DELTIM} ; fi export ltaerosol=".false." - export FIELD_TABLE="${HOMEgfs}/parm/ufs/fv3/field_table_thompson_noaero_tke${tbp}" + export FIELD_TABLE="${PARMgfs}/ufs/fv3/field_table_thompson_noaero_tke${tbp}" fi - +#JKH export hord_mt_nh_nonmono=5 export hord_xx_nh_nonmono=5 export vtdm4_nh_nonmono=0.02 export nord=2 export dddmp=0.1 export d4_bg=0.12 + + if [[ "${CCPP_SUITE}" == "FV3_global_nest"* ]]; then + export FIELD_TABLE="${PARMgfs}/ufs/fv3/field_table_thompson_aero_tke${tbp}" + export ltaerosol=".true." + export lcnorm=".true." + export do_mynnedmf=".true." + export do_mynnsfclay=".true." + export imfshalcnv=5 + export imfdeepcnv=5 + export betascu=0.5 + export betamcu=1.5 + export betadcu=8.0 + fi + ;; 11) # GFDL export ncld=5 - export FIELD_TABLE="${HOMEgfs}/parm/ufs/fv3/field_table_gfdl${tbf}${tbp}" + export FIELD_TABLE="${PARMgfs}/ufs/fv3/field_table_gfdl${tbf}${tbp}" export nwat=6 export dnats=1 export cal_pre=".false." @@ -267,6 +307,9 @@ export DO_SKEB=${DO_SKEB:-"NO"} export DO_SHUM=${DO_SHUM:-"NO"} export DO_LAND_PERT=${DO_LAND_PERT:-"NO"} export DO_CA=${DO_CA:-"YES"} +if [[ "${DO_NEST:-NO}" == "YES" ]] ; then + export DO_CA="NO" # CA does not work with nesting. 
+fi #coupling settings export cplmode="ufs.frac" @@ -283,38 +326,21 @@ export FSICL="0" export FSICS="0" #--------------------------------------------------------------------- - -# ideflate: netcdf zlib lossless compression (0-9): 0 no compression -# nbits: netcdf lossy compression level (0-32): 0 lossless -export ideflate=1 -export nbits=14 -export ishuffle=0 -# compression for RESTART files written by FMS -export shuffle=1 -export deflate_level=1 - -#--------------------------------------------------------------------- -# Disable the use of coupler.res; get model start time from model_configure -export USE_COUPLER_RES="NO" - -if [[ "${CDUMP}" =~ "gdas" ]] ; then # GDAS cycle specific parameters +if [[ "${RUN}" =~ "gdas" ]] ; then # GDAS cycle specific parameters # Variables used in DA cycling - export DIAG_TABLE="${HOMEgfs}/parm/ufs/fv3/diag_table_da" + export DIAG_TABLE="${PARMgfs}/ufs/fv3/diag_table_da" - if [[ "${DOIAU}" == "YES" ]]; then - export restart_interval="3" - else - export restart_interval="6" - fi + # Write gfs restart files to rerun fcst from any break point + export restart_interval=${restart_interval_gdas:-6} # Turn on dry mass adjustment in GDAS export adjust_dry_mass=".true." -elif [[ "${CDUMP}" =~ "gfs" ]] ; then # GFS cycle specific parameters +elif [[ "${RUN}" =~ "gfs" ]] ; then # GFS cycle specific parameters # Write more variables to output - export DIAG_TABLE="${HOMEgfs}/parm/ufs/fv3/diag_table" + export DIAG_TABLE="${PARMgfs}/ufs/fv3/diag_table" # Write gfs restart files to rerun fcst from any break point export restart_interval=${restart_interval_gfs:-12} diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.fit2obs b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.fit2obs index 46baaa9e45..9b3fb87ead 100644 --- a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.fit2obs +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.fit2obs @@ -8,8 +8,8 @@ echo "BEGIN: config.fit2obs" # Get task specific resources . "${EXPDIR}/config.resources" fit2obs -export PRVT=${HOMEgfs}/fix/gsi/prepobs_errtable.global -export HYBLEVS=${HOMEgfs}/fix/am/global_hyblev.l${LEVS}.txt +export PRVT=${FIXgfs}/gsi/prepobs_errtable.global +export HYBLEVS=${FIXgfs}/am/global_hyblev.l${LEVS}.txt export VBACKUP_FITS=24 export OUTPUT_FILETYPE="netcdf" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.ice b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.ice index 205458020f..055bd1e2bb 100644 --- a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.ice +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.ice @@ -6,4 +6,9 @@ echo "BEGIN: config.ice" export min_seaice="1.0e-6" export use_cice_alb=".true." +export MESH_ICE="mesh.mx${ICERES}.nc" + +export CICE_GRID="grid_cice_NEMS_mx${ICERES}.nc" +export CICE_MASK="kmtu_cice_NEMS_mx${ICERES}.nc" + echo "END: config.ice" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.landanl b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.landanl index 8d49f10d76..70ebae7529 100644 --- a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.landanl +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.landanl @@ -28,7 +28,7 @@ export BESTDDEV="30." # Background Error Std. Dev. 
for LETKFOI export APPLY_INCR_EXE="${HOMEgfs}/exec/apply_incr.exe" export APPLY_INCR_NML_TMPL="${HOMEgfs}/sorc/gdas.cd/parm/land/letkfoi/apply_incr_nml.j2" -export io_layout_x=1 -export io_layout_y=1 +export io_layout_x=@IO_LAYOUT_X@ +export io_layout_y=@IO_LAYOUT_Y@ echo "END: config.landanl" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.marineanalletkf b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.marineanalletkf new file mode 100644 index 0000000000..fde3433a13 --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.marineanalletkf @@ -0,0 +1,18 @@ +#!/bin/bash + +########## config.marineanalletkf ########## +# Ocn Analysis specific + +echo "BEGIN: config.marineanalletkf" + +# Get task specific resources +. "${EXPDIR}/config.resources" marineanalletkf + +export MARINE_LETKF_EXEC="${JEDI_BIN}/gdas.x" +export MARINE_LETKF_YAML_TMPL="${PARMgfs}/gdas/soca/letkf/letkf.yaml.j2" +export MARINE_LETKF_STAGE_YAML_TMPL="${PARMgfs}/gdas/soca/letkf/letkf_stage.yaml.j2" + +export GRIDGEN_EXEC="${JEDI_BIN}/gdas_soca_gridgen.x" +export GRIDGEN_YAML="${PARMgfs}/gdas/soca/gridgen/gridgen.yaml" + +echo "END: config.marineanalletkf" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.marinebmat b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.marinebmat new file mode 100644 index 0000000000..d88739dced --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.marinebmat @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.marinebmat ########## +# configuration for the marine B-matrix + +echo "BEGIN: config.marinebmat" + +# Get task specific resources +. "${EXPDIR}/config.resources" marinebmat + +echo "END: config.marinebmat" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.metp b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.metp index c90903f6a5..564966fd6d 100644 --- a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.metp +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.metp @@ -8,6 +8,8 @@ echo "BEGIN: config.metp" # Get task specific resources . "${EXPDIR}/config.resources" metp +export nproc=${tasks_per_node:-1} + export RUN_GRID2GRID_STEP1="YES" # Run grid-to-grid verification using METplus export RUN_GRID2OBS_STEP1="YES" # Run grid-to-obs verification using METplus export RUN_PRECIP_STEP1="YES" # Run precip verification using METplus @@ -21,8 +23,9 @@ export HOMEverif_global=${HOMEgfs}/sorc/verif-global.fd export VERIF_GLOBALSH=${HOMEverif_global}/ush/run_verif_global_in_global_workflow.sh ## INPUT DATA SETTINGS export model=${PSLOT} -export model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export model_file_format="pgbf{lead?fmt=%2H}.${RUN}.{init?fmt=%Y%m%d%H}.grib2" export model_hpss_dir=${ATARDIR}/.. +export model_dir=${ARCDIR}/.. 
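The model_file_format set above is a METplus filename template; METplus itself substitutes the {init?fmt=...} and {lead?fmt=...} tags at run time. A hand-expansion sketch for one forecast hour (the date and lead below are illustrative):

#!/usr/bin/env bash
# Illustrative hand-expansion of the METplus template above; METplus
# performs this substitution internally, the echo only shows the result.
RUN="gfs"
init="2021032400"   # what {init?fmt=%Y%m%d%H} would render
lead="24"           # what {lead?fmt=%2H} would render (2-digit, zero-padded)
echo "pgbf${lead}.${RUN}.${init}.grib2"   # -> pgbf24.gfs.2021032400.grib2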
export get_data_from_hpss="NO" export hpss_walltime="10" ## OUTPUT SETTINGS @@ -38,19 +41,19 @@ export log_MET_output_to_METplus="yes" # GRID-TO-GRID STEP 1: gfsmetpg2g1 export g2g1_type_list="anom pres sfc" export g2g1_anom_truth_name="self_anl" -export g2g1_anom_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_anom_truth_file_format="pgbanl.${RUN}.{valid?fmt=%Y%m%d%H}.grib2" export g2g1_anom_fhr_min=${FHMIN_GFS} export g2g1_anom_fhr_max=${FHMAX_GFS} export g2g1_anom_grid="G002" export g2g1_anom_gather_by="VSDB" export g2g1_pres_truth_name="self_anl" -export g2g1_pres_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_pres_truth_file_format="pgbanl.${RUN}.{valid?fmt=%Y%m%d%H}.grib2" export g2g1_pres_fhr_min=${FHMIN_GFS} export g2g1_pres_fhr_max=${FHMAX_GFS} export g2g1_pres_grid="G002" export g2g1_pres_gather_by="VSDB" export g2g1_sfc_truth_name="self_f00" -export g2g1_sfc_truth_file_format="pgbf00.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_sfc_truth_file_format="pgbf00.${RUN}.{valid?fmt=%Y%m%d%H}.grib2" export g2g1_sfc_fhr_min=${FHMIN_GFS} export g2g1_sfc_fhr_max=${FHMAX_GFS} export g2g1_sfc_grid="G002" @@ -86,7 +89,7 @@ export g2o1_mv_database_desc="Grid-to-obs METplus data for global workflow exper export precip1_type_list="ccpa_accum24hr" export precip1_ccpa_accum24hr_model_bucket="06" export precip1_ccpa_accum24hr_model_var="APCP" -export precip1_ccpa_accum24hr_model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export precip1_ccpa_accum24hr_model_file_format="pgbf{lead?fmt=%2H}.${RUN}.{init?fmt=%Y%m%d%H}.grib2" export precip1_ccpa_accum24hr_fhr_min=${FHMIN_GFS} export precip1_ccpa_accum24hr_fhr_max="180" export precip1_ccpa_accum24hr_grid="G211" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.nsst b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.nsst index db4367b2c0..7bda81f058 100644 --- a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.nsst +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.nsst @@ -10,6 +10,11 @@ echo "BEGIN: config.nsst" # nstf_name(1) : NST_MODEL (NSST Model) : 0 = OFF, 1 = ON but uncoupled, 2 = ON and coupled export NST_MODEL=2 +# Set NST_MODEL for JEDIATMVAR or JEDIATMENS +if [[ "${DO_JEDIATMVAR}" == "YES" || "${DO_JEDIATMENS}" == "YES" ]]; then + export NST_MODEL=1 +fi + # nstf_name(2) : NST_SPINUP : 0 = OFF, 1 = ON, export NST_SPINUP=0 cdate="${PDY}${cyc}" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.oceanice_products b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.oceanice_products new file mode 100644 index 0000000000..9e5c5b1c68 --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.oceanice_products @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.oceanice_products ########## + +echo "BEGIN: config.oceanice_products" + +# Get task specific resources +source "${EXPDIR}/config.resources" oceanice_products + +export OCEANICEPRODUCTS_CONFIG="${PARMgfs}/post/oceanice_products.yaml" + +# No. 
of forecast hours to process in a single job +export NFHRS_PER_GROUP=3 + +echo "END: config.oceanice_products" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.ocn b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.ocn index 37f6a966aa..317a76e58a 100644 --- a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.ocn +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.ocn @@ -2,8 +2,7 @@ echo "BEGIN: config.ocn" -# MOM_input template to use -export MOM_INPUT="MOM_input_template_${OCNRES}" +export MESH_OCN="mesh.mx${OCNRES}.nc" export DO_OCN_SPPT="NO" # In MOM_input, this variable determines OCN_SPPT (OCN_SPPT = True|False) export DO_OCN_PERT_EPBL="NO" # In MOM_input, this variable determines PERT_EPBL (PERT_EPBL = True|False) @@ -17,6 +16,14 @@ if [[ "${DO_JEDIOCNVAR}" == "YES" ]]; then else export ODA_INCUPD="False" fi -export ODA_INCUPD_NHOURS="3.0" # In MOM_input, this is time interval for applying increment + +# Time interval for applying the increment +if [[ "${DOIAU}" == "YES" ]]; then + export ODA_INCUPD_NHOURS="6.0" +else + export ODA_INCUPD_NHOURS="3.0" +fi + + echo "END: config.ocn" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.ocnanal b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.ocnanal index 821efbb11b..5a6e524b5d 100644 --- a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.ocnanal +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.ocnanal @@ -6,21 +6,15 @@ echo "BEGIN: config.ocnanal" export OBS_YAML_DIR="${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/config" -export OBS_LIST=/scratch1/BMC/gsd-fv3/rtruns/UFS-CAMsuite_dev1_21mar24/sorc/gdas.cd/parm/soca/obs/obs_list.yaml -export OBS_YAML="${OBS_LIST}" -export FV3JEDI_STAGE_YAML="${HOMEgfs}/sorc/gdas.cd/test/soca/testinput/dumy.yaml" -export SOCA_INPUT_FIX_DIR=/scratch2/NCEPDEV/ocean/Guillaume.Vernieres/data/static/72x35x25/soca -export SOCA_VARS=tocn,socn,ssh -export SABER_BLOCKS_YAML= +export OBS_LIST=${PARMgfs}/gdas/soca/obs/obs_list.yaml # TODO(GA): doesn't look necessary as is to have +export OBS_YAML="${OBS_LIST}" # OBS_LIST and OBS_YAML pick one or add logic +export SOCA_INPUT_FIX_DIR=${FIXgfs}/gdas/soca/72x35x25/soca export SOCA_NINNER=100 -export CASE_ANL=C48 export DOMAIN_STACK_SIZE=116640000 #TODO: Make the stack size resolution dependent -export JEDI_BIN=${HOMEgfs}/sorc/gdas.cd/build/bin +export SOCA_ENS_BKG_STAGE_YAML_TMPL="${PARMgfs}/gdas/soca/soca_ens_bkg_stage.yaml.j2" +export SOCA_FIX_YAML_TMPL="${PARMgfs}/gdas/soca/soca_fix_stage_${OCNRES}.yaml.j2" -export COMIN_OBS=/scratch2/NCEPDEV/marineda/r2d2-v2-v3 - -# NICAS -export NICAS_RESOL=1 -export NICAS_GRID_SIZE=15000 +export JEDI_BIN=${HOMEgfs}/sorc/gdas.cd/build/bin # TODO(GA): remove once analysis "run" + # and "checkpoint" are refactored echo "END: config.ocnanal" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.ocnanalecen b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.ocnanalecen new file mode 100644 index 0000000000..b64c2bcf62 --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.ocnanalecen @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalecen ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalecen" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalecen + +echo "END: config.ocnanalecen" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.postsnd b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.postsnd index 53d66bf4f6..7ec0ad6321 100644 --- a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.postsnd +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.postsnd @@ -8,7 +8,6 @@ echo "BEGIN: config.postsnd" # Get task specific resources .
$EXPDIR/config.resources postsnd -export POSTSNDSH=$HOMEgfs/jobs/JGFS_ATMOS_POSTSND export ENDHOUR=180 if [[ "$FHMAX_GFS" -lt "$ENDHOUR" ]] ; then export ENDHOUR=$FHMAX_GFS ; fi diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.prep b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.prep index d5ac1925f7..e719d03d1d 100644 --- a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.prep +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.prep @@ -13,33 +13,28 @@ export cdate10=${PDY}${cyc} # Relocation and syndata QC export PROCESS_TROPCY=${PROCESS_TROPCY:-NO} -export TROPCYQCRELOSH="$HOMEgfs/scripts/exglobal_atmos_tropcy_qc_reloc.sh" +export TROPCYQCRELOSH="${SCRgfs}/exglobal_atmos_tropcy_qc_reloc.sh" -export COMINtcvital=${COMINtcvital:-${DMPDIR}/${CDUMP}.${PDY}/${cyc}/atmos} +export COMINtcvital=${COMINtcvital:-${DMPDIR}/${RUN}.${PDY}/${cyc}/atmos} export COMINsyn=${COMINsyn:-$(compath.py ${envir}/com/gfs/${gfs_ver})/syndat} -export HOMERELO=$HOMEgfs -export EXECRELO=${HOMERELO}/exec -export FIXRELO=${HOMERELO}/fix/am -export USHRELO=${HOMERELO}/ush - # Adjust observation error for GFS v16 parallels # # NOTE: Remember to set OBERROR in config.anal as PRVT is set below # # Set default prepobs_errtable.global -export PRVT=$FIXgsi/prepobs_errtable.global +export PRVT=${FIXgfs}/gsi/prepobs_errtable.global # Set prepobs.errtable.global for GFS v16 retrospective parallels if [[ $RUN_ENVIR == "emc" ]]; then if [[ "${PDY}${cyc}" -ge "2019021900" && "${PDY}${cyc}" -lt "2019110706" ]]; then - export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019021900 + export PRVT=${FIXgfs}/gsi/gfsv16_historical/prepobs_errtable.global.2019021900 fi # Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps if [[ "${PDY}${cyc}" -ge "2019110706" && "${PDY}${cyc}" -lt "2020040718" ]]; then - export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019110706 + export PRVT=${FIXgfs}/gsi/gfsv16_historical/prepobs_errtable.global.2019110706 fi # NOTE: diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.prepatmiodaobs b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.prepatmiodaobs index ed9b246120..e29cf67b07 100644 --- a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.prepatmiodaobs +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.prepatmiodaobs @@ -8,7 +8,4 @@ echo "BEGIN: config.prepatmiodaobs" # Get task specific resources . 
"${EXPDIR}/config.resources" prepatmiodaobs -export BUFR2IODASH="${HOMEgfs}/ush/run_bufr2ioda.py" -export IODAPARM="${HOMEgfs}/sorc/gdas.cd/parm/ioda/bufr2ioda" - echo "END: config.prepatmiodaobs" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.prepobsaero b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.prepobsaero new file mode 100644 index 0000000000..f70138991c --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.prepobsaero @@ -0,0 +1,17 @@ +#!/bin/bash -x + +########## config.prepobsaero ########## +# Prepare and thin/superob aerosol observations + +echo "BEGIN: config.prepobsaero" + +# Get task specific resources +source "${EXPDIR}/config.resources" prepobsaero + +export OBSPROCYAML="${PARMgfs}/gdas/aero/obs/lists/gdas_aero_obsproc.yaml.j2" +export OBSPROCEXE="${EXECgfs}/gdas_obsprovider2ioda.x" +export VIIRS_DATA_DIR="/scratch2/NCEPDEV/stmp3/Yaping.Wang/VIIRS/AWS/" +export SENSORS="npp,n20" + + +echo "END: config.prepaeroobs" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.prepoceanobs b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.prepoceanobs index d7c4e37bb9..cbc072a2fa 100644 --- a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.prepoceanobs +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.prepoceanobs @@ -6,14 +6,19 @@ echo "BEGIN: config.prepoceanobs" export OCNOBS2IODAEXEC=${HOMEgfs}/sorc/gdas.cd/build/bin/gdas_obsprovider2ioda.x -export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/config -export OBSPROC_YAML=@OBSPROC_YAML@ -export OBS_LIST=@SOCA_OBS_LIST@ -[[ -n "${OBS_LIST}" ]] || export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/obs_list.yaml +export SOCA_INPUT_FIX_DIR=${FIXgfs}/gdas/soca/72x35x25/soca + +export OBS_YAML_DIR="${PARMgfs}/gdas/soca/obs/config" +export OBSPREP_YAML=${PARMgfs}/gdas/soca/obsprep/obsprep_config.yaml +export OBS_LIST=${PARMgfs}/gdas/soca/obs/obs_list.yaml export OBS_YAML=${OBS_LIST} # ocean analysis needs own dmpdir until standard dmpdir has full ocean obs -export DMPDIR=@DMPDIR@ +export DMPDIR=/scratch1/NCEPDEV/global/glopara/data/experimental_obs + +# For BUFR2IODA json and python scripts +export JSON_TMPL_DIR="${PARMgfs}/gdas/ioda/bufr2ioda" +export BUFR2IODA_PY_DIR="${USHgfs}" # Get task specific resources . "${EXPDIR}/config.resources" prepoceanobs diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.prepsnowobs b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.prepsnowobs new file mode 100644 index 0000000000..60ca16ce9e --- /dev/null +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.prepsnowobs @@ -0,0 +1,21 @@ +#! /usr/bin/env bash + +########## config.prepsnowobs ########## +# Snow Obs Prep specific + +echo "BEGIN: config.prepsnowobs" + +# Get task specific resources +. "${EXPDIR}/config.resources" prepsnowobs + +export GTS_OBS_LIST="${PARMgfs}/gdas/snow/prep/prep_gts.yaml.j2" +export IMS_OBS_LIST="${PARMgfs}/gdas/snow/prep/prep_ims.yaml.j2" + +export BUFR2IODAX="${EXECgfs}/bufr2ioda.x" + +export CALCFIMSEXE="${EXECgfs}/calcfIMS.exe" +export FIMS_NML_TMPL="${PARMgfs}/gdas/snow/prep/fims.nml.j2" + +export IMS2IODACONV="${USHgfs}/imsfv3_scf2ioda.py" + +echo "END: config.prepsnowobs" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.resources b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.resources index c179c33df4..7b737d05f0 100644 --- a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.resources +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.resources @@ -1,29 +1,32 @@ #! /usr/bin/env bash +# shellcheck disable=SC2034 ########## config.resources ########## # Set resource information for job tasks # e.g. 
walltime, node, cores per node, memory etc. +# Note: machine-specific resources should be placed into the appropriate config file: +# config.resources.${machine} -if [[ $# -ne 1 ]]; then +if (( $# != 1 )); then echo "Must specify an input task argument to set resource variables!" echo "argument can be any one of the following:" echo "stage_ic aerosol_init" - echo "prep preplandobs prepatmiodaobs" - echo "atmanlinit atmanlrun atmanlfinal" - echo "atmensanlinit atmensanlrun atmensanlfinal" - echo "landanl" - echo "aeroanlinit aeroanlrun aeroanlfinal" + echo "prep prepsnowobs prepatmiodaobs" + echo "atmanlinit atmanlvar atmanlfv3inc atmanlfinal" + echo "atmensanlinit atmensanlletkf atmensanlfv3inc atmensanlfinal" + echo "snowanl" + echo "prepobsaero aeroanlinit aeroanlrun aeroanlfinal" echo "anal sfcanl analcalc analdiag fcst echgres" echo "upp atmos_products" echo "tracker genesis genesis_fsu" echo "verfozn verfrad vminmon fit2obs metp arch cleanup" echo "eobs ediag eomg eupd ecen esfc efcs epos earc" - echo "init_chem mom6ic ocnpost" + echo "init_chem mom6ic oceanice_products" echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" echo "wavegempak waveawipsbulls waveawipsgridded" echo "postsnd awips gempak npoess" - echo "ocnanalprep prepoceanobs ocnanalbmat ocnanalrun ocnanalchkpt ocnanalpost ocnanalvrfy" + echo "ocnanalprep prepoceanobs marinebmat ocnanalrun ocnanalecen marineanalletkf ocnanalchkpt ocnanalpost ocnanalvrfy" exit 1 fi @@ -32,1164 +35,1269 @@ step=$1 echo "BEGIN: config.resources" -if [[ "${machine}" = "WCOSS2" ]]; then - export npe_node_max=128 -elif [[ "${machine}" = "JET" ]]; then - if [[ "${PARTITION_POST_BATCH}" = "sjet" ]]; then - export npe_node_max=16 - elif [[ "${PARTITION_BATCH}" = "xjet" ]]; then - export npe_node_max=24 - elif [[ ${PARTITION_BATCH} = "vjet" || ${PARTITION_BATCH} = "sjet" ]]; then - export npe_node_max=16 - elif [[ ${PARTITION_BATCH} = "kjet" ]]; then - export npe_node_max=40 - fi -elif [[ "${machine}" = "HERA" ]]; then - export npe_node_max=40 -elif [[ "${machine}" = "S4" ]]; then - if [[ ${PARTITION_BATCH} = "s4" ]]; then - export npe_node_max=32 - elif [[ ${PARTITION_BATCH} = "ivy" ]]; then - export npe_node_max=20 - fi -elif [[ "${machine}" = "AWSPW" ]]; then - export PARTITION_BATCH="compute" - export npe_node_max=40 -elif [[ "${machine}" = "ORION" ]]; then - export npe_node_max=40 -elif [[ "${machine}" = "HERCULES" ]]; then - export npe_node_max=80 -fi - -if [[ "${step}" = "prep" ]]; then - export wtime_prep='00:30:00' - export npe_prep=4 - export npe_node_prep=2 - export nth_prep=1 - if [[ "${machine}" = "WCOSS2" ]]; then - export is_exclusive=True - else - export memory_prep="40G" +case ${machine} in + "WCOSS2") + max_tasks_per_node=128 + # shellcheck disable=SC2034 + mem_node_max="500GB" + ;; + "HERA") + max_tasks_per_node=40 + # shellcheck disable=SC2034 + mem_node_max="96GB" + ;; + "GAEA") + max_tasks_per_node=128 + # shellcheck disable=SC2034 + mem_node_max="251GB" + ;; + "ORION") + max_tasks_per_node=40 + # shellcheck disable=SC2034 + mem_node_max="192GB" + ;; + "HERCULES") + max_tasks_per_node=80 + # shellcheck disable=SC2034 + mem_node_max="512GB" + ;; + "JET") + case ${PARTITION_BATCH} in + "xjet") + max_tasks_per_node=24 + # shellcheck disable=SC2034 + mem_node_max="61GB" + ;; + "vjet") + max_tasks_per_node=16 + # shellcheck disable=SC2034 + mem_node_max="61GB" + ;; + "sjet") + max_tasks_per_node=16 + # shellcheck disable=SC2034 + mem_node_max="29GB" + ;; + "kjet") + max_tasks_per_node=40 + # shellcheck 
disable=SC2034 + mem_node_max="88GB" + ;; + *) + echo "FATAL ERROR: Unknown partition ${PARTITION_BATCH} specified for ${machine}" + exit 3 + esac + ;; + "S4") + case ${PARTITION_BATCH} in + "s4") max_tasks_per_node=32 + # shellcheck disable=SC2034 + mem_node_max="168GB" + ;; + "ivy") + max_tasks_per_node=20 + # shellcheck disable=SC2034 + mem_node_max="128GB" + ;; + *) + echo "FATAL ERROR: Unknown partition ${PARTITION_BATCH} specified for ${machine}" + exit 3 + esac + ;; + "AWSPW") + export PARTITION_BATCH="compute" + max_tasks_per_node=40 + # TODO Supply a max mem/node value for AWS + # shellcheck disable=SC2034 + mem_node_max="" + ;; + "CONTAINER") + max_tasks_per_node=1 + # TODO Supply a max mem/node value for a container + # shellcheck disable=SC2034 + mem_node_max="" + ;; + *) + echo "FATAL ERROR: Unknown machine encountered by ${BASH_SOURCE[0]}" + exit 2 + ;; +esac + +export max_tasks_per_node + +case ${step} in + "prep") + walltime='00:30:00' + ntasks=4 + tasks_per_node=2 + threads_per_task=1 + memory="40GB" + ;; + + "prepsnowobs") + walltime="00:05:00" + ntasks=1 + threads_per_task=1 + tasks_per_node=1 + ;; + + "prepatmiodaobs") + walltime="00:30:00" + ntasks=1 + threads_per_task=1 + tasks_per_node=$(( max_tasks_per_node / threads_per_task )) + ;; + + "aerosol_init") + walltime="00:05:00" + ntasks=1 + threads_per_task=1 + tasks_per_node=$(( max_tasks_per_node / threads_per_task )) + NTASKS=${ntasks} + memory="6GB" + ;; + + "waveinit") + walltime="00:10:00" + ntasks=12 + threads_per_task=1 + tasks_per_node=$(( max_tasks_per_node / threads_per_task )) + NTASKS=${ntasks} + memory="2GB" + ;; + + "waveprep") + walltime="00:10:00" + ntasks_gdas=5 + ntasks_gfs=65 + threads_per_task=1 + + tasks_per_node=$(( max_tasks_per_node / threads_per_task )) + NTASKS_gdas=${ntasks_gdas} + NTASKS_gfs=${ntasks_gfs} + memory_gdas="100GB" + memory_gfs="150GB" + ;; + + "wavepostsbs") + walltime_gdas="00:20:00" + walltime_gfs="03:00:00" + ntasks=8 + threads_per_task=1 + tasks_per_node=$(( max_tasks_per_node / threads_per_task )) + NTASKS=${ntasks} + memory_gdas="10GB" + memory_gfs="10GB" + ;; + + # The wavepost*pnt* jobs are I/O heavy and do not scale well to large nodes. + # Limit the number of tasks/node to 40. 
+ "wavepostbndpnt") + walltime="03:00:00" + ntasks=240 + threads_per_task=1 + tasks_per_node=$(( max_tasks_per_node / threads_per_task )) + export is_exclusive=True + if [[ ${tasks_per_node} -gt 40 ]]; then + tasks_per_node=40 + export is_exclusive=False fi - -elif [[ "${step}" = "preplandobs" ]]; then - export wtime_preplandobs="00:05:00" - npe_preplandobs=1 - export npe_preplandobs - export nth_preplandobs=1 - npe_node_preplandobs=1 - export npe_node_preplandobs - -elif [[ "${step}" = "prepatmiodaobs" ]]; then - export wtime_prepatmiodaobs="00:10:00" - export npe_prepatmiodaobs=1 - export nth_prepatmiodaobs=1 - npe_node_prepatmiodaobs=$(echo "${npe_node_max} / ${nth_prepatmiodaobs}" | bc) - export npe_node_prepatmiodaobs - -elif [[ "${step}" = "aerosol_init" ]]; then - export wtime_aerosol_init="00:05:00" - export npe_aerosol_init=1 - export nth_aerosol_init=1 - npe_node_aerosol_init=$(echo "${npe_node_max} / ${nth_aerosol_init}" | bc) - export npe_node_aerosol_init - export NTASKS=${npe_aerosol_init} - export memory_aerosol_init="6G" - -elif [[ "${step}" = "waveinit" ]]; then - - export wtime_waveinit="00:10:00" - export npe_waveinit=12 - export nth_waveinit=1 - npe_node_waveinit=$(echo "${npe_node_max} / ${nth_waveinit}" | bc) - export npe_node_waveinit - export NTASKS=${npe_waveinit} - export memory_waveinit="2GB" - -elif [[ "${step}" = "waveprep" ]]; then - - export wtime_waveprep="00:10:00" - export npe_waveprep=5 - export npe_waveprep_gfs=65 - export nth_waveprep=1 - export nth_waveprep_gfs=1 - npe_node_waveprep=$(echo "${npe_node_max} / ${nth_waveprep}" | bc) - export npe_node_waveprep - npe_node_waveprep_gfs=$(echo "${npe_node_max} / ${nth_waveprep_gfs}" | bc) - export npe_node_waveprep_gfs - export NTASKS=${npe_waveprep} - export NTASKS_gfs=${npe_waveprep_gfs} - export memory_waveprep="100GB" - export memory_waveprep_gfs="150GB" - -elif [[ "${step}" = "wavepostsbs" ]]; then - - export wtime_wavepostsbs="00:20:00" - export wtime_wavepostsbs_gfs="03:00:00" - export npe_wavepostsbs=8 - export nth_wavepostsbs=1 - npe_node_wavepostsbs=$(echo "${npe_node_max} / ${nth_wavepostsbs}" | bc) - export npe_node_wavepostsbs - export NTASKS=${npe_wavepostsbs} - export memory_wavepostsbs="10GB" - export memory_wavepostsbs_gfs="10GB" - -elif [[ "${step}" = "wavepostbndpnt" ]]; then - - export wtime_wavepostbndpnt="01:00:00" - export npe_wavepostbndpnt=240 - export nth_wavepostbndpnt=1 - npe_node_wavepostbndpnt=$(echo "${npe_node_max} / ${nth_wavepostbndpnt}" | bc) - export npe_node_wavepostbndpnt - export NTASKS=${npe_wavepostbndpnt} + NTASKS=${ntasks} + ;; + + "wavepostbndpntbll") + walltime="01:00:00" + ntasks=448 + threads_per_task=1 + tasks_per_node=$(( max_tasks_per_node / threads_per_task )) export is_exclusive=True - -elif [[ "${step}" = "wavepostbndpntbll" ]]; then - - export wtime_wavepostbndpntbll="01:00:00" - export npe_wavepostbndpntbll=448 - export nth_wavepostbndpntbll=1 - npe_node_wavepostbndpntbll=$(echo "${npe_node_max} / ${nth_wavepostbndpntbll}" | bc) - export npe_node_wavepostbndpntbll - export NTASKS=${npe_wavepostbndpntbll} + if [[ ${tasks_per_node} -gt 40 ]]; then + tasks_per_node=40 + export is_exclusive=False + fi + NTASKS=${ntasks} + ;; + + "wavepostpnt") + walltime="04:00:00" + ntasks=200 + threads_per_task=1 + tasks_per_node=$(( max_tasks_per_node / threads_per_task )) export is_exclusive=True - -elif [[ "${step}" = "wavepostpnt" ]]; then - - export wtime_wavepostpnt="04:00:00" - export npe_wavepostpnt=200 - export nth_wavepostpnt=1 - npe_node_wavepostpnt=$(echo 
"${npe_node_max} / ${nth_wavepostpnt}" | bc) - export npe_node_wavepostpnt - export NTASKS=${npe_wavepostpnt} + if [[ ${tasks_per_node} -gt 40 ]]; then + tasks_per_node=40 + export is_exclusive=False + fi + NTASKS=${ntasks} + ;; + + "wavegempak") + walltime="02:00:00" + ntasks=1 + threads_per_task=1 + tasks_per_node=$(( max_tasks_per_node / threads_per_task )) + NTASKS=${ntasks} + memory="1GB" + ;; + + "waveawipsbulls") + walltime="00:20:00" + ntasks=1 + threads_per_task=1 + tasks_per_node=$(( max_tasks_per_node / threads_per_task )) + NTASKS=${ntasks} export is_exclusive=True - -elif [[ "${step}" = "wavegempak" ]]; then - - export wtime_wavegempak="02:00:00" - export npe_wavegempak=1 - export nth_wavegempak=1 - npe_node_wavegempak=$(echo "${npe_node_max} / ${nth_wavegempak}" | bc) - export npe_node_wavegempak - export NTASKS=${npe_wavegempak} - export memory_wavegempak="1GB" - -elif [[ "${step}" = "waveawipsbulls" ]]; then - - export wtime_waveawipsbulls="00:20:00" - export npe_waveawipsbulls=1 - export nth_waveawipsbulls=1 - npe_node_waveawipsbulls=$(echo "${npe_node_max} / ${nth_waveawipsbulls}" | bc) - export npe_node_waveawipsbulls - export NTASKS=${npe_waveawipsbulls} + ;; + + "waveawipsgridded") + walltime="02:00:00" + ntasks=1 + threads_per_task=1 + tasks_per_node=$(( max_tasks_per_node / threads_per_task )) + NTASKS=${ntasks} + memory_gfs="1GB" + ;; + + "atmanlinit") + export layout_x=${layout_x_atmanl} + export layout_y=${layout_y_atmanl} + + export layout_gsib_x=$(( layout_x * 3 )) + export layout_gsib_y=$(( layout_y * 2 )) + + walltime="00:10:00" + ntasks=1 + threads_per_task=1 + tasks_per_node=$(( max_tasks_per_node / threads_per_task )) + memory="3072M" + ;; + + "atmanlvar") + export layout_x=${layout_x_atmanl} + export layout_y=${layout_y_atmanl} + + walltime="00:30:00" + ntasks_gdas=$(( layout_x * layout_y * 6 )) + ntasks_gfs=$(( layout_x * layout_y * 6 )) + threads_per_task_gdas=1 + threads_per_task_gfs=${threads_per_task_gdas} + tasks_per_node_gdas=$(( max_tasks_per_node / threads_per_task_gdas )) + tasks_per_node_gfs=$(( max_tasks_per_node / threads_per_task_gfs )) + memory="96GB" export is_exclusive=True - -elif [[ ${step} = "waveawipsgridded" ]]; then - - export wtime_waveawipsgridded="02:00:00" - export npe_waveawipsgridded=1 - export nth_waveawipsgridded=1 - npe_node_waveawipsgridded=$(echo "${npe_node_max} / ${nth_waveawipsgridded}" | bc) - export npe_node_waveawipsgridded - export NTASKS=${npe_waveawipsgridded} - export memory_waveawipsgridded_gfs="1GB" - -elif [[ "${step}" = "atmanlinit" ]]; then - - # make below case dependent later - export layout_x=1 - export layout_y=1 - - layout_gsib_x=$(echo "${layout_x} * 3" | bc) - export layout_gsib_x - layout_gsib_y=$(echo "${layout_y} * 2" | bc) - export layout_gsib_y - - export wtime_atmanlinit="00:10:00" - export npe_atmanlinit=1 - export nth_atmanlinit=1 - npe_node_atmanlinit=$(echo "${npe_node_max} / ${nth_atmanlinit}" | bc) - export npe_node_atmanlinit - export memory_atmanlinit="3072M" - -elif [[ "${step}" = "atmanlrun" ]]; then - - # make below case dependent later - export layout_x=1 - export layout_y=1 - - export wtime_atmanlrun="00:30:00" - npe_atmanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) - export npe_atmanlrun - npe_atmanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) - export npe_atmanlrun_gfs - export nth_atmanlrun=1 - export nth_atmanlrun_gfs=${nth_atmanlrun} - npe_node_atmanlrun=$(echo "${npe_node_max} / ${nth_atmanlrun}" | bc) - export npe_node_atmanlrun + ;; + + "atmanlfv3inc") + export 
layout_x=${layout_x_atmanl} + export layout_y=${layout_y_atmanl} + + walltime="00:30:00" + ntasks_gdas=$(( layout_x * layout_y * 6 )) + ntasks_gfs=$(( layout_x * layout_y * 6 )) + threads_per_task_gdas=1 + threads_per_task_gfs=${threads_per_task_gdas} + tasks_per_node_gdas=$(( max_tasks_per_node / threads_per_task_gdas )) + tasks_per_node_gfs=$(( max_tasks_per_node / threads_per_task_gfs )) + memory="96GB" export is_exclusive=True + ;; -elif [[ "${step}" = "atmanlfinal" ]]; then - - export wtime_atmanlfinal="00:30:00" - export npe_atmanlfinal=${npe_node_max} - export nth_atmanlfinal=1 - npe_node_atmanlfinal=$(echo "${npe_node_max} / ${nth_atmanlfinal}" | bc) - export npe_node_atmanlfinal + "atmanlfinal") + walltime="00:30:00" + ntasks=${max_tasks_per_node} + threads_per_task=1 + tasks_per_node=$(( max_tasks_per_node / threads_per_task )) export is_exclusive=True + ;; -elif [[ "${step}" = "landanl" ]]; then - # below lines are for creating JEDI YAML - case ${CASE} in - C768) + "snowanl") + # below lines are for creating JEDI YAML + case ${CASE} in + "C768") layout_x=6 layout_y=6 ;; - C384) + "C384") layout_x=5 layout_y=5 ;; - C192 | C96 | C48) + "C192" | "C96" | "C48") layout_x=1 layout_y=1 ;; - *) - echo "FATAL ERROR: Resolution not supported for land analysis'" - exit 1 - esac - - export layout_x - export layout_y - - export wtime_landanl="00:15:00" - npe_landanl=$(echo "${layout_x} * ${layout_y} * 6" | bc) - export npe_landanl - export nth_landanl=1 - npe_node_landanl=$(echo "${npe_node_max} / ${nth_landanl}" | bc) - export npe_node_landanl - -elif [[ "${step}" = "aeroanlinit" ]]; then - - # below lines are for creating JEDI YAML - case ${CASE} in - C768) + *) + echo "FATAL ERROR: Resources not defined for job ${step} at resolution ${CASE}" + exit 4 + esac + + export layout_x + export layout_y + + walltime="00:15:00" + ntasks=$(( layout_x * layout_y * 6 )) + threads_per_task=1 + tasks_per_node=$(( max_tasks_per_node / threads_per_task )) + ;; + + "prepobsaero") + walltime="00:30:00" + ntasks=1 + threads_per_task=1 + tasks_per_node=1 + memory="96GB" + ;; + + "aeroanlinit") + # below lines are for creating JEDI YAML + case ${CASE} in + "C768") layout_x=8 layout_y=8 ;; - C384) + "C384") layout_x=8 layout_y=8 ;; - C192 | C96) + "C192" | "C96") layout_x=8 layout_y=8 ;; - C48 ) + "C48" ) # this case is for testing only layout_x=1 layout_y=1 ;; *) - echo "FATAL ERROR: Resolution not supported for aerosol analysis'" - exit 1 + echo "FATAL ERROR: Resources not defined for job ${step} at resolution ${CASE}" + exit 4 esac export layout_x export layout_y - - export wtime_aeroanlinit="00:10:00" - export npe_aeroanlinit=1 - export nth_aeroanlinit=1 - npe_node_aeroanlinit=$(echo "${npe_node_max} / ${nth_aeroanlinit}" | bc) - export npe_node_aeroanlinit - export memory_aeroanlinit="3072M" - -elif [[ "${step}" = "aeroanlrun" ]]; then - - case ${CASE} in - C768) + walltime="00:10:00" + ntasks=1 + threads_per_task=1 + tasks_per_node=$(( max_tasks_per_node / threads_per_task )) + memory="3072M" + ;; + + "aeroanlrun") + case ${CASE} in + "C768") layout_x=8 layout_y=8 ;; - C384) + "C384") layout_x=8 layout_y=8 ;; - C192 | C96) + "C192" | "C96") layout_x=8 layout_y=8 ;; - C48 ) + "C48" ) # this case is for testing only layout_x=1 layout_y=1 ;; *) - echo "FATAL ERROR: Resolution ${CASE} is not supported, ABORT!" 
- exit 1 + echo "FATAL ERROR: Resources not defined for job ${step} at resolution ${CASE}" + exit 4 esac export layout_x export layout_y - export wtime_aeroanlrun="00:30:00" - npe_aeroanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) - export npe_aeroanlrun - npe_aeroanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) - export npe_aeroanlrun_gfs - export nth_aeroanlrun=1 - export nth_aeroanlrun_gfs=1 - npe_node_aeroanlrun=$(echo "${npe_node_max} / ${nth_aeroanlrun}" | bc) - export npe_node_aeroanlrun + walltime="00:30:00" + ntasks=$(( layout_x * layout_y * 6 )) + threads_per_task=1 + tasks_per_node=$(( max_tasks_per_node / threads_per_task )) export is_exclusive=True + ;; + + "aeroanlfinal") + walltime="00:10:00" + ntasks=1 + threads_per_task=1 + tasks_per_node=$(( max_tasks_per_node / threads_per_task )) + memory="3072M" + ;; + + "ocnanalprep") + walltime="00:10:00" + ntasks=1 + threads_per_task=1 + tasks_per_node=$(( max_tasks_per_node / threads_per_task )) + memory="24GB" + ;; + + "prepoceanobs") + walltime="00:10:00" + ntasks=1 + threads_per_task=1 + tasks_per_node=$(( max_tasks_per_node / threads_per_task )) + memory="48GB" + ;; + + "marinebmat") + npes=16 + ntasks=16 + case ${OCNRES} in + "025") ntasks=480;; + "050") ntasks=16;; + "500") ntasks=16;; + *) + echo "FATAL ERROR: Resources not defined for job ${step} at resolution ${OCNRES}" + exit 4 + esac -elif [[ "${step}" = "aeroanlfinal" ]]; then - - export wtime_aeroanlfinal="00:10:00" - export npe_aeroanlfinal=1 - export nth_aeroanlfinal=1 - npe_node_aeroanlfinal=$(echo "${npe_node_max} / ${nth_aeroanlfinal}" | bc) - export npe_node_aeroanlfinal - export memory_aeroanlfinal="3072M" - -elif [[ "${step}" = "ocnanalprep" ]]; then - - export wtime_ocnanalprep="00:10:00" - export npe_ocnanalprep=1 - export nth_ocnanalprep=1 - npe_node_ocnanalprep=$(echo "${npe_node_max} / ${nth_ocnanalprep}" | bc) - export npe_node_ocnanalprep - export memory_ocnanalprep="24GB" - -elif [[ "${step}" = "prepoceanobs" ]]; then - - export wtime_prepoceanobs="00:10:00" - export npe_prepoceanobs=1 - export nth_prepoceanobs=1 - npe_node_prepoceanobs=$(echo "${npe_node_max} / ${nth_prepoceanobs}" | bc) - export npe_node_prepoceanobs - export memory_prepoceanobs="24GB" - - -elif [[ "${step}" = "ocnanalbmat" ]]; then - npes=16 - case ${CASE} in - C384) - npes=480 + walltime="00:30:00" + threads_per_task=1 + export is_exclusive=True + tasks_per_node=$(( max_tasks_per_node / threads_per_task )) + ;; + + "ocnanalrun") + ntasks=16 + case ${OCNRES} in + "025") + ntasks=480 + memory="96GB" ;; - C96) - npes=16 + "050") + ntasks=16 + memory="96GB" ;; - C48) - npes=16 + "500") + ntasks=16 + memory="24GB" ;; *) - echo "FATAL: Resolution not supported'" - exit 1 + echo "FATAL ERROR: Resources not defined for job ${step} at resolution ${OCNRES}" + exit 4 esac - export wtime_ocnanalbmat="00:30:00" - export npe_ocnanalbmat=${npes} - export nth_ocnanalbmat=1 + walltime="00:15:00" + threads_per_task=1 export is_exclusive=True - npe_node_ocnanalbmat=$(echo "${npe_node_max} / ${nth_ocnanalbmat}" | bc) - export npe_node_ocnanalbmat - -elif [[ "${step}" = "ocnanalrun" ]]; then - npes=16 - case ${CASE} in - C384) - npes=480 - memory_ocnanalrun="128GB" + tasks_per_node=$(( max_tasks_per_node / threads_per_task )) + ;; + + "ocnanalecen") + ntasks=16 + case ${OCNRES} in + "025") + ntasks=40 + memory="96GB" ;; - C96) - npes=16 + "050") + ntasks=16 + memory="96GB" ;; - C48) - npes=16 - memory_ocnanalrun="64GB" + "500") + ntasks=16 + memory="24GB" ;; *) - echo "FATAL: Resolution not 
supported'" - exit 1 + echo "FATAL ERROR: Resources not defined for job ${step} at resolution ${OCNRES}" + exit 4 esac - export wtime_ocnanalrun="00:15:00" - export npe_ocnanalrun=${npes} - export nth_ocnanalrun=2 + walltime="00:10:00" + threads_per_task=1 export is_exclusive=True - npe_node_ocnanalrun=$(echo "${npe_node_max} / ${nth_ocnanalrun}" | bc) - export npe_node_ocnanalrun - export memory_ocnanalrun - -elif [[ "${step}" = "ocnanalchkpt" ]]; then - - export wtime_ocnanalchkpt="00:10:00" - export npe_ocnanalchkpt=1 - export nth_ocnanalchkpt=1 - npe_node_ocnanalchkpt=$(echo "${npe_node_max} / ${nth_ocnanalchkpt}" | bc) - export npe_node_ocnanalchkpt - case ${CASE} in - C384) - export memory_ocnanalchkpt="128GB" + tasks_per_node=$(( max_tasks_per_node / threads_per_task )) + ;; + + "marineanalletkf") + ntasks=16 + case ${OCNRES} in + "025") + ntasks=480 + memory="96GB" ;; - C96) - export memory_ocnanalchkpt="32GB" + "050") + ntasks=16 + memory="96GB" ;; - C48) - export memory_ocnanalchkpt="32GB" + "500") + ntasks=16 + memory="24GB" ;; *) - echo "FATAL: Resolution not supported'" - exit 1 + echo "FATAL ERROR: Resources not defined for job ${step} at resolution ${OCNRES}" + exit 4 esac -elif [[ "${step}" = "ocnanalpost" ]]; then - - export wtime_ocnanalpost="00:30:00" - export npe_ocnanalpost=${npe_node_max} - export nth_ocnanalpost=1 - npe_node_ocnanalpost=$(echo "${npe_node_max} / ${nth_ocnanalpost}" | bc) - export npe_node_ocnanalpost - -elif [[ "${step}" = "ocnanalvrfy" ]]; then - - export wtime_ocnanalvrfy="00:35:00" - export npe_ocnanalvrfy=1 - export nth_ocnanalvrfy=1 - npe_node_ocnanalvrfy=$(echo "${npe_node_max} / ${nth_ocnanalvrfy}" | bc) - export npe_node_ocnanalvrfy - export memory_ocnanalvrfy="24GB" - -elif [[ "${step}" = "anal" ]]; then - - export wtime_anal="00:50:00" - export wtime_anal_gfs="00:40:00" - export npe_anal=780 - export nth_anal=5 - export npe_anal_gfs=825 - export nth_anal_gfs=5 - if [[ "${machine}" = "WCOSS2" ]]; then - export nth_anal=8 - export nth_anal_gfs=8 - fi - if [[ "${CASE}" = "C384" ]]; then - export npe_anal=160 - export npe_anal_gfs=160 - export nth_anal=10 - export nth_anal_gfs=10 - if [[ "${machine}" = "S4" ]]; then - #On the S4-s4 partition, this is accomplished by increasing the task - #count to a multiple of 32 - if [[ ${PARTITION_BATCH} = "s4" ]]; then - export npe_anal=416 - export npe_anal_gfs=416 - fi - #S4 is small, so run this task with just 1 thread - export nth_anal=1 - export nth_anal_gfs=1 - export wtime_anal="02:00:00" - fi - fi - if [[ "${CASE}" = "C192" || "${CASE}" = "C96" || "${CASE}" = "C48" ]]; then - export npe_anal=84 - export npe_anal_gfs=84 - if [[ "${machine}" = "S4" ]]; then - export nth_anal=4 - export nth_anal_gfs=4 - #Adjust job count for S4 - if [[ "${PARTITION_BATCH}" = "s4" ]]; then - export npe_anal=88 - export npe_anal_gfs=88 - elif [[ ${PARTITION_BATCH} = "ivy" ]]; then - export npe_anal=90 - export npe_anal_gfs=90 - fi - fi + walltime="00:10:00" + threads_per_task=1 + export is_exclusive=True + tasks_per_node=$(( max_tasks_per_node / threads_per_task )) + ;; + + + "ocnanalchkpt") + walltime="00:10:00" + ntasks=1 + threads_per_task=1 + tasks_per_node=$(( max_tasks_per_node / threads_per_task )) + case ${OCNRES} in + "025") + memory="128GB" + ntasks=40;; + "050") + memory="32GB" + ntasks=16;; + "500") + memory="32GB" + ntasks=8;; + *) + echo "FATAL ERROR: Resources not defined for job ${step} at resolution ${OCNRES}" + exit 4 + esac + ;; + + "ocnanalpost") + walltime="00:30:00" + ntasks=${max_tasks_per_node} + 
threads_per_task=1 + tasks_per_node=$(( max_tasks_per_node / threads_per_task )) + ;; + + "ocnanalvrfy") + walltime="00:35:00" + ntasks=1 + threads_per_task=1 + tasks_per_node=$(( max_tasks_per_node / threads_per_task )) + memory="24GB" + ;; + + "anal") + walltime_gdas="01:20:00" + walltime_gfs="01:00:00" + case ${CASE} in + "C768") + ntasks_gdas=780 + ntasks_gfs=825 + threads_per_task=5 + ;; + "C384") + ntasks_gdas=160 + ntasks_gfs=160 + threads_per_task=10 + ;; + "C192" | "C96" | "C48") + ntasks_gdas=84 + ntasks_gfs=84 + threads_per_task=5 + ;; + *) + echo "FATAL ERROR: Resources not defined for job ${step} at resolution ${CASE}" + exit 4 + ;; + esac + tasks_per_node=$(( max_tasks_per_node / threads_per_task )) + export threads_per_task_cycle=${threads_per_task} + export tasks_per_node_cycle=$(( max_tasks_per_node / threads_per_task_cycle )) + export is_exclusive=True + ;; + + "analcalc") + walltime="00:15:00" + ntasks=127 + export ntasks_calcanl="${ntasks}" + threads_per_task=1 + tasks_per_node=$(( max_tasks_per_node / threads_per_task )) + export threads_per_task_echgres_gdas=4 + export threads_per_task_echgres_gfs=12 + export is_exclusive=True + memory="48GB" + if [[ "${CASE}" == "C384" || "${CASE}" == "C768" ]]; then + memory="${mem_node_max}" fi - npe_node_anal=$(echo "${npe_node_max} / ${nth_anal}" | bc) - export npe_node_anal - export nth_cycle=${nth_anal} - npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) - export npe_node_cycle + ;; + + "analdiag") + walltime="00:15:00" + ntasks=96 # Should be at least twice ediag's tasks + threads_per_task=1 + tasks_per_node=$(( max_tasks_per_node / threads_per_task )) + memory="48GB" + ;; + + "sfcanl") + walltime="00:20:00" + ntasks=${ntiles:-6} + threads_per_task=1 + tasks_per_node=$(( max_tasks_per_node / threads_per_task )) export is_exclusive=True + ;; -elif [[ "${step}" = "analcalc" ]]; then - - export wtime_analcalc="00:10:00" - export npe_analcalc=127 - export ntasks="${npe_analcalc}" - export nth_analcalc=1 - export nth_echgres=4 - export nth_echgres_gfs=12 - npe_node_analcalc=$(echo "${npe_node_max} / ${nth_analcalc}" | bc) - export npe_node_analcalc + "fcst" | "efcs") export is_exclusive=True - export memory_analcalc="48GB" -elif [[ "${step}" = "analdiag" ]]; then - - export wtime_analdiag="00:15:00" - export npe_analdiag=96 # Should be at least twice npe_ediag - export nth_analdiag=1 - npe_node_analdiag=$(echo "${npe_node_max} / ${nth_analdiag}" | bc) - export npe_node_analdiag - export memory_analdiag="48GB" + _RUN=${RUN:-"gfs"} + _RUN=${RUN/enkf/} + + # Declare variables from config.ufs based on _RUN + # Export layout and write task variables, but not ntasks/threads + # Capitalize _RUN for write tasks + for var in layout_x layout_y ntasks_fv3 ntasks_quilt nthreads_fv3 nthreads_ufs \ + WRITE_GROUP WRTTASK_PER_GROUP_PER_THREAD; do + if [[ ${var} =~ "layout" ]]; then + ufs_var_name="${var}_${_RUN}" + declare -x "${var}"="${!ufs_var_name}" + elif [[ ${var} =~ "WR" ]]; then + ufs_var_name="${var}_${_RUN^^}" + declare -x "${var}"="${!ufs_var_name}" + else + ufs_var_name="${var}_${_RUN}" + declare "${var}"="${!ufs_var_name}" + fi + done -elif [[ "${step}" = "sfcanl" ]]; then + # Will not set mediator threads if we are skipping the mediator + if [[ ${_RUN} == "gfs" ]]; then + nthreads_mediator=${nthreads_mediator_gfs:-} + elif [[ ${_RUN} == "gdas" ]]; then + nthreads_mediator=${nthreads_mediator_gdas:-} + fi - export wtime_sfcanl="00:10:00" - export npe_sfcanl=6 - export nth_sfcanl=1 - npe_node_sfcanl=$(echo "${npe_node_max} / 
${nth_sfcanl}" | bc)
- export npe_node_sfcanl
- export is_exclusive=True
+ # Determine if using ESMF-managed threading or traditional threading
+ # If using traditional threading, set them to 1
+ if [[ "${USE_ESMF_THREADING:-}" == "YES" ]]; then
+ export UFS_THREADS=1
+ else # traditional threading
+ export UFS_THREADS=${nthreads_ufs:-1}
+ nthreads_fv3=1
+ nthreads_mediator=1
+ [[ "${DO_WAVE}" == "YES" ]] && nthreads_ww3=1
+ [[ "${DO_OCN}" == "YES" ]] && nthreads_mom6=1
+ [[ "${DO_ICE}" == "YES" ]] && nthreads_cice6=1
+ fi
-elif [[ "${step}" = "fcst" || "${step}" = "efcs" ]]; then
+ if (( ntiles > 6 )); then
+ export layout_x_nest=${layout_x_nest:-10}
+ export layout_y_nest=${layout_y_nest:-10}
+ export npx_nest=${npx_nest:-1441}
+ export npy_nest=${npy_nest:-961}
+ fi
- export is_exclusive=True
+ # PETS for the atmosphere dycore
+ (( FV3PETS = ntasks_fv3 * nthreads_fv3 ))
+ echo "FV3 using (nthreads, PETS) = (${nthreads_fv3}, ${FV3PETS})"
- if [[ "${step}" = "fcst" ]]; then
- _CDUMP_LIST=${CDUMP:-"gdas gfs"}
- elif [[ "${step}" = "efcs" ]]; then
- _CDUMP_LIST=${CDUMP:-"enkfgdas enkfgfs"}
+ # PETS for quilting
+ if [[ "${QUILTING:-}" == ".true." ]]; then
+ (( QUILTPETS = ntasks_quilt * nthreads_fv3 ))
+ (( WRTTASK_PER_GROUP = WRTTASK_PER_GROUP_PER_THREAD ))
+ export WRTTASK_PER_GROUP
+ else
+ QUILTPETS=0
+ fi
+ echo "QUILT using (nthreads, PETS) = (${nthreads_fv3}, ${QUILTPETS})"
+
+ # Total PETS for the atmosphere component
+ ATMTHREADS=${nthreads_fv3}
+ (( ATMPETS = FV3PETS + QUILTPETS ))
+ export ATMPETS ATMTHREADS
+ echo "FV3ATM using (nthreads, PETS) = (${ATMTHREADS}, ${ATMPETS})"
+
+ # Total PETS for the coupled model (starting w/ the atmosphere)
+ NTASKS_TOT=${ATMPETS}
+
+ # The mediator PETS can overlap with other components; usually they land on the atmosphere tasks.
+ # However, it is suggested to limit mediator PETS to 300, as a larger count may cause slow performance.
+ # See https://docs.google.com/document/d/1bKpi-52t5jIfv2tuNHmQkYUe3hkKsiG_DG_s6Mnukog/edit
+ # TODO: Update reference when moved to ufs-weather-model RTD
+ MEDTHREADS=${nthreads_mediator:-1}
+ MEDPETS=${MEDPETS:-${FV3PETS}}
+ (( "${MEDPETS}" > 300 )) && MEDPETS=300
+ export MEDPETS MEDTHREADS
+ echo "MEDIATOR using (threads, PETS) = (${MEDTHREADS}, ${MEDPETS})"
+
+ CHMPETS=0; CHMTHREADS=0
+ if [[ "${DO_AERO}" == "YES" ]]; then
+ # GOCART shares the same grid and forecast tasks as FV3 (do not add write grid component tasks).
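+ # Illustrative accounting (hypothetical values, not defaults): a run with
+ # ntasks_fv3=24, nthreads_fv3=1, ntasks_quilt=6, and QUILTING=.true. gives
+ # FV3PETS=24, QUILTPETS=6, and ATMPETS=30, so NTASKS_TOT starts at 30;
+ # GOCART below then reuses the 24 FV3 PETS without growing that total.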
+ (( CHMTHREADS = ATMTHREADS )) + (( CHMPETS = FV3PETS )) + # Do not add to NTASKS_TOT + echo "GOCART using (threads, PETS) = (${CHMTHREADS}, ${CHMPETS})" fi + export CHMPETS CHMTHREADS + + WAVPETS=0; WAVTHREADS=0 + if [[ "${DO_WAVE}" == "YES" ]]; then + (( WAVPETS = ntasks_ww3 * nthreads_ww3 )) + (( WAVTHREADS = nthreads_ww3 )) + echo "WW3 using (threads, PETS) = (${WAVTHREADS}, ${WAVPETS})" + (( NTASKS_TOT = NTASKS_TOT + WAVPETS )) + fi + export WAVPETS WAVTHREADS + + OCNPETS=0; OCNTHREADS=0 + if [[ "${DO_OCN}" == "YES" ]]; then + (( OCNPETS = ntasks_mom6 * nthreads_mom6 )) + (( OCNTHREADS = nthreads_mom6 )) + echo "MOM6 using (threads, PETS) = (${OCNTHREADS}, ${OCNPETS})" + (( NTASKS_TOT = NTASKS_TOT + OCNPETS )) + fi + export OCNPETS OCNTHREADS + + ICEPETS=0; ICETHREADS=0 + if [[ "${DO_ICE}" == "YES" ]]; then + (( ICEPETS = ntasks_cice6 * nthreads_cice6 )) + (( ICETHREADS = nthreads_cice6 )) + echo "CICE6 using (threads, PETS) = (${ICETHREADS}, ${ICEPETS})" + (( NTASKS_TOT = NTASKS_TOT + ICEPETS )) + fi + export ICEPETS ICETHREADS - # During workflow creation, we need resources for all CDUMPs and CDUMP is undefined - for _CDUMP in ${_CDUMP_LIST}; do - if [[ "${_CDUMP}" =~ "gfs" ]]; then - export layout_x=${layout_x_gfs} - export layout_y=${layout_y_gfs} - export WRITE_GROUP=${WRITE_GROUP_GFS} - export WRTTASK_PER_GROUP_PER_THREAD=${WRTTASK_PER_GROUP_PER_THREAD_GFS} - ntasks_fv3=${ntasks_fv3_gfs} - ntasks_quilt=${ntasks_quilt_gfs} - nthreads_fv3=${nthreads_fv3_gfs} - fi - - # PETS for the atmosphere dycore - (( FV3PETS = ntasks_fv3 * nthreads_fv3 )) - echo "FV3 using (nthreads, PETS) = (${nthreads_fv3}, ${FV3PETS})" - - # PETS for quilting - if [[ "${QUILTING:-}" = ".true." ]]; then - (( QUILTPETS = ntasks_quilt * nthreads_fv3 )) - (( WRTTASK_PER_GROUP = WRTTASK_PER_GROUP_PER_THREAD )) - export WRTTASK_PER_GROUP - else - QUILTPETS=0 - fi - echo "QUILT using (nthreads, PETS) = (${nthreads_fv3}, ${QUILTPETS})" - - # Total PETS for the atmosphere component - ATMTHREADS=${nthreads_fv3} - (( ATMPETS = FV3PETS + QUILTPETS )) - export ATMPETS ATMTHREADS - echo "FV3ATM using (nthreads, PETS) = (${ATMTHREADS}, ${ATMPETS})" - - # Total PETS for the coupled model (starting w/ the atmosphere) - NTASKS_TOT=${ATMPETS} - - # The mediator PETS can overlap with other components, usually it lands on the atmosphere tasks. - # However, it is suggested limiting mediator PETS to 300, as it may cause the slow performance. - # See https://docs.google.com/document/d/1bKpi-52t5jIfv2tuNHmQkYUe3hkKsiG_DG_s6Mnukog/edit - # TODO: Update reference when moved to ufs-weather-model RTD - MEDTHREADS=${nthreads_mediator:-1} - MEDPETS=${MEDPETS:-${FV3PETS}} - [[ "${MEDPETS}" -gt 300 ]] && MEDPETS=300 - export MEDPETS MEDTHREADS - echo "MEDIATOR using (threads, PETS) = (${MEDTHREADS}, ${MEDPETS})" - - CHMPETS=0; CHMTHREADS=0 - if [[ "${DO_AERO}" = "YES" ]]; then - # GOCART shares the same grid and forecast tasks as FV3 (do not add write grid component tasks). 
- (( CHMTHREADS = ATMTHREADS )) - (( CHMPETS = FV3PETS )) - # Do not add to NTASKS_TOT - echo "GOCART using (threads, PETS) = (${CHMTHREADS}, ${CHMPETS})" - fi - export CHMPETS CHMTHREADS - - WAVPETS=0; WAVTHREADS=0 - if [[ "${DO_WAVE}" = "YES" ]]; then - (( WAVPETS = ntasks_ww3 * nthreads_ww3 )) - (( WAVTHREADS = nthreads_ww3 )) - echo "WW3 using (threads, PETS) = (${WAVTHREADS}, ${WAVPETS})" - (( NTASKS_TOT = NTASKS_TOT + WAVPETS )) - fi - export WAVPETS WAVTHREADS - - OCNPETS=0; OCNTHREADS=0 - if [[ "${DO_OCN}" = "YES" ]]; then - (( OCNPETS = ntasks_mom6 * nthreads_mom6 )) - (( OCNTHREADS = nthreads_mom6 )) - echo "MOM6 using (threads, PETS) = (${OCNTHREADS}, ${OCNPETS})" - (( NTASKS_TOT = NTASKS_TOT + OCNPETS )) - fi - export OCNPETS OCNTHREADS - - ICEPETS=0; ICETHREADS=0 - if [[ "${DO_ICE}" = "YES" ]]; then - (( ICEPETS = ntasks_cice6 * nthreads_cice6 )) - (( ICETHREADS = nthreads_cice6 )) - echo "CICE6 using (threads, PETS) = (${ICETHREADS}, ${ICEPETS})" - (( NTASKS_TOT = NTASKS_TOT + ICEPETS )) - fi - export ICEPETS ICETHREADS - - echo "Total PETS for ${_CDUMP} = ${NTASKS_TOT}" - - if [[ "${_CDUMP}" =~ "gfs" ]]; then - declare -x "npe_${step}_gfs"="${NTASKS_TOT}" - declare -x "nth_${step}_gfs"=1 # ESMF handles threading for the UFS-weather-model - declare -x "npe_node_${step}_gfs"="${npe_node_max}" - else - declare -x "npe_${step}"="${NTASKS_TOT}" - declare -x "nth_${step}"=1 # ESMF handles threading for the UFS-weather-model - declare -x "npe_node_${step}"="${npe_node_max}" - fi + echo "Total PETS for ${RUN:-gfs} = ${NTASKS_TOT}" - done + declare -x "ntasks"="${NTASKS_TOT}" + declare -x "threads_per_task"="${UFS_THREADS}" + declare -x "tasks_per_node"="${max_tasks_per_node}" case "${CASE}" in "C48" | "C96" | "C192") - declare -x "wtime_${step}"="00:30:00" - declare -x "wtime_${step}_gfs"="03:00:00" + declare -x "walltime_gdas"="00:20:00" + declare -x "walltime_enkfgdas"="00:20:00" + declare -x "walltime_gfs"="03:00:00" + declare -x "walltime_enkfgfs"="00:20:00" ;; "C384") - declare -x "wtime_${step}"="00:20:00" - declare -x "wtime_${step}_gfs"="06:00:00" + declare -x "walltime_gdas"="00:30:00" + declare -x "walltime_enkfgdas"="00:30:00" + declare -x "walltime_gfs"="06:00:00" + declare -x "walltime_enkfgfs"="00:30:00" ;; "C768" | "C1152") - declare -x "wtime_${step}"="01:00:00" - declare -x "wtime_${step}_gfs"="06:00:00" + # Not valid resolutions for ensembles + declare -x "walltime_gdas"="00:40:00" + declare -x "walltime_gfs"="06:00:00" ;; *) - echo "FATAL ERROR: Resolution ${CASE} not supported in ${step}" - exit 1 + echo "FATAL ERROR: Resources not defined for job ${step} at resolution ${CASE}" + exit 4 ;; esac - unset _CDUMP _CDUMP_LIST + unset _RUN unset NTASKS_TOT + ;; -elif [[ "${step}" = "ocnpost" ]]; then - - export wtime_ocnpost="00:30:00" - export npe_ocnpost=1 - export npe_node_ocnpost=1 - export nth_ocnpost=1 - export memory_ocnpost="96G" - if [[ "${machine}" == "JET" ]]; then - # JET only has 88GB of requestable memory per node - # so a second node is required to meet the requiremtn - npe_ocnpost=2 - fi - -elif [[ "${step}" = "upp" ]]; then + "oceanice_products") + walltime="00:15:00" + ntasks=1 + tasks_per_node=1 + threads_per_task=1 + memory="96GB" + ;; + "upp") case "${CASE}" in "C48" | "C96") - export npe_upp=${CASE:1} + ntasks=${CASE:1} ;; - "C192" | "C384" | "C768") - export npe_upp=120 + "C192" | "C384" | "C768" ) + ntasks=120 + memory="${mem_node_max}" ;; *) - echo "FATAL ERROR: Resolution '${CASE}' not supported for UPP'" - exit 1 + echo "FATAL ERROR: 
Resources not defined for job ${step} at resolution ${CASE}" + exit 4 ;; esac - export npe_node_upp=${npe_upp} + tasks_per_node=${ntasks} - export nth_upp=1 + threads_per_task=1 - export wtime_upp="00:15:00" - if [[ "${npe_node_upp}" -gt "${npe_node_max}" ]]; then - export npe_node_upp=${npe_node_max} + walltime="00:15:00" + if (( tasks_per_node > max_tasks_per_node )); then + tasks_per_node=${max_tasks_per_node} fi export is_exclusive=True + ;; -elif [[ ${step} = "atmos_products" ]]; then - - export wtime_atmos_products="00:15:00" - export npe_atmos_products=24 - export nth_atmos_products=1 - export npe_node_atmos_products="${npe_atmos_products}" - export wtime_atmos_products_gfs="${wtime_atmos_products}" - export npe_atmos_products_gfs="${npe_atmos_products}" - export nth_atmos_products_gfs="${nth_atmos_products}" - export npe_node_atmos_products_gfs="${npe_node_atmos_products}" + "atmos_products") + walltime="00:15:00" + ntasks=24 + threads_per_task=1 + tasks_per_node="${ntasks}" export is_exclusive=True - -elif [[ ${step} = "verfozn" ]]; then - - export wtime_verfozn="00:05:00" - export npe_verfozn=1 - export nth_verfozn=1 - export npe_node_verfozn=1 - export memory_verfozn="1G" - -elif [[ ${step} = "verfrad" ]]; then - - export wtime_verfrad="00:40:00" - export npe_verfrad=1 - export nth_verfrad=1 - export npe_node_verfrad=1 - export memory_verfrad="5G" - -elif [[ ${step} = "vminmon" ]]; then - - export wtime_vminmon="00:05:00" - export npe_vminmon=1 - export nth_vminmon=1 - export npe_node_vminmon=1 - export wtime_vminmon_gfs="00:05:00" - export npe_vminmon_gfs=1 - export nth_vminmon_gfs=1 - export npe_node_vminmon_gfs=1 - export memory_vminmon="1G" - -elif [[ ${step} = "tracker" ]]; then - - export wtime_tracker="00:10:00" - export npe_tracker=1 - export nth_tracker=1 - export npe_node_tracker=1 - export memory_tracker="4G" - -elif [[ ${step} = "genesis" ]]; then - - export wtime_genesis="00:25:00" - export npe_genesis=1 - export nth_genesis=1 - export npe_node_genesis=1 - export memory_genesis="4G" - -elif [[ ${step} = "genesis_fsu" ]]; then - - export wtime_genesis_fsu="00:10:00" - export npe_genesis_fsu=1 - export nth_genesis_fsu=1 - export npe_node_genesis_fsu=1 - export memory_genesis_fsu="4G" - -elif [[ "${step}" = "fit2obs" ]]; then - - export wtime_fit2obs="00:20:00" - export npe_fit2obs=3 - export nth_fit2obs=1 - export npe_node_fit2obs=1 - export memory_fit2obs="20G" - if [[ "${machine}" == "WCOSS2" ]]; then export npe_node_fit2obs=3 ; fi - -elif [[ "${step}" = "metp" ]]; then - - export nth_metp=1 - export wtime_metp="03:00:00" - export npe_metp=4 - export npe_node_metp=4 - export wtime_metp_gfs="06:00:00" - export npe_metp_gfs=4 - export npe_node_metp_gfs=4 + ;; + + "verfozn") + walltime="00:05:00" + ntasks=1 + threads_per_task=1 + tasks_per_node=1 + memory="1G" + ;; + + "verfrad") + walltime="00:40:00" + ntasks=1 + threads_per_task=1 + tasks_per_node=1 + memory="5G" + ;; + + "vminmon") + walltime="00:05:00" + ntasks=1 + threads_per_task=1 + tasks_per_node=1 + memory="1G" + ;; + + "tracker") + walltime="00:10:00" + ntasks=1 + threads_per_task=1 + tasks_per_node=1 + memory="4G" + ;; + + "genesis") + walltime="00:25:00" + ntasks=1 + threads_per_task=1 + tasks_per_node=1 + memory="10G" + ;; + + "genesis_fsu") + walltime="00:10:00" + ntasks=1 + threads_per_task=1 + tasks_per_node=1 + memory="10G" + ;; + + "fit2obs") + walltime="00:20:00" + ntasks=3 + threads_per_task=1 + tasks_per_node=1 + memory="20G" + [[ ${CASE} == "C768" ]] && memory="80GB" + ;; + + "metp") + 
threads_per_task=1 + walltime_gdas="03:00:00" + walltime_gfs="06:00:00" + ntasks=1 + tasks_per_node=1 + export memory="80G" + ;; + + "echgres") + walltime="00:10:00" + ntasks=3 + threads_per_task=${max_tasks_per_node} + tasks_per_node=1 + ;; + + "init") + walltime="00:30:00" + ntasks=24 + threads_per_task=1 + tasks_per_node=6 + memory="70GB" + ;; + + "init_chem") + walltime="00:30:00" + ntasks=1 + tasks_per_node=1 export is_exclusive=True + ;; -elif [[ "${step}" = "echgres" ]]; then - - export wtime_echgres="00:10:00" - export npe_echgres=3 - export nth_echgres=${npe_node_max} - export npe_node_echgres=1 - if [[ "${machine}" = "WCOSS2" ]]; then - export memory_echgres="200GB" - fi - -elif [[ "${step}" = "init" ]]; then - - export wtime_init="00:30:00" - export npe_init=24 - export nth_init=1 - export npe_node_init=6 - export memory_init="70G" - -elif [[ "${step}" = "init_chem" ]]; then - - export wtime_init_chem="00:30:00" - export npe_init_chem=1 - export npe_node_init_chem=1 + "mom6ic") + walltime="00:30:00" + ntasks=24 + tasks_per_node=24 export is_exclusive=True - -elif [[ "${step}" = "mom6ic" ]]; then - - export wtime_mom6ic="00:30:00" - export npe_mom6ic=24 - export npe_node_mom6ic=24 + ;; + + "arch" | "earc" | "getic") + walltime="06:00:00" + ntasks=1 + tasks_per_node=1 + threads_per_task=1 + memory="4096M" + ;; + + "cleanup") + walltime="00:15:00" + ntasks=1 + tasks_per_node=1 + threads_per_task=1 + memory="4096M" + ;; + + "stage_ic") + walltime="00:15:00" + ntasks=1 + tasks_per_node=1 + threads_per_task=1 export is_exclusive=True + ;; + + "atmensanlinit") + export layout_x=${layout_x_atmensanl} + export layout_y=${layout_y_atmensanl} + + walltime="00:10:00" + ntasks=1 + threads_per_task=1 + tasks_per_node=$(( max_tasks_per_node / threads_per_task )) + memory="3072M" + ;; + + "atmensanlletkf") + export layout_x=${layout_x_atmensanl} + export layout_y=${layout_y_atmensanl} + + walltime="00:30:00" + ntasks=$(( layout_x * layout_y * 6 )) + threads_per_task=1 + tasks_per_node=$(( max_tasks_per_node / threads_per_task )) + memory="96GB" + export is_exclusive=True + ;; -elif [[ ${step} = "arch" || ${step} = "earc" || ${step} = "getic" ]]; then - - eval "export wtime_${step}='06:00:00'" - eval "export npe_${step}=1" - eval "export npe_node_${step}=1" - eval "export nth_${step}=1" - eval "export memory_${step}=4096M" - if [[ "${machine}" = "WCOSS2" ]]; then - eval "export memory_${step}=50GB" - fi - -elif [[ ${step} == "cleanup" ]]; then - export wtime_cleanup="01:00:00" - export npe_cleanup=1 - export npe_node_cleanup=1 - export nth_cleanup=1 - export memory_cleanup="4096M" - -elif [[ ${step} = "stage_ic" ]]; then + "atmensanlfv3inc") + export layout_x=${layout_x_atmensanl} + export layout_y=${layout_y_atmensanl} - export wtime_stage_ic="00:15:00" - export npe_stage_ic=1 - export npe_node_stage_ic=1 - export nth_stage_ic=1 + walltime="00:30:00" + ntasks=$(( layout_x * layout_y * 6 )) + threads_per_task=1 + tasks_per_node=$(( max_tasks_per_node / threads_per_task )) + memory="96GB" export is_exclusive=True + ;; -elif [[ "${step}" = "atmensanlinit" ]]; then - - # make below case dependent later - export layout_x=1 - export layout_y=1 - - export wtime_atmensanlinit="00:10:00" - export npe_atmensanlinit=1 - export nth_atmensanlinit=1 - npe_node_atmensanlinit=$(echo "${npe_node_max} / ${nth_atmensanlinit}" | bc) - export npe_node_atmensanlinit - export memory_atmensanlinit="3072M" - -elif [[ "${step}" = "atmensanlrun" ]]; then - - # make below case dependent later - export layout_x=1 - export 
layout_y=1
-
- export wtime_atmensanlrun="00:30:00"
- npe_atmensanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc)
- export npe_atmensanlrun
- npe_atmensanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc)
- export npe_atmensanlrun_gfs
- export nth_atmensanlrun=1
- export nth_atmensanlrun_gfs=${nth_atmensanlrun}
- npe_node_atmensanlrun=$(echo "${npe_node_max} / ${nth_atmensanlrun}" | bc)
- export npe_node_atmensanlrun
+ "atmensanlfinal")
+ walltime="00:30:00"
+ ntasks=${max_tasks_per_node}
+ threads_per_task=1
+ tasks_per_node=$(( max_tasks_per_node / threads_per_task ))
export is_exclusive=True
+ ;;
-elif [[ "${step}" = "atmensanlfinal" ]]; then
+ "eobs" | "eomg")
+ if [[ "${step}" == "eobs" ]]; then
+ walltime="00:15:00"
+ else
+ walltime="00:30:00"
+ fi
- export wtime_atmensanlfinal="00:30:00"
- export npe_atmensanlfinal=${npe_node_max}
- export nth_atmensanlfinal=1
- npe_node_atmensanlfinal=$(echo "${npe_node_max} / ${nth_atmensanlfinal}" | bc)
- export npe_node_atmensanlfinal
+ case ${CASE} in
+ "C768") ntasks=200;;
+ "C384") ntasks=100;;
+ "C192" | "C96" | "C48") ntasks=40;;
+ *)
+ echo "FATAL ERROR: Resources not defined for job ${step} at resolution ${CASE}"
+ exit 4
+ ;;
+ esac
+ threads_per_task=2
+ # NOTE: The number of tasks and cores used must be the same for eobs
+ # See https://github.com/NOAA-EMC/global-workflow/issues/2092 for details
+ tasks_per_node=$(( max_tasks_per_node / threads_per_task ))
export is_exclusive=True
-
-elif [[ "${step}" = "eobs" || "${step}" = "eomg" ]]; then
-
- export wtime_eobs="00:15:00"
- export wtime_eomg="01:00:00"
- if [[ "${CASE}" = "C768" ]]; then
- export npe_eobs=200
- elif [[ "${CASE}" = "C384" ]]; then
- export npe_eobs=100
- elif [[ "${CASE}" = "C192" || "${CASE}" = "C96" || "${CASE}" = "C48" ]]; then
- export npe_eobs=40
+ # Unset max_tasks_per_node if tasks_per_node does not divide it evenly
+ # to prevent dropping data on the floor. tasks_per_node should be set in
+ # config.resources.{machine} instead. This will result in an error at
+ # experiment setup time if it is not set in config.resources.{machine}.
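+ # Worked example (hypothetical values): max_tasks_per_node=40 with
+ # threads_per_task=2 gives tasks_per_node=20, and 40 % 20 == 0, so the
+ # value is kept; a machine file that left tasks_per_node=15 would trip
+ # the check below (40 % 15 != 0) and max_tasks_per_node would be unset.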
+ if [[ $(( max_tasks_per_node % tasks_per_node )) != 0 ]]; then + unset max_tasks_per_node fi - export npe_eomg=${npe_eobs} - export nth_eobs=2 - export nth_eomg=${nth_eobs} - npe_node_eobs=$(echo "${npe_node_max} / ${nth_eobs}" | bc) - export npe_node_eobs + ;; + + "ediag") + walltime="00:15:00" + ntasks=48 + threads_per_task=1 + tasks_per_node=$(( max_tasks_per_node / threads_per_task )) + memory="30GB" + ;; + + "eupd") + walltime="00:30:00" + case ${CASE} in + "C768") + ntasks=480 + threads_per_task=6 + ;; + "C384") + ntasks=270 + threads_per_task=8 + ;; + "C192" | "C96" | "C48") + ntasks=42 + threads_per_task=2 + ;; + *) + echo "FATAL ERROR: Resources not defined for job ${step} at resolution ${CASE}" + exit 4 + ;; + esac + tasks_per_node=$(( max_tasks_per_node / threads_per_task )) export is_exclusive=True - # The number of tasks and cores used must be the same for eobs - # See https://github.com/NOAA-EMC/global-workflow/issues/2092 for details - # For S4, this is accomplished by running 10 tasks/node - if [[ ${machine} = "S4" ]]; then - export npe_node_eobs=10 - elif [[ ${machine} = "HERCULES" ]]; then - # For Hercules, this is only an issue at C384; use 20 tasks/node - if [[ ${CASE} = "C384" ]]; then - export npe_node_eobs=20 - fi - fi - export npe_node_eomg=${npe_node_eobs} - -elif [[ "${step}" = "ediag" ]]; then - - export wtime_ediag="00:15:00" - export npe_ediag=48 - export nth_ediag=1 - npe_node_ediag=$(echo "${npe_node_max} / ${nth_ediag}" | bc) - export npe_node_ediag - export memory_ediag="30GB" - -elif [[ "${step}" = "eupd" ]]; then - - export wtime_eupd="00:30:00" - if [[ "${CASE}" = "C768" ]]; then - export npe_eupd=480 - export nth_eupd=6 - if [[ "${machine}" = "WCOSS2" ]]; then - export npe_eupd=315 - export nth_eupd=14 - fi - elif [[ "${CASE}" = "C384" ]]; then - export npe_eupd=270 - export nth_eupd=8 - if [[ "${machine}" = "WCOSS2" ]]; then - export npe_eupd=315 - export nth_eupd=14 - elif [[ "${machine}" = "S4" ]]; then - export npe_eupd=160 - export nth_eupd=2 - fi - elif [[ "${CASE}" = "C192" || "${CASE}" = "C96" || "${CASE}" = "C48" ]]; then - export npe_eupd=42 - export nth_eupd=2 - if [[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then - export nth_eupd=4 - fi + ;; + + "ecen") + walltime="00:10:00" + ntasks=80 + threads_per_task=4 + if [[ ${CASE} == "C384" || ${CASE} == "C192" || ${CASE} == "C96" || ${CASE} == "C48" ]]; then + threads_per_task=2 fi - npe_node_eupd=$(echo "${npe_node_max} / ${nth_eupd}" | bc) - export npe_node_eupd - export is_exclusive=True - -elif [[ "${step}" = "ecen" ]]; then - - export wtime_ecen="00:10:00" - export npe_ecen=80 - export nth_ecen=4 - if [[ "${machine}" = "HERA" ]]; then export nth_ecen=6; fi - if [[ "${CASE}" = "C384" || "${CASE}" = "C192" || "${CASE}" = "C96" || "${CASE}" = "C48" ]]; then export nth_ecen=2; fi - npe_node_ecen=$(echo "${npe_node_max} / ${nth_ecen}" | bc) - export npe_node_ecen - export nth_cycle=${nth_ecen} - npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) - export npe_node_cycle + tasks_per_node=$(( max_tasks_per_node / threads_per_task )) + export threads_per_task_cycle=${threads_per_task} + export tasks_per_node_cycle=${tasks_per_node} export is_exclusive=True - -elif [[ "${step}" = "esfc" ]]; then - - export wtime_esfc="00:08:00" - export npe_esfc=80 - export nth_esfc=1 - npe_node_esfc=$(echo "${npe_node_max} / ${nth_esfc}" | bc) - export npe_node_esfc - export nth_cycle=${nth_esfc} - npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) - export npe_node_cycle - export 
memory_esfc="80GB" - -elif [[ "${step}" = "epos" ]]; then - - export wtime_epos="00:15:00" - export npe_epos=80 - export nth_epos=1 - npe_node_epos=$(echo "${npe_node_max} / ${nth_epos}" | bc) - export npe_node_epos + ;; + + "esfc") + walltime="00:15:00" + ntasks=80 + threads_per_task=1 + tasks_per_node=$(( max_tasks_per_node / threads_per_task )) + threads_per_task_cycle=${threads_per_task} + tasks_per_node_cycle=$(( max_tasks_per_node / threads_per_task_cycle )) + ;; + + "epos") + walltime="00:15:00" + [[ ${CASE} == "C768" ]] && walltime="00:25:00" + ntasks=80 + threads_per_task=1 + tasks_per_node=$(( max_tasks_per_node / threads_per_task )) export is_exclusive=True - -elif [[ "${step}" = "postsnd" ]]; then - - export wtime_postsnd="02:00:00" - export npe_postsnd=40 - export nth_postsnd=8 - export npe_node_postsnd=10 - export npe_postsndcfp=9 - export npe_node_postsndcfp=1 - postsnd_req_cores=$(echo "${npe_node_postsnd} * ${nth_postsnd}" | bc) - if [[ ${postsnd_req_cores} -gt "${npe_node_max}" ]]; then - npe_node_postsnd=$(echo "${npe_node_max} / ${nth_postsnd}" | bc) - export npe_node_postsnd + ;; + + "postsnd") + walltime="02:00:00" + ntasks=40 + threads_per_task=8 + tasks_per_node=10 + export ntasks_postsndcfp=9 + export tasks_per_node_postsndcfp=1 + postsnd_req_cores=$(( tasks_per_node * threads_per_task )) + if (( postsnd_req_cores > max_tasks_per_node )); then + tasks_per_node=$(( max_tasks_per_node / threads_per_task )) fi export is_exclusive=True - -elif [[ "${step}" = "awips" ]]; then - - export wtime_awips="03:30:00" - export npe_awips=1 - export npe_node_awips=1 - export nth_awips=1 - export memory_awips="3GB" - -elif [[ ${step} = "npoess" ]]; then - - export wtime_npoess="03:30:00" - export npe_npoess=1 - export npe_node_npoess=1 - export nth_npoess=1 - export memory_npoess="3GB" - -elif [[ ${step} = "gempak" ]]; then - - export wtime_gempak="03:00:00" - export npe_gempak=2 - export npe_gempak_gfs=28 - export npe_node_gempak=2 - export npe_node_gempak_gfs=28 - export nth_gempak=1 - export memory_gempak="4GB" - export memory_gempak_gfs="2GB" - -elif [[ ${step} = "mos_stn_prep" ]]; then - - export wtime_mos_stn_prep="00:10:00" - export npe_mos_stn_prep=3 - export npe_node_mos_stn_prep=3 - export nth_mos_stn_prep=1 - export memory_mos_stn_prep="5GB" - export NTASK="${npe_mos_stn_prep}" - export PTILE="${npe_node_mos_stn_prep}" - -elif [[ ${step} = "mos_grd_prep" ]]; then - - export wtime_mos_grd_prep="00:10:00" - export npe_mos_grd_prep=4 - export npe_node_mos_grd_prep=4 - export nth_mos_grd_prep=1 - export memory_mos_grd_prep="16GB" - export NTASK="${npe_mos_grd_prep}" - export PTILE="${npe_node_mos_grd_prep}" - -elif [[ ${step} = "mos_ext_stn_prep" ]]; then - - export wtime_mos_ext_stn_prep="00:15:00" - export npe_mos_ext_stn_prep=2 - export npe_node_mos_ext_stn_prep=2 - export nth_mos_ext_stn_prep=1 - export memory_mos_ext_stn_prep="5GB" - export NTASK="${npe_mos_ext_stn_prep}" - export PTILE="${npe_node_mos_ext_stn_prep}" - -elif [[ ${step} = "mos_ext_grd_prep" ]]; then - - export wtime_mos_ext_grd_prep="00:10:00" - export npe_mos_ext_grd_prep=7 - export npe_node_mos_ext_grd_prep=7 - export nth_mos_ext_grd_prep=1 - export memory_mos_ext_grd_prep="3GB" - export NTASK="${npe_mos_ext_grd_prep}" - export PTILE="${npe_node_mos_ext_grd_prep}" - -elif [[ ${step} = "mos_stn_fcst" ]]; then - - export wtime_mos_stn_fcst="00:10:00" - export npe_mos_stn_fcst=5 - export npe_node_mos_stn_fcst=5 - export nth_mos_stn_fcst=1 - export memory_mos_stn_fcst="40GB" - export 
NTASK="${npe_mos_stn_fcst}" - export PTILE="${npe_node_mos_stn_fcst}" - -elif [[ ${step} = "mos_grd_fcst" ]]; then - - export wtime_mos_grd_fcst="00:10:00" - export npe_mos_grd_fcst=7 - export npe_node_mos_grd_fcst=7 - export nth_mos_grd_fcst=1 - export memory_mos_grd_fcst="50GB" - export NTASK="${npe_mos_grd_fcst}" - export PTILE="${npe_node_mos_grd_fcst}" - -elif [[ ${step} = "mos_ext_stn_fcst" ]]; then - - export wtime_mos_ext_stn_fcst="00:20:00" - export npe_mos_ext_stn_fcst=3 - export npe_node_mos_ext_stn_fcst=3 - export nth_mos_ext_stn_fcst=1 - export memory_mos_ext_stn_fcst="50GB" - export NTASK="${npe_mos_ext_stn_fcst}" - export PTILE="${npe_node_mos_ext_stn_fcst}" + ;; + + "awips") + walltime="03:30:00" + ntasks=1 + tasks_per_node=1 + threads_per_task=1 + memory="3GB" + ;; + + "npoess") + walltime="03:30:00" + ntasks=1 + tasks_per_node=1 + threads_per_task=1 + memory="3GB" + ;; + + "gempak") + walltime="00:30:00" + ntasks_gdas=2 + ntasks_gfs=28 + tasks_per_node_gdas=2 + tasks_per_node_gfs=28 + threads_per_task=1 + memory_gdas="4GB" + memory_gfs="2GB" + ;; + + "fbwind") + walltime="00:05:00" + ntasks=1 + threads_per_task=1 + memory="4GB" + ;; + + "mos_stn_prep") + walltime="00:10:00" + ntasks=3 + tasks_per_node=3 + threads_per_task=1 + memory="5GB" + NTASK="${ntasks}" + export PTILE="${tasks_per_node}" + ;; + + "mos_grd_prep") + walltime="00:10:00" + ntasks=4 + tasks_per_node=4 + threads_per_task=1 + memory="16GB" + NTASK="${ntasks}" + export PTILE="${tasks_per_node}" + ;; + + "mos_ext_stn_prep") + walltime="00:15:00" + ntasks=2 + tasks_per_node=2 + threads_per_task=1 + memory="5GB" + NTASK="${ntasks}" + export PTILE="${tasks_per_node}" + ;; + + "mos_ext_grd_prep") + walltime="00:10:00" + ntasks=7 + tasks_per_node=7 + threads_per_task=1 + memory="3GB" + NTASK="${ntasks}" + export PTILE="${tasks_per_node}" + ;; + + "mos_stn_fcst") + walltime="00:10:00" + ntasks=5 + tasks_per_node=5 + threads_per_task=1 + memory="40GB" + NTASK="${ntasks}" + export PTILE="${tasks_per_node}" + ;; + + "mos_grd_fcst") + walltime="00:10:00" + ntasks=7 + tasks_per_node=7 + threads_per_task=1 + memory="50GB" + NTASK="${ntasks}" + export PTILE="${tasks_per_node}" + ;; + + "mos_ext_stn_fcst") + walltime="00:20:00" + ntasks=3 + tasks_per_node=3 + threads_per_task=1 + memory="50GB" + NTASK="${ntasks}" + export PTILE="${tasks_per_node}" export prepost=True - -elif [[ ${step} = "mos_ext_grd_fcst" ]]; then - - export wtime_mos_ext_grd_fcst="00:10:00" - export npe_mos_ext_grd_fcst=7 - export npe_node_mos_ext_grd_fcst=7 - export nth_mos_ext_grd_fcst=1 - export memory_mos_ext_grd_fcst="50GB" - export NTASK="${npe_mos_ext_grd_fcst}" - export PTILE="${npe_node_mos_ext_grd_fcst}" - -elif [[ ${step} = "mos_stn_prdgen" ]]; then - - export wtime_mos_stn_prdgen="00:10:00" - export npe_mos_stn_prdgen=1 - export npe_node_mos_stn_prdgen=1 - export nth_mos_stn_prdgen=1 - export memory_mos_stn_prdgen="15GB" - export NTASK="${npe_mos_stn_prdgen}" - export PTILE="${npe_node_mos_stn_prdgen}" + ;; + + "mos_ext_grd_fcst") + walltime="00:10:00" + ntasks=7 + tasks_per_node=7 + threads_per_task=1 + memory="50GB" + NTASK="${ntasks}" + export PTILE="${tasks_per_node}" + ;; + + "mos_stn_prdgen") + walltime="00:10:00" + ntasks=1 + tasks_per_node=1 + threads_per_task=1 + memory="15GB" + NTASK="${ntasks}" + export PTILE="${tasks_per_node}" export prepost=True - -elif [[ ${step} = "mos_grd_prdgen" ]]; then - - export wtime_mos_grd_prdgen="00:40:00" - export npe_mos_grd_prdgen=72 - export npe_node_mos_grd_prdgen=18 - export nth_mos_grd_prdgen=4 
- export memory_mos_grd_prdgen="20GB" - export NTASK="${npe_mos_grd_prdgen}" - export PTILE="${npe_node_mos_grd_prdgen}" - export OMP_NUM_THREADS="${nth_mos_grd_prdgen}" - -elif [[ ${step} = "mos_ext_stn_prdgen" ]]; then - - export wtime_mos_ext_stn_prdgen="00:10:00" - export npe_mos_ext_stn_prdgen=1 - export npe_node_mos_ext_stn_prdgen=1 - export nth_mos_ext_stn_prdgen=1 - export memory_mos_ext_stn_prdgen="15GB" - export NTASK="${npe_mos_ext_stn_prdgen}" - export PTILE="${npe_node_mos_ext_stn_prdgen}" + ;; + + "mos_grd_prdgen") + walltime="00:40:00" + ntasks=72 + tasks_per_node=18 + threads_per_task=4 + memory="20GB" + NTASK="${ntasks}" + export PTILE="${tasks_per_node}" + export OMP_NUM_THREADS="${threads_per_task}" + ;; + + "mos_ext_stn_prdgen") + walltime="00:10:00" + ntasks=1 + tasks_per_node=1 + threads_per_task=1 + memory="15GB" + NTASK="${ntasks}" + export PTILE="${tasks_per_node}" export prepost=True + ;; + + "mos_ext_grd_prdgen") + walltime="00:30:00" + ntasks=96 + tasks_per_node=6 + threads_per_task=16 + memory="30GB" + NTASK="${ntasks}" + export PTILE="${tasks_per_node}" + export OMP_NUM_THREADS="${threads_per_task}" + ;; + + "mos_wx_prdgen") + walltime="00:10:00" + ntasks=4 + tasks_per_node=2 + threads_per_task=2 + memory="10GB" + NTASK="${ntasks}" + export PTILE="${tasks_per_node}" + export OMP_NUM_THREADS="${threads_per_task}" + ;; + + "mos_wx_ext_prdgen") + walltime="00:10:00" + ntasks=4 + tasks_per_node=2 + threads_per_task=2 + memory="10GB" + NTASK="${ntasks}" + export PTILE="${tasks_per_node}" + export OMP_NUM_THREADS="${threads_per_task}" + ;; + + *) + echo "FATAL ERROR: Invalid job ${step} passed to ${BASH_SOURCE[0]}" + exit 1 + ;; -elif [[ ${step} = "mos_ext_grd_prdgen" ]]; then - - export wtime_mos_ext_grd_prdgen="00:30:00" - export npe_mos_ext_grd_prdgen=96 - export npe_node_mos_ext_grd_prdgen=6 - export nth_mos_ext_grd_prdgen=16 - export memory_mos_ext_grd_prdgen="30GB" - export NTASK="${npe_mos_ext_grd_prdgen}" - export PTILE="${npe_node_mos_ext_grd_prdgen}" - export OMP_NUM_THREADS="${nth_mos_ext_grd_prdgen}" - -elif [[ ${step} = "mos_wx_prdgen" ]]; then - - export wtime_mos_wx_prdgen="00:10:00" - export npe_mos_wx_prdgen=4 - export npe_node_mos_wx_prdgen=2 - export nth_mos_wx_prdgen=2 - export memory_mos_wx_prdgen="10GB" - export NTASK="${npe_mos_wx_prdgen}" - export PTILE="${npe_node_mos_wx_prdgen}" - export OMP_NUM_THREADS="${nth_mos_wx_prdgen}" - -elif [[ ${step} = "mos_wx_ext_prdgen" ]]; then - - export wtime_mos_wx_ext_prdgen="00:10:00" - export npe_mos_wx_ext_prdgen=4 - export npe_node_mos_wx_ext_prdgen=2 - export nth_mos_wx_ext_prdgen=2 - export memory_mos_wx_ext_prdgen="10GB" - export NTASK="${npe_mos_wx_ext_prdgen}" - export PTILE="${npe_node_mos_wx_ext_prdgen}" - export OMP_NUM_THREADS="${nth_mos_wx_ext_prdgen}" - -else - - echo "Invalid step = ${step}, ABORT!" 
- exit 2
+esac
+
+# Get machine-specific resources, overriding/extending the above assignments
+if [[ -f "${EXPDIR}/config.resources.${machine}" ]]; then
+ source "${EXPDIR}/config.resources.${machine}"
fi
+
+# Check for RUN-specific variables and export them
+for resource_var in threads_per_task ntasks tasks_per_node NTASKS memory walltime; do
+ run_resource_var="${resource_var}_${RUN}"
+ if [[ -n "${!run_resource_var+0}" ]]; then
+ declare -x "${resource_var}"="${!run_resource_var}"
+ elif [[ -n "${!resource_var+0}" ]]; then
+ export "${resource_var?}"
+ fi
+done
+
echo "END: config.resources"
diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.resources.GAEA b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.resources.GAEA
new file mode 100644
index 0000000000..51007b5b4f
--- /dev/null
+++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.resources.GAEA
@@ -0,0 +1,27 @@
+#! /usr/bin/env bash
+
+# Gaea-specific job resources
+
+case ${step} in
+ "eobs")
+ # The number of tasks and cores used must be the same for eobs
+ # See https://github.com/NOAA-EMC/global-workflow/issues/2092 for details
+ case ${CASE} in
+ "C768" | "C384")
+ export tasks_per_node=50
+ ;;
+ *)
+ export tasks_per_node=40
+ ;;
+ esac
+ ;;
+
+ *)
+ ;;
+
+esac
+
+# shellcheck disable=SC2312
+for mem_var in $(env | grep '^memory_' | cut -d= -f1); do
+ unset "${mem_var}"
+done
diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.resources.HERA b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.resources.HERA
new file mode 100644
index 0000000000..36f50508c3
--- /dev/null
+++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.resources.HERA
@@ -0,0 +1,35 @@
+#! /usr/bin/env bash
+
+# Hera-specific job resources
+
+case ${step} in
+ "anal")
+ if [[ "${CASE}" == "C384" ]]; then
+ export ntasks=270
+ export threads_per_task=8
+ export tasks_per_node=$(( max_tasks_per_node / threads_per_task ))
+ fi
+ ;;
+
+ "eupd")
+ case ${CASE} in
+ "C384")
+ export ntasks=80
+ ;;
+ "C192" | "C96" | "C48")
+ export threads_per_task=4
+ ;;
+ *)
+ ;;
+ esac
+ export tasks_per_node=$(( max_tasks_per_node / threads_per_task ))
+ ;;
+
+ "ecen")
+ if [[ "${CASE}" == "C768" ]]; then export threads_per_task=6; fi
+ export tasks_per_node=$(( max_tasks_per_node / threads_per_task ))
+ ;;
+
+ *)
+ ;;
+esac
diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.resources.HERCULES b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.resources.HERCULES
new file mode 100644
index 0000000000..7a5a74f69c
--- /dev/null
+++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.resources.HERCULES
@@ -0,0 +1,16 @@
+#! /usr/bin/env bash
+
+# Hercules-specific job resources
+
+case ${step} in
+ "eobs" | "eomg")
+ # The number of tasks and cores used must be the same for eobs
+ # See https://github.com/NOAA-EMC/global-workflow/issues/2092 for details
+ # For Hercules, this is only an issue at C384; use 20 tasks/node
+ if [[ ${CASE} = "C384" ]]; then
+ export tasks_per_node=20
+ fi
+ ;;
+ *)
+ ;;
+esac
diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.resources.JET b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.resources.JET
new file mode 100644
index 0000000000..47b953c0f4
--- /dev/null
+++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.resources.JET
@@ -0,0 +1,52 @@
+#! /usr/bin/env bash
+
+# Jet-specific job resources
+
+case ${step} in
+ "anal")
+ if [[ "${CASE}" == "C384" ]]; then
+ export ntasks=270
+ export threads_per_task=8
+ export tasks_per_node=$(( max_tasks_per_node / threads_per_task ))
+ fi
+ ;;
+
+ "eobs")
+ if [[ "${PARTITION_BATCH}" == "xjet" ]]; then
+ # The number of tasks and cores used must be the same for eobs
+ # See https://github.com/NOAA-EMC/global-workflow/issues/2092 for details
+ # This would also be an issue for vjet and sjet if anyone runs on those nodes.
+ export tasks_per_node=10
+ fi
+ ;;
+
+ "eupd")
+ case ${CASE} in
+ "C384")
+ export ntasks=80
+ ;;
+ "C192" | "C96" | "C48")
+ export threads_per_task=4
+ ;;
+ *)
+ ;;
+ esac
+ export tasks_per_node=$(( max_tasks_per_node / threads_per_task ))
+ ;;
+
+ "ecen")
+ if [[ "${CASE}" == "C768" ]]; then export threads_per_task=6; fi
+ export tasks_per_node=$(( max_tasks_per_node / threads_per_task ))
+ ;;
+
+ "upp")
+ export memory="${mem_node_max}"
+ ;;
+
+ "esfc")
+ export memory="${mem_node_max}"
+ ;;
+
+ *)
+ ;;
+esac
diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.resources.ORION b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.resources.ORION
new file mode 100644
index 0000000000..e3e81b0182
--- /dev/null
+++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.resources.ORION
@@ -0,0 +1,17 @@
+#! /usr/bin/env bash
+
+# Orion-specific job resources
+
+case ${step} in
+ "anal")
+ # TODO:
+ # On Orion, after Rocky 9 upgrade, GSI performance is degraded.
+ # Remove this block once GSI issue is resolved
+ # https://github.com/NOAA-EMC/GSI/pull/764
+ # https://github.com/JCSDA/spack-stack/issues/1166
+ export walltime_gdas="02:40:00"
+ export walltime_gfs="02:00:00"
+ ;;
+ *)
+ ;;
+esac
diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.resources.S4 b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.resources.S4
new file mode 100644
index 0000000000..1af64bf250
--- /dev/null
+++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.resources.S4
@@ -0,0 +1,59 @@
+#! /usr/bin/env bash
+
+# S4-specific job resources
+
+case ${step} in
+ "anal")
+ case ${CASE} in
+ "C384")
+ #Some of the intermediate data can be lost if the number of tasks
+ #per node does not match the number of reserved cores/node.
+ #On the S4-s4 partition, this is accomplished by increasing the task
+ #count to a multiple of 32
+ if [[ ${PARTITION_BATCH} = "s4" ]]; then
+ export ntasks_gdas=416
+ export ntasks_gfs=416
+ fi
+ #S4 is small, so run this task with just 1 thread
+ export threads_per_task=1
+ export walltime_gdas="02:00:00"
+ export walltime_gfs="02:00:00"
+ ;;
+ "C192" | "C96" | "C48")
+ export threads_per_task=4
+ if [[ ${PARTITION_BATCH} == "s4" ]]; then
+ export ntasks_gdas=88
+ export ntasks_gfs=88
+ elif [[ ${PARTITION_BATCH} == "ivy" ]]; then
+ export ntasks_gdas=90
+ export ntasks_gfs=90
+ fi
+ ;;
+ *)
+ ;;
+ esac
+ export tasks_per_node=$(( max_tasks_per_node / threads_per_task ))
+ ;;
+
+ "eobs")
+ # The number of tasks and cores used must be the same for eobs
+ # See https://github.com/NOAA-EMC/global-workflow/issues/2092 for details
+ # For S4, this is accomplished by running 10 tasks/node
+ export tasks_per_node=10
+ ;;
+
+ "eupd")
+ if [[ "${CASE}" == "C384" ]]; then
+ export ntasks=160
+ export threads_per_task=2
+ fi
+ export tasks_per_node=$(( max_tasks_per_node / threads_per_task ))
+ ;;
+
+ "ediag")
+ export memory="${mem_node_max}"
+ ;;
+
+ *)
+ ;;
+esac
diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.resources.WCOSS2 b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.resources.WCOSS2
new file mode 100644
index 0000000000..a0a69fa8d1
--- /dev/null
+++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.resources.WCOSS2
@@ -0,0 +1,59 @@
+#! /usr/bin/env bash
+
+# WCOSS2-specific job resources
+
+case ${step} in
+ "prep")
+ export is_exclusive=True
+ export memory="480GB"
+ ;;
+
+ "anal")
+ if [[ "${CASE}" == "C768" ]]; then
+ export threads_per_task=8
+ # Make ntasks a multiple of 16
+ export ntasks_gdas=784
+ export ntasks_gfs=832
+ export tasks_per_node=$(( max_tasks_per_node / threads_per_task ))
+ fi
+ ;;
+
+ "fit2obs")
+ export tasks_per_node=3
+ ;;
+
+ "echgres")
+ export memory="200GB"
+ ;;
+
+ "arch" | "earc" | "getic")
+ declare -x "memory"="50GB"
+ ;;
+
+ "eupd")
+ case ${CASE} in
+ "C768" | "C384")
+ export ntasks=315
+ export threads_per_task=14
+ ;;
+ *)
+ ;;
+ esac
+ export tasks_per_node=$(( max_tasks_per_node / threads_per_task ))
+ ;;
+
+ "eobs")
+ case ${CASE} in
+ "C768" | "C384")
+ export tasks_per_node=50
+ ;;
+ *)
+ export tasks_per_node=40
+ ;;
+ esac
+ ;;
+
+ *)
+ ;;
+
+esac
diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.sfcanl b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.sfcanl
index 9592fb77c9..e2fde8992a 100644
--- a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.sfcanl
+++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.sfcanl
@@ -8,4 +8,9 @@ echo "BEGIN: config.sfcanl"
# Get task specific resources
. $EXPDIR/config.resources sfcanl
+# Turn off NST in JEDIATMVAR
+if [[ "${DO_JEDIATMVAR}" == "YES" ]]; then
+ export DONST="NO"
+fi
+
echo "END: config.sfcanl"
diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.snowanl b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.snowanl
new file mode 100644
index 0000000000..b8b04f2edc
--- /dev/null
+++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.snowanl
@@ -0,0 +1,30 @@
+#!
/usr/bin/env bash + +########## config.snowanl ########## +# configuration common to snow analysis tasks + +echo "BEGIN: config.snowanl" + +# Get task specific resources +source "${EXPDIR}/config.resources" snowanl + +export OBS_LIST="${PARMgfs}/gdas/snow/obs/lists/gdas_snow.yaml.j2" + +# Name of the JEDI executable and its yaml template +export JEDIEXE="${EXECgfs}/gdas.x" +export JEDIYAML="${PARMgfs}/gdas/snow/letkfoi/letkfoi.yaml.j2" + +# Ensemble member properties +export SNOWDEPTHVAR="snodl" +export BESTDDEV="30." # Background Error Std. Dev. for LETKFOI + +# Name of the executable that applies increment to bkg and its namelist template +export APPLY_INCR_EXE="${EXECgfs}/apply_incr.exe" +export APPLY_INCR_NML_TMPL="${PARMgfs}/gdas/snow/letkfoi/apply_incr_nml.j2" + +export JEDI_FIX_YAML="${PARMgfs}/gdas/snow_jedi_fix.yaml.j2" + +export io_layout_x=1 +export io_layout_y=1 + +echo "END: config.snowanl" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.stage_ic b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.stage_ic index 7f3956af4d..9956e8af6a 100644 --- a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.stage_ic +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.stage_ic @@ -8,7 +8,7 @@ echo "BEGIN: config.stage_ic" source "${EXPDIR}/config.resources" stage_ic case "${CASE}" in - "C48" | "C96") + "C48" | "C96" | "C192") export CPL_ATMIC="workflow_${CASE}_refactored" export CPL_ICEIC="workflow_${CASE}_refactored" export CPL_OCNIC="workflow_${CASE}_refactored" @@ -21,16 +21,16 @@ case "${CASE}" in export CPL_WAVIC=workflow_C384_refactored ;; "C768") - export CPL_ATMIC=HR2_refactored - export CPL_ICEIC=HR1_refactored - export CPL_OCNIC=HR1_refactored - export CPL_WAVIC=HR1_refactored + export CPL_ATMIC=HR3C768 + export CPL_ICEIC=HR3marine + export CPL_OCNIC=HR3marine + export CPL_WAVIC=HR3marine ;; "C1152") - export CPL_ATMIC=HR2_C1152_refactored - export CPL_ICEIC=HR3_refactored - export CPL_OCNIC=HR3_refactored - export CPL_WAVIC=HR1_refactored + export CPL_ATMIC=HR3C1152 + export CPL_ICEIC=HR3marine + export CPL_OCNIC=HR3marine + export CPL_WAVIC=HR3marine ;; *) echo "FATAL ERROR Unrecognized resolution: ${CASE}" @@ -38,4 +38,8 @@ case "${CASE}" in ;; esac +if [[ "${DO_NEST:-NO}" == "YES" ]] ; then + export CPL_ATMIC="GLOBAL-NEST_${CASE}" +fi + echo "END: config.stage_ic" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.ufs b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.ufs index 3aee6a3aa1..20be02e110 100644 --- a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.ufs +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.ufs @@ -15,7 +15,7 @@ if (( $# <= 1 )); then echo "--fv3 C48|C96|C192|C384|C768|C1152|C3072" echo "--mom6 500|100|025" echo "--cice6 500|100|025" - echo "--ww3 gnh_10m;aoc_9km;gsh_15m|gwes_30m|glo_025|glo_200|glo_500|mx025" + echo "--ww3 gnh_10m;aoc_9km;gsh_15m|gwes_30m|glo_025|glo_100|glo_200|glo_500|mx025|uglo_100km|uglo_m1g16" echo "--gocart" exit 1 @@ -68,169 +68,271 @@ if [[ "${skip_mom6}" == "false" ]] || [[ "${skip_cice6}" == "false" ]] || [[ "${ skip_mediator=false fi -case "${machine}" in - "WCOSS2") - npe_node_max=128 - ;; - "HERA" | "ORION" ) - npe_node_max=40 - ;; - "HERCULES" ) - npe_node_max=80 - ;; - "JET") - case "${PARTITION_BATCH}" in - "xjet") - npe_node_max=24 - ;; - "vjet" | "sjet") - npe_node_max=16 - ;; - "kjet") - npe_node_max=40 - ;; - *) - echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" 
- exit 1 - ;; - esac - ;; - "S4") - case "${PARTITION_BATCH}" in - "s4") - npe_node_max=32 - ;; - "ivy") - npe_node_max=20 - ;; - *) - echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" - exit 1 - ;; - esac - ;; - *) - echo "FATAL ERROR: Unrecognized machine ${machine}" - exit 14 - ;; -esac -export npe_node_max +if [[ "${DO_NEST:-NO}" == "YES" ]] ; then + # Describe nest location, interaction with parent, etc. + export grid_type=0 + export stretch_fac=1.0001 + export TARGET_LAT=32.5 + export TARGET_LON=-135.0 + export NEST_LON1=-195.000000 + export NEST_LAT1=-7.500000 + export NEST_LON2=-75.000000 + export NEST_LAT2=72.500000 + export twowaynest=${twowaynest:-.true.} +else + # No nest. + export grid_type=-1 +fi # (Standard) Model resolution dependent variables case "${fv3_res}" in "C48") export DELTIM=1200 - export layout_x=1 - export layout_y=1 + export layout_x_gdas=1 + export layout_y_gdas=1 export layout_x_gfs=1 export layout_y_gfs=1 - export nthreads_fv3=1 + export nthreads_fv3_gdas=1 export nthreads_fv3_gfs=1 + export nthreads_ufs_gdas=1 + export nthreads_ufs_gfs=1 + export xr_cnvcld=".false." # Do not pass conv. clouds to Xu-Randall cloud fraction export cdmbgwd="0.071,2.1,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling export cdmbgwd_gsl="40.0,1.77,1.0,1.0" # settings for GSL drag suite export knob_ugwp_tauamp=6.0e-3 # setting for UGWPv1 non-stationary GWD - export WRITE_GROUP=1 - export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export k_split=1 + export n_split=4 + export tau=8.0 + export rf_cutoff=100.0 + export fv_sg_adj=3600 + export WRITE_GROUP_GDAS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GDAS=1 export WRITE_GROUP_GFS=1 export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 ;; "C96") - export DELTIM=600 - export layout_x=2 - export layout_y=2 - export layout_x_gfs=2 - export layout_y_gfs=2 - export nthreads_fv3=1 - export nthreads_fv3_gfs=1 - export cdmbgwd="0.14,1.8,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling - export cdmbgwd_gsl="20.0,2.5,1.0,1.0" # settings for GSL drag suite - export knob_ugwp_tauamp=3.0e-3 # setting for UGWPv1 non-stationary GWD - export WRITE_GROUP=1 - export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 - export WRITE_GROUP_GFS=1 - export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + if [[ "${DO_NEST:-NO}" == "YES" ]] ; then + export DELTIM=450 + export layout_x_gfs=4 + export layout_y_gfs=4 + export layout_x_nest=12 + export layout_y_nest=10 + export nest_refine=4 + export nest_ioffset=4 + export nest_joffset=9 + export npx_nest=361 + export npy_nest=241 + export NEST_DLON=0.25 + export NEST_DLAT=0.25 + export WRITE_GROUP_GDAS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GDAS=2 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=2 + else + export DELTIM=600 + export layout_x_gdas=2 + export layout_y_gdas=2 + export layout_x_gfs=2 + export layout_y_gfs=2 + export nthreads_fv3_gdas=1 + export nthreads_fv3_gfs=1 + export nthreads_ufs_gdas=1 + export nthreads_ufs_gfs=1 + export xr_cnvcld=.false. # Do not pass conv. 
clouds to Xu-Randall cloud fraction + export cdmbgwd="0.14,1.8,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export cdmbgwd_gsl="20.0,2.5,1.0,1.0" # settings for GSL drag suite + export knob_ugwp_tauamp=3.0e-3 # setting for UGWPv1 non-stationary GWD + export k_split=1 + export n_split=4 + export tau=8.0 + export rf_cutoff=100.0 + export fv_sg_adj=1800 + export WRITE_GROUP_GDAS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GDAS=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + fi ;; "C192") - export DELTIM=450 - export layout_x=4 - export layout_y=6 - export layout_x_gfs=4 - export layout_y_gfs=6 - export nthreads_fv3=1 - export nthreads_fv3_gfs=2 - export cdmbgwd="0.23,1.5,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling - export cdmbgwd_gsl="10.0,3.5,1.0,1.0" # settings for GSL drag suite - export knob_ugwp_tauamp=1.5e-3 # setting for UGWPv1 non-stationary GWD - export WRITE_GROUP=1 - export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 - export WRITE_GROUP_GFS=2 - export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=5 + if [[ "${DO_NEST:-NO}" == "YES" ]] ; then + export DELTIM=225 + export layout_x_gfs=5 + export layout_y_gfs=6 + export layout_x_nest=15 + export layout_y_nest=25 + export nest_refine=4 + export nest_ioffset=7 + export nest_joffset=19 + export npx_nest=721 + export npy_nest=481 + export NEST_DLON=0.125 + export NEST_DLAT=0.125 + export WRITE_GROUP_GDAS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GDAS=15 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=15 + else + export DELTIM=600 + export layout_x_gdas=4 + export layout_y_gdas=6 + export layout_x_gfs=4 + export layout_y_gfs=6 + export nthreads_fv3_gdas=1 + export nthreads_fv3_gfs=2 + export nthreads_ufs_gdas=1 + export nthreads_ufs_gfs=2 + export cdmbgwd="0.23,1.5,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export cdmbgwd_gsl="10.0,3.5,1.0,1.0" # settings for GSL drag suite + export knob_ugwp_tauamp=1.5e-3 # setting for UGWPv1 non-stationary GWD + export k_split=2 + export n_split=4 + export tau=6.0 + export rf_cutoff=100.0 + export fv_sg_adj=1800 + export WRITE_GROUP_GDAS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GDAS=10 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=5 + fi ;; "C384") - export DELTIM=300 - export layout_x=8 - export layout_y=8 - export layout_x_gfs=8 - export layout_y_gfs=8 - export nthreads_fv3=2 - export nthreads_fv3_gfs=2 - export cdmbgwd="1.1,0.72,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling - export cdmbgwd_gsl="5.0,5.0,1.0,1.0" # settings for GSL drag suite - export knob_ugwp_tauamp=0.8e-3 # setting for UGWPv1 non-stationary GWD - export WRITE_GROUP=4 - export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 - export WRITE_GROUP_GFS=4 - export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 + if [[ "${DO_NEST:-NO}" == "YES" ]] ; then + export DELTIM=150 + export layout_x_gdas=8 + export layout_y_gdas=8 + export layout_x_gfs=8 + export layout_y_gfs=8 + export layout_x_nest=34 + export layout_y_nest=24 + export nest_refine=4 + export nest_ioffset=13 + export nest_joffset=37 + export npx_nest=1441 + export npy_nest=961 + export NEST_DLON=0.0625 + export NEST_DLAT=0.0625 + export WRITE_GROUP_GDAS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GDAS=20 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=20 + else + export DELTIM=300 + export layout_x_gdas=8 + export layout_y_gdas=8 + export layout_x_gfs=8 + export layout_y_gfs=8 + 
export nthreads_fv3_gdas=2 + export nthreads_fv3_gfs=2 + export nthreads_ufs_gdas=2 + export nthreads_ufs_gfs=2 + export cdmbgwd="1.1,0.72,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export cdmbgwd_gsl="5.0,5.0,1.0,1.0" # settings for GSL drag suite + export knob_ugwp_tauamp=0.8e-3 # setting for UGWPv1 non-stationary GWD + export k_split=2 + export n_split=4 + export tau=4.0 + export rf_cutoff=100.0 + export fv_sg_adj=900 + export WRITE_GROUP_GDAS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GDAS=10 + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 + fi ;; "C768") - export DELTIM=150 - export layout_x=8 - export layout_y=12 - export layout_x_gfs=12 - export layout_y_gfs=12 - #JKHexport layout_y_gfs=16 - export nthreads_fv3=4 - #JKHexport nthreads_fv3_gfs=4 - export nthreads_fv3_gfs=2 - export cdmbgwd="4.0,0.15,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling - export cdmbgwd_gsl="2.5,7.5,1.0,1.0" # settings for GSL drag suite - export knob_ugwp_tauamp=0.5e-3 # setting for UGWPv1 non-stationary GWD - export WRITE_GROUP=2 - export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 - #JKHexport WRITE_GROUP_GFS=4 - #JKHexport WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=20 #Note this should be 10 for WCOSS2 - export WRITE_GROUP_GFS=1 - export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=40 #Note this should be 10 for WCOSS2 + if [[ "${DO_NEST:-NO}" == "YES" ]] ; then + export DELTIM=75 + export layout_x_gdas=16 + export layout_y_gdas=10 + export layout_x_gfs=16 + export layout_y_gfs=10 + export layout_x_nest=48 + export layout_y_nest=45 + export nthreads_fv3_nest=2 + export nthreads_fv3_gdas=2 + export nthreads_fv3_gfs=2 + export nest_refine=4 + export nest_ioffset=24 + export nest_joffset=72 + export npx_nest=2881 + export npy_nest=1921 + export NEST_DLON=0.0325 + export NEST_DLAT=0.0325 + export WRITE_GROUP_GDAS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GDAS=90 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=90 + else + export DELTIM=150 + export layout_x_gdas=8 + export layout_y_gdas=12 + export layout_x_gfs=12 + export layout_y_gfs=12 + #JKHexport layout_y_gfs=16 + export nthreads_fv3_gdas=4 + #JKHexport nthreads_fv3_gfs=4 + export nthreads_fv3_gfs=2 + export nthreads_ufs_gdas=4 + export nthreads_ufs_gfs=2 + export cdmbgwd="4.0,0.15,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export cdmbgwd_gsl="2.5,7.5,1.0,1.0" # settings for GSL drag suite + export knob_ugwp_tauamp=0.5e-3 # setting for UGWPv1 non-stationary GWD + export k_split=2 + export n_split=4 + export tau=3.0 + export rf_cutoff=100.0 + export fv_sg_adj=450 + export WRITE_GROUP_GDAS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GDAS=10 + #JKHexport WRITE_GROUP_GFS=4 + #JKHexport WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=20 #Note this should be 10 for WCOSS2 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=40 #Note this should be 10 for WCOSS2 + fi ;; "C1152") - export DELTIM=120 - export layout_x=8 - export layout_y=16 + export DELTIM=150 + export layout_x_gdas=8 + export layout_y_gdas=16 export layout_x_gfs=8 export layout_y_gfs=16 - export nthreads_fv3=4 + export nthreads_fv3_gdas=4 export nthreads_fv3_gfs=4 + export nthreads_ufs_gdas=4 + export nthreads_ufs_gfs=4 export cdmbgwd="4.0,0.10,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling export cdmbgwd_gsl="1.67,8.8,1.0,1.0" # settings for GSL drag suite export knob_ugwp_tauamp=0.35e-3 # setting for UGWPv1 non-stationary GWD - export 
WRITE_GROUP=4 - export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export k_split=2 + export n_split=6 + export tau=2.5 + export rf_cutoff=100.0 + export fv_sg_adj=450 + export WRITE_GROUP_GDAS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GDAS=10 # TODO: refine these numbers when a case is available export WRITE_GROUP_GFS=4 export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=20 # TODO: refine these numbers when a case is available ;; "C3072") export DELTIM=90 - export layout_x=16 - export layout_y=32 - export layout_x_gfs=16 + export layout_x_gdas=16 + export layout_y_gdas=32 + export layout_x_gfs=16 export layout_y_gfs=32 - export nthreads_fv3=4 + export nthreads_fv3_gdas=4 export nthreads_fv3_gfs=4 + export nthreads_ufs_gdas=4 + export nthreads_ufs_gfs=4 export cdmbgwd="4.0,0.05,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling export cdmbgwd_gsl="0.625,14.1,1.0,1.0" # settings for GSL drag suite export knob_ugwp_tauamp=0.13e-3 # setting for UGWPv1 non-stationary GWD - export WRITE_GROUP=4 - export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export k_split=4 + export n_split=5 + export tau=0.5 + export rf_cutoff=100.0 + export fv_sg_adj=300 + export WRITE_GROUP_GDAS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GDAS=10 # TODO: refine these numbers when a case is available export WRITE_GROUP_GFS=4 export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available ;; @@ -240,19 +342,22 @@ case "${fv3_res}" in ;; esac -(( WRTTASK_PER_GROUP_PER_THREAD = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE * 6 )) +(( WRTTASK_PER_GROUP_PER_THREAD_GDAS = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GDAS * 6 )) (( WRTTASK_PER_GROUP_PER_THREAD_GFS = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS * 6 )) -export WRTTASK_PER_GROUP_PER_THREAD +export WRTTASK_PER_GROUP_PER_THREAD_GDAS export WRTTASK_PER_GROUP_PER_THREAD_GFS -(( ntasks_fv3 = layout_x * layout_y * 6 )) +(( ntasks_fv3_gdas = layout_x_gdas * layout_y_gdas * 6 )) (( ntasks_fv3_gfs = layout_x_gfs * layout_y_gfs * 6 )) -export ntasks_fv3 +if [[ "${DO_NEST:-NO}" == "YES" ]] ; then + (( ntasks_fv3_gfs += layout_x_nest * layout_y_nest )) +fi +export ntasks_fv3_gdas export ntasks_fv3_gfs -(( ntasks_quilt = WRITE_GROUP * WRTTASK_PER_GROUP_PER_THREAD )) +(( ntasks_quilt_gdas = WRITE_GROUP_GDAS * WRTTASK_PER_GROUP_PER_THREAD_GDAS )) (( ntasks_quilt_gfs = WRITE_GROUP_GFS * WRTTASK_PER_GROUP_PER_THREAD_GFS )) -export ntasks_quilt +export ntasks_quilt_gdas export ntasks_quilt_gfs # Determine whether to use parallel NetCDF based on resolution @@ -279,13 +384,19 @@ export cplice=".false." export cplchm=".false." export cplwav=".false." export cplwav2atm=".false." +if [[ "${DO_NEST:-NO}" == "YES" ]] ; then + export CCPP_SUITE="${CCPP_SUITE:-FV3_global_nest_v1}" +else +#JKH export CCPP_SUITE="${CCPP_SUITE:-FV3_GFS_v17_p8_ugwpv1}" export CCPP_SUITE="FV3_GFS_v17_p8_ugwpv1_c3_mynn" +fi model_list="atm" # Mediator specific settings if [[ "${skip_mediator}" == "false" ]]; then export cpl=".true." - export nthreads_mediator=${nthreads_fv3} # Use same threads as FV3 + export nthreads_mediator_gfs=${nthreads_fv3_gfs} # Use same threads as FV3 + export nthreads_mediator_gdas=${nthreads_fv3_gdas} export CCPP_SUITE="FV3_GFS_v17_coupled_p8_ugwpv1" # TODO: Does this include FV3_GFS_v17_p8? Can this be used instead of FV3_GFS_v17_p8?
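The task totals computed just below this case statement are pure arithmetic on the layout and write-group variables. A standalone sketch of that arithmetic, using the C768 gfs values from this file (12x12 layout, 1 write group, 40 write tasks per group per thread per tile); the numbers are illustrative inputs, not workflow defaults:

    # Sketch: how the task totals fall out of the layout settings above.
    layout_x_gfs=12
    layout_y_gfs=12
    WRITE_GROUP_GFS=1
    WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=40

    # Six cubed-sphere tiles multiply both compute and write tasks.
    (( WRTTASK_PER_GROUP_PER_THREAD_GFS = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS * 6 ))
    (( ntasks_fv3_gfs = layout_x_gfs * layout_y_gfs * 6 ))
    (( ntasks_quilt_gfs = WRITE_GROUP_GFS * WRTTASK_PER_GROUP_PER_THREAD_GFS ))

    echo "fv3 compute ranks: ${ntasks_fv3_gfs}"   # 864
    echo "quilt (I/O) ranks: ${ntasks_quilt_gfs}" # 240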
fi @@ -307,9 +418,13 @@ if [[ "${skip_mom6}" == "false" ]]; then CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" MOM6_RESTART_SETTING='r' MOM6_RIVER_RUNOFF='False' + if [[ ${RUN} == "gfs" || "${RUN}" == "gefs" ]]; then + MOM6_DIAG_MISVAL="-1e34" + else + MOM6_DIAG_MISVAL="0.0" + fi eps_imesh="4.0e-1" MOM6_DIAG_COORD_DEF_Z_FILE="oceanda_zgrid_25L.nc" - MOM6_DIAG_MISVAL="0.0" MOM6_ALLOW_LANDMASK_CHANGES='False' TOPOEDITS="" ;; @@ -326,12 +441,12 @@ if [[ "${skip_mom6}" == "false" ]]; then MOM6_RIVER_RUNOFF='False' eps_imesh="2.5e-1" TOPOEDITS="ufs.topo_edits_011818.nc" - if [[ "${DO_JEDIOCNVAR:-NO}" = "YES" ]]; then - MOM6_DIAG_COORD_DEF_Z_FILE="oceanda_zgrid_75L.nc" - MOM6_DIAG_MISVAL="0.0" - else + if [[ ${RUN} == "gfs" || "${RUN}" == "gefs" ]]; then MOM6_DIAG_COORD_DEF_Z_FILE="interpolate_zgrid_40L.nc" MOM6_DIAG_MISVAL="-1e34" + else + MOM6_DIAG_COORD_DEF_Z_FILE="oceanda_zgrid_75L.nc" + MOM6_DIAG_MISVAL="0.0" fi MOM6_ALLOW_LANDMASK_CHANGES='True' ;; @@ -347,12 +462,12 @@ if [[ "${skip_mom6}" == "false" ]]; then MOM6_RESTART_SETTING='n' MOM6_RIVER_RUNOFF='True' eps_imesh="1.0e-1" - if [[ "${DO_JEDIOCNVAR:-NO}" = "YES" ]]; then - MOM6_DIAG_COORD_DEF_Z_FILE="oceanda_zgrid_75L.nc" - MOM6_DIAG_MISVAL="0.0" - else + if [[ ${RUN} == "gfs" || "${RUN}" == "gefs" ]]; then MOM6_DIAG_COORD_DEF_Z_FILE="interpolate_zgrid_40L.nc" MOM6_DIAG_MISVAL="-1e34" + else + MOM6_DIAG_COORD_DEF_Z_FILE="oceanda_zgrid_75L.nc" + MOM6_DIAG_MISVAL="0.0" fi MOM6_ALLOW_LANDMASK_CHANGES='False' TOPOEDITS="" @@ -369,12 +484,12 @@ if [[ "${skip_mom6}" == "false" ]]; then MOM6_RIVER_RUNOFF='True' MOM6_RESTART_SETTING="r" eps_imesh="1.0e-1" - if [[ "${DO_JEDIOCNVAR:-NO}" = "YES" ]]; then - MOM6_DIAG_COORD_DEF_Z_FILE="oceanda_zgrid_75L.nc" - MOM6_DIAG_MISVAL="0.0" - else + if [[ ${RUN} == "gfs" || "${RUN}" == "gefs" ]]; then MOM6_DIAG_COORD_DEF_Z_FILE="interpolate_zgrid_40L.nc" MOM6_DIAG_MISVAL="-1e34" + else + MOM6_DIAG_COORD_DEF_Z_FILE="oceanda_zgrid_75L.nc" + MOM6_DIAG_MISVAL="0.0" fi MOM6_ALLOW_LANDMASK_CHANGES='False' TOPOEDITS="" @@ -457,6 +572,10 @@ if [[ "${skip_ww3}" == "false" ]]; then "glo_025") ntasks_ww3=262 ;; + "glo_100") + ntasks_ww3=20 + nthreads_ww3=1 + ;; "glo_200") ntasks_ww3=30 nthreads_ww3=1 @@ -468,6 +587,14 @@ if [[ "${skip_ww3}" == "false" ]]; then "mx025") ntasks_ww3=80 ;; + "uglo_100km") + ntasks_ww3=40 + nthreads_ww3=1 + ;; + "uglo_m1g16") + ntasks_ww3=1000 + nthreads_ww3=1 + ;; *) echo "FATAL ERROR: Unsupported WW3 resolution = ${ww3_res}, ABORT!" 
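Every ocean resolution above repeats the same RUN-based choice of MOM6 diagnostic grid and missing value: forecast-only runs (gfs/gefs) take the interpolated 40-level grid with a -1e34 missing value, while DA runs keep the native 75-level grid. A minimal sketch that captures the pattern; set_mom6_diag is a hypothetical helper name, not part of the workflow:

    # Sketch (hypothetical helper): pick MOM6 diagnostic settings from
    # the run type, mirroring the repeated if/else blocks above.
    set_mom6_diag() {
      local run=${1}
      if [[ "${run}" == "gfs" || "${run}" == "gefs" ]]; then
        MOM6_DIAG_COORD_DEF_Z_FILE="interpolate_zgrid_40L.nc"
        MOM6_DIAG_MISVAL="-1e34"
      else
        MOM6_DIAG_COORD_DEF_Z_FILE="oceanda_zgrid_75L.nc"
        MOM6_DIAG_MISVAL="0.0"
      fi
      export MOM6_DIAG_COORD_DEF_Z_FILE MOM6_DIAG_MISVAL
    }

    set_mom6_diag "${RUN:-gdas}"  # gdas -> native 75L grid, misval 0.0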
exit 1 @@ -484,39 +611,45 @@ if [[ "${skip_gocart}" == "false" ]]; then fi # Set the name of the UFS (previously nems) configure template to use +# Default ufs.configure templates for supported model configurations +if [[ "${USE_ESMF_THREADING:-}" == "YES" ]]; then + tmpl_suffix="_esmf" +fi case "${model_list}" in atm) - export ufs_configure_template="${HOMEgfs}/parm/ufs/ufs.configure.atm.IN" + default_template="${PARMgfs}/ufs/ufs.configure.atm${tmpl_suffix:-}.IN" ;; atm.aero) - export ufs_configure_template="${HOMEgfs}/parm/ufs/ufs.configure.atm_aero.IN" + default_template="${PARMgfs}/ufs/ufs.configure.atmaero${tmpl_suffix:-}.IN" ;; atm.wave) - export ufs_configure_template="${HOMEgfs}/parm/ufs/ufs.configure.leapfrog_atm_wav.IN" + default_template="${PARMgfs}/ufs/ufs.configure.leapfrog_atm_wav${tmpl_suffix:-}.IN" ;; atm.ocean.ice) - export ufs_configure_template="${HOMEgfs}/parm/ufs/ufs.configure.cpld.IN" + default_template="${PARMgfs}/ufs/ufs.configure.s2s${tmpl_suffix:-}.IN" ;; atm.ocean.ice.aero) - export ufs_configure_template="${HOMEgfs}/parm/ufs/ufs.configure.cpld_aero.IN" + default_template="${PARMgfs}/ufs/ufs.configure.s2sa${tmpl_suffix:-}.IN" ;; atm.ocean.ice.wave) - export ufs_configure_template="${HOMEgfs}/parm/ufs/ufs.configure.cpld_outerwave.IN" + default_template="${PARMgfs}/ufs/ufs.configure.s2sw${tmpl_suffix:-}.IN" ;; atm.ocean.ice.wave.aero) - export ufs_configure_template="${HOMEgfs}/parm/ufs/ufs.configure.cpld_aero_outerwave.IN" + default_template="${PARMgfs}/ufs/ufs.configure.s2swa${tmpl_suffix:-}.IN" ;; *) - echo "FATAL ERROR: Unable to determine appropriate UFS configure template for ${model_list}" + echo "FATAL ERROR: Unsupported UFSWM configuration for ${model_list}" exit 16 ;; esac +# Allow user to override the default template +export ufs_configure_template=${ufs_configure_template:-${default_template:-"/dev/null"}} +unset model_list default_template + if [[ ! -r "${ufs_configure_template}" ]]; then echo "FATAL ERROR: ${ufs_configure_template} either doesn't exist or is not readable." exit 17 fi -unset model_list - echo "END: config.ufs" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.ufs_c768_12x12_2th_1wg40wt b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.ufs_c768_12x12_2th_1wg40wt index 3aee6a3aa1..20be02e110 100644 --- a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.ufs_c768_12x12_2th_1wg40wt +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.ufs_c768_12x12_2th_1wg40wt @@ -15,7 +15,7 @@ if (( $# <= 1 )); then echo "--fv3 C48|C96|C192|C384|C768|C1152|C3072" echo "--mom6 500|100|025" echo "--cice6 500|100|025" - echo "--ww3 gnh_10m;aoc_9km;gsh_15m|gwes_30m|glo_025|glo_200|glo_500|mx025" + echo "--ww3 gnh_10m;aoc_9km;gsh_15m|gwes_30m|glo_025|glo_100|glo_200|glo_500|mx025|uglo_100km|uglo_m1g16" echo "--gocart" exit 1 @@ -68,169 +68,271 @@ if [[ "${skip_mom6}" == "false" ]] || [[ "${skip_cice6}" == "false" ]] || [[ "${ skip_mediator=false fi -case "${machine}" in - "WCOSS2") - npe_node_max=128 - ;; - "HERA" | "ORION" ) - npe_node_max=40 - ;; - "HERCULES" ) - npe_node_max=80 - ;; - "JET") - case "${PARTITION_BATCH}" in - "xjet") - npe_node_max=24 - ;; - "vjet" | "sjet") - npe_node_max=16 - ;; - "kjet") - npe_node_max=40 - ;; - *) - echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" - exit 1 - ;; - esac - ;; - "S4") - case "${PARTITION_BATCH}" in - "s4") - npe_node_max=32 - ;; - "ivy") - npe_node_max=20 - ;; - *) - echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" 
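The ufs.configure template selection at the end of config.ufs above relies on nested ${var:-default} expansions, so a user-exported ufs_configure_template wins over the case-derived default. A minimal sketch of the idiom; the default_template path here is hypothetical, and note that the /dev/null fallback is itself readable, so it passes the -r guard as an empty template:

    # Sketch: nested default expansion as used for ufs_configure_template.
    default_template="/tmp/ufs.configure.atm.IN"   # hypothetical path
    # Keep a user-exported value; else the case-derived default; else
    # /dev/null (readable and empty, so the -r check below passes).
    export ufs_configure_template=${ufs_configure_template:-${default_template:-"/dev/null"}}

    if [[ ! -r "${ufs_configure_template}" ]]; then
      echo "FATAL ERROR: ${ufs_configure_template} either doesn't exist or is not readable."
      exit 17
    fi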
- exit 1 - ;; - esac - ;; - *) - echo "FATAL ERROR: Unrecognized machine ${machine}" - exit 14 - ;; -esac -export npe_node_max +if [[ "${DO_NEST:-NO}" == "YES" ]] ; then + # Describe nest location, interaction with parent, etc. + export grid_type=0 + export stretch_fac=1.0001 + export TARGET_LAT=32.5 + export TARGET_LON=-135.0 + export NEST_LON1=-195.000000 + export NEST_LAT1=-7.500000 + export NEST_LON2=-75.000000 + export NEST_LAT2=72.500000 + export twowaynest=${twowaynest:-.true.} +else + # No nest. + export grid_type=-1 +fi # (Standard) Model resolution dependent variables case "${fv3_res}" in "C48") export DELTIM=1200 - export layout_x=1 - export layout_y=1 + export layout_x_gdas=1 + export layout_y_gdas=1 export layout_x_gfs=1 export layout_y_gfs=1 - export nthreads_fv3=1 + export nthreads_fv3_gdas=1 export nthreads_fv3_gfs=1 + export nthreads_ufs_gdas=1 + export nthreads_ufs_gfs=1 + export xr_cnvcld=".false." # Do not pass conv. clouds to Xu-Randall cloud fraction export cdmbgwd="0.071,2.1,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling export cdmbgwd_gsl="40.0,1.77,1.0,1.0" # settings for GSL drag suite export knob_ugwp_tauamp=6.0e-3 # setting for UGWPv1 non-stationary GWD - export WRITE_GROUP=1 - export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export k_split=1 + export n_split=4 + export tau=8.0 + export rf_cutoff=100.0 + export fv_sg_adj=3600 + export WRITE_GROUP_GDAS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GDAS=1 export WRITE_GROUP_GFS=1 export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 ;; "C96") - export DELTIM=600 - export layout_x=2 - export layout_y=2 - export layout_x_gfs=2 - export layout_y_gfs=2 - export nthreads_fv3=1 - export nthreads_fv3_gfs=1 - export cdmbgwd="0.14,1.8,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling - export cdmbgwd_gsl="20.0,2.5,1.0,1.0" # settings for GSL drag suite - export knob_ugwp_tauamp=3.0e-3 # setting for UGWPv1 non-stationary GWD - export WRITE_GROUP=1 - export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 - export WRITE_GROUP_GFS=1 - export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + if [[ "${DO_NEST:-NO}" == "YES" ]] ; then + export DELTIM=450 + export layout_x_gfs=4 + export layout_y_gfs=4 + export layout_x_nest=12 + export layout_y_nest=10 + export nest_refine=4 + export nest_ioffset=4 + export nest_joffset=9 + export npx_nest=361 + export npy_nest=241 + export NEST_DLON=0.25 + export NEST_DLAT=0.25 + export WRITE_GROUP_GDAS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GDAS=2 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=2 + else + export DELTIM=600 + export layout_x_gdas=2 + export layout_y_gdas=2 + export layout_x_gfs=2 + export layout_y_gfs=2 + export nthreads_fv3_gdas=1 + export nthreads_fv3_gfs=1 + export nthreads_ufs_gdas=1 + export nthreads_ufs_gfs=1 + export xr_cnvcld=.false. # Do not pass conv. 
clouds to Xu-Randall cloud fraction + export cdmbgwd="0.14,1.8,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export cdmbgwd_gsl="20.0,2.5,1.0,1.0" # settings for GSL drag suite + export knob_ugwp_tauamp=3.0e-3 # setting for UGWPv1 non-stationary GWD + export k_split=1 + export n_split=4 + export tau=8.0 + export rf_cutoff=100.0 + export fv_sg_adj=1800 + export WRITE_GROUP_GDAS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GDAS=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + fi ;; "C192") - export DELTIM=450 - export layout_x=4 - export layout_y=6 - export layout_x_gfs=4 - export layout_y_gfs=6 - export nthreads_fv3=1 - export nthreads_fv3_gfs=2 - export cdmbgwd="0.23,1.5,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling - export cdmbgwd_gsl="10.0,3.5,1.0,1.0" # settings for GSL drag suite - export knob_ugwp_tauamp=1.5e-3 # setting for UGWPv1 non-stationary GWD - export WRITE_GROUP=1 - export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 - export WRITE_GROUP_GFS=2 - export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=5 + if [[ "${DO_NEST:-NO}" == "YES" ]] ; then + export DELTIM=225 + export layout_x_gfs=5 + export layout_y_gfs=6 + export layout_x_nest=15 + export layout_y_nest=25 + export nest_refine=4 + export nest_ioffset=7 + export nest_joffset=19 + export npx_nest=721 + export npy_nest=481 + export NEST_DLON=0.125 + export NEST_DLAT=0.125 + export WRITE_GROUP_GDAS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GDAS=15 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=15 + else + export DELTIM=600 + export layout_x_gdas=4 + export layout_y_gdas=6 + export layout_x_gfs=4 + export layout_y_gfs=6 + export nthreads_fv3_gdas=1 + export nthreads_fv3_gfs=2 + export nthreads_ufs_gdas=1 + export nthreads_ufs_gfs=2 + export cdmbgwd="0.23,1.5,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export cdmbgwd_gsl="10.0,3.5,1.0,1.0" # settings for GSL drag suite + export knob_ugwp_tauamp=1.5e-3 # setting for UGWPv1 non-stationary GWD + export k_split=2 + export n_split=4 + export tau=6.0 + export rf_cutoff=100.0 + export fv_sg_adj=1800 + export WRITE_GROUP_GDAS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GDAS=10 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=5 + fi ;; "C384") - export DELTIM=300 - export layout_x=8 - export layout_y=8 - export layout_x_gfs=8 - export layout_y_gfs=8 - export nthreads_fv3=2 - export nthreads_fv3_gfs=2 - export cdmbgwd="1.1,0.72,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling - export cdmbgwd_gsl="5.0,5.0,1.0,1.0" # settings for GSL drag suite - export knob_ugwp_tauamp=0.8e-3 # setting for UGWPv1 non-stationary GWD - export WRITE_GROUP=4 - export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 - export WRITE_GROUP_GFS=4 - export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 + if [[ "${DO_NEST:-NO}" == "YES" ]] ; then + export DELTIM=150 + export layout_x_gdas=8 + export layout_y_gdas=8 + export layout_x_gfs=8 + export layout_y_gfs=8 + export layout_x_nest=34 + export layout_y_nest=24 + export nest_refine=4 + export nest_ioffset=13 + export nest_joffset=37 + export npx_nest=1441 + export npy_nest=961 + export NEST_DLON=0.0625 + export NEST_DLAT=0.0625 + export WRITE_GROUP_GDAS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GDAS=20 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=20 + else + export DELTIM=300 + export layout_x_gdas=8 + export layout_y_gdas=8 + export layout_x_gfs=8 + export layout_y_gfs=8 + 
export nthreads_fv3_gdas=2 + export nthreads_fv3_gfs=2 + export nthreads_ufs_gdas=2 + export nthreads_ufs_gfs=2 + export cdmbgwd="1.1,0.72,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export cdmbgwd_gsl="5.0,5.0,1.0,1.0" # settings for GSL drag suite + export knob_ugwp_tauamp=0.8e-3 # setting for UGWPv1 non-stationary GWD + export k_split=2 + export n_split=4 + export tau=4.0 + export rf_cutoff=100.0 + export fv_sg_adj=900 + export WRITE_GROUP_GDAS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GDAS=10 + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 + fi ;; "C768") - export DELTIM=150 - export layout_x=8 - export layout_y=12 - export layout_x_gfs=12 - export layout_y_gfs=12 - #JKHexport layout_y_gfs=16 - export nthreads_fv3=4 - #JKHexport nthreads_fv3_gfs=4 - export nthreads_fv3_gfs=2 - export cdmbgwd="4.0,0.15,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling - export cdmbgwd_gsl="2.5,7.5,1.0,1.0" # settings for GSL drag suite - export knob_ugwp_tauamp=0.5e-3 # setting for UGWPv1 non-stationary GWD - export WRITE_GROUP=2 - export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 - #JKHexport WRITE_GROUP_GFS=4 - #JKHexport WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=20 #Note this should be 10 for WCOSS2 - export WRITE_GROUP_GFS=1 - export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=40 #Note this should be 10 for WCOSS2 + if [[ "${DO_NEST:-NO}" == "YES" ]] ; then + export DELTIM=75 + export layout_x_gdas=16 + export layout_y_gdas=10 + export layout_x_gfs=16 + export layout_y_gfs=10 + export layout_x_nest=48 + export layout_y_nest=45 + export nthreads_fv3_nest=2 + export nthreads_fv3_gdas=2 + export nthreads_fv3_gfs=2 + export nest_refine=4 + export nest_ioffset=24 + export nest_joffset=72 + export npx_nest=2881 + export npy_nest=1921 + export NEST_DLON=0.0325 + export NEST_DLAT=0.0325 + export WRITE_GROUP_GDAS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GDAS=90 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=90 + else + export DELTIM=150 + export layout_x_gdas=8 + export layout_y_gdas=12 + export layout_x_gfs=12 + export layout_y_gfs=12 + #JKHexport layout_y_gfs=16 + export nthreads_fv3_gdas=4 + #JKHexport nthreads_fv3_gfs=4 + export nthreads_fv3_gfs=2 + export nthreads_ufs_gdas=4 + export nthreads_ufs_gfs=2 + export cdmbgwd="4.0,0.15,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export cdmbgwd_gsl="2.5,7.5,1.0,1.0" # settings for GSL drag suite + export knob_ugwp_tauamp=0.5e-3 # setting for UGWPv1 non-stationary GWD + export k_split=2 + export n_split=4 + export tau=3.0 + export rf_cutoff=100.0 + export fv_sg_adj=450 + export WRITE_GROUP_GDAS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GDAS=10 + #JKHexport WRITE_GROUP_GFS=4 + #JKHexport WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=20 #Note this should be 10 for WCOSS2 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=40 #Note this should be 10 for WCOSS2 + fi ;; "C1152") - export DELTIM=120 - export layout_x=8 - export layout_y=16 + export DELTIM=150 + export layout_x_gdas=8 + export layout_y_gdas=16 export layout_x_gfs=8 export layout_y_gfs=16 - export nthreads_fv3=4 + export nthreads_fv3_gdas=4 export nthreads_fv3_gfs=4 + export nthreads_ufs_gdas=4 + export nthreads_ufs_gfs=4 export cdmbgwd="4.0,0.10,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling export cdmbgwd_gsl="1.67,8.8,1.0,1.0" # settings for GSL drag suite export knob_ugwp_tauamp=0.35e-3 # setting for UGWPv1 non-stationary GWD - export 
WRITE_GROUP=4 - export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export k_split=2 + export n_split=6 + export tau=2.5 + export rf_cutoff=100.0 + export fv_sg_adj=450 + export WRITE_GROUP_GDAS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GDAS=10 # TODO: refine these numbers when a case is available export WRITE_GROUP_GFS=4 export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=20 # TODO: refine these numbers when a case is available ;; "C3072") export DELTIM=90 - export layout_x=16 - export layout_y=32 - export layout_x_gfs=16 + export layout_x_gdas=16 + export layout_y_gdas=32 + export layout_x_gfs=16 export layout_y_gfs=32 - export nthreads_fv3=4 + export nthreads_fv3_gdas=4 export nthreads_fv3_gfs=4 + export nthreads_ufs_gdas=4 + export nthreads_ufs_gfs=4 export cdmbgwd="4.0,0.05,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling export cdmbgwd_gsl="0.625,14.1,1.0,1.0" # settings for GSL drag suite export knob_ugwp_tauamp=0.13e-3 # setting for UGWPv1 non-stationary GWD - export WRITE_GROUP=4 - export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export k_split=4 + export n_split=5 + export tau=0.5 + export rf_cutoff=100.0 + export fv_sg_adj=300 + export WRITE_GROUP_GDAS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GDAS=10 # TODO: refine these numbers when a case is available export WRITE_GROUP_GFS=4 export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available ;; @@ -240,19 +342,22 @@ case "${fv3_res}" in ;; esac -(( WRTTASK_PER_GROUP_PER_THREAD = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE * 6 )) +(( WRTTASK_PER_GROUP_PER_THREAD_GDAS = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GDAS * 6 )) (( WRTTASK_PER_GROUP_PER_THREAD_GFS = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS * 6 )) -export WRTTASK_PER_GROUP_PER_THREAD +export WRTTASK_PER_GROUP_PER_THREAD_GDAS export WRTTASK_PER_GROUP_PER_THREAD_GFS -(( ntasks_fv3 = layout_x * layout_y * 6 )) +(( ntasks_fv3_gdas = layout_x_gdas * layout_y_gdas * 6 )) (( ntasks_fv3_gfs = layout_x_gfs * layout_y_gfs * 6 )) -export ntasks_fv3 +if [[ "${DO_NEST:-NO}" == "YES" ]] ; then + (( ntasks_fv3_gfs += layout_x_nest * layout_y_nest )) +fi +export ntasks_fv3_gdas export ntasks_fv3_gfs -(( ntasks_quilt = WRITE_GROUP * WRTTASK_PER_GROUP_PER_THREAD )) +(( ntasks_quilt_gdas = WRITE_GROUP_GDAS * WRTTASK_PER_GROUP_PER_THREAD_GDAS )) (( ntasks_quilt_gfs = WRITE_GROUP_GFS * WRTTASK_PER_GROUP_PER_THREAD_GFS )) -export ntasks_quilt +export ntasks_quilt_gdas export ntasks_quilt_gfs # Determine whether to use parallel NetCDF based on resolution @@ -279,13 +384,19 @@ export cplice=".false." export cplchm=".false." export cplwav=".false." export cplwav2atm=".false." +if [[ "${DO_NEST:-NO}" == "YES" ]] ; then + export CCPP_SUITE="${CCPP_SUITE:-FV3_global_nest_v1}" +else +#JKH export CCPP_SUITE="${CCPP_SUITE:-FV3_GFS_v17_p8_ugwpv1}" export CCPP_SUITE="FV3_GFS_v17_p8_ugwpv1_c3_mynn" +fi model_list="atm" # Mediator specific settings if [[ "${skip_mediator}" == "false" ]]; then export cpl=".true." - export nthreads_mediator=${nthreads_fv3} # Use same threads as FV3 + export nthreads_mediator_gfs=${nthreads_fv3_gfs} # Use same threads as FV3 + export nthreads_mediator_gdas=${nthreads_fv3_gdas} export CCPP_SUITE="FV3_GFS_v17_coupled_p8_ugwpv1" # TODO: Does this include FV3_GFS_v17_p8? Can this be used instead of FV3_GFS_v17_p8?
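With a global nest enabled, the nest runs as an extra tile on top of the six parent tiles, which is why ntasks_fv3_gfs grows by the nest layout product in the arithmetic above. A worked sketch using the C96 nest numbers from the case statement in this file:

    # Sketch: rank accounting for a nested run (C96 nest values above).
    DO_NEST="YES"
    layout_x_gfs=4
    layout_y_gfs=4
    layout_x_nest=12
    layout_y_nest=10

    (( ntasks_fv3_gfs = layout_x_gfs * layout_y_gfs * 6 ))   # 96 parent ranks
    if [[ "${DO_NEST:-NO}" == "YES" ]]; then
      # The nest is an extra tile with its own decomposition.
      (( ntasks_fv3_gfs += layout_x_nest * layout_y_nest ))  # +120 -> 216
    fi
    echo "${ntasks_fv3_gfs}"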
fi @@ -307,9 +418,13 @@ if [[ "${skip_mom6}" == "false" ]]; then CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" MOM6_RESTART_SETTING='r' MOM6_RIVER_RUNOFF='False' + if [[ ${RUN} == "gfs" || "${RUN}" == "gefs" ]]; then + MOM6_DIAG_MISVAL="-1e34" + else + MOM6_DIAG_MISVAL="0.0" + fi eps_imesh="4.0e-1" MOM6_DIAG_COORD_DEF_Z_FILE="oceanda_zgrid_25L.nc" - MOM6_DIAG_MISVAL="0.0" MOM6_ALLOW_LANDMASK_CHANGES='False' TOPOEDITS="" ;; @@ -326,12 +441,12 @@ if [[ "${skip_mom6}" == "false" ]]; then MOM6_RIVER_RUNOFF='False' eps_imesh="2.5e-1" TOPOEDITS="ufs.topo_edits_011818.nc" - if [[ "${DO_JEDIOCNVAR:-NO}" = "YES" ]]; then - MOM6_DIAG_COORD_DEF_Z_FILE="oceanda_zgrid_75L.nc" - MOM6_DIAG_MISVAL="0.0" - else + if [[ ${RUN} == "gfs" || "${RUN}" == "gefs" ]]; then MOM6_DIAG_COORD_DEF_Z_FILE="interpolate_zgrid_40L.nc" MOM6_DIAG_MISVAL="-1e34" + else + MOM6_DIAG_COORD_DEF_Z_FILE="oceanda_zgrid_75L.nc" + MOM6_DIAG_MISVAL="0.0" fi MOM6_ALLOW_LANDMASK_CHANGES='True' ;; @@ -347,12 +462,12 @@ if [[ "${skip_mom6}" == "false" ]]; then MOM6_RESTART_SETTING='n' MOM6_RIVER_RUNOFF='True' eps_imesh="1.0e-1" - if [[ "${DO_JEDIOCNVAR:-NO}" = "YES" ]]; then - MOM6_DIAG_COORD_DEF_Z_FILE="oceanda_zgrid_75L.nc" - MOM6_DIAG_MISVAL="0.0" - else + if [[ ${RUN} == "gfs" || "${RUN}" == "gefs" ]]; then MOM6_DIAG_COORD_DEF_Z_FILE="interpolate_zgrid_40L.nc" MOM6_DIAG_MISVAL="-1e34" + else + MOM6_DIAG_COORD_DEF_Z_FILE="oceanda_zgrid_75L.nc" + MOM6_DIAG_MISVAL="0.0" fi MOM6_ALLOW_LANDMASK_CHANGES='False' TOPOEDITS="" @@ -369,12 +484,12 @@ if [[ "${skip_mom6}" == "false" ]]; then MOM6_RIVER_RUNOFF='True' MOM6_RESTART_SETTING="r" eps_imesh="1.0e-1" - if [[ "${DO_JEDIOCNVAR:-NO}" = "YES" ]]; then - MOM6_DIAG_COORD_DEF_Z_FILE="oceanda_zgrid_75L.nc" - MOM6_DIAG_MISVAL="0.0" - else + if [[ ${RUN} == "gfs" || "${RUN}" == "gefs" ]]; then MOM6_DIAG_COORD_DEF_Z_FILE="interpolate_zgrid_40L.nc" MOM6_DIAG_MISVAL="-1e34" + else + MOM6_DIAG_COORD_DEF_Z_FILE="oceanda_zgrid_75L.nc" + MOM6_DIAG_MISVAL="0.0" fi MOM6_ALLOW_LANDMASK_CHANGES='False' TOPOEDITS="" @@ -457,6 +572,10 @@ if [[ "${skip_ww3}" == "false" ]]; then "glo_025") ntasks_ww3=262 ;; + "glo_100") + ntasks_ww3=20 + nthreads_ww3=1 + ;; "glo_200") ntasks_ww3=30 nthreads_ww3=1 @@ -468,6 +587,14 @@ if [[ "${skip_ww3}" == "false" ]]; then "mx025") ntasks_ww3=80 ;; + "uglo_100km") + ntasks_ww3=40 + nthreads_ww3=1 + ;; + "uglo_m1g16") + ntasks_ww3=1000 + nthreads_ww3=1 + ;; *) echo "FATAL ERROR: Unsupported WW3 resolution = ${ww3_res}, ABORT!" 
exit 1 @@ -484,39 +611,45 @@ if [[ "${skip_gocart}" == "false" ]]; then fi # Set the name of the UFS (previously nems) configure template to use +# Default ufs.configure templates for supported model configurations +if [[ "${USE_ESMF_THREADING:-}" == "YES" ]]; then + tmpl_suffix="_esmf" +fi case "${model_list}" in atm) - export ufs_configure_template="${HOMEgfs}/parm/ufs/ufs.configure.atm.IN" + default_template="${PARMgfs}/ufs/ufs.configure.atm${tmpl_suffix:-}.IN" ;; atm.aero) - export ufs_configure_template="${HOMEgfs}/parm/ufs/ufs.configure.atm_aero.IN" + default_template="${PARMgfs}/ufs/ufs.configure.atmaero${tmpl_suffix:-}.IN" ;; atm.wave) - export ufs_configure_template="${HOMEgfs}/parm/ufs/ufs.configure.leapfrog_atm_wav.IN" + default_template="${PARMgfs}/ufs/ufs.configure.leapfrog_atm_wav${tmpl_suffix:-}.IN" ;; atm.ocean.ice) - export ufs_configure_template="${HOMEgfs}/parm/ufs/ufs.configure.cpld.IN" + default_template="${PARMgfs}/ufs/ufs.configure.s2s${tmpl_suffix:-}.IN" ;; atm.ocean.ice.aero) - export ufs_configure_template="${HOMEgfs}/parm/ufs/ufs.configure.cpld_aero.IN" + default_template="${PARMgfs}/ufs/ufs.configure.s2sa${tmpl_suffix:-}.IN" ;; atm.ocean.ice.wave) - export ufs_configure_template="${HOMEgfs}/parm/ufs/ufs.configure.cpld_outerwave.IN" + default_template="${PARMgfs}/ufs/ufs.configure.s2sw${tmpl_suffix:-}.IN" ;; atm.ocean.ice.wave.aero) - export ufs_configure_template="${HOMEgfs}/parm/ufs/ufs.configure.cpld_aero_outerwave.IN" + default_template="${PARMgfs}/ufs/ufs.configure.s2swa${tmpl_suffix:-}.IN" ;; *) - echo "FATAL ERROR: Unable to determine appropriate UFS configure template for ${model_list}" + echo "FATAL ERROR: Unsupported UFSWM configuration for ${model_list}" exit 16 ;; esac +# Allow user to override the default template +export ufs_configure_template=${ufs_configure_template:-${default_template:-"/dev/null"}} +unset model_list default_template + if [[ ! -r "${ufs_configure_template}" ]]; then echo "FATAL ERROR: ${ufs_configure_template} either doesn't exist or is not readable." exit 17 fi -unset model_list - echo "END: config.ufs" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.ufs_c768_16x16_2th_2wg40wt b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.ufs_c768_16x16_2th_2wg40wt index 589358a89e..fdf3fcde7e 100644 --- a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.ufs_c768_16x16_2th_2wg40wt +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.ufs_c768_16x16_2th_2wg40wt @@ -15,7 +15,7 @@ if (( $# <= 1 )); then echo "--fv3 C48|C96|C192|C384|C768|C1152|C3072" echo "--mom6 500|100|025" echo "--cice6 500|100|025" - echo "--ww3 gnh_10m;aoc_9km;gsh_15m|gwes_30m|glo_025|glo_200|glo_500|mx025" + echo "--ww3 gnh_10m;aoc_9km;gsh_15m|gwes_30m|glo_025|glo_100|glo_200|glo_500|mx025|uglo_100km|uglo_m1g16" echo "--gocart" exit 1 @@ -68,169 +68,271 @@ if [[ "${skip_mom6}" == "false" ]] || [[ "${skip_cice6}" == "false" ]] || [[ "${ skip_mediator=false fi -case "${machine}" in - "WCOSS2") - npe_node_max=128 - ;; - "HERA" | "ORION" ) - npe_node_max=40 - ;; - "HERCULES" ) - npe_node_max=80 - ;; - "JET") - case "${PARTITION_BATCH}" in - "xjet") - npe_node_max=24 - ;; - "vjet" | "sjet") - npe_node_max=16 - ;; - "kjet") - npe_node_max=40 - ;; - *) - echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" - exit 1 - ;; - esac - ;; - "S4") - case "${PARTITION_BATCH}" in - "s4") - npe_node_max=32 - ;; - "ivy") - npe_node_max=20 - ;; - *) - echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" 
- exit 1 - ;; - esac - ;; - *) - echo "FATAL ERROR: Unrecognized machine ${machine}" - exit 14 - ;; -esac -export npe_node_max +if [[ "${DO_NEST:-NO}" == "YES" ]] ; then + # Describe nest location, interaction with parent, etc. + export grid_type=0 + export stretch_fac=1.0001 + export TARGET_LAT=32.5 + export TARGET_LON=-135.0 + export NEST_LON1=-195.000000 + export NEST_LAT1=-7.500000 + export NEST_LON2=-75.000000 + export NEST_LAT2=72.500000 + export twowaynest=${twowaynest:-.true.} +else + # No nest. + export grid_type=-1 +fi # (Standard) Model resolution dependent variables case "${fv3_res}" in "C48") export DELTIM=1200 - export layout_x=1 - export layout_y=1 + export layout_x_gdas=1 + export layout_y_gdas=1 export layout_x_gfs=1 export layout_y_gfs=1 - export nthreads_fv3=1 + export nthreads_fv3_gdas=1 export nthreads_fv3_gfs=1 + export nthreads_ufs_gdas=1 + export nthreads_ufs_gfs=1 + export xr_cnvcld=".false." # Do not pass conv. clouds to Xu-Randall cloud fraction export cdmbgwd="0.071,2.1,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling export cdmbgwd_gsl="40.0,1.77,1.0,1.0" # settings for GSL drag suite export knob_ugwp_tauamp=6.0e-3 # setting for UGWPv1 non-stationary GWD - export WRITE_GROUP=1 - export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export k_split=1 + export n_split=4 + export tau=8.0 + export rf_cutoff=100.0 + export fv_sg_adj=3600 + export WRITE_GROUP_GDAS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GDAS=1 export WRITE_GROUP_GFS=1 export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 ;; "C96") - export DELTIM=600 - export layout_x=2 - export layout_y=2 - export layout_x_gfs=2 - export layout_y_gfs=2 - export nthreads_fv3=1 - export nthreads_fv3_gfs=1 - export cdmbgwd="0.14,1.8,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling - export cdmbgwd_gsl="20.0,2.5,1.0,1.0" # settings for GSL drag suite - export knob_ugwp_tauamp=3.0e-3 # setting for UGWPv1 non-stationary GWD - export WRITE_GROUP=1 - export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 - export WRITE_GROUP_GFS=1 - export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + if [[ "${DO_NEST:-NO}" == "YES" ]] ; then + export DELTIM=450 + export layout_x_gfs=4 + export layout_y_gfs=4 + export layout_x_nest=12 + export layout_y_nest=10 + export nest_refine=4 + export nest_ioffset=4 + export nest_joffset=9 + export npx_nest=361 + export npy_nest=241 + export NEST_DLON=0.25 + export NEST_DLAT=0.25 + export WRITE_GROUP_GDAS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GDAS=2 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=2 + else + export DELTIM=600 + export layout_x_gdas=2 + export layout_y_gdas=2 + export layout_x_gfs=2 + export layout_y_gfs=2 + export nthreads_fv3_gdas=1 + export nthreads_fv3_gfs=1 + export nthreads_ufs_gdas=1 + export nthreads_ufs_gfs=1 + export xr_cnvcld=.false. # Do not pass conv. 
clouds to Xu-Randall cloud fraction + export cdmbgwd="0.14,1.8,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export cdmbgwd_gsl="20.0,2.5,1.0,1.0" # settings for GSL drag suite + export knob_ugwp_tauamp=3.0e-3 # setting for UGWPv1 non-stationary GWD + export k_split=1 + export n_split=4 + export tau=8.0 + export rf_cutoff=100.0 + export fv_sg_adj=1800 + export WRITE_GROUP_GDAS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GDAS=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + fi ;; "C192") - export DELTIM=450 - export layout_x=4 - export layout_y=6 - export layout_x_gfs=4 - export layout_y_gfs=6 - export nthreads_fv3=1 - export nthreads_fv3_gfs=2 - export cdmbgwd="0.23,1.5,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling - export cdmbgwd_gsl="10.0,3.5,1.0,1.0" # settings for GSL drag suite - export knob_ugwp_tauamp=1.5e-3 # setting for UGWPv1 non-stationary GWD - export WRITE_GROUP=1 - export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 - export WRITE_GROUP_GFS=2 - export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=5 + if [[ "${DO_NEST:-NO}" == "YES" ]] ; then + export DELTIM=225 + export layout_x_gfs=5 + export layout_y_gfs=6 + export layout_x_nest=15 + export layout_y_nest=25 + export nest_refine=4 + export nest_ioffset=7 + export nest_joffset=19 + export npx_nest=721 + export npy_nest=481 + export NEST_DLON=0.125 + export NEST_DLAT=0.125 + export WRITE_GROUP_GDAS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GDAS=15 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=15 + else + export DELTIM=600 + export layout_x_gdas=4 + export layout_y_gdas=6 + export layout_x_gfs=4 + export layout_y_gfs=6 + export nthreads_fv3_gdas=1 + export nthreads_fv3_gfs=2 + export nthreads_ufs_gdas=1 + export nthreads_ufs_gfs=2 + export cdmbgwd="0.23,1.5,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export cdmbgwd_gsl="10.0,3.5,1.0,1.0" # settings for GSL drag suite + export knob_ugwp_tauamp=1.5e-3 # setting for UGWPv1 non-stationary GWD + export k_split=2 + export n_split=4 + export tau=6.0 + export rf_cutoff=100.0 + export fv_sg_adj=1800 + export WRITE_GROUP_GDAS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GDAS=10 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=5 + fi ;; "C384") - export DELTIM=300 - export layout_x=8 - export layout_y=8 - export layout_x_gfs=8 - export layout_y_gfs=8 - export nthreads_fv3=2 - export nthreads_fv3_gfs=2 - export cdmbgwd="1.1,0.72,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling - export cdmbgwd_gsl="5.0,5.0,1.0,1.0" # settings for GSL drag suite - export knob_ugwp_tauamp=0.8e-3 # setting for UGWPv1 non-stationary GWD - export WRITE_GROUP=4 - export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 - export WRITE_GROUP_GFS=4 - export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 + if [[ "${DO_NEST:-NO}" == "YES" ]] ; then + export DELTIM=150 + export layout_x_gdas=8 + export layout_y_gdas=8 + export layout_x_gfs=8 + export layout_y_gfs=8 + export layout_x_nest=34 + export layout_y_nest=24 + export nest_refine=4 + export nest_ioffset=13 + export nest_joffset=37 + export npx_nest=1441 + export npy_nest=961 + export NEST_DLON=0.0625 + export NEST_DLAT=0.0625 + export WRITE_GROUP_GDAS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GDAS=20 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=20 + else + export DELTIM=300 + export layout_x_gdas=8 + export layout_y_gdas=8 + export layout_x_gfs=8 + export layout_y_gfs=8 + 
export nthreads_fv3_gdas=2 + export nthreads_fv3_gfs=2 + export nthreads_ufs_gdas=2 + export nthreads_ufs_gfs=2 + export cdmbgwd="1.1,0.72,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export cdmbgwd_gsl="5.0,5.0,1.0,1.0" # settings for GSL drag suite + export knob_ugwp_tauamp=0.8e-3 # setting for UGWPv1 non-stationary GWD + export k_split=2 + export n_split=4 + export tau=4.0 + export rf_cutoff=100.0 + export fv_sg_adj=900 + export WRITE_GROUP_GDAS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GDAS=10 + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 + fi ;; "C768") - export DELTIM=150 - export layout_x=8 - export layout_y=12 - #JKHexport layout_x_gfs=12 - export layout_x_gfs=16 - export layout_y_gfs=16 - export nthreads_fv3=4 - #JKHexport nthreads_fv3_gfs=4 - export nthreads_fv3_gfs=2 - export cdmbgwd="4.0,0.15,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling - export cdmbgwd_gsl="2.5,7.5,1.0,1.0" # settings for GSL drag suite - export knob_ugwp_tauamp=0.5e-3 # setting for UGWPv1 non-stationary GWD - export WRITE_GROUP=2 - export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 - #JKHexport WRITE_GROUP_GFS=4 - #JKHexport WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=20 #Note this should be 10 for WCOSS2 - export WRITE_GROUP_GFS=2 - export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=40 #Note this should be 10 for WCOSS2 + if [[ "${DO_NEST:-NO}" == "YES" ]] ; then + export DELTIM=75 + export layout_x_gdas=16 + export layout_y_gdas=10 + export layout_x_gfs=16 + export layout_y_gfs=10 + export layout_x_nest=48 + export layout_y_nest=45 + export nthreads_fv3_nest=2 + export nthreads_fv3_gdas=2 + export nthreads_fv3_gfs=2 + export nest_refine=4 + export nest_ioffset=24 + export nest_joffset=72 + export npx_nest=2881 + export npy_nest=1921 + export NEST_DLON=0.0325 + export NEST_DLAT=0.0325 + export WRITE_GROUP_GDAS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GDAS=90 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=90 + else + export DELTIM=150 + export layout_x_gdas=8 + export layout_y_gdas=12 + export layout_x_gfs=16 + export layout_y_gfs=16 + #JKHexport layout_y_gfs=12 + export nthreads_fv3_gdas=4 + #JKHexport nthreads_fv3_gfs=4 + export nthreads_fv3_gfs=2 + export nthreads_ufs_gdas=4 + export nthreads_ufs_gfs=2 + export cdmbgwd="4.0,0.15,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export cdmbgwd_gsl="2.5,7.5,1.0,1.0" # settings for GSL drag suite + export knob_ugwp_tauamp=0.5e-3 # setting for UGWPv1 non-stationary GWD + export k_split=2 + export n_split=4 + export tau=3.0 + export rf_cutoff=100.0 + export fv_sg_adj=450 + export WRITE_GROUP_GDAS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GDAS=10 + #JKHexport WRITE_GROUP_GFS=4 + #JKHexport WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=20 #Note this should be 10 for WCOSS2 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=40 #Note this should be 10 for WCOSS2 + fi ;; "C1152") - export DELTIM=120 - export layout_x=8 - export layout_y=16 + export DELTIM=150 + export layout_x_gdas=8 + export layout_y_gdas=16 export layout_x_gfs=8 export layout_y_gfs=16 - export nthreads_fv3=4 + export nthreads_fv3_gdas=4 export nthreads_fv3_gfs=4 + export nthreads_ufs_gdas=4 + export nthreads_ufs_gfs=4 export cdmbgwd="4.0,0.10,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling export cdmbgwd_gsl="1.67,8.8,1.0,1.0" # settings for GSL drag suite export knob_ugwp_tauamp=0.35e-3 # setting for UGWPv1 non-stationary GWD - export 
WRITE_GROUP=4 - export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export k_split=2 + export n_split=6 + export tau=2.5 + export rf_cutoff=100.0 + export fv_sg_adj=450 + export WRITE_GROUP_GDAS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GDAS=10 # TODO: refine these numbers when a case is available export WRITE_GROUP_GFS=4 export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=20 # TODO: refine these numbers when a case is available ;; "C3072") export DELTIM=90 - export layout_x=16 - export layout_y=32 - export layout_x_gfs=16 + export layout_x_gdas=16 + export layout_y_gdas=32 + export layout_x_gfs=16 export layout_y_gfs=32 - export nthreads_fv3=4 + export nthreads_fv3_gdas=4 export nthreads_fv3_gfs=4 + export nthreads_ufs_gdas=4 + export nthreads_ufs_gfs=4 export cdmbgwd="4.0,0.05,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling export cdmbgwd_gsl="0.625,14.1,1.0,1.0" # settings for GSL drag suite export knob_ugwp_tauamp=0.13e-3 # setting for UGWPv1 non-stationary GWD - export WRITE_GROUP=4 - export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export k_split=4 + export n_split=5 + export tau=0.5 + export rf_cutoff=100.0 + export fv_sg_adj=300 + export WRITE_GROUP_GDAS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GDAS=10 # TODO: refine these numbers when a case is available export WRITE_GROUP_GFS=4 export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available ;; @@ -240,19 +342,22 @@ case "${fv3_res}" in ;; esac -(( WRTTASK_PER_GROUP_PER_THREAD = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE * 6 )) +(( WRTTASK_PER_GROUP_PER_THREAD_GDAS = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GDAS * 6 )) (( WRTTASK_PER_GROUP_PER_THREAD_GFS = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS * 6 )) -export WRTTASK_PER_GROUP_PER_THREAD +export WRTTASK_PER_GROUP_PER_THREAD_GDAS export WRTTASK_PER_GROUP_PER_THREAD_GFS -(( ntasks_fv3 = layout_x * layout_y * 6 )) +(( ntasks_fv3_gdas = layout_x_gdas * layout_y_gdas * 6 )) (( ntasks_fv3_gfs = layout_x_gfs * layout_y_gfs * 6 )) -export ntasks_fv3 +if [[ "${DO_NEST:-NO}" == "YES" ]] ; then + (( ntasks_fv3_gfs += layout_x_nest * layout_y_nest )) +fi +export ntasks_fv3_gdas export ntasks_fv3_gfs -(( ntasks_quilt = WRITE_GROUP * WRTTASK_PER_GROUP_PER_THREAD )) +(( ntasks_quilt_gdas = WRITE_GROUP_GDAS * WRTTASK_PER_GROUP_PER_THREAD_GDAS )) (( ntasks_quilt_gfs = WRITE_GROUP_GFS * WRTTASK_PER_GROUP_PER_THREAD_GFS )) -export ntasks_quilt +export ntasks_quilt_gdas export ntasks_quilt_gfs # Determine whether to use parallel NetCDF based on resolution @@ -279,13 +384,19 @@ export cplice=".false." export cplchm=".false." export cplwav=".false." export cplwav2atm=".false." +if [[ "${DO_NEST:-NO}" == "YES" ]] ; then + export CCPP_SUITE="${CCPP_SUITE:-FV3_global_nest_v1}" +else +#JKH export CCPP_SUITE="${CCPP_SUITE:-FV3_GFS_v17_p8_ugwpv1}" export CCPP_SUITE="FV3_GFS_v17_p8_ugwpv1_c3_mynn" +fi model_list="atm" # Mediator specific settings if [[ "${skip_mediator}" == "false" ]]; then export cpl=".true." - export nthreads_mediator=${nthreads_fv3} # Use same threads as FV3 + export nthreads_mediator_gfs=${nthreads_fv3_gfs} # Use same threads as FV3 + export nthreads_mediator_gdas=${nthreads_fv3_gdas} export CCPP_SUITE="FV3_GFS_v17_coupled_p8_ugwpv1" # TODO: Does this include FV3_GFS_v17_p8? Can this be used instead of FV3_GFS_v17_p8?
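The mediator does not choose its own thread count; it inherits whatever FV3 was given, as in the assignments above. A minimal sketch of that inheritance with illustrative thread counts:

    # Sketch: the mediator simply reuses the FV3 thread counts chosen
    # above (values illustrative).
    nthreads_fv3_gfs=2
    nthreads_fv3_gdas=4
    skip_mediator=false

    if [[ "${skip_mediator}" == "false" ]]; then
      export cpl=".true."
      export nthreads_mediator_gfs=${nthreads_fv3_gfs}    # 2
      export nthreads_mediator_gdas=${nthreads_fv3_gdas}  # 4
    fi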
fi @@ -307,9 +418,13 @@ if [[ "${skip_mom6}" == "false" ]]; then CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" MOM6_RESTART_SETTING='r' MOM6_RIVER_RUNOFF='False' + if [[ ${RUN} == "gfs" || "${RUN}" == "gefs" ]]; then + MOM6_DIAG_MISVAL="-1e34" + else + MOM6_DIAG_MISVAL="0.0" + fi eps_imesh="4.0e-1" MOM6_DIAG_COORD_DEF_Z_FILE="oceanda_zgrid_25L.nc" - MOM6_DIAG_MISVAL="0.0" MOM6_ALLOW_LANDMASK_CHANGES='False' TOPOEDITS="" ;; @@ -326,12 +441,12 @@ if [[ "${skip_mom6}" == "false" ]]; then MOM6_RIVER_RUNOFF='False' eps_imesh="2.5e-1" TOPOEDITS="ufs.topo_edits_011818.nc" - if [[ "${DO_JEDIOCNVAR:-NO}" = "YES" ]]; then - MOM6_DIAG_COORD_DEF_Z_FILE="oceanda_zgrid_75L.nc" - MOM6_DIAG_MISVAL="0.0" - else + if [[ ${RUN} == "gfs" || "${RUN}" == "gefs" ]]; then MOM6_DIAG_COORD_DEF_Z_FILE="interpolate_zgrid_40L.nc" MOM6_DIAG_MISVAL="-1e34" + else + MOM6_DIAG_COORD_DEF_Z_FILE="oceanda_zgrid_75L.nc" + MOM6_DIAG_MISVAL="0.0" fi MOM6_ALLOW_LANDMASK_CHANGES='True' ;; @@ -347,12 +462,12 @@ if [[ "${skip_mom6}" == "false" ]]; then MOM6_RESTART_SETTING='n' MOM6_RIVER_RUNOFF='True' eps_imesh="1.0e-1" - if [[ "${DO_JEDIOCNVAR:-NO}" = "YES" ]]; then - MOM6_DIAG_COORD_DEF_Z_FILE="oceanda_zgrid_75L.nc" - MOM6_DIAG_MISVAL="0.0" - else + if [[ ${RUN} == "gfs" || "${RUN}" == "gefs" ]]; then MOM6_DIAG_COORD_DEF_Z_FILE="interpolate_zgrid_40L.nc" MOM6_DIAG_MISVAL="-1e34" + else + MOM6_DIAG_COORD_DEF_Z_FILE="oceanda_zgrid_75L.nc" + MOM6_DIAG_MISVAL="0.0" fi MOM6_ALLOW_LANDMASK_CHANGES='False' TOPOEDITS="" @@ -369,12 +484,12 @@ if [[ "${skip_mom6}" == "false" ]]; then MOM6_RIVER_RUNOFF='True' MOM6_RESTART_SETTING="r" eps_imesh="1.0e-1" - if [[ "${DO_JEDIOCNVAR:-NO}" = "YES" ]]; then - MOM6_DIAG_COORD_DEF_Z_FILE="oceanda_zgrid_75L.nc" - MOM6_DIAG_MISVAL="0.0" - else + if [[ ${RUN} == "gfs" || "${RUN}" == "gefs" ]]; then MOM6_DIAG_COORD_DEF_Z_FILE="interpolate_zgrid_40L.nc" MOM6_DIAG_MISVAL="-1e34" + else + MOM6_DIAG_COORD_DEF_Z_FILE="oceanda_zgrid_75L.nc" + MOM6_DIAG_MISVAL="0.0" fi MOM6_ALLOW_LANDMASK_CHANGES='False' TOPOEDITS="" @@ -457,6 +572,10 @@ if [[ "${skip_ww3}" == "false" ]]; then "glo_025") ntasks_ww3=262 ;; + "glo_100") + ntasks_ww3=20 + nthreads_ww3=1 + ;; "glo_200") ntasks_ww3=30 nthreads_ww3=1 @@ -468,6 +587,14 @@ if [[ "${skip_ww3}" == "false" ]]; then "mx025") ntasks_ww3=80 ;; + "uglo_100km") + ntasks_ww3=40 + nthreads_ww3=1 + ;; + "uglo_m1g16") + ntasks_ww3=1000 + nthreads_ww3=1 + ;; *) echo "FATAL ERROR: Unsupported WW3 resolution = ${ww3_res}, ABORT!" 
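Each supported wave grid carries a fixed task count in the case statement above. The same data rendered as a lookup table; ww3_tasks is a hypothetical helper array (bash 4+), and only grids whose counts are visible above are included:

    # Sketch: the ww3_res -> task-count mapping above as an associative
    # array; numbers copied from the case statement.
    declare -A ww3_tasks=(
      [glo_025]=262 [glo_100]=20 [glo_200]=30
      [mx025]=80 [uglo_100km]=40 [uglo_m1g16]=1000
    )
    ww3_res="glo_100"
    if [[ -z "${ww3_tasks[${ww3_res}]:-}" ]]; then
      echo "FATAL ERROR: Unsupported WW3 resolution = ${ww3_res}, ABORT!"
      exit 1
    fi
    ntasks_ww3=${ww3_tasks[${ww3_res}]}
    echo "${ntasks_ww3}"  # 20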
exit 1 @@ -484,39 +611,45 @@ if [[ "${skip_gocart}" == "false" ]]; then fi # Set the name of the UFS (previously nems) configure template to use +# Default ufs.configure templates for supported model configurations +if [[ "${USE_ESMF_THREADING:-}" == "YES" ]]; then + tmpl_suffix="_esmf" +fi case "${model_list}" in atm) - export ufs_configure_template="${HOMEgfs}/parm/ufs/ufs.configure.atm.IN" + default_template="${PARMgfs}/ufs/ufs.configure.atm${tmpl_suffix:-}.IN" ;; atm.aero) - export ufs_configure_template="${HOMEgfs}/parm/ufs/ufs.configure.atm_aero.IN" + default_template="${PARMgfs}/ufs/ufs.configure.atmaero${tmpl_suffix:-}.IN" ;; atm.wave) - export ufs_configure_template="${HOMEgfs}/parm/ufs/ufs.configure.leapfrog_atm_wav.IN" + default_template="${PARMgfs}/ufs/ufs.configure.leapfrog_atm_wav${tmpl_suffix:-}.IN" ;; atm.ocean.ice) - export ufs_configure_template="${HOMEgfs}/parm/ufs/ufs.configure.cpld.IN" + default_template="${PARMgfs}/ufs/ufs.configure.s2s${tmpl_suffix:-}.IN" ;; atm.ocean.ice.aero) - export ufs_configure_template="${HOMEgfs}/parm/ufs/ufs.configure.cpld_aero.IN" + default_template="${PARMgfs}/ufs/ufs.configure.s2sa${tmpl_suffix:-}.IN" ;; atm.ocean.ice.wave) - export ufs_configure_template="${HOMEgfs}/parm/ufs/ufs.configure.cpld_outerwave.IN" + default_template="${PARMgfs}/ufs/ufs.configure.s2sw${tmpl_suffix:-}.IN" ;; atm.ocean.ice.wave.aero) - export ufs_configure_template="${HOMEgfs}/parm/ufs/ufs.configure.cpld_aero_outerwave.IN" + default_template="${PARMgfs}/ufs/ufs.configure.s2swa${tmpl_suffix:-}.IN" ;; *) - echo "FATAL ERROR: Unable to determine appropriate UFS configure template for ${model_list}" + echo "FATAL ERROR: Unsupported UFSWM configuration for ${model_list}" exit 16 ;; esac +# Allow user to override the default template +export ufs_configure_template=${ufs_configure_template:-${default_template:-"/dev/null"}} +unset model_list default_template + if [[ ! -r "${ufs_configure_template}" ]]; then echo "FATAL ERROR: ${ufs_configure_template} either doesn't exist or is not readable." exit 17 fi -unset model_list - echo "END: config.ufs" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.upp b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.upp index a1bd0a7d34..41015c2fee 100644 --- a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.upp +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.upp @@ -8,7 +8,7 @@ echo "BEGIN: config.upp" # Get task specific resources . "${EXPDIR}/config.resources" upp -export UPP_CONFIG="${HOMEgfs}/parm/post/upp.yaml" +export UPP_CONFIG="${PARMgfs}/post/upp.yaml" # No. of forecast hours to process in a single job export NFHRS_PER_GROUP=3 diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.verfozn b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.verfozn index 9eea0f25a3..df7d18012d 100644 --- a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.verfozn +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.verfozn @@ -9,15 +9,14 @@ echo "BEGIN: config.verfozn" export DO_DATA_RPT=1 export OZN_AREA="glb" export OZNMON_SUFFIX=${NET} -export PARMmonitor=${PARMgfs}/monitor -export SATYPE_FILE=${PARMmonitor}/gdas_oznmon_satype.txt +export SATYPE_FILE=${PARMgfs}/monitor/gdas_oznmon_satype.txt # Source the parm file -. "${PARMmonitor}/gdas_oznmon.parm" +. 
"${PARMgfs}/monitor/gdas_oznmon.parm" # Set up validation file if [[ ${VALIDATE_DATA} -eq 1 ]]; then - export ozn_val_file=${PARMmonitor}/gdas_oznmon_base.tar + export ozn_val_file=${PARMgfs}/monitor/gdas_oznmon_base.tar fi echo "END: config.verfozn" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.verfrad b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.verfrad index dd65020180..506ce50b4f 100644 --- a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.verfrad +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.verfrad @@ -6,11 +6,10 @@ echo "BEGIN: config.verfrad" # Get task specific resources . "${EXPDIR}/config.resources" verfrad -export PARMmonitor=${PARMgfs}/monitor -export satype_file=${PARMmonitor}/gdas_radmon_satype.txt +export satype_file=${PARMgfs}/monitor/gdas_radmon_satype.txt # Source the parm file -. "${PARMmonitor}/da_mon.parm" +. "${PARMgfs}/monitor/da_mon.parm" # Other variables export RAD_AREA="glb" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.vminmon b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.vminmon index 8929c36e0e..7c7d362161 100644 --- a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.vminmon +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.vminmon @@ -9,8 +9,7 @@ echo "BEGIN: config.vminmon" export MINMON_SUFFIX=${MINMON_SUFFIX:-${NET}} export CYCLE_INTERVAL=${assim_freq:-6} -export PARMmonitor=${PARMgfs}/monitor -export mm_gnormfile=${PARMmonitor}/${RUN}_minmon_gnorm.txt -export mm_costfile=${PARMmonitor}/${RUN}_minmon_cost.txt +export mm_gnormfile=${PARMgfs}/monitor/${RUN}_minmon_gnorm.txt +export mm_costfile=${PARMgfs}/monitor/${RUN}_minmon_cost.txt echo "END: config.vminmon" diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.wave b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.wave index acb4c518ba..db4eb9f708 100644 --- a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.wave +++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.wave @@ -6,26 +6,14 @@ echo "BEGIN: config.wave" # Parameters that are common to all wave model steps - -# System and version -export wave_sys_ver=v1.0.0 - -export EXECwave="${HOMEgfs}/exec" -export FIXwave="${HOMEgfs}/fix/wave" -export PARMwave="${HOMEgfs}/parm/wave" -export USHwave="${HOMEgfs}/ush" - # This config contains variables/parameters used in the fcst step # Some others are also used across the workflow in wave component scripts # General runtime labels -export CDUMPwave="${RUN}wave" +export RUNwave="${RUN}wave" # In GFS/GDAS, restart files are generated/read from gdas runs -export CDUMPRSTwave="gdas" - -# Grids for wave model -export waveGRD=${waveGRD:-'mx025'} +export RUNRSTwave="gdas" #grid dependent variable defaults export waveGRDN='1' # grid number for ww3_multi @@ -68,6 +56,12 @@ case "${waveGRD}" in export wavepostGRD='glo_025' export waveuoutpGRD=${waveGRD} ;; + "glo_100") + #Global regular lat/lon 1deg deg grid + export waveinterpGRD='' + export wavepostGRD='glo_100' + export waveuoutpGRD=${waveGRD} + ;; "glo_200") #Global regular lat/lon 2deg deg grid export waveinterpGRD='' @@ -80,7 +74,19 @@ case "${waveGRD}" in export wavepostGRD='glo_500' export waveuoutpGRD=${waveGRD} ;; - *) + "uglo_100km") + #unstructured 100km grid + export waveinterpGRD='glo_200' + export wavepostGRD='' + export waveuoutpGRD=${waveGRD} + ;; + "uglo_m1g16") + #unstructured m1v16 grid + export waveinterpGRD='glo_15mxt' + export wavepostGRD='' + export waveuoutpGRD=${waveGRD} + ;; + *) echo "FATAL ERROR: No grid specific wave config values exist for ${waveGRD}. Aborting." 
diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.wave b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.wave
index acb4c518ba..db4eb9f708 100644
--- a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.wave
+++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.wave
@@ -6,26 +6,14 @@ echo "BEGIN: config.wave"

 # Parameters that are common to all wave model steps
-
-# System and version
-export wave_sys_ver=v1.0.0
-
-export EXECwave="${HOMEgfs}/exec"
-export FIXwave="${HOMEgfs}/fix/wave"
-export PARMwave="${HOMEgfs}/parm/wave"
-export USHwave="${HOMEgfs}/ush"
-
 # This config contains variables/parameters used in the fcst step
 # Some others are also used across the workflow in wave component scripts

 # General runtime labels
-export CDUMPwave="${RUN}wave"
+export RUNwave="${RUN}wave"

 # In GFS/GDAS, restart files are generated/read from gdas runs
-export CDUMPRSTwave="gdas"
-
-# Grids for wave model
-export waveGRD=${waveGRD:-'mx025'}
+export RUNRSTwave="gdas"

 # Grid-dependent variable defaults
 export waveGRDN='1' # grid number for ww3_multi
@@ -68,6 +56,12 @@ case "${waveGRD}" in
     export wavepostGRD='glo_025'
     export waveuoutpGRD=${waveGRD}
     ;;
+  "glo_100")
+    # Global regular lat/lon 1-deg grid
+    export waveinterpGRD=''
+    export wavepostGRD='glo_100'
+    export waveuoutpGRD=${waveGRD}
+    ;;
   "glo_200")
     # Global regular lat/lon 2-deg grid
     export waveinterpGRD=''
@@ -80,7 +74,19 @@ case "${waveGRD}" in
     export wavepostGRD='glo_500'
     export waveuoutpGRD=${waveGRD}
     ;;
-  *)
+  "uglo_100km")
+    # Unstructured 100-km grid
+    export waveinterpGRD='glo_200'
+    export wavepostGRD=''
+    export waveuoutpGRD=${waveGRD}
+    ;;
+  "uglo_m1g16")
+    # Unstructured m1g16 grid
+    export waveinterpGRD='glo_15mxt'
+    export wavepostGRD=''
+    export waveuoutpGRD=${waveGRD}
+    ;;
+  *)
     echo "FATAL ERROR: No grid specific wave config values exist for ${waveGRD}. Aborting."
     exit 1
     ;;
@@ -91,18 +97,12 @@ export WAVEWND_DID=
 export WAVEWND_FID=

 # The start time reflects the number of hindcast hours prior to the cycle initial time
-if [[ "${CDUMP}" = "gdas" ]]; then
-  export FHMAX_WAV=${FHMAX:-9}
-else
-  export FHMAX_WAV=${FHMAX_GFS}
+if [[ "${RUN}" == "gfs" ]]; then
+  export FHMAX_WAV=${FHMAX_WAV_GFS}
 fi
 export WAVHINDH=0
-export FHMIN_WAV=0
-export FHOUT_WAV=3
-export FHMAX_HF_WAV=120
-export FHOUT_HF_WAV=1
 export FHMAX_WAV_IBP=180
-if (( FHMAX_WAV < FHMAX_WAV_IBP )); then export FHMAX_WAV_IBP=${FHMAX_GFS} ; fi
+if (( FHMAX_WAV < FHMAX_WAV_IBP )); then export FHMAX_WAV_IBP=${FHMAX_WAV} ; fi

 # gridded and point output rate
 export DTFLD_WAV=$(( FHOUT_HF_WAV * 3600 ))
@@ -113,7 +113,7 @@ export FHINCP_WAV=$(( DTPNT_WAV / 3600 ))
 export OUTPARS_WAV="WND CUR ICE HS T01 T02 DIR FP DP PHS PTP PDIR CHA"

 # Restart file config
-if [[ "${CDUMP}" = "gdas" ]]; then
+if [[ "${RUN}" == "gdas" ]]; then
   export WAVNCYC=4
   export WAVHCYC=${assim_freq:-6}
   export FHMAX_WAV_CUR=48 # RTOFS forecasts only out to 8 days
@@ -128,7 +128,7 @@ fi
 # Restart timing business

 export RSTTYPE_WAV='T' # generate second tier of restart files
-if [[ "${CDUMP}" != gfs ]]; then # Setting is valid for GDAS and GEFS
+if [[ "${RUN}" != gfs ]]; then # Setting is valid for GDAS and GEFS
   export DT_1_RST_WAV=10800 # time between restart files, set to DTRST=1 for a single restart file
   export DT_2_RST_WAV=43200 # restart stride for checkpointing restart
   export RSTIOFF_WAV=0 # first restart file offset relative to model start
@@ -136,8 +136,8 @@ else # This is a GFS run
   rst_dt_gfs=$(( restart_interval_gfs * 3600 )) # TODO: This calculation needs to move to parsing_namelists_WW3.sh
   if [[ ${rst_dt_gfs} -gt 0 ]]; then
     export DT_1_RST_WAV=0 #${rst_dt_gfs:-0} # time between restart files, set to DTRST=1 for a single restart file
-    #temporarily set to zero to avoid a clash in requested restart times
-    #which makes the wave model crash a fix for the model issue will be coming
+    # temporarily set to zero to avoid a clash in requested restart times,
+    # which makes the wave model crash; a fix for the model issue is coming
     export DT_2_RST_WAV=${rst_dt_gfs:-0} # restart stride for checkpointing restart
   else
     rst_dt_fhmax=$(( FHMAX_WAV * 3600 ))
@@ -149,15 +149,15 @@ fi
 #
 # Set runmember to default value if not GEFS cpl run
 # (for a GFS coupled run, RUNMEM would be unset, this should default to -1)
-export RUNMEM=${RUNMEM:--1}
+export RUNMEM="-1"
 # Set wave model member tags if ensemble run
 # -1: no suffix, deterministic; xxxNN: extract two last digits to make ofilename prefix=gwesNN
-if [[ ${RUNMEM} = -1 ]]; then
+if (( RUNMEM == -1 )); then
   # No suffix added to model ID in case of deterministic run
-  export waveMEMB=
+  export waveMEMB=""
 else
   # Extract member number only
-  export waveMEMB="${RUNMEM: -2}"
+  export waveMEMB="${RUNMEM}"
 fi

 # Determine if wave component needs input and/or is coupled
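A short sketch of the member-tag branch above, with a hypothetical GEFS member value. Note the removed line's `${RUNMEM: -2}` took the last two characters, while the new assignment uses RUNMEM as-is (assuming it already holds just the member digits):

#!/usr/bin/env bash
RUNMEM="03"                  # hypothetical GEFS member; "-1" means deterministic
if (( RUNMEM == -1 )); then  # (( )) compares numerically rather than as strings
  waveMEMB=""                # deterministic run: no suffix on the model ID
else
  waveMEMB="${RUNMEM: -2}"   # last two characters, as the removed line did
fi
echo "wave member tag: '${waveMEMB}'"   # prints: wave member tag: '03'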
diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.wavepostbndpnt b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.wavepostbndpnt
index dfeddc79b2..412c5fb42a 100644
--- a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.wavepostbndpnt
+++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.wavepostbndpnt
@@ -6,6 +6,6 @@ echo "BEGIN: config.wavepostbndpnt"

 # Get task specific resources
-. $EXPDIR/config.resources wavepostbndpnt
+source "${EXPDIR}/config.resources" wavepostbndpnt

 echo "END: config.wavepostbndpnt"
diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.wavepostbndpntbll b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.wavepostbndpntbll
index bb7224cc70..6695ab0f84 100644
--- a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.wavepostbndpntbll
+++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.wavepostbndpntbll
@@ -6,6 +6,6 @@ echo "BEGIN: config.wavepostbndpntbll"

 # Get task specific resources
-. $EXPDIR/config.resources wavepostbndpntbll
+source "${EXPDIR}/config.resources" wavepostbndpntbll

 echo "END: config.wavepostbndpntbll"
diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.wavepostpnt b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.wavepostpnt
index 8befb91760..e87237da82 100644
--- a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.wavepostpnt
+++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.wavepostpnt
@@ -6,6 +6,6 @@ echo "BEGIN: config.wavepostpnt"

 # Get task specific resources
-. $EXPDIR/config.resources wavepostpnt
+source "${EXPDIR}/config.resources" wavepostpnt

 echo "END: config.wavepostpnt"
diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.wavepostsbs b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.wavepostsbs
index 8e74aae069..82cec321da 100644
--- a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.wavepostsbs
+++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.wavepostsbs
@@ -6,14 +6,13 @@ echo "BEGIN: config.wavepostsbs"

 # Get task specific resources
-. $EXPDIR/config.resources wavepostsbs
+source "${EXPDIR}/config.resources" wavepostsbs

 # Subgrid info for grib2 encoding
 export WAV_SUBGRBSRC=""
 export WAV_SUBGRB=""

 # Options for point output (switch on/off boundary point output)
-export DOIBP_WAV='NO' # Input boundary points
 export DOFLD_WAV='YES' # Field data
 export DOPNT_WAV='YES' # Station data
 export DOGRB_WAV='YES' # Create grib2 files
diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.waveprep b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.waveprep
index 1c9a40c1d8..1f746eab77 100644
--- a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.waveprep
+++ b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/config.waveprep
@@ -21,7 +21,7 @@ export WAV_CUR_CDO_SMOOTH="NO"
 export CDO_ROOT=${CDO_ROOT:-/usrx/local/dev/packages/cdo/1.9.8}

 if [ "${WW3ICEINP}" = "YES" ]; then
-  export WAVICEFILE=${CDUMP}.t${cyc}z.seaice.5min.grib2
+  export WAVICEFILE=${RUN}.t${cyc}z.seaice.5min.grib2
 fi

 echo "END: config.waveprep"
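All of the wavepost* hunks above make the same change, so one sketch covers them: `source` passes its trailing words to the sourced file as positional parameters, which is how config.resources learns which task's resources to export. The demo file below is hypothetical, not the real config.resources:

#!/usr/bin/env bash
# Write a stand-in for config.resources (illustrative contents only).
cat > /tmp/config.resources.demo <<'EOF'
task=$1   # task name passed by the caller via `source file <task>`
case "${task}" in
  wavepostsbs) export walltime="00:20:00" ;;
  *)           export walltime="01:00:00" ;;
esac
EOF
source "/tmp/config.resources.demo" wavepostsbs
echo "walltime for wavepostsbs: ${walltime}"

Quoting "${EXPDIR}" also keeps the path intact if it ever contains spaces, which the bare `. $EXPDIR/...` form did not.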
diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/rt_pygraf_centos_dev1.xml b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/rt_pygraf_centos_dev1.xml
deleted file mode 100644
index 0ca6e2274d..0000000000
--- a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/rt_pygraf_centos_dev1.xml
+++ /dev/null
@@ -1,134 +0,0 @@
-[134 deleted lines of Rocoto workflow XML; the markup was stripped during extraction and cannot be reproduced verbatim. Recoverable content: an ENTITY preamble; a cycledef over &SDATE;/&EDATE;/&INTERVAL;; a remapgrib task (&JOBS_DIR;/remapgrib.ksh, 00:15:00 walltime, GRID_NAMES 201D130D242) over forecast hours 000-120 in steps of 6; and a pygraf metatask running create_graphics.py maps for grids full/242/130/201 with image lists global.yml, globalAK.yml, globalCONUS.yml, globalNHemi.yml and model label "GFSv17p8_UGWPV1_C3_MYNN".]
diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/rt_pygraf_global_ugwpv1_c3_mynn.xml b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/rt_pygraf_global_ugwpv1_c3_mynn.xml
deleted file mode 100644
index 4d7d3cdbec..0000000000
--- a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/rt_pygraf_global_ugwpv1_c3_mynn.xml
+++ /dev/null
@@ -1,134 +0,0 @@
-[134 deleted lines of Rocoto workflow XML; markup stripped during extraction. Same remapgrib + pygraf structure as rt_pygraf_centos_dev1.xml above, except the remapgrib task uses a 00:35:00 walltime.]
diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/rt_v17p8_ugwpv1_c3_mynn.crontab b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/rt_v17p8_ugwpv1_c3_mynn.crontab
deleted file mode 100644
index 428ce232aa..0000000000
--- a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/rt_v17p8_ugwpv1_c3_mynn.crontab
+++ /dev/null
@@ -1,5 +0,0 @@
-
-#################### rt_v17p8_ugwpv1_c3_mynn ####################
-MAILTO=""
-*/5 * * * * /apps/rocoto/1.3.6/bin/rocotorun -d /home/role.rtfim/UFS-CAMsuite_dev1//FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/rt_v17p8_ugwpv1_c3_mynn.db -w /home/role.rtfim/UFS-CAMsuite_dev1//FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/rt_v17p8_ugwpv1_c3_mynn.xml
-#################################################################
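For context, a field-by-field reading of the cron entry just deleted (paths shortened here, so this sketch is illustrative rather than a drop-in crontab):

# MAILTO="" suppresses the mail cron would otherwise send for every run.
MAILTO=""
# "*/5 * * * *" fires every five minutes; -d names the workflow state
# database, -w the workflow definition XML. Rocoto is designed to be
# re-invoked like this: each pass only submits tasks whose dependencies
# have newly been satisfied.
*/5 * * * * /apps/rocoto/1.3.6/bin/rocotorun -d /path/to/rt_v17p8_ugwpv1_c3_mynn.db -w /path/to/rt_v17p8_ugwpv1_c3_mynn.xml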
diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/rt_v17p8_ugwpv1_c3_mynn.xml b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/rt_v17p8_ugwpv1_c3_mynn.xml
deleted file mode 100644
index 7df2260c8e..0000000000
--- a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/rt_v17p8_ugwpv1_c3_mynn.xml
+++ /dev/null
@@ -1,228 +0,0 @@
-[228 deleted lines of Rocoto workflow XML; markup stripped during extraction. Recoverable content: realtime cycledef 202403210000 203401140000 24:00:00 on hera (account gsd-fv3); a gfsinit task (&JOBS_DIR;/makeinit_link.sh, 00:02:00, 1:ppn=1:tpp=1) staging gfs_data.tile6.nc and sfc_data.tile6.nc from &ICSDIR;; a gfsfcst task (&JOBS_DIR;/fcst.sh, 05:30:00, 56:ppn=40:tpp=1) depending on the staged/analysis surface files; a gfsatmprod metatask (&JOBS_DIR;/atmos_products.sh, 00:15:00, 1:ppn=24:tpp=1, FHRLST groups spanning f000-f120) depending on gfs.t@Hz.master.grb2 output; and a gfsarch task (&JOBS_DIR;/arch.sh, service queue, 06:00:00, 4096M). All tasks export RUN_ENVIR=emc, HOMEgfs, EXPDIR, ROTDIR, NET=gfs, CDUMP/RUN=gfs, CDATE/PDY/cyc, COMROOT=/scratch1/NCEPDEV/global/glopara/com, and DATAROOT under &ROTDIR;/../RUNDIRS/&PSLOT;.]
diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/runcmds b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/runcmds
deleted file mode 100644
index aa43700fbc..0000000000
--- a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/runcmds
+++ /dev/null
@@ -1,27 +0,0 @@
-rocotorun -w ${HOME}/UFS-CAMsuite_dev1/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/rt_v17p8_ugwpv1_c3_mynn.xml -d ${HOME}/rt_dbfiles/rt_v17p8_ugwpv1_c3_mynn.db
-rocotostat -w ${HOME}/UFS-CAMsuite_dev1/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/rt_v17p8_ugwpv1_c3_mynn.xml -d ${HOME}/rt_dbfiles/rt_v17p8_ugwpv1_c3_mynn.db -c `date --date='4 days ago' +%Y%m%d0000`: | m
-
-rocotorun -w ${HOME}/UFS-CAMsuite_dev1/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/v17p8_ugwpv1_c3_mynn.xml -d ${HOME}/retro_dbfiles/v17p8_ugwpv1_c3_mynn.db
-rocotostat -w ${HOME}/UFS-CAMsuite_dev1/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/v17p8_ugwpv1_c3_mynn.xml -d ${HOME}/retro_dbfiles/v17p8_ugwpv1_c3_mynn.db
-
-
-
-PyGraf workflow
-===============
-rocotorun -w ${HOME}/UFS-CAMsuite_dev1/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/rt_pygraf_global_ugwpv1_c3_mynn.xml -d ${HOME}/rt_dbfiles/rt_pygraf_global_ugwpv1_c3_mynn.db
-rocotostat -w ${HOME}/UFS-CAMsuite_dev1/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/rt_pygraf_global_ugwpv1_c3_mynn.xml -d ${HOME}/rt_dbfiles/rt_pygraf_global_ugwpv1_c3_mynn.db -c `date --date='4 days ago' +%Y%m%d0000`: | m
-
-rocotorun -w pygraf_global_ugwpv1_c3_mynn.xml -d pygraf_global_ugwpv1_c3_mynn.db
-rocotostat -w pygraf_global_ugwpv1_c3_mynn.xml -d pygraf_global_ugwpv1_c3_mynn.db
-
-** CentOS **
- 03/21-25
-rocotorun -w rt_pygraf_centos_dev1.xml -d ~/rt_dbfiles/rt_pygraf_centos_dev1_0.db
-rocotostat -w rt_pygraf_centos_dev1.xml -d ~/rt_dbfiles/rt_pygraf_centos_dev1_0.db | m
- 03/25-26
-rocotorun -w rt_pygraf_centos_dev1.xml -d ~/rt_dbfiles/rt_pygraf_centos_dev1.db
-rocotostat -w rt_pygraf_centos_dev1.xml -d ~/rt_dbfiles/rt_pygraf_centos_dev1.db | m
-
-** Rocky test **
-rocotorun -w test_pygraf.xml -d test_pygraf.db
-rocotostat -w test_pygraf.xml -d test_pygraf.db
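The deleted runcmds file was a scratchpad of rocotorun/rocotostat pairs; a cleaned-up sketch of the pattern it repeated (paths as in the deleted file, pulled into variables for readability):

#!/usr/bin/env bash
# rocotorun advances the workflow one pass; rocotostat reports task state.
# -c limits rocotostat output to cycles from the given timestamp onward
# (the trailing colon makes it an open-ended range).
WF="${HOME}/UFS-CAMsuite_dev1/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/rt_v17p8_ugwpv1_c3_mynn.xml"
DB="${HOME}/rt_dbfiles/rt_v17p8_ugwpv1_c3_mynn.db"
rocotorun  -w "${WF}" -d "${DB}"
rocotostat -w "${WF}" -d "${DB}" -c "$(date --date='4 days ago' +%Y%m%d0000):"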
diff --git a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/test_pygraf.xml b/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/test_pygraf.xml
deleted file mode 100644
index d3a7c94622..0000000000
--- a/FV3GFSwfm/rt_v17p8_ugwpv1_c3_mynn/test_pygraf.xml
+++ /dev/null
@@ -1,134 +0,0 @@
-[134 deleted lines of Rocoto workflow XML; markup stripped during extraction. Same remapgrib + pygraf structure as the two deleted rt_pygraf XMLs above, except the remap task calls &JOBS_DIR;/remapgrib.sh (not .ksh) with a 00:35:00 walltime.]
diff --git a/workflow/rt_c3_mynn.sh b/workflow/rt_c3_mynn.sh
index d50ca41bb3..7867debed3 100755
--- a/workflow/rt_c3_mynn.sh
+++ b/workflow/rt_c3_mynn.sh
@@ -1,12 +1,14 @@
+#!/bin/sh
 USER=role.rtfim
-GITDIR=${HOME}/UFS-CAMsuite/ ## where your git checkout is located
+#GITDIR=${HOME}/UFS-CAMsuite/ ## where your git checkout is located
+GITDIR=/scratch2/BMC/gsd-fv3-dev/KaYee.Wong/global/test/Merge_gsl_ufs_dev_to_gsl_ufsrtdev1/
 COMROT=$GITDIR/FV3GFSrun ## default COMROT directory
 EXPDIR=$GITDIR/FV3GFSwfm ## default EXPDIR directory

 ICSDIR=/scratch1/BMC/gsd-fv3/rtruns/FV3ICS_L127

 PSLOT=rt_v17p8_ugwpv1_c3_mynn
-IDATE=2024011400
-EDATE=2024011400
+IDATE=2024091800
+EDATE=2024091800

 RESDET=768 ## 96 192 384 768

 ### gfs_cyc 1 00Z only; gfs_cyc 2 00Z and 12Z
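The script enumerates the supported deterministic resolutions only in a comment; a hypothetical guard like the following (not in the original, written in the same /bin/sh dialect the new shebang declares) shows how RESDET could be validated before setup proceeds:

#!/bin/sh
# Illustrative fail-fast check against the resolutions listed in the
# "## 96 192 384 768" comment above.
RESDET=768
case "${RESDET}" in
  96|192|384|768) ;;  # supported CRES values
  *) echo "FATAL ERROR: unsupported RESDET=${RESDET}" >&2; exit 1 ;;
esac
echo "configuring C${RESDET} experiment ${PSLOT:-rt_v17p8_ugwpv1_c3_mynn} starting ${IDATE:-2024091800}"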