diff --git a/ci/cases/pr/C48_S2SA_gefs.yaml b/ci/cases/pr/C48_S2SWA_gefs.yaml
similarity index 96%
rename from ci/cases/pr/C48_S2SA_gefs.yaml
rename to ci/cases/pr/C48_S2SWA_gefs.yaml
index 2abbf0043e..5eb99d9c1e 100644
--- a/ci/cases/pr/C48_S2SA_gefs.yaml
+++ b/ci/cases/pr/C48_S2SWA_gefs.yaml
@@ -4,7 +4,7 @@ experiment:
 
 arguments:
   pslot: {{ 'pslot' | getenv }}
-  app: S2SA
+  app: S2SWA
   resdet: 48
   resens: 48
   nens: 2
diff --git a/jobs/JGDAS_ENKF_FCST b/jobs/JGDAS_ENKF_FCST
index e8f9393363..53408df8cf 100755
--- a/jobs/JGDAS_ENKF_FCST
+++ b/jobs/JGDAS_ENKF_FCST
@@ -30,6 +30,9 @@ fi
 export ENSEND=$((NMEM_EFCSGRP * 10#${ENSGRP}))
 export ENSBEG=$((ENSEND - NMEM_EFCSGRP + 1))
 
+if [[ ${DO_WAVE} == "YES" ]]; then
+  declare -rx RUNwave="${RUN}wave"
+fi
 
 ###############################################################
 # Run relevant script
diff --git a/jobs/JGFS_ATMOS_VERIFICATION b/jobs/JGFS_ATMOS_VERIFICATION
new file mode 100755
index 0000000000..23a450cd55
--- /dev/null
+++ b/jobs/JGFS_ATMOS_VERIFICATION
@@ -0,0 +1,43 @@
+#! /usr/bin/env bash
+
+source "${HOMEgfs}/ush/preamble.sh"
+source "${HOMEgfs}/ush/jjob_header.sh" -e "metp" -c "base metp"
+
+###############################################################
+## Abstract:
+## Inline METplus verification and diagnostics driver script
+## HOMEgfs : /full/path/to/workflow
+## EXPDIR : /full/path/to/config/files
+## CDATE : current analysis date (YYYYMMDDHH)
+## CDUMP : cycle name (gdas / gfs)
+## PDY : current date (YYYYMMDD)
+## cyc : current cycle (HH)
+## SDATE_GFS : first date of GFS cycle (YYYYMMDDHHMM)
+## METPCASE : METplus verification use case (g2g1 | g2o1 | pcp1)
+###############################################################
+
+# TODO: This should not be permitted as DATAROOT is set at the job-card level.
+# TODO: DATAROOT is being used as DATA in metp jobs. This should be rectified in metp.
+# TODO: The temporary directory is DATA and is created at the top of the J-Job.
+# TODO: remove this line
+export DATAROOT=${DATA}
+
+VDATE=$(date --utc +%Y%m%d%H -d "${PDY} ${cyc} - ${VRFYBACK_HRS} hours")
+export VDATE=${VDATE:0:8}
+
+# Since this is currently a one-element list, shellcheck thinks we would rather run this as a command
+# shellcheck disable=SC2041
+for grid in '1p00'; do
+  prod_dir="COM_ATMOS_GRIB_${grid}"
+  GRID=${grid} YMD=${PDY} HH=${cyc} generate_com -rx "${prod_dir}:COM_ATMOS_GRIB_GRID_TMPL"
+done
+
+# TODO: If none of these are on, why are we running this job?
+if [[ "${RUN_GRID2GRID_STEP1}" == "YES" || "${RUN_GRID2OBS_STEP1}" == "YES" || "${RUN_PRECIP_STEP1}" == "YES" ]]; then
+  ${VERIF_GLOBALSH}
+  status=$?
+  if (( status != 0 )); then exit "${status}"; fi
+fi
+
+if [[ ${KEEPDATA:-"NO"} = "NO" ]] ; then rm -rf "${DATAROOT}" ; fi  # TODO: This should be $DATA
+
diff --git a/jobs/rocoto/metp.sh b/jobs/rocoto/metp.sh
index 95ff2d0f4d..2bf833c1d6 100755
--- a/jobs/rocoto/metp.sh
+++ b/jobs/rocoto/metp.sh
@@ -3,93 +3,13 @@
 source "${HOMEgfs}/ush/preamble.sh"
 
 ###############################################################
-## Abstract:
-## Inline METplus verification and diagnostics driver script
-## HOMEgfs : /full/path/to/workflow
-## EXPDIR : /full/path/to/config/files
-## CDATE : current analysis date (YYYYMMDDHH)
-## CDUMP : cycle name (gdas / gfs)
-## PDY : current date (YYYYMMDD)
-## cyc : current cycle (HH)
-## SDATE_GFS : first date of GFS cycle (YYYYMMDDHHMM)
-## METPCASE : METplus verification use case (g2g1 | g2o1 | pcp1)
-###############################################################
-
-###############################################################
-echo
-echo "=============== START TO SOURCE FV3GFS WORKFLOW MODULES ==============="
-. ${HOMEgfs}/ush/load_fv3gfs_modules.sh
+source "${HOMEgfs}/ush/load_fv3gfs_modules.sh"
 status=$?
-[[ ${status} -ne 0 ]] && exit ${status}
+if (( status != 0 )); then exit "${status}"; fi
 
 export job="metp${METPCASE}"
 export jobid="${job}.$$"
 
-##############################################
-# make temp directory
-##############################################
-export DATA=${DATA:-${DATAROOT}/${jobid}}
-mkdir -p ${DATA}
-cd ${DATA}
-
-
-##############################################
-# Run setpdy and initialize PDY variables
-##############################################
-export cycle="t${cyc}z"
-setpdy.sh
-. ./PDY
-
-###############################################################
-echo
-echo "=============== START TO SOURCE RELEVANT CONFIGS ==============="
-configs="base metp"
-for config in ${configs}; do
-    . ${EXPDIR}/config.${config}
-    status=$?
-    [[ ${status} -ne 0 ]] && exit ${status}
-done
-
-
-###############################################################
-echo
-echo "=============== START TO SOURCE MACHINE RUNTIME ENVIRONMENT ==============="
-. ${BASE_ENV}/${machine}.env metp
-status=$?
-[[ ${status} -ne 0 ]] && exit ${status}
-
-###############################################################
-export COMPONENT="atmos"
-export VDATE="$(echo $(${NDATE} -${VRFYBACK_HRS} ${CDATE}) | cut -c1-8)"
-export COMIN="${ROTDIR}/${CDUMP}.${PDY}/${cyc}/${COMPONENT}"
-
-# TODO: This should not be permitted as DATAROOT is set at the job-card level.
-# TODO: DATAROOT is being used as DATA in metp jobs. This should be rectified in metp.
-# TODO: The temporary directory is DATA and is created at the top of the J-Job.
-# TODO: remove this line
-export DATAROOT=${DATA}
-
-###############################################################
-echo
-echo "=============== START TO RUN METPLUS VERIFICATION ==============="
-if [ ${CDUMP} = "gfs" ]; then
-
-    if [ ${RUN_GRID2GRID_STEP1} = "YES" -o ${RUN_GRID2OBS_STEP1} = "YES" -o ${RUN_PRECIP_STEP1} = "YES" ]; then
-
-        ${VERIF_GLOBALSH}
-        status=$?
-        [[ ${status} -ne 0 ]] && exit ${status}
-        [[ ${status} -eq 0 ]] && echo "Succesfully ran ${VERIF_GLOBALSH}"
-    fi
-fi
-
-
-if [ ${CDUMP} = "gdas" ]; then
-    echo "METplus verification currently not supported for CDUMP=${CDUMP}"
-fi
-###############################################################
-# Force Exit out cleanly
-if [ ${KEEPDATA:-"NO"} = "NO" ] ; then rm -rf ${DATAROOT} ; fi  # TODO: This should be $DATA
-
+"${HOMEgfs}/jobs/JGFS_ATMOS_VERIFICATION"
 
-exit 0
+exit $?
diff --git a/parm/config/gefs/config.base.emc.dyn b/parm/config/gefs/config.base.emc.dyn
index 196e5c7af5..238eb16991 100644
--- a/parm/config/gefs/config.base.emc.dyn
+++ b/parm/config/gefs/config.base.emc.dyn
@@ -242,7 +242,10 @@ export FHMAX_ENKF=${FHMAX_GFS}
 export FHOUT_ENKF=${FHOUT_GFS}
 
 # GFS restart interval in hours
-export restart_interval_gfs=0
+export restart_interval_gfs=12
+# NOTE: Do not set this to zero. Instead set it to $FHMAX_GFS
+# TODO: Remove this variable from config.base and reference from config.fcst
+# TODO: rework logic in config.wave and push it to parsing_namelists_WW3.sh where it is actually used
 
 export QUILTING=".true."
 export OUTPUT_GRID="gaussian_grid"
diff --git a/parm/config/gefs/config.efcs b/parm/config/gefs/config.efcs
index a6d093968d..9593408848 100644
--- a/parm/config/gefs/config.efcs
+++ b/parm/config/gefs/config.efcs
@@ -54,6 +54,6 @@ export SPPT_LSCALE=500000.
 export SPPT_LOGIT=".true."
 export SPPT_SFCLIMIT=".true."
 
-export restart_interval=${restart_interval:-6}
+export restart_interval=${restart_interval_gfs}
 
 echo "END: config.efcs"
diff --git a/parm/config/gefs/config.wave b/parm/config/gefs/config.wave
deleted file mode 120000
index 0b9d14a962..0000000000
--- a/parm/config/gefs/config.wave
+++ /dev/null
@@ -1 +0,0 @@
-../gfs/config.wave
\ No newline at end of file
diff --git a/parm/config/gefs/config.wave b/parm/config/gefs/config.wave
new file mode 100644
index 0000000000..e04331e533
--- /dev/null
+++ b/parm/config/gefs/config.wave
@@ -0,0 +1,185 @@
+#! /usr/bin/env bash
+
+########## config.wave ##########
+# Wave steps specific
+
+echo "BEGIN: config.wave"
+
+# Parameters that are common to all wave model steps
+
+# System and version
+export wave_sys_ver=v1.0.0
+
+export EXECwave="${HOMEgfs}/exec"
+export FIXwave="${HOMEgfs}/fix/wave"
+export PARMwave="${HOMEgfs}/parm/wave"
+export USHwave="${HOMEgfs}/ush"
+
+# This config contains variables/parameters used in the fcst step
+# Some others are also used across the workflow in wave component scripts
+
+# General runtime labels
+export CDUMPwave="${RUN}wave"
+
+# In GFS/GDAS, restart files are generated/read from gdas runs
+export CDUMPRSTwave="gdas"
+
+# Grids for wave model
+export waveGRD=${waveGRD:-'mx025'}
+
+# Grid dependent variable defaults
+export waveGRDN='1'  # grid number for ww3_multi
+export waveGRDG='10'  # grid group for ww3_multi
+export USE_WAV_RMP='NO'  # YES/NO rmp grid remapping pre-processed coefficients
+export waveMULTIGRID='.false.'  # .true./.false. for multi or shel
+export MESH_WAV="mesh.${waveGRD}.nc"  # Mesh grid for wave model for CMEPS
+export waveesmfGRD=' '  # input grid for multigrid
+
+# Grid dependent variables for various grids
+case "${waveGRD}" in
+  "gnh_10m;aoc_9km;gsh_15m")
+    # GFSv16 settings:
+    export waveGRDN='1 2 3'
+    export waveGRDG='10 20 30'
+    export USE_WAV_RMP='YES'
+    export waveMULTIGRID='.true.'
+    export IOSRV='3'
+    export MESH_WAV=' '
+    export waveesmfGRD='glox_10m'
+    export waveuoutpGRD='points'
+    export waveinterpGRD='glo_15mxt at_10m ep_10m wc_10m glo_30m'
+    export wavepostGRD='gnh_10m aoc_9km gsh_15m'
+    ;;
+  "gwes_30m")
+    # Grid used for P8
+    export waveinterpGRD=''
+    export wavepostGRD='gwes_30m'
+    export waveuoutpGRD=${waveGRD}
+    ;;
+  "mx025")
+    # Grid used for HR1 (tripolar 1/4 deg)
+    export waveinterpGRD='reg025'
+    export wavepostGRD=''
+    export waveuoutpGRD=${waveGRD}
+    ;;
+  "glo_025")
+    # GEFSv13 regular lat/lon 1/4 deg grid
+    export waveinterpGRD=''
+    export wavepostGRD='glo_025'
+    export waveuoutpGRD=${waveGRD}
+    ;;
+  "glo_200")
+    # Global regular lat/lon 2 deg grid
+    export waveinterpGRD=''
+    export wavepostGRD='glo_200'
+    export waveuoutpGRD=${waveGRD}
+    ;;
+  "glo_500")
+    # Global regular lat/lon 5 deg grid
+    export waveinterpGRD=''
+    export wavepostGRD='glo_500'
+    export waveuoutpGRD=${waveGRD}
+    ;;
+  *)
+    echo "FATAL ERROR: No grid specific wave config values exist for ${waveGRD}. Aborting."
+    exit 1
+    ;;
+esac
+
+# Grids for input wind fields
+export WAVEWND_DID=
+export WAVEWND_FID=
+
+# The start time reflects the number of hindcast hours prior to the cycle initial time
+export FHMAX_WAV=${FHMAX_GFS}
+export WAVHINDH=0
+export FHMIN_WAV=0
+export FHOUT_WAV=3
+export FHMAX_HF_WAV=120
+export FHOUT_HF_WAV=1
+export FHMAX_WAV_IBP=180
+if (( FHMAX_WAV < FHMAX_WAV_IBP )); then export FHMAX_WAV_IBP=${FHMAX_GFS} ; fi
+
+# gridded and point output rate
+export DTFLD_WAV=$(( FHOUT_HF_WAV * 3600 ))
+export DTPNT_WAV=3600
+export FHINCP_WAV=$(( DTPNT_WAV / 3600 ))
+
+# Selected output parameters (gridded)
+export OUTPARS_WAV="WND CUR ICE HS T01 T02 DIR FP DP PHS PTP PDIR CHA"
+
+# Restart file config
+export WAVHCYC=${assim_freq:-6}
+export FHMAX_WAV_CUR=192  # RTOFS forecasts only out to 8 days
+
+# Restart timing business
+
+export RSTTYPE_WAV='T'  # generate second tier of restart files
+rst_dt_gfs=$(( restart_interval_gfs * 3600 ))  # TODO: This calculation needs to move to parsing_namelists_WW3.sh
+if [[ ${rst_dt_gfs} -gt 0 ]]; then
+  export DT_1_RST_WAV=0  #${rst_dt_gfs:-0} # time between restart files, set to DTRST=1 for a single restart file
+  # temporarily set to zero to avoid a clash in requested restart times,
+  # which makes the wave model crash; a fix for the model issue is coming
+  export DT_2_RST_WAV=${rst_dt_gfs:-0}  # restart stride for checkpointing restart
+else
+  rst_dt_fhmax=$(( FHMAX_WAV * 3600 ))
+  export DT_1_RST_WAV=0  # time between restart files, set to DTRST=1 for a single restart file
+  export DT_2_RST_WAV=${rst_dt_fhmax:-0}  # use checkpoint restart file name for creating restart at end of run
+fi
+export RSTIOFF_WAV=0  # first restart file offset relative to model start
+#
+# Set runmember to default value if not GEFS cpl run
+# (for a GFS coupled run, RUNMEM would be unset, this should default to -1)
+export RUNMEM=${RUNMEM:--1}
+# Set wave model member tags if ensemble run
+# -1: no suffix, deterministic; xxxNN: extract two last digits to make ofilename prefix=gwesNN
+if [[ ${RUNMEM} = -1 ]]; then
+# No suffix added to model ID in case of deterministic run
+  export waveMEMB=
+else
+# Extract member number only
+  export waveMEMB="${RUNMEM: -2}"
+fi
+
+# Determine if wave component needs input and/or is coupled
+export WW3ATMINP='CPL'
+if [[ ${DO_ICE} == "YES" ]]; then
+  export WW3ICEINP='CPL'
+  export WAVEICE_FID=
+else
+  export WW3ICEINP='YES'
+  export WAVEICE_FID=glix_10m
+fi
+
+export WAVECUR_DID=rtofs
+if [[ ${DO_OCN} == "YES" ]]; then
+  export WW3CURINP='CPL'
+  export WAVECUR_FID=
+else
+  export WW3CURINP='YES'
+  export WAVECUR_FID=glix_10m
+fi
+
+# Determine if input is from perturbed ensemble (T) or single input file (F) for all members
+export WW3ATMIENS='F'
+export WW3ICEIENS='F'
+export WW3CURIENS='F'
+
+export GOFILETYPE=1  # GOFILETYPE=1 one gridded file per output step
+export POFILETYPE=1  # POFILETYPE=1 one point file per output step
+
+# Parameters for ww3_multi/shel.inp
+# Unified output T or F
+export FUNIPNT='T'
+# Output server type (see ww3_shel/multi.inp in WW3 repo)
+export IOSRV=${IOSRV:-'1'}
+# Flag for dedicated output process for unified points
+export FPNTPROC='T'
+# Flag for grids sharing dedicated output processes
+export FGRDPROC='F'
+# Flag for masking computation in two-way nesting
+export FLAGMASKCOMP="F"
+# Flag for masking at printout time.
+export FLAGMASKOUT="F"
+
+echo "END: config.wave"
diff --git a/parm/config/gefs/config.waveinit b/parm/config/gefs/config.waveinit
new file mode 120000
index 0000000000..769bbcf903
--- /dev/null
+++ b/parm/config/gefs/config.waveinit
@@ -0,0 +1 @@
+../gfs/config.waveinit
\ No newline at end of file
diff --git a/parm/config/gfs/config.prepoceanobs b/parm/config/gfs/config.prepoceanobs
index c5a9ddfd22..068ecff1ad 100644
--- a/parm/config/gfs/config.prepoceanobs
+++ b/parm/config/gfs/config.prepoceanobs
@@ -4,7 +4,10 @@
 
 echo "BEGIN: config.prepoceanobs"
 
+export OCNOBS2IODAEXEC=${HOMEgfs}/sorc/gdas.cd/build/bin/gdas_obsprovider2ioda.x
+
 export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/config
+export OBSPROC_YAML=@OBSPROC_CONFIG@
 export OBS_LIST=@SOCA_OBS_LIST@
 [[ -n "${OBS_LIST}" ]] || export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/obs_list.yaml
 export OBS_YAML=${OBS_LIST}
diff --git a/parm/wave/ww3_multi.gfs.inp.tmpl b/parm/wave/ww3_multi.inp.tmpl
similarity index 100%
rename from parm/wave/ww3_multi.gfs.inp.tmpl
rename to parm/wave/ww3_multi.inp.tmpl
diff --git a/parm/wave/ww3_shel.gfs.inp.tmpl b/parm/wave/ww3_shel.inp.tmpl
similarity index 100%
rename from parm/wave/ww3_shel.gfs.inp.tmpl
rename to parm/wave/ww3_shel.inp.tmpl
diff --git a/scripts/exgfs_wave_init.sh b/scripts/exgfs_wave_init.sh
index 2be224d1da..ce903a2284 100755
--- a/scripts/exgfs_wave_init.sh
+++ b/scripts/exgfs_wave_init.sh
@@ -203,6 +203,17 @@ source "${HOMEgfs}/ush/preamble.sh"
     fi
   done
 
+# Copy to other members if needed
+if (( NMEM_ENS > 0 )); then
+  for mem in $(seq -f "%03g" 1 "${NMEM_ENS}"); do
+    MEMDIR="mem${mem}" YMD=${PDY} HH=${cyc} generate_com COM_WAVE_PREP_MEM:COM_WAVE_PREP_TMPL
+    mkdir -p "${COM_WAVE_PREP_MEM}"
+    for grdID in ${grdALL}; do
+      ${NLN} "${COM_WAVE_PREP}/${RUN}wave.mod_def.${grdID}" "${COM_WAVE_PREP_MEM}/"
+    done
+  done
+fi
+
 # --------------------------------------------------------------------------- #
 # 2.  Ending
 
diff --git a/ush/parsing_namelists_WW3.sh b/ush/parsing_namelists_WW3.sh
index c53af9f18f..9b0a94695c 100755
--- a/ush/parsing_namelists_WW3.sh
+++ b/ush/parsing_namelists_WW3.sh
@@ -79,8 +79,8 @@ WW3_namelists(){
 
 if [ $waveMULTIGRID = ".true." ]; then
   # ww3_multi template
-  if [ -f $PARMwave/ww3_multi.${NET}.inp.tmpl ]; then
-    cp $PARMwave/ww3_multi.${NET}.inp.tmpl ww3_multi.inp.tmpl
+  if [ -f $PARMwave/ww3_multi.inp.tmpl ]; then
+    cp $PARMwave/ww3_multi.inp.tmpl ww3_multi.inp.tmpl
   fi
   if [ ! -f ww3_multi.inp.tmpl ]; then
     echo "ABNORMAL EXIT: NO TEMPLATE FOR WW3 MULTI INPUT FILE"
@@ -88,8 +88,8 @@ WW3_namelists(){
   fi
 else
   # ww3_multi template
-  if [ -f $PARMwave/ww3_shel.${NET}.inp.tmpl ]; then
-    cp $PARMwave/ww3_shel.${NET}.inp.tmpl ww3_shel.inp.tmpl
+  if [ -f $PARMwave/ww3_shel.inp.tmpl ]; then
+    cp $PARMwave/ww3_shel.inp.tmpl ww3_shel.inp.tmpl
   fi
   if [ ! -f ww3_shel.inp.tmpl ]; then
     echo "ABNORMAL EXIT: NO TEMPLATE FOR WW3 SHEL INPUT FILE"
diff --git a/workflow/applications/gefs.py b/workflow/applications/gefs.py
index 8ac4cdc18e..b2369e8dfc 100644
--- a/workflow/applications/gefs.py
+++ b/workflow/applications/gefs.py
@@ -19,6 +19,9 @@ def _get_app_configs(self):
         if self.nens > 0:
             configs += ['efcs']
 
+        if self.do_wave:
+            configs += ['waveinit']
+
         return configs
 
     @staticmethod
@@ -32,7 +35,12 @@ def _update_base(base_in):
 
     def get_task_names(self):
 
-        tasks = ['stage_ic', 'fcst']
+        tasks = ['stage_ic']
+
+        if self.do_wave:
+            tasks += ['waveinit']
+
+        tasks += ['fcst']
 
         if self.nens > 0:
             tasks += ['efcs']
diff --git a/workflow/rocoto/gefs_tasks.py b/workflow/rocoto/gefs_tasks.py
index c5dae3a13d..680c7d8686 100644
--- a/workflow/rocoto/gefs_tasks.py
+++ b/workflow/rocoto/gefs_tasks.py
@@ -50,7 +50,7 @@ def stage_ic(self):
         if self.app_config.do_wave:
             prefix = f"{cpl_ic['BASE_CPLIC']}/{cpl_ic['CPL_WAVIC']}/@Y@m@d@H/mem000/wave"
             for wave_grid in self._configs['waveinit']['waveGRD'].split():
-                data = f"{prefix}/{wave_grid}/@Y@m@d.@H0000.restart.{wave_grid}"
+                data = f"{prefix}/@Y@m@d.@H0000.restart.{wave_grid}"
                 dep_dict = {'type': 'data', 'data': data}
                 deps.append(rocoto.add_dependency(dep_dict))
 
@@ -61,12 +61,25 @@ def stage_ic(self):
 
         return task
 
+    def waveinit(self):
+
+        resources = self.get_resource('waveinit')
+        task = create_wf_task('waveinit', resources, cdump=self.cdump, envar=self.envars, dependency=None)
+
+        return task
+
     def fcst(self):
         # TODO: Add real dependencies
         dependencies = []
         dep_dict = {'type': 'task', 'name': f'{self.cdump}stage_ic'}
         dependencies.append(rocoto.add_dependency(dep_dict))
 
+        if self.app_config.do_wave:
+            dep_dict = {'type': 'task', 'name': f'{self.cdump}waveinit'}
+            dependencies.append(rocoto.add_dependency(dep_dict))
+
+        dependencies = rocoto.create_dependency(dep_condition='and', dep=dependencies)
+
         resources = self.get_resource('fcst')
         task = create_wf_task('fcst', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies)
 
@@ -77,6 +90,12 @@ def efcs(self):
         dep_dict = {'type': 'task', 'name': f'{self.cdump}stage_ic'}
         dependencies.append(rocoto.add_dependency(dep_dict))
 
+        if self.app_config.do_wave:
+            dep_dict = {'type': 'task', 'name': f'{self.cdump}waveinit'}
+            dependencies.append(rocoto.add_dependency(dep_dict))
+
+        dependencies = rocoto.create_dependency(dep_condition='and', dep=dependencies)
+
         efcsenvars = self.envars.copy()
         efcsenvars.append(rocoto.create_envar(name='ENSGRP', value='#grp#'))