diff --git a/ci/cases/gfsv17/C384mx025_3DVarAOWCDA.yaml b/ci/cases/gfsv17/C384mx025_3DVarAOWCDA.yaml new file mode 100644 index 0000000000..d97c9567e9 --- /dev/null +++ b/ci/cases/gfsv17/C384mx025_3DVarAOWCDA.yaml @@ -0,0 +1,18 @@ +experiment: + system: gfs + mode: cycled + +arguments: + pslot: {{ 'pslot' | getenv }} + app: S2S + resdetatmos: 384 + resdetocean: 0.25 + nens: 0 + gfs_cyc: 4 + start: cold + comroot: {{ 'RUNTESTS' | getenv }}/COMROOT + expdir: {{ 'RUNTESTS' | getenv }}/EXPDIR + idate: 2021063000 + edate: 2021070306 + icsdir: /scratch1/NCEPDEV/climate/Jessica.Meixner/cycling/IC_2021063000_V2 + yaml: {{ HOMEgfs }}/ci/cases/gfsv17/ocnanal.yaml diff --git a/ci/cases/gfsv17/ocnanal.yaml b/ci/cases/gfsv17/ocnanal.yaml new file mode 100644 index 0000000000..9024afcb31 --- /dev/null +++ b/ci/cases/gfsv17/ocnanal.yaml @@ -0,0 +1,30 @@ +defaults: + !INC {{ HOMEgfs }}/parm/config/gfs/yaml/defaults.yaml + +base: + DOIAU: "YES" + DO_JEDIATMVAR: "NO" + DO_JEDIATMENS: "NO" + DO_JEDIOCNVAR: "YES" + DO_JEDISNOWDA: "NO" + DO_MERGENSST: "NO" + DOHYBVAR: "NO" + DO_FIT2OBS: "YES" + DO_VERFOZN: "YES" + DO_VERFRAD: "YES" + DO_VRFY_OCEANDA: "NO" + FHMAX_GFS: 240 + +ocnanal: + SOCA_INPUT_FIX_DIR: /scratch2/NCEPDEV/ocean/Guillaume.Vernieres/data/static/1440x1080x75/soca + CASE_ANL: 'C24' + SOCA_OBS_LIST: {{ HOMEgfs }}/sorc/gdas.cd/parm/soca/obs/obs_list.yaml + SOCA_NINNER: 100 + SABER_BLOCKS_YAML: '' + NICAS_RESOL: 1 + NICAS_GRID_SIZE: 15000 + +prepoceanobs: + SOCA_OBS_LIST: {{ HOMEgfs }}/sorc/gdas.cd/parm/soca/obs/obs_list.yaml + OBSPREP_YAML: {{ HOMEgfs }}/sorc/gdas.cd/parm/soca/obsprep/obsprep_config.yaml + DMPDIR: /scratch1/NCEPDEV/da/common/ diff --git a/ci/cases/weekly/C1152_S2SW.yaml b/ci/cases/weekly/C1152_S2SW.yaml new file mode 100644 index 0000000000..29e1f00bbe --- /dev/null +++ b/ci/cases/weekly/C1152_S2SW.yaml @@ -0,0 +1,14 @@ +experiment: + system: gfs + mode: forecast-only + +arguments: + pslot: {{ 'pslot' | getenv }} + app: S2SW + resdetatmos: 1152 + 
resdetocean: 0.25 + comroot: {{ 'RUNTESTS' | getenv }}/COMROOT + expdir: {{ 'RUNTESTS' | getenv }}/EXPDIR + idate: 2019120300 + edate: 2019120300 + yaml: {{ HOMEgfs }}/ci/cases/yamls/gfs_defaults_ci.yaml diff --git a/ci/cases/weekly/C768_S2SW.yaml b/ci/cases/weekly/C768_S2SW.yaml new file mode 100644 index 0000000000..ad314bb75b --- /dev/null +++ b/ci/cases/weekly/C768_S2SW.yaml @@ -0,0 +1,14 @@ +experiment: + system: gfs + mode: forecast-only + +arguments: + pslot: {{ 'pslot' | getenv }} + app: S2SW + resdetatmos: 768 + resdetocean: 0.25 + comroot: {{ 'RUNTESTS' | getenv }}/COMROOT + expdir: {{ 'RUNTESTS' | getenv }}/EXPDIR + idate: 2019120300 + edate: 2019120300 + yaml: {{ HOMEgfs }}/ci/cases/yamls/gfs_defaults_ci.yaml diff --git a/ci/cases/yamls/atmaerosnowDA_defaults_ci.yaml b/ci/cases/yamls/atmaerosnowDA_defaults_ci.yaml index 417525742e..624af591fc 100644 --- a/ci/cases/yamls/atmaerosnowDA_defaults_ci.yaml +++ b/ci/cases/yamls/atmaerosnowDA_defaults_ci.yaml @@ -3,4 +3,4 @@ defaults: base: DOIAU: "NO" DO_JEDISNOWDA: "YES" - ACCOUNT: {{ 'SLURM_ACCOUNT' | getenv }} + ACCOUNT: {{ 'HPC_ACCOUNT' | getenv }} diff --git a/ci/cases/yamls/gefs_ci_defaults.yaml b/ci/cases/yamls/gefs_ci_defaults.yaml index dfb1626cdd..ceb36d4acb 100644 --- a/ci/cases/yamls/gefs_ci_defaults.yaml +++ b/ci/cases/yamls/gefs_ci_defaults.yaml @@ -1,4 +1,4 @@ defaults: !INC {{ HOMEgfs }}/parm/config/gefs/yaml/defaults.yaml base: - ACCOUNT: {{ 'SLURM_ACCOUNT' | getenv }} + HPC_ACCOUNT: {{ 'HPC_ACCOUNT' | getenv }} diff --git a/ci/cases/yamls/gfs_defaults_ci.yaml b/ci/cases/yamls/gfs_defaults_ci.yaml index b66be2a366..d09f78b8b8 100644 --- a/ci/cases/yamls/gfs_defaults_ci.yaml +++ b/ci/cases/yamls/gfs_defaults_ci.yaml @@ -1,4 +1,4 @@ defaults: !INC {{ HOMEgfs }}/parm/config/gfs/yaml/defaults.yaml base: - ACCOUNT: {{ 'SLURM_ACCOUNT' | getenv }} + ACCOUNT: {{ 'HPC_ACCOUNT' | getenv }} diff --git a/ci/cases/yamls/gfs_extended_ci.yaml b/ci/cases/yamls/gfs_extended_ci.yaml index 4d4f79e0e8..f3a84c8fde 
100644 --- a/ci/cases/yamls/gfs_extended_ci.yaml +++ b/ci/cases/yamls/gfs_extended_ci.yaml @@ -2,7 +2,7 @@ defaults: !INC {{ HOMEgfs }}/parm/config/gfs/yaml/defaults.yaml base: - ACCOUNT: {{ 'SLURM_ACCOUNT' | getenv }} + ACCOUNT: {{ 'HPC_ACCOUNT' | getenv }} DO_GOES: "YES" DO_BUFRSND: "YES" DO_GEMPAK: "YES" diff --git a/ci/cases/yamls/soca_gfs_defaults_ci.yaml b/ci/cases/yamls/soca_gfs_defaults_ci.yaml index 126637cd86..3d75cc911a 100644 --- a/ci/cases/yamls/soca_gfs_defaults_ci.yaml +++ b/ci/cases/yamls/soca_gfs_defaults_ci.yaml @@ -1,5 +1,5 @@ defaults: !INC {{ HOMEgfs }}/parm/config/gfs/yaml/defaults.yaml base: - ACCOUNT: {{ 'SLURM_ACCOUNT' | getenv }} + ACCOUNT: {{ 'HPC_ACCOUNT' | getenv }} DO_JEDIOCNVAR: "YES" diff --git a/ci/cases/yamls/ufs_hybatmDA_defaults.ci.yaml b/ci/cases/yamls/ufs_hybatmDA_defaults.ci.yaml index 1075f55b63..c4fa54dcc8 100644 --- a/ci/cases/yamls/ufs_hybatmDA_defaults.ci.yaml +++ b/ci/cases/yamls/ufs_hybatmDA_defaults.ci.yaml @@ -4,7 +4,7 @@ base: DOIAU: "NO" DO_JEDIATMVAR: "YES" DO_JEDIATMENS: "YES" - ACCOUNT: {{ 'SLURM_ACCOUNT' | getenv }} + ACCOUNT: {{ 'HPC_ACCOUNT' | getenv }} atmanl: LAYOUT_X_ATMANL: 4 LAYOUT_Y_ATMANL: 4 diff --git a/ci/platforms/config.hera b/ci/platforms/config.hera index ae08293edf..6d3e43c820 100644 --- a/ci/platforms/config.hera +++ b/ci/platforms/config.hera @@ -2,8 +2,9 @@ export GFS_CI_ROOT=/scratch1/NCEPDEV/global/Terry.McGuinness/GFS_CI_ROOT export ICSDIR_ROOT=/scratch1/NCEPDEV/global/glopara/data/ICSDIR -export STMP="/scratch1/NCEPDEV/stmp2/${USER}" -export PTMP="/scratch1/NCEPDEV/stmp2/${USER}" -export SLURM_ACCOUNT=nems +export HPC_ACCOUNT=nems export max_concurrent_cases=5 export max_concurrent_pr=4 + +export JENKINS_AGENT_LANUCH_DIR=/scratch1/NCEPDEV/global/Terry.McGuinness/Jenkins +export JENKINS_WORK_DIR=/scratch1/NCEPDEV/global/Terry.McGuinness diff --git a/ci/platforms/config.hercules b/ci/platforms/config.hercules index 7e8db00b4c..5329adae49 100644 --- a/ci/platforms/config.hercules +++ 
b/ci/platforms/config.hercules @@ -2,8 +2,9 @@ export GFS_CI_ROOT=/work2/noaa/stmp/GFS_CI_ROOT/HERCULES export ICSDIR_ROOT=/work/noaa/global/glopara/data/ICSDIR -export STMP="/work2/noaa/stmp/${USER}/HERCULES" -export PTMP="/work2/noaa/stmp/${USER}/HERCULES" -export SLURM_ACCOUNT=nems +export HPC_ACCOUNT=nems export max_concurrent_cases=5 export max_concurrent_pr=4 + +export JENKINS_AGENT_LANUCH_DIR=/home/role-nems/GFS_CI_ROOT_JENKINS/AGENT_mterry +export JENKINS_WORK_DIR=/home/role-nems/GFS_CI_ROOT_JENKINS diff --git a/ci/platforms/config.orion b/ci/platforms/config.orion index b6438e8564..5171373127 100644 --- a/ci/platforms/config.orion +++ b/ci/platforms/config.orion @@ -2,8 +2,9 @@ export GFS_CI_ROOT=/work2/noaa/stmp/GFS_CI_ROOT/ORION export ICSDIR_ROOT=/work/noaa/global/glopara/data/ICSDIR -export STMP="/work2/noaa/stmp/${USER}/ORION" -export PTMP="/work2/noaa/stmp/${USER}/ORION" -export SLURM_ACCOUNT=nems +export HPC_ACCOUNT=nems export max_concurrent_cases=5 export max_concurrent_pr=4 + +export JENKINS_AGENT_LANUCH_DIR=/home/role-nems/GFS_CI_ROOT_JENKINS/AGENT_mterry +export JENKINS_WORK_DIR=/home/role-nems/GFS_CI_ROOT_JENKINS diff --git a/ci/platforms/config.wcoss2 b/ci/platforms/config.wcoss2 index 44882f3f19..7a840ad2f8 100644 --- a/ci/platforms/config.wcoss2 +++ b/ci/platforms/config.wcoss2 @@ -2,8 +2,6 @@ export GFS_CI_ROOT=/lfs/h2/emc/global/noscrub/globalworkflow.ci/GFS_CI_ROOT export ICSDIR_ROOT=/lfs/h2/emc/global/noscrub/emc.global/data/ICSDIR -export STMP="/lfs/h2/emc/stmp/${USER}" -export PTMP="/lfs/h2/emc/ptmp/${USER}" -export SLURM_ACCOUNT=GFS-DEV +export HPC_ACCOUNT=GFS-DEV export max_concurrent_cases=5 export max_concurrent_pr=4 diff --git a/ci/scripts/run-check_ci.sh b/ci/scripts/run-check_ci.sh index ee089dadc2..5c891fc4bd 100755 --- a/ci/scripts/run-check_ci.sh +++ b/ci/scripts/run-check_ci.sh @@ -64,7 +64,7 @@ while true; do rocotorun -v "${ROCOTO_VERBOSE:-0}" -w "${xml}" -d "${db}" # Wait before running rocotostat - sleep 10 + sleep 60 # 
Get job statistics echo "Gather Rocoto statistics" diff --git a/ci/scripts/utils/launch_java_agent.sh b/ci/scripts/utils/launch_java_agent.sh new file mode 100755 index 0000000000..a4e664724b --- /dev/null +++ b/ci/scripts/utils/launch_java_agent.sh @@ -0,0 +1,77 @@ +#!/bin/env bash +set -e + +controller_url="https://jenkins.epic.oarcloud.noaa.gov" +controller_user="terry.mcguinness" +HOMEgfs="$(cd "$(dirname "${BASH_SOURCE[0]}")/../../.." >/dev/null 2>&1 && pwd )" +host=$(hostname) + +######################################################################### +# Set up runtime environment varibles for accounts on supproted machines +######################################################################### + +source "${HOMEgfs}/ush/detect_machine.sh" +case ${MACHINE_ID} in + hera | orion | hercules | wcoss2) + echo "Launch Jenkins Java Controler on ${MACHINE_ID}" + ;; + *) + echo "Unsupported platform. Exiting with error." + exit 1 + ;; +esac + +LOG=lanuched_agent-$(date +%Y%m%d%M).log +rm -f "${LOG}" + +source "${HOMEgfs}/ush/module-setup.sh" +module use "${HOMEgfs}/modulefiles" +module load "module_gwsetup.${MACHINE_ID}" +source "${HOMEgfs}/ci/platforms/config.${MACHINE_ID}" + +JAVA_HOME="${JENKINS_AGENT_LANUCH_DIR}/JAVA/jdk-17.0.10" +if [[ ! -d "${JAVA_HOME}" ]]; then + JAVA_HOME=/usr/lib/jvm/jre-17 + if [[ ! -d "${JAVA_HOME}" ]]; then + echo "ERROR: JAVA_HOME not found. Exiting with error." + exit 1 + fi +fi + +JAVA="${JAVA_HOME}/bin/java" +echo "JAVA VERSION: " +${JAVA} -version + +export GH="${HOME}/bin/gh" +command -v "${GH}" +${GH} --version + +if [[ -d "${JENKINS_AGENT_LANUCH_DIR}" ]]; then + echo "Jenkins Agent Lanuch Directory: ${JENKINS_AGENT_LANUCH_DIR}" +else + echo "ERROR: Jenkins Agent Lanuch Directory not found. Exiting with error." + exit 1 +fi +cd "${JENKINS_AGENT_LANUCH_DIR}" + +if ! 
[[ -f agent.jar ]]; then +    curl -sO "${controller_url}/jnlpJars/agent.jar" +fi + +JENKINS_TOKEN=$(cat jenkins_token) + +# +offline=$(curl --silent -u "${controller_user}:${JENKINS_TOKEN}" "${controller_url}/computer/${MACHINE_ID^}-EMC/api/json?pretty=true" | grep '\"offline\"' | awk '{gsub(/,/,"");print $3}') || true +echo "Jenkins Agent offline setting: ${offline}" + +if [[ "${offline}" == "true" ]]; then +   echo "Jenkins Agent is offline. Lanuching Jenkins Agent on ${host}" +   command="nohup ${JAVA} -jar agent.jar -jnlpUrl ${controller_url}/computer/${MACHINE_ID^}-EMC/jenkins-agent.jnlp -secret @jenkins-secret-file -workDir ${JENKINS_WORK_DIR}" +   echo -e "Lanuching Jenkins Agent on ${host} with the command:\n${command}" >& "${LOG}" +   ${command} >> "${LOG}" 2>&1 & +   nohup_PID=$! +   echo "Java agent running on PID: ${nohup_PID}" >> "${LOG}" 2>&1 +   echo "Java agent running on PID: ${nohup_PID}" +else +   echo "Jenkins Agent is online (nothing done)" +fi diff --git a/ci/scripts/utils/rocotostat.py b/ci/scripts/utils/rocotostat.py index 884ff82e3d..9b1d8dcc3a 100755 --- a/ci/scripts/utils/rocotostat.py +++ b/ci/scripts/utils/rocotostat.py @@ -2,13 +2,31 @@ import sys import os +import copy +from time import sleep -from wxflow import Executable, which, Logger, CommandNotFoundError +from wxflow import which, Logger, CommandNotFoundError, ProcessError from argparse import ArgumentParser, FileType +from collections import Counter + logger = Logger(level=os.environ.get("LOGGING_LEVEL", "DEBUG"), colored_log=False) +def attempt_multiple_times(expression, max_attempts, sleep_duration=0, exception_class=Exception): +    attempt = 0 +    last_exception = None +    while attempt < max_attempts: +        try: +            return expression() +        except exception_class as e: +            last_exception = e +            attempt += 1 +            sleep(sleep_duration) +    if last_exception is not None: +        raise last_exception + + def input_args(): """ Parse command-line arguments.
@@ -39,52 +57,130 @@ def input_args(): return args -def rocoto_statcount(): - """ - Run rocotostat and process its output. +def rocotostat_summary(rocotostat): """ + rocoto_summary Run rocotostat and process its output. - args = input_args() + rocoto_summary(rocotostat) adds a default argument '--summary' to the rocotostat + command, runs it, and processes its output to return a dictionary with the total + number of cycles and the number of cycles marked as 'Done'. - try: - rocotostat = which("rocotostat") - except CommandNotFoundError: - logger.exception("rocotostat not found in PATH") - raise CommandNotFoundError("rocotostat not found in PATH") - - rocotostat_all = which("rocotostat") - rocotostat.add_default_arg(['-w', os.path.abspath(args.w.name), '-d', os.path.abspath(args.d.name), '-s']) - rocotostat_all.add_default_arg(['-w', os.path.abspath(args.w.name), '-d', os.path.abspath(args.d.name), '-a']) + Input: + rocotostat - The rocotostat command. - rocotostat_output = rocotostat(output=str) + Output: + rocoto_status - A dictionary with the total number of cycles and the number of cycles marked as 'Done'. + """ + rocotostat = copy.deepcopy(rocotostat) + rocotostat.add_default_arg('--summary') + rocotostat_output = attempt_multiple_times(lambda: rocotostat(output=str), 3, 90, ProcessError) rocotostat_output = rocotostat_output.splitlines()[1:] rocotostat_output = [line.split()[0:2] for line in rocotostat_output] - rocotostat_output_all = rocotostat_all(output=str) - rocotostat_output_all = rocotostat_output_all.splitlines()[1:] - rocotostat_output_all = [line.split()[0:4] for line in rocotostat_output_all] - rocotostat_output_all = [line for line in rocotostat_output_all if len(line) != 1] - rocoto_status = { 'CYCLES_TOTAL': len(rocotostat_output), 'CYCLES_DONE': sum([sublist.count('Done') for sublist in rocotostat_output]) } + return rocoto_status + + +def rocoto_statcount(rocotostat): + """ + rocoto_statcount Run rocotostat and process its output. 
+ + rocoto_statcount(rocotostat) adds a default argument '--all' to the rocotostat + command, runs it, and processes its output to return a dictionary with the count + of each status case. + + Input: + rocotostat - The rocotostat command. + + Output: + rocoto_status - A dictionary with the count of each status case. + """ + + rocotostat = copy.deepcopy(rocotostat) + rocotostat.add_default_arg('--all') + + rocotostat_output = attempt_multiple_times(lambda: rocotostat(output=str), 4, 120, ProcessError) + rocotostat_output = rocotostat_output.splitlines()[1:] + rocotostat_output = [line.split()[0:4] for line in rocotostat_output] + rocotostat_output = [line for line in rocotostat_output if len(line) != 1] status_cases = ['SUCCEEDED', 'FAIL', 'DEAD', 'RUNNING', 'SUBMITTING', 'QUEUED'] + + rocoto_status = {} + status_counts = Counter(case for sublist in rocotostat_output for case in sublist) for case in status_cases: - rocoto_status[case] = sum([sublist.count(case) for sublist in rocotostat_output_all]) + rocoto_status[case] = status_counts[case] return rocoto_status +def is_done(rocoto_status): + """ + is_done Check if all cycles are done. + + is_done(rocoto_status) checks if the total number of cycles equals the number of + done cycles in the rocoto_status dictionary. + + Input: + rocoto_status - A dictionary with the count of each status case. + + Output: + boolean - True if all cycles are done, False otherwise. + """ + + if rocoto_status['CYCLES_TOTAL'] == rocoto_status['CYCLES_DONE']: + return True + else: + return False + + +def is_stalled(rocoto_status): + """ + is_stalled Check if all cycles are stalled. + + is_stalled(rocoto_status) checks if all cycles are stalled by verifying if + there are no jobs that are RUNNING, SUBMITTING, or QUEUED. + + Input: + rocoto_status - A dictionary with the count of each status case. + + Output: + boolean - True if all cycles are stalled, False otherwise. 
+ """ + + if rocoto_status['RUNNING'] + rocoto_status['SUBMITTING'] + rocoto_status['QUEUED'] == 0: + return True + else: + return False + + if __name__ == '__main__': + """ + main Execute the script. + + main() parses the input arguments, checks if the rocotostat command is available, + adds default arguments to the rocotostat command, and runs it and reports + out to stdout spcific information of rocoto workflow. + """ args = input_args() - error_return = 0 - rocoto_status = rocoto_statcount() + try: + rocotostat = which("rocotostat") + except CommandNotFoundError: + logger.exception("rocotostat not found in PATH") + raise CommandNotFoundError("rocotostat not found in PATH") - if rocoto_status['CYCLES_TOTAL'] == rocoto_status['CYCLES_DONE']: + rocotostat.add_default_arg(['-w', os.path.abspath(args.w.name), '-d', os.path.abspath(args.d.name)]) + + rocoto_status = rocoto_statcount(rocotostat) + rocoto_status.update(rocotostat_summary(rocotostat)) + + error_return = 0 + if is_done(rocoto_status): rocoto_state = 'DONE' elif rocoto_status['DEAD'] > 0: error_return = rocoto_status['FAIL'] + rocoto_status['DEAD'] @@ -92,13 +188,11 @@ def rocoto_statcount(): elif 'UNKNOWN' in rocoto_status: error_return = rocoto_status['UNKNOWN'] rocoto_state = 'UNKNOWN' - elif rocoto_status['RUNNING'] + rocoto_status['SUBMITTING'] + rocoto_status['QUEUED'] == 0: - # - # TODO for now a STALLED state will be just a warning as it can - # produce a false negative if there is a timestamp on a file dependency. 
- #    - #    error_return = -3 -        rocoto_state = 'STALLED' +    elif is_stalled(rocoto_status): +        rocoto_status = attempt_multiple_times(lambda: rocoto_statcount(rocotostat), 2, 120, ProcessError) +        if is_stalled(rocoto_status): +            error_return = 3 +            rocoto_state = 'STALLED' else: rocoto_state = 'RUNNING' diff --git a/env/AWSPW.env b/env/AWSPW.env index 5cf819ba2b..7d81000f5c 100755 --- a/env/AWSPW.env +++ b/env/AWSPW.env @@ -4,7 +4,7 @@ if [[ $# -ne 1 ]]; then echo "Must specify an input argument to set runtime environment variables!" echo "argument can be any one of the following:" -    echo "atmanlvar atmanlfv3inc atmensanlrun aeroanlrun snowanl" +    echo "atmanlvar atmanlfv3inc atmensanlletkf atmensanlfv3inc aeroanlrun snowanl" echo "anal sfcanl fcst post metp" echo "eobs eupd ecen efcs epos" echo "postsnd awips gempak" diff --git a/env/CONTAINER.env b/env/CONTAINER.env index 700460b755..77768b485b 100755 --- a/env/CONTAINER.env +++ b/env/CONTAINER.env @@ -4,7 +4,7 @@ if [[ $# -ne 1 ]]; then echo "Must specify an input argument to set runtime environment variables!" echo "argument can be any one of the following:" -    echo "atmanlvar atmanlfv3inc atmensanlrun aeroanlrun snowanl" +    echo "atmanlvar atmanlfv3inc atmensanlletkf atmensanlfv3inc aeroanlrun snowanl" echo "anal sfcanl fcst post metp" echo "eobs eupd ecen efcs epos" echo "postsnd awips gempak" diff --git a/env/GAEA.env b/env/GAEA.env new file mode 100755 index 0000000000..c19fecc934 --- /dev/null +++ b/env/GAEA.env @@ -0,0 +1,39 @@ +#! /usr/bin/env bash + +if [[ $# -ne 1 ]]; then + +    echo "Must specify an input argument to set runtime environment variables!"
+ echo "argument can be any one of the following:" + echo "fcst atmos_products" + exit 1 + +fi + +step=$1 + +export launcher="srun -l --export=ALL" +export mpmd_opt="--multi-prog --output=mpmd.%j.%t.out" + +ulimit -s unlimited +ulimit -a + +if [[ "${step}" = "fcst" ]]; then + + if [[ "${CDUMP}" =~ "gfs" ]]; then + nprocs="npe_${step}_gfs" + ppn="npe_node_${step}_gfs" || ppn="npe_node_${step}" + else + nprocs="npe_${step}" + ppn="npe_node_${step}" + fi + (( nnodes = (${!nprocs}+${!ppn}-1)/${!ppn} )) + (( ntasks = nnodes*${!ppn} )) + # With ESMF threading, the model wants to use the full node + export APRUN_UFS="${launcher} -n ${ntasks}" + unset nprocs ppn nnodes ntasks + +elif [[ "${step}" = "atmos_products" ]]; then + + export USE_CFP="YES" # Use MPMD for downstream product generation + +fi diff --git a/env/HERA.env b/env/HERA.env index fbfdb68e92..ccaaea32e7 100755 --- a/env/HERA.env +++ b/env/HERA.env @@ -4,7 +4,7 @@ if [[ $# -ne 1 ]]; then echo "Must specify an input argument to set runtime environment variables!" 
echo "argument can be any one of the following:" - echo "atmanlvar atmensanlrun aeroanlrun snowanl atmanlfv3inc" + echo "atmanlvar atmanlfv3inc atmensanlletkf atmensanlfv3inc aeroanlrun snowanl" echo "anal sfcanl fcst post metp" echo "eobs eupd ecen efcs epos" echo "postsnd awips gempak" @@ -68,13 +68,21 @@ elif [[ "${step}" = "atmanlvar" ]]; then [[ ${NTHREADS_ATMANLVAR} -gt ${nth_max} ]] && export NTHREADS_ATMANLVAR=${nth_max} export APRUN_ATMANLVAR="${launcher} -n ${npe_atmanlvar} --cpus-per-task=${NTHREADS_ATMANLVAR}" -elif [[ "${step}" = "atmensanlrun" ]]; then +elif [[ "${step}" = "atmensanlletkf" ]]; then - nth_max=$((npe_node_max / npe_node_atmensanlrun)) + nth_max=$((npe_node_max / npe_node_atmensanlletkf)) - export NTHREADS_ATMENSANL=${nth_atmensanlrun:-${nth_max}} - [[ ${NTHREADS_ATMENSANL} -gt ${nth_max} ]] && export NTHREADS_ATMENSANL=${nth_max} - export APRUN_ATMENSANL="${launcher} -n ${npe_atmensanlrun} --cpus-per-task=${NTHREADS_ATMENSANL}" + export NTHREADS_ATMENSANLLETKF=${nth_atmensanlletkf:-${nth_max}} + [[ ${NTHREADS_ATMENSANLLETKF} -gt ${nth_max} ]] && export NTHREADS_ATMENSANLLETKF=${nth_max} + export APRUN_ATMENSANLLETKF="${launcher} -n ${npe_atmensanlletkf} --cpus-per-task=${NTHREADS_ATMENSANLLETKF}" + +elif [[ "${step}" = "atmensanlfv3inc" ]]; then + + nth_max=$((npe_node_max / npe_node_atmensanlfv3inc)) + + export NTHREADS_ATMENSANLFV3INC=${nth_atmensanlfv3inc:-${nth_max}} + [[ ${NTHREADS_ATMENSANLFV3INC} -gt ${nth_max} ]] && export NTHREADS_ATMENSANLFV3INC=${nth_max} + export APRUN_ATMENSANLFV3INC="${launcher} -n ${npe_atmensanlfv3inc} --cpus-per-task=${NTHREADS_ATMENSANLFV3INC}" elif [[ "${step}" = "aeroanlrun" ]]; then diff --git a/env/HERCULES.env b/env/HERCULES.env index 0b62120536..0824ba913a 100755 --- a/env/HERCULES.env +++ b/env/HERCULES.env @@ -73,13 +73,21 @@ case ${step} in [[ ${NTHREADS_ATMANLFV3INC} -gt ${nth_max} ]] && export NTHREADS_ATMANLFV3INC=${nth_max} export APRUN_ATMANLFV3INC="${launcher} -n ${npe_atmanlfv3inc} 
--cpus-per-task=${NTHREADS_ATMANLFV3INC}" ;; - "atmensanlrun") + "atmensanlletkf") - nth_max=$((npe_node_max / npe_node_atmensanlrun)) + nth_max=$((npe_node_max / npe_node_atmensanlletkf)) - export NTHREADS_ATMENSANL=${nth_atmensanlrun:-${nth_max}} - [[ ${NTHREADS_ATMENSANL} -gt ${nth_max} ]] && export NTHREADS_ATMENSANL=${nth_max} - export APRUN_ATMENSANL="${launcher} -n ${npe_atmensanlrun} --cpus-per-task=${NTHREADS_ATMENSANL}" + export NTHREADS_ATMENSANLLETKF=${nth_atmensanlletkf:-${nth_max}} + [[ ${NTHREADS_ATMENSANLLETKF} -gt ${nth_max} ]] && export NTHREADS_ATMENSANLLETKF=${nth_max} + export APRUN_ATMENSANLLETKF="${launcher} -n ${npe_atmensanlletkf} --cpus-per-task=${NTHREADS_ATMENSANLLETKF}" + ;; + "atmensanlfv3inc") + + nth_max=$((npe_node_max / npe_node_atmensanlfv3inc)) + + export NTHREADS_ATMENSANLFV3INC=${nth_atmensanlfv3inc:-${nth_max}} + [[ ${NTHREADS_ATMENSANLFV3INC} -gt ${nth_max} ]] && export NTHREADS_ATMENSANLFV3INC=${nth_max} + export APRUN_ATMENSANLFV3INC="${launcher} -n ${npe_atmensanlfv3inc} --cpus-per-task=${NTHREADS_ATMENSANLFV3INC}" ;; "aeroanlrun") diff --git a/env/JET.env b/env/JET.env index 976e42a025..5bd88dc93a 100755 --- a/env/JET.env +++ b/env/JET.env @@ -4,7 +4,7 @@ if [[ $# -ne 1 ]]; then echo "Must specify an input argument to set runtime environment variables!" 
echo "argument can be any one of the following:" - echo "atmanlvar atmensanlrun aeroanlrun snowanl atmanlfv3inc" + echo "atmanlvar atmanlfv3inc atmensanlletkf atmensanlfv3inc aeroanlrun snowanl" echo "anal sfcanl fcst post metp" echo "eobs eupd ecen efcs epos" echo "postsnd awips gempak" @@ -56,13 +56,21 @@ elif [[ "${step}" = "atmanlvar" ]]; then [[ ${NTHREADS_ATMANLVAR} -gt ${nth_max} ]] && export NTHREADS_ATMANLVAR=${nth_max} export APRUN_ATMANLVAR="${launcher} -n ${npe_atmanlvar}" -elif [[ "${step}" = "atmensanlrun" ]]; then +elif [[ "${step}" = "atmensanlletkf" ]]; then - nth_max=$((npe_node_max / npe_node_atmensanlrun)) + nth_max=$((npe_node_max / npe_node_atmensanlletkf)) - export NTHREADS_ATMENSANL=${nth_atmensanlrun:-${nth_max}} - [[ ${NTHREADS_ATMENSANL} -gt ${nth_max} ]] && export NTHREADS_ATMENSANL=${nth_max} - export APRUN_ATMENSANL="${launcher} ${npe_atmensanlrun}" + export NTHREADS_ATMENSANLLETKF=${nth_atmensanlletkf:-${nth_max}} + [[ ${NTHREADS_ATMENSANLLETKF} -gt ${nth_max} ]] && export NTHREADS_ATMENSANLLETKF=${nth_max} + export APRUN_ATMENSANLLETKF="${launcher} ${npe_atmensanlletkf}" + +elif [[ "${step}" = "atmensanlfv3inc" ]]; then + + nth_max=$((npe_node_max / npe_node_atmensanlfv3inc)) + + export NTHREADS_ATMENSANLFV3INC=${nth_atmensanlfv3inc:-${nth_max}} + [[ ${NTHREADS_ATMENSANLFV3INC} -gt ${nth_max} ]] && export NTHREADS_ATMENSANLFV3INC=${nth_max} + export APRUN_ATMENSANLFV3INC="${launcher} ${npe_atmensanlfv3inc}" elif [[ "${step}" = "aeroanlrun" ]]; then diff --git a/env/ORION.env b/env/ORION.env index 1b66ca65c0..f701e55aa2 100755 --- a/env/ORION.env +++ b/env/ORION.env @@ -4,7 +4,7 @@ if [[ $# -ne 1 ]]; then echo "Must specify an input argument to set runtime environment variables!" 
echo "argument can be any one of the following:" - echo "atmanlvar atmensanlrun aeroanlrun snowanl atmanlfv3inc" + echo "atmanlvar atmanlfv3inc atmensanlletkf atmensanlfv3inc aeroanlrun snowanl" echo "anal sfcanl fcst post metp" echo "eobs eupd ecen efcs epos" echo "postsnd awips gempak" @@ -64,13 +64,21 @@ elif [[ "${step}" = "atmanlvar" ]]; then [[ ${NTHREADS_ATMANLVAR} -gt ${nth_max} ]] && export NTHREADS_ATMANLVAR=${nth_max} export APRUN_ATMANLVAR="${launcher} -n ${npe_atmanlvar} --cpus-per-task=${NTHREADS_ATMANLVAR}" -elif [[ "${step}" = "atmensanlrun" ]]; then +elif [[ "${step}" = "atmensanlletkf" ]]; then - nth_max=$((npe_node_max / npe_node_atmensanlrun)) + nth_max=$((npe_node_max / npe_node_atmensanlletkf)) - export NTHREADS_ATMENSANL=${nth_atmensanlrun:-${nth_max}} - [[ ${NTHREADS_ATMENSANL} -gt ${nth_max} ]] && export NTHREADS_ATMENSANL=${nth_max} - export APRUN_ATMENSANL="${launcher} -n ${npe_atmensanlrun} --cpus-per-task=${NTHREADS_ATMENSANL}" + export NTHREADS_ATMENSANLLETKF=${nth_atmensanlletkf:-${nth_max}} + [[ ${NTHREADS_ATMENSANLLETKF} -gt ${nth_max} ]] && export NTHREADS_ATMENSANLLETKF=${nth_max} + export APRUN_ATMENSANLLETKF="${launcher} -n ${npe_atmensanlletkf} --cpus-per-task=${NTHREADS_ATMENSANLLETKF}" + +elif [[ "${step}" = "atmensanlfv3inc" ]]; then + + nth_max=$((npe_node_max / npe_node_atmensanlfv3inc)) + + export NTHREADS_ATMENSANLFV3INC=${nth_atmensanlfv3inc:-${nth_max}} + [[ ${NTHREADS_ATMENSANLFV3INC} -gt ${nth_max} ]] && export NTHREADS_ATMENSANLFV3INC=${nth_max} + export APRUN_ATMENSANLFV3INC="${launcher} -n ${npe_atmensanlfv3inc} --cpus-per-task=${NTHREADS_ATMENSANLFV3INC}" elif [[ "${step}" = "aeroanlrun" ]]; then diff --git a/env/S4.env b/env/S4.env index ce68fddb89..9ba3a61b01 100755 --- a/env/S4.env +++ b/env/S4.env @@ -4,7 +4,7 @@ if [[ $# -ne 1 ]]; then echo "Must specify an input argument to set runtime environment variables!" 
echo "argument can be any one of the following:" - echo "atmanlvar atmensanlrun aeroanlrun snowanl atmanlfv3inc" + echo "atmanlvar atmanlfv3inc atmensanlletkf atmensanlfv3inc aeroanlrun snowanl" echo "anal sfcanl fcst post metp" echo "eobs eupd ecen efcs epos" echo "postsnd awips gempak" @@ -56,13 +56,21 @@ elif [[ "${step}" = "atmanlvar" ]]; then [[ ${NTHREADS_ATMANLVAR} -gt ${nth_max} ]] && export NTHREADS_ATMANLVAR=${nth_max} export APRUN_ATMANLVAR="${launcher} -n ${npe_atmanlvar}" -elif [[ "${step}" = "atmensanlrun" ]]; then +elif [[ "${step}" = "atmensanlletkf" ]]; then - nth_max=$((npe_node_max / npe_node_atmensanlrun)) + nth_max=$((npe_node_max / npe_node_atmensanlletkf)) - export NTHREADS_ATMENSANL=${nth_atmensanlrun:-${nth_max}} - [[ ${NTHREADS_ATMENSANL} -gt ${nth_max} ]] && export NTHREADS_ATMENSANL=${nth_max} - export APRUN_ATMENSANL="${launcher} -n ${npe_atmensanlrun}" + export NTHREADS_ATMENSANLLETKF=${nth_atmensanlletkf:-${nth_max}} + [[ ${NTHREADS_ATMENSANLLETKF} -gt ${nth_max} ]] && export NTHREADS_ATMENSANLLETKF=${nth_max} + export APRUN_ATMENSANLLETKF="${launcher} -n ${npe_atmensanlletkf}" + +elif [[ "${step}" = "atmensanlfv3inc" ]]; then + + nth_max=$((npe_node_max / npe_node_atmensanlfv3inc)) + + export NTHREADS_ATMENSANLFV3INC=${nth_atmensanlfv3inc:-${nth_max}} + [[ ${NTHREADS_ATMENSANLFV3INC} -gt ${nth_max} ]] && export NTHREADS_ATMENSANLFV3INC=${nth_max} + export APRUN_ATMENSANLFV3INC="${launcher} -n ${npe_atmensanlfv3inc}" elif [[ "${step}" = "aeroanlrun" ]]; then diff --git a/env/WCOSS2.env b/env/WCOSS2.env index ff0121e034..0876e4127d 100755 --- a/env/WCOSS2.env +++ b/env/WCOSS2.env @@ -4,7 +4,7 @@ if [[ $# -ne 1 ]]; then echo "Must specify an input argument to set runtime environment variables!" 
echo "argument can be any one of the following:" - echo "atmanlvar atmensanlrun aeroanlrun snowanl atmanlfv3inc" + echo "atmanlvar atmanlfv3inc atmensanlletkf atmensanlfv3inc aeroanlrun snowanl" echo "anal sfcanl fcst post metp" echo "eobs eupd ecen esfc efcs epos" echo "postsnd awips gempak" @@ -50,13 +50,21 @@ elif [[ "${step}" = "atmanlvar" ]]; then [[ ${NTHREADS_ATMANLVAR} -gt ${nth_max} ]] && export NTHREADS_ATMANLVAR=${nth_max} export APRUN_ATMANLVAR="${launcher} -n ${npe_atmanlvar}" -elif [[ "${step}" = "atmensanlrun" ]]; then +elif [[ "${step}" = "atmensanlletkf" ]]; then - nth_max=$((npe_node_max / npe_node_atmensanlrun)) + nth_max=$((npe_node_max / npe_node_atmensanlletkf)) - export NTHREADS_ATMENSANL=${nth_atmensanlrun:-${nth_max}} - [[ ${NTHREADS_ATMENSANL} -gt ${nth_max} ]] && export NTHREADS_ATMENSANL=${nth_max} - export APRUN_ATMENSANL="${launcher} -n ${npe_atmensanlrun}" + export NTHREADS_ATMENSANLLETKF=${nth_atmensanlletkf:-${nth_max}} + [[ ${NTHREADS_ATMENSANLLETKF} -gt ${nth_max} ]] && export NTHREADS_ATMENSANLLETKF=${nth_max} + export APRUN_ATMENSANLLETKF="${launcher} -n ${npe_atmensanlletkf}" + +elif [[ "${step}" = "atmensanlfv3inc" ]]; then + + nth_max=$((npe_node_max / npe_node_atmensanlfv3inc)) + + export NTHREADS_ATMENSANLFV3INC=${nth_atmensanlfv3inc:-${nth_max}} + [[ ${NTHREADS_ATMENSANLFV3INC} -gt ${nth_max} ]] && export NTHREADS_ATMENSANLFV3INC=${nth_max} + export APRUN_ATMENSANLFV3INC="${launcher} -n ${npe_atmensanlfv3inc}" elif [[ "${step}" = "aeroanlrun" ]]; then diff --git a/jobs/JGDAS_ENKF_ARCHIVE b/jobs/JGDAS_ENKF_ARCHIVE index 07e171d668..5ac46a73e7 100755 --- a/jobs/JGDAS_ENKF_ARCHIVE +++ b/jobs/JGDAS_ENKF_ARCHIVE @@ -18,7 +18,7 @@ MEMDIR="ensstat" YMD=${PDY} HH=${cyc} declare_from_tmpl -rx \ # Run archive script ############################################################### -"${SCRgfs}/exgdas_enkf_earc.sh" +"${SCRgfs}/exgdas_enkf_earc.py" status=$? 
[[ ${status} -ne 0 ]] && exit "${status}" diff --git a/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_POST b/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_POST index ef2f07cfe5..c87dc6b34f 100755 --- a/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_POST +++ b/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_POST @@ -13,9 +13,10 @@ export CDATE=${CDATE:-${PDY}${cyc}} export GDUMP=${GDUMP:-"gdas"} # Generate COM variables from templates -YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COM_OCEAN_ANALYSIS COM_ICE_RESTART +YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COM_OCEAN_ANALYSIS COM_ICE_ANALYSIS COM_ICE_RESTART mkdir -p "${COM_OCEAN_ANALYSIS}" +mkdir -p "${COM_ICE_ANALYSIS}" mkdir -p "${COM_ICE_RESTART}" ############################################## diff --git a/jobs/JGFS_ATMOS_CYCLONE_GENESIS b/jobs/JGFS_ATMOS_CYCLONE_GENESIS index a0d1640d1b..de130bf9aa 100755 --- a/jobs/JGFS_ATMOS_CYCLONE_GENESIS +++ b/jobs/JGFS_ATMOS_CYCLONE_GENESIS @@ -3,6 +3,10 @@ source "${HOMEgfs}/ush/preamble.sh" source "${HOMEgfs}/ush/jjob_header.sh" -e "genesis" -c "base genesis" +# Hack to temporary skip this as the tracker has not been build +# on Hercules Rocky 9 yet +# TODO: Remove this after tracker has been built for Rocky 9 #2639 +if [[ "${machine}" == 'HERCULES' ]]; then exit 0; fi ############################################## # Set variables used in the exglobal script diff --git a/jobs/JGFS_ATMOS_CYCLONE_TRACKER b/jobs/JGFS_ATMOS_CYCLONE_TRACKER index 0ac06a7fe8..16a51d5c33 100755 --- a/jobs/JGFS_ATMOS_CYCLONE_TRACKER +++ b/jobs/JGFS_ATMOS_CYCLONE_TRACKER @@ -3,6 +3,10 @@ source "${HOMEgfs}/ush/preamble.sh" source "${HOMEgfs}/ush/jjob_header.sh" -e "tracker" -c "base tracker" +# Hack to temporary skip this as the tracker has not been build +# on Hercules Rocky 9 yet +# TODO: Remove this after tracker has been built for Rocky 9 #2639 +if [[ "${machine}" == 'HERCULES' ]]; then exit 0; fi export COMPONENT="atmos" diff --git a/jobs/JGFS_ATMOS_FSU_GENESIS b/jobs/JGFS_ATMOS_FSU_GENESIS index 374be2e1c5..89788e6d23 100755 --- 
a/jobs/JGFS_ATMOS_FSU_GENESIS +++ b/jobs/JGFS_ATMOS_FSU_GENESIS @@ -3,6 +3,11 @@ source "${HOMEgfs}/ush/preamble.sh" source "${HOMEgfs}/ush/jjob_header.sh" -e "genesis_fsu" -c "base genesis_fsu" +# Hack to temporary skip this as the tracker has not been build +# on Hercules Rocky 9 yet +# TODO: Remove this after tracker has been built for Rocky 9 #2639 +if [[ "${machine}" == 'HERCULES' ]]; then exit 0; fi + export COMPONENT="atmos" diff --git a/jobs/JGLOBAL_ARCHIVE b/jobs/JGLOBAL_ARCHIVE index 2410b49732..cec3505000 100755 --- a/jobs/JGLOBAL_ARCHIVE +++ b/jobs/JGLOBAL_ARCHIVE @@ -17,9 +17,9 @@ YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COM_ATMOS_ANALYSIS COM_ATMOS_BUFR COM COM_SNOW_ANALYSIS \ COM_ICE_HISTORY COM_ICE_INPUT COM_ICE_RESTART COM_ICE_GRIB \ COM_OBS COM_TOP \ - COM_OCEAN_HISTORY COM_OCEAN_INPUT COM_OCEAN_RESTART COM_OCEAN_GRIB COM_OCEAN_NETCDF \ + COM_OCEAN_HISTORY COM_OCEAN_RESTART COM_OCEAN_GRIB COM_OCEAN_NETCDF \ COM_OCEAN_ANALYSIS \ - COM_WAVE_GRID COM_WAVE_HISTORY COM_WAVE_STATION \ + COM_WAVE_GRID COM_WAVE_HISTORY COM_WAVE_STATION COM_WAVE_RESTART \ COM_ATMOS_OZNMON COM_ATMOS_RADMON COM_ATMOS_MINMON COM_CONF for grid in "0p25" "0p50" "1p00"; do @@ -30,7 +30,7 @@ done # Run archive script ############################################################### -${GLOBALARCHIVESH:-${SCRgfs}/exglobal_archive.sh} +${GLOBALARCHIVESH:-${SCRgfs}/exglobal_archive.py} status=$? [[ ${status} -ne 0 ]] && exit "${status}" diff --git a/jobs/JGLOBAL_ATMENS_ANALYSIS_FV3_INCREMENT b/jobs/JGLOBAL_ATMENS_ANALYSIS_FV3_INCREMENT new file mode 100755 index 0000000000..7179ae0624 --- /dev/null +++ b/jobs/JGLOBAL_ATMENS_ANALYSIS_FV3_INCREMENT @@ -0,0 +1,35 @@ +#! 
/usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" +export WIPE_DATA="NO" +export DATA=${DATA:-${DATAROOT}/${RUN}atmensanl_${cyc}} +source "${HOMEgfs}/ush/jjob_header.sh" -e "atmensanlfv3inc" -c "base atmensanl atmensanlfv3inc" + +############################################## +# Set variables used in the script +############################################## + +############################################## +# Begin JOB SPECIFIC work +############################################## + +############################################################### +# Run relevant script + +EXSCRIPT=${GDASATMENSRUNSH:-${SCRgfs}/exglobal_atmens_analysis_fv3_increment.py} +${EXSCRIPT} +status=$? +[[ ${status} -ne 0 ]] && exit "${status}" + +############################################## +# End JOB SPECIFIC work +############################################## + +############################################## +# Final processing +############################################## +if [[ -e "${pgmout}" ]] ; then + cat "${pgmout}" +fi + +exit 0 diff --git a/jobs/JGLOBAL_ATMENS_ANALYSIS_RUN b/jobs/JGLOBAL_ATMENS_ANALYSIS_LETKF similarity index 83% rename from jobs/JGLOBAL_ATMENS_ANALYSIS_RUN rename to jobs/JGLOBAL_ATMENS_ANALYSIS_LETKF index 65eeb5e0d8..060b7abd06 100755 --- a/jobs/JGLOBAL_ATMENS_ANALYSIS_RUN +++ b/jobs/JGLOBAL_ATMENS_ANALYSIS_LETKF @@ -3,7 +3,7 @@ source "${HOMEgfs}/ush/preamble.sh" export WIPE_DATA="NO" export DATA=${DATA:-${DATAROOT}/${RUN}atmensanl_${cyc}} -source "${HOMEgfs}/ush/jjob_header.sh" -e "atmensanlrun" -c "base atmensanl atmensanlrun" +source "${HOMEgfs}/ush/jjob_header.sh" -e "atmensanlletkf" -c "base atmensanl atmensanlletkf" ############################################## # Set variables used in the script @@ -16,7 +16,7 @@ source "${HOMEgfs}/ush/jjob_header.sh" -e "atmensanlrun" -c "base atmensanl atme ############################################################### # Run relevant script 
-EXSCRIPT=${GDASATMENSRUNSH:-${SCRgfs}/exglobal_atmens_analysis_run.py} +EXSCRIPT=${GDASATMENSRUNSH:-${SCRgfs}/exglobal_atmens_analysis_letkf.py} ${EXSCRIPT} status=$? [[ ${status} -ne 0 ]] && exit "${status}" diff --git a/jobs/JGLOBAL_FORECAST b/jobs/JGLOBAL_FORECAST index 8d91be8a57..9998470618 100755 --- a/jobs/JGLOBAL_FORECAST +++ b/jobs/JGLOBAL_FORECAST @@ -35,34 +35,51 @@ declare -rx gPDY="${GDATE:0:8}" declare -rx gcyc="${GDATE:8:2}" # Construct COM variables from templates (see config.com) -YMD="${PDY}" HH="${cyc}" declare_from_tmpl -rx COM_ATMOS_RESTART COM_ATMOS_INPUT COM_ATMOS_ANALYSIS \ - COM_ATMOS_HISTORY COM_ATMOS_MASTER COM_TOP COM_CONF +YMD="${PDY}" HH="${cyc}" declare_from_tmpl -rx \ + COMIN_ATMOS_INPUT:COM_ATMOS_INPUT_TMPL \ + COMIN_ATMOS_ANALYSIS:COM_ATMOS_ANALYSIS_TMPL \ + COMOUT_CONF:COM_CONF_TMPL \ + COMOUT_ATMOS_RESTART:COM_ATMOS_RESTART_TMPL \ + COMOUT_ATMOS_HISTORY:COM_ATMOS_HISTORY_TMPL \ + COMOUT_ATMOS_MASTER:COM_ATMOS_MASTER_TMPL RUN="${rCDUMP}" YMD="${gPDY}" HH="${gcyc}" declare_from_tmpl -rx \ - COM_ATMOS_RESTART_PREV:COM_ATMOS_RESTART_TMPL + COMIN_ATMOS_RESTART_PREV:COM_ATMOS_RESTART_TMPL if [[ "${DO_WAVE}" == "YES" ]]; then - YMD="${PDY}" HH="${cyc}" declare_from_tmpl -rx COM_WAVE_RESTART COM_WAVE_PREP COM_WAVE_HISTORY + YMD="${PDY}" HH="${cyc}" declare_from_tmpl -rx \ + COMIN_WAVE_PREP:COM_WAVE_PREP_TMPL \ + COMOUT_WAVE_RESTART:COM_WAVE_RESTART_TMPL \ + COMOUT_WAVE_HISTORY:COM_WAVE_HISTORY_TMPL RUN="${rCDUMP}" YMD="${gPDY}" HH="${gcyc}" declare_from_tmpl -rx \ - COM_WAVE_RESTART_PREV:COM_WAVE_RESTART_TMPL + COMIN_WAVE_RESTART_PREV:COM_WAVE_RESTART_TMPL fi if [[ "${DO_OCN}" == "YES" ]]; then - YMD="${PDY}" HH="${cyc}" declare_from_tmpl -rx COM_MED_RESTART COM_OCEAN_RESTART COM_OCEAN_INPUT \ - COM_OCEAN_HISTORY COM_OCEAN_ANALYSIS + YMD="${PDY}" HH="${cyc}" declare_from_tmpl -rx \ + COMIN_OCEAN_ANALYSIS:COM_OCEAN_ANALYSIS_TMPL \ + COMIN_OCEAN_INPUT:COM_OCEAN_INPUT_TMPL \ + COMOUT_MED_RESTART:COM_MED_RESTART_TMPL \ + 
COMOUT_OCEAN_RESTART:COM_OCEAN_RESTART_TMPL \ + COMOUT_OCEAN_HISTORY:COM_OCEAN_HISTORY_TMPL RUN="${rCDUMP}" YMD="${gPDY}" HH="${gcyc}" declare_from_tmpl -rx \ - COM_OCEAN_RESTART_PREV:COM_OCEAN_RESTART_TMPL \ - COM_MED_RESTART_PREV:COM_MED_RESTART_TMPL + COMIN_OCEAN_RESTART_PREV:COM_OCEAN_RESTART_TMPL \ + COMIN_MED_RESTART_PREV:COM_MED_RESTART_TMPL fi if [[ "${DO_ICE}" == "YES" ]]; then - YMD="${PDY}" HH="${cyc}" declare_from_tmpl -rx COM_ICE_HISTORY COM_ICE_INPUT COM_ICE_RESTART + YMD="${PDY}" HH="${cyc}" declare_from_tmpl -rx \ + COMIN_ICE_INPUT:COM_ICE_INPUT_TMPL \ + COMIN_ICE_ANALYSIS:COM_ICE_ANALYSIS_TMPL \ + COMOUT_ICE_RESTART:COM_ICE_RESTART_TMPL \ + COMOUT_ICE_HISTORY:COM_ICE_HISTORY_TMPL RUN="${rCDUMP}" YMD="${gPDY}" HH="${gcyc}" declare_from_tmpl -rx \ - COM_ICE_RESTART_PREV:COM_ICE_RESTART_TMPL + COMIN_ICE_RESTART_PREV:COM_ICE_RESTART_TMPL fi if [[ "${DO_AERO}" == "YES" ]]; then - YMD="${PDY}" HH="${cyc}" declare_from_tmpl -rx COM_CHEM_HISTORY + YMD="${PDY}" HH="${cyc}" declare_from_tmpl -rx \ + COMOUT_CHEM_HISTORY:COM_CHEM_HISTORY_TMPL fi @@ -79,7 +96,7 @@ if [[ "${RUN}" =~ "enkf" ]] && [[ "${SENDDBN:-}" == YES ]]; then for (( fhr = FHOUT; fhr <= FHMAX; fhr + FHOUT )); do if (( fhr % 3 == 0 )); then fhr3=$(printf %03i "${fhr}") - "${DBNROOT}/bin/dbn_alert" MODEL GFS_ENKF "${job}" "${COM_ATMOS_HISTORY}/${RUN}.t${cyc}z.sfcf${fhr3}.nc" + "${DBNROOT}/bin/dbn_alert" MODEL GFS_ENKF "${job}" "${COMOUT_ATMOS_HISTORY}/${RUN}.t${cyc}z.sfcf${fhr3}.nc" fi done fi diff --git a/jobs/rocoto/arch.sh b/jobs/rocoto/arch.sh index d949b7d76f..083e319bf5 100755 --- a/jobs/rocoto/arch.sh +++ b/jobs/rocoto/arch.sh @@ -8,6 +8,11 @@ source "${HOMEgfs}/ush/preamble.sh" status=$? 
[[ ${status} -ne 0 ]] && exit "${status}" +############################################################### +# setup python path for workflow utilities and tasks +PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${HOMEgfs}/ush/python" +export PYTHONPATH + export job="arch" export jobid="${job}.$$" diff --git a/jobs/rocoto/atmensanlfv3inc.sh b/jobs/rocoto/atmensanlfv3inc.sh new file mode 100755 index 0000000000..bb44ddc3a0 --- /dev/null +++ b/jobs/rocoto/atmensanlfv3inc.sh @@ -0,0 +1,24 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" + +############################################################### +# Source UFSDA workflow modules +. "${HOMEgfs}/ush/load_ufsda_modules.sh" +status=$? +[[ ${status} -ne 0 ]] && exit "${status}" + +export job="atmensanlfv3inc" +export jobid="${job}.$$" + +############################################################### +# setup python path for workflow utilities and tasks +wxflowPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/wxflow/src" +PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${wxflowPATH}" +export PYTHONPATH + +############################################################### +# Execute the JJOB +"${HOMEgfs}/jobs/JGLOBAL_ATMENS_ANALYSIS_FV3_INCREMENT" +status=$? +exit "${status}" diff --git a/jobs/rocoto/atmensanlrun.sh b/jobs/rocoto/atmensanlletkf.sh similarity index 89% rename from jobs/rocoto/atmensanlrun.sh rename to jobs/rocoto/atmensanlletkf.sh index d991e3eb82..b4a1a73a80 100755 --- a/jobs/rocoto/atmensanlrun.sh +++ b/jobs/rocoto/atmensanlletkf.sh @@ -8,7 +8,7 @@ source "${HOMEgfs}/ush/preamble.sh" status=$? [[ ${status} -ne 0 ]] && exit "${status}" -export job="atmensanlrun" +export job="atmensanlletkf" export jobid="${job}.$$" ############################################################### @@ -19,6 +19,6 @@ export PYTHONPATH ############################################################### # Execute the JJOB -"${HOMEgfs}/jobs/JGLOBAL_ATMENS_ANALYSIS_RUN" +"${HOMEgfs}/jobs/JGLOBAL_ATMENS_ANALYSIS_LETKF" status=$? 
exit "${status}" diff --git a/jobs/rocoto/earc.sh b/jobs/rocoto/earc.sh index c4c7341698..4a9263b509 100755 --- a/jobs/rocoto/earc.sh +++ b/jobs/rocoto/earc.sh @@ -8,6 +8,11 @@ source "${HOMEgfs}/ush/preamble.sh" status=$? [[ ${status} -ne 0 ]] && exit "${status}" +############################################################### +# setup python path for workflow utilities and tasks +PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${HOMEgfs}/ush/python" +export PYTHONPATH + export job="earc" export jobid="${job}.$$" @@ -16,5 +21,4 @@ export jobid="${job}.$$" "${HOMEgfs}/jobs/JGDAS_ENKF_ARCHIVE" status=$? - exit "${status}" diff --git a/modulefiles/module_base.gaea.lua b/modulefiles/module_base.gaea.lua new file mode 100644 index 0000000000..55ad6b0c34 --- /dev/null +++ b/modulefiles/module_base.gaea.lua @@ -0,0 +1,39 @@ +help([[ +Load environment to run GFS on Gaea +]]) + +local spack_mod_path=(os.getenv("spack_mod_path") or "None") +prepend_path("MODULEPATH", spack_mod_path) + +load(pathJoin("stack-intel", (os.getenv("stack_intel_ver") or "None"))) +load(pathJoin("stack-cray-mpich", (os.getenv("stack_cray_mpich_ver") or "None"))) +load(pathJoin("python", (os.getenv("python_ver") or "None"))) + +load(pathJoin("jasper", (os.getenv("jasper_ver") or "None"))) +load(pathJoin("libpng", (os.getenv("libpng_ver") or "None"))) +load(pathJoin("cdo", (os.getenv("cdo_ver") or "None"))) +load(pathJoin("hdf5", (os.getenv("hdf5_ver") or "None"))) +load(pathJoin("netcdf-c", (os.getenv("netcdf_c_ver") or "None"))) +load(pathJoin("netcdf-fortran", (os.getenv("netcdf_fortran_ver") or "None"))) + +load(pathJoin("nco", (os.getenv("nco_ver") or "None"))) +load(pathJoin("prod_util", (os.getenv("prod_util_ver") or "None"))) +load(pathJoin("grib-util", (os.getenv("grib_util_ver") or "None"))) +load(pathJoin("g2tmpl", (os.getenv("g2tmpl_ver") or "None"))) +load(pathJoin("gsi-ncdiag", (os.getenv("gsi_ncdiag_ver") or "None"))) +load(pathJoin("crtm", (os.getenv("crtm_ver") or "None"))) 
+load(pathJoin("bufr", (os.getenv("bufr_ver") or "None"))) +load(pathJoin("wgrib2", (os.getenv("wgrib2_ver") or "None"))) +load(pathJoin("py-netcdf4", (os.getenv("py_netcdf4_ver") or "None"))) +load(pathJoin("py-pyyaml", (os.getenv("py_pyyaml_ver") or "None"))) +load(pathJoin("py-jinja2", (os.getenv("py_jinja2_ver") or "None"))) +load(pathJoin("py-pandas", (os.getenv("py_pandas_ver") or "None"))) +load(pathJoin("py-python-dateutil", (os.getenv("py_python_dateutil_ver") or "None"))) +load(pathJoin("met", (os.getenv("met_ver") or "None"))) +load(pathJoin("metplus", (os.getenv("metplus_ver") or "None"))) +load(pathJoin("py-xarray", (os.getenv("py_xarray_ver") or "None"))) + +setenv("WGRIB2","wgrib2") +setenv("UTILROOT",(os.getenv("prod_util_ROOT") or "None")) + +whatis("Description: GFS run setup environment") diff --git a/modulefiles/module_base.hera.lua b/modulefiles/module_base.hera.lua index c47a1bfd70..f75cf886e7 100644 --- a/modulefiles/module_base.hera.lua +++ b/modulefiles/module_base.hera.lua @@ -41,6 +41,10 @@ load(pathJoin("metplus", (os.getenv("metplus_ver") or "None"))) load(pathJoin("py-xarray", (os.getenv("py_xarray_ver") or "None"))) setenv("WGRIB2","wgrib2") + +-- Stop gap fix for wgrib with spack-stack 1.6.0 +-- TODO Remove this when spack-stack issue #1097 is resolved +setenv("WGRIB","wgrib") setenv("UTILROOT",(os.getenv("prod_util_ROOT") or "None")) --prepend_path("MODULEPATH", pathJoin("/scratch1/NCEPDEV/global/glopara/git/prepobs/v" .. 
(os.getenv("prepobs_run_ver") or "None"), "modulefiles")) diff --git a/modulefiles/module_base.hercules.lua b/modulefiles/module_base.hercules.lua index 5835d013d7..998803f246 100644 --- a/modulefiles/module_base.hercules.lua +++ b/modulefiles/module_base.hercules.lua @@ -37,6 +37,10 @@ load(pathJoin("metplus", (os.getenv("metplus_ver") or "None"))) load(pathJoin("py-xarray", (os.getenv("py_xarray_ver") or "None"))) setenv("WGRIB2","wgrib2") + +-- Stop gap fix for wgrib with spack-stack 1.6.0 +-- TODO Remove this when spack-stack issue #1097 is resolved +setenv("WGRIB","wgrib") setenv("UTILROOT",(os.getenv("prod_util_ROOT") or "None")) prepend_path("MODULEPATH", pathJoin("/work/noaa/global/glopara/git/prepobs/feature-GFSv17_com_reorg_log_update/modulefiles")) diff --git a/modulefiles/module_base.jet.lua b/modulefiles/module_base.jet.lua index 31f8aa676d..e53132fd6a 100644 --- a/modulefiles/module_base.jet.lua +++ b/modulefiles/module_base.jet.lua @@ -43,6 +43,10 @@ load(pathJoin("py-xarray", (os.getenv("py_xarray_ver") or "None"))) load(pathJoin("perl", (os.getenv("perl_ver") or "None"))) setenv("WGRIB2","wgrib2") + +-- Stop gap fix for wgrib with spack-stack 1.6.0 +-- TODO Remove this when spack-stack issue #1097 is resolved +setenv("WGRIB","wgrib") setenv("UTILROOT",(os.getenv("prod_util_ROOT") or "None")) --prepend_path("MODULEPATH", pathJoin("/lfs4/HFIP/hfv3gfs/glopara/git/prepobs/v" .. 
(os.getenv("prepobs_run_ver") or "None"), "modulefiles")) diff --git a/modulefiles/module_base.orion.lua b/modulefiles/module_base.orion.lua index 72a480a946..4d747512db 100644 --- a/modulefiles/module_base.orion.lua +++ b/modulefiles/module_base.orion.lua @@ -38,6 +38,10 @@ load(pathJoin("metplus", (os.getenv("metplus_ver") or "None"))) load(pathJoin("py-xarray", (os.getenv("py_xarray_ver") or "None"))) setenv("WGRIB2","wgrib2") + +-- Stop gap fix for wgrib with spack-stack 1.6.0 +-- TODO Remove this when spack-stack issue #1097 is resolved +setenv("WGRIB","wgrib") setenv("UTILROOT",(os.getenv("prod_util_ROOT") or "None")) --prepend_path("MODULEPATH", pathJoin("/work/noaa/global/glopara/git/prepobs/v" .. (os.getenv("prepobs_run_ver") or "None"), "modulefiles")) diff --git a/modulefiles/module_base.s4.lua b/modulefiles/module_base.s4.lua index 6f0602c3eb..835249fb85 100644 --- a/modulefiles/module_base.s4.lua +++ b/modulefiles/module_base.s4.lua @@ -37,6 +37,10 @@ load(pathJoin("metplus", (os.getenv("metplus_ver") or "None"))) load(pathJoin("py-xarray", (os.getenv("py_xarray_ver") or "None"))) setenv("WGRIB2","wgrib2") + +-- Stop gap fix for wgrib with spack-stack 1.6.0 +-- TODO Remove this when spack-stack issue #1097 is resolved +setenv("WGRIB","wgrib") setenv("UTILROOT",(os.getenv("prod_util_ROOT") or "None")) --prepend_path("MODULEPATH", pathJoin("/data/prod/glopara/git/prepobs/v" .. 
(os.getenv("prepobs_run_ver") or "None"), "modulefiles")) diff --git a/modulefiles/module_gwsetup.gaea.lua b/modulefiles/module_gwsetup.gaea.lua new file mode 100644 index 0000000000..5a8b2379a9 --- /dev/null +++ b/modulefiles/module_gwsetup.gaea.lua @@ -0,0 +1,21 @@ +help([[ +Load environment to run GFS workflow setup scripts on Gaea +]]) + +prepend_path("MODULEPATH", "/ncrc/proj/epic/rocoto/modulefiles") +load(pathJoin("rocoto")) + +prepend_path("MODULEPATH", "/ncrc/proj/epic/spack-stack/spack-stack-1.6.0/envs/unified-env/install/modulefiles/Core") + +local stack_intel_ver=os.getenv("stack_intel_ver") or "2023.1.0" +local python_ver=os.getenv("python_ver") or "3.10.13" + +load(pathJoin("stack-intel", stack_intel_ver)) +load(pathJoin("python", python_ver)) +load("py-jinja2") +load("py-pyyaml") +load("py-numpy") +local git_ver=os.getenv("git_ver") or "2.35.2" +load(pathJoin("git", git_ver)) + +whatis("Description: GFS run setup environment") diff --git a/parm/archive/arcdir.yaml.j2 b/parm/archive/arcdir.yaml.j2 new file mode 100644 index 0000000000..6321f6fc41 --- /dev/null +++ b/parm/archive/arcdir.yaml.j2 @@ -0,0 +1,85 @@ +{% set cycle_HH = current_cycle | strftime("%H") %} +{% set cycle_YMDH = current_cycle | to_YMDH %} +{% set cycle_YMD = current_cycle | to_YMD %} +{% set head = RUN + ".t" + cycle_HH + "z." 
%} +{% if RUN == "gdas" or RUN == "gfs" %} +deterministic: + mkdir: + - "{{ ARCDIR }}" + {% if RUN == "gfs" %} + - "{{ ARCDIR }}/tracker.{{ cycle_YMDH }}/{{ RUN }}" + {% endif %} + {% if FITSARC %} + {% set VFYARC = ROTDIR + "/vrfyarch" %} + - "{{ VFYARC }}/{{ RUN }}.{{ cycle_YMD }}/{{ cycle_HH }}" + {% endif %} + copy: + {% if current_cycle != SDATE and MODE == "cycled" %} + {% if DO_JEDIATMVAR %} + - ["{{ COM_ATMOS_ANALYSIS }}/{{ head }}atmstat", "{{ ARCDIR }}/atmstat.{{ RUN }}.{{ cycle_YMDH }}"] + {% else %} + - ["{{ COM_ATMOS_ANALYSIS }}/{{ head }}gsistat", "{{ ARCDIR }}/gsistat.{{ RUN }}.{{ cycle_YMDH }}"] + {% endif %} + {% if DO_JEDISNOWDA %} + - ["{{ COM_SNOW_ANALYSIS }}/{{ head }}snowstat.tgz", "{{ ARCDIR }}/snowstat.{{ RUN }}.{{ cycle_YMDH }}.tgz"] + {% endif %} + {% if AERO_ANL_CDUMP == RUN or AERO_ANL_CDUMP == "both" %} + - ["{{ COM_CHEM_ANALYSIS }}/{{ head }}aerostat", "{{ ARCDIR }}/aerostat.{{ RUN }}.{{ cycle_YMDH }}"] + {% endif %} + - ["{{ COM_ATMOS_GRIB_1p00 }}/{{ head }}pgrb2.1p00.anl", "{{ ARCDIR }}/pgbanl.{{ RUN }}.{{ cycle_YMDH }}.grib2"] + {% endif %} # Full cycle + {% if RUN == "gfs" %} + {% set fhmax, fhout = FHMAX_GFS, FHOUT_GFS %} + {% elif RUN == "gdas" %} + {% set fhmax, fhout = FHMAX, FHOUT %} + {% endif %} + {% for fhr in range(0, fhmax + fhout, fhout) %} + - ["{{ COM_ATMOS_GRIB_1p00 }}/{{ head }}pgrb2.1p00.f{{ '%03d' % fhr }}", "{{ ARCDIR }}/pgbf{{ '%02d' % fhr }}.{{ RUN }}.{{ cycle_YMDH }}.grib2"] + {% endfor %} + {% if RUN == "gfs" %} + {% if FITSARC %} + {% if FHMAX_FITS is defined %} + {% set fhmax = FHMAX_FITS %} + {% else %} + {% set fhmax = FHMAX_GFS %} + {% endif %} + {% for fhr in range(0, fhmax + 6, 6) %} + {% set sfcfile = "/" + head + "sfcf" + '%03d'|format(fhr) + ".nc" %} + {% set sigfile = "/" + head + "atmf" + '%03d'|format(fhr) + ".nc" %} + - ["{{COM_ATMOS_HISTORY}}/{{ sfcfile }}", "{{ VFYARC }}/{{ RUN }}.{{ cycle_YMD }}/{{ cycle_HH }}/{{ sfcfile }}"] + - ["{{COM_ATMOS_HISTORY}}/{{ sigfile }}", "{{ VFYARC }}/{{ RUN 
}}.{{ cycle_YMD }}/{{ cycle_HH }}/{{ sigfile }}"] + {% endfor %} + {% endif %} ## FITSARC + {% if path_exists(COM_ATMOS_GENESIS ~ "/storms.gfso.atcf_gen." ~ cycle_YMDH) %} + - ["{{ COM_ATMOS_GENESIS }}/storms.gfso.atcf_gen.{{ cycle_YMDH }}", "{{ ARCDIR }}/storms.gfso.atcf_gen.{{ cycle_YMDH }}"] + - ["{{ COM_ATMOS_GENESIS }}/storms.gfso.atcf_gen.altg.{{ cycle_YMDH }}", "{{ ARCDIR }}/storms.gfso.atcf_gen.altg.{{ cycle_YMDH }}"] + {% endif %} + {% if path_exists(COM_ATMOS_GENESIS ~ "/trak.gfso.atcfunix." ~ cycle_YMDH) %} + - ["{{ COM_ATMOS_GENESIS }}/trak.gfso.atcfunix.{{ cycle_YMDH }}", "{{ ARCDIR }}/trak.gfso.atcfunix.{{ cycle_YMDH }}"] + - ["{{ COM_ATMOS_GENESIS }}/trak.gfso.atcfunix.altg.{{ cycle_YMDH }}", "{{ ARCDIR }}/trak.gfso.atcfunix.altg.{{ cycle_YMDH }}"] + {% endif %} + ## Only created if tracking is on and there were systems to track + {% for basin in ["epac", "natl"] %} + {% if path_exists(COM_ATMOS_TRACK + "/" + basin) %} + - ["{{ COM_ATMOS_TRACK }}/{{ basin }}", "{{ ARCDIR }}/{{ basin }}"] + {% endif %} + {% endfor %} + {% endif %} ## RUN == "gfs" + {% if path_exists(COM_ATMOS_TRACK ~ "/atcfunix." ~ RUN ~ "." 
~ cycle_YMDH) %} + - ["{{ COM_ATMOS_TRACK }}/atcfunix.{{ RUN }}.{{ cycle_YMDH }}", "{{ ARCDIR }}/atcfunix.{{ RUN }}.{{ cycle_YMDH }}"] + - ["{{ COM_ATMOS_TRACK }}/atcfunixp.{{ RUN }}.{{ cycle_YMDH }}", "{{ ARCDIR }}/atcfunixp.{{ RUN }}.{{ cycle_YMDH }}"] + {% endif %} +{% endif %} # gfs or gdas +{% if current_cycle != SDATE and (RUN == "enkfgdas" or RUN == "enkfgfs") %} +ensemble: + mkdir: + - "{{ ARCDIR }}" + copy: + {% if DO_JEDIATMENS %} + - ["{{ COM_ATMOS_ANALYSIS_ENSSTAT }}/{{ head }}atmensstat", "{{ ARCDIR }}/atmensstat.{{ RUN }}.{{ cycle_YMDH }}"] + - ["{{ COM_ATMOS_ANALYSIS_ENSSTAT }}/{{ head }}atminc.ensmean.nc", "{{ ARCDIR }}/atmensstat.{{ RUN }}.{{ cycle_YMDH }}.ensmean.nc"] + {% else %} + - ["{{ COM_ATMOS_ANALYSIS_ENSSTAT }}/{{ head }}enkfstat", "{{ ARCDIR }}/enkfstat.{{ RUN }}.{{ cycle_YMDH }}"] + - ["{{ COM_ATMOS_ANALYSIS_ENSSTAT }}/{{ head }}gsistat.ensmean", "{{ ARCDIR }}/gsistat.{{ RUN }}.{{ cycle_YMDH }}.ensmean"] + {% endif %} +{% endif %} # enkfgdas or enkfgfs diff --git a/parm/archive/chem.yaml.j2 b/parm/archive/chem.yaml.j2 new file mode 100644 index 0000000000..7796912b1a --- /dev/null +++ b/parm/archive/chem.yaml.j2 @@ -0,0 +1,7 @@ +chem: + {% set head = "gocart" %} + name: "CHEM" + target: "{{ ATARDIR }}/{{ cycle_YMDH }}/chem.tar" + required: + # TODO explicitize this set + - "{{ COM_CHEM_HISTORY | relpath(ROTDIR) }}/{{ head }}*" diff --git a/parm/archive/enkf.yaml.j2 b/parm/archive/enkf.yaml.j2 new file mode 100644 index 0000000000..10aee51a19 --- /dev/null +++ b/parm/archive/enkf.yaml.j2 @@ -0,0 +1,74 @@ +enkf: + name: "ENKF" + target: "{{ ATARDIR }}/{{ cycle_YMDH }}/{{ RUN }}.tar" + required: + {% for fhr in range(3, fhmax + 1, 3) %} + - "{{ COM_ATMOS_HISTORY_ENSSTAT | relpath(ROTDIR) }}/{{ head }}atmf{{ '%03d' % fhr }}.ensmean.nc" + - "{{ COM_ATMOS_HISTORY_ENSSTAT | relpath(ROTDIR) }}/{{ head }}sfcf{{ '%03d' % fhr }}.ensmean.nc" + {% if ENKF_SPREAD %} + - "{{ COM_ATMOS_HISTORY_ENSSTAT | relpath(ROTDIR) }}/{{ head }}atmf{{ '%03d' % fhr 
}}.ensspread.nc" + {% endif %} + {% endfor %} + {% for mem in range(1, nmem_ens + 1) %} + - "logs/{{ cycle_YMDH }}/{{ RUN }}fcst_mem{{ '%03d' % mem }}.log" + {% endfor %} + - "logs/{{ cycle_YMDH }}/{{ RUN }}epos*.log" + - "logs/{{ cycle_YMDH }}/{{ RUN }}echgres.log" + + {% if current_cycle != SDATE %} + # TODO archive ecen logs based on actual groups. Will need to emulate numpy.array_split to do so. + - "logs/{{ cycle_YMDH }}/{{ RUN }}ecen*.log" + - "logs/{{ cycle_YMDH }}/{{ RUN }}esfc.log" + + {% if not DO_JEDIATMENS %} + {% set da_files = ["enkfstat", + "gsistat.ensmean", + "cnvstat.ensmean", + "oznstat.ensmean", + "radstat.ensmean"] %} + {% else %} + {% set da_files = ["atmens.yaml", + "atminc.ensmean.nc", + "atmensstat"] %} + {% endif %} + {% for file in da_files %} + - "{{ COM_ATMOS_ANALYSIS_ENSSTAT | relpath(ROTDIR) }}/{{ head }}{{ file }}" + {% endfor %} + {% if DOIAU %} + {% for fhr in iaufhrs %} + {% if fhr == IAU_OFFSET %} + {% if do_calc_increment %} + - "{{ COM_ATMOS_ANALYSIS_ENSSTAT | relpath(ROTDIR) }}/{{ head }}atmanl.ensmean.nc" + {% endif %} # calc increment + {% else %} # fhr != IAU_OFFSET + - "{{ COM_ATMOS_ANALYSIS_ENSSTAT | relpath(ROTDIR) }}/{{ head }}atmi{{ '%03d' % fhr }}.ensmean.nc" + {% endif %} # fhr == IAU_OFFSET + {% endfor %} # IAUFHRS + {% endif %} # DOIAU + + {% if DO_JEDIATMENS %} + {% set steps = ["atmensanlinit", "atmensanlrun", "atmensanlfinal"] %} + {% else %} + {% set steps = ["eobs", "eupd"] %} + {% if lobsdiag_forenkf %} + {% do steps.append("ediag") %} + {% else %} + {% for mem in range(1, nmem_ens + 1) %} + {% do steps.append("eomg_mem{{ '%03d' % mem }}") %} + {% endfor %} + {% endif %} + {% endif %} + + {% for step in steps %} + - "logs/{{ cycle_YMDH }}/{{ RUN }}{{ step }}.log" + {% endfor %} + {% endif %} # not the first cycle + optional: + {% if current_cycle != SDATE and DOIAU %} + {% for fhr in iaufhrs %} + {% if fhr != IAU_OFFSET %} + - "{{ COM_ATMOS_ANALYSIS_ENSSTAT | relpath(ROTDIR) }}/{{ head }}atma{{ '%03d' % fhr 
}}.ensmean.nc" + - "{{ COM_ATMOS_ANALYSIS_ENSSTAT | relpath(ROTDIR) }}/{{ head }}atmi{{ '%03d' % fhr }}.ensmean.nc" + {% endif %} # fhr == IAU_OFFSET + {% endfor %} # IAUFHRS + {% endif %} diff --git a/parm/archive/enkf_grp.yaml.j2 b/parm/archive/enkf_grp.yaml.j2 new file mode 100644 index 0000000000..cf7933ef0e --- /dev/null +++ b/parm/archive/enkf_grp.yaml.j2 @@ -0,0 +1,29 @@ +enkf_grp: + name: "ENKF_GRP" + target: "{{ ATARDIR }}/{{ cycle_YMDH }}/{{ RUN }}_grp{{ ENSGRP }}.tar" + required: + {% for mem in range(first_group_mem, last_group_mem + 1) %} + {% set imem = mem - first_group_mem %} + {% set COM_ATMOS_ANALYSIS_MEM = COM_ATMOS_ANALYSIS_MEM_list[imem] %} + {% set COM_ATMOS_HISTORY_MEM = COM_ATMOS_HISTORY_MEM_list[imem] %} + {% set COM_ATMOS_RESTART_MEM = COM_ATMOS_RESTART_MEM_list[imem] %} + + {% for iaufhr in iaufhrs_enkf %} + - "{{ COM_ATMOS_HISTORY_MEM | relpath(ROTDIR) }}/{{ head }}atmf{{ "%03d" % iaufhr }}.nc" + {% endfor %} + + {% if 6 in iaufhrs_enkf %} + - "{{ COM_ATMOS_HISTORY_MEM | relpath(ROTDIR) }}/{{ head }}sfcf006.nc" + {% endif %} + + {% if current_cycle != SDATE %} + {% if not lobsdiag_forenkf %} + - "{{ COM_ATMOS_RESTART_MEM | relpath(ROTDIR) }}/{{ head }}gsistat" + {% endif %} + {% if do_calc_increment %} + - "{{ COM_ATMOS_ANALYSIS_MEM | relpath(ROTDIR) }}/{{ head }}atmanl.nc" + {% else %} + - "{{ COM_ATMOS_ANALYSIS_MEM | relpath(ROTDIR) }}/{{ head }}ratminc.nc" + {% endif %} + {% endif %} + {% endfor %} # first_group_mem to last_group_mem diff --git a/parm/archive/enkf_restarta_grp.yaml.j2 b/parm/archive/enkf_restarta_grp.yaml.j2 new file mode 100644 index 0000000000..ee768a1f92 --- /dev/null +++ b/parm/archive/enkf_restarta_grp.yaml.j2 @@ -0,0 +1,48 @@ +enkf_restarta_grp: + name: "ENKF_RESTARTA_GRP" + target: "{{ ATARDIR }}/{{ cycle_YMDH }}/{{ RUN }}_restarta_grp{{ ENSGRP }}.tar" + required: + {% for mem in range(first_group_mem, last_group_mem + 1) %} + {% set imem = mem - first_group_mem %} + {% set COM_ATMOS_ANALYSIS_MEM = 
COM_ATMOS_ANALYSIS_MEM_list[imem] %} + {% set COM_ATMOS_HISTORY_MEM = COM_ATMOS_HISTORY_MEM_list[imem] %} + {% set COM_ATMOS_RESTART_MEM = COM_ATMOS_RESTART_MEM_list[imem] %} + + {% if not lobsdiag_forenkf %} + - "{{ COM_ATMOS_RESTART_MEM | relpath(ROTDIR) }}/{{ head }}abias" + - "{{ COM_ATMOS_RESTART_MEM | relpath(ROTDIR) }}/{{ head }}abias_air" + - "{{ COM_ATMOS_RESTART_MEM | relpath(ROTDIR) }}/{{ head }}abias_int" + - "{{ COM_ATMOS_RESTART_MEM | relpath(ROTDIR) }}/{{ head }}abias_pc" + {% endif %} + + {% if DOIAU_ENKF %} + {% set anl_delta = "-3H" | to_timedelta %} + {% else %} + {% set anl_delta = "0H" | to_timedelta %} + {% endif %} + {% set anl_time = current_cycle | add_to_datetime(anl_delta) %} + {% for itile in range(1, 7) %} + - "{{ COM_ATMOS_RESTART_MEM | relpath(ROTDIR) }}/{{ anl_time | to_YMD }}.{{ anl_time | strftime("%H") }}0000.sfcanl_data.tile{{ itile }}.nc" + {% endfor %} + {% if do_calc_increment %} + - "{{ COM_ATMOS_ANALYSIS_MEM | relpath(ROTDIR) }}/{{ head }}atmanl.nc" + {% else %} + - "{{ COM_ATMOS_ANALYSIS_MEM | relpath(ROTDIR) }}/{{ head }}ratminc.nc" + {% endif %} + {% endfor %} # first_group_mem to last_group_mem + optional: + {% for mem in range(first_group_mem, last_group_mem + 1) %} + {% set imem = mem - first_group_mem %} + {% set COM_ATMOS_RESTART_MEM = COM_ATMOS_RESTART_MEM_list[imem] %} + {% set COM_ATMOS_ANALYSIS_MEM = COM_ATMOS_ANALYSIS_MEM_list[imem] %} + {% if not lobsdiag_forenkf and not DO_JEDIATMENS %} + - "{{ COM_ATMOS_RESTART_MEM | relpath(ROTDIR) }}/{{ head }}cnvstat" + {% endif %} + {% for iaufhr in iaufhrs if iaufhr != 6 %} + {% set iaufhr = iaufhr %} + {% if do_calc_increment %} + - "{{ COM_ATMOS_ANALYSIS_MEM | relpath(ROTDIR) }}/{{ head }}atma{{ '%03d' % iaufhr }}.nc" + - "{{ COM_ATMOS_ANALYSIS_MEM | relpath(ROTDIR) }}/{{ head }}ratmi{{ '%03d' % iaufhr }}.nc" + {% endif %} + {% endfor %} # iaufhr in iaufhrs + {% endfor %} diff --git a/parm/archive/enkf_restartb_grp.yaml.j2 b/parm/archive/enkf_restartb_grp.yaml.j2 new 
file mode 100644 index 0000000000..34fde9d7ca --- /dev/null +++ b/parm/archive/enkf_restartb_grp.yaml.j2 @@ -0,0 +1,37 @@ +enkf_restartb_grp: + name: "ENKF_RESTARTB_GRP" + target: "{{ ATARDIR }}/{{ cycle_YMDH }}/{{ RUN }}_restartb_grp{{ ENSGRP }}.tar" + required: + {% for mem in range(first_group_mem, last_group_mem + 1) %} + {% set imem = mem - first_group_mem %} + {% set COM_ATMOS_RESTART_MEM = COM_ATMOS_RESTART_MEM_list[imem] %} + + # Grab surface analysis data. + # If IAU is on, grab the beginning of the window, otherwise grab the center. + {% if DOIAU_ENKF %} + {% set offset_td = "-3H" | to_timedelta %} + {% else %} + {% set offset_td = "0H" | to_timedelta %} + {% endif %} + {% set offset_dt = current_cycle | add_to_datetime(offset_td) %} + {% set offset_YMD = offset_dt | to_YMD %} + {% set offset_HH = offset_dt | strftime("%H") %} + {% set prefix = offset_YMD + "." + offset_HH + "0000" %} + {% for itile in range(1, 7) %} + - "{{ COM_ATMOS_RESTART_MEM | relpath(ROTDIR) }}/{{ prefix }}.sfcanl_data.tile{{ itile }}.nc" + {% endfor %} + + # Now get the restart files. + {% for r_time in range(restart_interval, fhmax + 1, restart_interval) %} + {% set r_timedelta = (r_time | string + "H") | to_timedelta %} + {% set r_dt = current_cycle | add_to_datetime(r_timedelta) %} + {% set r_prefix = r_dt | to_YMD + "." 
+ r_dt | strftime("%H") + "0000" %} + {% for itile in range(1, 7) %} + {% for datatype in ["ca_data", "fv_core.res", "fv_srf_wnd.res", "fv_tracer.res", "phy_data", "sfc_data"] %} + - "{{ COM_ATMOS_RESTART_MEM | relpath(ROTDIR) }}/{{ r_prefix }}.{{datatype}}.tile{{ itile }}.nc" + {% endfor %} + {% endfor %} + - "{{ COM_ATMOS_RESTART_MEM | relpath(ROTDIR) }}/{{ r_prefix }}.coupler.res" + - "{{ COM_ATMOS_RESTART_MEM | relpath(ROTDIR) }}/{{ r_prefix }}.fv_core.res.nc" + {% endfor %} + {% endfor %} diff --git a/parm/archive/gdas.yaml.j2 b/parm/archive/gdas.yaml.j2 new file mode 100644 index 0000000000..3c7709cfac --- /dev/null +++ b/parm/archive/gdas.yaml.j2 @@ -0,0 +1,128 @@ +gdas: + {% set head = "gdas.t" + cycle_HH + "z." %} + name: "GDAS" + target: "{{ ATARDIR }}/{{ cycle_YMDH }}/gdas.tar" + required: + {% if current_cycle != SDATE and MODE == "cycled" %} + - "{{ COM_ATMOS_GRIB_0p25 | relpath(ROTDIR) }}/{{ head }}pgrb2.0p25.anl" + - "{{ COM_ATMOS_GRIB_0p25 | relpath(ROTDIR) }}/{{ head }}pgrb2.0p25.anl.idx" + - "{{ COM_ATMOS_GRIB_1p00 | relpath(ROTDIR) }}/{{ head }}pgrb2.1p00.anl" + - "{{ COM_ATMOS_GRIB_1p00 | relpath(ROTDIR) }}/{{ head }}pgrb2.1p00.anl.idx" + - "{{ COM_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}atmanl.nc" + - "{{ COM_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}sfcanl.nc" + - "logs/{{ cycle_YMDH }}/{{ RUN }}atmanlprod.log" + - "logs/{{ cycle_YMDH }}/{{ RUN }}prep.log" + {% if DO_JEDIATMVAR %} + - "logs/{{ cycle_YMDH }}/{{ RUN }}prepatmiodaobs.log" + - "logs/{{ cycle_YMDH }}/{{ RUN }}atmanlinit.log" + - "logs/{{ cycle_YMDH }}/{{ RUN }}atmanlprod.log" + - "logs/{{ cycle_YMDH }}/{{ RUN }}atmanlfinal.log" + - "logs/{{ cycle_YMDH }}/{{ RUN }}atmanlfv3inc.log" + - "logs/{{ cycle_YMDH }}/{{ RUN }}atmanlvar.log" + {% else %} + - "logs/{{ cycle_YMDH }}/{{ RUN }}anal.log" + - "logs/{{ cycle_YMDH }}/{{ RUN }}analdiag.log" + {% endif %} + - "logs/{{ cycle_YMDH }}/{{ RUN }}atmanlupp.log" + {% if DO_JEDIOCNVAR %} + - "logs/{{ cycle_YMDH }}/{{ RUN 
}}prepoceanobs.log" + - "logs/{{ cycle_YMDH }}/{{ RUN }}ocnanalprep.log" + - "logs/{{ cycle_YMDH }}/{{ RUN }}ocnanalbmat.log" + - "logs/{{ cycle_YMDH }}/{{ RUN }}ocnanalrun.log" + - "logs/{{ cycle_YMDH }}/{{ RUN }}ocnanalpost.log" + - "logs/{{ cycle_YMDH }}/{{ RUN }}ocnanalchkpt.log" + {% if DOHYBVAR %} + - "logs/{{ cycle_YMDH }}/{{ RUN }}ocnanalecen.log" + {% endif %} + {% endif %} + {% if DO_VRFY_OCEANDA %} + - "logs/{{ cycle_YMDH }}/{{ RUN }}ocnanalvrfy.log" + {% endif %} + {% if DOHYBVAR %} + - "{{ COM_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}atmanl.ensres.nc" + {% endif %} + {% if DO_JEDIATMVAR %} + - "{{ COM_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}atmvar.yaml" + - "{{ COM_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}atmstat" + {% else %} + - "{{ COM_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}gsistat" + - "{{ COM_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}cnvstat" + - "{{ COM_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}oznstat" + - "{{ COM_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}radstat" + {% endif %} + {% if DO_VERFOZN %} + - "{{ COM_ATMOS_OZNMON | relpath(ROTDIR) }}/time/bad_cnt.{{ cycle_YMDH }}" + - "{{ COM_ATMOS_OZNMON | relpath(ROTDIR) }}/time/bad_diag.{{ cycle_YMDH }}" + - "{{ COM_ATMOS_OZNMON | relpath(ROTDIR) }}/time/bad_pen.{{ cycle_YMDH }}" + - "{{ COM_ATMOS_OZNMON | relpath(ROTDIR) }}/time/stdout.time.tar.gz" + - "{{ COM_ATMOS_OZNMON | relpath(ROTDIR) }}/horiz/stdout.horiz.tar.gz" + - "logs/{{ cycle_YMDH }}/{{ RUN }}verfozn.log" + {% endif %} + {% if DO_VERFRAD %} + - "{{ COM_ATMOS_RADMON | relpath(ROTDIR) }}/radmon_angle.tar.gz" + - "{{ COM_ATMOS_RADMON | relpath(ROTDIR) }}/radmon_bcoef.tar.gz" + - "{{ COM_ATMOS_RADMON | relpath(ROTDIR) }}/radmon_bcor.tar.gz" + - "{{ COM_ATMOS_RADMON | relpath(ROTDIR) }}/radmon_time.tar.gz" + - "logs/{{ cycle_YMDH }}/{{ RUN }}verfrad.log" + {% endif %} + {% if DO_VMINMON %} + - "{{ COM_ATMOS_MINMON | relpath(ROTDIR) }}/{{ cycle_YMDH }}.costs.txt" + - "{{ COM_ATMOS_MINMON | relpath(ROTDIR) 
}}/{{ cycle_YMDH }}.cost_terms.txt" + - "{{ COM_ATMOS_MINMON | relpath(ROTDIR) }}/{{ cycle_YMDH }}.gnorms.ieee_d" + - "{{ COM_ATMOS_MINMON | relpath(ROTDIR) }}/{{ cycle_YMDH }}.reduction.ieee_d" + - "{{ COM_ATMOS_MINMON | relpath(ROTDIR) }}/gnorm_data.txt" + - "logs/{{ cycle_YMDH }}/{{ RUN }}vminmon.log" + {% endif %} + {% if AERO_ANL_CDUMP == "gdas" or AERO_ANL_CDUMP == "both" %} + - "{{ COM_CHEM_ANALYSIS | relpath(ROTDIR) }}/{{ head }}aerostat" + {% endif %} + {% if DO_JEDISNOWDA %} + - "{{ COM_SNOW_ANALYSIS | relpath(ROTDIR) }}/{{ head }}snowstat.tgz" + {% endif %} + {% endif %} # Full cycle + - "logs/{{ cycle_YMDH }}/{{ RUN }}fcst.log" + # TODO explicitly name the atmos_prod log files to archive + - "logs/{{ cycle_YMDH }}/{{ RUN }}atmos_prod_f*.log" + {% for fhr in range(0, FHMAX + 1, 3) %} + - "{{ COM_ATMOS_GRIB_0p25 | relpath(ROTDIR) }}/{{ head }}pgrb2.0p25.f{{ '%03d' % fhr }}" + - "{{ COM_ATMOS_GRIB_0p25 | relpath(ROTDIR) }}/{{ head }}pgrb2.0p25.f{{ '%03d' % fhr }}.idx" + - "{{ COM_ATMOS_GRIB_1p00 | relpath(ROTDIR) }}/{{ head }}pgrb2.1p00.f{{ '%03d' % fhr }}" + - "{{ COM_ATMOS_GRIB_1p00 | relpath(ROTDIR) }}/{{ head }}pgrb2.1p00.f{{ '%03d' % fhr }}.idx" + - "{{ COM_ATMOS_HISTORY | relpath(ROTDIR) }}/{{ head }}atm.logf{{ '%03d' % fhr }}.txt" + - "{{ COM_ATMOS_HISTORY | relpath(ROTDIR) }}/{{ head }}atmf{{ '%03d' % fhr }}.nc" + - "{{ COM_ATMOS_HISTORY | relpath(ROTDIR) }}/{{ head }}sfcf{{ '%03d' % fhr }}.nc" + - "{{ COM_ATMOS_MASTER | relpath(ROTDIR) }}/{{ head }}sfluxgrbf{{ '%03d' % fhr }}.grib2" + - "{{ COM_ATMOS_MASTER | relpath(ROTDIR) }}/{{ head }}sfluxgrbf{{ '%03d' % fhr }}.grib2.idx" + {% endfor %} + optional: + {% if current_cycle != SDATE and MODE == "cycled" %} + {% if DOHYBVAR %} + - "{{ COM_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}atma003.ensres.nc" + - "{{ COM_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}atma009.ensres.nc" + {% endif %} + {% if DO_VERFRAD %} + - "{{ COM_ATMOS_RADMON | relpath(ROTDIR) }}/bad_diag.{{ cycle_YMDH }}" + - "{{ 
COM_ATMOS_RADMON | relpath(ROTDIR) }}/bad_pen.{{ cycle_YMDH }}" + - "{{ COM_ATMOS_RADMON | relpath(ROTDIR) }}/low_count.{{ cycle_YMDH }}" + - "{{ COM_ATMOS_RADMON | relpath(ROTDIR) }}/warning.{{ cycle_YMDH }}" + {% endif %} + {% if DO_VERFOZN %} + {% set oznmon_types = [ + "gome_metop-b", "omi_aura", "ompslp_npp", "ompsnp_n20", + "ompsnp_npp", "ompstc8_n20", "ompstc8_npp", "sbuv2_n19" + ] %} + {% for group in [ "horiz", "time" ] %} + {% if group == "horiz" %} {% set suffix = ".gz" %} {% else %} {% set suffix = "" %} {% endif %} + {% for type in oznmon_types %} + - "{{ COM_ATMOS_OZNMON | relpath(ROTDIR) }}/{{ group }}/{{ type }}.anl.ctl" + - "{{ COM_ATMOS_OZNMON | relpath(ROTDIR) }}/{{ group }}/{{ type }}.anl.{{ cycle_YMDH }}.ieee_d{{ suffix }}" + - "{{ COM_ATMOS_OZNMON | relpath(ROTDIR) }}/{{ group }}/{{ type }}.ges.ctl" + - "{{ COM_ATMOS_OZNMON | relpath(ROTDIR) }}/{{ group }}/{{ type }}.ges.{{ cycle_YMDH }}.ieee_d{{ suffix }}" + {% endfor %} + {% endfor %} + {% endif %} + {% endif %} + {% if not WRITE_DOPOST %} + # TODO set the forecast hours explicitly. This will require emulating numpy.array_split + - "logs/{{ cycle_YMDH }}/{{ RUN }}atmos_upp_f*.log" + {% endif %} ## not WRITE_DOPOST diff --git a/parm/archive/gdas_restarta.yaml.j2 b/parm/archive/gdas_restarta.yaml.j2 new file mode 100644 index 0000000000..9c4aa1244f --- /dev/null +++ b/parm/archive/gdas_restarta.yaml.j2 @@ -0,0 +1,49 @@ +gdas_restarta: + {% set head = "gdas.t" + cycle_HH + "z." 
%} + name: "GDAS_RESTARTA" + target: "{{ ATARDIR }}/{{ cycle_YMDH }}/gdas_restarta.tar" + required: + {% if current_cycle != SDATE and MODE == "cycled" %} + - "{{ COM_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}atminc.nc" + {% for iaufhr in iaufhrs if iaufhr != 6 %} + - "{{ COM_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}atmi{{ "%03d" % iaufhr }}.nc" + {% endfor %} + {% if DOHYBVAR and DOIAU %} + {% set anl_offset = "-3H" %} + {% else %} + {% set anl_offset = "0H" %} + {% endif %} + {% set anl_timedelta = anl_offset | to_timedelta %} + {% set anl_time = current_cycle | add_to_datetime(anl_timedelta) %} + {% for itile in range(1,7) %} + - "{{ COM_ATMOS_RESTART | relpath(ROTDIR) }}/{{ anl_time | to_YMD }}.{{ anl_time | strftime("%H") }}0000.sfcanl_data.tile{{ itile }}.nc" + {% endfor %} + {% if not DO_JEDIATMVAR %} + - "{{ COM_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}abias" + - "{{ COM_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}abias_air" + - "{{ COM_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}abias_pc" + - "{{ COM_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}radstat" + {% endif %} + {% if DO_JEDISNOWDA %} + {% for itile in range(1,7) %} + # Snow analysis is 3dvar + - "{{ COM_SNOW_ANALYSIS | relpath(ROTDIR) }}/snowinc.{{ cycle_YMD }}.{{ cycle_HH }}0000.sfc_data.tile{{ itile }}.nc" + - "{{ COM_SNOW_ANALYSIS | relpath(ROTDIR) }}/{{ cycle_YMD }}.{{ cycle_HH }}0000.sfc_data.tile{{ itile }}.nc" + {% endfor %} + {% endif %} + {% endif %} + optional: + {% if current_cycle != SDATE and MODE == "cycled" %} + - "{{ COM_OBS | relpath(ROTDIR) }}/{{ head }}nsstbufr" + - "{{ COM_OBS | relpath(ROTDIR) }}/{{ head }}prepbufr" + - "{{ COM_OBS | relpath(ROTDIR) }}/{{ head }}prepbufr.acft_profiles" + {% if not DO_JEDIATMVAR %} + - "{{ COM_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}cnvstat" + - "{{ COM_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}abias_int" + - "{{ COM_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}dtfanl.nc" + - "{{ COM_ATMOS_ANALYSIS | relpath(ROTDIR) 
}}/{{ head }}loginc.txt" + {% endif %} + {% if DO_JEDISNOWDA %} + - "{{ COM_CONF | relpath(ROTDIR) }}/{{ head }}letkfoi.yaml" + {% endif %} + {% endif %} diff --git a/parm/archive/gdas_restartb.yaml.j2 b/parm/archive/gdas_restartb.yaml.j2 new file mode 100644 index 0000000000..c5cb29329f --- /dev/null +++ b/parm/archive/gdas_restartb.yaml.j2 @@ -0,0 +1,39 @@ +gdas_restartb: + {% set head = "gdas.t" + cycle_HH + "z." %} + name: "GDAS_RESTARTB" + target: "{{ ATARDIR }}/{{ cycle_YMDH }}/gdas_restartb.tar" + required: + # Grab the surface analysis data. + # If IAU is on, grab the beginning of the window. + {% if DOIAU %} + {% set offset_td = "-3H" | to_timedelta %} + {% set offset_dt = current_cycle | add_to_datetime(offset_td) %} + {% set offset_YMD = offset_dt | to_YMD %} + {% set offset_HH = offset_dt | strftime("%H") %} + {% set prefix = offset_YMD + "." + offset_HH + "0000" %} + {% for itile in range(1, 7) %} + - "{{ COM_ATMOS_RESTART | relpath(ROTDIR) }}/{{ prefix }}.sfcanl_data.tile{{ itile }}.nc" + {% endfor %} + {% endif %} + + # Regardless, always grab the center surface analysis data. + {% set prefix = cycle_YMD + "." + cycle_HH + "0000" %} + {% for itile in range(1, 7) %} + - "{{ COM_ATMOS_RESTART | relpath(ROTDIR) }}/{{ prefix }}.sfcanl_data.tile{{ itile }}.nc" + {% endfor %} + + # Now get the restart files. + {% for r_time in range(restart_interval_gdas, FHMAX + 1, restart_interval_gdas) %} + {% set r_timedelta = (r_time | string + "H") | to_timedelta %} + {% set r_dt = current_cycle | add_to_datetime(r_timedelta) %} + {% set r_YMD = r_dt | to_YMD %} + {% set r_HH = r_dt | strftime("%H") %} + {% set r_prefix = r_YMD + "." 
+ r_HH + "0000" %} + {% for itile in range(1, 7) %} + {% for datatype in ["ca_data", "fv_core.res", "fv_srf_wnd.res", "fv_tracer.res", "phy_data", "sfc_data"] %} + - "{{ COM_ATMOS_RESTART | relpath(ROTDIR) }}/{{ r_prefix }}.{{datatype}}.tile{{ itile }}.nc" + {% endfor %} + {% endfor %} + - "{{ COM_ATMOS_RESTART | relpath(ROTDIR) }}/{{ r_prefix }}.coupler.res" + - "{{ COM_ATMOS_RESTART | relpath(ROTDIR) }}/{{ r_prefix }}.fv_core.res.nc" + {% endfor %} diff --git a/parm/archive/gdasice.yaml.j2 b/parm/archive/gdasice.yaml.j2 new file mode 100644 index 0000000000..4cfa1eb9af --- /dev/null +++ b/parm/archive/gdasice.yaml.j2 @@ -0,0 +1,10 @@ +gdasice: + {% set head = "gdas.ice.t" + cycle_HH + "z." %} + name: "GDASICE" + target: "{{ ATARDIR }}/{{ cycle_YMDH }}/gdasice.tar" + required: + - "{{ COM_ICE_HISTORY | relpath(ROTDIR) }}/{{ head }}ic.nc" + {% for fhr in range(FHOUT_OCNICE, FHMAX+1, FHOUT_OCNICE) %} + - "{{ COM_ICE_HISTORY | relpath(ROTDIR) }}/{{ head }}inst.f{{ '%03d' % fhr }}.nc" + {% endfor %} + - '{{ COM_CONF | relpath(ROTDIR) }}/ufs.ice_in' diff --git a/parm/archive/gdasice_restart.yaml.j2 b/parm/archive/gdasice_restart.yaml.j2 new file mode 100644 index 0000000000..39877674fb --- /dev/null +++ b/parm/archive/gdasice_restart.yaml.j2 @@ -0,0 +1,7 @@ +gdasice_restart: + {% set head = "gdas.ice.t" + cycle_HH + "z." %} + name: "GDASICE_RESTART" + target: "{{ ATARDIR }}/{{ cycle_YMDH }}/gdasice_restart.tar" + required: + # TODO explicitly name the restart files to archive + - '{{ COM_ICE_RESTART | relpath(ROTDIR) }}/*' diff --git a/parm/archive/gdasocean.yaml.j2 b/parm/archive/gdasocean.yaml.j2 new file mode 100644 index 0000000000..9791709319 --- /dev/null +++ b/parm/archive/gdasocean.yaml.j2 @@ -0,0 +1,9 @@ +gdasocean: + {% set head = "gdas.ocean.t" + cycle_HH + "z." 
%} + name: "GDASOCEAN" + target: "{{ ATARDIR }}/{{ cycle_YMDH }}/gdasocean.tar" + required: + {% for fhr in range(FHMIN, FHMAX + 1, FHOUT_OCNICE) %} + - "{{ COM_OCEAN_HISTORY | relpath(ROTDIR) }}/{{ head }}inst.f{{ '%03d' % fhr }}.nc" + {% endfor %} + - '{{ COM_CONF | relpath(ROTDIR) }}/ufs.MOM_input' diff --git a/parm/archive/gdasocean_analysis.yaml.j2 b/parm/archive/gdasocean_analysis.yaml.j2 new file mode 100644 index 0000000000..0c43cd40ba --- /dev/null +++ b/parm/archive/gdasocean_analysis.yaml.j2 @@ -0,0 +1,27 @@ +gdasocean_analysis: + {% set head = "gdas.t" + cycle_HH + "z." %} + name: "GDASOCEAN_ANALYSIS" + target: "{{ ATARDIR }}/{{ cycle_YMDH }}/gdasocean_analysis.tar" + required: + - '{{ COM_OCEAN_ANALYSIS | relpath(ROTDIR) }}/{{ head }}ocninc.nc' + {% set ocngrid_cycle = '%02d' % (((cycle_HH | int) - 3) % 24) %} + - '{{ COM_OCEAN_ANALYSIS | relpath(ROTDIR) }}/gdas.t{{ ocngrid_cycle }}z.ocngrid.nc' + {% for domain in ["ocn", "ice"] %} + - '{{ COM_OCEAN_ANALYSIS | relpath(ROTDIR) }}/{{ head }}{{domain}}.bkgerr_stddev.nc' + - '{{ COM_OCEAN_ANALYSIS | relpath(ROTDIR) }}/{{ head }}{{domain}}.incr.nc' + - '{{ COM_OCEAN_ANALYSIS | relpath(ROTDIR) }}/{{ head }}{{domain}}ana.nc' + {% if NMEM_ENS > 2 %} + - '{{ COM_OCEAN_ANALYSIS | relpath(ROTDIR) }}/{{ head }}{{domain}}.recentering_error.nc' + {% endif %} + {% endfor %} + {% if NMEM_ENS > 2 %} + - '{{ COM_OCEAN_ANALYSIS | relpath(ROTDIR) }}/{{ head }}ocn.ssh_steric_stddev.nc' + - '{{ COM_OCEAN_ANALYSIS | relpath(ROTDIR) }}/{{ head }}ocn.ssh_unbal_stddev.nc' + - '{{ COM_OCEAN_ANALYSIS | relpath(ROTDIR) }}/{{ head }}ocn.ssh_total_stddev.nc' + - '{{ COM_OCEAN_ANALYSIS | relpath(ROTDIR) }}/{{ head }}ocn.steric_explained_variance.nc' + {% endif %} + - '{{ COM_OCEAN_ANALYSIS | relpath(ROTDIR) }}/{{ head }}ocn.adt_rads_all.stats.csv' + - '{{ COM_OCEAN_ANALYSIS | relpath(ROTDIR) }}/{{ head }}ocn.icec_amsr2_north.stats.csv' + - '{{ COM_OCEAN_ANALYSIS | relpath(ROTDIR) }}/{{ head }}ocn.icec_amsr2_south.stats.csv' + - '{{ 
COM_OCEAN_ANALYSIS | relpath(ROTDIR) }}/diags/*.nc4' + - '{{ COM_OCEAN_ANALYSIS | relpath(ROTDIR) }}/yaml/*.yaml' diff --git a/parm/archive/gdasocean_restart.yaml.j2 b/parm/archive/gdasocean_restart.yaml.j2 new file mode 100644 index 0000000000..21bfc3955f --- /dev/null +++ b/parm/archive/gdasocean_restart.yaml.j2 @@ -0,0 +1,8 @@ +gdasocean_restart: + {% set head = "gdas.ocean.t" + cycle_HH + "z." %} + name: "GDASOCEAN_RESTART" + target: "{{ ATARDIR }}/{{ cycle_YMDH }}/gdasocean_restart.tar" + required: + # TODO explicitly name the restart files to archive + - '{{ COM_OCEAN_RESTART | relpath(ROTDIR) }}/*' + - '{{ COM_MED_RESTART | relpath(ROTDIR) }}/*' diff --git a/parm/archive/gdaswave.yaml.j2 b/parm/archive/gdaswave.yaml.j2 new file mode 100644 index 0000000000..74a5a64dbf --- /dev/null +++ b/parm/archive/gdaswave.yaml.j2 @@ -0,0 +1,8 @@ +gdaswave: + {% set head = "gdas.wave.t" + cycle_HH + "z." %} + name: "GDASWAVE" + target: "{{ ATARDIR }}/{{ cycle_YMDH }}/gdaswave.tar" + required: + # TODO explicitly name the wave grid/station files to archive + - "{{ COM_WAVE_GRID | relpath(ROTDIR) }}/{{ head }}*" + - "{{ COM_WAVE_STATION | relpath(ROTDIR) }}/{{ head }}*" diff --git a/parm/archive/gdaswave_restart.yaml.j2 b/parm/archive/gdaswave_restart.yaml.j2 new file mode 100644 index 0000000000..8387d48616 --- /dev/null +++ b/parm/archive/gdaswave_restart.yaml.j2 @@ -0,0 +1,6 @@ +gdaswave_restart: + name: "GDASWAVE_RESTART" + target: "{{ ATARDIR }}/{{ cycle_YMDH }}/gdaswave_restart.tar" + required: + # TODO explicitly name the wave restart files to archive + - "{{ COM_WAVE_RESTART | relpath(ROTDIR) }}/*" diff --git a/parm/archive/gfs_downstream.yaml.j2 b/parm/archive/gfs_downstream.yaml.j2 new file mode 100644 index 0000000000..23c9383d28 --- /dev/null +++ b/parm/archive/gfs_downstream.yaml.j2 @@ -0,0 +1,12 @@ +gfs_downstream: + {% set head = "gfs.t" + cycle_HH + "z." 
%} + name: "GFS_DOWNSTREAM" + target: "{{ ATARDIR }}/{{ cycle_YMDH }}/gfs_downstream.tar" + required: + - "{{ COM_ATMOS_GEMPAK | relpath(ROTDIR) }}/gfs_{{ cycle_YMDH }}.sfc" + - "{{ COM_ATMOS_GEMPAK | relpath(ROTDIR) }}/gfs_{{ cycle_YMDH }}.snd" + {% for i in range(1, NUM_SND_COLLECTIVES) %} + - "{{ COM_ATMOS_WMO | relpath(ROTDIR) }}/gfs_collective{{ i }}.postsnd_{{ cycle_HH }}" + {% endfor %} + - "{{ COM_ATMOS_BUFR | relpath(ROTDIR) }}/bufr.t{{ cycle_HH }}z" + - "{{ COM_ATMOS_BUFR | relpath(ROTDIR) }}/gfs.t{{ cycle_HH }}z.bufrsnd.tar.gz" diff --git a/parm/archive/gfs_flux.yaml.j2 b/parm/archive/gfs_flux.yaml.j2 new file mode 100644 index 0000000000..66c8221f60 --- /dev/null +++ b/parm/archive/gfs_flux.yaml.j2 @@ -0,0 +1,9 @@ +gfs_flux: + {% set head = "gfs.t" + cycle_HH + "z." %} + name: "GFS_FLUX" + target: "{{ ATARDIR }}/{{ cycle_YMDH }}/gfs_flux.tar" + required: + {% for fhr in range(FHMIN_GFS, FHMAX_GFS + FHOUT_GFS, FHOUT_GFS) %} + - "{{ COM_ATMOS_MASTER | relpath(ROTDIR) }}/{{ head }}sfluxgrbf{{ '%03d' % fhr }}.grib2" + - "{{ COM_ATMOS_MASTER | relpath(ROTDIR) }}/{{ head }}sfluxgrbf{{ '%03d' % fhr }}.grib2.idx" + {% endfor %} diff --git a/parm/archive/gfs_flux_1p00.yaml.j2 b/parm/archive/gfs_flux_1p00.yaml.j2 new file mode 100644 index 0000000000..2f5c9c8910 --- /dev/null +++ b/parm/archive/gfs_flux_1p00.yaml.j2 @@ -0,0 +1,9 @@ +gfs_flux_1p00: + {% set head = "gfs.t" + cycle_HH + "z." 
%} + name: "GFS_FLUX_1P00" + target: "{{ ATARDIR }}/{{ cycle_YMDH }}/gfs_flux_1p00.tar" + required: + {% for fhr in range(FHMIN_GFS, FHMAX_GFS + FHOUT_GFS, FHOUT_GFS) %} + - "{{ COM_ATMOS_GRIB_1p00 | relpath(ROTDIR) }}/{{ head }}flux.1p00.f{{ '%03d' % fhr }}" + - "{{ COM_ATMOS_GRIB_1p00 | relpath(ROTDIR) }}/{{ head }}flux.1p00.f{{ '%03d' % fhr }}.idx" + {% endfor %} diff --git a/parm/archive/gfs_netcdfa.yaml.j2 b/parm/archive/gfs_netcdfa.yaml.j2 new file mode 100644 index 0000000000..6bcafe1b89 --- /dev/null +++ b/parm/archive/gfs_netcdfa.yaml.j2 @@ -0,0 +1,16 @@ +gfs_netcdfa: + {% set head = "gfs.t" + cycle_HH + "z." %} + name: "GFS_NETCDFA" + target: "{{ ATARDIR }}/{{ cycle_YMDH }}/gfs_netcdfa.tar" + required: + - "{{ COM_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}atmanl.nc" + - "{{ COM_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}sfcanl.nc" + - "{{ COM_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}atminc.nc" + {% for iauhr in iaufhrs if iauhr != 6 %} + - "{{ COM_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}atmi{{ "%03d" % iauhr }}.nc" + {% endfor %} + optional: + {% if not DO_JEDIATMVAR %} + - "{{ COM_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}dtfanl.nc" + - "{{ COM_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}loginc.txt" + {% endif %} diff --git a/parm/archive/gfs_netcdfb.yaml.j2 b/parm/archive/gfs_netcdfb.yaml.j2 new file mode 100644 index 0000000000..b0393d63b6 --- /dev/null +++ b/parm/archive/gfs_netcdfb.yaml.j2 @@ -0,0 +1,9 @@ +gfs_netcdfb: + {% set head = "gfs.t" + cycle_HH + "z." 
%} + name: "GFS_NETCDFB" + target: "{{ ATARDIR }}/{{ cycle_YMDH }}/gfs_netcdfb.tar" + required: + {% for fhr in range(0, ARCH_GAUSSIAN_FHMAX + ARCH_GAUSSIAN_FHINC, ARCH_GAUSSIAN_FHINC) %} + - "{{ COM_ATMOS_HISTORY | relpath(ROTDIR) }}/{{ head }}atmf{{ '%03d' % fhr }}.nc" + - "{{ COM_ATMOS_HISTORY | relpath(ROTDIR) }}/{{ head }}sfcf{{ '%03d' % fhr }}.nc" + {% endfor %} diff --git a/parm/archive/gfs_pgrb2b.yaml.j2 b/parm/archive/gfs_pgrb2b.yaml.j2 new file mode 100644 index 0000000000..b06dd14b73 --- /dev/null +++ b/parm/archive/gfs_pgrb2b.yaml.j2 @@ -0,0 +1,19 @@ +gfs_pgrb2b: + {% set head = "gfs.t" + cycle_HH + "z." %} + name: "GFS_PGRB2B" + target: "{{ ATARDIR }}/{{ cycle_YMDH }}/gfs_pgrb2b.tar" + required: + {% if MODE == "cycled" %} + - "{{ COM_ATMOS_GRIB_0p25 | relpath(ROTDIR) }}/{{ head }}pgrb2b.0p25.anl" + - "{{ COM_ATMOS_GRIB_0p25 | relpath(ROTDIR) }}/{{ head }}pgrb2b.0p25.anl.idx" + - "{{ COM_ATMOS_GRIB_1p00 | relpath(ROTDIR) }}/{{ head }}pgrb2b.1p00.anl" + - "{{ COM_ATMOS_GRIB_1p00 | relpath(ROTDIR) }}/{{ head }}pgrb2b.1p00.anl.idx" + {% endif %} + {% if ARCH_GAUSSIAN %} + {% for fhr in range(0, FHMAX_GFS + FHOUT_GFS, FHOUT_GFS) %} + - "{{ COM_ATMOS_GRIB_0p25 | relpath(ROTDIR) }}/{{ head }}pgrb2b.0p25.f{{ '%03d' % fhr }}" + - "{{ COM_ATMOS_GRIB_0p25 | relpath(ROTDIR) }}/{{ head }}pgrb2b.0p25.f{{ '%03d' % fhr }}.idx" + - "{{ COM_ATMOS_GRIB_1p00 | relpath(ROTDIR) }}/{{ head }}pgrb2b.1p00.f{{ '%03d' % fhr }}" + - "{{ COM_ATMOS_GRIB_1p00 | relpath(ROTDIR) }}/{{ head }}pgrb2b.1p00.f{{ '%03d' % fhr }}.idx" + {% endfor %} + {% endif %} diff --git a/parm/archive/gfs_restarta.yaml.j2 b/parm/archive/gfs_restarta.yaml.j2 new file mode 100644 index 0000000000..c2ad717484 --- /dev/null +++ b/parm/archive/gfs_restarta.yaml.j2 @@ -0,0 +1,23 @@ +gfs_restarta: + {% set head = "gfs.t" + cycle_HH + "z." 
%} + name: "GFS_RESTARTA" + target: "{{ ATARDIR }}/{{ cycle_YMDH }}/gfs_restarta.tar" + required: + {% if MODE == "cycled" %} + {% if DOHYBVAR and DOIAU %} + {% set anl_offset = "-3H" %} + {% else %} + {% set anl_offset = "0H" %} + {% endif %} + {% set anl_timedelta = anl_offset | to_timedelta %} + {% set anl_time = current_cycle | add_to_datetime(anl_timedelta) %} + {% for i_tile in range(1, 7) %} + - "{{ COM_ATMOS_RESTART | relpath(ROTDIR) }}/{{ anl_time | to_YMD }}.{{ anl_time | strftime("%H") }}0000.sfcanl_data.tile{{ i_tile }}.nc" + {% endfor %} + {% elif MODE == "forecast-only" %} + - "{{ COM_ATMOS_INPUT | relpath(ROTDIR) }}/gfs_ctrl.nc" + {% for i_tile in range(1, 7) %} + - "{{ COM_ATMOS_INPUT | relpath(ROTDIR) }}/gfs_data.tile{{ i_tile }}.nc" + - "{{ COM_ATMOS_INPUT | relpath(ROTDIR) }}/sfc_data.tile{{ i_tile }}.nc" + {% endfor %} + {% endif %} diff --git a/parm/archive/gfsa.yaml.j2 b/parm/archive/gfsa.yaml.j2 new file mode 100644 index 0000000000..7ed12819a0 --- /dev/null +++ b/parm/archive/gfsa.yaml.j2 @@ -0,0 +1,62 @@ +gfsa: + {% set head = "gfs.t" + cycle_HH + "z." 
%} + name: "GFSA" + target: "{{ ATARDIR }}/{{ cycle_YMDH }}/gfsa.tar" + required: + # TODO explicitly name all logs to include + {% for log in glob("logs/" ~ cycle_YMDH ~ "/gfs*.log") %} + {% if not "gfsarch.log" in log %} + - "{{ log }}" + {% endif %} + {% endfor %} + # - "logs/{{ cycle_YMDH }}/{{ RUN }}fcst.log" + # - "logs/{{ cycle_YMDH }}/{{ RUN }}atmos_prod_f*.log" + - "{{ COM_CONF | relpath(ROTDIR) }}/ufs.input.nml" + {% if MODE == "cycled" %} + - "{{ COM_ATMOS_GRIB_0p25 | relpath(ROTDIR) }}/{{ head }}pgrb2.0p25.anl" + - "{{ COM_ATMOS_GRIB_0p25 | relpath(ROTDIR) }}/{{ head }}pgrb2.0p25.anl.idx" + - "{{ COM_ATMOS_GRIB_1p00 | relpath(ROTDIR) }}/{{ head }}pgrb2.1p00.anl" + - "{{ COM_ATMOS_GRIB_1p00 | relpath(ROTDIR) }}/{{ head }}pgrb2.1p00.anl.idx" + # - "logs/{{ cycle_YMDH }}/{{ RUN }}atmanlprod.log" + # - "logs/{{ cycle_YMDH }}/{{ RUN }}prep.log" + # - "logs/{{ cycle_YMDH }}/{{ RUN }}anal.log" + # - "logs/{{ cycle_YMDH }}/{{ RUN }}atmanlupp.log" + {% if DO_VMINMON %} + - "{{ COM_ATMOS_MINMON | relpath(ROTDIR) }}/{{ cycle_YMDH }}.costs.txt" + - "{{ COM_ATMOS_MINMON | relpath(ROTDIR) }}/{{ cycle_YMDH }}.cost_terms.txt" + - "{{ COM_ATMOS_MINMON | relpath(ROTDIR) }}/{{ cycle_YMDH }}.gnorms.ieee_d" + - "{{ COM_ATMOS_MINMON | relpath(ROTDIR) }}/{{ cycle_YMDH }}.reduction.ieee_d" + - "{{ COM_ATMOS_MINMON | relpath(ROTDIR) }}/gnorm_data.txt" + # - "logs/{{ cycle_YMDH }}/{{ RUN }}vminmon.log" + {% endif %} + {% if AERO_ANL_CDUMP == "gfs" or AERO_ANL_CDUMP == "both" %} + - "{{ COM_CHEM_ANALYSIS | relpath(ROTDIR) }}/{{ head }}aerostat" + {% endif %} + {% if DO_JEDIATMVAR %} + - "{{ COM_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}atmvar.yaml" + - "{{ COM_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}atmstat" + {% else %} + - "{{ COM_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}gsistat" + {% endif %} + {% endif %} # Full cycle + {% for fhr in range(FHMIN_GFS, FHMAX_GFS + FHOUT_GFS, FHOUT_GFS) %} + - "{{ COM_ATMOS_GRIB_0p25 | relpath(ROTDIR) }}/{{ head }}pgrb2.0p25.f{{ 
'%03d' % fhr }}" + - "{{ COM_ATMOS_GRIB_0p25 | relpath(ROTDIR) }}/{{ head }}pgrb2.0p25.f{{ '%03d' % fhr }}.idx" + - "{{ COM_ATMOS_HISTORY | relpath(ROTDIR) }}/{{ head }}atm.logf{{ '%03d' % fhr }}.txt" + {% endfor %} + optional: + {% if MODE == "cycled" %} + - "{{ COM_OBS | relpath(ROTDIR) }}/{{ head }}nsstbufr" + - "{{ COM_OBS | relpath(ROTDIR) }}/{{ head }}prepbufr" + - "{{ COM_OBS | relpath(ROTDIR) }}/{{ head }}prepbufr.acft_profiles" + {% endif %} # cycled + - "{{ COM_ATMOS_TRACK | relpath(ROTDIR) }}/avno.t{{ cycle_HH }}z.cyclone.trackatcfunix" + - "{{ COM_ATMOS_TRACK | relpath(ROTDIR) }}/avnop.t{{ cycle_HH }}z.cyclone.trackatcfunix" + - "{{ COM_ATMOS_GENESIS | relpath(ROTDIR) }}/trak.gfso.atcfunix.{{ cycle_YMDH }}" + - "{{ COM_ATMOS_GENESIS | relpath(ROTDIR) }}/trak.gfso.atcfunix.altg.{{ cycle_YMDH }}" + - "{{ COM_ATMOS_GENESIS | relpath(ROTDIR) }}/storms.gfso.atcf_gen.{{ cycle_YMDH }}" + - "{{ COM_ATMOS_GENESIS | relpath(ROTDIR) }}/storms.gfso.atcf_gen.altg.{{ cycle_YMDH }}" + # {% if not WRITE_DOPOST %} + # # TODO set the forecast hours explicitly. This will require emulating numpy.array_split + # - "logs/{{ cycle_YMDH }}/{{ RUN }}atmos_upp_f*.log" + # {% endif %} ## not WRITE_DOPOST diff --git a/parm/archive/gfsb.yaml.j2 b/parm/archive/gfsb.yaml.j2 new file mode 100644 index 0000000000..721b529278 --- /dev/null +++ b/parm/archive/gfsb.yaml.j2 @@ -0,0 +1,17 @@ +gfsb: + {% set head = "gfs.t" + cycle_HH + "z." 
%} + name: "GFSB" + target: "{{ ATARDIR }}/{{ cycle_YMDH }}/gfsb.tar" + required: + {% if MODE == "cycled" %} + - "{{ COM_ATMOS_GRIB_0p50 | relpath(ROTDIR) }}/{{ head }}pgrb2.0p50.anl" + - "{{ COM_ATMOS_GRIB_0p50 | relpath(ROTDIR) }}/{{ head }}pgrb2.0p50.anl.idx" + - "{{ COM_ATMOS_GRIB_1p00 | relpath(ROTDIR) }}/{{ head }}pgrb2.1p00.anl" + - "{{ COM_ATMOS_GRIB_1p00 | relpath(ROTDIR) }}/{{ head }}pgrb2.1p00.anl.idx" + {% endif %} + {% for fhr in range(FHMIN_GFS, FHMAX_GFS + FHOUT_GFS, FHOUT_GFS) %} + - "{{ COM_ATMOS_GRIB_0p50 | relpath(ROTDIR) }}/{{ head }}pgrb2.0p50.f{{ '%03d' % fhr }}" + - "{{ COM_ATMOS_GRIB_0p50 | relpath(ROTDIR) }}/{{ head }}pgrb2.0p50.f{{ '%03d' % fhr }}.idx" + - "{{ COM_ATMOS_GRIB_1p00 | relpath(ROTDIR) }}/{{ head }}pgrb2.1p00.f{{ '%03d' % fhr }}" + - "{{ COM_ATMOS_GRIB_1p00 | relpath(ROTDIR) }}/{{ head }}pgrb2.1p00.f{{ '%03d' % fhr }}.idx" + {% endfor %} diff --git a/parm/archive/gfswave.yaml.j2 b/parm/archive/gfswave.yaml.j2 new file mode 100644 index 0000000000..8542afac0c --- /dev/null +++ b/parm/archive/gfswave.yaml.j2 @@ -0,0 +1,23 @@ +gfswave: + {% set head = "gfswave.t" + cycle_HH + "z." %} + name: "GFSWAVE" + target: "{{ ATARDIR }}/{{ cycle_YMDH }}/gfswave.tar" + required: + {% for fh in range(0, FHMAX_HF_WAV + FHOUT_HF_WAV, FHOUT_HF_WAV) %} + # NOTE This is as explicit as possible without major logic to parse wavepostGRD. + # Matches files of the form "gfswave.tCCz...fHHH.grib2". 
+ - "{{ COM_WAVE_GRID | relpath(ROTDIR) }}/{{ head }}*.*.f{{ '%03d' % fh }}.grib2" + - "{{ COM_WAVE_GRID | relpath(ROTDIR) }}/{{ head }}*.*.f{{ '%03d' % fh }}.grib2.idx" + {% endfor %} + {% for fh in range(FHMAX_HF_WAV + FHOUT_WAV, FHMAX_WAV_GFS + FHOUT_WAV, FHOUT_WAV) %} + - "{{ COM_WAVE_GRID | relpath(ROTDIR) }}/{{ head }}*.*.f{{ '%03d' % fh }}.grib2" + - "{{ COM_WAVE_GRID | relpath(ROTDIR) }}/{{ head }}*.*.f{{ '%03d' % fh }}.grib2.idx" + {% endfor %} + - "{{ COM_WAVE_STATION | relpath(ROTDIR) }}/{{ head }}bull_tar" + - "{{ COM_WAVE_STATION | relpath(ROTDIR) }}/{{ head }}cbull_tar" + - "{{ COM_WAVE_STATION | relpath(ROTDIR) }}/{{ head }}spec_tar.gz" + {% if DOIBP_WAV %} + - "{{ COM_WAVE_STATION | relpath(ROTDIR) }}/{{ head }}ibpbull_tar" + - "{{ COM_WAVE_STATION | relpath(ROTDIR) }}/{{ head }}ibpcbull_tar" + - "{{ COM_WAVE_STATION | relpath(ROTDIR) }}/{{ head }}ibp_tar" + {% endif %} diff --git a/parm/archive/ice_6hravg.yaml.j2 b/parm/archive/ice_6hravg.yaml.j2 new file mode 100644 index 0000000000..251e51b110 --- /dev/null +++ b/parm/archive/ice_6hravg.yaml.j2 @@ -0,0 +1,9 @@ +ice_6hravg: + {% set head = "gfs.ice.t" + cycle_HH + "z." %} + name: "ICE_6HRAVG" + target: "{{ ATARDIR }}/{{ cycle_YMDH }}/ice_6hravg.tar" + required: + - "{{ COM_ICE_HISTORY | relpath(ROTDIR) }}/{{ head }}ic.nc" + {% for fhr in range(6, FHMAX_GFS + 6, 6) %} + - "{{ COM_ICE_HISTORY | relpath(ROTDIR) }}/{{ head }}6hr_avg.f{{ '%03d' % fhr }}.nc" + {% endfor %} diff --git a/parm/archive/ice_grib2.yaml.j2 b/parm/archive/ice_grib2.yaml.j2 new file mode 100644 index 0000000000..42e6910a16 --- /dev/null +++ b/parm/archive/ice_grib2.yaml.j2 @@ -0,0 +1,18 @@ +ice_grib2: + {% set head = "gfs.ice.t" + cycle_HH + "z." 
%} + name: "ICE_GRIB2" + target: "{{ ATARDIR }}/{{ cycle_YMDH }}/ice_grib2.tar" + required: + {% for fhr in range(FHOUT_OCNICE_GFS, FHMAX_GFS + FHOUT_OCNICE_GFS, FHOUT_OCNICE_GFS) %} + {% set fhr3 = '%03d' % fhr %} + {% if ICERES == 500 %} + - "{{ COM_ICE_GRIB | relpath(ROTDIR) }}/5p00/{{ head }}5p00.f{{ fhr3 }}.grib2" + - "{{ COM_ICE_GRIB | relpath(ROTDIR) }}/5p00/{{ head }}5p00.f{{ fhr3 }}.grib2.idx" + {% elif ICERES == 100 %} + - "{{ COM_ICE_GRIB | relpath(ROTDIR) }}/1p00/{{ head }}1p00.f{{ fhr3 }}.grib2" + - "{{ COM_ICE_GRIB | relpath(ROTDIR) }}/1p00/{{ head }}1p00.f{{ fhr3 }}.grib2.idx" + {% elif ICERES == 25 or ICERES == "025" %} + - "{{ COM_ICE_GRIB | relpath(ROTDIR) }}/0p25/{{ head }}0p25.f{{ fhr3 }}.grib2" + - "{{ COM_ICE_GRIB | relpath(ROTDIR) }}/0p25/{{ head }}0p25.f{{ fhr3 }}.grib2.idx" + {% endif %} + {% endfor %} diff --git a/parm/archive/master_enkf.yaml.j2 b/parm/archive/master_enkf.yaml.j2 new file mode 100644 index 0000000000..7ab7f45e30 --- /dev/null +++ b/parm/archive/master_enkf.yaml.j2 @@ -0,0 +1,100 @@ +# Set variables/lists needed to parse the enkf templates +{% set cycle_HH = current_cycle | strftime("%H") %} +{% set cycle_YMD = current_cycle | to_YMD %} +{% set cycle_YMDH = current_cycle | to_YMDH %} +{% set head = RUN + ".t" + cycle_HH + "z." 
%} + +{% if IAUFHRS is string %} +{% set iaufhrs = [] %} +{% for iaufhr in IAUFHRS.split(",") %} +{% do iaufhrs.append(iaufhr | int) %} +{% endfor %} +{% else %} +{% set iaufhrs = [IAUFHRS] %} +{% endif %} + +{% if IAUFHRS_ENKF is string %} +{% set iaufhrs_enkf = [] %} +{% for iaufhr in IAUFHRS_ENKF.split(",") %} +{% do iaufhrs_enkf.append(iaufhr | int) %} +{% endfor %} +{% else %} +{% set iaufhrs_enkf = [IAUFHRS_ENKF] %} +{% endif %} + +datasets: +{% if ENSGRP == 0 %} +{% filter indent(width=4) %} +{% include "enkf.yaml.j2" %} +{% endfilter %} +{% else %} + +# Declare to-be-filled lists of member COM directories +{% set COM_ATMOS_ANALYSIS_MEM_list = [] %} +{% set COM_ATMOS_RESTART_MEM_list = [] %} +{% set COM_ATMOS_HISTORY_MEM_list = [] %} + +{% set first_group_mem = (ENSGRP - 1) * NMEM_EARCGRP + 1 %} +{% set last_group_mem = [ ENSGRP * NMEM_EARCGRP, nmem_ens ] | min %} + +# Construct member COM directories +{% for mem in range(first_group_mem, last_group_mem + 1) %} + +# Declare a dict of search and replace terms to run on each template +{% set tmpl_dict = {'ROTDIR':ROTDIR, + 'RUN':RUN, + 'YMD':cycle_YMD, + 'HH':cycle_HH, + 'MEMDIR':"mem" + '%03d' % mem} %} + +# Replace template variables with tmpl_dict, one key at a time +# This must be done in a namespace to overcome jinja scoping +# Variables set inside of a for loop are lost at the end of the loop +# unless they are part of a namespace +{% set com_ns = namespace(COM_ATMOS_ANALYSIS_MEM = COM_ATMOS_ANALYSIS_TMPL, + COM_ATMOS_HISTORY_MEM = COM_ATMOS_HISTORY_TMPL, + COM_ATMOS_RESTART_MEM = COM_ATMOS_RESTART_TMPL) %} + +{% for key in tmpl_dict.keys() %} +{% set search_term = '${' + key + '}' %} +{% set replace_term = tmpl_dict[key] %} +{% set com_ns.COM_ATMOS_ANALYSIS_MEM = com_ns.COM_ATMOS_ANALYSIS_MEM.replace(search_term, replace_term) %} +{% set com_ns.COM_ATMOS_HISTORY_MEM = com_ns.COM_ATMOS_HISTORY_MEM.replace(search_term, replace_term) %} +{% set com_ns.COM_ATMOS_RESTART_MEM = 
com_ns.COM_ATMOS_RESTART_MEM.replace(search_term, replace_term) %} +{% endfor %} + +{% do COM_ATMOS_ANALYSIS_MEM_list.append(com_ns.COM_ATMOS_ANALYSIS_MEM)%} +{% do COM_ATMOS_HISTORY_MEM_list.append(com_ns.COM_ATMOS_HISTORY_MEM)%} +{% do COM_ATMOS_RESTART_MEM_list.append(com_ns.COM_ATMOS_RESTART_MEM)%} + +{% endfor %} + +# Determine which members to archive +{% filter indent(width=4) %} +{% include "enkf_grp.yaml.j2" %} +{% endfilter %} + +# Determine if restarts should be saved +{% set save_warm_start_forecast, save_warm_start_cycled = ( False, False ) %} + +# Save the increments and restarts every ARCH_WARMICFREQ days +# The ensemble increments (group a) should be saved on the ARCH_CYC +{% if (current_cycle - SDATE).days % ARCH_WARMICFREQ == 0 %} +{% if ARCH_CYC == cycle_HH | int and current_cycle != SDATE %} +{% filter indent(width=4) %} +{% include "enkf_restarta_grp.yaml.j2" %} +{% endfilter %} +{% endif %} +{% endif %} + +# The ensemble ICs (group b) are restarts and always lag increments by assim_freq +{% set ics_offset = (assim_freq | string + "H") | to_timedelta %} +{% if (current_cycle | add_to_datetime(ics_offset) - SDATE).days % ARCH_WARMICFREQ == 0 %} +{% if (ARCH_CYC - assim_freq) % 24 == cycle_HH | int and current_cycle != SDATE %} +{% filter indent(width=4) %} +{% include "enkf_restartb_grp.yaml.j2" %} +{% endfilter %} +{% endif %} +{% endif %} + +{% endif %} # ENSGRP != 0 diff --git a/parm/archive/master_enkfgdas.yaml.j2 b/parm/archive/master_enkfgdas.yaml.j2 new file mode 100644 index 0000000000..e21f6a381b --- /dev/null +++ b/parm/archive/master_enkfgdas.yaml.j2 @@ -0,0 +1,6 @@ +# Set variables specific to gdasenkf runs then parse the master_enkf template +{% set (fhmin, fhmax, fhout) = (FHMIN_ENKF, FHMAX_ENKF, FHOUT_ENKF) %} +{% set do_calc_increment = DO_CALC_INCREMENT %} +{% set nmem_ens = NMEM_ENS %} +{% set restart_interval = restart_interval_enkfgdas %} +{% include "master_enkf.yaml.j2" %} diff --git a/parm/archive/master_enkfgfs.yaml.j2 
b/parm/archive/master_enkfgfs.yaml.j2 new file mode 100644 index 0000000000..93ec38b660 --- /dev/null +++ b/parm/archive/master_enkfgfs.yaml.j2 @@ -0,0 +1,6 @@ +# Set variables specific to gfsenkf runs then parse the master_enkf template +{% set (fhmin, fhmax, fhout) = (FHMIN_ENKF, FHMAX_ENKF_GFS, FHOUT_ENKF_GFS) %} +{% set do_calc_increment = DO_CALC_INCREMENT_ENKF_GFS %} +{% set nmem_ens = NMEM_ENS_GFS %} +{% set restart_interval = restart_interval_enkfgfs %} +{% include "master_enkf.yaml.j2" %} diff --git a/parm/archive/master_gdas.yaml.j2 b/parm/archive/master_gdas.yaml.j2 new file mode 100644 index 0000000000..6813209e6a --- /dev/null +++ b/parm/archive/master_gdas.yaml.j2 @@ -0,0 +1,87 @@ +{% set cycle_HH = current_cycle | strftime("%H") %} +{% set cycle_YMD = current_cycle | to_YMD %} +{% set cycle_YMDH = current_cycle | to_YMDH %} +{% set head = "gdas.t" + cycle_HH + "z." %} + +{% if IAUFHRS is string %} +{% set iaufhrs = [] %} +{% for iaufhr in IAUFHRS.split(",") %} +{% do iaufhrs.append(iaufhr | int) %} +{% endfor %} +{% else %} +{% set iaufhrs = [IAUFHRS] %} +{% endif %} + +datasets: +{% filter indent(width=4) %} +{% include "gdas.yaml.j2" %} +{% endfilter %} + +{% if DO_ICE %} +{% filter indent(width=4) %} +{% include "gdasice.yaml.j2" %} +{% endfilter %} +{% endif %} + +{% if DO_OCN %} +{% filter indent(width=4) %} +{% include "gdasocean.yaml.j2" %} +{% endfilter %} +{% if DO_JEDIOCNVAR and current_cycle != SDATE and MODE == "cycled" %} +{% filter indent(width=4) %} +{% include "gdasocean_analysis.yaml.j2" %} +{% endfilter %} +{% endif %} +{% endif %} + +{% if DO_WAVE %} +{% filter indent(width=4) %} +{% include "gdaswave.yaml.j2" %} +{% endfilter %} +{% endif %} + +# Determine if we will save restart ICs or not +{% set save_warm_start_forecast, save_warm_start_cycled = ( False, False ) %} + +{% if ARCH_CYC == cycle_HH | int and current_cycle != SDATE%} +# Save the warm and forecast-only cycle ICs every ARCH_WARMICFREQ days +{% if (current_cycle - 
SDATE).days % ARCH_WARMICFREQ == 0 %} +{% set save_warm_start_forecast = True %} +{% set save_warm_start_cycled = True %} +# Save the forecast-only restarts every ARCH_FCSTICFREQ days +{% elif (current_cycle - SDATE).days % ARCH_FCSTICFREQ == 0 %} +{% set save_warm_start_forecast = True %} +{% endif %} +{% endif %} + +{% if save_warm_start_forecast %} +{% filter indent(width=4) %} +{% include "gdas_restarta.yaml.j2" %} +{% endfilter %} + +{% if DO_WAVE %} +{% filter indent(width=4) %} +{% include "gdaswave_restart.yaml.j2" %} +{% endfilter %} +{% endif %} + +{% if DO_OCN %} +{% filter indent(width=4) %} +{% include "gdasocean_restart.yaml.j2" %} +{% endfilter %} +{% endif %} + +{% if DO_ICE %} +{% filter indent(width=4) %} +{% include "gdasice_restart.yaml.j2" %} +{% endfilter %} +{% endif %} + +{% endif %} # Save forecast-only restarts + +# Save cycled restarts +{% if save_warm_start_cycled %} +{% filter indent(width=4) %} +{% include "gdas_restartb.yaml.j2" %} +{% endfilter %} +{% endif %} diff --git a/parm/archive/master_gfs.yaml.j2 b/parm/archive/master_gfs.yaml.j2 new file mode 100644 index 0000000000..5340ddb721 --- /dev/null +++ b/parm/archive/master_gfs.yaml.j2 @@ -0,0 +1,95 @@ +{% set cycle_HH = current_cycle | strftime("%H") %} +{% set cycle_YMD = current_cycle | to_YMD %} +{% set cycle_YMDH = current_cycle | to_YMDH %} + +{% if IAUFHRS is string %} +{% set iaufhrs = [] %} +{% for iaufhr in IAUFHRS.split(",") %} +{% do iaufhrs.append(iaufhr | int) %} +{% endfor %} +{% else %} +{% set iaufhrs = [IAUFHRS] %} +{% endif %} +datasets: +{% filter indent(width=4) %} +{% include "gfsa.yaml.j2" %} +{% include "gfsb.yaml.j2" %} +{% endfilter %} + +{% if ARCH_GAUSSIAN %} +{% filter indent(width=4) %} +{% include "gfs_flux.yaml.j2" %} +{% include "gfs_netcdfb.yaml.j2" %} +{% include "gfs_pgrb2b.yaml.j2" %} +{% endfilter %} +{% if MODE == "cycled" %} +{% filter indent(width=4) %} +{% include "gfs_netcdfa.yaml.j2" %} +{% endfilter %} +{% endif %} +{% endif %} + +{% if 
DO_WAVE %} +{% filter indent(width=4) %} +{% include "gfswave.yaml.j2" %} +{% endfilter %} +{% endif %} + +{% if AERO_FCST_CDUMP == "gfs" or AERO_FCST_CDUMP == "both" %} +{% filter indent(width=4) %} +{% include "chem.yaml.j2" %} +{% endfilter %} +{% endif %} + +{% if DO_BUFRSND %} +{% filter indent(width=4) %} +{% include "gfs_downstream.yaml.j2" %} +{% endfilter %} +{% endif %} + +{% if DO_OCN %} +{% filter indent(width=4) %} +{% include "ocean_6hravg.yaml.j2" %} +{% include "ocean_daily.yaml.j2" %} +{% include "ocean_grib2.yaml.j2" %} +{% include "gfs_flux_1p00.yaml.j2" %} +{% endfilter %} +{% endif %} + +{% if DO_ICE %} +{% filter indent(width=4) %} +{% include "ice_6hravg.yaml.j2" %} +{% include "ice_grib2.yaml.j2" %} +{% endfilter %} +{% endif %} + +# Determine whether to save the MOS tarball +{% if DO_MOS and cycle_HH == "18" %} +{% if not REALTIME %} +{% filter indent(width=4) %} +{% include "gfsmos.yaml.j2" %} +{% endfilter %} +{% else %} +{% set td_from_sdate = current_cycle - SDATE %} +{% set td_one_day = "+1D" | to_timedelta %} +{% if td_from_sdate > td_one_day %} +{% filter indent(width=4) %} +{% include "gfsmos.yaml.j2" %} +{% endfilter %} +{% endif %} +{% endif %} +{% endif %} + +# Determine if we will save restart ICs or not +{% if ARCH_CYC == cycle_HH | int and current_cycle != SDATE %} +# Save the forecast-only cycle ICs every ARCH_WARMICFREQ or ARCH_FCSTICFREQ days +{% if (current_cycle - SDATE).days % ARCH_WARMICFREQ == 0 %} +{% filter indent(width=4) %} +{% include "gfs_restarta.yaml.j2" %} +{% endfilter %} +{% elif (current_cycle - SDATE).days % ARCH_FCSTICFREQ == 0 %} +{% filter indent(width=4) %} +{% include "gfs_restarta.yaml.j2" %} +{% endfilter %} +{% endif %} +{% endif %} diff --git a/parm/archive/ocean_6hravg.yaml.j2 b/parm/archive/ocean_6hravg.yaml.j2 new file mode 100644 index 0000000000..dac3ce262a --- /dev/null +++ b/parm/archive/ocean_6hravg.yaml.j2 @@ -0,0 +1,8 @@ +ocean_6hravg: + {% set head = "gfs.ocean.t" + cycle_HH + "z." 
%} + name: "OCEAN_6HRAVG" + target: "{{ ATARDIR }}/{{ cycle_YMDH }}/ocean_6hravg.tar" + required: + {% for fhr in range(6, FHMAX_GFS + 6, 6) %} + - "{{ COM_OCEAN_HISTORY | relpath(ROTDIR) }}/{{ head }}6hr_avg.f{{ '%03d' % fhr }}.nc" + {% endfor %} diff --git a/parm/archive/ocean_daily.yaml.j2 b/parm/archive/ocean_daily.yaml.j2 new file mode 100644 index 0000000000..0f45264973 --- /dev/null +++ b/parm/archive/ocean_daily.yaml.j2 @@ -0,0 +1,8 @@ +ocean_daily: + {% set head = "gfs.ocean.t" + cycle_HH + "z." %} + name: "OCEAN_DAILY" + target: "{{ ATARDIR }}/{{ cycle_YMDH }}/ocean_daily.tar" + required: + {% for fhr in range(24, FHMAX_GFS + 24, 24) %} + - "{{ COM_OCEAN_HISTORY | relpath(ROTDIR) }}/{{ head }}daily.f{{ '%03d' % fhr }}.nc" + {% endfor %} diff --git a/parm/archive/ocean_grib2.yaml.j2 b/parm/archive/ocean_grib2.yaml.j2 new file mode 100644 index 0000000000..2e63c0ca98 --- /dev/null +++ b/parm/archive/ocean_grib2.yaml.j2 @@ -0,0 +1,18 @@ +ocean_grib2: + {% set head = "gfs.ocean.t" + cycle_HH + "z." 
%} + name: "OCEAN_GRIB2" + target: "{{ ATARDIR }}/{{ cycle_YMDH }}/ocean_grib2.tar" + required: + {% for fhr in range(FHOUT_OCNICE_GFS, FHMAX_GFS + FHOUT_OCNICE_GFS, FHOUT_OCNICE_GFS) %} + {% set fhr3 = '%03d' % fhr %} + {% if OCNRES == 500 %} + - "{{ COM_OCEAN_GRIB | relpath(ROTDIR) }}/5p00/{{ head }}5p00.f{{ fhr3 }}.grib2" + - "{{ COM_OCEAN_GRIB | relpath(ROTDIR) }}/5p00/{{ head }}5p00.f{{ fhr3 }}.grib2.idx" + {% elif OCNRES == 100 %} + - "{{ COM_OCEAN_GRIB | relpath(ROTDIR) }}/1p00/{{ head }}1p00.f{{ fhr3 }}.grib2" + - "{{ COM_OCEAN_GRIB | relpath(ROTDIR) }}/1p00/{{ head }}1p00.f{{ fhr3 }}.grib2.idx" + {% elif OCNRES == 25 or OCNRES == "025" %} + - "{{ COM_OCEAN_GRIB | relpath(ROTDIR) }}/0p25/{{ head }}0p25.f{{ fhr3 }}.grib2" + - "{{ COM_OCEAN_GRIB | relpath(ROTDIR) }}/0p25/{{ head }}0p25.f{{ fhr3 }}.grib2.idx" + {% endif %} + {% endfor %} diff --git a/parm/config/gefs/config.base b/parm/config/gefs/config.base index 90a75e3639..1b4f948349 100644 --- a/parm/config/gefs/config.base +++ b/parm/config/gefs/config.base @@ -13,10 +13,12 @@ export RUN_ENVIR="emc" # Account, queue, etc. export ACCOUNT="@ACCOUNT@" +export ACCOUNT_SERVICE="@ACCOUNT_SERVICE@" export QUEUE="@QUEUE@" export QUEUE_SERVICE="@QUEUE_SERVICE@" export PARTITION_BATCH="@PARTITION_BATCH@" export PARTITION_SERVICE="@PARTITION_SERVICE@" +export RESERVATION="@RESERVATION@" # Project to use in mass store: export HPSS_PROJECT="@HPSS_PROJECT@" @@ -134,8 +136,11 @@ export DO_WAVE="NO" export DO_OCN="NO" export DO_ICE="NO" export DO_AERO="NO" +export AERO_FCST_CDUMP="" # When to run aerosol forecast: gdas, gfs, or both +export AERO_ANL_CDUMP="" # When to run aerosol analysis: gdas, gfs, or both export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both export DOBNDPNT_WAVE="NO" # The GEFS buoys file does not currently have any boundary points +export DOIBP_WAV="NO" # Option to create point outputs from input boundary points export FRAC_GRID=".true." 
export DO_NEST="NO" # Whether to run a global-nested domain if [[ "${DO_NEST:-NO}" == "YES" ]] ; then @@ -160,7 +165,7 @@ case "${CASE}" in export waveGRD='glo_500' ;; "C96" | "C192") - export waveGRD='glo_200' + export waveGRD='glo_100' ;; "C384") export waveGRD='glo_025' @@ -179,6 +184,8 @@ case "${APP}" in ;; ATMA) export DO_AERO="YES" + export AERO_ANL_CDUMP="both" + export AERO_FCST_CDUMP="gdas" ;; ATMW) export DO_COUPLED="YES" @@ -197,6 +204,8 @@ case "${APP}" in if [[ "${APP}" =~ A$ ]]; then export DO_AERO="YES" + export AERO_ANL_CDUMP="both" + export AERO_FCST_CDUMP="gdas" fi if [[ "${APP}" =~ ^S2SW ]]; then @@ -222,6 +231,11 @@ export FHOUT_GFS=6 export FHMAX_HF_GFS=0 export FHOUT_HF_GFS=1 export FHOUT_OCNICE_GFS=6 +export FHMIN_WAV=0 +export FHOUT_WAV=3 +export FHMAX_HF_WAV=120 +export FHOUT_HF_WAV=1 +export FHMAX_WAV=${FHMAX_GFS} if (( gfs_cyc != 0 )); then export STEP_GFS=$(( 24 / gfs_cyc )) else @@ -235,6 +249,7 @@ export FHOUT_ENKF=${FHOUT_GFS} # GFS restart interval in hours export restart_interval_gfs=12 +export restart_interval_enkfgfs=12 # NOTE: Do not set this to zero. Instead set it to $FHMAX_GFS # TODO: Remove this variable from config.base and reference from config.fcst # TODO: rework logic in config.wave and push it to parsing_nameslist_WW3.sh where it is actually used @@ -313,4 +328,7 @@ export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs foreca export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. 
+# Number of regional collectives to create soundings for +export NUM_SND_COLLECTIVES=${NUM_SND_COLLECTIVES:-9} + echo "END: config.base" diff --git a/parm/config/gefs/config.efcs b/parm/config/gefs/config.efcs index ad90fa864c..915726b974 100644 --- a/parm/config/gefs/config.efcs +++ b/parm/config/gefs/config.efcs @@ -78,6 +78,6 @@ if [[ "${USE_OCN_PERTURB_FILES:-false}" == "true" ]]; then else export ODA_INCUPD="False" fi -export restart_interval="${restart_interval_gfs}" +export restart_interval="${restart_interval_enkfgfs:-12}" echo "END: config.efcs" diff --git a/parm/config/gefs/config.fcst b/parm/config/gefs/config.fcst index f91316c7d2..12e461cef8 100644 --- a/parm/config/gefs/config.fcst +++ b/parm/config/gefs/config.fcst @@ -8,12 +8,18 @@ echo "BEGIN: config.fcst" export USE_ESMF_THREADING="YES" # Toggle to use ESMF-managed threading or traditional threading in UFSWM export COPY_FINAL_RESTARTS="NO" # Toggle to copy restarts from the end of GFS/GEFS Run (GDAS is handled seperately) -# Turn off waves if not used for this CDUMP +# Turn off waves if not used for this RUN case ${WAVE_CDUMP} in - both | "${CDUMP/enkf}" ) ;; # Don't change + both | "${RUN/enkf}" ) ;; # Don't change *) DO_WAVE="NO" ;; # Turn waves off esac +# Turn off aerosols if not used for this RUN +case ${AERO_FCST_CDUMP} in + both | "${RUN/enkf}" ) ;; # Don't change + *) DO_AERO="NO" ;; # Turn waves off +esac + # Source model specific information that is resolution dependent string="--fv3 ${CASE}" [[ "${DO_OCN}" == "YES" ]] && string="${string} --mom6 ${OCNRES}" diff --git a/parm/config/gefs/config.ufs b/parm/config/gefs/config.ufs index 9b42e4aa82..9c39bf06de 100644 --- a/parm/config/gefs/config.ufs +++ b/parm/config/gefs/config.ufs @@ -15,7 +15,7 @@ if (( $# <= 1 )); then echo "--fv3 C48|C96|C192|C384|C768|C1152|C3072" echo "--mom6 500|100|025" echo "--cice6 500|100|025" - echo "--ww3 gnh_10m;aoc_9km;gsh_15m|gwes_30m|glo_025|glo_200|glo_500|mx025" + echo "--ww3 
gnh_10m;aoc_9km;gsh_15m|gwes_30m|glo_025|glo_200|glo_500|mx025|glo_100" echo "--gocart" exit 1 @@ -456,6 +456,10 @@ if [[ "${skip_ww3}" == "false" ]]; then "glo_025") ntasks_ww3=262 ;; + "glo_100") + ntasks_ww3=20 + nthreads_ww3=1 + ;; "glo_200") ntasks_ww3=30 nthreads_ww3=1 diff --git a/parm/config/gefs/config.wave b/parm/config/gefs/config.wave index 38ad959eee..b61a2f6e54 100644 --- a/parm/config/gefs/config.wave +++ b/parm/config/gefs/config.wave @@ -56,6 +56,12 @@ case "${waveGRD}" in export wavepostGRD='glo_025' export waveuoutpGRD=${waveGRD} ;; + "glo_100") + #Global regular lat/lon 1deg deg grid + export waveinterpGRD='' + export wavepostGRD='glo_100' + export waveuoutpGRD=${waveGRD} + ;; "glo_200") #Global regular lat/lon 2deg deg grid export waveinterpGRD='' @@ -79,12 +85,7 @@ export WAVEWND_DID= export WAVEWND_FID= # The start time reflects the number of hindcast hours prior to the cycle initial time -export FHMAX_WAV=${FHMAX_GFS} export WAVHINDH=0 -export FHMIN_WAV=0 -export FHOUT_WAV=3 -export FHMAX_HF_WAV=120 -export FHOUT_HF_WAV=1 export FHMAX_WAV_IBP=180 if (( FHMAX_WAV < FHMAX_WAV_IBP )); then export FHMAX_WAV_IBP=${FHMAX_GFS} ; fi diff --git a/parm/config/gefs/config.wavepostsbs b/parm/config/gefs/config.wavepostsbs index b3c5902e3c..82cec321da 100644 --- a/parm/config/gefs/config.wavepostsbs +++ b/parm/config/gefs/config.wavepostsbs @@ -13,7 +13,6 @@ export WAV_SUBGRBSRC="" export WAV_SUBGRB="" # Options for point output (switch on/off boundary point output) -export DOIBP_WAV='NO' # Input boundary points export DOFLD_WAV='YES' # Field data export DOPNT_WAV='YES' # Station data export DOGRB_WAV='YES' # Create grib2 files diff --git a/parm/config/gfs/config.aero b/parm/config/gfs/config.aero index c152fafd12..2fae019574 100644 --- a/parm/config/gfs/config.aero +++ b/parm/config/gfs/config.aero @@ -20,6 +20,9 @@ case ${machine} in "WCOSS2") AERO_INPUTS_DIR="/lfs/h2/emc/global/noscrub/emc.global/data/gocart_emissions" ;; + "GAEA") + 
AERO_INPUTS_DIR="/gpfs/f5/epic/proj-shared/global/glopara/data/gocart_emissions" + ;; "JET") AERO_INPUTS_DIR="/lfs4/HFIP/hfv3gfs/glopara/data/gocart_emissions" ;; diff --git a/parm/config/gfs/config.atmensanlfv3inc b/parm/config/gfs/config.atmensanlfv3inc new file mode 100644 index 0000000000..2dc73f3f6e --- /dev/null +++ b/parm/config/gfs/config.atmensanlfv3inc @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.atmensanlfv3inc ########## +# Atm Var Analysis specific + +echo "BEGIN: config.atmensanlfv3inc" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlfv3inc + +export JCB_ALGO=fv3jedi_fv3inc_lgetkf +export JEDIEXE=${EXECgfs}/fv3jedi_fv3inc.x + +echo "END: config.atmensanlfv3inc" diff --git a/parm/config/gfs/config.atmensanlletkf b/parm/config/gfs/config.atmensanlletkf new file mode 100644 index 0000000000..1fdc57ae62 --- /dev/null +++ b/parm/config/gfs/config.atmensanlletkf @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.atmensanlletkf ########## +# Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlletkf" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlletkf + +echo "END: config.atmensanlletkf" diff --git a/parm/config/gfs/config.atmensanlrun b/parm/config/gfs/config.atmensanlrun deleted file mode 100644 index 01f211a17a..0000000000 --- a/parm/config/gfs/config.atmensanlrun +++ /dev/null @@ -1,11 +0,0 @@ -#! /usr/bin/env bash - -########## config.atmensanlrun ########## -# Atm Ens Analysis specific - -echo "BEGIN: config.atmensanlrun" - -# Get task specific resources -. "${EXPDIR}/config.resources" atmensanlrun - -echo "END: config.atmensanlrun" diff --git a/parm/config/gfs/config.base b/parm/config/gfs/config.base index 8ee1a2c17e..50fcc168d7 100644 --- a/parm/config/gfs/config.base +++ b/parm/config/gfs/config.base @@ -13,10 +13,12 @@ export RUN_ENVIR="emc" # Account, queue, etc. 
export ACCOUNT="@ACCOUNT@" +export ACCOUNT_SERVICE="@ACCOUNT_SERVICE@" export QUEUE="@QUEUE@" export QUEUE_SERVICE="@QUEUE_SERVICE@" export PARTITION_BATCH="@PARTITION_BATCH@" export PARTITION_SERVICE="@PARTITION_SERVICE@" +export RESERVATION="@RESERVATION@" # Project to use in mass store: export HPSS_PROJECT="@HPSS_PROJECT@" @@ -163,9 +165,6 @@ export APP=@APP@ shopt -s extglob # Adjust APP based on RUN case "${RUN}" in - gfs) # Turn off aerosols - APP="${APP/%A}" - ;; enkf*) # Turn off aerosols and waves APP="${APP/%+([WA])}" ;; @@ -181,8 +180,11 @@ export DO_WAVE="NO" export DO_OCN="NO" export DO_ICE="NO" export DO_AERO="NO" +export AERO_FCST_CDUMP="" # When to run aerosol forecast: gdas, gfs, or both +export AERO_ANL_CDUMP="" # When to run aerosol analysis: gdas, gfs, or both export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both export DOBNDPNT_WAVE="NO" +export DOIBP_WAV="NO" # Option to create point outputs from input boundary points export FRAC_GRID=".true." export DO_NEST="NO" # Whether to run a global-nested domain if [[ "${DO_NEST:-NO}" == "YES" ]] ; then @@ -227,6 +229,8 @@ case "${APP}" in ;; ATMA) export DO_AERO="YES" + export AERO_ANL_CDUMP="both" + export AERO_FCST_CDUMP="gdas" ;; ATMW) export DO_COUPLED="YES" @@ -245,6 +249,8 @@ case "${APP}" in if [[ "${APP}" =~ A$ ]]; then export DO_AERO="YES" + export AERO_ANL_CDUMP="both" + export AERO_FCST_CDUMP="gdas" fi if [[ "${APP}" =~ ^S2SW ]]; then @@ -285,6 +291,12 @@ export FHOUT_GFS=3 # Must be 6 for S2S until #1629 is addressed; 3 for ops export FHMAX_HF_GFS=0 export FHOUT_HF_GFS=1 export FHOUT_OCNICE_GFS=6 +export FHMIN_WAV=0 +export FHOUT_WAV=3 +export FHMAX_HF_WAV=120 +export FHOUT_HF_WAV=1 +export FHMAX_WAV=${FHMAX:-9} +export FHMAX_WAV_GFS=${FHMAX_GFS} if (( gfs_cyc != 0 )); then export STEP_GFS=$(( 24 / gfs_cyc )) else @@ -342,6 +354,7 @@ export NMEM_ENS=@NMEM_ENS@ export SMOOTH_ENKF="NO" export l4densvar=".true." export lwrite4danl=".true." 
+export DO_CALC_INCREMENT="NO" # Early-cycle EnKF parameters export NMEM_ENS_GFS=30 @@ -370,6 +383,9 @@ if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then export IAUFHRS_ENKF="6" fi +# Generate post-processing ensemble spread files +export ENKF_SPREAD="YES" + # Check if cycle is cold starting, DOIAU off, or free-forecast mode if [[ "${MODE}" = "cycled" && "${SDATE}" = "${PDY}${cyc}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then export IAU_OFFSET=0 @@ -379,6 +395,22 @@ fi if [[ "${DOIAU_ENKF}" = "NO" ]]; then export IAUFHRS_ENKF="6"; fi +# Determine restart intervals +# For IAU, write restarts at beginning of window also +if [[ "${DOIAU_ENKF:-}" == "YES" ]]; then + export restart_interval_enkfgdas="3" +else + export restart_interval_enkfgdas="6" +fi + +export restart_interval_enkfgfs=${restart_interval_enkfgdas} + +if [[ "${DOIAU}" == "YES" ]]; then + export restart_interval_gdas="3" +else + export restart_interval_gdas="6" +fi + export GSI_SOILANAL=@GSI_SOILANAL@ # turned on nsst in anal and/or fcst steps, and turn off rtgsst @@ -435,4 +467,7 @@ if [[ ${DO_JEDIATMVAR} = "YES" ]]; then export DO_VMINMON="NO" # GSI minimization monitoring fi +# Number of regional collectives to create soundings for +export NUM_SND_COLLECTIVES=${NUM_SND_COLLECTIVES:-9} + echo "END: config.base" diff --git a/parm/config/gfs/config.com b/parm/config/gfs/config.com index 5d63a499ed..004ca1affb 100644 --- a/parm/config/gfs/config.com +++ b/parm/config/gfs/config.com @@ -84,6 +84,7 @@ declare -rx COM_OCEAN_NETCDF_TMPL=${COM_BASE}'/products/ocean/netcdf' declare -rx COM_OCEAN_GRIB_TMPL=${COM_BASE}'/products/ocean/grib2' declare -rx COM_OCEAN_GRIB_GRID_TMPL=${COM_OCEAN_GRIB_TMPL}'/${GRID}' +declare -rx COM_ICE_ANALYSIS_TMPL=${COM_BASE}'/analysis/ice' declare -rx COM_ICE_INPUT_TMPL=${COM_BASE}'/model_data/ice/input' declare -rx COM_ICE_HISTORY_TMPL=${COM_BASE}'/model_data/ice/history' declare 
-rx COM_ICE_RESTART_TMPL=${COM_BASE}'/model_data/ice/restart' diff --git a/parm/config/gfs/config.earc b/parm/config/gfs/config.earc index de73a93731..00a2fa95bd 100644 --- a/parm/config/gfs/config.earc +++ b/parm/config/gfs/config.earc @@ -8,7 +8,25 @@ echo "BEGIN: config.earc" # Get task specific resources . $EXPDIR/config.resources earc -export NMEM_EARCGRP=10 +# Set the number of ensemble members to archive per earc job +case "${CASE_ENS}" in + "C48" | "C96") + export NMEM_EARCGRP=80 + ;; + "C192") + export NMEM_EARCGRP=20 + ;; + "C384" | "C768") + export NMEM_EARCGRP=10 + ;; + "C1152") + export NMEM_EARCGRP=4 + ;; + *) + echo "FATAL ERROR: Unknown ensemble resolution ${CASE_ENS}, ABORT!" + exit 1 + ;; +esac #--starting and ending hours of previous cycles to be removed from rotating directory export RMOLDSTD_ENKF=144 diff --git a/parm/config/gfs/config.efcs b/parm/config/gfs/config.efcs index 402ba64087..1837cf0619 100644 --- a/parm/config/gfs/config.efcs +++ b/parm/config/gfs/config.efcs @@ -85,11 +85,10 @@ if [[ ${RECENTER_ENKF:-"YES"} == "YES" ]]; then export PREFIX_ATMINC="r" fi -# For IAU, write restarts at beginning of window also -if [[ "${DOIAU_ENKF:-}" == "YES" ]]; then - export restart_interval="3" -else - export restart_interval="6" +# Set restart interval to enable restarting forecasts after failures +export restart_interval=${restart_interval_enkfgdas:-6} +if [[ ${RUN} == "enkfgfs" ]]; then + export restart_interval=${restart_interval_enkfgfs:-12} fi echo "END: config.efcs" diff --git a/parm/config/gfs/config.epos b/parm/config/gfs/config.epos index 8026a2ba2e..f1da929b62 100644 --- a/parm/config/gfs/config.epos +++ b/parm/config/gfs/config.epos @@ -14,7 +14,4 @@ if [ $l4densvar = ".false." 
]; then export NEPOSGRP=3 fi -# Generate ensemble spread files -export ENKF_SPREAD="YES" - echo "END: config.epos" diff --git a/parm/config/gfs/config.fcst b/parm/config/gfs/config.fcst index db7306d2e8..f45874a31a 100644 --- a/parm/config/gfs/config.fcst +++ b/parm/config/gfs/config.fcst @@ -8,12 +8,18 @@ echo "BEGIN: config.fcst" export USE_ESMF_THREADING="YES" # Toggle to use ESMF-managed threading or traditional threading in UFSWM export COPY_FINAL_RESTARTS="NO" # Toggle to copy restarts from the end of GFS/GEFS Run (GDAS is handled seperately) -# Turn off waves if not used for this CDUMP +# Turn off waves if not used for this RUN case ${WAVE_CDUMP} in - both | "${CDUMP/enkf}" ) ;; # Don't change + both | "${RUN/enkf}" ) ;; # Don't change *) DO_WAVE="NO" ;; # Turn waves off esac +# Turn off aerosols if not used for this RUN +case ${AERO_FCST_CDUMP} in + both | "${RUN/enkf}" ) ;; # Don't change + *) DO_AERO="NO" ;; # Turn aerosols off +esac + # Source model specific information that is resolution dependent string="--fv3 ${CASE}" [[ "${DO_OCN}" == "YES" ]] && string="${string} --mom6 ${OCNRES}" @@ -271,11 +277,8 @@ if [[ "${CDUMP}" =~ "gdas" ]] ; then # GDAS cycle specific parameters # Variables used in DA cycling export DIAG_TABLE="${PARMgfs}/ufs/fv3/diag_table_da" - if [[ "${DOIAU}" == "YES" ]]; then - export restart_interval="3" - else - export restart_interval="6" - fi + # Write gfs restart files to rerun fcst from any break point + export restart_interval=${restart_interval_gdas:-6} # Turn on dry mass adjustment in GDAS export adjust_dry_mass=".true." 
diff --git a/parm/config/gfs/config.resources b/parm/config/gfs/config.resources index d58ecf85b2..affb0da04f 100644 --- a/parm/config/gfs/config.resources +++ b/parm/config/gfs/config.resources @@ -11,7 +11,7 @@ if (( $# != 1 )); then echo "stage_ic aerosol_init" echo "prep prepsnowobs prepatmiodaobs" echo "atmanlinit atmanlvar atmanlfv3inc atmanlfinal" - echo "atmensanlinit atmensanlrun atmensanlfinal" + echo "atmensanlinit atmensanlletkf atmensanlfv3inc atmensanlfinal" echo "snowanl" echo "aeroanlinit aeroanlrun aeroanlfinal" echo "anal sfcanl analcalc analdiag fcst echgres" @@ -35,6 +35,7 @@ echo "BEGIN: config.resources" case ${machine} in "WCOSS2") npe_node_max=128;; "HERA") npe_node_max=40;; + "GAEA") npe_node_max=128;; "ORION") npe_node_max=40;; "HERCULES") npe_node_max=80;; "JET") @@ -956,17 +957,31 @@ case ${step} in export memory_atmensanlinit="3072M" ;; - "atmensanlrun") + "atmensanlletkf") export layout_x=${layout_x_atmensanl} export layout_y=${layout_y_atmensanl} - export wtime_atmensanlrun="00:30:00" - export npe_atmensanlrun=$(( layout_x * layout_y * 6 )) - export npe_atmensanlrun_gfs=$(( layout_x * layout_y * 6 )) - export nth_atmensanlrun=1 - export nth_atmensanlrun_gfs=${nth_atmensanlrun} - export npe_node_atmensanlrun=$(( npe_node_max / nth_atmensanlrun )) - export memory_atmensanlrun="96GB" + export wtime_atmensanlletkf="00:30:00" + export npe_atmensanlletkf=$(( layout_x * layout_y * 6 )) + export npe_atmensanlletkf_gfs=$(( layout_x * layout_y * 6 )) + export nth_atmensanlletkf=1 + export nth_atmensanlletkf_gfs=${nth_atmensanlletkf} + export npe_node_atmensanlletkf=$(( npe_node_max / nth_atmensanlletkf )) + export memory_atmensanlletkf="96GB" + export is_exclusive=True + ;; + + "atmensanlfv3inc") + export layout_x=${layout_x_atmensanl} + export layout_y=${layout_y_atmensanl} + + export wtime_atmensanlfv3inc="00:30:00" + export npe_atmensanlfv3inc=$(( layout_x * layout_y * 6 )) + export npe_atmensanlfv3inc_gfs=$(( layout_x * layout_y * 6 )) + 
export nth_atmensanlfv3inc=1 + export nth_atmensanlfv3inc_gfs=${nth_atmensanlfv3inc} + export npe_node_atmensanlfv3inc=$(( npe_node_max / nth_atmensanlfv3inc )) + export memory_atmensanlfv3inc="96GB" export is_exclusive=True ;; diff --git a/parm/config/gfs/config.ufs b/parm/config/gfs/config.ufs index a7dabd3d0e..9f6c47ce72 100644 --- a/parm/config/gfs/config.ufs +++ b/parm/config/gfs/config.ufs @@ -15,7 +15,7 @@ if (( $# <= 1 )); then echo "--fv3 C48|C96|C192|C384|C768|C1152|C3072" echo "--mom6 500|100|025" echo "--cice6 500|100|025" - echo "--ww3 gnh_10m;aoc_9km;gsh_15m|gwes_30m|glo_025|glo_200|glo_500|mx025|uglo_100km|uglo_m1g16" + echo "--ww3 gnh_10m;aoc_9km;gsh_15m|gwes_30m|glo_025|glo_100|glo_200|glo_500|mx025|uglo_100km|uglo_m1g16" echo "--gocart" exit 1 @@ -561,6 +561,10 @@ if [[ "${skip_ww3}" == "false" ]]; then "glo_025") ntasks_ww3=262 ;; + "glo_100") + ntasks_ww3=20 + nthreads_ww3=1 + ;; "glo_200") ntasks_ww3=30 nthreads_ww3=1 diff --git a/parm/config/gfs/config.wave b/parm/config/gfs/config.wave index 5efceeeacf..7253ef1396 100644 --- a/parm/config/gfs/config.wave +++ b/parm/config/gfs/config.wave @@ -56,6 +56,12 @@ case "${waveGRD}" in export wavepostGRD='glo_025' export waveuoutpGRD=${waveGRD} ;; + "glo_100") + #Global regular lat/lon 1deg deg grid + export waveinterpGRD='' + export wavepostGRD='glo_100' + export waveuoutpGRD=${waveGRD} + ;; "glo_200") #Global regular lat/lon 2deg deg grid export waveinterpGRD='' @@ -91,16 +97,10 @@ export WAVEWND_DID= export WAVEWND_FID= # The start time reflects the number of hindcast hours prior to the cycle initial time -if [[ "${CDUMP}" = "gdas" ]]; then - export FHMAX_WAV=${FHMAX:-9} -else - export FHMAX_WAV=${FHMAX_GFS} +if [[ "${RUN}" == "gfs" ]]; then + export FHMAX_WAV=${FHMAX_WAV_GFS} fi export WAVHINDH=0 -export FHMIN_WAV=0 -export FHOUT_WAV=3 -export FHMAX_HF_WAV=120 -export FHOUT_HF_WAV=1 export FHMAX_WAV_IBP=180 if (( FHMAX_WAV < FHMAX_WAV_IBP )); then export FHMAX_WAV_IBP=${FHMAX_GFS} ; fi @@ -113,7 
+113,7 @@ export FHINCP_WAV=$(( DTPNT_WAV / 3600 )) export OUTPARS_WAV="WND CUR ICE HS T01 T02 DIR FP DP PHS PTP PDIR CHA" # Restart file config -if [[ "${CDUMP}" = "gdas" ]]; then +if [[ "${RUN}" == "gdas" ]]; then export WAVNCYC=4 export WAVHCYC=${assim_freq:-6} export FHMAX_WAV_CUR=48 # RTOFS forecasts only out to 8 days @@ -128,7 +128,7 @@ fi # Restart timing business export RSTTYPE_WAV='T' # generate second tier of restart files -if [[ "${CDUMP}" != gfs ]]; then # Setting is valid for GDAS and GEFS +if [[ "${RUN}" != gfs ]]; then # Setting is valid for GDAS and GEFS export DT_1_RST_WAV=10800 # time between restart files, set to DTRST=1 for a single restart file export DT_2_RST_WAV=43200 # restart stride for checkpointing restart export RSTIOFF_WAV=0 # first restart file offset relative to model start diff --git a/parm/config/gfs/config.wavepostsbs b/parm/config/gfs/config.wavepostsbs index b3c5902e3c..82cec321da 100644 --- a/parm/config/gfs/config.wavepostsbs +++ b/parm/config/gfs/config.wavepostsbs @@ -13,7 +13,6 @@ export WAV_SUBGRBSRC="" export WAV_SUBGRB="" # Options for point output (switch on/off boundary point output) -export DOIBP_WAV='NO' # Input boundary points export DOFLD_WAV='YES' # Field data export DOPNT_WAV='YES' # Station data export DOGRB_WAV='YES' # Create grib2 files diff --git a/parm/post/oceanice_products.yaml b/parm/post/oceanice_products.yaml index 8d8bcd4b51..48e5a5f204 100644 --- a/parm/post/oceanice_products.yaml +++ b/parm/post/oceanice_products.yaml @@ -29,9 +29,9 @@ ocean: cosvar: "cos_rot" angvar: "" {% if model_grid == 'mx025' or model_grid == 'mx050' or model_grid == 'mx100' %} - ocean_levels: [5, 15, 25, 35, 45, 55, 65, 75, 85, 95, 105, 115, 125, 135, 145, 155, 165, 175, 185, 195, 205, 215, 225.86945, 241.06255, 266.5239, 308.7874, 373.9288, 467.3998, 593.87915, 757.1453, 959.97325, 1204.059, 1489.9735, 1817.1455, 2183.879, 2587.3995, 3023.9285, 3488.7875, 3976.524, 4481.0625] + ocean_levels: [5, 15, 25, 35, 45, 55, 65, 75, 
85, 95, 105, 115, 125, 135, 145, 155, 165, 175, 185, 195, 205, 215, 226, 241, 267, 309, 374, 467, 594, 757, 960, 1204, 1490, 1817, 2184, 2587, 3024, 3489, 3977, 4481] {% elif model_grid == 'mx500' %} - ocean_levels: [5, 15, 25, 35, 45, 55, 65, 75, 85, 95, 105, 115, 125, 135, 145, 155, 165, 175, 185, 195, 205, 215, 225.86945, 241.06255, 266.5239] + ocean_levels: [5, 15, 25, 35, 45, 55, 65, 75, 85, 95, 105, 115, 125, 135, 145, 155, 165, 175, 185, 195, 205, 215, 226, 241, 267] {% endif %} subset: ['SSH', 'SST', 'SSS', 'speed', 'MLD_003', 'latent', 'sensible', 'SW', 'LW', 'LwLatSens', 'Heat_PmE', 'SSU', 'SSV', 'taux', 'tauy', 'temp', 'so', 'uo', 'vo'] data_in: diff --git a/parm/wave/ww3_grib2.glo_100.inp.tmpl b/parm/wave/ww3_grib2.glo_100.inp.tmpl new file mode 100755 index 0000000000..ddfabdb13d --- /dev/null +++ b/parm/wave/ww3_grib2.glo_100.inp.tmpl @@ -0,0 +1,9 @@ +$ WAVEWATCH-III gridded output input file +$ ---------------------------------------- +TIME DT NT +N +FLAGS +$ +TIME 7 MODNR GRIDNR 0 0 +$ +$ end of input file diff --git a/scripts/exgdas_enkf_earc.py b/scripts/exgdas_enkf_earc.py new file mode 100755 index 0000000000..2febbc27f5 --- /dev/null +++ b/scripts/exgdas_enkf_earc.py @@ -0,0 +1,60 @@ +#!/usr/bin/env python3 + +import os + +from pygfs.task.archive import Archive +from wxflow import AttrDict, Logger, cast_strdict_as_dtypedict, chdir, logit + +# initialize root logger +logger = Logger(level=os.environ.get("LOGGING_LEVEL", "DEBUG"), colored_log=True) + + +@logit(logger) +def main(): + + config = cast_strdict_as_dtypedict(os.environ) + + # Instantiate the Archive object + archive = Archive(config) + + # Pull out all the configuration keys needed to run the rest of archive steps + keys = ['ATARDIR', 'current_cycle', 'IAUFHRS', 'RUN', 'PDY', + 'PSLOT', 'HPSSARCH', 'LOCALARCH', 'ROTDIR', 'PARMgfs', + 'ARCDIR', 'SDATE', 'MODE', 'ENSGRP', 'NMEM_EARCGRP', + 'NMEM_ENS', 'DO_CALC_INCREMENT_ENKF_GFS', 'DO_JEDIATMENS', + 'lobsdiag_forenkf', 'FHMIN_ENKF', 
'FHMAX_ENKF_GFS', + 'FHOUT_ENKF_GFS', 'FHMAX_ENKF', 'FHOUT_ENKF', 'ENKF_SPREAD', + 'restart_interval_enkfgdas', 'restart_interval_enkfgfs', + 'DOHYBVAR', 'DOIAU_ENKF', 'IAU_OFFSET', 'DOIAU', + 'DO_CALC_INCREMENT', 'assim_freq', 'ARCH_CYC', + 'ARCH_WARMICFREQ', 'ARCH_FCSTICFREQ', + 'IAUFHRS_ENKF'] + + archive_dict = AttrDict() + for key in keys: + archive_dict[key] = archive.task_config[key] + + # Also import all COM* directory and template variables + for key in archive.task_config.keys(): + if key.startswith("COM"): + archive_dict[key] = archive.task_config[key] + + cwd = os.getcwd() + + os.chdir(config.ROTDIR) + + # Determine which archives to create + arcdir_set, atardir_sets = archive.configure(archive_dict) + + # Populate the product archive (ARCDIR) + archive.execute_store_products(arcdir_set) + + # Create the backup tarballs and store in ATARDIR + for atardir_set in atardir_sets: + archive.execute_backup_dataset(atardir_set) + + os.chdir(cwd) + + +if __name__ == '__main__': + main() diff --git a/scripts/exgdas_enkf_earc.sh b/scripts/exgdas_enkf_earc.sh deleted file mode 100755 index 3e54c658e9..0000000000 --- a/scripts/exgdas_enkf_earc.sh +++ /dev/null @@ -1,163 +0,0 @@ -#! /usr/bin/env bash - -source "${USHgfs}/preamble.sh" - -############################################## -# Begin JOB SPECIFIC work -############################################## -export n=$((10#${ENSGRP})) -export CDUMP_ENKF="${EUPD_CYC:-"gdas"}" - -# ICS are restarts and always lag INC by $assim_freq hours. -EARCINC_CYC=${ARCH_CYC} -EARCICS_CYC=$((ARCH_CYC-assim_freq)) -if [ "${EARCICS_CYC}" -lt 0 ]; then - EARCICS_CYC=$((EARCICS_CYC+24)) -fi - -"${USHgfs}/hpssarch_gen.sh" "${RUN}" -status=$? -if [ "${status}" -ne 0 ]; then - echo "${USHgfs}/hpssarch_gen.sh ${RUN} failed, ABORT!" 
- exit "${status}" -fi - -cd "${ROTDIR}" || exit 2 - -source "${USHgfs}/file_utils.sh" - -################################################################### -# ENSGRP > 0 archives a group of ensemble members -firstday=$(${NDATE} +24 "${SDATE}") -if (( 10#${ENSGRP} > 0 )) && [[ ${HPSSARCH} = "YES" || ${LOCALARCH} = "YES" ]]; then - -#--set the archiving command and create local directories, if necessary - TARCMD="htar" - if [[ ${LOCALARCH} = "YES" ]]; then - TARCMD="tar" - if [[ ! -d "${ATARDIR}/${PDY}${cyc}" ]]; then mkdir -p "${ATARDIR}/${PDY}${cyc}"; fi - fi - -#--determine when to save ICs for warm start - SAVEWARMICA="NO" - SAVEWARMICB="NO" - mm="${PDY:4:2}" - dd="${PDY:6:2}" - nday=$(( (10#${mm}-1)*30+10#${dd} )) - mod=$((nday % ARCH_WARMICFREQ)) - if [ "${PDY}${cyc}" -eq "${firstday}" ] && [ "${cyc}" -eq "${EARCINC_CYC}" ]; then SAVEWARMICA="YES" ; fi - if [ "${PDY}${cyc}" -eq "${firstday}" ] && [ "${cyc}" -eq "${EARCICS_CYC}" ]; then SAVEWARMICB="YES" ; fi - if [ "${mod}" -eq 0 ] && [ "${cyc}" ] && [ "${EARCINC_CYC}" ]; then SAVEWARMICA="YES" ; fi - if [ "${mod}" -eq 0 ] && [ "${cyc}" ] && [ "${EARCICS_CYC}" ]; then SAVEWARMICB="YES" ; fi - - if [ "${EARCICS_CYC}" -eq 18 ]; then - nday1=$((nday+1)) - mod1=$((nday1 % ARCH_WARMICFREQ)) - if [ "${mod1}" -eq 0 ] && [ "${cyc}" -eq "${EARCICS_CYC}" ] ; then SAVEWARMICB="YES" ; fi - if [ "${mod1}" -ne 0 ] && [ "${cyc}" -eq "${EARCICS_CYC}" ] ; then SAVEWARMICB="NO" ; fi - if [ "${PDY}${cyc}" -eq "${SDATE}" ] && [ "${cyc}" -eq "${EARCICS_CYC}" ] ; then SAVEWARMICB="YES" ; fi - fi - - if [ "${PDY}${cyc}" -gt "${SDATE}" ]; then # Don't run for first half cycle - - ${TARCMD} -P -cvf "${ATARDIR}/${PDY}${cyc}/${RUN}_grp${ENSGRP}.tar" $(cat "${DATA}/${RUN}_grp${n}.txt") - status=$? 
- if [ "${status}" -ne 0 ] && [ "${PDY}${cyc}" -ge "${firstday}" ]; then - echo "FATAL ERROR: ${TARCMD} ${PDY}${cyc} ${RUN}_grp${ENSGRP}.tar failed" - exit "${status}" - fi - - if [ "${SAVEWARMICA}" = "YES" ] && [ "${cyc}" -eq "${EARCINC_CYC}" ]; then - ${TARCMD} -P -cvf "${ATARDIR}/${PDY}${cyc}/${RUN}_restarta_grp${ENSGRP}.tar" $(cat "${DATA}/${RUN}_restarta_grp${n}.txt") - status=$? - if [ "${status}" -ne 0 ]; then - echo "FATAL ERROR: ${TARCMD} ${PDY}${cyc} ${RUN}_restarta_grp${ENSGRP}.tar failed" - exit "${status}" - fi - fi - - if [ "${SAVEWARMICB}" = "YES" ] && [ "${cyc}" -eq "${EARCICS_CYC}" ]; then - ${TARCMD} -P -cvf "${ATARDIR}/${PDY}${cyc}/${RUN}_restartb_grp${ENSGRP}.tar" $(cat "${DATA}/${RUN}_restartb_grp${n}.txt") - status=$? - if [ "${status}" -ne 0 ]; then - echo "FATAL ERROR: ${TARCMD} ${PDY}${cyc} ${RUN}_restartb_grp${ENSGRP}.tar failed" - exit "${status}" - fi - fi - - fi # CDATE>SDATE - -fi - - -################################################################### -# ENSGRP 0 archives ensemble means and copy data to online archive -if [ "${ENSGRP}" -eq 0 ]; then - - if [[ ${HPSSARCH} = "YES" || ${LOCALARCH} = "YES" ]]; then - - #--set the archiving command and create local directories, if necessary - TARCMD="htar" - HSICMD="hsi" - if [[ ${LOCALARCH} = "YES" ]]; then - TARCMD="tar" - HSICMD="" - if [[ ! -d "${ATARDIR}/${PDY}${cyc}" ]]; then mkdir -p "${ATARDIR}/${PDY}${cyc}"; fi - fi - - set +e - # Check if the tarball will have rstprod in it - has_rstprod="NO" - while IFS= read -r file; do - if [[ -f ${file} ]]; then - group=$( stat -c "%G" "${file}" ) - if [[ "${group}" == "rstprod" ]]; then - has_rstprod="YES" - break - fi - fi - done < "${DATA}/${RUN}.txt" - - # Create the tarball - tar_fl=${ATARDIR}/${PDY}${cyc}/${RUN}.tar - ${TARCMD} -P -cvf "${tar_fl}" $(cat "${DATA}/${RUN}.txt") - status=$? 
- if [[ "${status}" -ne 0 ]]; then - echo "FATAL ERROR: Tarring of ${tar_fl} failed" - exit "${status}" - fi - - # If rstprod was found, change the group of the tarball - if [[ "${has_rstprod}" == "YES" ]]; then - ${HSICMD} chgrp rstprod "${tar_fl}" - stat_chgrp=$? - ${HSICMD} chmod 640 "${tar_fl}" - stat_chgrp=$((stat_chgrp+$?)) - if [[ "${stat_chgrp}" -gt 0 ]]; then - echo "FATAL ERROR: Unable to properly restrict ${tar_fl}!" - echo "Attempting to delete ${tar_fl}" - ${HSICMD} rm "${tar_fl}" - echo "Please verify that ${tar_fl} was deleted!" - exit "${stat_chgrp}" - fi - fi - - # For safety, test if the htar/tar command failed only after changing groups - if (( status != 0 && ${PDY}${cyc} >= firstday )); then - echo "FATAL ERROR: ${TARCMD} ${tar_fl} failed" - exit "${status}" - fi - set_strict - fi - - #-- Archive online for verification and diagnostics - [[ ! -d ${ARCDIR} ]] && mkdir -p "${ARCDIR}" - cd "${ARCDIR}" || exit 2 - - nb_copy "${COM_ATMOS_ANALYSIS_ENSSTAT}/${RUN}.t${cyc}z.enkfstat" \ - "enkfstat.${RUN}.${PDY}${cyc}" - nb_copy "${COM_ATMOS_ANALYSIS_ENSSTAT}/${RUN}.t${cyc}z.gsistat.ensmean" \ - "gsistat.${RUN}.${PDY}${cyc}.ensmean" -fi - -exit 0 diff --git a/scripts/exgfs_atmos_postsnd.sh b/scripts/exgfs_atmos_postsnd.sh index 7aa97f3644..23c41157fe 100755 --- a/scripts/exgfs_atmos_postsnd.sh +++ b/scripts/exgfs_atmos_postsnd.sh @@ -113,7 +113,7 @@ fi # add appropriate WMO Headers. 
######################################## rm -rf poe_col -for (( m = 1; m <10 ; m++ )); do +for (( m = 1; m <= NUM_SND_COLLECTIVES ; m++ )); do echo "sh ${USHgfs}/gfs_sndp.sh ${m} " >> poe_col done diff --git a/scripts/exgfs_wave_post_gridded_sbs.sh b/scripts/exgfs_wave_post_gridded_sbs.sh index 3b103b8dd3..cee6d40b49 100755 --- a/scripts/exgfs_wave_post_gridded_sbs.sh +++ b/scripts/exgfs_wave_post_gridded_sbs.sh @@ -287,6 +287,7 @@ source "${USHgfs}/preamble.sh" glo_15mxt) GRDNAME='global' ; GRDRES=0p25 ; GRIDNR=255 ; MODNR=11 ;; reg025) GRDNAME='global' ; GRDRES=0p25 ; GRIDNR=255 ; MODNR=11 ;; glo_025) GRDNAME='global' ; GRDRES=0p25 ; GRIDNR=255 ; MODNR=11 ;; + glo_100) GRDNAME='global' ; GRDRES=1p00 ; GRIDNR=255 ; MODNR=11 ;; glo_200) GRDNAME='global' ; GRDRES=2p00 ; GRIDNR=255 ; MODNR=11 ;; glo_500) GRDNAME='global' ; GRDRES=5p00 ; GRIDNR=255 ; MODNR=11 ;; glo_30mxt) GRDNAME='global' ; GRDRES=0p50 ; GRIDNR=255 ; MODNR=11 ;; @@ -321,7 +322,8 @@ source "${USHgfs}/preamble.sh" glo_15mxt) GRDNAME='global' ; GRDRES=0p25 ; GRIDNR=255 ; MODNR=11 ;; reg025) GRDNAME='global' ; GRDRES=0p25 ; GRIDNR=255 ; MODNR=11 ;; glo_025) GRDNAME='global' ; GRDRES=0p25 ; GRIDNR=255 ; MODNR=11 ;; - glo_200) GRDNAME='global' ; GRDRES=2p00 ; GRIDNR=255 ; MODNR=11 ;; + glo_100) GRDNAME='global' ; GRDRES=1p00 ; GRIDNR=255 ; MODNR=11 ;; + glo_200) GRDNAME='global' ; GRDRES=2p00 ; GRIDNR=255 ; MODNR=11 ;; glo_500) GRDNAME='global' ; GRDRES=5p00 ; GRIDNR=255 ; MODNR=11 ;; gwes_30m) GRDNAME='global' ; GRDRES=0p50 ; GRIDNR=255 ; MODNR=10 ;; esac diff --git a/scripts/exglobal_archive.py b/scripts/exglobal_archive.py new file mode 100755 index 0000000000..31b5eb1186 --- /dev/null +++ b/scripts/exglobal_archive.py @@ -0,0 +1,63 @@ +#!/usr/bin/env python3 + +import os + +from pygfs.task.archive import Archive +from wxflow import AttrDict, Logger, cast_strdict_as_dtypedict, chdir, logit + +# initialize root logger +logger = Logger(level=os.environ.get("LOGGING_LEVEL", "DEBUG"), colored_log=True) + + 
+@logit(logger) +def main(): + + config = cast_strdict_as_dtypedict(os.environ) + + # Instantiate the Archive object + archive = Archive(config) + + # Pull out all the configuration keys needed to run the rest of archive steps + keys = ['ATARDIR', 'current_cycle', 'FHMIN', 'FHMAX', 'FHOUT', 'RUN', 'PDY', + 'DO_VERFRAD', 'DO_VMINMON', 'DO_VERFOZN', 'DO_ICE', 'DO_AERO', 'PARMgfs', + 'DO_OCN', 'DO_WAVE', 'WRITE_DOPOST', 'PSLOT', 'HPSSARCH', 'DO_MOS', + 'DO_JEDISNOWDA', 'LOCALARCH', 'REALTIME', 'ROTDIR', 'ARCH_WARMICFREQ', + 'ARCH_FCSTICFREQ', 'ARCH_CYC', 'assim_freq', 'ARCDIR', 'SDATE', + 'FHMIN_GFS', 'FHMAX_GFS', 'FHOUT_GFS', 'ARCH_GAUSSIAN', 'MODE', + 'FHOUT_OCNICE', 'FHOUT_OCNICE_GFS', 'DO_BUFRSND', 'DOHYBVAR', + 'ARCH_GAUSSIAN_FHMAX', 'ARCH_GAUSSIAN_FHINC', 'ARCH_GAUSSIAN_FHINC', + 'DOIAU', 'OCNRES', 'ICERES', 'NUM_SND_COLLECTIVES', 'FHOUT_WAV', + 'FHOUT_HF_WAV', 'FHMAX_WAV', 'FHMAX_HF_WAV', 'FHMAX_WAV_GFS', + 'restart_interval_gdas', 'restart_interval_gfs', + 'AERO_ANL_CDUMP', 'AERO_FCST_CDUMP', 'DOIBP_WAV', 'DO_JEDIOCNVAR', + 'NMEM_ENS', 'DO_JEDIATMVAR', 'DO_VRFY_OCEANDA', 'FHMAX_FITS', + 'FITSARC', 'IAUFHRS'] + + archive_dict = AttrDict() + for key in keys: + archive_dict[key] = archive.task_config[key] + + # Also import all COM* directory and template variables + for key in archive.task_config.keys(): + if key.startswith("COM"): + archive_dict[key] = archive.task_config[key] + + cwd = os.getcwd() + + os.chdir(config.ROTDIR) + + # Determine which archives to create + arcdir_set, atardir_sets = archive.configure(archive_dict) + + # Populate the product archive (ARCDIR) + archive.execute_store_products(arcdir_set) + + # Create the backup tarballs and store in ATARDIR + for atardir_set in atardir_sets: + archive.execute_backup_dataset(atardir_set) + + os.chdir(cwd) + + +if __name__ == '__main__': + main() diff --git a/scripts/exglobal_archive.sh b/scripts/exglobal_archive.sh deleted file mode 100755 index acb926d0e6..0000000000 --- a/scripts/exglobal_archive.sh 
+++ /dev/null @@ -1,321 +0,0 @@ -#! /usr/bin/env bash - -source "${USHgfs}/preamble.sh" - -############################################## -# Begin JOB SPECIFIC work -############################################## - -# ICS are restarts and always lag INC by $assim_freq hours -ARCHINC_CYC=${ARCH_CYC} -ARCHICS_CYC=$((ARCH_CYC-assim_freq)) -if [ "${ARCHICS_CYC}" -lt 0 ]; then - ARCHICS_CYC=$((ARCHICS_CYC+24)) -fi - -# CURRENT CYCLE -APREFIX="${RUN}.t${cyc}z." - -# Realtime parallels run GFS MOS on 1 day delay -# If realtime parallel, back up CDATE_MOS one day -# Ignore possible spelling error (nothing is misspelled) -# shellcheck disable=SC2153 -CDATE_MOS=${PDY}${cyc} -if [ "${REALTIME}" = "YES" ]; then - CDATE_MOS=$(${NDATE} -24 "${PDY}${cyc}") -fi -PDY_MOS="${CDATE_MOS:0:8}" - -############################################################### -# Archive online for verification and diagnostics -############################################################### -source "${USHgfs}/file_utils.sh" - -[[ ! 
-d ${ARCDIR} ]] && mkdir -p "${ARCDIR}" -nb_copy "${COM_ATMOS_ANALYSIS}/${APREFIX}gsistat" "${ARCDIR}/gsistat.${RUN}.${PDY}${cyc}" -nb_copy "${COM_SNOW_ANALYSIS}/${APREFIX}snowstat" "${ARCDIR}/snowstat.${RUN}.${PDY}${cyc}" -if [[ ${DO_AERO} = "YES" ]]; then - nb_copy "${COM_CHEM_ANALYSIS}/${APREFIX}aerostat" "${ARCDIR}/aerostat.${RUN}.${PDY}${cyc}" -fi -nb_copy "${COM_ATMOS_GRIB_1p00}/${APREFIX}pgrb2.1p00.anl" "${ARCDIR}/pgbanl.${RUN}.${PDY}${cyc}.grib2" - -# Archive 1 degree forecast GRIB2 files for verification -if [[ "${RUN}" == "gfs" ]]; then - fhmax=${FHMAX_GFS} - fhr=0 - while [ "${fhr}" -le "${fhmax}" ]; do - fhr2=$(printf %02i "${fhr}") - fhr3=$(printf %03i "${fhr}") - nb_copy "${COM_ATMOS_GRIB_1p00}/${APREFIX}pgrb2.1p00.f${fhr3}" "${ARCDIR}/pgbf${fhr2}.${RUN}.${PDY}${cyc}.grib2" - fhr=$((10#${fhr} + 10#${FHOUT_GFS} )) - done -fi -if [[ "${RUN}" == "gdas" ]]; then - flist="000 003 006 009" - for fhr in ${flist}; do - fname="${COM_ATMOS_GRIB_1p00}/${APREFIX}pgrb2.1p00.f${fhr}" - # TODO Shouldn't the archived files also use three-digit tags? 
- fhr2=$(printf %02i $((10#${fhr}))) - nb_copy "${fname}" "${ARCDIR}/pgbf${fhr2}.${RUN}.${PDY}${cyc}.grib2" - done -fi - -if [[ -s "${COM_ATMOS_TRACK}/avno.t${cyc}z.cyclone.trackatcfunix" ]]; then - # shellcheck disable=2153 - PSLOT4=${PSLOT:0:4} - # shellcheck disable= - PSLOT4=${PSLOT4^^} - sed "s:AVNO:${PSLOT4}:g" < "${COM_ATMOS_TRACK}/avno.t${cyc}z.cyclone.trackatcfunix" \ - > "${ARCDIR}/atcfunix.${RUN}.${PDY}${cyc}" - sed "s:AVNO:${PSLOT4}:g" < "${COM_ATMOS_TRACK}/avnop.t${cyc}z.cyclone.trackatcfunix" \ - > "${ARCDIR}/atcfunixp.${RUN}.${PDY}${cyc}" -fi - -if [[ "${RUN}" == "gdas" ]] && [[ -s "${COM_ATMOS_TRACK}/gdas.t${cyc}z.cyclone.trackatcfunix" ]]; then - # shellcheck disable=2153 - PSLOT4=${PSLOT:0:4} - # shellcheck disable= - PSLOT4=${PSLOT4^^} - sed "s:AVNO:${PSLOT4}:g" < "${COM_ATMOS_TRACK}/gdas.t${cyc}z.cyclone.trackatcfunix" \ - > "${ARCDIR}/atcfunix.${RUN}.${PDY}${cyc}" - sed "s:AVNO:${PSLOT4}:g" < "${COM_ATMOS_TRACK}/gdasp.t${cyc}z.cyclone.trackatcfunix" \ - > "${ARCDIR}/atcfunixp.${RUN}.${PDY}${cyc}" -fi - -if [ "${RUN}" = "gfs" ]; then - nb_copy "${COM_ATMOS_GENESIS}/storms.gfso.atcf_gen.${PDY}${cyc}" "${ARCDIR}/." - nb_copy "${COM_ATMOS_GENESIS}/storms.gfso.atcf_gen.altg.${PDY}${cyc}" "${ARCDIR}/." - nb_copy "${COM_ATMOS_TRACK}/trak.gfso.atcfunix.${PDY}${cyc}" "${ARCDIR}/." - nb_copy "${COM_ATMOS_TRACK}/trak.gfso.atcfunix.altg.${PDY}${cyc}" "${ARCDIR}/." - - mkdir -p "${ARCDIR}/tracker.${PDY}${cyc}/${RUN}" - blist="epac natl" - for basin in ${blist}; do - if [[ -f ${basin} ]]; then - cp -rp "${COM_ATMOS_TRACK}/${basin}" "${ARCDIR}/tracker.${PDY}${cyc}/${RUN}" - fi - done -fi - -# Archive required gaussian gfs forecast files for Fit2Obs -if [[ "${RUN}" == "gfs" ]] && [[ "${FITSARC}" = "YES" ]]; then - VFYARC=${VFYARC:-${ROTDIR}/vrfyarch} - [[ ! 
-d ${VFYARC} ]] && mkdir -p "${VFYARC}" - mkdir -p "${VFYARC}/${RUN}.${PDY}/${cyc}" - prefix="${RUN}.t${cyc}z" - fhmax=${FHMAX_FITS:-${FHMAX_GFS}} - fhr=0 - while [[ ${fhr} -le ${fhmax} ]]; do - fhr3=$(printf %03i "${fhr}") - sfcfile="${COM_ATMOS_HISTORY}/${prefix}.sfcf${fhr3}.nc" - sigfile="${COM_ATMOS_HISTORY}/${prefix}.atmf${fhr3}.nc" - nb_copy "${sfcfile}" "${VFYARC}/${RUN}.${PDY}/${cyc}/" - nb_copy "${sigfile}" "${VFYARC}/${RUN}.${PDY}/${cyc}/" - (( fhr = 10#${fhr} + 6 )) - done -fi - - -############################################################### -# Archive data either to HPSS or locally -if [[ ${HPSSARCH} = "YES" || ${LOCALARCH} = "YES" ]]; then -############################################################### - - # --set the archiving command and create local directories, if necessary - TARCMD="htar" - HSICMD="hsi" - if [[ ${LOCALARCH} = "YES" ]]; then - TARCMD="tar" - HSICMD='' - [[ ! -d "${ATARDIR}/${PDY}${cyc}" ]] && mkdir -p "${ATARDIR}/${PDY}${cyc}" - [[ ! -d "${ATARDIR}/${CDATE_MOS}" ]] && [[ -d "${ROTDIR}/gfsmos.${PDY_MOS}" ]] && [[ "${cyc}" -eq 18 ]] && mkdir -p "${ATARDIR}/${CDATE_MOS}" - fi - - #--determine when to save ICs for warm start and forecast-only runs - SAVEWARMICA="NO" - SAVEWARMICB="NO" - SAVEFCSTIC="NO" - firstday=$(${NDATE} +24 "${SDATE}") - mm="${PDY:2:2}" - dd="${PDY:4:2}" - # TODO: This math yields multiple dates sharing the same nday - nday=$(( (10#${mm}-1)*30+10#${dd} )) - mod=$((nday % ARCH_WARMICFREQ)) - if [[ "${PDY}${cyc}" -eq "${firstday}" ]] && [[ "${cyc}" -eq "${ARCHINC_CYC}" ]]; then SAVEWARMICA="YES" ; fi - if [[ "${PDY}${cyc}" -eq "${firstday}" ]] && [[ "${cyc}" -eq "${ARCHICS_CYC}" ]]; then SAVEWARMICB="YES" ; fi - if [[ "${mod}" -eq 0 ]] && [[ "${cyc}" -eq "${ARCHINC_CYC}" ]]; then SAVEWARMICA="YES" ; fi - if [[ "${mod}" -eq 0 ]] && [[ "${cyc}" -eq "${ARCHICS_CYC}" ]]; then SAVEWARMICB="YES" ; fi - - if [[ "${ARCHICS_CYC}" -eq 18 ]]; then - nday1=$((nday+1)) - mod1=$((nday1 % ARCH_WARMICFREQ)) - if [[ "${mod1}" -eq 
0 ]] && [[ "${cyc}" -eq "${ARCHICS_CYC}" ]] ; then SAVEWARMICB="YES" ; fi - if [[ "${mod1}" -ne 0 ]] && [[ "${cyc}" -eq "${ARCHICS_CYC}" ]] ; then SAVEWARMICB="NO" ; fi - if [[ "${PDY}${cyc}" -eq "${SDATE}" ]] && [[ "${cyc}" -eq "${ARCHICS_CYC}" ]] ; then SAVEWARMICB="YES" ; fi - fi - - mod=$((nday % ARCH_FCSTICFREQ)) - if [[ "${mod}" -eq 0 ]] || [[ "${PDY}${cyc}" -eq "${firstday}" ]]; then SAVEFCSTIC="YES" ; fi - - cd "${DATA}" || exit 2 - - "${USHgfs}/hpssarch_gen.sh" "${RUN}" - status=$? - if [ "${status}" -ne 0 ]; then - echo "${USHgfs}/hpssarch_gen.sh ${RUN} failed, ABORT!" - exit "${status}" - fi - - cd "${ROTDIR}" || exit 2 - - if [[ "${RUN}" = "gfs" ]]; then - - targrp_list="gfsa gfsb" - - if [ "${ARCH_GAUSSIAN:-"NO"}" = "YES" ]; then - targrp_list="${targrp_list} gfs_flux gfs_netcdfb gfs_pgrb2b" - if [ "${MODE}" = "cycled" ]; then - targrp_list="${targrp_list} gfs_netcdfa" - fi - fi - - if [ "${DO_WAVE}" = "YES" ]; then - targrp_list="${targrp_list} gfswave" - fi - - if [[ "${DO_OCN}" == "YES" ]]; then - targrp_list="${targrp_list} ocean_6hravg ocean_daily ocean_grib2 gfs_flux_1p00" - fi - - if [[ "${DO_ICE}" == "YES" ]]; then - targrp_list="${targrp_list} ice_6hravg ice_grib2" - fi - - # Aerosols - if [ "${DO_AERO}" = "YES" ]; then - for targrp in chem; do - # TODO: Why is this tar being done here instead of being added to the list? - ${TARCMD} -P -cvf "${ATARDIR}/${PDY}${cyc}/${targrp}.tar" $(cat "${DATA}/${targrp}.txt") - status=$? 
- if [[ "${status}" -ne 0 ]] && [[ "${PDY}${cyc}" -ge "${firstday}" ]]; then - echo "HTAR ${PDY}${cyc} ${targrp}.tar failed" - exit "${status}" - fi - done - fi - - #for restarts - if [ "${SAVEFCSTIC}" = "YES" ]; then - targrp_list="${targrp_list} gfs_restarta" - fi - - #for downstream products - if [ "${DO_BUFRSND}" = "YES" ]; then - targrp_list="${targrp_list} gfs_downstream" - fi - - #--save mdl gfsmos output from all cycles in the 18Z archive directory - if [[ -d "gfsmos.${PDY_MOS}" ]] && [[ "${cyc}" -eq 18 ]]; then - set +e - # TODO: Why is this tar being done here instead of being added to the list? - ${TARCMD} -P -cvf "${ATARDIR}/${CDATE_MOS}/gfsmos.tar" "./gfsmos.${PDY_MOS}" - status=$? - if [[ "${status}" -ne 0 ]] && [[ "${PDY}${cyc}" -ge "${firstday}" ]]; then - echo "${TARCMD^^} ${PDY}${cyc} gfsmos.tar failed" - exit "${status}" - fi - set_strict - fi - elif [[ "${RUN}" = "gdas" ]]; then - - targrp_list="gdas" - - #gdaswave - if [ "${DO_WAVE}" = "YES" ]; then - targrp_list="${targrp_list} gdaswave" - fi - - #gdasocean - if [ "${DO_OCN}" = "YES" ]; then - targrp_list="${targrp_list} gdasocean" - if [[ "${DO_JEDIOCNVAR}" == "YES" ]]; then - targrp_list="${targrp_list} gdasocean_analysis" - fi - fi - - #gdasice - if [ "${DO_ICE}" = "YES" ]; then - targrp_list="${targrp_list} gdasice" - fi - - if [ "${SAVEWARMICA}" = "YES" ] || [ "${SAVEFCSTIC}" = "YES" ]; then - targrp_list="${targrp_list} gdas_restarta" - if [ "${DO_WAVE}" = "YES" ]; then targrp_list="${targrp_list} gdaswave_restart"; fi - if [ "${DO_OCN}" = "YES" ]; then targrp_list="${targrp_list} gdasocean_restart"; fi - if [ "${DO_ICE}" = "YES" ]; then targrp_list="${targrp_list} gdasice_restart"; fi - fi - - if [ "${SAVEWARMICB}" = "YES" ] || [ "${SAVEFCSTIC}" = "YES" ]; then - targrp_list="${targrp_list} gdas_restartb" - fi - fi - - # Turn on extended globbing options - shopt -s extglob - for targrp in ${targrp_list}; do - set +e - - # Test whether gdas.tar or gdas_restarta.tar will have rstprod data 
- has_rstprod="NO" - case ${targrp} in - 'gdas'|'gdas_restarta') - # Test for rstprod in each archived file - while IFS= read -r file; do - if [[ -f ${file} ]]; then - group=$( stat -c "%G" "${file}" ) - if [[ "${group}" == "rstprod" ]]; then - has_rstprod="YES" - break - fi - fi - done < "${DATA}/${targrp}.txt" - - ;; - *) ;; - esac - - # Create the tarball - tar_fl="${ATARDIR}/${PDY}${cyc}/${targrp}.tar" - ${TARCMD} -P -cvf "${tar_fl}" $(cat "${DATA}/${targrp}.txt") - status=$? - - # Change group to rstprod if it was found even if htar/tar failed in case of partial creation - if [[ "${has_rstprod}" == "YES" ]]; then - ${HSICMD} chgrp rstprod "${tar_fl}" - stat_chgrp=$? - ${HSICMD} chmod 640 "${tar_fl}" - stat_chgrp=$((stat_chgrp+$?)) - if [[ "${stat_chgrp}" -gt 0 ]]; then - echo "FATAL ERROR: Unable to properly restrict ${tar_fl}!" - echo "Attempting to delete ${tar_fl}" - ${HSICMD} rm "${tar_fl}" - echo "Please verify that ${tar_fl} was deleted!" - exit "${stat_chgrp}" - fi - fi - - # For safety, test if the htar/tar command failed after changing groups - if [[ "${status}" -ne 0 ]] && [[ "${PDY}${cyc}" -ge "${firstday}" ]]; then - echo "FATAL ERROR: ${TARCMD} ${tar_fl} failed" - exit "${status}" - fi - set_strict - done - # Turn extended globbing back off - shopt -u extglob - -############################################################### -fi ##end of HPSS archive -############################################################### - -exit 0 diff --git a/scripts/exglobal_atm_analysis_fv3_increment.py b/scripts/exglobal_atm_analysis_fv3_increment.py index 57f2e7c9ee..66f6796343 100755 --- a/scripts/exglobal_atm_analysis_fv3_increment.py +++ b/scripts/exglobal_atm_analysis_fv3_increment.py @@ -1,8 +1,8 @@ #!/usr/bin/env python3 # exglobal_atm_analysis_fv3_increment.py # This script creates an AtmAnalysis object -# and runs the increment method -# which converts the JEDI increment into an FV3 increment +# and runs the init_fv3_increment and fv3_increment methods +# 
which convert the JEDI increment into an FV3 increment import os from wxflow import Logger, cast_strdict_as_dtypedict diff --git a/scripts/exglobal_atmens_analysis_fv3_increment.py b/scripts/exglobal_atmens_analysis_fv3_increment.py new file mode 100755 index 0000000000..c50b00548f --- /dev/null +++ b/scripts/exglobal_atmens_analysis_fv3_increment.py @@ -0,0 +1,23 @@ +#!/usr/bin/env python3 +# exglobal_atmens_analysis_fv3_increment.py +# This script creates an AtmEnsAnalysis object +# and runs the init_fv3_increment and fv3_increment methods +# which convert the JEDI increment into an FV3 increment +import os + +from wxflow import Logger, cast_strdict_as_dtypedict +from pygfs.task.atmens_analysis import AtmEnsAnalysis + +# Initialize root logger +logger = Logger(level='DEBUG', colored_log=True) + + +if __name__ == '__main__': + + # Take configuration from environment and cast it as python dictionary + config = cast_strdict_as_dtypedict(os.environ) + + # Instantiate the atmens analysis task + AtmEnsAnl = AtmEnsAnalysis(config) + AtmEnsAnl.init_fv3_increment() + AtmEnsAnl.fv3_increment() diff --git a/scripts/exglobal_atmens_analysis_run.py b/scripts/exglobal_atmens_analysis_letkf.py similarity index 86% rename from scripts/exglobal_atmens_analysis_run.py rename to scripts/exglobal_atmens_analysis_letkf.py index b2eb9fb2e4..30394537cd 100755 --- a/scripts/exglobal_atmens_analysis_run.py +++ b/scripts/exglobal_atmens_analysis_letkf.py @@ -1,7 +1,7 @@ #!/usr/bin/env python3 -# exglobal_atmens_analysis_run.py +# exglobal_atmens_analysis_letkf.py # This script creates an AtmEnsAnalysis object -# and runs the execute method +# and runs the letkf method # which executes the global atm local ensemble analysis import os @@ -19,4 +19,4 @@ # Instantiate the atmens analysis task AtmEnsAnl = AtmEnsAnalysis(config) - AtmEnsAnl.execute() + AtmEnsAnl.letkf() diff --git a/sorc/build_all.sh b/sorc/build_all.sh index 0797092a37..e3040fc0fa 100755 --- a/sorc/build_all.sh +++ 
b/sorc/build_all.sh @@ -183,7 +183,7 @@ echo "Building ${build_list}" # Go through all builds and adjust CPU counts up if possible if [[ ${requested_cpus} -lt ${_build_job_max} && ${big_jobs} -gt 0 ]]; then - # Add cores to the gdas, ufs, and gsi build jobs + # Add cores to the gdas and ufs build jobs extra_cores=$(( _build_job_max - requested_cpus )) extra_cores=$(( extra_cores / big_jobs )) for build in "${!build_jobs[@]}"; do diff --git a/sorc/gdas.cd b/sorc/gdas.cd index 2b2d417a96..6742ec62a1 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ -Subproject commit 2b2d417a96528527d7d3e7eedaccf150dc075d92 +Subproject commit 6742ec62a12d7d6f8129057bcf77cee0e2175022 diff --git a/sorc/gsi_utils.fd b/sorc/gsi_utils.fd index 0cdc3b4f7f..bb03e172e0 160000 --- a/sorc/gsi_utils.fd +++ b/sorc/gsi_utils.fd @@ -1 +1 @@ -Subproject commit 0cdc3b4f7ff8d4f0c54da3dab70ea2743bd68478 +Subproject commit bb03e172e0d0d9c56d6da7788ca033bfb5ef5119 diff --git a/sorc/link_workflow.sh b/sorc/link_workflow.sh index c5d7243e8f..68873d0f1a 100755 --- a/sorc/link_workflow.sh +++ b/sorc/link_workflow.sh @@ -75,6 +75,7 @@ case "${machine}" in "hercules") FIX_DIR="/work/noaa/global/glopara/fix" ;; "jet") FIX_DIR="/lfs4/HFIP/hfv3gfs/glopara/git/fv3gfs/fix" ;; "s4") FIX_DIR="/data/prod/glopara/fix" ;; + "gaea") FIX_DIR="/gpfs/f5/epic/proj-shared/global/glopara/data/fix" ;; *) echo "FATAL: Unknown target machine ${machine}, couldn't set FIX_DIR" exit 1 diff --git a/sorc/ufs_model.fd b/sorc/ufs_model.fd index c54e98637e..5bec704243 160000 --- a/sorc/ufs_model.fd +++ b/sorc/ufs_model.fd @@ -1 +1 @@ -Subproject commit c54e98637ead81b1fc1e336bd0443c8bfb6faf01 +Subproject commit 5bec704243286421fc613838fc67a2129e96acd6 diff --git a/sorc/wxflow b/sorc/wxflow index 942b90bfaa..71f6b10f76 160000 --- a/sorc/wxflow +++ b/sorc/wxflow @@ -1 +1 @@ -Subproject commit 942b90bfaa14f6b6d7374310dbdfd421ddb30548 +Subproject commit 71f6b10f76a440993580027ba1183d61277d1299 diff --git a/ush/check_netcdf.sh 
b/ush/check_netcdf.sh index e115ae8ae3..5f56a38aba 100755 --- a/ush/check_netcdf.sh +++ b/ush/check_netcdf.sh @@ -8,7 +8,7 @@ source "${HOMEgfs}/ush/load_fv3gfs_modules.sh" 1>/dev/null 2>&1 ncfile=${1?} -ncdump -h "${ncfile}" 1>/dev/null 2>&1 # redirect stdout and stderr to /dev/null to suppress output in cron +(( $(ncdump "${ncfile}" 2> /dev/null | grep -Po '(?<=time = UNLIMITED ; // \()\d+(?= currently)' || echo 0) > 0 )) # redirect stdout and stderr to /dev/null to suppress output in cron rc=$? # If there is no error, rc=0, else rc!=0 diff --git a/ush/detect_machine.sh b/ush/detect_machine.sh index 8a719c10d9..683ee0db7f 100755 --- a/ush/detect_machine.sh +++ b/ush/detect_machine.sh @@ -21,10 +21,8 @@ case $(hostname -f) in dlogin0[1-9].dogwood.wcoss2.ncep.noaa.gov) MACHINE_ID=wcoss2 ;; ### dogwood01-9 dlogin10.dogwood.wcoss2.ncep.noaa.gov) MACHINE_ID=wcoss2 ;; ### dogwood10 - gaea9) MACHINE_ID=gaea ;; ### gaea9 - gaea1[0-6]) MACHINE_ID=gaea ;; ### gaea10-16 - gaea9.ncrc.gov) MACHINE_ID=gaea ;; ### gaea9 - gaea1[0-6].ncrc.gov) MACHINE_ID=gaea ;; ### gaea10-16 + gaea5[1-8]) MACHINE_ID=gaea ;; ### gaea51-58 + gaea5[1-8].ncrc.gov) MACHINE_ID=gaea ;; ### gaea51-58 hfe0[1-9]) MACHINE_ID=hera ;; ### hera01-09 hfe1[0-2]) MACHINE_ID=hera ;; ### hera10-12 diff --git a/ush/forecast_det.sh b/ush/forecast_det.sh index e4b9ded3d3..603447f612 100755 --- a/ush/forecast_det.sh +++ b/ush/forecast_det.sh @@ -6,7 +6,7 @@ UFS_det(){ echo "SUB ${FUNCNAME[0]}: Run type determination for UFS" # Determine if the current cycle is a warm start (based on the availability of restarts) - if [[ -f "${COM_ATMOS_RESTART_PREV}/${model_start_date_current_cycle:0:8}.${model_start_date_current_cycle:8:2}0000.coupler.res" ]]; then + if [[ -f "${COMIN_ATMOS_RESTART_PREV}/${model_start_date_current_cycle:0:8}.${model_start_date_current_cycle:8:2}0000.coupler.res" ]]; then warm_start=".true." 
fi @@ -16,8 +16,8 @@ UFS_det(){ # Since restarts are not available from the previous cycle, this is likely a cold start # Ensure cold start ICs are present when warm start is not set # TODO: add checks for other cold start ICs as well - if [[ ! -f "${COM_ATMOS_INPUT}/gfs_ctrl.nc" ]]; then - echo "FATAL ERROR: Cold start ICs are missing from '${COM_ATMOS_INPUT}'" + if [[ ! -f "${COMIN_ATMOS_INPUT}/gfs_ctrl.nc" ]]; then + echo "FATAL ERROR: Cold start ICs are missing from '${COMIN_ATMOS_INPUT}'" exit 1 fi @@ -87,7 +87,7 @@ UFS_det(){ # Check for CICE6 restart availability if [[ "${cplice}" == ".true." ]]; then if [[ ! -f "${DATArestart}/CICE_RESTART/cice_model.res.${rdate:0:4}-${rdate:4:2}-${rdate:6:2}-${seconds}.nc" ]]; then - cice_rst_ok="NO" + cice6_rst_ok="NO" fi fi diff --git a/ush/forecast_postdet.sh b/ush/forecast_postdet.sh index 9c8858ec3d..d4d1d4ad6f 100755 --- a/ush/forecast_postdet.sh +++ b/ush/forecast_postdet.sh @@ -22,9 +22,9 @@ FV3_postdet() { done done - echo "Copying FV3 cold start files for 'RUN=${RUN}' at '${current_cycle}' from '${COM_ATMOS_INPUT}'" + echo "Copying FV3 cold start files for 'RUN=${RUN}' at '${current_cycle}' from '${COMIN_ATMOS_INPUT}'" for fv3_input_file in "${fv3_input_files[@]}"; do - ${NCP} "${COM_ATMOS_INPUT}/${fv3_input_file}" "${DATA}/INPUT/${fv3_input_file}" \ + ${NCP} "${COMIN_ATMOS_INPUT}/${fv3_input_file}" "${DATA}/INPUT/${fv3_input_file}" \ || ( echo "FATAL ERROR: Unable to copy FV3 IC, ABORT!"; exit 1 ) done @@ -49,7 +49,7 @@ FV3_postdet() { restart_dir="${DATArestart}/FV3_RESTART" else # "${RERUN}" == "NO" restart_date="${model_start_date_current_cycle}" - restart_dir="${COM_ATMOS_RESTART_PREV}" + restart_dir="${COMIN_ATMOS_RESTART_PREV}" fi echo "Copying FV3 restarts for 'RUN=${RUN}' at '${restart_date}' from '${restart_dir}'" @@ -75,12 +75,12 @@ FV3_postdet() { # Replace sfc_data with sfcanl_data restart files from current cycle (if found) local nn for (( nn = 1; nn <= ntiles; nn++ )); do - if [[ -f 
"${COM_ATMOS_RESTART}/${restart_date:0:8}.${restart_date:8:2}0000.sfcanl_data.tile${nn}.nc" ]]; then + if [[ -f "${COMOUT_ATMOS_RESTART}/${restart_date:0:8}.${restart_date:8:2}0000.sfcanl_data.tile${nn}.nc" ]]; then rm -f "${DATA}/INPUT/sfc_data.tile${nn}.nc" - ${NCP} "${COM_ATMOS_RESTART}/${restart_date:0:8}.${restart_date:8:2}0000.sfcanl_data.tile${nn}.nc" \ + ${NCP} "${COMOUT_ATMOS_RESTART}/${restart_date:0:8}.${restart_date:8:2}0000.sfcanl_data.tile${nn}.nc" \ "${DATA}/INPUT/sfc_data.tile${nn}.nc" else - echo "'sfcanl_data.tile1.nc' not found in '${COM_ATMOS_RESTART}', using 'sfc_data.tile1.nc'" + echo "'sfcanl_data.tile1.nc' not found in '${COMOUT_ATMOS_RESTART}', using 'sfc_data.tile1.nc'" break fi done @@ -125,7 +125,7 @@ EOF local increment_file for inc_file in "${inc_files[@]}"; do - increment_file="${COM_ATMOS_ANALYSIS}/${RUN}.t${cyc}z.${PREFIX_ATMINC}${inc_file}" + increment_file="${COMIN_ATMOS_ANALYSIS}/${RUN}.t${cyc}z.${PREFIX_ATMINC}${inc_file}" if [[ -f "${increment_file}" ]]; then ${NCP} "${increment_file}" "${DATA}/INPUT/${inc_file}" else @@ -169,26 +169,26 @@ EOF for fhr in ${FV3_OUTPUT_FH}; do FH3=$(printf %03i "${fhr}") FH2=$(printf %02i "${fhr}") - ${NLN} "${COM_ATMOS_HISTORY}/${RUN}.t${cyc}z.atmf${FH3}.nc" "atmf${FH3}.nc" - ${NLN} "${COM_ATMOS_HISTORY}/${RUN}.t${cyc}z.sfcf${FH3}.nc" "sfcf${FH3}.nc" - ${NLN} "${COM_ATMOS_HISTORY}/${RUN}.t${cyc}z.atm.logf${FH3}.txt" "log.atm.f${FH3}" + ${NLN} "${COMOUT_ATMOS_HISTORY}/${RUN}.t${cyc}z.atmf${FH3}.nc" "atmf${FH3}.nc" + ${NLN} "${COMOUT_ATMOS_HISTORY}/${RUN}.t${cyc}z.sfcf${FH3}.nc" "sfcf${FH3}.nc" + ${NLN} "${COMOUT_ATMOS_HISTORY}/${RUN}.t${cyc}z.atm.logf${FH3}.txt" "log.atm.f${FH3}" if [[ "${WRITE_DOPOST}" == ".true." 
]]; then - ${NLN} "${COM_ATMOS_MASTER}/${RUN}.t${cyc}z.master.grb2f${FH3}" "GFSPRS.GrbF${FH2}" - ${NLN} "${COM_ATMOS_MASTER}/${RUN}.t${cyc}z.sfluxgrbf${FH3}.grib2" "GFSFLX.GrbF${FH2}" + ${NLN} "${COMOUT_ATMOS_MASTER}/${RUN}.t${cyc}z.master.grb2f${FH3}" "GFSPRS.GrbF${FH2}" + ${NLN} "${COMOUT_ATMOS_MASTER}/${RUN}.t${cyc}z.sfluxgrbf${FH3}.grib2" "GFSFLX.GrbF${FH2}" if [[ "${DO_NEST:-NO}" == "YES" ]] ; then - ${NLN} "${COM_ATMOS_MASTER}/${RUN}.t${cyc}z.nest.grb2f${FH3}" "GFSPRS.GrbF${FH2}.nest02" - ${NLN} "${COM_ATMOS_MASTER}/${RUN}.t${cyc}z.nest.sfluxgrbf${FH3}.grib2" "GFSFLX.GrbF${FH2}.nest02" + ${NLN} "${COMOUT_ATMOS_MASTER}/${RUN}.t${cyc}z.nest.grb2f${FH3}" "GFSPRS.GrbF${FH2}.nest02" + ${NLN} "${COMOUT_ATMOS_MASTER}/${RUN}.t${cyc}z.nest.sfluxgrbf${FH3}.grib2" "GFSFLX.GrbF${FH2}.nest02" fi fi done else # TODO: Is this even valid anymore? local nn for (( nn = 1; nn <= ntiles; nn++ )); do - ${NLN} "nggps2d.tile${nn}.nc" "${COM_ATMOS_HISTORY}/nggps2d.tile${nn}.nc" - ${NLN} "nggps3d.tile${nn}.nc" "${COM_ATMOS_HISTORY}/nggps3d.tile${nn}.nc" - ${NLN} "grid_spec.tile${nn}.nc" "${COM_ATMOS_HISTORY}/grid_spec.tile${nn}.nc" - ${NLN} "atmos_static.tile${nn}.nc" "${COM_ATMOS_HISTORY}/atmos_static.tile${nn}.nc" - ${NLN} "atmos_4xdaily.tile${nn}.nc" "${COM_ATMOS_HISTORY}/atmos_4xdaily.tile${nn}.nc" + ${NLN} "nggps2d.tile${nn}.nc" "${COMOUT_ATMOS_HISTORY}/nggps2d.tile${nn}.nc" + ${NLN} "nggps3d.tile${nn}.nc" "${COMOUT_ATMOS_HISTORY}/nggps3d.tile${nn}.nc" + ${NLN} "grid_spec.tile${nn}.nc" "${COMOUT_ATMOS_HISTORY}/grid_spec.tile${nn}.nc" + ${NLN} "atmos_static.tile${nn}.nc" "${COMOUT_ATMOS_HISTORY}/atmos_static.tile${nn}.nc" + ${NLN} "atmos_4xdaily.tile${nn}.nc" "${COMOUT_ATMOS_HISTORY}/atmos_4xdaily.tile${nn}.nc" done fi } @@ -218,10 +218,10 @@ FV3_out() { # Copy configuration files if [[ "${RUN}" == "gfs" || "${RUN}" == "gefs" ]]; then - ${NCP} "${DATA}/input.nml" "${COM_CONF}/ufs.input.nml" - ${NCP} "${DATA}/model_configure" "${COM_CONF}/ufs.model_configure" - ${NCP} 
"${DATA}/ufs.configure" "${COM_CONF}/ufs.ufs.configure" - ${NCP} "${DATA}/diag_table" "${COM_CONF}/ufs.diag_table" + ${NCP} "${DATA}/input.nml" "${COMOUT_CONF}/ufs.input.nml" + ${NCP} "${DATA}/model_configure" "${COMOUT_CONF}/ufs.model_configure" + ${NCP} "${DATA}/ufs.configure" "${COMOUT_CONF}/ufs.ufs.configure" + ${NCP} "${DATA}/diag_table" "${COMOUT_CONF}/ufs.diag_table" fi # Create an array of fv3 restart files @@ -239,12 +239,12 @@ FV3_out() { if [[ "${RUN}" =~ "gdas" || "${RUN}" == "enkfgfs" ]]; then local restart_date restart_date=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${restart_interval} hours" +%Y%m%d%H) - while (( restart_date < forecast_end_cycle )); do + while (( restart_date <= forecast_end_cycle )); do echo "Copying FV3 restarts for 'RUN=${RUN}' at ${restart_date}" for fv3_restart_file in "${fv3_restart_files[@]}"; do restart_file="${restart_date:0:8}.${restart_date:8:2}0000.${fv3_restart_file}" ${NCP} "${DATArestart}/FV3_RESTART/${restart_file}" \ - "${COM_ATMOS_RESTART}/${restart_file}" + "${COMOUT_ATMOS_RESTART}/${restart_file}" done restart_date=$(date --utc -d "${restart_date:0:8} ${restart_date:8:2} + ${restart_interval} hours" +%Y%m%d%H) done @@ -253,15 +253,15 @@ FV3_out() { # Copy the final restart files at the end of the forecast segment # The final restart written at the end of the forecast does not include the valid date # TODO: verify the above statement since RM found that it did! 
- # TODO: For other components, this is only for gfs/gefs - check to see if this should also have this - if [[ "${COPY_FINAL_RESTARTS}" == "YES" ]]; then + # TODO: For other components, this is only for gfs/gefs - check to see if this should also have this + if [[ "${COPY_FINAL_RESTARTS}" == "YES" ]]; then echo "Copying FV3 restarts for 'RUN=${RUN}' at the end of the forecast segment: ${forecast_end_cycle}" for fv3_restart_file in "${fv3_restart_files[@]}"; do restart_file="${forecast_end_cycle:0:8}.${forecast_end_cycle:8:2}0000.${fv3_restart_file}" ${NCP} "${DATArestart}/FV3_RESTART/${restart_file}" \ - "${COM_ATMOS_RESTART}/${restart_file}" + "${COMOUT_ATMOS_RESTART}/${restart_file}" done - fi + fi echo "SUB ${FUNCNAME[0]}: Output data for FV3 copied" } @@ -279,7 +279,7 @@ WW3_postdet() { restart_dir="${DATArestart}/WW3_RESTART" else restart_date="${model_start_date_current_cycle}" - restart_dir="${COM_WAVE_RESTART_PREV}" + restart_dir="${COMIN_WAVE_RESTART_PREV}" fi echo "Copying WW3 restarts for 'RUN=${RUN}' at '${restart_date}' from '${restart_dir}'" local ww3_restart_file @@ -308,12 +308,12 @@ WW3_postdet() { # Link output files local wavprfx="${RUN}wave${WAV_MEMBER:-}" if [[ "${waveMULTIGRID}" == ".true." ]]; then - ${NLN} "${COM_WAVE_HISTORY}/${wavprfx}.log.mww3.${PDY}${cyc}" "log.mww3" + ${NLN} "${COMOUT_WAVE_HISTORY}/${wavprfx}.log.mww3.${PDY}${cyc}" "log.mww3" for ww3_grid in ${waveGRD}; do - ${NLN} "${COM_WAVE_HISTORY}/${wavprfx}.log.${ww3_grid}.${PDY}${cyc}" "log.${ww3_grid}" + ${NLN} "${COMOUT_WAVE_HISTORY}/${wavprfx}.log.${ww3_grid}.${PDY}${cyc}" "log.${ww3_grid}" done else - ${NLN} "${COM_WAVE_HISTORY}/${wavprfx}.log.${waveGRD}.${PDY}${cyc}" "log.ww3" + ${NLN} "${COMOUT_WAVE_HISTORY}/${wavprfx}.log.${waveGRD}.${PDY}${cyc}" "log.ww3" fi # Loop for gridded output (uses FHINC) @@ -324,10 +324,10 @@ WW3_postdet() { vdate=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${fhr} hours" +%Y%m%d.%H0000) if [[ "${waveMULTIGRID}" == ".true." 
]]; then for ww3_grid in ${waveGRD} ; do - ${NLN} "${COM_WAVE_HISTORY}/${wavprfx}.out_grd.${ww3_grid}.${vdate}" "${DATA}/${vdate}.out_grd.${ww3_grid}" + ${NLN} "${COMOUT_WAVE_HISTORY}/${wavprfx}.out_grd.${ww3_grid}.${vdate}" "${DATA}/${vdate}.out_grd.${ww3_grid}" done else - ${NLN} "${COM_WAVE_HISTORY}/${wavprfx}.out_grd.${waveGRD}.${vdate}" "${DATA}/${vdate}.out_grd.ww3" + ${NLN} "${COMOUT_WAVE_HISTORY}/${wavprfx}.out_grd.${waveGRD}.${vdate}" "${DATA}/${vdate}.out_grd.ww3" fi if (( FHMAX_HF_WAV > 0 && FHOUT_HF_WAV > 0 && fhr < FHMAX_HF_WAV )); then fhinc=${FHOUT_HF_WAV} @@ -341,9 +341,9 @@ WW3_postdet() { while (( fhr <= FHMAX_WAV )); do vdate=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${fhr} hours" +%Y%m%d.%H0000) if [[ "${waveMULTIGRID}" == ".true." ]]; then - ${NLN} "${COM_WAVE_HISTORY}/${wavprfx}.out_pnt.${waveuoutpGRD}.${vdate}" "${DATA}/${vdate}.out_pnt.${waveuoutpGRD}" + ${NLN} "${COMOUT_WAVE_HISTORY}/${wavprfx}.out_pnt.${waveuoutpGRD}.${vdate}" "${DATA}/${vdate}.out_pnt.${waveuoutpGRD}" else - ${NLN} "${COM_WAVE_HISTORY}/${wavprfx}.out_pnt.${waveuoutpGRD}.${vdate}" "${DATA}/${vdate}.out_pnt.ww3" + ${NLN} "${COMOUT_WAVE_HISTORY}/${wavprfx}.out_pnt.${waveuoutpGRD}.${vdate}" "${DATA}/${vdate}.out_pnt.ww3" fi fhr=$((fhr + fhinc)) done @@ -357,14 +357,14 @@ WW3_nml() { WW3_out() { echo "SUB ${FUNCNAME[0]}: Copying output data for WW3" - # TODO: Need to add logic to copy restarts from DATArestart/WW3_RESTART to COM_WAVE_RESTART + # TODO: Need to add logic to copy restarts from DATArestart/WW3_RESTART to COMOUT_WAVE_RESTART } CPL_out() { echo "SUB ${FUNCNAME[0]}: Copying output data for general cpl fields" if [[ "${esmf_profile:-}" == ".true." 
]]; then - ${NCP} "${DATA}/ESMF_Profile.summary" "${COM_ATMOS_HISTORY}/ESMF_Profile.summary" + ${NCP} "${DATA}/ESMF_Profile.summary" "${COMOUT_ATMOS_HISTORY}/ESMF_Profile.summary" fi } @@ -376,7 +376,7 @@ MOM6_postdet() { restart_dir="${DATArestart}/MOM6_RESTART" restart_date="${RERUN_DATE}" else # "${RERUN}" == "NO" - restart_dir="${COM_OCEAN_RESTART_PREV}" + restart_dir="${COMIN_OCEAN_RESTART_PREV}" restart_date="${model_start_date_current_cycle}" fi @@ -399,7 +399,7 @@ MOM6_postdet() { # Copy increment (only when RERUN=NO) if [[ "${RERUN}" == "NO" ]]; then if [[ "${DO_JEDIOCNVAR:-NO}" == "YES" ]]; then - ${NCP} "${COM_OCEAN_ANALYSIS}/${RUN}.t${cyc}z.ocninc.nc" "${DATA}/INPUT/mom6_increment.nc" \ + ${NCP} "${COMIN_OCEAN_ANALYSIS}/${RUN}.t${cyc}z.ocninc.nc" "${DATA}/INPUT/mom6_increment.nc" \ || ( echo "FATAL ERROR: Unable to copy MOM6 increment, ABORT!"; exit 1 ) fi @@ -407,7 +407,7 @@ MOM6_postdet() { # TODO if [[ $RUN} == "gefs" ]] block maybe be needed # to ensure it does not interfere with the GFS when ensemble is updated in the GFS if (( MEMBER > 0 )) && [[ "${ODA_INCUPD:-False}" == "True" ]]; then - ${NCP} "${COM_OCEAN_RESTART_PREV}/${restart_date:0:8}.${restart_date:0:8}0000.mom6_increment.nc" "${DATA}/INPUT/mom6_increment.nc" \ + ${NCP} "${COMIN_OCEAN_RESTART_PREV}/${restart_date:0:8}.${restart_date:0:8}0000.mom6_increment.nc" "${DATA}/INPUT/mom6_increment.nc" \ || ( echo "FATAL ERROR: Unable to copy ensemble MOM6 increment, ABORT!"; exit 1 ) fi fi # if [[ "${RERUN}" == "NO" ]]; then @@ -434,13 +434,13 @@ MOM6_postdet() { # Native model output uses window midpoint in the filename, but we are mapping that to the end of the period for COM source_file="ocn_${vdate_mid:0:4}_${vdate_mid:4:2}_${vdate_mid:6:2}_${vdate_mid:8:2}.nc" dest_file="${RUN}.ocean.t${cyc}z.${interval}hr_avg.f${fhr3}.nc" - ${NLN} "${COM_OCEAN_HISTORY}/${dest_file}" "${DATA}/MOM6_OUTPUT/${source_file}" + ${NLN} "${COMOUT_OCEAN_HISTORY}/${dest_file}" "${DATA}/MOM6_OUTPUT/${source_file}" # 
Daily output if (( fhr > 0 & fhr % 24 == 0 )); then source_file="ocn_daily_${vdate:0:4}_${vdate:4:2}_${vdate:6:2}.nc" dest_file="${RUN}.ocean.t${cyc}z.daily.f${fhr3}.nc" - ${NLN} "${COM_OCEAN_HISTORY}/${dest_file}" "${DATA}/MOM6_OUTPUT/${source_file}" + ${NLN} "${COMOUT_OCEAN_HISTORY}/${dest_file}" "${DATA}/MOM6_OUTPUT/${source_file}" fi last_fhr=${fhr} @@ -454,7 +454,7 @@ MOM6_postdet() { for fhr in ${MOM6_OUTPUT_FH}; do fhr3=$(printf %03i "${fhr}") vdatestr=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${fhr} hours" +%Y_%m_%d_%H) - ${NLN} "${COM_OCEAN_HISTORY}/${RUN}.ocean.t${cyc}z.inst.f${fhr3}.nc" "${DATA}/MOM6_OUTPUT/ocn_da_${vdatestr}.nc" + ${NLN} "${COMOUT_OCEAN_HISTORY}/${RUN}.ocean.t${cyc}z.inst.f${fhr3}.nc" "${DATA}/MOM6_OUTPUT/ocn_da_${vdatestr}.nc" done fi @@ -471,8 +471,8 @@ MOM6_nml() { MOM6_out() { echo "SUB ${FUNCNAME[0]}: Copying output data for MOM6" - # Copy MOM_input from DATA to COM_OCEAN_INPUT after the forecast is run (and successfull) - ${NCP} "${DATA}/INPUT/MOM_input" "${COM_CONF}/ufs.MOM_input" + # Copy MOM_input from DATA to COMOUT_CONF after the forecast is run (and successfull) + ${NCP} "${DATA}/INPUT/MOM_input" "${COMOUT_CONF}/ufs.MOM_input" # Create a list of MOM6 restart files # Coarser than 1/2 degree has a single MOM restart @@ -497,9 +497,9 @@ MOM6_out() { for mom6_restart_file in "${mom6_restart_files[@]}"; do restart_file="${forecast_end_cycle:0:8}.${forecast_end_cycle:8:2}0000.${mom6_restart_file}" ${NCP} "${DATArestart}/MOM6_RESTART/${restart_file}" \ - "${COM_OCEAN_RESTART}/${restart_file}" + "${COMOUT_OCEAN_RESTART}/${restart_file}" done - fi + fi fi # Copy restarts for the next cycle for RUN=gdas|enkfgdas|enkfgfs @@ -510,7 +510,7 @@ MOM6_out() { for mom6_restart_file in "${mom6_restart_files[@]}"; do restart_file="${restart_date:0:8}.${restart_date:8:2}0000.${mom6_restart_file}" ${NCP} "${DATArestart}/MOM6_RESTART/${restart_file}" \ - "${COM_OCEAN_RESTART}/${restart_file}" + 
"${COMOUT_OCEAN_RESTART}/${restart_file}" done fi } @@ -526,7 +526,10 @@ CICE_postdet() { cice_restart_file="${DATArestart}/CICE_RESTART/cice_model.res.${restart_date:0:4}-${restart_date:4:2}-${restart_date:6:2}-${seconds}.nc" else # "${RERUN}" == "NO" restart_date="${model_start_date_current_cycle}" - cice_restart_file="${COM_ICE_RESTART_PREV}/${restart_date:0:8}.${restart_date:8:2}0000.cice_model.res.nc" + cice_restart_file="${COMIN_ICE_RESTART_PREV}/${restart_date:0:8}.${restart_date:8:2}0000.cice_model.res.nc" + if [[ "${DO_JEDIOCNVAR:-NO}" == "YES" ]]; then + cice_restart_file="${COMIN_ICE_ANALYSIS}/${restart_date:0:8}.${restart_date:8:2}0000.cice_model_anl.res.nc" + fi fi # Copy CICE ICs @@ -538,7 +541,7 @@ CICE_postdet() { local vdate seconds vdatestr fhr fhr3 interval last_fhr seconds=$(to_seconds "${model_start_date_current_cycle:8:2}0000") # convert HHMMSS to seconds vdatestr="${model_start_date_current_cycle:0:4}-${model_start_date_current_cycle:4:2}-${model_start_date_current_cycle:6:2}-${seconds}" - ${NLN} "${COM_ICE_HISTORY}/${RUN}.ice.t${cyc}z.ic.nc" "${DATA}/CICE_OUTPUT/iceh_ic.${vdatestr}.nc" + ${NLN} "${COMOUT_ICE_HISTORY}/${RUN}.ice.t${cyc}z.ic.nc" "${DATA}/CICE_OUTPUT/iceh_ic.${vdatestr}.nc" # Link CICE forecast output files from DATA/CICE_OUTPUT to COM local source_file dest_file @@ -563,7 +566,7 @@ CICE_postdet() { source_file="iceh_inst.${vdatestr}.nc" dest_file="${RUN}.ice.t${cyc}z.inst.f${fhr3}.nc" fi - ${NLN} "${COM_ICE_HISTORY}/${dest_file}" "${DATA}/CICE_OUTPUT/${source_file}" + ${NLN} "${COMOUT_ICE_HISTORY}/${dest_file}" "${DATA}/CICE_OUTPUT/${source_file}" last_fhr=${fhr} done @@ -579,8 +582,8 @@ CICE_nml() { CICE_out() { echo "SUB ${FUNCNAME[0]}: Copying output data for CICE" - # Copy ice_in namelist from DATA to COMOUTice after the forecast is run (and successfull) - ${NCP} "${DATA}/ice_in" "${COM_CONF}/ufs.ice_in" + # Copy ice_in namelist from DATA to COMOUT_CONF after the forecast is run (and successfull) + ${NCP} "${DATA}/ice_in" 
"${COMOUT_CONF}/ufs.ice_in" # Copy CICE restarts at the end of the forecast segment to COM for RUN=gfs|gefs if [[ "${COPY_FINAL_RESTARTS}" == "YES" ]]; then @@ -591,8 +594,8 @@ CICE_out() { source_file="cice_model.res.${forecast_end_cycle:0:4}-${forecast_end_cycle:4:2}-${forecast_end_cycle:6:2}-${seconds}.nc" target_file="${forecast_end_cycle:0:8}.${forecast_end_cycle:8:2}0000.cice_model.res.nc" ${NCP} "${DATArestart}/CICE_RESTART/${source_file}" \ - "${COM_ICE_RESTART}/${target_file}" - fi + "${COMOUT_ICE_RESTART}/${target_file}" + fi fi # Copy restarts for next cycle for RUN=gdas|enkfgdas|enkfgfs @@ -604,7 +607,7 @@ CICE_out() { source_file="cice_model.res.${restart_date:0:4}-${restart_date:4:2}-${restart_date:6:2}-${seconds}.nc" target_file="${restart_date:0:8}.${restart_date:8:2}0000.cice_model.res.nc" ${NCP} "${DATArestart}/CICE_RESTART/${source_file}" \ - "${COM_ICE_RESTART}/${target_file}" + "${COMOUT_ICE_RESTART}/${target_file}" fi } @@ -648,12 +651,12 @@ GOCART_postdet() { vdate=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${fhr} hours" +%Y%m%d%H) # Temporarily delete existing files due to noclobber in GOCART - if [[ -e "${COM_CHEM_HISTORY}/gocart.inst_aod.${vdate:0:8}_${vdate:8:2}00z.nc4" ]]; then - rm -f "${COM_CHEM_HISTORY}/gocart.inst_aod.${vdate:0:8}_${vdate:8:2}00z.nc4" + if [[ -e "${COMOUT_CHEM_HISTORY}/gocart.inst_aod.${vdate:0:8}_${vdate:8:2}00z.nc4" ]]; then + rm -f "${COMOUT_CHEM_HISTORY}/gocart.inst_aod.${vdate:0:8}_${vdate:8:2}00z.nc4" fi #TODO: Temporarily removing this as this will crash gocart, adding copy statement at the end - #${NLN} "${COM_CHEM_HISTORY}/gocart.inst_aod.${vdate:0:8}_${vdate:8:2}00z.nc4" \ + #${NLN} "${COMOUT_CHEM_HISTORY}/gocart.inst_aod.${vdate:0:8}_${vdate:8:2}00z.nc4" \ # "${DATA}/gocart.inst_aod.${vdate:0:8}_${vdate:8:2}00z.nc4" done } @@ -669,7 +672,7 @@ GOCART_out() { if (( fhr == 0 )); then continue; fi vdate=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${fhr} hours" +%Y%m%d%H) 
${NCP} "${DATA}/gocart.inst_aod.${vdate:0:8}_${vdate:8:2}00z.nc4" \ - "${COM_CHEM_HISTORY}/gocart.inst_aod.${vdate:0:8}_${vdate:8:2}00z.nc4" + "${COMOUT_CHEM_HISTORY}/gocart.inst_aod.${vdate:0:8}_${vdate:8:2}00z.nc4" done } @@ -687,7 +690,7 @@ CMEPS_postdet() { cmeps_restart_file="${DATArestart}/CMEPS_RESTART/ufs.cpld.cpl.r.${restart_date:0:4}-${restart_date:4:2}-${restart_date:6:2}-${seconds}.nc" else # "${RERUN}" == "NO" restart_date="${model_start_date_current_cycle}" - cmeps_restart_file="${COM_MED_RESTART_PREV}/${restart_date:0:8}.${restart_date:8:2}0000.ufs.cpld.cpl.r.nc" + cmeps_restart_file="${COMIN_MED_RESTART_PREV}/${restart_date:0:8}.${restart_date:8:2}0000.ufs.cpld.cpl.r.nc" fi # Copy CMEPS restarts @@ -724,11 +727,11 @@ CMEPS_out() { target_file="${forecast_end_cycle:0:8}.${forecast_end_cycle:8:2}0000.ufs.cpld.cpl.r.nc" if [[ -f "${DATArestart}/CMEPS_RESTART/${source_file}" ]]; then ${NCP} "${DATArestart}/CMEPS_RESTART/${source_file}" \ - "${COM_MED_RESTART}/${target_file}" + "${COMOUT_MED_RESTART}/${target_file}" else echo "Mediator restart '${DATArestart}/CMEPS_RESTART/${source_file}' not found." fi - fi + fi # Copy restarts for the next cycle to COM for RUN=gdas|enkfgdas|enkfgfs if [[ "${RUN}" =~ "gdas" || "${RUN}" == "enkfgfs" ]]; then @@ -740,7 +743,7 @@ CMEPS_out() { target_file="${restart_date:0:8}.${restart_date:8:2}0000.ufs.cpld.cpl.r.nc" if [[ -f "${DATArestart}/CMEPS_RESTART/${source_file}" ]]; then ${NCP} "${DATArestart}/CMEPS_RESTART/${source_file}" \ - "${COM_MED_RESTART}/${target_file}" + "${COMOUT_MED_RESTART}/${target_file}" else echo "Mediator restart '${DATArestart}/CMEPS_RESTART/${source_file}' not found." fi diff --git a/ush/forecast_predet.sh b/ush/forecast_predet.sh index de414437b1..c26b214fc9 100755 --- a/ush/forecast_predet.sh +++ b/ush/forecast_predet.sh @@ -70,7 +70,7 @@ common_predet(){ FHMAX_HF=${FHMAX_HF:-0} FHOUT_HF=${FHOUT_HF:-1} - if [[ ! -d "${COM_CONF}" ]]; then mkdir -p "${COM_CONF}"; fi + if [[ ! 
-d "${COMOUT_CONF}" ]]; then mkdir -p "${COMOUT_CONF}"; fi cd "${DATA}" || ( echo "FATAL ERROR: Unable to 'cd ${DATA}', ABORT!"; exit 8 ) @@ -83,9 +83,9 @@ common_predet(){ FV3_predet(){ echo "SUB ${FUNCNAME[0]}: Defining variables for FV3" - if [[ ! -d "${COM_ATMOS_HISTORY}" ]]; then mkdir -p "${COM_ATMOS_HISTORY}"; fi - if [[ ! -d "${COM_ATMOS_MASTER}" ]]; then mkdir -p "${COM_ATMOS_MASTER}"; fi - if [[ ! -d "${COM_ATMOS_RESTART}" ]]; then mkdir -p "${COM_ATMOS_RESTART}"; fi + if [[ ! -d "${COMOUT_ATMOS_HISTORY}" ]]; then mkdir -p "${COMOUT_ATMOS_HISTORY}"; fi + if [[ ! -d "${COMOUT_ATMOS_MASTER}" ]]; then mkdir -p "${COMOUT_ATMOS_MASTER}"; fi + if [[ ! -d "${COMOUT_ATMOS_RESTART}" ]]; then mkdir -p "${COMOUT_ATMOS_RESTART}"; fi if [[ ! -d "${DATArestart}/FV3_RESTART" ]]; then mkdir -p "${DATArestart}/FV3_RESTART"; fi ${NLN} "${DATArestart}/FV3_RESTART" "${DATA}/RESTART" @@ -482,8 +482,8 @@ FV3_predet(){ WW3_predet(){ echo "SUB ${FUNCNAME[0]}: WW3 before run type determination" - if [[ ! -d "${COM_WAVE_HISTORY}" ]]; then mkdir -p "${COM_WAVE_HISTORY}"; fi - if [[ ! -d "${COM_WAVE_RESTART}" ]]; then mkdir -p "${COM_WAVE_RESTART}" ; fi + if [[ ! -d "${COMOUT_WAVE_HISTORY}" ]]; then mkdir -p "${COMOUT_WAVE_HISTORY}"; fi + if [[ ! -d "${COMOUT_WAVE_RESTART}" ]]; then mkdir -p "${COMOUT_WAVE_RESTART}" ; fi if [[ ! 
-d "${DATArestart}/WAVE_RESTART" ]]; then mkdir -p "${DATArestart}/WAVE_RESTART"; fi ${NLN} "${DATArestart}/WAVE_RESTART" "${DATA}/restart_wave" @@ -499,17 +499,17 @@ WW3_predet(){ grdALL=$(printf "%s\n" "${array[@]}" | sort -u | tr '\n' ' ') for ww3_grid in ${grdALL}; do - ${NCP} "${COM_WAVE_PREP}/${RUN}wave.mod_def.${ww3_grid}" "${DATA}/mod_def.${ww3_grid}" \ - || ( echo "FATAL ERROR: Failed to copy '${RUN}wave.mod_def.${ww3_grid}' from '${COM_WAVE_PREP}'"; exit 1 ) + ${NCP} "${COMIN_WAVE_PREP}/${RUN}wave.mod_def.${ww3_grid}" "${DATA}/mod_def.${ww3_grid}" \ + || ( echo "FATAL ERROR: Failed to copy '${RUN}wave.mod_def.${ww3_grid}' from '${COMIN_WAVE_PREP}'"; exit 1 ) done else #if shel, only 1 waveGRD which is linked to mod_def.ww3 - ${NCP} "${COM_WAVE_PREP}/${RUN}wave.mod_def.${waveGRD}" "${DATA}/mod_def.ww3" \ - || ( echo "FATAL ERROR: Failed to copy '${RUN}wave.mod_def.${waveGRD}' from '${COM_WAVE_PREP}'"; exit 1 ) + ${NCP} "${COMIN_WAVE_PREP}/${RUN}wave.mod_def.${waveGRD}" "${DATA}/mod_def.ww3" \ + || ( echo "FATAL ERROR: Failed to copy '${RUN}wave.mod_def.${waveGRD}' from '${COMIN_WAVE_PREP}'"; exit 1 ) fi if [[ "${WW3ICEINP}" == "YES" ]]; then - local wavicefile="${COM_WAVE_PREP}/${RUN}wave.${WAVEICE_FID}.t${current_cycle:8:2}z.ice" + local wavicefile="${COMIN_WAVE_PREP}/${RUN}wave.${WAVEICE_FID}.t${current_cycle:8:2}z.ice" if [[ ! -f "${wavicefile}" ]]; then echo "FATAL ERROR: WW3ICEINP='${WW3ICEINP}', but missing ice file '${wavicefile}', ABORT!" exit 1 @@ -519,7 +519,7 @@ WW3_predet(){ fi if [[ "${WW3CURINP}" == "YES" ]]; then - local wavcurfile="${COM_WAVE_PREP}/${RUN}wave.${WAVECUR_FID}.t${current_cycle:8:2}z.cur" + local wavcurfile="${COMIN_WAVE_PREP}/${RUN}wave.${WAVECUR_FID}.t${current_cycle:8:2}z.cur" if [[ ! -f "${wavcurfile}" ]]; then echo "FATAL ERROR: WW3CURINP='${WW3CURINP}', but missing current file '${wavcurfile}', ABORT!" 
exit 1 @@ -558,9 +558,9 @@ WW3_predet(){ CICE_predet(){ echo "SUB ${FUNCNAME[0]}: CICE before run type determination" - if [[ ! -d "${COM_ICE_HISTORY}" ]]; then mkdir -p "${COM_ICE_HISTORY}"; fi - if [[ ! -d "${COM_ICE_RESTART}" ]]; then mkdir -p "${COM_ICE_RESTART}"; fi - if [[ ! -d "${COM_ICE_INPUT}" ]]; then mkdir -p "${COM_ICE_INPUT}"; fi + if [[ ! -d "${COMOUT_ICE_HISTORY}" ]]; then mkdir -p "${COMOUT_ICE_HISTORY}"; fi + if [[ ! -d "${COMOUT_ICE_RESTART}" ]]; then mkdir -p "${COMOUT_ICE_RESTART}"; fi + if [[ ! -d "${COMIN_ICE_INPUT}" ]]; then mkdir -p "${COMIN_ICE_INPUT}"; fi if [[ ! -d "${DATA}/CICE_OUTPUT" ]]; then mkdir -p "${DATA}/CICE_OUTPUT"; fi if [[ ! -d "${DATArestart}/CICE_RESTART" ]]; then mkdir -p "${DATArestart}/CICE_RESTART"; fi @@ -581,9 +581,9 @@ CICE_predet(){ MOM6_predet(){ echo "SUB ${FUNCNAME[0]}: MOM6 before run type determination" - if [[ ! -d "${COM_OCEAN_HISTORY}" ]]; then mkdir -p "${COM_OCEAN_HISTORY}"; fi - if [[ ! -d "${COM_OCEAN_RESTART}" ]]; then mkdir -p "${COM_OCEAN_RESTART}"; fi - if [[ ! -d "${COM_OCEAN_INPUT}" ]]; then mkdir -p "${COM_OCEAN_INPUT}"; fi + if [[ ! -d "${COMOUT_OCEAN_HISTORY}" ]]; then mkdir -p "${COMOUT_OCEAN_HISTORY}"; fi + if [[ ! -d "${COMOUT_OCEAN_RESTART}" ]]; then mkdir -p "${COMOUT_OCEAN_RESTART}"; fi + if [[ ! -d "${COMIN_OCEAN_INPUT}" ]]; then mkdir -p "${COMIN_OCEAN_INPUT}"; fi if [[ ! -d "${DATA}/MOM6_OUTPUT" ]]; then mkdir -p "${DATA}/MOM6_OUTPUT"; fi if [[ ! -d "${DATArestart}/MOM6_RESTART" ]]; then mkdir -p "${DATArestart}/MOM6_RESTART"; fi @@ -626,7 +626,7 @@ MOM6_predet(){ CMEPS_predet(){ echo "SUB ${FUNCNAME[0]}: CMEPS before run type determination" - if [[ ! -d "${COM_MED_RESTART}" ]]; then mkdir -p "${COM_MED_RESTART}"; fi + if [[ ! -d "${COMOUT_MED_RESTART}" ]]; then mkdir -p "${COMOUT_MED_RESTART}"; fi if [[ ! 
-d "${DATArestart}/CMEPS_RESTART" ]]; then mkdir -p "${DATArestart}/CMEPS_RESTART"; fi ${NLN} "${DATArestart}/CMEPS_RESTART" "${DATA}/CMEPS_RESTART" @@ -637,7 +637,7 @@ CMEPS_predet(){ GOCART_predet(){ echo "SUB ${FUNCNAME[0]}: GOCART before run type determination" - if [[ ! -d "${COM_CHEM_HISTORY}" ]]; then mkdir -p "${COM_CHEM_HISTORY}"; fi + if [[ ! -d "${COMOUT_CHEM_HISTORY}" ]]; then mkdir -p "${COMOUT_CHEM_HISTORY}"; fi GOCART_OUTPUT_FH=$(seq -s ' ' "${FHMIN}" "6" "${FHMAX}") # TODO: AERO_HISTORY.rc has hardwired output frequency to 6 hours diff --git a/ush/hpssarch_gen.sh b/ush/hpssarch_gen.sh deleted file mode 100755 index 101745da8e..0000000000 --- a/ush/hpssarch_gen.sh +++ /dev/null @@ -1,798 +0,0 @@ -#! /usr/bin/env bash - -################################################### -# Fanglin Yang, 20180318 -# --create bunches of files to be archived to HPSS -################################################### -source "${USHgfs}/preamble.sh" - -type=${1:-gfs} ##gfs, gdas, enkfgdas or enkfggfs - -ARCH_GAUSSIAN=${ARCH_GAUSSIAN:-"YES"} -ARCH_GAUSSIAN_FHMAX=${ARCH_GAUSSIAN_FHMAX:-36} -ARCH_GAUSSIAN_FHINC=${ARCH_GAUSSIAN_FHINC:-6} - -# Set whether to archive downstream products -DO_DOWN=${DO_DOWN:-"NO"} -if [[ ${DO_BUFRSND} = "YES" ]]; then - export DO_DOWN="YES" -fi - -#----------------------------------------------------- -if [[ ${type} = "gfs" ]]; then -#----------------------------------------------------- - FHMIN_GFS=${FHMIN_GFS:-0} - FHMAX_GFS=${FHMAX_GFS:-384} - FHOUT_GFS=${FHOUT_GFS:-3} - FHMAX_HF_GFS=${FHMAX_HF_GFS:-120} - FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} - - rm -f "${DATA}/gfsa.txt" - rm -f "${DATA}/gfsb.txt" - rm -f "${DATA}/gfs_restarta.txt" - touch "${DATA}/gfsa.txt" - touch "${DATA}/gfsb.txt" - touch "${DATA}/gfs_restarta.txt" - - if [[ ${ARCH_GAUSSIAN} = "YES" ]]; then - rm -f "${DATA}/gfs_pgrb2b.txt" - rm -f "${DATA}/gfs_netcdfb.txt" - rm -f "${DATA}/gfs_flux.txt" - touch "${DATA}/gfs_pgrb2b.txt" - touch "${DATA}/gfs_netcdfb.txt" - touch 
"${DATA}/gfs_flux.txt" - - if [[ ${MODE} = "cycled" ]]; then - rm -f "${DATA}/gfs_netcdfa.txt" - touch "${DATA}/gfs_netcdfa.txt" - fi - fi - - if [[ ${DO_DOWN} = "YES" ]]; then - rm -f "${DATA}/gfs_downstream.txt" - touch "${DATA}/gfs_downstream.txt" - fi - - head="gfs.t${cyc}z." - - if [[ ${ARCH_GAUSSIAN} = "YES" ]]; then - { - echo "${COM_ATMOS_GRIB_0p25/${ROTDIR}\//}/${head}pgrb2b.0p25.anl" - echo "${COM_ATMOS_GRIB_0p25/${ROTDIR}\//}/${head}pgrb2b.0p25.anl.idx" - echo "${COM_ATMOS_GRIB_1p00/${ROTDIR}\//}/${head}pgrb2b.1p00.anl" - echo "${COM_ATMOS_GRIB_1p00/${ROTDIR}\//}/${head}pgrb2b.1p00.anl.idx" - } >> "${DATA}/gfs_pgrb2b.txt" - - if [[ ${MODE} = "cycled" ]]; then - { - echo "${COM_ATMOS_ANALYSIS/${ROTDIR}\//}/${head}atmanl.nc" - echo "${COM_ATMOS_ANALYSIS/${ROTDIR}\//}/${head}sfcanl.nc" - echo "${COM_ATMOS_ANALYSIS/${ROTDIR}\//}/${head}atmi*.nc" - gsida_files=("dtfanl.nc" - "loginc.txt") - for file in "${gsida_files[@]}"; do - [[ -s ${COM_ATMOS_ANALYSIS}/${head}${file} ]] && echo "${COM_ATMOS_ANALYSIS/${ROTDIR}\//}/${head}${file}" - done - } >> "${DATA}/gfs_netcdfa.txt" - fi - - fh=0 - while (( fh <= ARCH_GAUSSIAN_FHMAX )); do - fhr=$(printf %03i "${fh}") - { - echo "${COM_ATMOS_HISTORY/${ROTDIR}\//}/${head}atmf${fhr}.nc" - echo "${COM_ATMOS_HISTORY/${ROTDIR}\//}/${head}sfcf${fhr}.nc" - } >> "${DATA}/gfs_netcdfb.txt" - fh=$((fh+ARCH_GAUSSIAN_FHINC)) - done - fi - - #.................. 
- # Exclude the gfsarch.log file, which will change during the tar operation - # This uses the bash extended globbing option - { - echo "./logs/${PDY}${cyc}/gfs!(arch).log" - echo "${COM_CONF/${ROTDIR}\//}/ufs.input.nml" - - if [[ ${MODE} = "cycled" ]]; then - if [[ -s "${COM_ATMOS_ANALYSIS}/${head}gsistat" ]]; then - echo "${COM_ATMOS_ANALYSIS/${ROTDIR}\//}/${head}gsistat" - fi - gsiob_files=("nsstbufr" - "prepbufr" - "prepbufr.acft_profiles") - for file in "${gsiob_files[@]}"; do - [[ -s ${COM_OBS}/${head}${file} ]] && echo "${COM_OBS/${ROTDIR}\//}/${head}${file}" - done - if [[ -s "${COM_ATMOS_ANALYSIS}/${head}atmvar.yaml" ]]; then - echo "${COM_ATMOS_ANALYSIS/${ROTDIR}\//}/${head}atmvar.yaml" - fi - if [[ -s "${COM_ATMOS_ANALYSIS}/${head}atmstat" ]]; then - echo "${COM_ATMOS_ANALYSIS/${ROTDIR}\//}/${head}atmstat" - fi - fi - - echo "${COM_ATMOS_GRIB_0p25/${ROTDIR}\//}/${head}pgrb2.0p25.anl" - echo "${COM_ATMOS_GRIB_0p25/${ROTDIR}\//}/${head}pgrb2.0p25.anl.idx" - - #Only generated if there are cyclones to track - cyclone_files=("avno.t${cyc}z.cyclone.trackatcfunix" - "avnop.t${cyc}z.cyclone.trackatcfunix" - "trak.gfso.atcfunix.${PDY}${cyc}" - "trak.gfso.atcfunix.altg.${PDY}${cyc}") - - for file in "${cyclone_files[@]}"; do - [[ -s ${COM_ATMOS_TRACK}/${file} ]] && echo "${COM_ATMOS_TRACK/${ROTDIR}\//}/${file}" - done - - genesis_files=("storms.gfso.atcf_gen.${PDY}${cyc}" - "storms.gfso.atcf_gen.altg.${PDY}${cyc}") - for file in "${genesis_files[@]}"; do - [[ -s ${COM_ATMOS_GENESIS}/${file} ]] && echo "${COM_ATMOS_GENESIS/${ROTDIR}\//}/${file}" - done - - # GSI Monitor job output - - if [[ ${DO_VMINMON} = "YES" ]]; then - echo "${COM_ATMOS_MINMON/${ROTDIR}\//}/${PDY}${cyc}.costs.txt" - echo "${COM_ATMOS_MINMON/${ROTDIR}\//}/${PDY}${cyc}.cost_terms.txt" - echo "${COM_ATMOS_MINMON/${ROTDIR}\//}/${PDY}${cyc}.gnorms.ieee_d" - echo "${COM_ATMOS_MINMON/${ROTDIR}\//}/${PDY}${cyc}.reduction.ieee_d" - echo "${COM_ATMOS_MINMON/${ROTDIR}\//}/gnorm_data.txt" - fi - - } >> 
"${DATA}/gfsa.txt" - - { - if [[ ${DO_DOWN} = "YES" ]]; then - if [[ ${DO_BUFRSND} = "YES" ]]; then - echo "${COM_ATMOS_GEMPAK/${ROTDIR}\//}/gfs_${PDY}${cyc}.sfc" - echo "${COM_ATMOS_GEMPAK/${ROTDIR}\//}/gfs_${PDY}${cyc}.snd" - echo "${COM_ATMOS_WMO/${ROTDIR}\//}/gfs_collective*.postsnd_${cyc}" - echo "${COM_ATMOS_BUFR/${ROTDIR}\//}/bufr.t${cyc}z" - echo "${COM_ATMOS_BUFR/${ROTDIR}\//}/gfs.t${cyc}z.bufrsnd.tar.gz" - fi - fi - } >> "${DATA}/gfs_downstream.txt" - - { - echo "${COM_ATMOS_GRIB_0p50/${ROTDIR}\//}/${head}pgrb2.0p50.anl" - echo "${COM_ATMOS_GRIB_0p50/${ROTDIR}\//}/${head}pgrb2.0p50.anl.idx" - echo "${COM_ATMOS_GRIB_1p00/${ROTDIR}\//}/${head}pgrb2.1p00.anl" - echo "${COM_ATMOS_GRIB_1p00/${ROTDIR}\//}/${head}pgrb2.1p00.anl.idx" - } >> "${DATA}/gfsb.txt" - - - fh=0 - while (( fh <= FHMAX_GFS )); do - fhr=$(printf %03i "${fh}") - if [[ ${ARCH_GAUSSIAN} = "YES" ]]; then - { - echo "${COM_ATMOS_MASTER/${ROTDIR}\//}/${head}sfluxgrbf${fhr}.grib2" - echo "${COM_ATMOS_MASTER/${ROTDIR}\//}/${head}sfluxgrbf${fhr}.grib2.idx" - } >> "${DATA}/gfs_flux.txt" - - { - echo "${COM_ATMOS_GRIB_0p25/${ROTDIR}\//}/${head}pgrb2b.0p25.f${fhr}" - echo "${COM_ATMOS_GRIB_0p25/${ROTDIR}\//}/${head}pgrb2b.0p25.f${fhr}.idx" - if [[ -s "${COM_ATMOS_GRIB_1p00}/${head}pgrb2b.1p00.f${fhr}" ]]; then - echo "${COM_ATMOS_GRIB_1p00/${ROTDIR}\//}/${head}pgrb2b.1p00.f${fhr}" - echo "${COM_ATMOS_GRIB_1p00/${ROTDIR}\//}/${head}pgrb2b.1p00.f${fhr}.idx" - fi - } >> "${DATA}/gfs_pgrb2b.txt" - fi - - { - echo "${COM_ATMOS_GRIB_0p25/${ROTDIR}\//}/${head}pgrb2.0p25.f${fhr}" - echo "${COM_ATMOS_GRIB_0p25/${ROTDIR}\//}/${head}pgrb2.0p25.f${fhr}.idx" - echo "${COM_ATMOS_HISTORY/${ROTDIR}\//}/${head}atm.logf${fhr}.txt" - } >> "${DATA}/gfsa.txt" - - - { - if [[ -s "${COM_ATMOS_GRIB_0p50}/${head}pgrb2.0p50.f${fhr}" ]]; then - echo "${COM_ATMOS_GRIB_0p50/${ROTDIR}\//}/${head}pgrb2.0p50.f${fhr}" - echo "${COM_ATMOS_GRIB_0p50/${ROTDIR}\//}/${head}pgrb2.0p50.f${fhr}.idx" - fi - if [[ -s 
"${COM_ATMOS_GRIB_1p00}/${head}pgrb2.1p00.f${fhr}" ]]; then - echo "${COM_ATMOS_GRIB_1p00/${ROTDIR}\//}/${head}pgrb2.1p00.f${fhr}" - echo "${COM_ATMOS_GRIB_1p00/${ROTDIR}\//}/${head}pgrb2.1p00.f${fhr}.idx" - fi - } >> "${DATA}/gfsb.txt" - - inc=${FHOUT_GFS} - if (( FHMAX_HF_GFS > 0 && FHOUT_HF_GFS > 0 && fh < FHMAX_HF_GFS )); then - inc=${FHOUT_HF_GFS} - fi - - fh=$((fh+inc)) - done - - #.................. - { - if [[ ${MODE} = "cycled" ]]; then - echo "${COM_ATMOS_RESTART/${ROTDIR}\//}/*0000.sfcanl_data.tile1.nc" - echo "${COM_ATMOS_RESTART/${ROTDIR}\//}/*0000.sfcanl_data.tile2.nc" - echo "${COM_ATMOS_RESTART/${ROTDIR}\//}/*0000.sfcanl_data.tile3.nc" - echo "${COM_ATMOS_RESTART/${ROTDIR}\//}/*0000.sfcanl_data.tile4.nc" - echo "${COM_ATMOS_RESTART/${ROTDIR}\//}/*0000.sfcanl_data.tile5.nc" - echo "${COM_ATMOS_RESTART/${ROTDIR}\//}/*0000.sfcanl_data.tile6.nc" - elif [[ ${MODE} = "forecast-only" ]]; then - echo "${COM_ATMOS_INPUT/${ROTDIR}\//}/gfs_ctrl.nc" - echo "${COM_ATMOS_INPUT/${ROTDIR}\//}/gfs_data.tile1.nc" - echo "${COM_ATMOS_INPUT/${ROTDIR}\//}/gfs_data.tile2.nc" - echo "${COM_ATMOS_INPUT/${ROTDIR}\//}/gfs_data.tile3.nc" - echo "${COM_ATMOS_INPUT/${ROTDIR}\//}/gfs_data.tile4.nc" - echo "${COM_ATMOS_INPUT/${ROTDIR}\//}/gfs_data.tile5.nc" - echo "${COM_ATMOS_INPUT/${ROTDIR}\//}/gfs_data.tile6.nc" - echo "${COM_ATMOS_INPUT/${ROTDIR}\//}/sfc_data.tile1.nc" - echo "${COM_ATMOS_INPUT/${ROTDIR}\//}/sfc_data.tile2.nc" - echo "${COM_ATMOS_INPUT/${ROTDIR}\//}/sfc_data.tile3.nc" - echo "${COM_ATMOS_INPUT/${ROTDIR}\//}/sfc_data.tile4.nc" - echo "${COM_ATMOS_INPUT/${ROTDIR}\//}/sfc_data.tile5.nc" - echo "${COM_ATMOS_INPUT/${ROTDIR}\//}/sfc_data.tile6.nc" - fi - } >> "${DATA}/gfs_restarta.txt" - - - #.................. - if [[ ${DO_WAVE} = "YES" ]]; then - - rm -rf "${DATA}/gfswave.txt" - touch "${DATA}/gfswave.txt" - - head="gfswave.t${cyc}z." - - #........................... 
- { - echo "${COM_WAVE_HISTORY/${ROTDIR}\//}/ww3_multi*" - echo "${COM_WAVE_GRID/${ROTDIR}\//}/${head}*" - echo "${COM_WAVE_STATION/${ROTDIR}\//}/${head}*" - } >> "${DATA}/gfswave.txt" - fi - - if [[ "${DO_OCN}" == "YES" ]]; then - - head="gfs.ocean.t${cyc}z." - rm -f "${DATA}/ocean_6hravg.txt"; touch "${DATA}/ocean_6hravg.txt" - rm -f "${DATA}/ocean_daily.txt"; touch "${DATA}/ocean_daily.txt" - rm -f "${DATA}/ocean_grib2.txt"; touch "${DATA}/ocean_grib2.txt" - - echo "${COM_OCEAN_HISTORY/${ROTDIR}\//}/${head}6hr_avg.f*.nc" >> "${DATA}/ocean_6hravg.txt" - echo "${COM_OCEAN_HISTORY/${ROTDIR}\//}/${head}daily.f*.nc" >> "${DATA}/ocean_daily.txt" - - { - if [[ -d "${COM_OCEAN_GRIB}/5p00" ]]; then - echo "${COM_OCEAN_GRIB/${ROTDIR}\//}/5p00/${head}5p00.f*.grib2" - echo "${COM_OCEAN_GRIB/${ROTDIR}\//}/5p00/${head}5p00.f*.grib2.idx" - fi - if [[ -d "${COM_OCEAN_GRIB}/1p00" ]]; then - echo "${COM_OCEAN_GRIB/${ROTDIR}\//}/1p00/${head}1p00.f*.grib2" - echo "${COM_OCEAN_GRIB/${ROTDIR}\//}/1p00/${head}1p00.f*.grib2.idx" - fi - if [[ -d "${COM_OCEAN_GRIB}/0p25" ]]; then - echo "${COM_OCEAN_GRIB/${ROTDIR}\//}/0p25/${head}0p25.f*.grib2" - echo "${COM_OCEAN_GRIB/${ROTDIR}\//}/0p25/${head}0p25.f*.grib2.idx" - fi - } >> "${DATA}/ocean_grib2.txt" - - # Also save fluxes from atmosphere - head="gfs.t${cyc}z." - rm -f "${DATA}/gfs_flux_1p00.txt"; touch "${DATA}/gfs_flux_1p00.txt" - { - echo "${COM_ATMOS_GRIB_1p00/${ROTDIR}\//}/${head}flux.1p00.f???" - echo "${COM_ATMOS_GRIB_1p00/${ROTDIR}\//}/${head}flux.1p00.f???.idx" - } >> "${DATA}/gfs_flux_1p00.txt" - fi - - if [[ "${DO_ICE}" == "YES" ]]; then - head="gfs.ice.t${cyc}z." 
- rm -f "${DATA}/ice_6hravg.txt"; touch "${DATA}/ice_6hravg.txt" - rm -f "${DATA}/ice_grib2.txt"; touch "${DATA}/ice_grib2.txt" - - { - echo "${COM_ICE_HISTORY/${ROTDIR}\//}/${head}ic.nc" - echo "${COM_ICE_HISTORY/${ROTDIR}\//}/${head}6hr_avg.f*.nc" - } >> "${DATA}/ice_6hravg.txt" - - { - if [[ -d "${COM_ICE_GRIB}/5p00" ]]; then - echo "${COM_ICE_GRIB/${ROTDIR}\//}/5p00/${head}5p00.f*.grib2" - echo "${COM_ICE_GRIB/${ROTDIR}\//}/5p00/${head}5p00.f*.grib2.idx" - fi - if [[ -d "${COM_ICE_GRIB}/1p00" ]]; then - echo "${COM_ICE_GRIB/${ROTDIR}\//}/1p00/${head}1p00.f*.grib2" - echo "${COM_ICE_GRIB/${ROTDIR}\//}/1p00/${head}1p00.f*.grib2.idx" - fi - if [[ -d "${COM_ICE_GRIB}/0p25" ]]; then - echo "${COM_ICE_GRIB/${ROTDIR}\//}/0p25/${head}0p25.f*.grib2" - echo "${COM_ICE_GRIB/${ROTDIR}\//}/0p25/${head}0p25.f*.grib2.idx" - fi - } >> "${DATA}/ice_grib2.txt" - fi - - if [[ ${DO_AERO} = "YES" ]]; then - head="gocart" - - rm -f "${DATA}/chem.txt" - touch "${DATA}/chem.txt" - - echo "${COM_CHEM_HISTORY/${ROTDIR}\//}/${head}*" >> "${DATA}/chem.txt" - fi - -#----------------------------------------------------- -fi ##end of gfs -#----------------------------------------------------- - - - -#----------------------------------------------------- -if [[ ${type} == "gdas" ]]; then -#----------------------------------------------------- - - rm -f "${DATA}/gdas.txt" - rm -f "${DATA}/gdas_restarta.txt" - rm -f "${DATA}/gdas_restartb.txt" - touch "${DATA}/gdas.txt" - touch "${DATA}/gdas_restarta.txt" - touch "${DATA}/gdas_restartb.txt" - - head="gdas.t${cyc}z." - - #.................. 
- { - echo "${COM_ATMOS_GRIB_0p25/${ROTDIR}\//}/${head}pgrb2.0p25.anl" - echo "${COM_ATMOS_GRIB_0p25/${ROTDIR}\//}/${head}pgrb2.0p25.anl.idx" - echo "${COM_ATMOS_GRIB_1p00/${ROTDIR}\//}/${head}pgrb2.1p00.anl" - echo "${COM_ATMOS_GRIB_1p00/${ROTDIR}\//}/${head}pgrb2.1p00.anl.idx" - echo "${COM_ATMOS_ANALYSIS/${ROTDIR}\//}/${head}atmanl.nc" - echo "${COM_ATMOS_ANALYSIS/${ROTDIR}\//}/${head}sfcanl.nc" - if [[ -s "${COM_ATMOS_ANALYSIS}/${head}atmvar.yaml" ]]; then - echo "${COM_ATMOS_ANALYSIS/${ROTDIR}\//}/${head}atmvar.yaml" - fi - if [[ -s "${COM_ATMOS_ANALYSIS}/${head}atmstat" ]]; then - echo "${COM_ATMOS_ANALYSIS/${ROTDIR}\//}/${head}atmstat" - fi - if [[ -s "${COM_ATMOS_ANALYSIS}/${head}gsistat" ]]; then - echo "${COM_ATMOS_ANALYSIS/${ROTDIR}\//}/${head}gsistat" - fi - if [[ -s "${COM_ATMOS_ANALYSIS}/${head}atmanl.ensres.nc" ]]; then - echo "${COM_ATMOS_ANALYSIS/${ROTDIR}\//}/${head}atmanl.ensres.nc" - fi - if [[ -s "${COM_ATMOS_ANALYSIS}/${head}atma003.ensres.nc" ]]; then - echo "${COM_ATMOS_ANALYSIS/${ROTDIR}\//}/${head}atma003.ensres.nc" - fi - if [[ -s "${COM_ATMOS_ANALYSIS}/${head}atma009.ensres.nc" ]]; then - echo "${COM_ATMOS_ANALYSIS/${ROTDIR}\//}/${head}atma009.ensres.nc" - fi - if [[ -s "${COM_ATMOS_ANALYSIS}/${head}cnvstat" ]]; then - echo "${COM_ATMOS_ANALYSIS/${ROTDIR}\//}/${head}cnvstat" - fi - if [[ -s "${COM_ATMOS_ANALYSIS}/${head}oznstat" ]]; then - echo "${COM_ATMOS_ANALYSIS/${ROTDIR}\//}/${head}oznstat" - fi - if [[ ${DO_AERO} = "YES" ]]; then - if [[ -s "${COM_CHEM_ANALYSIS}/${head}aerostat" ]]; then - echo "${COM_CHEM_ANALYSIS/${ROTDIR}\//}/${head}aerostat" - fi - fi - if [[ -s "${COM_SNOW_ANALYSIS}/${head}snowstat.tgz" ]]; then - echo "${COM_SNOW_ANALYSIS/${ROTDIR}\//}/${head}snowstat.tgz" - fi - if [[ -s "${COM_ATMOS_ANALYSIS}/${head}radstat" ]]; then - echo "${COM_ATMOS_ANALYSIS/${ROTDIR}\//}/${head}radstat" - fi - for fstep in prep anal fcst verfozn verfrad vminmon; do - if [[ -s "${ROTDIR}/logs/${PDY}${cyc}/gdas${fstep}.log" ]]; then - 
echo "./logs/${PDY}${cyc}/gdas${fstep}.log" - fi - done - echo "./logs/${PDY}${cyc}/gdas*prod*.log" - if [[ "${WRITE_DOPOST}" == ".false." ]]; then - echo "./logs/${PDY}${cyc}/gdas*upp*.log" - fi - - fh=0 - while [[ ${fh} -le 9 ]]; do - fhr=$(printf %03i "${fh}") - echo "${COM_ATMOS_MASTER/${ROTDIR}\//}/${head}sfluxgrbf${fhr}.grib2" - echo "${COM_ATMOS_MASTER/${ROTDIR}\//}/${head}sfluxgrbf${fhr}.grib2.idx" - echo "${COM_ATMOS_GRIB_0p25/${ROTDIR}\//}/${head}pgrb2.0p25.f${fhr}" - echo "${COM_ATMOS_GRIB_0p25/${ROTDIR}\//}/${head}pgrb2.0p25.f${fhr}.idx" - echo "${COM_ATMOS_GRIB_1p00/${ROTDIR}\//}/${head}pgrb2.1p00.f${fhr}" - echo "${COM_ATMOS_GRIB_1p00/${ROTDIR}\//}/${head}pgrb2.1p00.f${fhr}.idx" - echo "${COM_ATMOS_HISTORY/${ROTDIR}\//}/${head}atm.logf${fhr}.txt" - echo "${COM_ATMOS_HISTORY/${ROTDIR}\//}/${head}atmf${fhr}.nc" - echo "${COM_ATMOS_HISTORY/${ROTDIR}\//}/${head}sfcf${fhr}.nc" - fh=$((fh+3)) - done - flist="001 002 004 005 007 008" - for fhr in ${flist}; do - file="${COM_ATMOS_MASTER/${ROTDIR}\//}/${head}sfluxgrbf${fhr}.grib2" - if [[ -s "${file}" ]]; then - echo "${file}" - echo "${file}.idx" - fi - done - - # GSI Monitor jobs output - - if [[ ${DO_VERFOZN} = "YES" ]]; then - for type in horiz time; do - if [[ ${type} = "horiz" ]]; then - suffix=".gz" - elif [[ ${type} = "time" ]]; then - suffix="" - echo "${COM_ATMOS_OZNMON/${ROTDIR}\//}/${type}/bad_cnt.${PDY}${cyc}" - echo "${COM_ATMOS_OZNMON/${ROTDIR}\//}/${type}/bad_diag.${PDY}${cyc}" - echo "${COM_ATMOS_OZNMON/${ROTDIR}\//}/${type}/bad_pen.${PDY}${cyc}" - fi - subtyplist="gome_metop-b omi_aura ompslp_npp ompsnp_n20 ompsnp_npp ompstc8_n20 ompstc8_npp sbuv2_n19" - for subtype in ${subtyplist}; do - # On occassion, data is not available for some of these satellites. Check for existence. 
- if [[ -s "${COM_ATMOS_OZNMON/${ROTDIR}\//}/${type}/${subtype}.ges.${PDY}${cyc}.ieee_d${suffix}" ]]; then - echo "${COM_ATMOS_OZNMON/${ROTDIR}\//}/${type}/${subtype}.anl.${PDY}${cyc}.ieee_d${suffix}" - echo "${COM_ATMOS_OZNMON/${ROTDIR}\//}/${type}/${subtype}.anl.ctl" - echo "${COM_ATMOS_OZNMON/${ROTDIR}\//}/${type}/${subtype}.ges.${PDY}${cyc}.ieee_d${suffix}" - echo "${COM_ATMOS_OZNMON/${ROTDIR}\//}/${type}/${subtype}.ges.ctl" - fi - done - echo "${COM_ATMOS_OZNMON/${ROTDIR}\//}/${type}/stdout.${type}.tar.gz" - done - fi - - if [[ ${DO_VERFRAD} = "YES" ]]; then - echo "${COM_ATMOS_RADMON/${ROTDIR}\//}/bad_diag.${PDY}${cyc}" - echo "${COM_ATMOS_RADMON/${ROTDIR}\//}/bad_pen.${PDY}${cyc}" - echo "${COM_ATMOS_RADMON/${ROTDIR}\//}/low_count.${PDY}${cyc}" - echo "${COM_ATMOS_RADMON/${ROTDIR}\//}/radmon_angle.tar.gz" - echo "${COM_ATMOS_RADMON/${ROTDIR}\//}/radmon_bcoef.tar.gz" - echo "${COM_ATMOS_RADMON/${ROTDIR}\//}/radmon_bcor.tar.gz" - echo "${COM_ATMOS_RADMON/${ROTDIR}\//}/radmon_time.tar.gz" - echo "${COM_ATMOS_RADMON/${ROTDIR}\//}/warning.${PDY}${cyc}" - fi - - if [[ ${DO_VMINMON} = "YES" ]]; then - echo "${COM_ATMOS_MINMON/${ROTDIR}\//}/${PDY}${cyc}.costs.txt" - echo "${COM_ATMOS_MINMON/${ROTDIR}\//}/${PDY}${cyc}.cost_terms.txt" - echo "${COM_ATMOS_MINMON/${ROTDIR}\//}/${PDY}${cyc}.gnorms.ieee_d" - echo "${COM_ATMOS_MINMON/${ROTDIR}\//}/${PDY}${cyc}.reduction.ieee_d" - echo "${COM_ATMOS_MINMON/${ROTDIR}\//}/gnorm_data.txt" - fi - - } >> "${DATA}/gdas.txt" - - #.................. 
- if [[ -s "${COM_ATMOS_ANALYSIS}/${head}cnvstat" ]]; then - echo "${COM_ATMOS_ANALYSIS/${ROTDIR}\//}/${head}cnvstat" >> "${DATA}/gdas_restarta.txt" - fi - if [[ -s "${COM_ATMOS_ANALYSIS}/${head}radstat" ]]; then - echo "${COM_ATMOS_ANALYSIS/${ROTDIR}\//}/${head}radstat" >> "${DATA}/gdas_restarta.txt" - fi - - { - gsiob_files=("nsstbufr" - "prepbufr" - "prepbufr.acft_profiles") - for file in "${gsiob_files[@]}"; do - [[ -s ${COM_OBS}/${head}${file} ]] && echo "${COM_OBS/${ROTDIR}\//}/${head}${file}" - done - - gsida_files=("abias" - "abias_air" - "abias_int" - "abias_pc" - "dtfanl.nc" - "loginc.txt") - for file in "${gsida_files[@]}"; do - [[ -s ${COM_ATMOS_ANALYSIS}/${head}${file} ]] && echo "${COM_ATMOS_ANALYSIS/${ROTDIR}\//}/${head}${file}" - done - - ufsda_files=("amsua_n19.satbias.nc4" - "amsua_n19.satbias_cov.nc4" - "amsua_n19.tlapse.txt") - for file in "${ufsda_files[@]}"; do - [[ -s ${COM_ATMOS_ANALYSIS}/${head}${file} ]] && echo "${COM_ATMOS_ANALYSIS/${ROTDIR}\//}/${head}${file}" - done - - echo "${COM_ATMOS_ANALYSIS/${ROTDIR}\//}/${head}atmi*nc" - - echo "${COM_ATMOS_RESTART/${ROTDIR}\//}/*0000.sfcanl_data.tile1.nc" - echo "${COM_ATMOS_RESTART/${ROTDIR}\//}/*0000.sfcanl_data.tile2.nc" - echo "${COM_ATMOS_RESTART/${ROTDIR}\//}/*0000.sfcanl_data.tile3.nc" - echo "${COM_ATMOS_RESTART/${ROTDIR}\//}/*0000.sfcanl_data.tile4.nc" - echo "${COM_ATMOS_RESTART/${ROTDIR}\//}/*0000.sfcanl_data.tile5.nc" - echo "${COM_ATMOS_RESTART/${ROTDIR}\//}/*0000.sfcanl_data.tile6.nc" - - [[ -s "${COM_CONF}/${head}letkfoi.yaml" ]] && echo "${COM_CONF/${ROTDIR}\//}/${head}letkfoi.yaml" - - if [[ "${DO_JEDISNOWDA:-}" == "YES" ]]; then - echo "${COM_SNOW_ANALYSIS/${ROTDIR}\//}/*0000.sfc_data.tile1.nc" - echo "${COM_SNOW_ANALYSIS/${ROTDIR}\//}/*0000.sfc_data.tile2.nc" - echo "${COM_SNOW_ANALYSIS/${ROTDIR}\//}/*0000.sfc_data.tile3.nc" - echo "${COM_SNOW_ANALYSIS/${ROTDIR}\//}/*0000.sfc_data.tile4.nc" - echo "${COM_SNOW_ANALYSIS/${ROTDIR}\//}/*0000.sfc_data.tile5.nc" - echo 
"${COM_SNOW_ANALYSIS/${ROTDIR}\//}/*0000.sfc_data.tile6.nc" - fi - } >> "${DATA}/gdas_restarta.txt" - - #.................. - echo "${COM_ATMOS_RESTART/${ROTDIR}\//}" >> "${DATA}/gdas_restartb.txt" - - #.................. - if [[ ${DO_WAVE} = "YES" ]]; then - - rm -rf "${DATA}/gdaswave.txt" - touch "${DATA}/gdaswave.txt" - rm -rf "${DATA}/gdaswave_restart.txt" - touch "${DATA}/gdaswave_restart.txt" - - head="gdaswave.t${cyc}z." - - #........................... - { - echo "${COM_WAVE_GRID/${ROTDIR}\//}/${head}*" - echo "${COM_WAVE_STATION/${ROTDIR}\//}/${head}*" - } >> "${DATA}/gdaswave.txt" - - echo "${COM_WAVE_RESTART/${ROTDIR}\//}/*" >> "${DATA}/gdaswave_restart.txt" - - fi - - #.................. - if [[ ${DO_OCN} = "YES" ]]; then - - rm -rf "${DATA}/gdasocean.txt" - touch "${DATA}/gdasocean.txt" - rm -rf "${DATA}/gdasocean_restart.txt" - touch "${DATA}/gdasocean_restart.txt" - - head="gdas.t${cyc}z." - - #........................... - { - echo "${COM_OCEAN_HISTORY/${ROTDIR}\//}/${head}*" - echo "${COM_OCEAN_INPUT/${ROTDIR}\//}" - } >> "${DATA}/gdasocean.txt" - - { - echo "${COM_OCEAN_RESTART/${ROTDIR}\//}/*" - echo "${COM_MED_RESTART/${ROTDIR}\//}/*" - } >> "${DATA}/gdasocean_restart.txt" - - if [[ ${DO_JEDIOCNVAR} = "YES" ]]; then - { - echo "${COM_OCEAN_ANALYSIS/${ROTDIR}\//}/${head}*" - echo "${COM_OCEAN_ANALYSIS/${ROTDIR}\//}/gdas.t??z.ocngrid.nc" - echo "${COM_OCEAN_ANALYSIS/${ROTDIR}\//}/diags" - echo "${COM_OCEAN_ANALYSIS/${ROTDIR}\//}/yaml" - } >> "${DATA}/gdasocean_analysis.txt" - fi - fi - - if [[ ${DO_ICE} = "YES" ]]; then - - rm -rf "${DATA}/gdasice.txt" - touch "${DATA}/gdasice.txt" - rm -rf "${DATA}/gdasice_restart.txt" - touch "${DATA}/gdasice_restart.txt" - - head="gdas.ice.t${cyc}z." - - #........................... 
- { - echo "${COM_ICE_HISTORY/${ROTDIR}\//}/${head}*" - } >> "${DATA}/gdasice.txt" - - echo "${COM_ICE_RESTART/${ROTDIR}\//}/*" >> "${DATA}/gdasice_restart.txt" - - fi - - -#----------------------------------------------------- -fi ##end of gdas -#----------------------------------------------------- - - -#----------------------------------------------------- -if [[ ${type} == "enkfgdas" || ${type} == "enkfgfs" ]]; then -#----------------------------------------------------- - - IAUFHRS_ENKF=${IAUFHRS_ENKF:-6} - lobsdiag_forenkf=${lobsdiag_forenkf:-".false."} - IFS=',' read -ra nfhrs <<< ${IAUFHRS_ENKF} - NMEM_ENS=${NMEM_ENS:-80} - NMEM_EARCGRP=${NMEM_EARCGRP:-10} ##number of ens memebers included in each tarball - NTARS=$((NMEM_ENS/NMEM_EARCGRP)) - [[ ${NTARS} -eq 0 ]] && NTARS=1 - [[ $((NTARS*NMEM_EARCGRP)) -lt ${NMEM_ENS} ]] && NTARS=$((NTARS+1)) - ##NTARS2=$((NTARS/2)) # number of earc groups to include analysis/increments - NTARS2=${NTARS} - - head="${RUN}.t${cyc}z." - - #.................. 
- rm -f "${DATA}/${RUN}.txt" - touch "${DATA}/${RUN}.txt" - - { - gsida_files=("enkfstat" - "gsistat.ensmean" - "cnvstat.ensmean" - "oznstat.ensmean" - "radstat.ensmean") - for file in "${gsida_files[@]}"; do - [[ -s ${COM_ATMOS_ANALYSIS_ENSSTAT}/${head}${file} ]] && echo "${COM_ATMOS_ANALYSIS_ENSSTAT/${ROTDIR}\//}/${head}${file}" - done - - ufsda_files=("atmens.yaml" - "atmensstat") - for file in "${ufsda_files[@]}"; do - [[ -s ${COM_ATMOS_ANALYSIS_ENSSTAT}/${head}${file} ]] && echo "${COM_ATMOS_ANALYSIS_ENSSTAT/${ROTDIR}\//}/${head}${file}" - done - - for FHR in "${nfhrs[@]}"; do # loop over analysis times in window - if [[ ${FHR} -eq 6 ]]; then - if [[ -s "${COM_ATMOS_ANALYSIS_ENSSTAT}/${head}atmanl.ensmean.nc" ]]; then - echo "${COM_ATMOS_ANALYSIS_ENSSTAT/${ROTDIR}\//}/${head}atmanl.ensmean.nc" - fi - if [[ -s "${COM_ATMOS_ANALYSIS_ENSSTAT}/${head}atminc.ensmean.nc" ]]; then - echo "${COM_ATMOS_ANALYSIS_ENSSTAT/${ROTDIR}\//}/${head}atminc.ensmean.nc" - fi - else - if [[ -s "${COM_ATMOS_ANALYSIS_ENSSTAT}/${head}atma00${FHR}.ensmean.nc" ]]; then - echo "${COM_ATMOS_ANALYSIS_ENSSTAT/${ROTDIR}\//}/${head}atma00${FHR}.ensmean.nc" - fi - if [[ -s "${COM_ATMOS_ANALYSIS_ENSSTAT}/${head}atmi00${FHR}.ensmean.nc" ]]; then - echo "${COM_ATMOS_ANALYSIS_ENSSTAT/${ROTDIR}\//}/${head}atmi00${FHR}.ensmean.nc" - fi - fi - done # loop over FHR - for fstep in fcst epos ; do - echo "logs/${PDY}${cyc}/${RUN}${fstep}*.log" - done - - # eobs, ecen, esfc, and eupd are not run on the first cycle - for fstep in eobs ecen esfc eupd ; do - for log in "${ROTDIR}/logs/${PDY}${cyc}/${RUN}${fstep}"*".log"; do - if [[ -s "${log}" ]]; then - echo "logs/${PDY}${cyc}/${RUN}${fstep}*.log" - fi - done - done - - # eomg* are optional jobs - for log in "${ROTDIR}/logs/${PDY}${cyc}/${RUN}eomg"*".log"; do - if [[ -s "${log}" ]]; then - echo "logs/${PDY}${cyc}/${RUN}eomg*.log" - fi - break - done - - # Ensemble spread file only available with netcdf output - fh=3 - while [ $fh -le 9 ]; do - fhr=$(printf 
%03i $fh) - echo "${COM_ATMOS_HISTORY_ENSSTAT/${ROTDIR}\//}/${head}atmf${fhr}.ensmean.nc" - echo "${COM_ATMOS_HISTORY_ENSSTAT/${ROTDIR}\//}/${head}sfcf${fhr}.ensmean.nc" - if [[ -s "${COM_ATMOS_HISTORY_ENSSTAT}/${head}atmf${fhr}.ensspread.nc" ]]; then - echo "${COM_ATMOS_HISTORY_ENSSTAT/${ROTDIR}\//}/${head}atmf${fhr}.ensspread.nc" - fi - fh=$((fh+3)) - done - } >> "${DATA}/${RUN}.txt" - - #........................... - n=1 - while (( n <= NTARS )); do - #........................... - - rm -f "${DATA}/${RUN}_grp${n}.txt" - rm -f "${DATA}/${RUN}_restarta_grp${n}.txt" - rm -f "${DATA}/${RUN}_restartb_grp${n}.txt" - touch "${DATA}/${RUN}_grp${n}.txt" - touch "${DATA}/${RUN}_restarta_grp${n}.txt" - touch "${DATA}/${RUN}_restartb_grp${n}.txt" - - m=1 - while (( m <= NMEM_EARCGRP && (n-1)*NMEM_EARCGRP+m <= NMEM_ENS )); do - nm=$(((n-1)*NMEM_EARCGRP+m)) - mem=$(printf %03i ${nm}) - head="${RUN}.t${cyc}z." - - MEMDIR="mem${mem}" YMD=${PDY} HH=${cyc} declare_from_tmpl \ - COM_ATMOS_ANALYSIS_MEM:COM_ATMOS_ANALYSIS_TMPL \ - COM_ATMOS_RESTART_MEM:COM_ATMOS_RESTART_TMPL \ - COM_ATMOS_HISTORY_MEM:COM_ATMOS_HISTORY_TMPL - - #--- - for FHR in "${nfhrs[@]}"; do # loop over analysis times in window - if [ "${FHR}" -eq 6 ]; then - { - if (( n <= NTARS2 )); then - if [[ -s "${COM_ATMOS_ANALYSIS_MEM}/${head}atmanl.nc" ]] ; then - echo "${COM_ATMOS_ANALYSIS_MEM/${ROTDIR}\//}/${head}atmanl.nc" - fi - if [[ -s "${COM_ATMOS_ANALYSIS_MEM}/${head}ratminc.nc" ]] ; then - echo "${COM_ATMOS_ANALYSIS_MEM/${ROTDIR}\//}/${head}ratminc.nc" - fi - fi - } >> "${DATA}/${RUN}_grp${n}.txt" - - if [[ -s "${COM_ATMOS_ANALYSIS_MEM}/${head}ratminc.nc" ]] ; then - echo "${COM_ATMOS_ANALYSIS_MEM/${ROTDIR}\//}/${head}ratminc.nc" \ - >> "${DATA}/${RUN}_restarta_grp${n}.txt" - fi - - else - { - if (( n <= NTARS2 )); then - if [[ -s "${COM_ATMOS_ANALYSIS_MEM}/${head}atma00${FHR}.nc" ]] ; then - echo "${COM_ATMOS_ANALYSIS_MEM/${ROTDIR}\//}/${head}atma00${FHR}.nc" - fi - if [[ -s 
"${COM_ATMOS_ANALYSIS_MEM}/${head}ratmi00${FHR}.nc" ]] ; then - echo "${COM_ATMOS_ANALYSIS_MEM/${ROTDIR}\//}/${head}ratmi00${FHR}.nc" - fi - fi - } >> "${DATA}/${RUN}_grp${n}.txt" - if [[ -s "${COM_ATMOS_ANALYSIS_MEM}/${head}ratmi00${FHR}.nc" ]] ; then - echo "${COM_ATMOS_ANALYSIS_MEM/${ROTDIR}\//}/${head}ratmi00${FHR}.nc" \ - >> "${DATA}/${RUN}_restarta_grp${n}.txt" - fi - fi - { - echo "${COM_ATMOS_HISTORY_MEM/${ROTDIR}\//}/${head}atmf00${FHR}.nc" - if (( FHR == 6 )); then - echo "${COM_ATMOS_HISTORY_MEM/${ROTDIR}\//}/${head}sfcf00${FHR}.nc" - fi - } >> "${DATA}/${RUN}_grp${n}.txt" - done # loop over FHR - - if [[ ${lobsdiag_forenkf} == ".false." ]] ; then - { - echo "${COM_ATMOS_RESTART_MEM/${ROTDIR}\//}/${head}gsistat" - if [[ -s "${COM_ATMOS_RESTART_MEM}/${head}cnvstat" ]] ; then - echo "${COM_ATMOS_RESTART_MEM/${ROTDIR}\//}/${head}cnvstat" - fi - } >> "${DATA}/${RUN}_grp${n}.txt" - - { - if [[ -s "${COM_ATMOS_RESTART_MEM}/${head}radstat" ]]; then - echo "${COM_ATMOS_RESTART_MEM/${ROTDIR}\//}/${head}radstat" - fi - if [[ -s "${COM_ATMOS_RESTART_MEM}/${head}cnvstat" ]]; then - echo "${COM_ATMOS_RESTART_MEM/${ROTDIR}\//}/${head}cnvstat" - fi - echo "${COM_ATMOS_RESTART_MEM/${ROTDIR}\//}/${head}abias" - echo "${COM_ATMOS_RESTART_MEM/${ROTDIR}\//}/${head}abias_air" - echo "${COM_ATMOS_RESTART_MEM/${ROTDIR}\//}/${head}abias_int" - echo "${COM_ATMOS_RESTART_MEM/${ROTDIR}\//}/${head}abias_pc" - } >> "${DATA}/${RUN}_restarta_grp${n}.txt" - fi - #--- - { - echo "${COM_ATMOS_RESTART_MEM/${ROTDIR}\//}/*0000.sfcanl_data.tile1.nc" - echo "${COM_ATMOS_RESTART_MEM/${ROTDIR}\//}/*0000.sfcanl_data.tile2.nc" - echo "${COM_ATMOS_RESTART_MEM/${ROTDIR}\//}/*0000.sfcanl_data.tile3.nc" - echo "${COM_ATMOS_RESTART_MEM/${ROTDIR}\//}/*0000.sfcanl_data.tile4.nc" - echo "${COM_ATMOS_RESTART_MEM/${ROTDIR}\//}/*0000.sfcanl_data.tile5.nc" - echo "${COM_ATMOS_RESTART_MEM/${ROTDIR}\//}/*0000.sfcanl_data.tile6.nc" - } >> "${DATA}/${RUN}_restarta_grp${n}.txt" - #--- - echo 
"${COM_ATMOS_RESTART_MEM/${ROTDIR}\//}" >> "${DATA}/${RUN}_restartb_grp${n}.txt" - - m=$((m+1)) - done - - - #........................... - n=$((n+1)) - done - #........................... - - -#----------------------------------------------------- -fi ##end of enkfgdas or enkfgfs -#----------------------------------------------------- - -exit 0 diff --git a/ush/module-setup.sh b/ush/module-setup.sh index b66e3622d0..b4ec3edafa 100755 --- a/ush/module-setup.sh +++ b/ush/module-setup.sh @@ -70,39 +70,10 @@ elif [[ ${MACHINE_ID} = stampede* ]] ; then elif [[ ${MACHINE_ID} = gaea* ]] ; then # We are on GAEA. if ( ! eval module help > /dev/null 2>&1 ) ; then - # We cannot simply load the module command. The GAEA - # /etc/profile modifies a number of module-related variables - # before loading the module command. Without those variables, - # the module command fails. Hence we actually have to source - # /etc/profile here. - source /etc/profile - __ms_source_etc_profile=yes - else - __ms_source_etc_profile=no - fi - module purge - # clean up after purge - unset _LMFILES_ - unset _LMFILES_000 - unset _LMFILES_001 - unset LOADEDMODULES - module load modules - if [[ -d /opt/cray/ari/modulefiles ]] ; then - module use -a /opt/cray/ari/modulefiles - fi - if [[ -d /opt/cray/pe/ari/modulefiles ]] ; then - module use -a /opt/cray/pe/ari/modulefiles - fi - if [[ -d /opt/cray/pe/craype/default/modulefiles ]] ; then - module use -a /opt/cray/pe/craype/default/modulefiles - fi - if [[ -s /etc/opt/cray/pe/admin-pe/site-config ]] ; then - source /etc/opt/cray/pe/admin-pe/site-config - fi - if [[ "${__ms_source_etc_profile}" == yes ]] ; then + source /usr/share/lmod/lmod/init/bash source /etc/profile - unset __ms_source_etc_profile fi + module reset elif [[ ${MACHINE_ID} = expanse* ]]; then # We are on SDSC Expanse diff --git a/ush/oceanice_nc2grib2.sh b/ush/oceanice_nc2grib2.sh index 2afd0e07f2..5781e06b36 100755 --- a/ush/oceanice_nc2grib2.sh +++ b/ush/oceanice_nc2grib2.sh @@ -45,23 
+45,23 @@ function _ice_nc2grib2 { # tables in wgrib2 v2.0.8: # -import_netcdf "${infile}" "hs_h" "0:1:${latlon_dims}" \ -# -set_var ??? -set center 7 \ +# -set_var SNVOLSI -set center 7 \ # -set_date "${current_cycle}" -set_ftime "${aperiod} hour ave fcst" \ # -set_scaling same same -set_grib_type c1 -grib_out "${outfile}" \ # -import_netcdf "${infile}" "frzmlt_h" "0:1:${latlon_dims}" \ -# -set_var ??? -set center 7 \ +# -set_var FRZMLTPOT -set center 7 \ # -set_date "${current_cycle}" -set_ftime "${aperiod} hour ave fcst" \ # -set_scaling same same -set_grib_type c1 -grib_out "${outfile}" \ # -import_netcdf "${infile}" "albsni_h" "0:1:${latlon_dims}" \ -# -set_var ALBICE -set center 7 -rpn "100.0:/" \ +# -set_var ALBDOICE -set center 7 -rpn "100.0:/" \ # -set_date "${current_cycle}" -set_ftime "${aperiod} hour ave fcst" \ # -set_scaling same same -set_grib_type c1 -grib_out "${outfile}" \ # -import_netcdf "${infile}" "mlt_onset_h" "0:1:${latlon_dims}" \ -# -set_var ??? -set center 7 \ +# -set_var MLTDATE -set center 7 \ # -set_date "${current_cycle}" -set_ftime "${aperiod} hour ave fcst" \ # -set_scaling same same -set_grib_type c1 -grib_out "${outfile}" \ # -import_netcdf "${infile}" "frz_onset_h" "0:1:${latlon_dims}" \ -# -set_var ??? 
-set center 7 \ +# -set_var FRZDATE -set center 7 \ # -set_date "${current_cycle}" -set_ftime "${aperiod} hour ave fcst" \ # -set_scaling same same -set_grib_type c1 -grib_out "${outfile}" @@ -121,11 +121,11 @@ function _ocean2D_nc2grib2 { -set_date "${current_cycle}" -set_ftime "${aperiod} hour ave fcst" \ -set_scaling same same -set_grib_type c1 -grib_out "${outfile}" \ -import_netcdf "${infile}" "SW" "0:1:${latlon_dims}" \ - -set_var DSWRF -set center 7 \ + -set_var NSWRF -set center 7 \ -set_date "${current_cycle}" -set_ftime "${aperiod} hour ave fcst" \ -set_scaling same same -set_grib_type c1 -grib_out "${outfile}" \ -import_netcdf "${infile}" "LW" "0:1:${latlon_dims}" \ - -set_var DLWRF -set center 7 \ + -set_var NLWRF -set center 7 \ -set_date "${current_cycle}" -set_ftime "${aperiod} hour ave fcst" \ -set_scaling same same -set_grib_type c1 -grib_out "${outfile}" \ -import_netcdf "${infile}" "LwLatSens" "0:1:${latlon_dims}" \ @@ -187,22 +187,22 @@ function _ocean3D_nc2grib2 { ${WGRIB2} "${template}" \ -import_netcdf "${infile}" "temp" "0:1:${zl}:1:${latlon_dims}" \ -set_var WTMP -set center 7 -rpn "273.15:+" \ - -set_lev "${depth} m below water surface" \ + -set_lev "${depth} m below sea level" \ -set_date "${current_cycle}" -set_ftime "${aperiod} hour ave fcst" \ -set_scaling same same -set_grib_type c1 -grib_out tmp.gb2 \ -import_netcdf "${infile}" "so" "0:1:${zl}:1:${latlon_dims}" \ -set_var SALIN -set center 7 \ - -set_lev "${depth} m below water surface" \ + -set_lev "${depth} m below sea level" \ -set_date "${current_cycle}" -set_ftime "${aperiod} hour ave fcst" \ -set_scaling same same -set_grib_type c1 -grib_out tmp.gb2 \ -import_netcdf "${infile}" "uo" "0:1:${zl}:1:${latlon_dims}" \ -set_var UOGRD -set center 7 \ - -set_lev "${depth} m below water surface" \ + -set_lev "${depth} m below sea level" \ -set_date "${current_cycle}" -set_ftime "${aperiod} hour ave fcst" \ -set_scaling same same -set_grib_type c1 -grib_out tmp.gb2 \ -import_netcdf 
"${infile}" "vo" "0:1:${zl}:1:${latlon_dims}" \ -set_var VOGRD -set center 7 \ - -set_lev "${depth} m below water surface" \ + -set_lev "${depth} m below sea level" \ -set_date "${current_cycle}" -set_ftime "${aperiod} hour ave fcst" \ -set_scaling same same -set_grib_type c1 -grib_out tmp.gb2 diff --git a/ush/parsing_namelists_CICE.sh b/ush/parsing_namelists_CICE.sh index dc67777af9..9aed59e214 100755 --- a/ush/parsing_namelists_CICE.sh +++ b/ush/parsing_namelists_CICE.sh @@ -59,6 +59,13 @@ local CICE_RUNID="unknown" local CICE_USE_RESTART_TIME=${use_restart_time} local CICE_RESTART_DIR="./CICE_RESTART/" local CICE_RESTART_FILE="cice_model.res" +local CICE_RESTART_DEFLATE=0 +local CICE_RESTART_CHUNK=0,0 +local CICE_RESTART_STRIDE=-99 +local CICE_RESTART_ROOT=-99 +local CICE_RESTART_REARR="box" +local CICE_RESTART_IOTASKS=-99 +local CICE_RESTART_FORMAT="pnetcdf2" local CICE_DUMPFREQ="y" # "h","d","m" or "y" for restarts at intervals of "hours", "days", "months" or "years" local CICE_DUMPFREQ_N=10000 # Set this to a really large value, as cice, mom6 and cmeps restart interval is controlled by ufs.configure local CICE_DIAGFREQ=$(( 86400 / DT_CICE )) # frequency of diagnostic output in timesteps, recommended for 1x per day @@ -68,8 +75,16 @@ if [[ "${RUN}" =~ "gdas" ]]; then else local CICE_HIST_AVG=".true., .true., .true., .true., .true." 
# GFS long forecaset wants averaged over CICE_HISTFREQ_N fi +local CICE_HISTORY_FORMAT="pnetcdf2" local CICE_HISTORY_DIR="./CICE_OUTPUT/" local CICE_INCOND_DIR="./CICE_OUTPUT/" +local CICE_HISTORY_IOTASKS=-99 +local CICE_HISTORY_REARR="box" +local CICE_HISTORY_ROOT=-99 +local CICE_HISTORY_STRIDE=-99 +local CICE_HISTORY_CHUNK=0,0 +local CICE_HISTORY_DEFLATE=0 +local CICE_HISTORY_PREC=4 # grid_nml section # CICE_GRID # CICE_MASK diff --git a/ush/python/pygfs/task/analysis.py b/ush/python/pygfs/task/analysis.py index 5464c25370..b668ac3980 100644 --- a/ush/python/pygfs/task/analysis.py +++ b/ush/python/pygfs/task/analysis.py @@ -335,6 +335,7 @@ def tgz_diags(statfile: str, diagdir: str) -> None: # get list of diag files to put in tarball diags = glob.glob(os.path.join(diagdir, 'diags', 'diag*nc')) + diags.extend(glob.glob(os.path.join(diagdir, 'diags', 'diag*nc4'))) logger.info(f"Compressing {len(diags)} diag files to {statfile}") diff --git a/ush/python/pygfs/task/archive.py b/ush/python/pygfs/task/archive.py new file mode 100644 index 0000000000..d66b02d7bc --- /dev/null +++ b/ush/python/pygfs/task/archive.py @@ -0,0 +1,381 @@ +#!/usr/bin/env python3 + +import glob +import os +import shutil +import tarfile +from logging import getLogger +from typing import Any, Dict, List + +from wxflow import (AttrDict, FileHandler, Hsi, Htar, Task, cast_strdict_as_dtypedict, + chgrp, get_gid, logit, mkdir_p, parse_j2yaml, rm_p, strftime, + to_YMD, to_YMDH, Template, TemplateConstants) + +logger = getLogger(__name__.split('.')[-1]) + + +class Archive(Task): + """Task to archive ROTDIR data to HPSS (or locally) + """ + + @logit(logger, name="Archive") + def __init__(self, config: Dict[str, Any]) -> None: + """Constructor for the Archive task + The constructor is responsible for collecting necessary yamls based on + the runtime options and RUN. 
+ + Parameters + ---------- + config : Dict[str, Any] + Incoming configuration for the task from the environment + + Returns + ------- + None + """ + super().__init__(config) + + rotdir = self.config.ROTDIR + os.sep + + # Find all absolute paths in the environment and get their relative paths from ${ROTDIR} + path_dict = self._gen_relative_paths(rotdir) + + self.task_config = AttrDict(**self.config, **self.runtime_config, **path_dict) + + @logit(logger) + def configure(self, arch_dict: Dict[str, Any]) -> (Dict[str, Any], List[Dict[str, Any]]): + """Determine which tarballs will need to be created. + + Parameters + ---------- + arch_dict : Dict[str, Any] + Task specific keys, e.g. runtime options (DO_AERO, DO_ICE, etc) + + Return + ------ + arcdir_set : Dict[str, Any] + Set of FileHandler instructions to copy files to the ARCDIR + atardir_sets : List[Dict[str, Any]] + List of tarballs and instructions for creating them via tar or htar + """ + + archive_parm = os.path.join(arch_dict.PARMgfs, "archive") + + # Collect the dataset to archive locally + arcdir_filename = os.path.join(archive_parm, "arcdir.yaml.j2") + + # Add the glob.glob function for capturing log filenames + # TODO remove this kludge once log filenames are explicit + arch_dict['glob'] = glob.glob + + # Add the os.path.exists function to the dict for yaml parsing + arch_dict['path_exists'] = os.path.exists + + # Parse the input jinja yaml template + arcdir_set = parse_j2yaml(arcdir_filename, arch_dict) + + # Collect datasets that need to be archived + # Each dataset represents one tarball + + if arch_dict.HPSSARCH: + self.tar_cmd = "htar" + self.hsi = Hsi() + self.htar = Htar() + self.cvf = self.htar.cvf + self.rm_cmd = self.hsi.rm + self.chgrp_cmd = self.hsi.chgrp + self.chmod_cmd = self.hsi.chmod + elif arch_dict.LOCALARCH: + self.tar_cmd = "tar" + self.cvf = Archive._create_tarball + self.chgrp_cmd = chgrp + self.chmod_cmd = os.chmod + self.rm_cmd = rm_p + else: # Only perform local archiving. 
Do not create tarballs. + self.tar_cmd = "" + return arcdir_set, [] + + if not os.path.isdir(arch_dict.ROTDIR): + raise FileNotFoundError(f"FATAL ERROR: The ROTDIR ({arch_dict.ROTDIR}) does not exist!") + + if arch_dict.RUN == "gdas" or arch_dict.RUN == "gfs": + + # Copy the cyclone track files and rename the experiments + Archive._rename_cyclone_expt(arch_dict) + + if arch_dict.RUN == "gefs": + raise NotImplementedError("FATAL ERROR: Archiving is not yet set up for GEFS runs") + + master_yaml = "master_" + arch_dict.RUN + ".yaml.j2" + + parsed_sets = parse_j2yaml(os.path.join(archive_parm, master_yaml), arch_dict) + + atardir_sets = [] + + for dataset in parsed_sets.datasets.values(): + + dataset["fileset"] = Archive._create_fileset(dataset) + dataset["has_rstprod"] = Archive._has_rstprod(dataset.fileset) + + atardir_sets.append(dataset) + + return arcdir_set, atardir_sets + + @logit(logger) + def execute_store_products(self, arcdir_set: Dict[str, Any]) -> None: + """Perform local archiving of data products to ARCDIR. + + Parameters + ---------- + arcdir_set : Dict[str, Any] + FileHandler instructions to populate ARCDIR with + + Return + ------ + None + """ + + # Copy files to the local ARCDIR + for key in arcdir_set.keys(): + FileHandler(arcdir_set[key]).sync() + + @logit(logger) + def execute_backup_dataset(self, atardir_set: Dict[str, Any]) -> None: + """Create a backup tarball from a yaml dict. + + Parameters + ---------- + atardir_set: Dict[str, Any] + Dict defining set of files to backup and the target tarball. 
+ + Return + ------ + None + """ + + # Generate tarball + if len(atardir_set.fileset) == 0: + logger.warning(f"WARNING: skipping would-be empty archive {atardir_set.target}.") + return + + if atardir_set.has_rstprod: + + try: + self.cvf(atardir_set.target, atardir_set.fileset) + # Regardless of exception type, attempt to remove the target + except Exception: + self.rm_cmd(atardir_set.target) + raise RuntimeError(f"FATAL ERROR: Failed to create restricted archive {atardir_set.target}, deleting!") + + self._protect_rstprod(atardir_set) + + else: + self.cvf(atardir_set.target, atardir_set.fileset) + + @staticmethod + @logit(logger) + def _create_fileset(atardir_set: Dict[str, Any]) -> List: + """ + Collect the list of all available files from the parsed yaml dict. + Globs are expanded and if required files are missing, an error is + raised. + + TODO: expand all globs in the jinja yaml files instead of expanding + them here and issue errors here if globbing patterns (*, ?, []) + are found. + + Parameters + ---------- + atardir_set: Dict + Contains full paths for required and optional files to be archived. + """ + + fileset = [] + if "required" in atardir_set: + if atardir_set.required is not None: + for item in atardir_set.required: + glob_set = glob.glob(item) + if len(glob_set) == 0: + raise FileNotFoundError(f"FATAL ERROR: Required file, directory, or glob {item} not found!") + for entry in glob_set: + fileset.append(entry) + + if "optional" in atardir_set: + if atardir_set.optional is not None: + for item in atardir_set.optional: + glob_set = glob.glob(item) + if len(glob_set) == 0: + logger.warning(f"WARNING: optional file/glob {item} not found!") + else: + for entry in glob_set: + fileset.append(entry) + + return fileset + + @staticmethod + @logit(logger) + def _has_rstprod(fileset: List) -> bool: + """ + Checks if any files in the input fileset belongs to rstprod. + + Parameters + ---------- + fileset : List + List of filenames to check. 
+ """ + + try: + rstprod_gid = get_gid("rstprod") + except KeyError: + # rstprod does not exist on this machine + return False + + # Expand globs and check each file for group ownership + for file_or_glob in fileset: + glob_set = glob.glob(file_or_glob) + for filename in glob_set: + if os.stat(filename).st_gid == rstprod_gid: + return True + + return False + + @logit(logger) + def _protect_rstprod(self, atardir_set: Dict[str, any]) -> None: + """ + Changes the group of the target tarball to rstprod and the permissions to + 640. If this fails for any reason, attempt to delete the file before exiting. + + """ + + try: + self.chgrp_cmd("rstprod", atardir_set.target) + if self.tar_cmd == "htar": + self.chmod_cmd("640", atardir_set.target) + else: + self.chmod_cmd(atardir_set.target, 0o640) + # Regardless of exception type, attempt to remove the target + except Exception: + try: + self.rm_cmd(atardir_set.target) + finally: + raise RuntimeError(f"FATAL ERROR: Failed to protect {atardir_set.target}!\n" + f"Please verify that it has been deleted!!") + + @staticmethod + @logit(logger) + def _create_tarball(target: str, fileset: List) -> None: + """Method to create a local tarball. + + Parameters + ---------- + target : str + Tarball to create + + file_list : List + List of files to add to an archive + """ + + # TODO create a set of tar helper functions in wxflow + # Attempt to create the parent directory if it does not exist + mkdir_p(os.path.dirname(os.path.realpath(target))) + + # Create the archive + with tarfile.open(target, "w") as tarball: + for filename in fileset: + tarball.add(filename) + + @logit(logger) + def _gen_relative_paths(self, root_path: str) -> Dict: + """Generate a dict of paths in self.config relative to root_path + + Parameters + ---------- + root_path : str + Path to base all relative paths off of + + Return + ------ + rel_path_dict : Dict + Dictionary of paths relative to root_path. Members will be named + based on the dict names in self.config. 
For COM paths, the names will + follow COM_ --> _dir. For all other directories, the + names will follow --> _dir. + """ + + rel_path_dict = {} + for key, value in self.config.items(): + if isinstance(value, str): + if root_path in value: + rel_path = value.replace(root_path, "") + rel_key = (key[4:] if key.startswith("COM_") else key).lower() + "_dir" + rel_path_dict[rel_key] = rel_path + + return rel_path_dict + + @staticmethod + @logit(logger) + def _rename_cyclone_expt(arch_dict) -> None: + + # Rename the experiment in the tracker files from "AVNO" to the + # first 4 letters of PSLOT. + pslot4 = arch_dict.PSLOT.upper() + if len(arch_dict.PSLOT) > 4: + pslot4 = arch_dict.PSLOT[0:4].upper() + + track_dir = arch_dict.COM_ATMOS_TRACK + run = arch_dict.RUN + cycle_HH = strftime(arch_dict.current_cycle, "%H") + + if run == "gfs": + in_track_file = (track_dir + "/avno.t" + + cycle_HH + "z.cycle.trackatcfunix") + in_track_p_file = (track_dir + "/avnop.t" + + cycle_HH + "z.cycle.trackatcfunixp") + elif run == "gdas": + in_track_file = (track_dir + "/gdas.t" + + cycle_HH + "z.cycle.trackatcfunix") + in_track_p_file = (track_dir + "/gdasp.t" + + cycle_HH + "z.cycle.trackatcfunixp") + + if not os.path.isfile(in_track_file): + # Do not attempt to archive the outputs + return + + out_track_file = track_dir + "/atcfunix." + run + "." + to_YMDH(arch_dict.current_cycle) + out_track_p_file = track_dir + "/atcfunixp." + run + "." + to_YMDH(arch_dict.current_cycle) + + def replace_string_from_to_file(filename_in, filename_out, search_str, replace_str): + + """Write a new file from the contents of an input file while searching + and replacing ASCII strings. To prevent partial file creation, a + temporary file is created and moved to the final location only + after the search/replace is finished. 
+ + Parameters + ---------- + filename_in : str + Input filename + + filename_out : str + Output filename + + search_str : str + ASCII string to search for + + replace_str : str + ASCII string to replace the search_str with + """ + with open(filename_in) as old_file: + lines = old_file.readlines() + + out_lines = [line.replace(search, replace) for line in lines] + + with open("/tmp/track_file", "w") as new_file: + new_file.writelines(out_lines) + + shutil.move("tmp/track_file", filename_out) + + replace_string_from_to_file(in_track_file, out_track_file, "AVNO", pslot4) + replace_string_from_to_file(in_track_p_file, out_track_p_file, "AVNO", pslot4) + + return diff --git a/ush/python/pygfs/task/atmens_analysis.py b/ush/python/pygfs/task/atmens_analysis.py index 5aaacc42e8..37ac613736 100644 --- a/ush/python/pygfs/task/atmens_analysis.py +++ b/ush/python/pygfs/task/atmens_analysis.py @@ -132,7 +132,7 @@ def initialize(self: Analysis) -> None: FileHandler({'mkdir': newdirs}).sync() @logit(logger) - def execute(self: Analysis) -> None: + def letkf(self: Analysis) -> None: """Execute a global atmens analysis This method will execute a global atmens analysis using JEDI. 
@@ -150,8 +150,9 @@ def execute(self: Analysis) -> None: """ chdir(self.task_config.DATA) - exec_cmd = Executable(self.task_config.APRUN_ATMENSANL) + exec_cmd = Executable(self.task_config.APRUN_ATMENSANLLETKF) exec_name = os.path.join(self.task_config.DATA, 'gdas.x') + exec_cmd.add_default_arg(exec_name) exec_cmd.add_default_arg('fv3jedi') exec_cmd.add_default_arg('localensembleda') @@ -167,6 +168,31 @@ def execute(self: Analysis) -> None: pass + @logit(logger) + def init_fv3_increment(self: Analysis) -> None: + # Setup JEDI YAML file + self.task_config.jedi_yaml = os.path.join(self.runtime_config.DATA, + f"{self.task_config.JCB_ALGO}.yaml") + save_as_yaml(self.get_jedi_config(self.task_config.JCB_ALGO), self.task_config.jedi_yaml) + + # Link JEDI executable to run directory + self.task_config.jedi_exe = self.link_jediexe() + + @logit(logger) + def fv3_increment(self: Analysis) -> None: + # Run executable + exec_cmd = Executable(self.task_config.APRUN_ATMENSANLFV3INC) + exec_cmd.add_default_arg(self.task_config.jedi_exe) + exec_cmd.add_default_arg(self.task_config.jedi_yaml) + + try: + logger.debug(f"Executing {exec_cmd}") + exec_cmd() + except OSError: + raise OSError(f"Failed to execute {exec_cmd}") + except Exception: + raise WorkflowException(f"An error occurred during execution of {exec_cmd}") + @logit(logger) def finalize(self: Analysis) -> None: """Finalize a global atmens analysis @@ -218,42 +244,6 @@ def finalize(self: Analysis) -> None: } FileHandler(yaml_copy).sync() - # Create UFS model readable atm increment file from UFS-DA atm increment - logger.info("Create UFS model readable atm increment file from UFS-DA atm increment") - self.jedi2fv3inc() - - def clean(self): - super().clean() - - @logit(logger) - def jedi2fv3inc(self: Analysis) -> None: - """Generate UFS model readable analysis increment - - This method writes a UFS DA atm increment in UFS model readable format.
- This includes: - - write UFS-DA atm increments using variable names expected by UFS model - - compute and write delp increment - - compute and write hydrostatic delz increment - - Please note that some of these steps are temporary and will be modified - once the modle is able to directly read atm increments. - - Parameters - ---------- - Analysis: parent class for GDAS task - - Returns - ---------- - None - """ - # Select the atm guess file based on the analysis and background resolutions - # Fields from the atm guess are used to compute the delp and delz increments - cdate = to_fv3time(self.task_config.current_cycle) - cdate_inc = cdate.replace('.', '_') - - # Reference the python script which does the actual work - incpy = os.path.join(self.task_config.HOMEgfs, 'ush/jediinc2fv3.py') - # create template dictionaries template_inc = self.task_config.COM_ATMOS_ANALYSIS_TMPL tmpl_inc_dict = { @@ -263,14 +253,10 @@ def jedi2fv3inc(self: Analysis) -> None: 'HH': self.task_config.current_cycle.strftime('%H') } - template_ges = self.task_config.COM_ATMOS_HISTORY_TMPL - tmpl_ges_dict = { - 'ROTDIR': self.task_config.ROTDIR, - 'RUN': self.task_config.RUN, - 'YMD': to_YMD(self.task_config.previous_cycle), - 'HH': self.task_config.previous_cycle.strftime('%H') - } - + # copy FV3 atm increment to comrot directory + logger.info("Copy UFS model readable atm increment file") + cdate = to_fv3time(self.task_config.current_cycle) + cdate_inc = cdate.replace('.', '_') # loop over ensemble members for imem in range(1, self.task_config.NMEM_ENS + 1): memchar = f"mem{imem:03d}" @@ -278,20 +264,15 @@ def jedi2fv3inc(self: Analysis) -> None: # create output path for member analysis increment tmpl_inc_dict['MEMDIR'] = memchar incdir = Template.substitute_structure(template_inc, TemplateConstants.DOLLAR_CURLY_BRACE, tmpl_inc_dict.get) + src = os.path.join(self.task_config.DATA, 'anl', memchar, f"atminc.{cdate_inc}z.nc4") + dest = os.path.join(incdir, 
f"{self.task_config.CDUMP}.t{self.task_config.cyc:02d}z.atminc.nc") + + # copy increment + logger.debug(f"Copying {src} to {dest}") + inc_copy = { + 'copy': [[src, dest]] + } + FileHandler(inc_copy).sync() - # rewrite UFS-DA atmens increments - tmpl_ges_dict['MEMDIR'] = memchar - gesdir = Template.substitute_structure(template_ges, TemplateConstants.DOLLAR_CURLY_BRACE, tmpl_ges_dict.get) - atmges_fv3 = os.path.join(gesdir, f"{self.task_config.CDUMP}.t{self.task_config.previous_cycle.hour:02d}z.atmf006.nc") - atminc_jedi = os.path.join(self.task_config.DATA, 'anl', memchar, f'atminc.{cdate_inc}z.nc4') - atminc_fv3 = os.path.join(incdir, f"{self.task_config.CDUMP}.t{self.task_config.cyc:02d}z.atminc.nc") - - # Execute incpy to create the UFS model atm increment file - # TODO: use MPMD or parallelize with mpi4py - # See https://github.com/NOAA-EMC/global-workflow/pull/1373#discussion_r1173060656 - cmd = Executable(incpy) - cmd.add_default_arg(atmges_fv3) - cmd.add_default_arg(atminc_jedi) - cmd.add_default_arg(atminc_fv3) - logger.debug(f"Executing {cmd}") - cmd(output='stdout', error='stderr') + def clean(self): + super().clean() diff --git a/versions/build.gaea.ver b/versions/build.gaea.ver new file mode 100644 index 0000000000..b92fe8c1db --- /dev/null +++ b/versions/build.gaea.ver @@ -0,0 +1,6 @@ +export stack_intel_ver=2023.1.0 +export stack_cray_mpich_ver=8.1.25 +export spack_env=gsi-addon-dev + +source "${HOMEgfs:-}/versions/run.spack.ver" +export spack_mod_path="/ncrc/proj/epic/spack-stack/spack-stack-${spack_stack_ver}/envs/${spack_env}/install/modulefiles/Core" diff --git a/versions/run.gaea.ver b/versions/run.gaea.ver new file mode 100644 index 0000000000..b92fe8c1db --- /dev/null +++ b/versions/run.gaea.ver @@ -0,0 +1,6 @@ +export stack_intel_ver=2023.1.0 +export stack_cray_mpich_ver=8.1.25 +export spack_env=gsi-addon-dev + +source "${HOMEgfs:-}/versions/run.spack.ver" +export 
spack_mod_path="/ncrc/proj/epic/spack-stack/spack-stack-${spack_stack_ver}/envs/${spack_env}/install/modulefiles/Core" diff --git a/workflow/applications/applications.py b/workflow/applications/applications.py index 3a8d23f744..50a9a7cdd0 100644 --- a/workflow/applications/applications.py +++ b/workflow/applications/applications.py @@ -77,6 +77,22 @@ def __init__(self, conf: Configuration) -> None: elif wave_cdump in ['gfs', 'gdas']: self.wave_cdumps = [wave_cdump] + self.aero_anl_cdumps = None + if self.do_aero: + aero_anl_cdump = _base.get('AERO_ANL_CDUMP', 'BOTH').lower() + if aero_anl_cdump in ['both']: + self.aero_anl_cdumps = ['gfs', 'gdas'] + elif aero_anl_cdump in ['gfs', 'gdas']: + self.aero_anl_cdumps = [aero_anl_cdump] + + self.aero_fcst_cdumps = None + if self.do_aero: + aero_fcst_cdump = _base.get('AERO_FCST_CDUMP', 'BOTH').lower() + if aero_fcst_cdump in ['both']: + self.aero_fcst_cdumps = ['gfs', 'gdas'] + elif aero_fcst_cdump in ['gfs', 'gdas']: + self.aero_fcst_cdumps = [aero_fcst_cdump] + def _init_finalize(self, conf: Configuration): print("Finalizing initialize") @@ -147,7 +155,7 @@ def _source_configs(self, conf: Configuration) -> Dict[str, Any]: files += ['config.fcst', 'config.efcs'] elif config in ['atmanlinit', 'atmanlvar', 'atmanlfv3inc']: files += ['config.atmanl', f'config.{config}'] - elif config in ['atmensanlinit', 'atmensanlrun']: + elif config in ['atmensanlinit', 'atmensanlletkf', 'atmensanlfv3inc']: files += ['config.atmensanl', f'config.{config}'] elif 'wave' in config: files += ['config.wave', f'config.{config}'] diff --git a/workflow/applications/gfs_cycled.py b/workflow/applications/gfs_cycled.py index 4d785bc4da..f7f9b5b5e6 100644 --- a/workflow/applications/gfs_cycled.py +++ b/workflow/applications/gfs_cycled.py @@ -57,7 +57,7 @@ def _get_app_configs(self): if self.do_hybvar: if self.do_jediatmens: - configs += ['atmensanlinit', 'atmensanlrun', 'atmensanlfinal'] + configs += ['atmensanlinit', 'atmensanlletkf', 'atmensanlfv3inc', 'atmensanlfinal'] else: configs += ['eobs', 'eomg', 'ediag', 'eupd'] configs += ['ecen', 'esfc', 'efcs', 'echgres', 'epos', 'earc'] @@ -150,9 +150,6 @@ def get_task_names(self): gdas_gfs_common_tasks_before_fcst += ['sfcanl', 'analcalc'] - if self.do_aero: -
gdas_gfs_common_tasks_before_fcst += ['aeroanlinit', 'aeroanlrun', 'aeroanlfinal'] - if self.do_jedisnowda: gdas_gfs_common_tasks_before_fcst += ['prepsnowobs', 'snowanl'] @@ -164,7 +161,7 @@ def get_task_names(self): hybrid_after_eupd_tasks = [] if self.do_hybvar: if self.do_jediatmens: - hybrid_tasks += ['atmensanlinit', 'atmensanlrun', 'atmensanlfinal', 'echgres'] + hybrid_tasks += ['atmensanlinit', 'atmensanlletkf', 'atmensanlfv3inc', 'atmensanlfinal', 'echgres'] else: hybrid_tasks += ['eobs', 'eupd', 'echgres'] hybrid_tasks += ['ediag'] if self.lobsdiag_forenkf else ['eomg'] @@ -179,6 +176,9 @@ def get_task_names(self): if self.do_wave and 'gdas' in self.wave_cdumps: gdas_tasks += wave_prep_tasks + if self.do_aero and 'gdas' in self.aero_anl_cdumps: + gdas_tasks += ['aeroanlinit', 'aeroanlrun', 'aeroanlfinal'] + gdas_tasks += ['atmanlupp', 'atmanlprod', 'fcst'] if self.do_upp: @@ -213,6 +213,9 @@ def get_task_names(self): if self.do_wave and 'gfs' in self.wave_cdumps: gfs_tasks += wave_prep_tasks + if self.do_aero and 'gfs' in self.aero_anl_cdumps: + gfs_tasks += ['aeroanlinit', 'aeroanlrun', 'aeroanlfinal'] + gfs_tasks += ['atmanlupp', 'atmanlprod', 'fcst'] if self.do_ocean: diff --git a/workflow/applications/gfs_forecast_only.py b/workflow/applications/gfs_forecast_only.py index 89881af8c9..e219032551 100644 --- a/workflow/applications/gfs_forecast_only.py +++ b/workflow/applications/gfs_forecast_only.py @@ -87,7 +87,9 @@ def get_task_names(self): tasks = ['stage_ic'] if self.do_aero: - tasks += ['aerosol_init'] + aero_fcst_cdump = _base.get('AERO_FCST_CDUMP', 'BOTH').lower() + if self._base['CDUMP'] in aero_fcst_cdump or aero_fcst_cdump == "both": + tasks += ['aerosol_init'] if self.do_wave: tasks += ['waveinit'] diff --git a/workflow/hosts.py b/workflow/hosts.py index a17cd3f4a8..2334a3ac35 100644 --- a/workflow/hosts.py +++ b/workflow/hosts.py @@ -15,7 +15,7 @@ class Host: """ SUPPORTED_HOSTS = ['HERA', 'ORION', 'JET', 'HERCULES', - 'WCOSS2', 'S4', 
'CONTAINER', 'AWSPW'] + 'WCOSS2', 'S4', 'CONTAINER', 'AWSPW', 'GAEA'] def __init__(self, host=None): @@ -49,6 +49,8 @@ def detect(cls): machine = 'WCOSS2' elif os.path.exists('/data/prod'): machine = 'S4' + elif os.path.exists('/gpfs/f5'): + machine = 'GAEA' elif container is not None: machine = 'CONTAINER' elif pw_csp is not None: diff --git a/workflow/hosts/awspw.yaml b/workflow/hosts/awspw.yaml index c683010e0e..d2223e799e 100644 --- a/workflow/hosts/awspw.yaml +++ b/workflow/hosts/awspw.yaml @@ -7,11 +7,13 @@ STMP: '/lustre/${USER}/stmp2/' PTMP: '/lustre/${USER}/stmp4/' NOSCRUB: ${HOMEDIR} ACCOUNT: hwufscpldcld +ACCOUNT_SERVICE: hwufscpldcld SCHEDULER: slurm QUEUE: batch QUEUE_SERVICE: batch PARTITION_BATCH: compute PARTITION_SERVICE: compute +RESERVATION: '' CHGRP_RSTPROD: 'YES' CHGRP_CMD: 'chgrp rstprod' # TODO: This is not yet supported. HPSSARCH: 'YES' diff --git a/workflow/hosts/container.yaml b/workflow/hosts/container.yaml index 3fd3856679..5f4a66ac1f 100644 --- a/workflow/hosts/container.yaml +++ b/workflow/hosts/container.yaml @@ -8,10 +8,12 @@ PTMP: '/home/${USER}' NOSCRUB: $HOMEDIR SCHEDULER: none ACCOUNT: '' +ACCOUNT_SERVICE: '' QUEUE: '' QUEUE_SERVICE: '' PARTITION_BATCH: '' PARTITION_SERVICE: '' +RESERVATION: '' CHGRP_RSTPROD: 'YES' CHGRP_CMD: 'chgrp rstprod' HPSSARCH: 'NO' diff --git a/workflow/hosts/gaea.yaml b/workflow/hosts/gaea.yaml new file mode 100644 index 0000000000..7ca8420997 --- /dev/null +++ b/workflow/hosts/gaea.yaml @@ -0,0 +1,25 @@ +BASE_GIT: '/gpfs/f5/epic/proj-shared/global/glopara/data/git' +DMPDIR: '/gpfs/f5/epic/proj-shared/global/glopara/data/dump' +BASE_CPLIC: '/gpfs/f5/epic/proj-shared/global/glopara/data/ICSDIR/prototype_ICs' +PACKAGEROOT: '/gpfs/f5/epic/proj-shared/global/glopara/data/nwpara' +COMROOT: '/gpfs/f5/epic/proj-shared/global/glopara/data/com' +COMINsyn: '${COMROOT}/gfs/prod/syndat' +HOMEDIR: '/gpfs/f5/epic/scratch/${USER}' +STMP: '/gpfs/f5/epic/scratch/${USER}' +PTMP: '/gpfs/f5/epic/scratch/${USER}' +NOSCRUB: 
$HOMEDIR +ACCOUNT: epic +SCHEDULER: slurm +QUEUE: normal +QUEUE_SERVICE: normal +PARTITION_BATCH: batch +PARTITION_SERVICE: batch +CHGRP_RSTPROD: 'NO' +CHGRP_CMD: 'chgrp rstprod' +HPSSARCH: 'NO' +HPSS_PROJECT: emc-global +LOCALARCH: 'NO' +ATARDIR: '${NOSCRUB}/archive_rotdir/${PSLOT}' +MAKE_NSSTBUFR: 'NO' +MAKE_ACFTBUFR: 'NO' +SUPPORTED_RESOLUTIONS: ['C1152', 'C768', 'C384', 'C192', 'C96', 'C48'] diff --git a/workflow/hosts/hera.yaml b/workflow/hosts/hera.yaml index 1393694153..8cf7363605 100644 --- a/workflow/hosts/hera.yaml +++ b/workflow/hosts/hera.yaml @@ -8,11 +8,13 @@ STMP: '/scratch1/NCEPDEV/stmp2/${USER}' PTMP: '/scratch1/NCEPDEV/stmp4/${USER}' NOSCRUB: $HOMEDIR ACCOUNT: fv3-cpu +ACCOUNT_SERVICE: fv3-cpu SCHEDULER: slurm QUEUE: batch QUEUE_SERVICE: batch PARTITION_BATCH: hera PARTITION_SERVICE: service +RESERVATION: '' CHGRP_RSTPROD: 'YES' CHGRP_CMD: 'chgrp rstprod' HPSSARCH: 'YES' diff --git a/workflow/hosts/hercules.yaml b/workflow/hosts/hercules.yaml index 2623672709..adebdfe23d 100644 --- a/workflow/hosts/hercules.yaml +++ b/workflow/hosts/hercules.yaml @@ -1,18 +1,20 @@ -BASE_GIT: '/work/noaa/global/glopara/git' +BASE_GIT: '/work/noaa/global/glopara/git_rocky9' DMPDIR: '/work/noaa/rstprod/dump' BASE_CPLIC: '/work/noaa/global/glopara/data/ICSDIR/prototype_ICs' PACKAGEROOT: '/work/noaa/global/glopara/nwpara' COMINsyn: '/work/noaa/global/glopara/com/gfs/prod/syndat' HOMEDIR: '/work/noaa/global/${USER}' -STMP: '/work/noaa/stmp/${USER}' -PTMP: '/work/noaa/stmp/${USER}' +STMP: '/work/noaa/stmp/${USER}/HERCULES' +PTMP: '/work/noaa/stmp/${USER}/HERCULES' NOSCRUB: $HOMEDIR SCHEDULER: slurm ACCOUNT: fv3-cpu +ACCOUNT_SERVICE: fv3-cpu QUEUE: batch QUEUE_SERVICE: batch PARTITION_BATCH: hercules PARTITION_SERVICE: service +RESERVATION: '' CHGRP_RSTPROD: 'YES' CHGRP_CMD: 'chgrp rstprod' HPSSARCH: 'NO' diff --git a/workflow/hosts/jet.yaml b/workflow/hosts/jet.yaml index 28b7571b32..fd556fadc7 100644 --- a/workflow/hosts/jet.yaml +++ b/workflow/hosts/jet.yaml @@ -8,11 
+8,13 @@ STMP: '/lfs4/HFIP/hfv3gfs/${USER}/stmp' PTMP: '/lfs4/HFIP/hfv3gfs/${USER}/ptmp' NOSCRUB: $HOMEDIR ACCOUNT: hfv3gfs +ACCOUNT_SERVICE: hfv3gfs SCHEDULER: slurm QUEUE: batch QUEUE_SERVICE: batch PARTITION_BATCH: kjet PARTITION_SERVICE: service +RESERVATION: '' CHGRP_RSTPROD: 'YES' CHGRP_CMD: 'chgrp rstprod' HPSSARCH: 'YES' diff --git a/workflow/hosts/orion.yaml b/workflow/hosts/orion.yaml index dd95def386..ba289df1e3 100644 --- a/workflow/hosts/orion.yaml +++ b/workflow/hosts/orion.yaml @@ -4,15 +4,17 @@ BASE_CPLIC: '/work/noaa/global/glopara/data/ICSDIR/prototype_ICs' PACKAGEROOT: '/work/noaa/global/glopara/nwpara' COMINsyn: '/work/noaa/global/glopara/com/gfs/prod/syndat' HOMEDIR: '/work/noaa/global/${USER}' -STMP: '/work/noaa/stmp/${USER}' -PTMP: '/work/noaa/stmp/${USER}' +STMP: '/work/noaa/stmp/${USER}/ORION' +PTMP: '/work/noaa/stmp/${USER}/ORION' NOSCRUB: $HOMEDIR SCHEDULER: slurm ACCOUNT: fv3-cpu +ACCOUNT_SERVICE: fv3-cpu QUEUE: batch QUEUE_SERVICE: batch PARTITION_BATCH: orion PARTITION_SERVICE: service +RESERVATION: '' CHGRP_RSTPROD: 'YES' CHGRP_CMD: 'chgrp rstprod' HPSSARCH: 'NO' diff --git a/workflow/hosts/s4.yaml b/workflow/hosts/s4.yaml index 52a9f7a365..543912cf23 100644 --- a/workflow/hosts/s4.yaml +++ b/workflow/hosts/s4.yaml @@ -8,11 +8,13 @@ STMP: '/scratch/users/${USER}' PTMP: '/scratch/users/${USER}' NOSCRUB: ${HOMEDIR} ACCOUNT: star +ACCOUNT_SERVICE: star SCHEDULER: slurm QUEUE: s4 QUEUE_SERVICE: serial PARTITION_BATCH: s4 PARTITION_SERVICE: serial +RESERVATION: '' CHGRP_RSTPROD: 'NO' CHGRP_CMD: 'ls' HPSSARCH: 'NO' diff --git a/workflow/hosts/wcoss2.yaml b/workflow/hosts/wcoss2.yaml index ba203a8413..4943495289 100644 --- a/workflow/hosts/wcoss2.yaml +++ b/workflow/hosts/wcoss2.yaml @@ -8,11 +8,13 @@ STMP: '/lfs/h2/emc/stmp/${USER}' PTMP: '/lfs/h2/emc/ptmp/${USER}' NOSCRUB: $HOMEDIR ACCOUNT: 'GFS-DEV' +ACCOUNT_SERVICE: 'GFS-DEV' SCHEDULER: pbspro QUEUE: 'dev' QUEUE_SERVICE: 'dev_transfer' PARTITION_BATCH: '' PARTITION_SERVICE: '' 
+RESERVATION: '' CHGRP_RSTPROD: 'YES' CHGRP_CMD: 'chgrp rstprod' HPSSARCH: 'NO' diff --git a/workflow/rocoto/gfs_tasks.py b/workflow/rocoto/gfs_tasks.py index 6125a33dec..24da085bcc 100644 --- a/workflow/rocoto/gfs_tasks.py +++ b/workflow/rocoto/gfs_tasks.py @@ -829,7 +829,7 @@ def _fcst_forecast_only(self): dep_dict = {'type': 'task', 'name': f'{self.cdump}{wave_job}'} dependencies.append(rocoto.add_dependency(dep_dict)) - if self.app_config.do_aero: + if self.app_config.do_aero and self.cdump in self.app_config.aero_fcst_cdumps: # Calculate offset based on CDUMP = gfs | gdas interval = None if self.cdump in ['gfs']: @@ -873,7 +873,7 @@ def _fcst_cycled(self): dep_dict = {'type': 'task', 'name': f'{self.cdump}ocnanalpost'} dependencies.append(rocoto.add_dependency(dep_dict)) - if self.app_config.do_aero: + if self.app_config.do_aero and self.cdump in self.app_config.aero_anl_cdumps: dep_dict = {'type': 'task', 'name': f'{self.cdump}aeroanlfinal'} dependencies.append(rocoto.add_dependency(dep_dict)) @@ -2397,7 +2397,7 @@ def atmensanlinit(self): return task - def atmensanlrun(self): + def atmensanlletkf(self): deps = [] dep_dict = {'type': 'task', 'name': f'{self.cdump}atmensanlinit'} @@ -2406,14 +2406,40 @@ def atmensanlrun(self): deps.append(rocoto.add_dependency(dep_dict)) dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) - resources = self.get_resource('atmensanlrun') - task_name = f'{self.cdump}atmensanlrun' + resources = self.get_resource('atmensanlletkf') + task_name = f'{self.cdump}atmensanlletkf' task_dict = {'task_name': task_name, 'resources': resources, 'dependency': dependencies, 'envars': self.envars, 'cycledef': self.cdump.replace('enkf', ''), - 'command': f'{self.HOMEgfs}/jobs/rocoto/atmensanlrun.sh', + 'command': f'{self.HOMEgfs}/jobs/rocoto/atmensanlletkf.sh', + 'job_name': f'{self.pslot}_{task_name}_@H', + 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', + 'maxtries': '&MAXTRIES;' + } + + task = 
rocoto.create_task(task_dict) + + return task + + def atmensanlfv3inc(self): + + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}atmensanlletkf'} + deps.append(rocoto.add_dependency(dep_dict)) + dep_dict = {'type': 'metatask', 'name': 'enkfgdasepmn', 'offset': f"-{timedelta_to_HMS(self._base['cycle_interval'])}"} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + + resources = self.get_resource('atmensanlfv3inc') + task_name = f'{self.cdump}atmensanlfv3inc' + task_dict = {'task_name': task_name, + 'resources': resources, + 'dependency': dependencies, + 'envars': self.envars, + 'cycledef': self.cdump.replace('enkf', ''), + 'command': f'{self.HOMEgfs}/jobs/rocoto/atmensanlfv3inc.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -2426,7 +2452,7 @@ def atmensanlrun(self): def atmensanlfinal(self): deps = [] - dep_dict = {'type': 'task', 'name': f'{self.cdump}atmensanlrun'} + dep_dict = {'type': 'task', 'name': f'{self.cdump}atmensanlfv3inc'} deps.append(rocoto.add_dependency(dep_dict)) dependencies = rocoto.create_dependency(dep=deps) diff --git a/workflow/rocoto/tasks.py b/workflow/rocoto/tasks.py index a8b4eb9fac..52d5466d10 100644 --- a/workflow/rocoto/tasks.py +++ b/workflow/rocoto/tasks.py @@ -18,7 +18,7 @@ class Tasks: 'ocnanalprep', 'ocnanalbmat', 'ocnanalrun', 'ocnanalecen', 'ocnanalchkpt', 'ocnanalpost', 'ocnanalvrfy', 'earc', 'ecen', 'echgres', 'ediag', 'efcs', 'eobs', 'eomg', 'epos', 'esfc', 'eupd', - 'atmensanlinit', 'atmensanlrun', 'atmensanlfinal', + 'atmensanlinit', 'atmensanlletkf', 'atmensanlfv3inc', 'atmensanlfinal', 'aeroanlinit', 'aeroanlrun', 'aeroanlfinal', 'prepsnowobs', 'snowanl', 'fcst', @@ -169,7 +169,7 @@ def get_resource(self, task_name): task_config = self._configs[task_name] - account = task_config['ACCOUNT'] + account = task_config['ACCOUNT_SERVICE'] if task_name in 
Tasks.SERVICE_TASKS else task_config['ACCOUNT'] walltime = task_config[f'wtime_{task_name}'] if self.cdump in ['gfs'] and f'wtime_{task_name}_gfs' in task_config.keys(): @@ -208,6 +208,8 @@ def get_resource(self, task_name): native += ':shared' elif scheduler in ['slurm']: native = '--export=NONE' + if task_config['RESERVATION'] != "": + native += '' if task_name in Tasks.SERVICE_TASKS else ' --reservation=' + task_config['RESERVATION'] queue = task_config['QUEUE_SERVICE'] if task_name in Tasks.SERVICE_TASKS else task_config['QUEUE'] diff --git a/workflow/setup_expt.py b/workflow/setup_expt.py index 9602b66b60..97d25dc15a 100755 --- a/workflow/setup_expt.py +++ b/workflow/setup_expt.py @@ -96,6 +96,7 @@ def fill_ROTDIR_cycled(host, inputs): dst_ocn_rst_dir = os.path.join('model_data', 'ocean', 'restart') dst_ocn_anl_dir = os.path.join('analysis', 'ocean') dst_ice_rst_dir = os.path.join('model_data', 'ice', 'restart') + dst_ice_anl_dir = os.path.join('analysis', 'ice') dst_atm_anl_dir = os.path.join('analysis', 'atmos') if flat_structure: @@ -111,6 +112,7 @@ def fill_ROTDIR_cycled(host, inputs): src_ocn_rst_dir = os.path.join('ocean', 'RESTART') src_ocn_anl_dir = 'ocean' src_ice_rst_dir = os.path.join('ice', 'RESTART') + src_ice_anl_dir = dst_ice_anl_dir src_atm_anl_dir = 'atmos' else: src_atm_dir = dst_atm_dir @@ -118,6 +120,7 @@ def fill_ROTDIR_cycled(host, inputs): src_ocn_rst_dir = dst_ocn_rst_dir src_ocn_anl_dir = dst_ocn_anl_dir src_ice_rst_dir = dst_ice_rst_dir + src_ice_anl_dir = dst_ice_anl_dir src_atm_anl_dir = dst_atm_anl_dir def link_files_from_src_to_dst(src_dir, dst_dir): @@ -203,8 +206,9 @@ def link_files_from_src_to_dst(src_dir, dst_dir): # Link ice files if do_ice: - dst_dir = os.path.join(rotdir, previous_cycle_dir, dst_ice_rst_dir) - src_dir = os.path.join(inputs.icsdir, previous_cycle_dir, src_ice_rst_dir) + # First 1/2 cycle needs a CICE6 analysis restart + src_dir = os.path.join(inputs.icsdir, current_cycle_dir, src_ice_anl_dir) + dst_dir = 
os.path.join(rotdir, current_cycle_dir, src_ice_anl_dir) makedirs_if_missing(dst_dir) link_files_from_src_to_dst(src_dir, dst_dir)