diff --git a/ci/Jenkinsfile b/ci/Jenkinsfile
index b1334e5344..eba25ca99a 100644
--- a/ci/Jenkinsfile
+++ b/ci/Jenkinsfile
@@ -19,6 +19,7 @@ pipeline {
     stages {
         // This initial stage is used to get the Machine name from the GitHub labels on the PR
         // which is used to designate the Nodes in the Jenkins Controler by the agent label
        // Each Jenknis Node is connected to said machine via an JAVA agent via an ssh tunnel
+        // no op 2
        stage('Get Machine') {
            agent { label 'built-in' }
@@ -111,7 +112,7 @@ pipeline {
                    try {
                        sh(script: "${HOMEgfs}/ci/scripts/utils/publish_logs.py --file ${error_logs} --repo PR_BUILD_${env.CHANGE_ID}")
                        gist_url=sh(script: "${HOMEgfs}/ci/scripts/utils/publish_logs.py --file ${error_logs} --gist PR_BUILD_${env.CHANGE_ID}", returnStdout: true).trim()
-                        sh(script: """${GH} pr comment ${env.CHANGE_ID} --repo ${repo_url} --body "Build **FAILED** on **${Machine}** with error logs:\n\\`\\`\\`${error_logs_message}\n\\`\\`\\`\n\nFollow link here to view the contents of the above file(s): [(link)](${gist_url})" """)
+                        sh(script: """${GH} pr comment ${env.CHANGE_ID} --repo ${repo_url} --body "Build **FAILED** on **${Machine}** with error logs:\n\\`\\`\\`\n${error_logs_message}\\`\\`\\`\n\nFollow link here to view the contents of the above file(s): [(link)](${gist_url})" """)
                    } catch (Exception error_comment) {
                        echo "Failed to comment on PR: ${error_comment.getMessage()}"
                    }
@@ -215,6 +216,7 @@ pipeline {
                    STATUS = 'Failed'
                    try {
                        sh(script: """${GH} pr edit ${env.CHANGE_ID} --repo ${repo_url} --remove-label "CI-${Machine}-Running" --add-label "CI-${Machine}-${STATUS}" """, returnStatus: true)
+                        sh(script: """${GH} pr comment ${env.CHANGE_ID} --repo ${repo_url} --body "Experiment ${Case} **FAILED** on ${Machine}\nin\\`${HOME}/RUNTESTS/${pslot}\\`" """)
                    } catch (Exception e) {
                        echo "Failed to update label from Running to ${STATUS}: ${e.getMessage()}"
                    }
diff --git a/ci/scripts/utils/rocotostat.py b/ci/scripts/utils/rocotostat.py
index 730def1d27..0cb0c6c09f 100755
--- a/ci/scripts/utils/rocotostat.py
+++ b/ci/scripts/utils/rocotostat.py
@@ -95,7 +95,11 @@ def rocoto_statcount():
         error_return = rocoto_status['UNKNOWN']
         rocoto_state = 'UNKNOWN'
     elif rocoto_status['RUNNING'] + rocoto_status['SUBMITTING'] + rocoto_status['QUEUED'] == 0:
-        error_return = -3
+        #
+        # TODO for now a STALLED state will be just a warning as it can
+        # produce a false negative if there is a timestamp on a file dependency.
+        #
+        # error_return = -3
         rocoto_state = 'STALLED'
     else:
         rocoto_state = 'RUNNING'
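
Note: with error_return = -3 commented out above, a STALLED workflow now exits zero, so only the reported state text distinguishes it from a healthy run. A minimal sketch of how a caller could still surface that condition as a warning, assuming the utility prints the final state on stdout; the wrapper below is illustrative only and not part of this change:

    # Hypothetical CI wrapper around rocotostat.py
    state=$("${HOMEgfs}/ci/scripts/utils/rocotostat.py" "$@")
    rc=$?
    if [[ "${state}" == *"STALLED"* ]]; then
        echo "WARNING: rocoto reports a STALLED workflow (possible file-dependency timestamp issue)" >&2
    fi
    exit "${rc}"
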
echo "argument can be any one of the following:" - echo "atmanlrun atmensanlrun aeroanlrun snowanl" + echo "atmanlvar atmanlfv3inc atmensanlrun aeroanlrun snowanl" echo "anal sfcanl fcst post metp" echo "eobs eupd ecen efcs epos" echo "postsnd awips gempak" diff --git a/env/HERA.env b/env/HERA.env index fc3eda29dc..edbe420496 100755 --- a/env/HERA.env +++ b/env/HERA.env @@ -4,7 +4,7 @@ if [[ $# -ne 1 ]]; then echo "Must specify an input argument to set runtime environment variables!" echo "argument can be any one of the following:" - echo "atmanlrun atmensanlrun aeroanlrun snowanl" + echo "atmanlvar atmensanlrun aeroanlrun snowanl atmanlfv3inc" echo "anal sfcanl fcst post metp" echo "eobs eupd ecen efcs epos" echo "postsnd awips gempak" @@ -56,13 +56,13 @@ elif [[ "${step}" = "waveinit" ]] || [[ "${step}" = "waveprep" ]] || [[ "${step} export wavempexec=${launcher} export wave_mpmd=${mpmd_opt} -elif [[ "${step}" = "atmanlrun" ]]; then +elif [[ "${step}" = "atmanlvar" ]]; then - nth_max=$((npe_node_max / npe_node_atmanlrun)) + nth_max=$((npe_node_max / npe_node_atmanlvar)) - export NTHREADS_ATMANL=${nth_atmanlrun:-${nth_max}} - [[ ${NTHREADS_ATMANL} -gt ${nth_max} ]] && export NTHREADS_ATMANL=${nth_max} - export APRUN_ATMANL="${launcher} -n ${npe_atmanlrun} --cpus-per-task=${NTHREADS_ATMANL}" + export NTHREADS_ATMANLVAR=${nth_atmanlvar:-${nth_max}} + [[ ${NTHREADS_ATMANLVAR} -gt ${nth_max} ]] && export NTHREADS_ATMANLVAR=${nth_max} + export APRUN_ATMANLVAR="${launcher} -n ${npe_atmanlvar} --cpus-per-task=${NTHREADS_ATMANLVAR}" elif [[ "${step}" = "atmensanlrun" ]]; then @@ -82,6 +82,14 @@ elif [[ "${step}" = "aeroanlrun" ]]; then [[ ${NTHREADS_AEROANL} -gt ${nth_max} ]] && export NTHREADS_AEROANL=${nth_max} export APRUN_AEROANL="${launcher} -n ${npe_aeroanlrun} --cpus-per-task=${NTHREADS_AEROANL}" +elif [[ "${step}" = "atmanlfv3inc" ]]; then + + nth_max=$((npe_node_max / npe_node_atmanlfv3inc)) + + export NTHREADS_ATMANLFV3INC=${nth_atmanlfv3inc:-${nth_max}} + [[ ${NTHREADS_ATMANLFV3INC} -gt ${nth_max} ]] && export NTHREADS_ATMANLFV3INC=${nth_max} + export APRUN_ATMANLFV3INC="${launcher} -n ${npe_atmanlfv3inc} --cpus-per-task=${NTHREADS_ATMANLFV3INC}" + elif [[ "${step}" = "snowanl" ]]; then nth_max=$((npe_node_max / npe_node_snowanl)) diff --git a/env/HERCULES.env b/env/HERCULES.env index 7d2aa5f8d0..da5ad972f2 100755 --- a/env/HERCULES.env +++ b/env/HERCULES.env @@ -53,13 +53,21 @@ case ${step} in export wave_mpmd=${mpmd_opt} ;; - "atmanlrun") + "atmanlvar") - nth_max=$((npe_node_max / npe_node_atmanlrun)) + nth_max=$((npe_node_max / npe_node_atmanlvar)) - export NTHREADS_ATMANL=${nth_atmanlrun:-${nth_max}} - [[ ${NTHREADS_ATMANL} -gt ${nth_max} ]] && export NTHREADS_ATMANL=${nth_max} - export APRUN_ATMANL="${launcher} -n ${npe_atmanlrun} --cpus-per-task=${NTHREADS_ATMANL}" + export NTHREADS_ATMANLVAR=${nth_atmanlvar:-${nth_max}} + [[ ${NTHREADS_ATMANLVAR} -gt ${nth_max} ]] && export NTHREADS_ATMANLVAR=${nth_max} + export APRUN_ATMANLVAR="${launcher} -n ${npe_atmanlvar} --cpus-per-task=${NTHREADS_ATMANLVAR}" + ;; + "atmanlfv3inc") + + nth_max=$((npe_node_max / npe_node_atmanlfv3inc)) + + export NTHREADS_ATMANLFV3INC=${nth_atmanlfv3inc:-${nth_max}} + [[ ${NTHREADS_ATMANLFV3INC} -gt ${nth_max} ]] && export NTHREADS_ATMANLFV3INC=${nth_max} + export APRUN_ATMANLFV3INC="${launcher} -n ${npe_atmanlfv3inc} --cpus-per-task=${NTHREADS_ATMANLFV3INC}" ;; "atmensanlrun") diff --git a/env/JET.env b/env/JET.env index df6666d8dc..3b4c2c2c53 100755 --- a/env/JET.env +++ b/env/JET.env @@ -4,7 +4,7 @@ if 
[[ $# -ne 1 ]]; then echo "Must specify an input argument to set runtime environment variables!" echo "argument can be any one of the following:" - echo "atmanlrun atmensanlrun aeroanlrun snowanl" + echo "atmanlvar atmensanlrun aeroanlrun snowanl atmanlfv3inc" echo "anal sfcanl fcst post metp" echo "eobs eupd ecen efcs epos" echo "postsnd awips gempak" @@ -44,13 +44,13 @@ elif [[ "${step}" = "waveinit" ]] || [[ "${step}" = "waveprep" ]] || [[ "${step} export wavempexec=${launcher} export wave_mpmd=${mpmd_opt} -elif [[ "${step}" = "atmanlrun" ]]; then +elif [[ "${step}" = "atmanlvar" ]]; then - nth_max=$((npe_node_max / npe_node_atmanlrun)) + nth_max=$((npe_node_max / npe_node_atmanlvar)) - export NTHREADS_ATMANL=${nth_atmanlrun:-${nth_max}} - [[ ${NTHREADS_ATMANL} -gt ${nth_max} ]] && export NTHREADS_ATMANL=${nth_max} - export APRUN_ATMANL="${launcher} -n ${npe_atmanlrun}" + export NTHREADS_ATMANLVAR=${nth_atmanlvar:-${nth_max}} + [[ ${NTHREADS_ATMANLVAR} -gt ${nth_max} ]] && export NTHREADS_ATMANLVAR=${nth_max} + export APRUN_ATMANLVAR="${launcher} -n ${npe_atmanlvar}" elif [[ "${step}" = "atmensanlrun" ]]; then @@ -80,6 +80,14 @@ elif [[ "${step}" = "snowanl" ]]; then export APRUN_APPLY_INCR="${launcher} -n 6" +elif [[ "${step}" = "atmanlfv3inc" ]]; then + + nth_max=$((npe_node_max / npe_node_atmanlfv3inc)) + + export NTHREADS_ATMANLFV3INC=${nth_atmanlfv3inc:-${nth_max}} + [[ ${NTHREADS_ATMANLFV3INC} -gt ${nth_max} ]] && export NTHREADS_ATMANLFV3INC=${nth_max} + export APRUN_ATMANLFV3INC="${launcher} -n ${npe_atmanlfv3inc}" + elif [[ "${step}" = "ocnanalbmat" ]]; then export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}" diff --git a/env/ORION.env b/env/ORION.env index 17d0d24d97..6aac84a169 100755 --- a/env/ORION.env +++ b/env/ORION.env @@ -4,7 +4,7 @@ if [[ $# -ne 1 ]]; then echo "Must specify an input argument to set runtime environment variables!" 
echo "argument can be any one of the following:" - echo "atmanlrun atmensanlrun aeroanlrun snowanl" + echo "atmanlvar atmensanlrun aeroanlrun snowanl atmanlfv3inc" echo "anal sfcanl fcst post metp" echo "eobs eupd ecen efcs epos" echo "postsnd awips gempak" @@ -52,13 +52,13 @@ elif [[ "${step}" = "waveinit" ]] || [[ "${step}" = "waveprep" ]] || [[ "${step} export wavempexec=${launcher} export wave_mpmd=${mpmd_opt} -elif [[ "${step}" = "atmanlrun" ]]; then +elif [[ "${step}" = "atmanlvar" ]]; then - nth_max=$((npe_node_max / npe_node_atmanlrun)) + nth_max=$((npe_node_max / npe_node_atmanlvar)) - export NTHREADS_ATMANL=${nth_atmanlrun:-${nth_max}} - [[ ${NTHREADS_ATMANL} -gt ${nth_max} ]] && export NTHREADS_ATMANL=${nth_max} - export APRUN_ATMANL="${launcher} -n ${npe_atmanlrun} --cpus-per-task=${NTHREADS_ATMANL}" + export NTHREADS_ATMANLVAR=${nth_atmanlvar:-${nth_max}} + [[ ${NTHREADS_ATMANLVAR} -gt ${nth_max} ]] && export NTHREADS_ATMANLVAR=${nth_max} + export APRUN_ATMANLVAR="${launcher} -n ${npe_atmanlvar} --cpus-per-task=${NTHREADS_ATMANLVAR}" elif [[ "${step}" = "atmensanlrun" ]]; then @@ -88,6 +88,14 @@ elif [[ "${step}" = "snowanl" ]]; then export APRUN_APPLY_INCR="${launcher} -n 6" +elif [[ "${step}" = "atmanlfv3inc" ]]; then + + nth_max=$((npe_node_max / npe_node_atmanlfv3inc)) + + export NTHREADS_ATMANLFV3INC=${nth_atmanlfv3inc:-${nth_max}} + [[ ${NTHREADS_ATMANLFV3INC} -gt ${nth_max} ]] && export NTHREADS_ATMANLFV3INC=${nth_max} + export APRUN_ATMANLFV3INC="${launcher} -n ${npe_atmanlfv3inc} --cpus-per-task=${NTHREADS_ATMANLFV3INC}" + elif [[ "${step}" = "ocnanalbmat" ]]; then export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}" diff --git a/env/S4.env b/env/S4.env index ab564eb974..9cbf8b7bdb 100755 --- a/env/S4.env +++ b/env/S4.env @@ -4,7 +4,7 @@ if [[ $# -ne 1 ]]; then echo "Must specify an input argument to set runtime environment variables!" echo "argument can be any one of the following:" - echo "atmanlrun atmensanlrun aeroanlrun snowanl" + echo "atmanlvar atmensanlrun aeroanlrun snowanl atmanlfv3inc" echo "anal sfcanl fcst post metp" echo "eobs eupd ecen efcs epos" echo "postsnd awips gempak" @@ -44,13 +44,13 @@ elif [[ "${step}" = "waveinit" ]] || [[ "${step}" = "waveprep" ]] || [[ "${step} export wavempexec=${launcher} export wave_mpmd=${mpmd_opt} -elif [[ "${step}" = "atmanlrun" ]]; then +elif [[ "${step}" = "atmanlvar" ]]; then - nth_max=$((npe_node_max / npe_node_atmanlrun)) + nth_max=$((npe_node_max / npe_node_atmanlvar)) - export NTHREADS_ATMANL=${nth_atmanlrun:-${nth_max}} - [[ ${NTHREADS_ATMANL} -gt ${nth_max} ]] && export NTHREADS_ATMANL=${nth_max} - export APRUN_ATMANL="${launcher} -n ${npe_atmanlrun}" + export NTHREADS_ATMANLVAR=${nth_atmanlvar:-${nth_max}} + [[ ${NTHREADS_ATMANLVAR} -gt ${nth_max} ]] && export NTHREADS_ATMANLVAR=${nth_max} + export APRUN_ATMANLVAR="${launcher} -n ${npe_atmanlvar}" elif [[ "${step}" = "atmensanlrun" ]]; then @@ -80,6 +80,14 @@ elif [[ "${step}" = "snowanl" ]]; then export APRUN_APPLY_INCR="${launcher} -n 6" +elif [[ "${step}" = "atmanlfv3inc" ]]; then + + nth_max=$((npe_node_max / npe_node_atmanlfv3inc)) + + export NTHREADS_ATMANLFV3INC=${nth_atmanlfv3inc:-${nth_max}} + [[ ${NTHREADS_ATMANLFV3INC} -gt ${nth_max} ]] && export NTHREADS_ATMANLFV3INC=${nth_max} + export APRUN_ATMANLFV3INC="${launcher} -n ${npe_atmanlfv3inc}" + elif [[ "${step}" = "ocnanalbmat" ]]; then echo "WARNING: ${step} is not enabled on S4!" 
diff --git a/env/WCOSS2.env b/env/WCOSS2.env
index 4533629edc..ba55495655 100755
--- a/env/WCOSS2.env
+++ b/env/WCOSS2.env
@@ -4,7 +4,7 @@ if [[ $# -ne 1 ]]; then
 echo "Must specify an input argument to set runtime environment variables!"
 echo "argument can be any one of the following:"
-echo "atmanlrun atmensanlrun aeroanlrun snowanl"
+echo "atmanlvar atmensanlrun aeroanlrun snowanl atmanlfv3inc"
 echo "anal sfcanl fcst post metp"
 echo "eobs eupd ecen esfc efcs epos"
 echo "postsnd awips gempak"
@@ -38,13 +38,13 @@ elif [[ "${step}" = "waveinit" ]] || [[ "${step}" = "waveprep" ]] || [[ "${step}
 export wavempexec="${launcher} -np"
 export wave_mpmd=${mpmd_opt}
-elif [[ "${step}" = "atmanlrun" ]]; then
+elif [[ "${step}" = "atmanlvar" ]]; then
-nth_max=$((npe_node_max / npe_node_atmanlrun))
+nth_max=$((npe_node_max / npe_node_atmanlvar))
-export NTHREADS_ATMANL=${nth_atmanlrun:-${nth_max}}
-[[ ${NTHREADS_ATMANL} -gt ${nth_max} ]] && export NTHREADS_ATMANL=${nth_max}
-export APRUN_ATMANL="${launcher} -n ${npe_atmanlrun}"
+export NTHREADS_ATMANLVAR=${nth_atmanlvar:-${nth_max}}
+[[ ${NTHREADS_ATMANLVAR} -gt ${nth_max} ]] && export NTHREADS_ATMANLVAR=${nth_max}
+export APRUN_ATMANLVAR="${launcher} -n ${npe_atmanlvar}"
 elif [[ "${step}" = "atmensanlrun" ]]; then
@@ -74,6 +74,14 @@ elif [[ "${step}" = "snowanl" ]]; then
 export APRUN_APPLY_INCR="${launcher} -n 6"
+elif [[ "${step}" = "atmanlfv3inc" ]]; then
+
+nth_max=$((npe_node_max / npe_node_atmanlfv3inc))
+
+export NTHREADS_ATMANLFV3INC=${nth_atmanlfv3inc:-${nth_max}}
+[[ ${NTHREADS_ATMANLFV3INC} -gt ${nth_max} ]] && export NTHREADS_ATMANLFV3INC=${nth_max}
+export APRUN_ATMANLFV3INC="${launcher} -n ${npe_atmanlfv3inc}"
+
 elif [[ "${step}" = "anal" ]] || [[ "${step}" = "analcalc" ]]; then
 export OMP_PLACES=cores
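
All eight platform files above follow the same contract: they are sourced with a single step name and translate the npe_*/nth_* values exported by config.resources (updated later in this diff) into NTHREADS_* and APRUN_* variables; only the trailing launcher options differ by machine. A quick hand check of the new step's plumbing, assuming an interactive shell where EXPDIR, HOMEgfs, machine and npe_node_max are already set up as they would be inside a job (this snippet is illustrative, not part of the workflow):

    step="atmanlfv3inc"
    source "${EXPDIR}/config.resources" "${step}"   # exports npe_, nth_, npe_node_, memory_ for the step
    source "${HOMEgfs}/env/HERA.env" "${step}"      # derives NTHREADS_ATMANLFV3INC and APRUN_ATMANLFV3INC
    echo "${APRUN_ATMANLFV3INC}"
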
diff --git a/jobs/JGLOBAL_ATM_ANALYSIS_FV3_INCREMENT b/jobs/JGLOBAL_ATM_ANALYSIS_FV3_INCREMENT
new file mode 100755
index 0000000000..9a9a476065
--- /dev/null
+++ b/jobs/JGLOBAL_ATM_ANALYSIS_FV3_INCREMENT
@@ -0,0 +1,37 @@
+#! /usr/bin/env bash
+
+source "${HOMEgfs}/ush/preamble.sh"
+export WIPE_DATA="NO"
+export DATA=${DATA:-${DATAROOT}/${RUN}atmanl_${cyc}}
+source "${HOMEgfs}/ush/jjob_header.sh" -e "atmanlfv3inc" -c "base atmanl atmanlfv3inc"
+
+##############################################
+# Set variables used in the script
+##############################################
+
+
+##############################################
+# Begin JOB SPECIFIC work
+##############################################
+
+
+###############################################################
+# Run relevant script
+
+EXSCRIPT=${GDASATMRUNSH:-${SCRgfs}/exglobal_atm_analysis_fv3_increment.py}
+${EXSCRIPT}
+status=$?
+[[ ${status} -ne 0 ]] && exit "${status}"
+
+##############################################
+# End JOB SPECIFIC work
+##############################################
+
+##############################################
+# Final processing
+##############################################
+if [[ -e "${pgmout}" ]] ; then
+  cat "${pgmout}"
+fi
+
+exit 0
diff --git a/jobs/JGLOBAL_ATM_ANALYSIS_RUN b/jobs/JGLOBAL_ATM_ANALYSIS_VARIATIONAL
similarity index 84%
rename from jobs/JGLOBAL_ATM_ANALYSIS_RUN
rename to jobs/JGLOBAL_ATM_ANALYSIS_VARIATIONAL
index 2105d719de..552eccf911 100755
--- a/jobs/JGLOBAL_ATM_ANALYSIS_RUN
+++ b/jobs/JGLOBAL_ATM_ANALYSIS_VARIATIONAL
@@ -3,7 +3,7 @@
 source "${HOMEgfs}/ush/preamble.sh"
 export WIPE_DATA="NO"
 export DATA=${DATA:-${DATAROOT}/${RUN}atmanl_${cyc}}
-source "${HOMEgfs}/ush/jjob_header.sh" -e "atmanlrun" -c "base atmanl atmanlrun"
+source "${HOMEgfs}/ush/jjob_header.sh" -e "atmanlvar" -c "base atmanl atmanlvar"
 ##############################################
 # Set variables used in the script
@@ -18,7 +18,7 @@ source "${HOMEgfs}/ush/jjob_header.sh" -e "atmanlrun" -c "base atmanl atmanlrun"
 ###############################################################
 # Run relevant script
-EXSCRIPT=${GDASATMRUNSH:-${SCRgfs}/exglobal_atm_analysis_run.py}
+EXSCRIPT=${GDASATMRUNSH:-${SCRgfs}/exglobal_atm_analysis_variational.py}
 ${EXSCRIPT}
 status=$?
 [[ ${status} -ne 0 ]] && exit "${status}"
diff --git a/jobs/rocoto/atmanlfv3inc.sh b/jobs/rocoto/atmanlfv3inc.sh
new file mode 100755
index 0000000000..effc18cee5
--- /dev/null
+++ b/jobs/rocoto/atmanlfv3inc.sh
@@ -0,0 +1,24 @@
+#! /usr/bin/env bash
+
+source "${HOMEgfs}/ush/preamble.sh"
+
+###############################################################
+# Source UFSDA workflow modules
+. "${HOMEgfs}/ush/load_ufsda_modules.sh"
+status=$?
+[[ ${status} -ne 0 ]] && exit "${status}"
+
+export job="atmanlfv3inc"
+export jobid="${job}.$$"
+
+###############################################################
+# setup python path for workflow utilities and tasks
+wxflowPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/wxflow/src"
+PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${wxflowPATH}"
+export PYTHONPATH
+
+###############################################################
+# Execute the JJOB
+"${HOMEgfs}/jobs/JGLOBAL_ATM_ANALYSIS_FV3_INCREMENT"
+status=$?
+exit "${status}"
diff --git a/jobs/rocoto/atmanlrun.sh b/jobs/rocoto/atmanlvar.sh
similarity index 89%
rename from jobs/rocoto/atmanlrun.sh
rename to jobs/rocoto/atmanlvar.sh
index 1b87cb4074..812e3c706a 100755
--- a/jobs/rocoto/atmanlrun.sh
+++ b/jobs/rocoto/atmanlvar.sh
@@ -8,7 +8,7 @@ source "${HOMEgfs}/ush/preamble.sh"
 status=$?
 [[ ${status} -ne 0 ]] && exit "${status}"
-export job="atmanlrun"
+export job="atmanlvar"
 export jobid="${job}.$$"
@@ -19,6 +19,6 @@ export PYTHONPATH
 ###############################################################
 # Execute the JJOB
-"${HOMEgfs}/jobs/JGLOBAL_ATM_ANALYSIS_RUN"
+"${HOMEgfs}/jobs/JGLOBAL_ATM_ANALYSIS_VARIATIONAL"
 status=$?
 exit "${status}"
diff --git a/parm/config/gfs/config.atmanlfv3inc b/parm/config/gfs/config.atmanlfv3inc
new file mode 100644
index 0000000000..14c11d3dd3
--- /dev/null
+++ b/parm/config/gfs/config.atmanlfv3inc
@@ -0,0 +1,14 @@
+#! /usr/bin/env bash
+
+########## config.atmanlfv3inc ##########
+# Atm Var Analysis specific
+
+echo "BEGIN: config.atmanlfv3inc"
+
+# Get task specific resources
+. "${EXPDIR}/config.resources" atmanlfv3inc
+
+export JEDIYAML=${PARMgfs}/gdas/atm/utils/fv3jedi_fv3inc_variational.yaml.j2
+export JEDIEXE=${EXECgfs}/fv3jedi_fv3inc.x
+
+echo "END: config.atmanlfv3inc"
diff --git a/parm/config/gfs/config.atmanlrun b/parm/config/gfs/config.atmanlrun
deleted file mode 100644
index 68b7615718..0000000000
--- a/parm/config/gfs/config.atmanlrun
+++ /dev/null
@@ -1,11 +0,0 @@
-#! /usr/bin/env bash
-
-########## config.atmanlrun ##########
-# Atm Var Analysis specific
-
-echo "BEGIN: config.atmanlrun"
-
-# Get task specific resources
-. "${EXPDIR}/config.resources" atmanlrun
-
-echo "END: config.atmanlrun"
diff --git a/parm/config/gfs/config.atmanlvar b/parm/config/gfs/config.atmanlvar
new file mode 100644
index 0000000000..cbc0334a08
--- /dev/null
+++ b/parm/config/gfs/config.atmanlvar
@@ -0,0 +1,11 @@
+#! /usr/bin/env bash
+
+########## config.atmanlvar ##########
+# Atm Var Analysis specific
+
+echo "BEGIN: config.atmanlvar"
+
+# Get task specific resources
+. "${EXPDIR}/config.resources" atmanlvar
+
+echo "END: config.atmanlvar"
diff --git a/parm/config/gfs/config.resources b/parm/config/gfs/config.resources
index 98fc3b2668..9e229de11a 100644
--- a/parm/config/gfs/config.resources
+++ b/parm/config/gfs/config.resources
@@ -10,7 +10,7 @@ if (( $# != 1 )); then
 echo "argument can be any one of the following:"
 echo "stage_ic aerosol_init"
 echo "prep prepsnowobs prepatmiodaobs"
-echo "atmanlinit atmanlrun atmanlfinal"
+echo "atmanlinit atmanlvar atmanlfv3inc atmanlfinal"
 echo "atmensanlinit atmensanlrun atmensanlfinal"
 echo "snowanl"
 echo "aeroanlinit aeroanlrun aeroanlfinal"
@@ -209,17 +209,31 @@ case ${step} in
 export memory_atmanlinit="3072M"
 ;;
-"atmanlrun")
+"atmanlvar")
 export layout_x=${layout_x_atmanl}
 export layout_y=${layout_y_atmanl}
-export wtime_atmanlrun="00:30:00"
-export npe_atmanlrun=$(( layout_x * layout_y * 6 ))
-export npe_atmanlrun_gfs=$(( layout_x * layout_y * 6 ))
-export nth_atmanlrun=1
-export nth_atmanlrun_gfs=${nth_atmanlrun}
-export npe_node_atmanlrun=$(( npe_node_max / nth_atmanlrun ))
-export memory_atmanlrun="96GB"
+export wtime_atmanlvar="00:30:00"
+export npe_atmanlvar=$(( layout_x * layout_y * 6 ))
+export npe_atmanlvar_gfs=$(( layout_x * layout_y * 6 ))
+export nth_atmanlvar=1
+export nth_atmanlvar_gfs=${nth_atmanlvar}
+export npe_node_atmanlvar=$(( npe_node_max / nth_atmanlvar ))
+export memory_atmanlvar="96GB"
+export is_exclusive=True
+;;
+
+"atmanlfv3inc")
+export layout_x=${layout_x_atmanl}
+export layout_y=${layout_y_atmanl}
+
+export wtime_atmanlfv3inc="00:30:00"
+export npe_atmanlfv3inc=$(( layout_x * layout_y * 6 ))
+export npe_atmanlfv3inc_gfs=$(( layout_x * layout_y * 6 ))
+export nth_atmanlfv3inc=1
+export nth_atmanlfv3inc_gfs=${nth_atmanlfv3inc}
+export npe_node_atmanlfv3inc=$(( npe_node_max / nth_atmanlfv3inc ))
+export memory_atmanlfv3inc="96GB"
 export is_exclusive=True
 ;;
diff --git a/parm/config/gfs/config.ufs b/parm/config/gfs/config.ufs
index 0db6f090a5..7cec5a0822 100644
--- a/parm/config/gfs/config.ufs
+++ b/parm/config/gfs/config.ufs
@@ -241,7 +241,7 @@
 export cplice=".false."
 export cplchm=".false."
 export cplwav=".false."
 export cplwav2atm=".false."
-export CCPP_SUITE="FV3_GFS_v17_p8_ugwpv1"
+export CCPP_SUITE="${CCPP_SUITE:-FV3_GFS_v17_p8_ugwpv1}"
 model_list="atm"
 # Mediator specific settings
diff --git a/parm/gdas/atm_jedi_fix.yaml.j2 b/parm/gdas/atm_jedi_fix.yaml.j2
index 69039baddf..a9cb2309f0 100644
--- a/parm/gdas/atm_jedi_fix.yaml.j2
+++ b/parm/gdas/atm_jedi_fix.yaml.j2
@@ -5,3 +5,5 @@ copy:
 - ['{{ FIXgfs }}/gdas/fv3jedi/fv3files/fmsmpp.nml', '{{ DATA }}/fv3jedi/fmsmpp.nml']
 - ['{{ FIXgfs }}/gdas/fv3jedi/fv3files/field_table_gfdl', '{{ DATA }}/fv3jedi/field_table']
 - ['{{ PARMgfs }}/gdas/io/fv3jedi_fieldmetadata_restart.yaml', '{{ DATA }}/fv3jedi/fv3jedi_fieldmetadata_restart.yaml']
+- ['{{ PARMgfs }}/gdas/io/fv3jedi_fieldmetadata_history.yaml', '{{ DATA }}/fv3jedi/fv3jedi_fieldmetadata_history.yaml']
+- ['{{ PARMgfs }}/gdas/io/fv3jedi_fieldmetadata_fv3inc.yaml', '{{ DATA }}/fv3jedi/fv3jedi_fieldmetadata_fv3inc.yaml']
diff --git a/scripts/exglobal_atm_analysis_fv3_increment.py b/scripts/exglobal_atm_analysis_fv3_increment.py
new file mode 100755
index 0000000000..57f2e7c9ee
--- /dev/null
+++ b/scripts/exglobal_atm_analysis_fv3_increment.py
@@ -0,0 +1,23 @@
+#!/usr/bin/env python3
+# exglobal_atm_analysis_fv3_increment.py
+# This script creates an AtmAnalysis object
+# and runs the increment method
+# which converts the JEDI increment into an FV3 increment
+import os
+
+from wxflow import Logger, cast_strdict_as_dtypedict
+from pygfs.task.atm_analysis import AtmAnalysis
+
+# Initialize root logger
+logger = Logger(level='DEBUG', colored_log=True)
+
+
+if __name__ == '__main__':
+
+    # Take configuration from environment and cast it as python dictionary
+    config = cast_strdict_as_dtypedict(os.environ)
+
+    # Instantiate the atm analysis task
+    AtmAnl = AtmAnalysis(config)
+    AtmAnl.init_fv3_increment()
+    AtmAnl.fv3_increment()
diff --git a/scripts/exglobal_atm_analysis_run.py b/scripts/exglobal_atm_analysis_variational.py
similarity index 89%
rename from scripts/exglobal_atm_analysis_run.py
rename to scripts/exglobal_atm_analysis_variational.py
index 8adbe4a267..07bc208331 100755
--- a/scripts/exglobal_atm_analysis_run.py
+++ b/scripts/exglobal_atm_analysis_variational.py
@@ -1,5 +1,5 @@
 #!/usr/bin/env python3
-# exglobal_atm_analysis_run.py
+# exglobal_atm_analysis_variational.py
 # This script creates an AtmAnalysis object
 # and runs the execute method
 # which executes the global atm variational analysis
@@ -19,4 +19,4 @@
     # Instantiate the atm analysis task
     AtmAnl = AtmAnalysis(config)
-    AtmAnl.execute()
+    AtmAnl.variational()
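
Taken together, the scripts above form the runtime chain for the new FV3 increment conversion step; the outline below summarizes it using only names introduced in this diff, and the hand launch at the end is just a debugging illustration that assumes the usual job environment is already exported:

    # rocoto task <cdump>atmanlfv3inc (defined in workflow/rocoto/gfs_tasks.py, below)
    #   -> jobs/rocoto/atmanlfv3inc.sh                loads UFSDA modules, sets job/jobid, PYTHONPATH
    #   -> jobs/JGLOBAL_ATM_ANALYSIS_FV3_INCREMENT    jjob_header with "base atmanl atmanlfv3inc"
    #   -> scripts/exglobal_atm_analysis_fv3_increment.py
    #        AtmAnalysis.init_fv3_increment()   stages fv3jedi_fv3inc_variational.yaml.j2, links fv3jedi_fv3inc.x
    #        AtmAnalysis.fv3_increment()        runs the executable via ${APRUN_ATMANLFV3INC}
    #
    # Hand launch for debugging (environment assumed to be prepared):
    "${HOMEgfs}/jobs/rocoto/atmanlfv3inc.sh"
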
diff --git a/sorc/build_all.sh b/sorc/build_all.sh
index 71c877b38d..8b781d167f 100755
--- a/sorc/build_all.sh
+++ b/sorc/build_all.sh
@@ -16,11 +16,13 @@ function _usage() {
 Builds all of the global-workflow components by calling the individual build
 scripts in sequence.
-Usage: ${BASH_SOURCE[0]} [-a UFS_app][-c build_config][-d][-h][-j n][-v][-w]
+Usage: ${BASH_SOURCE[0]} [-a UFS_app][-c build_config][-d][-f][-h][-j n][-v][-w]
   -a UFS_app:
     Build a specific UFS app instead of the default
   -d:
     Build in debug mode
+  -f:
+    Build the UFS model using the -DFASTER=ON option
   -g:
     Build GSI
   -h:
@@ -53,9 +55,10 @@ _build_job_max=20
 _quick_kill="NO"
 # Reset option counter in case this script is sourced
 OPTIND=1
-while getopts ":a:dghj:kuvw" option; do
+while getopts ":a:dfghj:kuvw" option; do
 case "${option}" in
 a) _build_ufs_opt+="-a ${OPTARG} ";;
+ f) _build_ufs_opt+="-f ";;
 d) _build_debug="-d" ;;
 g) _build_gsi="YES" ;;
 h) _usage;;
diff --git a/sorc/build_ufs.sh b/sorc/build_ufs.sh
index ac38d1dcbe..7e84eaebc2 100755
--- a/sorc/build_ufs.sh
+++ b/sorc/build_ufs.sh
@@ -5,13 +5,14 @@ cwd=$(pwd)
 # Default settings
 APP="S2SWA"
-CCPP_SUITES="FV3_GFS_v17_p8_ugwpv1,FV3_GFS_v17_coupled_p8_ugwpv1" # TODO: does the g-w need to build with all these CCPP_SUITES?
+CCPP_SUITES="FV3_GFS_v17_p8_ugwpv1,FV3_GFS_v17_coupled_p8_ugwpv1,FV3_global_nest_v1" # TODO: does the g-w need to build with all these CCPP_SUITES?
 PDLIB="ON"
-while getopts ":da:j:vw" option; do
+while getopts ":da:fj:vw" option; do
 case "${option}" in
 d) BUILD_TYPE="Debug";;
 a) APP="${OPTARG}";;
+ f) FASTER="ON";;
 j) BUILD_JOBS="${OPTARG}";;
 v) export BUILD_VERBOSE="YES";;
 w) PDLIB="OFF";;
@@ -31,7 +32,11 @@ source "./tests/module-setup.sh"
 MAKE_OPT="-DAPP=${APP} -D32BIT=ON -DCCPP_SUITES=${CCPP_SUITES}"
 [[ ${PDLIB:-"OFF"} = "ON" ]] && MAKE_OPT+=" -DPDLIB=ON"
-[[ ${BUILD_TYPE:-"Release"} = "Debug" ]] && MAKE_OPT+=" -DDEBUG=ON"
+if [[ ${BUILD_TYPE:-"Release"} = "Debug" ]] ; then
+  MAKE_OPT+=" -DDEBUG=ON"
+elif [[ "${FASTER:-OFF}" == ON ]] ; then
+  MAKE_OPT+=" -DFASTER=ON"
+fi
 COMPILE_NR=0
 CLEAN_BEFORE=YES
 CLEAN_AFTER=NO
diff --git a/sorc/gdas.cd b/sorc/gdas.cd
index 2198b41956..fc62ef5f4d 160000
--- a/sorc/gdas.cd
+++ b/sorc/gdas.cd
@@ -1 +1 @@
-Subproject commit 2198b419567cf7efa7404cd076e76e01d86f9e58
+Subproject commit fc62ef5f4ddcd2c13df515b593e5ce1c7b5dd10b
diff --git a/sorc/gsi_utils.fd b/sorc/gsi_utils.fd
index 67b014d8d3..68bc14d30b 160000
--- a/sorc/gsi_utils.fd
+++ b/sorc/gsi_utils.fd
@@ -1 +1 @@
-Subproject commit 67b014d8d3e5acc1d21aca15e3fe2d66d327a206
+Subproject commit 68bc14d30b3ca8f890f2761c8bdd0a3cea635cf1
diff --git a/sorc/link_workflow.sh b/sorc/link_workflow.sh
index b51c234529..b3512d14d9 100755
--- a/sorc/link_workflow.sh
+++ b/sorc/link_workflow.sh
@@ -341,6 +341,7 @@ if [[ -d "${HOMEgfs}/sorc/gdas.cd/build" ]]; then
     "fv3jedi_enshofx.x" \
     "fv3jedi_hofx_nomodel.x" \
     "fv3jedi_testdata_downloader.py" \
+    "fv3jedi_fv3inc.x" \
     "gdas_ens_handler.x" \
    "gdas_incr_handler.x" \
    "gdas_obsprovider2ioda.x" \
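
For reference, how the new -f flag added above is used; note that -d and -f are mutually exclusive in the new if/elif (debug wins). The -j value below is illustrative, and the commands assume they are run from a global-workflow checkout:

    cd sorc
    ./build_all.sh -f -j 8            # -f is forwarded to build_ufs.sh via _build_ufs_opt
    ./build_ufs.sh -f -a S2SWA -j 8   # or build the model alone; FASTER=ON becomes -DFASTER=ON
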
"increment_filename": "'inc.fullres'", "fhr": 6, + "jedi": python2fortran_bool[JEDI], } gsi_utils.write_nml(namelist, CalcAnlDir6 + '/calc_analysis.nml') @@ -311,6 +314,7 @@ def calcanl_gfs(DoIAU, l4DEnsVar, Write4Danl, ComOut, APrefix, "firstguess_filename": "'ges.ensres'", "increment_filename": "'siginc.nc'", "fhr": fh, + "jedi": python2fortran_bool[JEDI], } gsi_utils.write_nml(namelist, CalcAnlDir6 + '/calc_analysis.nml') @@ -356,10 +360,11 @@ def calcanl_gfs(DoIAU, l4DEnsVar, Write4Danl, ComOut, APrefix, NEMSGet = os.getenv('NEMSIOGET', 'nemsio_get') IAUHrs = list(map(int, os.getenv('IAUFHRS', '6').split(','))) Cdump = os.getenv('CDUMP', 'gdas') + JEDI = gsi_utils.isTrue(os.getenv('DO_JEDIATMVAR', 'YES')) print(locals()) calcanl_gfs(DoIAU, l4DEnsVar, Write4Danl, ComOut, APrefix, ComIn_Ges, GPrefix, FixDir, atmges_ens_mean, RunDir, NThreads, NEMSGet, IAUHrs, ExecCMD, ExecCMDMPI, ExecAnl, ExecChgresInc, - Cdump) + Cdump, JEDI) diff --git a/ush/python/pygfs/task/analysis.py b/ush/python/pygfs/task/analysis.py index 2221fb7b34..078e013e7f 100644 --- a/ush/python/pygfs/task/analysis.py +++ b/ush/python/pygfs/task/analysis.py @@ -229,7 +229,7 @@ def link_jediexe(self) -> None: rm_p(exe_dest) os.symlink(exe_src, exe_dest) - return + return exe_dest @staticmethod @logit(logger) diff --git a/ush/python/pygfs/task/atm_analysis.py b/ush/python/pygfs/task/atm_analysis.py index 6348bdf319..ebeb0c7ba6 100644 --- a/ush/python/pygfs/task/atm_analysis.py +++ b/ush/python/pygfs/task/atm_analysis.py @@ -113,11 +113,11 @@ def initialize(self: Analysis) -> None: FileHandler({'mkdir': newdirs}).sync() @logit(logger) - def execute(self: Analysis) -> None: + def variational(self: Analysis) -> None: chdir(self.task_config.DATA) - exec_cmd = Executable(self.task_config.APRUN_ATMANL) + exec_cmd = Executable(self.task_config.APRUN_ATMANLVAR) exec_name = os.path.join(self.task_config.DATA, 'fv3jedi_var.x') exec_cmd.add_default_arg(exec_name) exec_cmd.add_default_arg(self.task_config.jedi_yaml) @@ -132,6 +132,23 @@ def execute(self: Analysis) -> None: pass + @logit(logger) + def init_fv3_increment(self: Analysis) -> None: + # Setup JEDI YAML file + self.task_config.jedi_yaml = os.path.join(self.runtime_config.DATA, os.path.basename(self.task_config.JEDIYAML)) + save_as_yaml(self.get_jedi_config(), self.task_config.jedi_yaml) + + # Link JEDI executable to run directory + self.task_config.jedi_exe = self.link_jediexe() + + @logit(logger) + def fv3_increment(self: Analysis) -> None: + # Run executable + self.execute_jediexe(self.runtime_config.DATA, + self.task_config.APRUN_ATMANLFV3INC, + self.task_config.jedi_exe, + self.task_config.jedi_yaml) + @logit(logger) def finalize(self: Analysis) -> None: """Finalize a global atm analysis @@ -209,9 +226,17 @@ def finalize(self: Analysis) -> None: } FileHandler(bias_copy).sync() - # Create UFS model readable atm increment file from UFS-DA atm increment - logger.info("Create UFS model readable atm increment file from UFS-DA atm increment") - self.jedi2fv3inc() + # Copy FV3 atm increment to comrot directory + logger.info("Copy UFS model readable atm increment file") + cdate = to_fv3time(self.task_config.current_cycle) + cdate_inc = cdate.replace('.', '_') + src = os.path.join(self.task_config.DATA, 'anl', f"atminc.{cdate_inc}z.nc4") + dest = os.path.join(self.task_config.COM_ATMOS_ANALYSIS, f'{self.task_config.CDUMP}.t{self.task_config.cyc:02d}z.atminc.nc') + logger.debug(f"Copying {src} to {dest}") + inc_copy = { + 'copy': [[src, dest]] + } + FileHandler(inc_copy).sync() def 
diff --git a/ush/python/pygfs/task/atm_analysis.py b/ush/python/pygfs/task/atm_analysis.py
index 6348bdf319..ebeb0c7ba6 100644
--- a/ush/python/pygfs/task/atm_analysis.py
+++ b/ush/python/pygfs/task/atm_analysis.py
@@ -113,11 +113,11 @@ def initialize(self: Analysis) -> None:
         FileHandler({'mkdir': newdirs}).sync()
     @logit(logger)
-    def execute(self: Analysis) -> None:
+    def variational(self: Analysis) -> None:
         chdir(self.task_config.DATA)
-        exec_cmd = Executable(self.task_config.APRUN_ATMANL)
+        exec_cmd = Executable(self.task_config.APRUN_ATMANLVAR)
         exec_name = os.path.join(self.task_config.DATA, 'fv3jedi_var.x')
         exec_cmd.add_default_arg(exec_name)
         exec_cmd.add_default_arg(self.task_config.jedi_yaml)
@@ -132,6 +132,23 @@ def execute(self: Analysis) -> None:
         pass
+    @logit(logger)
+    def init_fv3_increment(self: Analysis) -> None:
+        # Setup JEDI YAML file
+        self.task_config.jedi_yaml = os.path.join(self.runtime_config.DATA, os.path.basename(self.task_config.JEDIYAML))
+        save_as_yaml(self.get_jedi_config(), self.task_config.jedi_yaml)
+
+        # Link JEDI executable to run directory
+        self.task_config.jedi_exe = self.link_jediexe()
+
+    @logit(logger)
+    def fv3_increment(self: Analysis) -> None:
+        # Run executable
+        self.execute_jediexe(self.runtime_config.DATA,
+                             self.task_config.APRUN_ATMANLFV3INC,
+                             self.task_config.jedi_exe,
+                             self.task_config.jedi_yaml)
+
     @logit(logger)
     def finalize(self: Analysis) -> None:
         """Finalize a global atm analysis
@@ -209,9 +226,17 @@ def finalize(self: Analysis) -> None:
         }
         FileHandler(bias_copy).sync()
-        # Create UFS model readable atm increment file from UFS-DA atm increment
-        logger.info("Create UFS model readable atm increment file from UFS-DA atm increment")
-        self.jedi2fv3inc()
+        # Copy FV3 atm increment to comrot directory
+        logger.info("Copy UFS model readable atm increment file")
+        cdate = to_fv3time(self.task_config.current_cycle)
+        cdate_inc = cdate.replace('.', '_')
+        src = os.path.join(self.task_config.DATA, 'anl', f"atminc.{cdate_inc}z.nc4")
+        dest = os.path.join(self.task_config.COM_ATMOS_ANALYSIS, f'{self.task_config.CDUMP}.t{self.task_config.cyc:02d}z.atminc.nc')
+        logger.debug(f"Copying {src} to {dest}")
+        inc_copy = {
+            'copy': [[src, dest]]
+        }
+        FileHandler(inc_copy).sync()
     def clean(self):
         super().clean()
@@ -401,43 +426,3 @@ def _get_berror_dict_gsibec(config: Dict[str, Any]) -> Dict[str, List[str]]:
             'copy': berror_list,
         }
         return berror_dict
-
-    @logit(logger)
-    def jedi2fv3inc(self: Analysis) -> None:
-        """Generate UFS model readable analysis increment
-
-        This method writes a UFS DA atm increment in UFS model readable format.
-        This includes:
-        - write UFS-DA atm increments using variable names expected by UFS model
-        - compute and write delp increment
-        - compute and write hydrostatic delz increment
-
-        Please note that some of these steps are temporary and will be modified
-        once the modle is able to directly read atm increments.
-
-        """
-        # Select the atm guess file based on the analysis and background resolutions
-        # Fields from the atm guess are used to compute the delp and delz increments
-        case_anl = int(self.task_config.CASE_ANL[1:])
-        case = int(self.task_config.CASE[1:])
-
-        file = f"{self.task_config.GPREFIX}" + "atmf006" + f"{'' if case_anl == case else '.ensres'}" + ".nc"
-        atmges_fv3 = os.path.join(self.task_config.COM_ATMOS_HISTORY_PREV, file)
-
-        # Set the path/name to the input UFS-DA atm increment file (atminc_jedi)
-        # and the output UFS model atm increment file (atminc_fv3)
-        cdate = to_fv3time(self.task_config.current_cycle)
-        cdate_inc = cdate.replace('.', '_')
-        atminc_jedi = os.path.join(self.task_config.DATA, 'anl', f'atminc.{cdate_inc}z.nc4')
-        atminc_fv3 = os.path.join(self.task_config.COM_ATMOS_ANALYSIS, f"{self.task_config.CDUMP}.t{self.task_config.cyc:02d}z.atminc.nc")
-
-        # Reference the python script which does the actual work
-        incpy = os.path.join(self.task_config.HOMEgfs, 'ush/jediinc2fv3.py')
-
-        # Execute incpy to create the UFS model atm increment file
-        cmd = Executable(incpy)
-        cmd.add_default_arg(atmges_fv3)
-        cmd.add_default_arg(atminc_jedi)
-        cmd.add_default_arg(atminc_fv3)
-        logger.debug(f"Executing {cmd}")
-        cmd(output='stdout', error='stderr')
diff --git a/workflow/applications/applications.py b/workflow/applications/applications.py
index b20c5a7c28..3a8d23f744 100644
--- a/workflow/applications/applications.py
+++ b/workflow/applications/applications.py
@@ -145,7 +145,7 @@ def _source_configs(self, conf: Configuration) -> Dict[str, Any]:
                 files += ['config.anal', 'config.eupd']
             elif config in ['efcs']:
                 files += ['config.fcst', 'config.efcs']
-            elif config in ['atmanlinit', 'atmanlrun']:
+            elif config in ['atmanlinit', 'atmanlvar', 'atmanlfv3inc']:
                 files += ['config.atmanl', f'config.{config}']
             elif config in ['atmensanlinit', 'atmensanlrun']:
                 files += ['config.atmensanl', f'config.{config}']
diff --git a/workflow/applications/gfs_cycled.py b/workflow/applications/gfs_cycled.py
index fdb81205db..c2a6a32f02 100644
--- a/workflow/applications/gfs_cycled.py
+++ b/workflow/applications/gfs_cycled.py
@@ -38,7 +38,7 @@ def _get_app_configs(self):
         configs = ['prep']
         if self.do_jediatmvar:
-            configs += ['prepatmiodaobs', 'atmanlinit', 'atmanlrun', 'atmanlfinal']
+            configs += ['prepatmiodaobs', 'atmanlinit', 'atmanlvar', 'atmanlfv3inc', 'atmanlfinal']
         else:
             configs += ['anal', 'analdiag']
@@ -136,7 +136,7 @@ def get_task_names(self):
         gdas_gfs_common_cleanup_tasks = ['arch', 'cleanup']
         if self.do_jediatmvar:
-            gdas_gfs_common_tasks_before_fcst += ['prepatmiodaobs', 'atmanlinit', 'atmanlrun', 'atmanlfinal']
+            gdas_gfs_common_tasks_before_fcst += ['prepatmiodaobs', 'atmanlinit', 'atmanlvar', 'atmanlfv3inc', 'atmanlfinal']
         else:
             gdas_gfs_common_tasks_before_fcst += ['anal']
diff --git a/workflow/rocoto/gfs_tasks.py
b/workflow/rocoto/gfs_tasks.py
index 8789a418f9..bba7bac3dd 100644
--- a/workflow/rocoto/gfs_tasks.py
+++ b/workflow/rocoto/gfs_tasks.py
@@ -411,21 +411,45 @@ def atmanlinit(self):
         return task
-    def atmanlrun(self):
+    def atmanlvar(self):
         deps = []
         dep_dict = {'type': 'task', 'name': f'{self.cdump}atmanlinit'}
         deps.append(rocoto.add_dependency(dep_dict))
         dependencies = rocoto.create_dependency(dep=deps)
-        resources = self.get_resource('atmanlrun')
-        task_name = f'{self.cdump}atmanlrun'
+        resources = self.get_resource('atmanlvar')
+        task_name = f'{self.cdump}atmanlvar'
         task_dict = {'task_name': task_name,
                      'resources': resources,
                      'dependency': dependencies,
                      'envars': self.envars,
                      'cycledef': self.cdump.replace('enkf', ''),
-                     'command': f'{self.HOMEgfs}/jobs/rocoto/atmanlrun.sh',
+                     'command': f'{self.HOMEgfs}/jobs/rocoto/atmanlvar.sh',
+                     'job_name': f'{self.pslot}_{task_name}_@H',
+                     'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
+                     'maxtries': '&MAXTRIES;'
+                     }
+
+        task = rocoto.create_task(task_dict)
+
+        return task
+
+    def atmanlfv3inc(self):
+
+        deps = []
+        dep_dict = {'type': 'task', 'name': f'{self.cdump}atmanlvar'}
+        deps.append(rocoto.add_dependency(dep_dict))
+        dependencies = rocoto.create_dependency(dep=deps)
+
+        resources = self.get_resource('atmanlfv3inc')
+        task_name = f'{self.cdump}atmanlfv3inc'
+        task_dict = {'task_name': task_name,
+                     'resources': resources,
+                     'dependency': dependencies,
+                     'envars': self.envars,
+                     'cycledef': self.cdump.replace('enkf', ''),
+                     'command': f'{self.HOMEgfs}/jobs/rocoto/atmanlfv3inc.sh',
                      'job_name': f'{self.pslot}_{task_name}_@H',
                      'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
                      'maxtries': '&MAXTRIES;'
@@ -438,7 +462,7 @@ def atmanlrun(self):
     def atmanlfinal(self):
         deps = []
-        dep_dict = {'type': 'task', 'name': f'{self.cdump}atmanlrun'}
+        dep_dict = {'type': 'task', 'name': f'{self.cdump}atmanlfv3inc'}
         deps.append(rocoto.add_dependency(dep_dict))
         dependencies = rocoto.create_dependency(dep_condition='and', dep=deps)
diff --git a/workflow/rocoto/tasks.py b/workflow/rocoto/tasks.py
index e917f2c642..3abae9b5b7 100644
--- a/workflow/rocoto/tasks.py
+++ b/workflow/rocoto/tasks.py
@@ -13,7 +13,7 @@ class Tasks:
     SERVICE_TASKS = ['arch', 'earc']
     VALID_TASKS = ['aerosol_init', 'stage_ic', 'prep', 'anal', 'sfcanl', 'analcalc', 'analdiag', 'arch', "cleanup",
-                   'prepatmiodaobs', 'atmanlinit', 'atmanlrun', 'atmanlfinal',
+                   'prepatmiodaobs', 'atmanlinit', 'atmanlvar', 'atmanlfv3inc', 'atmanlfinal',
                    'prepoceanobs', 'ocnanalprep', 'ocnanalbmat', 'ocnanalrun', 'ocnanalecen', 'ocnanalchkpt', 'ocnanalpost', 'ocnanalvrfy',
                    'earc', 'ecen', 'echgres', 'ediag', 'efcs',